Diffstat (limited to 'vendor')
-rw-r--r--vendor/addr2line-0.17.0/.cargo-checksum.json1
-rw-r--r--vendor/addr2line-0.17.0/CHANGELOG.md260
-rw-r--r--vendor/addr2line-0.17.0/Cargo.lock430
-rw-r--r--vendor/addr2line-0.17.0/Cargo.toml120
-rw-r--r--vendor/addr2line-0.17.0/LICENSE-APACHE (renamed from vendor/parking_lot_core-0.8.5/LICENSE-APACHE)0
-rw-r--r--vendor/addr2line-0.17.0/LICENSE-MIT25
-rw-r--r--vendor/addr2line-0.17.0/README.md48
-rw-r--r--vendor/addr2line-0.17.0/bench.plot.r23
-rwxr-xr-xvendor/addr2line-0.17.0/benchmark.sh112
-rw-r--r--vendor/addr2line-0.17.0/coverage.sh5
-rw-r--r--vendor/addr2line-0.17.0/examples/addr2line.rs299
-rw-r--r--vendor/addr2line-0.17.0/rustfmt.toml1
-rw-r--r--vendor/addr2line-0.17.0/src/function.rs520
-rw-r--r--vendor/addr2line-0.17.0/src/lazy.rs29
-rw-r--r--vendor/addr2line-0.17.0/src/lib.rs1192
-rw-r--r--vendor/addr2line-0.17.0/tests/correctness.rs91
-rw-r--r--vendor/addr2line-0.17.0/tests/output_equivalence.rs145
-rw-r--r--vendor/addr2line-0.17.0/tests/parse.rs118
-rw-r--r--vendor/addr2line/.cargo-checksum.json2
-rw-r--r--vendor/addr2line/CHANGELOG.md27
-rw-r--r--vendor/addr2line/Cargo.lock180
-rw-r--r--vendor/addr2line/Cargo.toml72
-rw-r--r--vendor/addr2line/examples/addr2line.rs212
-rw-r--r--vendor/addr2line/src/function.rs30
-rw-r--r--vendor/addr2line/src/lazy.rs24
-rw-r--r--vendor/addr2line/src/lib.rs246
-rw-r--r--vendor/addr2line/tests/correctness.rs11
-rw-r--r--vendor/addr2line/tests/output_equivalence.rs1
-rw-r--r--vendor/addr2line/tests/parse.rs4
-rw-r--r--vendor/ansi_term/.cargo-checksum.json1
-rw-r--r--vendor/ansi_term/Cargo.lock168
-rw-r--r--vendor/ansi_term/Cargo.toml43
-rw-r--r--vendor/ansi_term/examples/basic_colours.rs18
-rw-r--r--vendor/ansi_term/examples/rgb_colours.rs23
-rw-r--r--vendor/ansi_term/src/style.rs521
-rw-r--r--vendor/anyhow/.cargo-checksum.json2
-rw-r--r--vendor/anyhow/Cargo.toml7
-rw-r--r--vendor/anyhow/README.md4
-rw-r--r--vendor/anyhow/src/context.rs19
-rw-r--r--vendor/anyhow/src/lib.rs2
-rw-r--r--vendor/anyhow/tests/test_ensure.rs1
-rw-r--r--vendor/anyhow/tests/ui/empty-ensure.stderr5
-rw-r--r--vendor/anyhow/tests/ui/no-impl.stderr2
-rw-r--r--vendor/anyhow/tests/ui/temporary-value.stderr2
-rw-r--r--vendor/backtrace/.cargo-checksum.json2
-rw-r--r--vendor/backtrace/Cargo.lock58
-rw-r--r--vendor/backtrace/Cargo.toml18
-rw-r--r--vendor/backtrace/src/backtrace/miri.rs6
-rw-r--r--vendor/backtrace/src/print.rs2
-rw-r--r--vendor/backtrace/src/symbolize/gimli.rs2
-rw-r--r--vendor/backtrace/src/symbolize/gimli/libs_dl_iterate_phdr.rs20
-rw-r--r--vendor/backtrace/src/symbolize/gimli/libs_macos.rs4
-rw-r--r--vendor/backtrace/src/symbolize/gimli/macho.rs4
-rw-r--r--vendor/backtrace/src/symbolize/gimli/parse_running_mmaps_unix.rs242
-rw-r--r--vendor/backtrace/src/windows.rs6
-rw-r--r--vendor/backtrace/tests/common/mod.rs14
-rw-r--r--vendor/backtrace/tests/concurrent-panics.rs14
-rw-r--r--vendor/backtrace/tests/current-exe-mismatch.rs137
-rw-r--r--vendor/backtrace/tests/skip_inner_frames.rs2
-rw-r--r--vendor/camino/.cargo-checksum.json2
-rw-r--r--vendor/camino/CHANGELOG.md8
-rw-r--r--vendor/camino/Cargo.toml2
-rw-r--r--vendor/camino/release.toml8
-rw-r--r--vendor/camino/src/lib.rs42
-rw-r--r--vendor/camino/src/proptest_impls.rs3
-rw-r--r--vendor/camino/tests/integration_tests.rs2
-rw-r--r--vendor/cargo_metadata/.cargo-checksum.json2
-rw-r--r--vendor/cargo_metadata/CHANGELOG.md38
-rw-r--r--vendor/cargo_metadata/Cargo.toml5
-rw-r--r--vendor/cargo_metadata/src/errors.rs82
-rw-r--r--vendor/cargo_metadata/src/lib.rs154
-rw-r--r--vendor/cargo_metadata/src/messages.rs3
-rw-r--r--vendor/cargo_metadata/tests/test_samples.rs27
-rw-r--r--vendor/cc/.cargo-checksum.json2
-rw-r--r--vendor/cc/Cargo.lock6
-rw-r--r--vendor/cc/Cargo.toml2
-rw-r--r--vendor/cc/src/lib.rs95
-rw-r--r--vendor/compiler_builtins/.cargo-checksum.json2
-rw-r--r--vendor/compiler_builtins/Cargo.lock2
-rw-r--r--vendor/compiler_builtins/Cargo.toml2
-rw-r--r--vendor/compiler_builtins/src/float/conv.rs20
-rw-r--r--vendor/compiler_builtins/src/int/shift.rs9
-rw-r--r--vendor/dissimilar/.cargo-checksum.json2
-rw-r--r--vendor/dissimilar/Cargo.toml10
-rw-r--r--vendor/dissimilar/LICENSE-APACHE25
-rw-r--r--vendor/dissimilar/README.md6
-rw-r--r--vendor/dissimilar/src/find.rs14
-rw-r--r--vendor/dissimilar/src/lib.rs169
-rw-r--r--vendor/dissimilar/src/range.rs49
-rw-r--r--vendor/dissimilar/src/tests.rs131
-rw-r--r--vendor/dissimilar/tests/test.rs16
-rw-r--r--vendor/elsa/.cargo-checksum.json1
-rw-r--r--vendor/elsa/Cargo.lock39
-rw-r--r--vendor/elsa/Cargo.toml47
-rw-r--r--vendor/elsa/LICENSE-APACHE (renamed from vendor/toml/LICENSE-APACHE)0
-rw-r--r--vendor/elsa/LICENSE-MIT27
-rw-r--r--vendor/elsa/README.md19
-rw-r--r--vendor/elsa/examples/arena.rs56
-rw-r--r--vendor/elsa/examples/fluentresource.rs50
-rw-r--r--vendor/elsa/examples/mutable_arena.rs79
-rw-r--r--vendor/elsa/examples/string_interner.rs61
-rw-r--r--vendor/elsa/examples/sync.rs26
-rw-r--r--vendor/elsa/src/index_map.rs215
-rw-r--r--vendor/elsa/src/index_set.rs180
-rw-r--r--vendor/elsa/src/lib.rs29
-rw-r--r--vendor/elsa/src/map.rs451
-rw-r--r--vendor/elsa/src/sync.rs624
-rw-r--r--vendor/elsa/src/vec.rs347
-rw-r--r--vendor/ena/.cargo-checksum.json2
-rw-r--r--vendor/ena/Cargo.toml23
-rw-r--r--vendor/ena/README.md2
-rw-r--r--vendor/ena/src/unify/mod.rs24
-rw-r--r--vendor/filetime/.cargo-checksum.json2
-rw-r--r--vendor/filetime/Cargo.toml2
-rw-r--r--vendor/filetime/src/unix/linux.rs33
-rw-r--r--vendor/filetime/src/unix/mod.rs15
-rw-r--r--vendor/flate2/.cargo-checksum.json2
-rw-r--r--vendor/flate2/Cargo.lock38
-rw-r--r--vendor/flate2/Cargo.toml8
-rw-r--r--vendor/gimli-0.26.2/.cargo-checksum.json1
-rw-r--r--vendor/gimli-0.26.2/CHANGELOG.md873
-rw-r--r--vendor/gimli-0.26.2/CONTRIBUTING.md137
-rw-r--r--vendor/gimli-0.26.2/Cargo.lock358
-rw-r--r--vendor/gimli-0.26.2/Cargo.toml146
-rw-r--r--vendor/gimli-0.26.2/LICENSE-APACHE201
-rw-r--r--vendor/gimli-0.26.2/LICENSE-MIT25
-rw-r--r--vendor/gimli-0.26.2/README.md78
-rw-r--r--vendor/gimli-0.26.2/benches/bench.rs807
-rw-r--r--vendor/gimli-0.26.2/examples/dwarf-validate.rs267
-rw-r--r--vendor/gimli-0.26.2/examples/dwarfdump.rs2417
-rw-r--r--vendor/gimli-0.26.2/examples/simple.rs67
-rw-r--r--vendor/gimli-0.26.2/examples/simple_line.rs106
-rw-r--r--vendor/gimli-0.26.2/fixtures/self/README.md147
-rw-r--r--vendor/gimli-0.26.2/fixtures/self/debug_abbrev  bin  0 -> 1865 bytes
-rw-r--r--vendor/gimli-0.26.2/fixtures/self/debug_aranges  bin  0 -> 16304 bytes
-rw-r--r--vendor/gimli-0.26.2/fixtures/self/debug_info  bin  0 -> 392832 bytes
-rw-r--r--vendor/gimli-0.26.2/fixtures/self/debug_inlined  bin  0 -> 25062 bytes
-rw-r--r--vendor/gimli-0.26.2/fixtures/self/debug_line  bin  0 -> 109251 bytes
-rw-r--r--vendor/gimli-0.26.2/fixtures/self/debug_loc  bin  0 -> 283588 bytes
-rw-r--r--vendor/gimli-0.26.2/fixtures/self/debug_pubnames  bin  0 -> 138556 bytes
-rw-r--r--vendor/gimli-0.26.2/fixtures/self/debug_pubtypes  bin  0 -> 52984 bytes
-rw-r--r--vendor/gimli-0.26.2/fixtures/self/debug_ranges  bin  0 -> 186016 bytes
-rw-r--r--vendor/gimli-0.26.2/fixtures/self/debug_str  bin  0 -> 145794 bytes
-rw-r--r--vendor/gimli-0.26.2/fixtures/self/eh_frame  bin  0 -> 147656 bytes
-rw-r--r--vendor/gimli-0.26.2/fixtures/self/eh_frame_hdr  bin  0 -> 108732 bytes
-rw-r--r--vendor/gimli-0.26.2/rustfmt.toml0
-rw-r--r--vendor/gimli-0.26.2/src/arch.rs603
-rw-r--r--vendor/gimli-0.26.2/src/common.rs363
-rw-r--r--vendor/gimli-0.26.2/src/constants.rs1425
-rw-r--r--vendor/gimli-0.26.2/src/endianity.rs256
-rw-r--r--vendor/gimli-0.26.2/src/leb128.rs612
-rw-r--r--vendor/gimli-0.26.2/src/lib.rs76
-rw-r--r--vendor/gimli-0.26.2/src/read/abbrev.rs996
-rw-r--r--vendor/gimli-0.26.2/src/read/addr.rs128
-rw-r--r--vendor/gimli-0.26.2/src/read/aranges.rs660
-rw-r--r--vendor/gimli-0.26.2/src/read/cfi.rs7585
-rw-r--r--vendor/gimli-0.26.2/src/read/dwarf.rs1143
-rw-r--r--vendor/gimli-0.26.2/src/read/endian_reader.rs639
-rw-r--r--vendor/gimli-0.26.2/src/read/endian_slice.rs350
-rw-r--r--vendor/gimli-0.26.2/src/read/index.rs535
-rw-r--r--vendor/gimli-0.26.2/src/read/line.rs3030
-rw-r--r--vendor/gimli-0.26.2/src/read/lists.rs68
-rw-r--r--vendor/gimli-0.26.2/src/read/loclists.rs1514
-rw-r--r--vendor/gimli-0.26.2/src/read/lookup.rs202
-rw-r--r--vendor/gimli-0.26.2/src/read/mod.rs821
-rw-r--r--vendor/gimli-0.26.2/src/read/op.rs4114
-rw-r--r--vendor/gimli-0.26.2/src/read/pubnames.rs141
-rw-r--r--vendor/gimli-0.26.2/src/read/pubtypes.rs141
-rw-r--r--vendor/gimli-0.26.2/src/read/reader.rs502
-rw-r--r--vendor/gimli-0.26.2/src/read/rnglists.rs1354
-rw-r--r--vendor/gimli-0.26.2/src/read/str.rs321
-rw-r--r--vendor/gimli-0.26.2/src/read/unit.rs6146
-rw-r--r--vendor/gimli-0.26.2/src/read/util.rs250
-rw-r--r--vendor/gimli-0.26.2/src/read/value.rs1621
-rw-r--r--vendor/gimli-0.26.2/src/test_util.rs53
-rw-r--r--vendor/gimli-0.26.2/src/write/abbrev.rs188
-rw-r--r--vendor/gimli-0.26.2/src/write/cfi.rs1025
-rw-r--r--vendor/gimli-0.26.2/src/write/dwarf.rs138
-rw-r--r--vendor/gimli-0.26.2/src/write/endian_vec.rs117
-rw-r--r--vendor/gimli-0.26.2/src/write/line.rs1960
-rw-r--r--vendor/gimli-0.26.2/src/write/loc.rs549
-rw-r--r--vendor/gimli-0.26.2/src/write/mod.rs425
-rw-r--r--vendor/gimli-0.26.2/src/write/op.rs1621
-rw-r--r--vendor/gimli-0.26.2/src/write/range.rs415
-rw-r--r--vendor/gimli-0.26.2/src/write/section.rs172
-rw-r--r--vendor/gimli-0.26.2/src/write/str.rs172
-rw-r--r--vendor/gimli-0.26.2/src/write/unit.rs3157
-rw-r--r--vendor/gimli-0.26.2/src/write/writer.rs497
-rw-r--r--vendor/gimli-0.26.2/tests/convert_self.rs158
-rwxr-xr-xvendor/gimli-0.26.2/tests/parse_self.rs431
-rw-r--r--vendor/gimli/.cargo-checksum.json2
-rw-r--r--vendor/gimli/CHANGELOG.md21
-rw-r--r--vendor/gimli/Cargo.lock120
-rw-r--r--vendor/gimli/Cargo.toml4
-rw-r--r--vendor/gimli/README.md4
-rw-r--r--vendor/gimli/clippy.toml1
-rw-r--r--vendor/gimli/examples/dwarfdump.rs164
-rw-r--r--vendor/gimli/src/arch.rs148
-rw-r--r--vendor/gimli/src/lib.rs15
-rw-r--r--vendor/gimli/src/read/abbrev.rs129
-rw-r--r--vendor/gimli/src/read/cfi.rs23
-rw-r--r--vendor/gimli/src/read/dwarf.rs26
-rw-r--r--vendor/gimli/src/read/endian_slice.rs6
-rw-r--r--vendor/gimli/src/read/lazy.rs116
-rw-r--r--vendor/gimli/src/read/line.rs2
-rw-r--r--vendor/gimli/src/read/loclists.rs241
-rw-r--r--vendor/gimli/src/read/mod.rs3
-rw-r--r--vendor/gimli/src/read/op.rs7
-rw-r--r--vendor/gimli/src/read/rnglists.rs206
-rw-r--r--vendor/gimli/src/read/unit.rs13
-rw-r--r--vendor/gimli/src/read/util.rs5
-rw-r--r--vendor/gimli/src/write/line.rs5
-rw-r--r--vendor/gimli/src/write/loc.rs3
-rw-r--r--vendor/gimli/src/write/op.rs15
-rw-r--r--vendor/gimli/src/write/range.rs3
-rw-r--r--vendor/gimli/src/write/section.rs2
-rw-r--r--vendor/gimli/src/write/unit.rs17
-rw-r--r--vendor/gimli/src/write/writer.rs5
-rw-r--r--vendor/hermit-abi/.cargo-checksum.json2
-rw-r--r--vendor/hermit-abi/Cargo.toml13
-rw-r--r--vendor/hermit-abi/src/net.rs232
-rw-r--r--vendor/hermit-abi/src/net_old.rs302
-rw-r--r--vendor/icu_list/.cargo-checksum.json2
-rw-r--r--vendor/icu_list/Cargo.lock761
-rw-r--r--vendor/icu_list/Cargo.toml43
-rw-r--r--vendor/icu_list/examples/and_list.rs4
-rw-r--r--vendor/icu_list/src/lazy_automaton.rs79
-rw-r--r--vendor/icu_list/src/lib.rs3
-rw-r--r--vendor/icu_list/src/list_formatter.rs43
-rw-r--r--vendor/icu_list/src/patterns.rs283
-rw-r--r--vendor/icu_list/src/provider.rs465
-rw-r--r--vendor/icu_list/src/provider/mod.rs261
-rw-r--r--vendor/icu_list/src/provider/serde_dfa.rs244
-rw-r--r--vendor/icu_list/src/string_matcher.rs213
-rw-r--r--vendor/icu_locid/.cargo-checksum.json2
-rw-r--r--vendor/icu_locid/Cargo.lock132
-rw-r--r--vendor/icu_locid/Cargo.toml30
-rw-r--r--vendor/icu_locid/README.md31
-rw-r--r--vendor/icu_locid/benches/iai_langid.rs8
-rw-r--r--vendor/icu_locid/examples/filter_langids.rs5
-rw-r--r--vendor/icu_locid/src/extensions/mod.rs29
-rw-r--r--vendor/icu_locid/src/extensions/other/mod.rs61
-rw-r--r--vendor/icu_locid/src/extensions/other/subtag.rs6
-rw-r--r--vendor/icu_locid/src/extensions/private/mod.rs26
-rw-r--r--vendor/icu_locid/src/extensions/transform/fields.rs30
-rw-r--r--vendor/icu_locid/src/extensions/transform/mod.rs6
-rw-r--r--vendor/icu_locid/src/extensions/transform/value.rs35
-rw-r--r--vendor/icu_locid/src/extensions/unicode/attributes.rs15
-rw-r--r--vendor/icu_locid/src/extensions/unicode/keywords.rs65
-rw-r--r--vendor/icu_locid/src/extensions/unicode/mod.rs46
-rw-r--r--vendor/icu_locid/src/extensions/unicode/value.rs33
-rw-r--r--vendor/icu_locid/src/helpers.rs46
-rw-r--r--vendor/icu_locid/src/langid.rs109
-rw-r--r--vendor/icu_locid/src/lib.rs32
-rw-r--r--vendor/icu_locid/src/locale.rs106
-rw-r--r--vendor/icu_locid/src/parser/errors.rs16
-rw-r--r--vendor/icu_locid/src/parser/langid.rs47
-rw-r--r--vendor/icu_locid/src/parser/locale.rs6
-rw-r--r--vendor/icu_locid/src/parser/mod.rs231
-rw-r--r--vendor/icu_locid/src/subtags/language.rs11
-rw-r--r--vendor/icu_locid/src/subtags/variants.rs28
-rw-r--r--vendor/icu_locid/tests/fixtures/invalid-extensions.json40
-rw-r--r--vendor/icu_locid/tests/fixtures/invalid.json49
-rw-r--r--vendor/icu_locid/tests/fixtures/mod.rs1
-rw-r--r--vendor/icu_locid/tests/langid.rs15
-rw-r--r--vendor/icu_locid/tests/locale.rs16
-rw-r--r--vendor/icu_provider/.cargo-checksum.json2
-rw-r--r--vendor/icu_provider/Cargo.toml49
-rw-r--r--vendor/icu_provider/README.md2
-rw-r--r--vendor/icu_provider/src/any.rs60
-rw-r--r--vendor/icu_provider/src/buf.rs6
-rw-r--r--vendor/icu_provider/src/constructors.rs6
-rw-r--r--vendor/icu_provider/src/datagen/mod.rs2
-rw-r--r--vendor/icu_provider/src/error.rs12
-rw-r--r--vendor/icu_provider/src/hello_world.rs2
-rw-r--r--vendor/icu_provider/src/key.rs6
-rw-r--r--vendor/icu_provider/src/lib.rs2
-rw-r--r--vendor/icu_provider/src/request.rs40
-rw-r--r--vendor/icu_provider/src/response.rs10
-rw-r--r--vendor/icu_provider/src/serde/mod.rs18
-rw-r--r--vendor/icu_provider_adapters/.cargo-checksum.json2
-rw-r--r--vendor/icu_provider_adapters/Cargo.toml28
-rw-r--r--vendor/icu_provider_adapters/src/empty.rs23
-rw-r--r--vendor/icu_provider_adapters/src/fallback/adapter.rs12
-rw-r--r--vendor/icu_provider_adapters/src/fallback/algorithms.rs26
-rw-r--r--vendor/icu_provider_adapters/src/fallback/mod.rs94
-rw-r--r--vendor/icu_provider_adapters/src/fork/by_error.rs39
-rw-r--r--vendor/icu_provider_macros/.cargo-checksum.json2
-rw-r--r--vendor/icu_provider_macros/Cargo.toml2
-rw-r--r--vendor/icu_provider_macros/src/lib.rs2
-rw-r--r--vendor/icu_provider_macros/src/tests.rs2
-rw-r--r--vendor/itoa/.cargo-checksum.json2
-rw-r--r--vendor/itoa/Cargo.toml2
-rw-r--r--vendor/itoa/README.md2
-rw-r--r--vendor/itoa/src/lib.rs2
-rw-r--r--vendor/jobserver/.cargo-checksum.json2
-rw-r--r--vendor/jobserver/Cargo.toml22
-rw-r--r--vendor/jobserver/src/lib.rs54
-rw-r--r--vendor/jobserver/src/unix.rs122
-rw-r--r--vendor/jobserver/src/wasm.rs5
-rw-r--r--vendor/jobserver/src/windows.rs20
-rw-r--r--vendor/jobserver/tests/client.rs3
-rw-r--r--vendor/jobserver/tests/make-as-a-client.rs3
-rw-r--r--vendor/jobserver/tests/server.rs29
-rw-r--r--vendor/libc/.cargo-checksum.json2
-rw-r--r--vendor/libc/Cargo.toml2
-rw-r--r--vendor/libc/src/fuchsia/mod.rs6
-rw-r--r--vendor/libc/src/unix/bsd/mod.rs1
-rw-r--r--vendor/libc/src/unix/haiku/mod.rs1
-rw-r--r--vendor/libc/src/unix/hermit/mod.rs1
-rw-r--r--vendor/libc/src/unix/linux_like/android/b32/arm.rs3
-rw-r--r--vendor/libc/src/unix/linux_like/android/b32/x86/mod.rs3
-rw-r--r--vendor/libc/src/unix/linux_like/android/b64/aarch64/mod.rs3
-rw-r--r--vendor/libc/src/unix/linux_like/android/b64/x86_64/mod.rs3
-rw-r--r--vendor/libc/src/unix/linux_like/android/mod.rs27
-rw-r--r--vendor/libc/src/unix/linux_like/emscripten/mod.rs1
-rw-r--r--vendor/libc/src/unix/linux_like/linux/mod.rs11
-rw-r--r--vendor/libc/src/unix/linux_like/mod.rs8
-rw-r--r--vendor/libc/src/unix/mod.rs151
-rw-r--r--vendor/libc/src/unix/newlib/mod.rs1
-rw-r--r--vendor/libc/src/unix/nto/aarch64.rs36
-rw-r--r--vendor/libc/src/unix/nto/mod.rs3286
-rw-r--r--vendor/libc/src/unix/nto/neutrino.rs1288
-rw-r--r--vendor/libc/src/unix/nto/x86_64.rs132
-rw-r--r--vendor/libc/src/unix/solarish/mod.rs1
-rw-r--r--vendor/libc/src/vxworks/mod.rs5
-rw-r--r--vendor/libc/src/wasi.rs6
-rw-r--r--vendor/libc/src/windows/mod.rs6
-rw-r--r--vendor/litemap/.cargo-checksum.json2
-rw-r--r--vendor/litemap/Cargo.lock322
-rw-r--r--vendor/litemap/Cargo.toml18
-rw-r--r--vendor/litemap/benches/litemap.rs17
-rw-r--r--vendor/litemap/src/map.rs4
-rw-r--r--vendor/litemap/src/store/mod.rs2
-rw-r--r--vendor/litemap/tests/rkyv.rs15
-rw-r--r--vendor/lsp-types/.cargo-checksum.json2
-rw-r--r--vendor/lsp-types/CHANGELOG.md584
-rw-r--r--vendor/lsp-types/Cargo.toml3
-rw-r--r--vendor/lsp-types/LICENSE44
-rw-r--r--vendor/lsp-types/README.md28
-rw-r--r--[-rwxr-xr-x]vendor/lsp-types/release.sh30
-rw-r--r--vendor/lsp-types/release.toml3
-rw-r--r--vendor/lsp-types/src/call_hierarchy.rs254
-rw-r--r--vendor/lsp-types/src/code_action.rs755
-rw-r--r--vendor/lsp-types/src/code_lens.rs132
-rw-r--r--vendor/lsp-types/src/color.rs244
-rw-r--r--vendor/lsp-types/src/completion.rs1216
-rw-r--r--vendor/lsp-types/src/document_highlight.rs102
-rw-r--r--vendor/lsp-types/src/document_link.rs134
-rw-r--r--vendor/lsp-types/src/document_symbols.rs264
-rw-r--r--vendor/lsp-types/src/error_codes.rs73
-rw-r--r--vendor/lsp-types/src/file_operations.rs426
-rw-r--r--vendor/lsp-types/src/folding_range.rs246
-rw-r--r--vendor/lsp-types/src/formatting.rs306
-rw-r--r--vendor/lsp-types/src/hover.rs172
-rw-r--r--vendor/lsp-types/src/inlay_hint.rs560
-rw-r--r--vendor/lsp-types/src/inline_value.rs217
-rw-r--r--vendor/lsp-types/src/lib.rs5490
-rw-r--r--vendor/lsp-types/src/linked_editing.rs122
-rw-r--r--vendor/lsp-types/src/lsif.rs676
-rw-r--r--vendor/lsp-types/src/moniker.rs184
-rw-r--r--vendor/lsp-types/src/notification.rs722
-rw-r--r--vendor/lsp-types/src/progress.rs268
-rw-r--r--vendor/lsp-types/src/references.rs60
-rw-r--r--vendor/lsp-types/src/rename.rs176
-rw-r--r--vendor/lsp-types/src/request.rs1890
-rw-r--r--vendor/lsp-types/src/selection_range.rs172
-rw-r--r--vendor/lsp-types/src/semantic_tokens.rs1472
-rw-r--r--vendor/lsp-types/src/signature_help.rs414
-rw-r--r--vendor/lsp-types/src/trace.rs164
-rw-r--r--vendor/lsp-types/src/type_hierarchy.rs90
-rw-r--r--vendor/lsp-types/src/window.rs352
-rw-r--r--vendor/lsp-types/src/workspace_folders.rs98
-rw-r--r--vendor/lsp-types/src/workspace_symbols.rs148
-rw-r--r--vendor/lsp-types/tests/lsif.rs32
-rw-r--r--vendor/lsp-types/tests/tsc-unix.lsif270
-rw-r--r--vendor/memmap2/.cargo-checksum.json2
-rw-r--r--vendor/memmap2/CHANGELOG.md9
-rw-r--r--vendor/memmap2/Cargo.lock2
-rw-r--r--vendor/memmap2/Cargo.toml4
-rw-r--r--vendor/memmap2/src/lib.rs34
-rw-r--r--vendor/miniz_oxide-0.5.3/.cargo-checksum.json1
-rw-r--r--vendor/miniz_oxide-0.5.3/Cargo.toml55
-rw-r--r--vendor/miniz_oxide-0.5.3/LICENSE (renamed from vendor/time-macros/LICENSE-MIT)4
-rw-r--r--vendor/miniz_oxide-0.5.3/LICENSE-APACHE.md (renamed from vendor/time-macros/LICENSE-Apache)25
-rw-r--r--vendor/miniz_oxide-0.5.3/LICENSE-MIT.md21
-rw-r--r--vendor/miniz_oxide-0.5.3/LICENSE-ZLIB.md11
-rw-r--r--vendor/miniz_oxide-0.5.3/Readme.md35
-rw-r--r--vendor/miniz_oxide-0.5.3/src/deflate/buffer.rs58
-rw-r--r--vendor/miniz_oxide-0.5.3/src/deflate/core.rs2463
-rw-r--r--vendor/miniz_oxide-0.5.3/src/deflate/mod.rs227
-rw-r--r--vendor/miniz_oxide-0.5.3/src/deflate/stream.rs121
-rw-r--r--vendor/miniz_oxide-0.5.3/src/inflate/core.rs1931
-rw-r--r--vendor/miniz_oxide-0.5.3/src/inflate/mod.rs279
-rw-r--r--vendor/miniz_oxide-0.5.3/src/inflate/output_buffer.rs60
-rw-r--r--vendor/miniz_oxide-0.5.3/src/inflate/stream.rs415
-rw-r--r--vendor/miniz_oxide-0.5.3/src/lib.rs208
-rw-r--r--vendor/miniz_oxide-0.5.3/src/shared.rs25
-rw-r--r--vendor/miniz_oxide/.cargo-checksum.json2
-rw-r--r--vendor/miniz_oxide/Cargo.toml6
-rw-r--r--vendor/miniz_oxide/Readme.md19
-rw-r--r--vendor/miniz_oxide/src/inflate/core.rs5
-rw-r--r--vendor/miniz_oxide/src/inflate/mod.rs102
-rw-r--r--vendor/miniz_oxide/src/inflate/stream.rs3
-rw-r--r--vendor/miniz_oxide/src/lib.rs10
-rw-r--r--vendor/nu-ansi-term/.cargo-checksum.json1
-rw-r--r--vendor/nu-ansi-term/Cargo.lock159
-rw-r--r--vendor/nu-ansi-term/Cargo.toml57
-rw-r--r--vendor/nu-ansi-term/LICENCE (renamed from vendor/ansi_term/LICENCE)1
-rw-r--r--vendor/nu-ansi-term/README.md (renamed from vendor/ansi_term/README.md)107
-rw-r--r--vendor/nu-ansi-term/examples/256_colors.rs (renamed from vendor/ansi_term/examples/256_colours.rs)33
-rw-r--r--vendor/nu-ansi-term/examples/basic_colors.rs18
-rw-r--r--vendor/nu-ansi-term/examples/gradient_colors.rs37
-rw-r--r--vendor/nu-ansi-term/examples/rgb_colors.rs23
-rw-r--r--vendor/nu-ansi-term/src/ansi.rs (renamed from vendor/ansi_term/src/ansi.rs)229
-rw-r--r--vendor/nu-ansi-term/src/debug.rs (renamed from vendor/ansi_term/src/debug.rs)84
-rw-r--r--vendor/nu-ansi-term/src/difference.rs (renamed from vendor/ansi_term/src/difference.rs)25
-rw-r--r--vendor/nu-ansi-term/src/display.rs (renamed from vendor/ansi_term/src/display.rs)220
-rw-r--r--vendor/nu-ansi-term/src/gradient.rs105
-rw-r--r--vendor/nu-ansi-term/src/lib.rs (renamed from vendor/ansi_term/src/lib.rs)145
-rw-r--r--vendor/nu-ansi-term/src/rgb.rs173
-rw-r--r--vendor/nu-ansi-term/src/style.rs629
-rw-r--r--vendor/nu-ansi-term/src/util.rs (renamed from vendor/ansi_term/src/util.rs)40
-rw-r--r--vendor/nu-ansi-term/src/windows.rs (renamed from vendor/ansi_term/src/windows.rs)17
-rw-r--r--vendor/nu-ansi-term/src/write.rs (renamed from vendor/ansi_term/src/write.rs)19
-rw-r--r--vendor/num_cpus/.cargo-checksum.json2
-rw-r--r--vendor/num_cpus/CHANGELOG.md13
-rw-r--r--vendor/num_cpus/Cargo.lock6
-rw-r--r--vendor/num_cpus/Cargo.toml12
-rw-r--r--vendor/num_cpus/fixtures/cgroups2/cgroups/ceil/cpu.max1
-rw-r--r--vendor/num_cpus/fixtures/cgroups2/cgroups/good/cpu.max1
-rw-r--r--vendor/num_cpus/fixtures/cgroups2/cgroups/zero-period/cpu.max1
-rw-r--r--vendor/num_cpus/fixtures/cgroups2/proc/cgroups/cgroup2
-rw-r--r--vendor/num_cpus/fixtures/cgroups2/proc/cgroups/cgroup_multi3
-rw-r--r--vendor/num_cpus/fixtures/cgroups2/proc/cgroups/mountinfo5
-rw-r--r--vendor/num_cpus/src/linux.rs407
-rw-r--r--vendor/object/.cargo-checksum.json2
-rw-r--r--vendor/object/CHANGELOG.md11
-rw-r--r--vendor/object/Cargo.toml2
-rw-r--r--vendor/object/src/elf.rs116
-rw-r--r--vendor/once_cell/.cargo-checksum.json2
-rw-r--r--vendor/once_cell/CHANGELOG.md4
-rw-r--r--vendor/once_cell/Cargo.lock2
-rw-r--r--vendor/once_cell/Cargo.toml2
-rw-r--r--vendor/once_cell/README.md2
-rw-r--r--vendor/once_cell/src/lib.rs14
-rw-r--r--vendor/once_cell/src/race.rs92
-rw-r--r--vendor/overload/.cargo-checksum.json1
-rw-r--r--vendor/overload/Cargo.toml (renamed from vendor/rls-data/Cargo.toml)24
-rw-r--r--vendor/overload/LICENSE21
-rw-r--r--vendor/overload/README.md64
-rw-r--r--vendor/overload/logo.png  bin  0 -> 19188 bytes
-rw-r--r--vendor/overload/src/assignment.rs27
-rw-r--r--vendor/overload/src/binary.rs28
-rw-r--r--vendor/overload/src/lib.rs257
-rw-r--r--vendor/overload/src/unary.rs20
-rw-r--r--vendor/overload/tests/assignment.rs89
-rw-r--r--vendor/overload/tests/binary.rs72
-rw-r--r--vendor/overload/tests/unary.rs21
-rw-r--r--vendor/parking_lot_core-0.8.6/.cargo-checksum.json (renamed from vendor/parking_lot_core-0.8.5/.cargo-checksum.json)2
-rw-r--r--vendor/parking_lot_core-0.8.6/Cargo.toml (renamed from vendor/parking_lot_core-0.8.5/Cargo.toml)30
-rw-r--r--vendor/parking_lot_core-0.8.6/LICENSE-APACHE201
-rw-r--r--vendor/parking_lot_core-0.8.6/LICENSE-MIT (renamed from vendor/parking_lot_core-0.8.5/LICENSE-MIT)0
-rw-r--r--vendor/parking_lot_core-0.8.6/build.rs (renamed from vendor/parking_lot_core-0.8.5/build.rs)0
-rw-r--r--vendor/parking_lot_core-0.8.6/src/lib.rs (renamed from vendor/parking_lot_core-0.8.5/src/lib.rs)0
-rw-r--r--vendor/parking_lot_core-0.8.6/src/parking_lot.rs (renamed from vendor/parking_lot_core-0.8.5/src/parking_lot.rs)0
-rw-r--r--vendor/parking_lot_core-0.8.6/src/spinwait.rs (renamed from vendor/parking_lot_core-0.8.5/src/spinwait.rs)0
-rw-r--r--vendor/parking_lot_core-0.8.6/src/thread_parker/generic.rs (renamed from vendor/parking_lot_core-0.8.5/src/thread_parker/generic.rs)0
-rw-r--r--vendor/parking_lot_core-0.8.6/src/thread_parker/linux.rs (renamed from vendor/parking_lot_core-0.8.5/src/thread_parker/linux.rs)8
-rw-r--r--vendor/parking_lot_core-0.8.6/src/thread_parker/mod.rs (renamed from vendor/parking_lot_core-0.8.5/src/thread_parker/mod.rs)0
-rw-r--r--vendor/parking_lot_core-0.8.6/src/thread_parker/redox.rs (renamed from vendor/parking_lot_core-0.8.5/src/thread_parker/redox.rs)0
-rw-r--r--vendor/parking_lot_core-0.8.6/src/thread_parker/sgx.rs (renamed from vendor/parking_lot_core-0.8.5/src/thread_parker/sgx.rs)0
-rw-r--r--vendor/parking_lot_core-0.8.6/src/thread_parker/unix.rs (renamed from vendor/parking_lot_core-0.8.5/src/thread_parker/unix.rs)0
-rw-r--r--vendor/parking_lot_core-0.8.6/src/thread_parker/wasm.rs (renamed from vendor/parking_lot_core-0.8.5/src/thread_parker/wasm.rs)0
-rw-r--r--vendor/parking_lot_core-0.8.6/src/thread_parker/wasm_atomic.rs (renamed from vendor/parking_lot_core-0.8.5/src/thread_parker/wasm_atomic.rs)0
-rw-r--r--vendor/parking_lot_core-0.8.6/src/thread_parker/windows/keyed_event.rs (renamed from vendor/parking_lot_core-0.8.5/src/thread_parker/windows/keyed_event.rs)0
-rw-r--r--vendor/parking_lot_core-0.8.6/src/thread_parker/windows/mod.rs (renamed from vendor/parking_lot_core-0.8.5/src/thread_parker/windows/mod.rs)0
-rw-r--r--vendor/parking_lot_core-0.8.6/src/thread_parker/windows/waitaddress.rs (renamed from vendor/parking_lot_core-0.8.5/src/thread_parker/windows/waitaddress.rs)0
-rw-r--r--vendor/parking_lot_core-0.8.6/src/util.rs (renamed from vendor/parking_lot_core-0.8.5/src/util.rs)0
-rw-r--r--vendor/parking_lot_core-0.8.6/src/word_lock.rs (renamed from vendor/parking_lot_core-0.8.5/src/word_lock.rs)0
-rw-r--r--vendor/parking_lot_core/.cargo-checksum.json2
-rw-r--r--vendor/parking_lot_core/Cargo.toml2
-rw-r--r--vendor/parking_lot_core/src/thread_parker/linux.rs8
-rw-r--r--vendor/parking_lot_core/src/thread_parker/unix.rs8
-rw-r--r--vendor/proc-macro2/.cargo-checksum.json2
-rw-r--r--vendor/proc-macro2/Cargo.toml5
-rw-r--r--vendor/proc-macro2/LICENSE-APACHE25
-rw-r--r--vendor/proc-macro2/LICENSE-MIT2
-rw-r--r--vendor/proc-macro2/README.md2
-rw-r--r--vendor/proc-macro2/src/fallback.rs8
-rw-r--r--vendor/proc-macro2/src/lib.rs45
-rw-r--r--vendor/proc-macro2/src/location.rs29
-rw-r--r--vendor/proc-macro2/src/wrapper.rs22
-rw-r--r--vendor/quote/.cargo-checksum.json2
-rw-r--r--vendor/quote/Cargo.toml7
-rw-r--r--vendor/quote/LICENSE-MIT2
-rw-r--r--vendor/quote/README.md2
-rw-r--r--vendor/quote/src/lib.rs2
-rw-r--r--vendor/quote/tests/test.rs3
-rw-r--r--vendor/quote/tests/ui/not-quotable.rs2
-rw-r--r--vendor/quote/tests/ui/not-quotable.stderr9
-rw-r--r--vendor/quote/tests/ui/not-repeatable.rs2
-rw-r--r--vendor/quote/tests/ui/not-repeatable.stderr66
-rw-r--r--vendor/rayon/.cargo-checksum.json2
-rw-r--r--vendor/rayon/Cargo.toml5
-rw-r--r--vendor/rayon/RELEASES.md12
-rw-r--r--vendor/rayon/src/iter/par_bridge.rs159
-rw-r--r--vendor/rayon/tests/par_bridge_recursion.rs30
-rw-r--r--vendor/regex-syntax/.cargo-checksum.json2
-rw-r--r--vendor/regex-syntax/Cargo.toml2
-rw-r--r--vendor/regex-syntax/src/unicode.rs1
-rw-r--r--vendor/regex-syntax/src/unicode_tables/age.rs42
-rw-r--r--vendor/regex-syntax/src/unicode_tables/case_folding_simple.rs6
-rw-r--r--vendor/regex-syntax/src/unicode_tables/general_category.rs250
-rw-r--r--vendor/regex-syntax/src/unicode_tables/grapheme_cluster_break.rs26
-rw-r--r--vendor/regex-syntax/src/unicode_tables/perl_decimal.rs8
-rw-r--r--vendor/regex-syntax/src/unicode_tables/perl_space.rs6
-rw-r--r--vendor/regex-syntax/src/unicode_tables/perl_word.rs30
-rw-r--r--vendor/regex-syntax/src/unicode_tables/property_bool.rs266
-rw-r--r--vendor/regex-syntax/src/unicode_tables/property_names.rs6
-rw-r--r--vendor/regex-syntax/src/unicode_tables/property_values.rs14
-rw-r--r--vendor/regex-syntax/src/unicode_tables/script.rs63
-rw-r--r--vendor/regex-syntax/src/unicode_tables/script_extension.rs54
-rw-r--r--vendor/regex-syntax/src/unicode_tables/sentence_break.rs47
-rw-r--r--vendor/regex-syntax/src/unicode_tables/word_break.rs34
-rw-r--r--vendor/regex/.cargo-checksum.json2
-rw-r--r--vendor/regex/CHANGELOG.md26
-rw-r--r--vendor/regex/Cargo.lock22
-rw-r--r--vendor/regex/Cargo.toml2
-rw-r--r--vendor/regex/README.md8
-rw-r--r--vendor/regex/src/lib.rs3
-rw-r--r--vendor/regex/src/re_bytes.rs12
-rw-r--r--vendor/regex/src/re_unicode.rs12
-rw-r--r--vendor/regex/tests/replace.rs18
-rw-r--r--vendor/rls-data/.cargo-checksum.json1
-rw-r--r--vendor/rls-data/README.md11
-rw-r--r--vendor/rls-data/src/config.rs24
-rw-r--r--vendor/rls-data/src/lib.rs272
-rw-r--r--vendor/rls-span/.cargo-checksum.json1
-rw-r--r--vendor/rls-span/Cargo.toml33
-rw-r--r--vendor/rls-span/src/compiler.rs78
-rw-r--r--vendor/rls-span/src/lib.rs383
-rw-r--r--vendor/rustc-ap-rustc_lexer/.cargo-checksum.json2
-rw-r--r--vendor/rustc-ap-rustc_lexer/Cargo.toml4
-rw-r--r--vendor/rustc-ap-rustc_lexer/src/lib.rs22
-rw-r--r--vendor/ryu/.cargo-checksum.json2
-rw-r--r--vendor/ryu/Cargo.lock30
-rw-r--r--vendor/ryu/Cargo.toml2
-rw-r--r--vendor/ryu/README.md2
-rw-r--r--vendor/ryu/src/lib.rs2
-rw-r--r--vendor/ryu/src/pretty/mantissa.rs6
-rw-r--r--vendor/ryu/src/pretty/mod.rs3
-rw-r--r--vendor/ryu/src/s2f.rs2
-rw-r--r--vendor/scoped-tls/.cargo-checksum.json2
-rw-r--r--vendor/scoped-tls/Cargo.toml18
-rw-r--r--vendor/scoped-tls/README.md2
-rw-r--r--vendor/scoped-tls/appveyor.yml17
-rw-r--r--vendor/scoped-tls/src/lib.rs22
-rw-r--r--vendor/semver/.cargo-checksum.json2
-rw-r--r--vendor/semver/Cargo.toml7
-rw-r--r--vendor/semver/README.md2
-rw-r--r--vendor/semver/build.rs2
-rw-r--r--vendor/semver/src/backport.rs39
-rw-r--r--vendor/semver/src/identifier.rs20
-rw-r--r--vendor/semver/src/lib.rs12
-rw-r--r--vendor/serde/.cargo-checksum.json2
-rw-r--r--vendor/serde/Cargo.toml9
-rw-r--r--vendor/serde/README.md4
-rw-r--r--vendor/serde/build.rs38
-rw-r--r--vendor/serde/crates-io.md2
-rw-r--r--vendor/serde/src/de/format.rs2
-rw-r--r--vendor/serde/src/de/impls.rs44
-rw-r--r--vendor/serde/src/de/mod.rs5
-rw-r--r--vendor/serde/src/lib.rs37
-rw-r--r--vendor/serde/src/private/de.rs14
-rw-r--r--vendor/serde/src/private/ser.rs2
-rw-r--r--vendor/serde/src/ser/impls.rs70
-rw-r--r--vendor/serde/src/ser/mod.rs10
-rw-r--r--vendor/serde_derive/.cargo-checksum.json2
-rw-r--r--vendor/serde_derive/Cargo.toml4
-rw-r--r--vendor/serde_derive/README.md4
-rw-r--r--vendor/serde_derive/build.rs2
-rw-r--r--vendor/serde_derive/crates-io.md2
-rw-r--r--vendor/serde_derive/src/de.rs186
-rw-r--r--vendor/serde_derive/src/internals/check.rs23
-rw-r--r--vendor/serde_derive/src/lib.rs4
-rw-r--r--vendor/serde_derive/src/ser.rs62
-rw-r--r--vendor/serde_derive/src/this.rs32
-rw-r--r--vendor/serde_json/.cargo-checksum.json2
-rw-r--r--vendor/serde_json/Cargo.toml5
-rw-r--r--vendor/serde_json/README.md10
-rw-r--r--vendor/serde_json/build.rs2
-rw-r--r--vendor/serde_json/src/de.rs154
-rw-r--r--vendor/serde_json/src/lib.rs6
-rw-r--r--vendor/serde_json/src/map.rs10
-rw-r--r--vendor/serde_json/src/number.rs37
-rw-r--r--vendor/serde_json/src/ser.rs382
-rw-r--r--vendor/serde_json/src/value/de.rs27
-rw-r--r--vendor/serde_json/src/value/from.rs9
-rw-r--r--vendor/serde_json/src/value/ser.rs15
-rw-r--r--vendor/serde_json/tests/regression/issue953.rs9
-rw-r--r--vendor/serde_json/tests/test.rs3
-rw-r--r--vendor/serde_json/tests/ui/missing_colon.stderr5
-rw-r--r--vendor/serde_json/tests/ui/missing_comma.stderr6
-rw-r--r--vendor/serde_json/tests/ui/missing_value.stderr5
-rw-r--r--vendor/serde_json/tests/ui/parse_expr.stderr6
-rw-r--r--vendor/serde_json/tests/ui/unexpected_after_array_element.stderr2
-rw-r--r--vendor/serde_json/tests/ui/unexpected_after_map_entry.stderr2
-rw-r--r--vendor/serde_json/tests/ui/unexpected_colon.stderr2
-rw-r--r--vendor/serde_json/tests/ui/unexpected_comma.stderr2
-rw-r--r--vendor/serde_repr/.cargo-checksum.json2
-rw-r--r--vendor/serde_repr/Cargo.toml2
-rw-r--r--vendor/serde_repr/README.md2
-rw-r--r--vendor/snap/.cargo-checksum.json2
-rw-r--r--vendor/snap/Cargo.lock8
-rw-r--r--vendor/snap/Cargo.toml26
-rw-r--r--vendor/snap/src/error.rs7
-rw-r--r--vendor/snap/src/read.rs5
-rw-r--r--vendor/syn/.cargo-checksum.json2
-rw-r--r--vendor/syn/Cargo.toml5
-rw-r--r--vendor/syn/README.md2
-rw-r--r--vendor/syn/src/buffer.rs50
-rw-r--r--vendor/syn/src/drops.rs58
-rw-r--r--vendor/syn/src/error.rs54
-rw-r--r--vendor/syn/src/generics.rs25
-rw-r--r--vendor/syn/src/lib.rs6
-rw-r--r--vendor/syn/src/path.rs5
-rw-r--r--vendor/syn/src/punctuated.rs43
-rw-r--r--vendor/syn/src/ty.rs92
-rw-r--r--vendor/syn/src/verbatim.rs20
-rw-r--r--vendor/syn/tests/common/eq.rs95
-rw-r--r--vendor/syn/tests/regression.rs2
-rw-r--r--vendor/syn/tests/regression/issue1235.rs32
-rw-r--r--vendor/syn/tests/repo/mod.rs2
-rw-r--r--vendor/syn/tests/test_expr.rs70
-rw-r--r--vendor/syn/tests/test_iterators.rs19
-rw-r--r--vendor/syn/tests/test_round_trip.rs8
-rw-r--r--vendor/syn/tests/test_size.rs4
-rw-r--r--vendor/thin-vec/.cargo-checksum.json2
-rw-r--r--vendor/thin-vec/Cargo.toml2
-rw-r--r--vendor/thin-vec/src/lib.rs1458
-rw-r--r--vendor/time-macros/.cargo-checksum.json1
-rw-r--r--vendor/time-macros/Cargo.toml45
-rw-r--r--vendor/time-macros/src/date.rs137
-rw-r--r--vendor/time-macros/src/datetime.rs57
-rw-r--r--vendor/time-macros/src/error.rs136
-rw-r--r--vendor/time-macros/src/format_description/component.rs168
-rw-r--r--vendor/time-macros/src/format_description/error.rs29
-rw-r--r--vendor/time-macros/src/format_description/mod.rs40
-rw-r--r--vendor/time-macros/src/format_description/modifier.rs417
-rw-r--r--vendor/time-macros/src/format_description/parse.rs84
-rw-r--r--vendor/time-macros/src/helpers/mod.rs129
-rw-r--r--vendor/time-macros/src/helpers/string.rs188
-rw-r--r--vendor/time-macros/src/lib.rs167
-rw-r--r--vendor/time-macros/src/offset.rs95
-rw-r--r--vendor/time-macros/src/quote.rs134
-rw-r--r--vendor/time-macros/src/serde_format_description.rs163
-rw-r--r--vendor/time-macros/src/time.rs118
-rw-r--r--vendor/time-macros/src/to_tokens.rs68
-rw-r--r--vendor/tinystr/.cargo-checksum.json2
-rw-r--r--vendor/tinystr/Cargo.toml14
-rw-r--r--vendor/tinystr/README.md6
-rw-r--r--vendor/tinystr/src/ascii.rs17
-rw-r--r--vendor/tinystr/src/error.rs3
-rw-r--r--vendor/tinystr/src/lib.rs8
-rw-r--r--vendor/toml-0.5.9/.cargo-checksum.json (renamed from vendor/toml/.cargo-checksum.json)0
-rw-r--r--vendor/toml-0.5.9/Cargo.lock (renamed from vendor/toml/Cargo.lock)0
-rw-r--r--vendor/toml-0.5.9/Cargo.toml (renamed from vendor/toml/Cargo.toml)0
-rw-r--r--vendor/toml-0.5.9/LICENSE-APACHE201
-rw-r--r--vendor/toml-0.5.9/LICENSE-MIT (renamed from vendor/toml/LICENSE-MIT)0
-rw-r--r--vendor/toml-0.5.9/README.md (renamed from vendor/toml/README.md)0
-rw-r--r--vendor/toml-0.5.9/examples/decode.rs (renamed from vendor/toml/examples/decode.rs)0
-rw-r--r--vendor/toml-0.5.9/examples/enum_external.rs (renamed from vendor/toml/examples/enum_external.rs)0
-rw-r--r--vendor/toml-0.5.9/examples/toml2json.rs (renamed from vendor/toml/examples/toml2json.rs)0
-rw-r--r--vendor/toml-0.5.9/src/datetime.rs (renamed from vendor/toml/src/datetime.rs)0
-rw-r--r--vendor/toml-0.5.9/src/de.rs (renamed from vendor/toml/src/de.rs)0
-rw-r--r--vendor/toml-0.5.9/src/lib.rs (renamed from vendor/toml/src/lib.rs)0
-rw-r--r--vendor/toml-0.5.9/src/macros.rs (renamed from vendor/toml/src/macros.rs)0
-rw-r--r--vendor/toml-0.5.9/src/map.rs (renamed from vendor/toml/src/map.rs)0
-rw-r--r--vendor/toml-0.5.9/src/ser.rs (renamed from vendor/toml/src/ser.rs)0
-rw-r--r--vendor/toml-0.5.9/src/spanned.rs (renamed from vendor/toml/src/spanned.rs)0
-rw-r--r--vendor/toml-0.5.9/src/tokens.rs (renamed from vendor/toml/src/tokens.rs)0
-rw-r--r--vendor/toml-0.5.9/src/value.rs (renamed from vendor/toml/src/value.rs)0
-rw-r--r--vendor/toml-0.5.9/tests/enum_external_deserialize.rs (renamed from vendor/toml/tests/enum_external_deserialize.rs)0
-rw-r--r--vendor/tracing-subscriber-0.3.3/.cargo-checksum.json1
-rw-r--r--vendor/tracing-subscriber-0.3.3/src/fmt/format/pretty.rs415
-rw-r--r--vendor/tracing-subscriber-0.3.3/src/fmt/time/time_crate.rs276
-rw-r--r--vendor/tracing-subscriber-0.3.3/src/reload.rs237
-rw-r--r--vendor/tracing-subscriber-0.3.3/tests/filter.rs187
-rw-r--r--vendor/tracing-subscriber-0.3.3/tests/reload.rs81
-rw-r--r--vendor/tracing-subscriber/.cargo-checksum.json1
-rw-r--r--vendor/tracing-subscriber/CHANGELOG.md (renamed from vendor/tracing-subscriber-0.3.3/CHANGELOG.md)392
-rw-r--r--vendor/tracing-subscriber/Cargo.toml (renamed from vendor/tracing-subscriber-0.3.3/Cargo.toml)168
-rw-r--r--vendor/tracing-subscriber/LICENSE (renamed from vendor/tracing-subscriber-0.3.3/LICENSE)0
-rw-r--r--vendor/tracing-subscriber/README.md (renamed from vendor/tracing-subscriber-0.3.3/README.md)6
-rw-r--r--vendor/tracing-subscriber/benches/enter.rs (renamed from vendor/tracing-subscriber-0.3.3/benches/enter.rs)0
-rw-r--r--vendor/tracing-subscriber/benches/filter.rs (renamed from vendor/tracing-subscriber-0.3.3/benches/filter.rs)0
-rw-r--r--vendor/tracing-subscriber/benches/filter_log.rs (renamed from vendor/tracing-subscriber-0.3.3/benches/filter_log.rs)0
-rw-r--r--vendor/tracing-subscriber/benches/fmt.rs (renamed from vendor/tracing-subscriber-0.3.3/benches/fmt.rs)0
-rw-r--r--vendor/tracing-subscriber/benches/support/mod.rs (renamed from vendor/tracing-subscriber-0.3.3/benches/support/mod.rs)0
-rw-r--r--vendor/tracing-subscriber/src/field/debug.rs (renamed from vendor/tracing-subscriber-0.3.3/src/field/debug.rs)0
-rw-r--r--vendor/tracing-subscriber/src/field/delimited.rs (renamed from vendor/tracing-subscriber-0.3.3/src/field/delimited.rs)4
-rw-r--r--vendor/tracing-subscriber/src/field/display.rs (renamed from vendor/tracing-subscriber-0.3.3/src/field/display.rs)2
-rw-r--r--vendor/tracing-subscriber/src/field/mod.rs (renamed from vendor/tracing-subscriber-0.3.3/src/field/mod.rs)14
-rw-r--r--vendor/tracing-subscriber/src/filter/directive.rs (renamed from vendor/tracing-subscriber-0.3.3/src/filter/directive.rs)34
-rw-r--r--vendor/tracing-subscriber/src/filter/env/builder.rs325
-rw-r--r--vendor/tracing-subscriber/src/filter/env/directive.rs (renamed from vendor/tracing-subscriber-0.3.3/src/filter/env/directive.rs)160
-rw-r--r--vendor/tracing-subscriber/src/filter/env/field.rs (renamed from vendor/tracing-subscriber-0.3.3/src/filter/env/field.rs)250
-rw-r--r--vendor/tracing-subscriber/src/filter/env/mod.rs (renamed from vendor/tracing-subscriber-0.3.3/src/filter/env/mod.rs)677
-rw-r--r--vendor/tracing-subscriber/src/filter/filter_fn.rs (renamed from vendor/tracing-subscriber-0.3.3/src/filter/filter_fn.rs)0
-rw-r--r--vendor/tracing-subscriber/src/filter/layer_filters/combinator.rs542
-rw-r--r--vendor/tracing-subscriber/src/filter/layer_filters/mod.rs (renamed from vendor/tracing-subscriber-0.3.3/src/filter/layer_filters.rs)454
-rw-r--r--vendor/tracing-subscriber/src/filter/level.rs (renamed from vendor/tracing-subscriber-0.3.3/src/filter/level.rs)0
-rw-r--r--vendor/tracing-subscriber/src/filter/mod.rs (renamed from vendor/tracing-subscriber-0.3.3/src/filter/mod.rs)0
-rw-r--r--vendor/tracing-subscriber/src/filter/targets.rs (renamed from vendor/tracing-subscriber-0.3.3/src/filter/targets.rs)104
-rw-r--r--vendor/tracing-subscriber/src/fmt/fmt_layer.rs (renamed from vendor/tracing-subscriber-0.3.3/src/fmt/fmt_layer.rs)321
-rw-r--r--vendor/tracing-subscriber/src/fmt/format/json.rs (renamed from vendor/tracing-subscriber-0.3.3/src/fmt/format/json.rs)175
-rw-r--r--vendor/tracing-subscriber/src/fmt/format/mod.rs (renamed from vendor/tracing-subscriber-0.3.3/src/fmt/format/mod.rs)459
-rw-r--r--vendor/tracing-subscriber/src/fmt/format/pretty.rs511
-rw-r--r--vendor/tracing-subscriber/src/fmt/mod.rs (renamed from vendor/tracing-subscriber-0.3.3/src/fmt/mod.rs)392
-rw-r--r--vendor/tracing-subscriber/src/fmt/time/datetime.rs (renamed from vendor/tracing-subscriber-0.3.3/src/fmt/time/datetime.rs)0
-rw-r--r--vendor/tracing-subscriber/src/fmt/time/mod.rs (renamed from vendor/tracing-subscriber-0.3.3/src/fmt/time/mod.rs)8
-rw-r--r--vendor/tracing-subscriber/src/fmt/time/time_crate.rs470
-rw-r--r--vendor/tracing-subscriber/src/fmt/writer.rs (renamed from vendor/tracing-subscriber-0.3.3/src/fmt/writer.rs)30
-rw-r--r--vendor/tracing-subscriber/src/layer/context.rs (renamed from vendor/tracing-subscriber-0.3.3/src/layer/context.rs)22
-rw-r--r--vendor/tracing-subscriber/src/layer/layered.rs (renamed from vendor/tracing-subscriber-0.3.3/src/layer/layered.rs)107
-rw-r--r--vendor/tracing-subscriber/src/layer/mod.rs (renamed from vendor/tracing-subscriber-0.3.3/src/layer/mod.rs)820
-rw-r--r--vendor/tracing-subscriber/src/layer/tests.rs (renamed from vendor/tracing-subscriber-0.3.3/src/layer/tests.rs)0
-rw-r--r--vendor/tracing-subscriber/src/lib.rs (renamed from vendor/tracing-subscriber-0.3.3/src/lib.rs)54
-rw-r--r--vendor/tracing-subscriber/src/macros.rs (renamed from vendor/tracing-subscriber-0.3.3/src/macros.rs)0
-rw-r--r--vendor/tracing-subscriber/src/prelude.rs (renamed from vendor/tracing-subscriber-0.3.3/src/prelude.rs)0
-rw-r--r--vendor/tracing-subscriber/src/registry/extensions.rs (renamed from vendor/tracing-subscriber-0.3.3/src/registry/extensions.rs)2
-rw-r--r--vendor/tracing-subscriber/src/registry/mod.rs (renamed from vendor/tracing-subscriber-0.3.3/src/registry/mod.rs)20
-rw-r--r--vendor/tracing-subscriber/src/registry/sharded.rs (renamed from vendor/tracing-subscriber-0.3.3/src/registry/sharded.rs)34
-rw-r--r--vendor/tracing-subscriber/src/registry/stack.rs (renamed from vendor/tracing-subscriber-0.3.3/src/registry/stack.rs)0
-rw-r--r--vendor/tracing-subscriber/src/reload.rs384
-rw-r--r--vendor/tracing-subscriber/src/sync.rs (renamed from vendor/tracing-subscriber-0.3.3/src/sync.rs)0
-rw-r--r--vendor/tracing-subscriber/src/util.rs (renamed from vendor/tracing-subscriber-0.3.3/src/util.rs)0
-rw-r--r--vendor/tracing-subscriber/tests/cached_layer_filters_dont_break_other_layers.rs (renamed from vendor/tracing-subscriber-0.3.3/tests/cached_layer_filters_dont_break_other_layers.rs)0
-rw-r--r--vendor/tracing-subscriber/tests/duplicate_spans.rs (renamed from vendor/tracing-subscriber-0.3.3/tests/duplicate_spans.rs)1
-rw-r--r--vendor/tracing-subscriber/tests/env_filter/main.rs547
-rw-r--r--vendor/tracing-subscriber/tests/env_filter/per_layer.rs305
-rw-r--r--vendor/tracing-subscriber/tests/event_enabling.rs81
-rw-r--r--vendor/tracing-subscriber/tests/field_filter.rs (renamed from vendor/tracing-subscriber-0.3.3/tests/field_filter.rs)4
-rw-r--r--vendor/tracing-subscriber/tests/filter_log.rs (renamed from vendor/tracing-subscriber-0.3.3/tests/filter_log.rs)4
-rw-r--r--vendor/tracing-subscriber/tests/fmt_max_level_hint.rs (renamed from vendor/tracing-subscriber-0.3.3/tests/fmt_max_level_hint.rs)0
-rw-r--r--vendor/tracing-subscriber/tests/hinted_layer_filters_dont_break_other_layers.rs (renamed from vendor/tracing-subscriber-0.3.3/tests/hinted_layer_filters_dont_break_other_layers.rs)0
-rw-r--r--vendor/tracing-subscriber/tests/layer_filter_interests_are_cached.rs (renamed from vendor/tracing-subscriber-0.3.3/tests/layer_filter_interests_are_cached.rs)0
-rw-r--r--vendor/tracing-subscriber/tests/layer_filters/boxed.rs (renamed from vendor/tracing-subscriber-0.3.3/tests/layer_filters/boxed.rs)0
-rw-r--r--vendor/tracing-subscriber/tests/layer_filters/combinators.rs42
-rw-r--r--vendor/tracing-subscriber/tests/layer_filters/downcast_raw.rs (renamed from vendor/tracing-subscriber-0.3.3/tests/layer_filters/downcast_raw.rs)0
-rw-r--r--vendor/tracing-subscriber/tests/layer_filters/filter_scopes.rs (renamed from vendor/tracing-subscriber-0.3.3/tests/layer_filters/filter_scopes.rs)0
-rw-r--r--vendor/tracing-subscriber/tests/layer_filters/main.rs (renamed from vendor/tracing-subscriber-0.3.3/tests/layer_filters/main.rs)4
-rw-r--r--vendor/tracing-subscriber/tests/layer_filters/per_event.rs61
-rw-r--r--vendor/tracing-subscriber/tests/layer_filters/targets.rs (renamed from vendor/tracing-subscriber-0.3.3/tests/layer_filters/targets.rs)0
-rw-r--r--vendor/tracing-subscriber/tests/layer_filters/trees.rs (renamed from vendor/tracing-subscriber-0.3.3/tests/layer_filters/trees.rs)0
-rw-r--r--vendor/tracing-subscriber/tests/layer_filters/vec.rs120
-rw-r--r--vendor/tracing-subscriber/tests/multiple_layer_filter_interests_cached.rs (renamed from vendor/tracing-subscriber-0.3.3/tests/multiple_layer_filter_interests_cached.rs)0
-rw-r--r--vendor/tracing-subscriber/tests/option.rs262
-rw-r--r--vendor/tracing-subscriber/tests/registry_max_level_hint.rs (renamed from vendor/tracing-subscriber-0.3.3/tests/registry_max_level_hint.rs)0
-rw-r--r--vendor/tracing-subscriber/tests/registry_with_subscriber.rs (renamed from vendor/tracing-subscriber-0.3.3/tests/registry_with_subscriber.rs)2
-rw-r--r--vendor/tracing-subscriber/tests/reload.rs155
-rw-r--r--vendor/tracing-subscriber/tests/same_len_filters.rs (renamed from vendor/tracing-subscriber-0.3.3/tests/same_len_filters.rs)4
-rw-r--r--vendor/tracing-subscriber/tests/support.rs (renamed from vendor/tracing-subscriber-0.3.3/tests/support.rs)19
-rw-r--r--vendor/tracing-subscriber/tests/unhinted_layer_filters_dont_break_other_layers.rs (renamed from vendor/tracing-subscriber-0.3.3/tests/unhinted_layer_filters_dont_break_other_layers.rs)0
-rw-r--r--vendor/tracing-subscriber/tests/utils.rs (renamed from vendor/tracing-subscriber-0.3.3/tests/utils.rs)4
-rw-r--r--vendor/tracing-subscriber/tests/vec.rs19
-rw-r--r--vendor/tracing-subscriber/tests/vec_subscriber_filter_interests_cached.rs117
-rw-r--r--vendor/tracing-tree/.cargo-checksum.json2
-rw-r--r--vendor/tracing-tree/Cargo.lock29
-rw-r--r--vendor/tracing-tree/Cargo.toml8
-rw-r--r--vendor/tracing-tree/src/format.rs4
-rw-r--r--vendor/tracing-tree/src/lib.rs2
-rw-r--r--vendor/unicode-bidi/.cargo-checksum.json2
-rw-r--r--vendor/unicode-bidi/Cargo.toml7
-rw-r--r--vendor/unicode-bidi/src/char_data/mod.rs18
-rw-r--r--vendor/unicode-bidi/src/char_data/tables.rs311
-rw-r--r--vendor/unicode-bidi/src/data_source.rs30
-rw-r--r--vendor/unicode-bidi/src/explicit.rs44
-rw-r--r--vendor/unicode-bidi/src/implicit.rs521
-rw-r--r--vendor/unicode-bidi/src/lib.rs93
-rw-r--r--vendor/unicode-bidi/src/prepare.rs102
-rw-r--r--vendor/unicode-ident/.cargo-checksum.json2
-rw-r--r--vendor/unicode-ident/Cargo.toml2
-rw-r--r--vendor/unicode-ident/README.md2
-rw-r--r--vendor/unicode-ident/src/tables.rs2
-rw-r--r--vendor/unicode-ident/tests/static_size.rs9
-rw-r--r--vendor/unicode-ident/tests/tables/mod.rs7
-rw-r--r--vendor/unicode-ident/tests/tables/tables.rs347
-rw-r--r--vendor/writeable/.cargo-checksum.json2
-rw-r--r--vendor/writeable/Cargo.lock110
-rw-r--r--vendor/writeable/Cargo.toml12
-rw-r--r--vendor/writeable/src/impls.rs104
-rw-r--r--vendor/writeable/src/lib.rs60
-rw-r--r--vendor/writeable/src/ops.rs5
-rw-r--r--vendor/xflags-macros/.cargo-checksum.json2
-rw-r--r--vendor/xflags-macros/Cargo.toml2
-rw-r--r--vendor/xflags-macros/tests/it/main.rs70
-rw-r--r--vendor/xflags/.cargo-checksum.json2
-rw-r--r--vendor/xflags/Cargo.lock6
-rw-r--r--vendor/xflags/Cargo.toml4
-rw-r--r--vendor/xflags/src/rt.rs58
-rw-r--r--vendor/xshell-macros/.cargo-checksum.json2
-rw-r--r--vendor/xshell-macros/Cargo.toml2
-rw-r--r--vendor/xshell/.cargo-checksum.json2
-rw-r--r--vendor/xshell/CHANGELOG.md4
-rw-r--r--vendor/xshell/Cargo.lock10
-rw-r--r--vendor/xshell/Cargo.toml4
-rw-r--r--vendor/xshell/src/lib.rs5
-rw-r--r--vendor/yoke-derive/.cargo-checksum.json2
-rw-r--r--vendor/yoke-derive/Cargo.lock87
-rw-r--r--vendor/yoke-derive/Cargo.toml13
-rw-r--r--vendor/yoke-derive/src/lib.rs55
-rw-r--r--vendor/yoke/.cargo-checksum.json2
-rw-r--r--vendor/yoke/Cargo.toml13
-rw-r--r--vendor/yoke/src/erased.rs8
-rw-r--r--vendor/yoke/src/is_covariant.rs142
-rw-r--r--vendor/yoke/src/lib.rs2
-rw-r--r--vendor/yoke/src/macro_impls.rs3
-rw-r--r--vendor/yoke/src/yoke.rs228
-rw-r--r--vendor/yoke/src/zero_from.rs5
-rw-r--r--vendor/zerovec-derive/.cargo-checksum.json2
-rw-r--r--vendor/zerovec-derive/Cargo.lock45
-rw-r--r--vendor/zerovec-derive/Cargo.toml8
-rw-r--r--vendor/zerovec-derive/src/make_varule.rs16
-rw-r--r--vendor/zerovec/.cargo-checksum.json2
-rw-r--r--vendor/zerovec/Cargo.lock327
-rw-r--r--vendor/zerovec/Cargo.toml29
-rw-r--r--vendor/zerovec/README.md2
-rw-r--r--vendor/zerovec/benches/vzv.rs8
-rw-r--r--vendor/zerovec/benches/zeromap.rs20
-rw-r--r--vendor/zerovec/src/flexzerovec/serde.rs8
-rw-r--r--vendor/zerovec/src/flexzerovec/slice.rs8
-rw-r--r--vendor/zerovec/src/lib.rs9
-rw-r--r--vendor/zerovec/src/map/borrowed.rs14
-rw-r--r--vendor/zerovec/src/map/map.rs4
-rw-r--r--vendor/zerovec/src/map/serde.rs8
-rw-r--r--vendor/zerovec/src/map2d/borrowed.rs9
-rw-r--r--vendor/zerovec/src/map2d/cursor.rs70
-rw-r--r--vendor/zerovec/src/map2d/map.rs12
-rw-r--r--vendor/zerovec/src/map2d/serde.rs8
-rw-r--r--vendor/zerovec/src/ule/mod.rs6
-rw-r--r--vendor/zerovec/src/ule/option.rs2
-rw-r--r--vendor/zerovec/src/ule/plain.rs19
-rw-r--r--vendor/zerovec/src/ule/unvalidated.rs10
-rw-r--r--vendor/zerovec/src/varzerovec/serde.rs8
-rw-r--r--vendor/zerovec/src/varzerovec/slice.rs28
-rw-r--r--vendor/zerovec/src/varzerovec/vec.rs16
-rw-r--r--vendor/zerovec/src/yoke_impls.rs14
-rw-r--r--vendor/zerovec/src/zerovec/mod.rs7
-rw-r--r--vendor/zerovec/src/zerovec/serde.rs10
-rw-r--r--vendor/zerovec/src/zerovec/slice.rs30
-rw-r--r--vendor/zip/.cargo-checksum.json2
-rw-r--r--vendor/zip/CHANGELOG.md9
-rw-r--r--vendor/zip/Cargo.lock134
-rw-r--r--vendor/zip/Cargo.toml18
-rw-r--r--vendor/zip/README.md4
-rw-r--r--vendor/zip/benches/read_metadata.rs5
-rw-r--r--vendor/zip/examples/extract.rs6
-rw-r--r--vendor/zip/examples/extract_lorem.rs4
-rw-r--r--vendor/zip/examples/file_info.rs4
-rw-r--r--vendor/zip/examples/stdin_info.rs4
-rw-r--r--vendor/zip/examples/write_dir.rs12
-rw-r--r--vendor/zip/examples/write_sample.rs6
-rw-r--r--vendor/zip/src/compression.rs6
-rw-r--r--vendor/zip/src/read.rs20
-rw-r--r--vendor/zip/src/result.rs21
-rw-r--r--vendor/zip/src/spec.rs16
-rw-r--r--vendor/zip/src/types.rs91
-rw-r--r--vendor/zip/src/write.rs56
-rw-r--r--vendor/zip/tests/end_to_end.rs2
-rw-r--r--vendor/zip/tests/zip64_large.rs2
872 files changed, 101010 insertions, 22175 deletions
diff --git a/vendor/addr2line-0.17.0/.cargo-checksum.json b/vendor/addr2line-0.17.0/.cargo-checksum.json
new file mode 100644
index 000000000..b43ad3bbf
--- /dev/null
+++ b/vendor/addr2line-0.17.0/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"CHANGELOG.md":"d4ef249a0a4eff26a34a1f847a3c367dfd9988b4da972ac9c16b1d258b62ad87","Cargo.lock":"290a48d58d1ebfef0f5eaec66191f6c1a41080b89e10e931c6984052008479ab","Cargo.toml":"68243a813e2e6ba40d3e939b9ade5489b3f39a58d7dc391ae447a60591315f4a","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"e99d88d232bf57d70f0fb87f6b496d44b6653f99f8a63d250a54c61ea4bcde40","README.md":"76d28502bd2e83f6a9e3576bd45e9a7fe5308448c4b5384b0d249515b5f67a5c","bench.plot.r":"6a5d7a4d36ed6b3d9919be703a479bef47698bf947818b483ff03951df2d4e01","benchmark.sh":"b35f89b1ca2c1dc0476cdd07f0284b72d41920d1c7b6054072f50ffba296d78d","coverage.sh":"4677e81922d08a82e83068a911717a247c66af12e559f37b78b6be3337ac9f07","examples/addr2line.rs":"75ef29e1d07d49d247990ad970892d64f629766bafa36afddff5a88976e58060","rustfmt.toml":"01ba4719c80b6fe911b091a7c05124b64eeece964e09c058ef8f9805daca546b","src/function.rs":"395f37cdf03201d416d66bc11abeea627be0abb4585104acd927224a26cb9369","src/lazy.rs":"14ec61761369c21d426673f549c21394221533f444b68cd2a8370952eb19f345","src/lib.rs":"5696c0aee67df576f78935c66bb124f4e5fa19cbc9b25faf8f750e7e8dda113c","tests/correctness.rs":"c9325ffdec577bf5e56f5dd72fdff4927153d0a4c34c0fda5aefaeb44a8d26fd","tests/output_equivalence.rs":"38d7b585b7a2ca43b07eef6b34c11f489d1deae138a010123c33188dfb881c11","tests/parse.rs":"9e421ea9d9348721f6c6533cdba1db5b84287fc685f870c7905dea06b596b4db"},"package":"b9ecd88a8c8378ca913a680cd98f0f13ac67383d35993f86c90a70e3f137816b"} \ No newline at end of file
diff --git a/vendor/addr2line-0.17.0/CHANGELOG.md b/vendor/addr2line-0.17.0/CHANGELOG.md
new file mode 100644
index 000000000..914139400
--- /dev/null
+++ b/vendor/addr2line-0.17.0/CHANGELOG.md
@@ -0,0 +1,260 @@
+## 0.17.0 (2021/10/24)
+
+### Breaking changes
+
+* Updated `gimli` and `object` dependencies.
+
+### Changed
+
+* Use `skip_attributes` to improve performance.
+ [#236](https://github.com/gimli-rs/addr2line/pull/236)
+
+--------------------------------------------------------------------------------
+
+## 0.16.0 (2021/07/26)
+
+### Breaking changes
+
+* Updated `gimli` and `object` dependencies.
+
+--------------------------------------------------------------------------------
+
+## 0.15.2 (2021/06/04)
+
+### Fixed
+
+* Allow `Context` to be `Send`.
+ [#219](https://github.com/gimli-rs/addr2line/pull/219)
+
+--------------------------------------------------------------------------------
+
+## 0.15.1 (2021/05/02)
+
+### Fixed
+
+* Don't ignore aranges with address 0.
+ [#217](https://github.com/gimli-rs/addr2line/pull/217)
+
+--------------------------------------------------------------------------------
+
+## 0.15.0 (2021/05/02)
+
+### Breaking changes
+
+* Updated `gimli` and `object` dependencies.
+ [#215](https://github.com/gimli-rs/addr2line/pull/215)
+
+* Added `debug_aranges` parameter to `Context::from_sections`.
+ [#200](https://github.com/gimli-rs/addr2line/pull/200)
+
+### Added
+
+* Added `.debug_aranges` support.
+ [#200](https://github.com/gimli-rs/addr2line/pull/200)
+
+* Added supplementary object file support.
+ [#208](https://github.com/gimli-rs/addr2line/pull/208)
+
+### Fixed
+
+* Fixed handling of Windows paths in locations.
+ [#209](https://github.com/gimli-rs/addr2line/pull/209)
+
+* examples/addr2line: Flush stdout after each response.
+ [#210](https://github.com/gimli-rs/addr2line/pull/210)
+
+* examples/addr2line: Avoid copying every section.
+ [#213](https://github.com/gimli-rs/addr2line/pull/213)
+
+--------------------------------------------------------------------------------
+
+## 0.14.1 (2020/12/31)
+
+### Fixed
+
+* Fix location lookup for skeleton units.
+ [#201](https://github.com/gimli-rs/addr2line/pull/201)
+
+### Added
+
+* Added `Context::find_location_range`.
+ [#196](https://github.com/gimli-rs/addr2line/pull/196)
+ [#199](https://github.com/gimli-rs/addr2line/pull/199)
+
+--------------------------------------------------------------------------------
+
+## 0.14.0 (2020/10/27)
+
+### Breaking changes
+
+* Updated `gimli` and `object` dependencies.
+
+### Fixed
+
+* Handle units that only have line information.
+ [#188](https://github.com/gimli-rs/addr2line/pull/188)
+
+* Handle DWARF units with version <= 4 and no `DW_AT_name`.
+ [#191](https://github.com/gimli-rs/addr2line/pull/191)
+
+* Fix handling of `DW_FORM_ref_addr`.
+ [#193](https://github.com/gimli-rs/addr2line/pull/193)
+
+--------------------------------------------------------------------------------
+
+## 0.13.0 (2020/07/07)
+
+### Breaking changes
+
+* Updated `gimli` and `object` dependencies.
+
+* Added `rustc-dep-of-std` feature.
+ [#166](https://github.com/gimli-rs/addr2line/pull/166)
+
+### Changed
+
+* Improve performance by parsing function contents lazily.
+ [#178](https://github.com/gimli-rs/addr2line/pull/178)
+
+* Don't skip `.debug_info` and `.debug_line` entries with a zero address.
+ [#182](https://github.com/gimli-rs/addr2line/pull/182)
+
+--------------------------------------------------------------------------------
+
+## 0.12.2 (2020/06/21)
+
+### Fixed
+
+* Avoid linear search for `DW_FORM_ref_addr`.
+ [#175](https://github.com/gimli-rs/addr2line/pull/175)
+
+--------------------------------------------------------------------------------
+
+## 0.12.1 (2020/05/19)
+
+### Fixed
+
+* Handle units with overlapping address ranges.
+ [#163](https://github.com/gimli-rs/addr2line/pull/163)
+
+* Don't assert for functions with overlapping address ranges.
+ [#168](https://github.com/gimli-rs/addr2line/pull/168)
+
+--------------------------------------------------------------------------------
+
+## 0.12.0 (2020/05/12)
+
+### Breaking changes
+
+* Updated `gimli` and `object` dependencies.
+
+* Added more optional features: `smallvec` and `fallible-iterator`.
+ [#160](https://github.com/gimli-rs/addr2line/pull/160)
+
+### Added
+
+* Added `Context::dwarf` and `Context::find_dwarf_unit`.
+ [#159](https://github.com/gimli-rs/addr2line/pull/159)
+
+### Changed
+
+* Removed `lazycell` dependency.
+ [#160](https://github.com/gimli-rs/addr2line/pull/160)
+
+--------------------------------------------------------------------------------
+
+## 0.11.0 (2020/01/11)
+
+### Breaking changes
+
+* Updated `gimli` and `object` dependencies.
+
+* [#130](https://github.com/gimli-rs/addr2line/pull/130)
+ Changed `Location::file` from `Option<String>` to `Option<&str>`.
+ This required adding lifetime parameters to `Location` and other structs that
+ contain it.
+
+* [#152](https://github.com/gimli-rs/addr2line/pull/152)
+ Changed `Location::line` and `Location::column` from `Option<u64>` to `Option<u32>`.
+
+* [#156](https://github.com/gimli-rs/addr2line/pull/156)
+ Deleted `alloc` feature, and fixed `no-std` builds with stable rust.
+ Removed default `Reader` parameter for `Context`, and added `ObjectContext` instead.
+
+### Added
+
+* [#134](https://github.com/gimli-rs/addr2line/pull/134)
+ Added `Context::from_dwarf`.
+
+### Changed
+
+* [#133](https://github.com/gimli-rs/addr2line/pull/133)
+ Fixed handling of units that can't be parsed.
+
+* [#155](https://github.com/gimli-rs/addr2line/pull/155)
+ Fixed `addr2line` output to match binutils.
+
+* [#130](https://github.com/gimli-rs/addr2line/pull/130)
+ Improved `.debug_line` parsing performance.
+
+* [#148](https://github.com/gimli-rs/addr2line/pull/148)
+ [#150](https://github.com/gimli-rs/addr2line/pull/150)
+ [#151](https://github.com/gimli-rs/addr2line/pull/151)
+ [#152](https://github.com/gimli-rs/addr2line/pull/152)
+ Improved `.debug_info` parsing performance.
+
+* [#137](https://github.com/gimli-rs/addr2line/pull/137)
+ [#138](https://github.com/gimli-rs/addr2line/pull/138)
+ [#139](https://github.com/gimli-rs/addr2line/pull/139)
+ [#140](https://github.com/gimli-rs/addr2line/pull/140)
+ [#146](https://github.com/gimli-rs/addr2line/pull/146)
+ Improved benchmarks.
+
+--------------------------------------------------------------------------------
+
+## 0.10.0 (2019/07/07)
+
+### Breaking changes
+
+* [#127](https://github.com/gimli-rs/addr2line/pull/127)
+ Update `gimli`.
+
+--------------------------------------------------------------------------------
+
+## 0.9.0 (2019/05/02)
+
+### Breaking changes
+
+* [#121](https://github.com/gimli-rs/addr2line/pull/121)
+ Update `gimli`, `object`, and `fallible-iterator` dependencies.
+
+### Added
+
+* [#121](https://github.com/gimli-rs/addr2line/pull/121)
+ Reexport `gimli`, `object`, and `fallible-iterator`.
+
+--------------------------------------------------------------------------------
+
+## 0.8.0 (2019/02/06)
+
+### Breaking changes
+
+* [#107](https://github.com/gimli-rs/addr2line/pull/107)
+ Update `object` dependency to 0.11. This is part of the public API.
+
+### Added
+
+* [#101](https://github.com/gimli-rs/addr2line/pull/101)
+ Add `object` feature (enabled by default). Disable this feature to remove
+ the `object` dependency and `Context::new` API.
+
+* [#102](https://github.com/gimli-rs/addr2line/pull/102)
+ Add `std` (enabled by default) and `alloc` features.
+
+### Changed
+
+* [#108](https://github.com/gimli-rs/addr2line/issues/108)
+ `demangle` no longer outputs the hash for Rust symbols.
+
+* [#109](https://github.com/gimli-rs/addr2line/issues/109)
+ Set default `R` for `Context<R>`.
diff --git a/vendor/addr2line-0.17.0/Cargo.lock b/vendor/addr2line-0.17.0/Cargo.lock
new file mode 100644
index 000000000..630d72438
--- /dev/null
+++ b/vendor/addr2line-0.17.0/Cargo.lock
@@ -0,0 +1,430 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "addr2line"
+version = "0.16.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3e61f2b7f93d2c7d2b08263acaa4a363b3e276806c68af6134c44f523bf1aacd"
+dependencies = [
+ "gimli 0.25.0",
+]
+
+[[package]]
+name = "addr2line"
+version = "0.17.0"
+dependencies = [
+ "backtrace",
+ "clap",
+ "compiler_builtins",
+ "cpp_demangle",
+ "fallible-iterator",
+ "findshlibs",
+ "gimli 0.26.0",
+ "memmap",
+ "object",
+ "rustc-demangle",
+ "rustc-std-workspace-alloc",
+ "rustc-std-workspace-core",
+ "rustc-test",
+ "smallvec",
+ "typed-arena",
+]
+
+[[package]]
+name = "adler"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
+
+[[package]]
+name = "ansi_term"
+version = "0.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b"
+dependencies = [
+ "winapi 0.3.9",
+]
+
+[[package]]
+name = "atty"
+version = "0.2.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
+dependencies = [
+ "hermit-abi",
+ "libc",
+ "winapi 0.3.9",
+]
+
+[[package]]
+name = "autocfg"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a"
+
+[[package]]
+name = "backtrace"
+version = "0.3.62"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "091bcdf2da9950f96aa522681ce805e6857f6ca8df73833d35736ab2dc78e152"
+dependencies = [
+ "addr2line 0.16.0",
+ "cc",
+ "cfg-if",
+ "libc",
+ "miniz_oxide",
+ "object",
+ "rustc-demangle",
+]
+
+[[package]]
+name = "bitflags"
+version = "1.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
+
+[[package]]
+name = "cc"
+version = "1.0.71"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "79c2681d6594606957bbb8631c4b90a7fcaaa72cdb714743a437b156d6a7eedd"
+
+[[package]]
+name = "cfg-if"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
+
+[[package]]
+name = "clap"
+version = "2.33.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "37e58ac78573c40708d45522f0d80fa2f01cc4f9b4e2bf749807255454312002"
+dependencies = [
+ "ansi_term",
+ "atty",
+ "bitflags",
+ "strsim",
+ "textwrap",
+ "unicode-width",
+ "vec_map",
+]
+
+[[package]]
+name = "compiler_builtins"
+version = "0.1.51"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3587b3669d6f2c1cfd34c475272dabcfef29d52703933f6f72ebb36d6bd81a97"
+
+[[package]]
+name = "cpp_demangle"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8ea47428dc9d2237f3c6bc134472edfd63ebba0af932e783506dcfd66f10d18a"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "crc32fast"
+version = "1.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "81156fece84ab6a9f2afdb109ce3ae577e42b1228441eded99bd77f627953b1a"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "fallible-iterator"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7"
+
+[[package]]
+name = "findshlibs"
+version = "0.10.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d691fdb3f817632d259d09220d4cf0991dbb2c9e59e044a02a59194bf6e14484"
+dependencies = [
+ "cc",
+ "lazy_static",
+ "libc",
+ "winapi 0.3.9",
+]
+
+[[package]]
+name = "flate2"
+version = "1.0.22"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e6988e897c1c9c485f43b47a529cef42fde0547f9d8d41a7062518f1d8fc53f"
+dependencies = [
+ "cfg-if",
+ "crc32fast",
+ "libc",
+ "miniz_oxide",
+]
+
+[[package]]
+name = "getopts"
+version = "0.2.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "14dbbfd5c71d70241ecf9e6f13737f7b5ce823821063188d7e46c41d371eebd5"
+dependencies = [
+ "unicode-width",
+]
+
+[[package]]
+name = "gimli"
+version = "0.25.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f0a01e0497841a3b2db4f8afa483cce65f7e96a3498bd6c541734792aeac8fe7"
+
+[[package]]
+name = "gimli"
+version = "0.26.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "81a03ce013ffccead76c11a15751231f777d9295b845cc1266ed4d34fcbd7977"
+dependencies = [
+ "compiler_builtins",
+ "fallible-iterator",
+ "rustc-std-workspace-alloc",
+ "rustc-std-workspace-core",
+ "stable_deref_trait",
+]
+
+[[package]]
+name = "hermit-abi"
+version = "0.1.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "kernel32-sys"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
+dependencies = [
+ "winapi 0.2.8",
+ "winapi-build",
+]
+
+[[package]]
+name = "lazy_static"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
+
+[[package]]
+name = "libc"
+version = "0.2.105"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "869d572136620d55835903746bcb5cdc54cb2851fd0aeec53220b4bb65ef3013"
+
+[[package]]
+name = "memchr"
+version = "2.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a"
+
+[[package]]
+name = "memmap"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6585fd95e7bb50d6cc31e20d4cf9afb4e2ba16c5846fc76793f11218da9c475b"
+dependencies = [
+ "libc",
+ "winapi 0.3.9",
+]
+
+[[package]]
+name = "miniz_oxide"
+version = "0.4.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a92518e98c078586bc6c934028adcca4c92a53d6a958196de835170a01d84e4b"
+dependencies = [
+ "adler",
+ "autocfg",
+]
+
+[[package]]
+name = "object"
+version = "0.27.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "67ac1d3f9a1d3616fd9a60c8d74296f22406a238b6a72f5cc1e6f314df4ffbf9"
+dependencies = [
+ "flate2",
+ "memchr",
+]
+
+[[package]]
+name = "rustc-demangle"
+version = "0.1.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7ef03e0a2b150c7a90d01faf6254c9c48a41e95fb2a8c2ac1c6f0d2b9aefc342"
+
+[[package]]
+name = "rustc-serialize"
+version = "0.3.24"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda"
+
+[[package]]
+name = "rustc-std-workspace-alloc"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ff66d57013a5686e1917ed6a025d54dd591fcda71a41fe07edf4d16726aefa86"
+
+[[package]]
+name = "rustc-std-workspace-core"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1956f5517128a2b6f23ab2dadf1a976f4f5b27962e7724c2bf3d45e539ec098c"
+
+[[package]]
+name = "rustc-test"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "aacc7967b0ae83af208c8caf2781cbf96f01dac0157cd89f7f05324d6d4e59bb"
+dependencies = [
+ "getopts",
+ "libc",
+ "rustc-serialize",
+ "rustc_version",
+ "term",
+ "time",
+]
+
+[[package]]
+name = "rustc_version"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a"
+dependencies = [
+ "semver",
+]
+
+[[package]]
+name = "semver"
+version = "0.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403"
+dependencies = [
+ "semver-parser",
+]
+
+[[package]]
+name = "semver-parser"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"
+
+[[package]]
+name = "smallvec"
+version = "1.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1ecab6c735a6bb4139c0caafd0cc3635748bbb3acf4550e8138122099251f309"
+
+[[package]]
+name = "stable_deref_trait"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
+
+[[package]]
+name = "strsim"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a"
+
+[[package]]
+name = "term"
+version = "0.4.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fa63644f74ce96fbeb9b794f66aff2a52d601cbd5e80f4b97123e3899f4570f1"
+dependencies = [
+ "kernel32-sys",
+ "winapi 0.2.8",
+]
+
+[[package]]
+name = "textwrap"
+version = "0.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060"
+dependencies = [
+ "unicode-width",
+]
+
+[[package]]
+name = "time"
+version = "0.1.44"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6db9e6914ab8b1ae1c260a4ae7a49b6c5611b40328a735b21862567685e73255"
+dependencies = [
+ "libc",
+ "wasi",
+ "winapi 0.3.9",
+]
+
+[[package]]
+name = "typed-arena"
+version = "2.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0685c84d5d54d1c26f7d3eb96cd41550adb97baed141a761cf335d3d33bcd0ae"
+
+[[package]]
+name = "unicode-width"
+version = "0.1.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3ed742d4ea2bd1176e236172c8429aaf54486e7ac098db29ffe6529e0ce50973"
+
+[[package]]
+name = "vec_map"
+version = "0.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191"
+
+[[package]]
+name = "wasi"
+version = "0.10.0+wasi-snapshot-preview1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f"
+
+[[package]]
+name = "winapi"
+version = "0.2.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a"
+
+[[package]]
+name = "winapi"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
+dependencies = [
+ "winapi-i686-pc-windows-gnu",
+ "winapi-x86_64-pc-windows-gnu",
+]
+
+[[package]]
+name = "winapi-build"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc"
+
+[[package]]
+name = "winapi-i686-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
+
+[[package]]
+name = "winapi-x86_64-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
diff --git a/vendor/addr2line-0.17.0/Cargo.toml b/vendor/addr2line-0.17.0/Cargo.toml
new file mode 100644
index 000000000..358995e53
--- /dev/null
+++ b/vendor/addr2line-0.17.0/Cargo.toml
@@ -0,0 +1,120 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies
+#
+# If you believe there's an error in this file please file an
+# issue against the rust-lang/cargo repository. If you're
+# editing this file be aware that the upstream Cargo.toml
+# will likely look very different (and much more reasonable)
+
+[package]
+name = "addr2line"
+version = "0.17.0"
+exclude = ["/benches/*", "/fixtures/*", ".github"]
+description = "A cross-platform symbolication library written in Rust, using `gimli`"
+documentation = "https://docs.rs/addr2line"
+readme = "./README.md"
+keywords = ["DWARF", "debug", "elf", "symbolicate", "atos"]
+categories = ["development-tools::debugging"]
+license = "Apache-2.0 OR MIT"
+repository = "https://github.com/gimli-rs/addr2line"
+[profile.bench]
+codegen-units = 1
+debug = true
+split-debuginfo = "packed"
+
+[profile.dev]
+split-debuginfo = "packed"
+
+[profile.release]
+debug = true
+split-debuginfo = "packed"
+
+[profile.test]
+split-debuginfo = "packed"
+
+[[example]]
+name = "addr2line"
+required-features = ["std-object"]
+
+[[test]]
+name = "output_equivalence"
+harness = false
+required-features = ["std-object"]
+
+[[test]]
+name = "correctness"
+required-features = ["default"]
+
+[[test]]
+name = "parse"
+required-features = ["std-object"]
+[dependencies.alloc]
+version = "1.0.0"
+optional = true
+package = "rustc-std-workspace-alloc"
+
+[dependencies.compiler_builtins]
+version = "0.1.2"
+optional = true
+
+[dependencies.core]
+version = "1.0.0"
+optional = true
+package = "rustc-std-workspace-core"
+
+[dependencies.cpp_demangle]
+version = "0.3"
+optional = true
+default-features = false
+
+[dependencies.fallible-iterator]
+version = "0.2"
+optional = true
+default-features = false
+
+[dependencies.gimli]
+version = "0.26"
+features = ["read"]
+default-features = false
+
+[dependencies.object]
+version = "0.27.1"
+features = ["read"]
+optional = true
+default-features = false
+
+[dependencies.rustc-demangle]
+version = "0.1"
+optional = true
+
+[dependencies.smallvec]
+version = "1"
+optional = true
+default-features = false
+[dev-dependencies.backtrace]
+version = "0.3.13"
+
+[dev-dependencies.clap]
+version = "2"
+
+[dev-dependencies.findshlibs]
+version = "0.10"
+
+[dev-dependencies.memmap]
+version = "0.7"
+
+[dev-dependencies.rustc-test]
+version = "0.3"
+
+[dev-dependencies.typed-arena]
+version = "2"
+
+[features]
+default = ["rustc-demangle", "cpp_demangle", "std-object", "fallible-iterator", "smallvec"]
+rustc-dep-of-std = ["core", "alloc", "compiler_builtins", "gimli/rustc-dep-of-std"]
+std = ["gimli/std"]
+std-object = ["std", "object", "object/std", "object/compression", "gimli/endian-reader"]
diff --git a/vendor/parking_lot_core-0.8.5/LICENSE-APACHE b/vendor/addr2line-0.17.0/LICENSE-APACHE
index 16fe87b06..16fe87b06 100644
--- a/vendor/parking_lot_core-0.8.5/LICENSE-APACHE
+++ b/vendor/addr2line-0.17.0/LICENSE-APACHE
diff --git a/vendor/addr2line-0.17.0/LICENSE-MIT b/vendor/addr2line-0.17.0/LICENSE-MIT
new file mode 100644
index 000000000..3a03f1f85
--- /dev/null
+++ b/vendor/addr2line-0.17.0/LICENSE-MIT
@@ -0,0 +1,25 @@
+Copyright (c) 2016-2018 The gimli Developers
+
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
diff --git a/vendor/addr2line-0.17.0/README.md b/vendor/addr2line-0.17.0/README.md
new file mode 100644
index 000000000..dc6cb9344
--- /dev/null
+++ b/vendor/addr2line-0.17.0/README.md
@@ -0,0 +1,48 @@
+# addr2line
+
+[![](https://img.shields.io/crates/v/addr2line.svg)](https://crates.io/crates/addr2line)
+[![](https://img.shields.io/docsrs/addr2line.svg)](https://docs.rs/addr2line)
+[![Coverage Status](https://coveralls.io/repos/github/gimli-rs/addr2line/badge.svg?branch=master)](https://coveralls.io/github/gimli-rs/addr2line?branch=master)
+
+A cross-platform library for retrieving per-address debug information
+from files with DWARF debug information.
+
+`addr2line` uses [`gimli`](https://github.com/gimli-rs/gimli) to parse
+the debug information, and exposes an interface for finding
+the source file, line number, and wrapping function for instruction
+addresses within the target program. These lookups can either be
+performed programmatically through `Context::find_location` and
+`Context::find_frames`, or via the included example binary,
+`addr2line` (named and modelled after the equivalent utility from
+[GNU binutils](https://sourceware.org/binutils/docs/binutils/addr2line.html)).
+
+# Quickstart
+ - Add the [`addr2line` crate](https://crates.io/crates/addr2line) to your `Cargo.toml`
+ - Load the file and parse it with [`addr2line::object::read::File::parse`](https://docs.rs/object/*/object/read/struct.File.html#method.parse)
+ - Pass the parsed file to [`addr2line::Context::new`](https://docs.rs/addr2line/*/addr2line/struct.Context.html#method.new)
+ - Use [`addr2line::Context::find_location`](https://docs.rs/addr2line/*/addr2line/struct.Context.html#method.find_location)
+ or [`addr2line::Context::find_frames`](https://docs.rs/addr2line/*/addr2line/struct.Context.html#method.find_frames)
+ to look up debug information for an address
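+
+The sketch below strings these steps together (illustrative only: the path and
+the address are placeholders, and errors are simply propagated):
+
+```rust
+fn main() -> Result<(), Box<dyn std::error::Error>> {
+    // Read the file and parse it with the re-exported `object` crate.
+    let data = std::fs::read("/path/to/binary")?;
+    let object = addr2line::object::File::parse(&*data)?;
+
+    // Building a `Context` is relatively expensive, so reuse it for many lookups.
+    let ctx = addr2line::Context::new(&object)?;
+
+    // Look up the source location for an address.
+    if let Some(loc) = ctx.find_location(0x2370)? {
+        println!("{}:{}", loc.file.unwrap_or("?"), loc.line.unwrap_or(0));
+    }
+    Ok(())
+}
+```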
+
+# Performance
+
+`addr2line` optimizes for speed over memory by caching parsed information.
+The DWARF information is parsed lazily where possible.
+
+The library aims to perform similarly to equivalent existing tools such
+as `addr2line` from binutils, `eu-addr2line` from elfutils, and
+`llvm-symbolizer` from the LLVM project; past benchmarking indicated
+comparable performance.
+
+## License
+
+Licensed under either of
+
+ * Apache License, Version 2.0 ([`LICENSE-APACHE`](./LICENSE-APACHE) or https://www.apache.org/licenses/LICENSE-2.0)
+ * MIT license ([`LICENSE-MIT`](./LICENSE-MIT) or https://opensource.org/licenses/MIT)
+
+at your option.
+
+Unless you explicitly state otherwise, any contribution intentionally submitted
+for inclusion in the work by you, as defined in the Apache-2.0 license, shall be
+dual licensed as above, without any additional terms or conditions.
diff --git a/vendor/addr2line-0.17.0/bench.plot.r b/vendor/addr2line-0.17.0/bench.plot.r
new file mode 100644
index 000000000..ecbf24893
--- /dev/null
+++ b/vendor/addr2line-0.17.0/bench.plot.r
@@ -0,0 +1,23 @@
+v <- read.table(file("stdin"))
+t <- data.frame(prog=v[,1], funcs=(v[,2]=="func"), time=v[,3], mem=v[,4], stringsAsFactors=FALSE)
+
+t$prog <- as.character(t$prog)
+t$prog[t$prog == "master"] <- "gimli-rs/addr2line"
+t$funcs[t$funcs == TRUE] <- "With functions"
+t$funcs[t$funcs == FALSE] <- "File/line only"
+t$mem = t$mem / 1024.0
+
+library(ggplot2)
+p <- ggplot(data=t, aes(x=prog, y=time, fill=prog))
+p <- p + geom_bar(stat = "identity")
+p <- p + facet_wrap(~ funcs)
+p <- p + theme(axis.title.x=element_blank(), axis.text.x=element_blank(), axis.ticks.x=element_blank())
+p <- p + ylab("time (s)") + ggtitle("addr2line runtime")
+ggsave('time.png',plot=p,width=10,height=6)
+
+p <- ggplot(data=t, aes(x=prog, y=mem, fill=prog))
+p <- p + geom_bar(stat = "identity")
+p <- p + facet_wrap(~ funcs)
+p <- p + theme(axis.title.x=element_blank(), axis.text.x=element_blank(), axis.ticks.x=element_blank())
+p <- p + ylab("memory (kB)") + ggtitle("addr2line memory usage")
+ggsave('memory.png',plot=p,width=10,height=6)
diff --git a/vendor/addr2line-0.17.0/benchmark.sh b/vendor/addr2line-0.17.0/benchmark.sh
new file mode 100755
index 000000000..ca4c4f6ec
--- /dev/null
+++ b/vendor/addr2line-0.17.0/benchmark.sh
@@ -0,0 +1,112 @@
+#!/bin/bash
+if [[ $# -le 1 ]]; then
+ echo "Usage: $0 <executable> [<addresses>] REFS..."
+ exit 1
+fi
+target="$1"
+shift
+
+addresses=""
+if [[ -e "$1" ]]; then
+ addresses="$1"
+ shift
+fi
+
+# path to "us"
+# readlink -f, but more portable:
+dirname=$(perl -e 'use Cwd "abs_path";print abs_path(shift)' "$(dirname "$0")")
+
+# https://stackoverflow.com/a/2358432/472927
+{
+ # compile all refs
+ pushd "$dirname" > /dev/null
+ # if the user has some local changes, preserve them
+ nstashed=$(git stash list | wc -l)
+ echo "==> Stashing any local modifications"
+ git stash --keep-index > /dev/null
+ popstash() {
+ # https://stackoverflow.com/q/24520791/472927
+ if [[ "$(git stash list | wc -l)" -ne "$nstashed" ]]; then
+ echo "==> Restoring stashed state"
+ git stash pop > /dev/null
+ fi
+ }
+ # if the user has added stuff to the index, abort
+ if ! git diff-index --quiet HEAD --; then
+ echo "Refusing to overwrite outstanding git changes"
+ popstash
+ exit 2
+ fi
+ current=$(git symbolic-ref --short HEAD)
+ for ref in "$@"; do
+ echo "==> Compiling $ref"
+ git checkout -q "$ref"
+ commit=$(git rev-parse HEAD)
+ fn="target/release/addr2line-$commit"
+ if [[ ! -e "$fn" ]]; then
+ cargo build --release --example addr2line
+ cp target/release/examples/addr2line "$fn"
+ fi
+ if [[ "$ref" != "$commit" ]]; then
+ ln -sfn "addr2line-$commit" target/release/addr2line-"$ref"
+ fi
+ done
+ git checkout -q "$current"
+ popstash
+ popd > /dev/null
+
+ # get us some addresses to look up
+ if [[ -z "$addresses" ]]; then
+ echo "==> Looking for benchmarking addresses (this may take a while)"
+ addresses=$(mktemp tmp.XXXXXXXXXX)
+ objdump -C -x --disassemble -l "$target" \
+ | grep -P '0[048]:' \
+ | awk '{print $1}' \
+ | sed 's/:$//' \
+ > "$addresses"
+ echo " -> Addresses stored in $addresses; you should re-use it next time"
+ fi
+
+ run() {
+ func="$1"
+ name="$2"
+ cmd="$3"
+ args="$4"
+ printf "%s\t%s\t" "$name" "$func"
+ if [[ "$cmd" =~ llvm-symbolizer ]]; then
+ /usr/bin/time -f '%e\t%M' "$cmd" $args -obj="$target" < "$addresses" 2>&1 >/dev/null
+ else
+ /usr/bin/time -f '%e\t%M' "$cmd" $args -e "$target" < "$addresses" 2>&1 >/dev/null
+ fi
+ }
+
+ # run without functions
+ log1=$(mktemp tmp.XXXXXXXXXX)
+ echo "==> Benchmarking"
+ run nofunc binutils addr2line >> "$log1"
+ #run nofunc elfutils eu-addr2line >> "$log1"
+ run nofunc llvm-sym llvm-symbolizer -functions=none >> "$log1"
+ for ref in "$@"; do
+ run nofunc "$ref" "$dirname/target/release/addr2line-$ref" >> "$log1"
+ done
+ cat "$log1" | column -t
+
+ # run with functions
+ log2=$(mktemp tmp.XXXXXXXXXX)
+ echo "==> Benchmarking with -f"
+ run func binutils addr2line "-f -i" >> "$log2"
+ #run func elfutils eu-addr2line "-f -i" >> "$log2"
+ run func llvm-sym llvm-symbolizer "-functions=linkage -demangle=0" >> "$log2"
+ for ref in "$@"; do
+ run func "$ref" "$dirname/target/release/addr2line-$ref" "-f -i" >> "$log2"
+ done
+ cat "$log2" | column -t
+ cat "$log2" >> "$log1"; rm "$log2"
+
+ echo "==> Plotting"
+ Rscript --no-readline --no-restore --no-save "$dirname/bench.plot.r" < "$log1"
+
+ echo "==> Cleaning up"
+ rm "$log1"
+ exit 0
+}
diff --git a/vendor/addr2line-0.17.0/coverage.sh b/vendor/addr2line-0.17.0/coverage.sh
new file mode 100644
index 000000000..892c0b7fa
--- /dev/null
+++ b/vendor/addr2line-0.17.0/coverage.sh
@@ -0,0 +1,5 @@
+#!/bin/sh
+# Run tarpaulin and pycobertura to generate coverage.html.
+
+cargo tarpaulin --skip-clean --out Xml
+pycobertura show --format html --output coverage.html cobertura.xml
diff --git a/vendor/addr2line-0.17.0/examples/addr2line.rs b/vendor/addr2line-0.17.0/examples/addr2line.rs
new file mode 100644
index 000000000..4b228a706
--- /dev/null
+++ b/vendor/addr2line-0.17.0/examples/addr2line.rs
@@ -0,0 +1,299 @@
+extern crate addr2line;
+extern crate clap;
+extern crate fallible_iterator;
+extern crate gimli;
+extern crate memmap;
+extern crate object;
+extern crate typed_arena;
+
+use std::borrow::Cow;
+use std::fs::File;
+use std::io::{BufRead, Lines, StdinLock, Write};
+use std::path::Path;
+
+use clap::{App, Arg, Values};
+use fallible_iterator::FallibleIterator;
+use object::{Object, ObjectSection};
+use typed_arena::Arena;
+
+use addr2line::{Context, Location};
+
+fn parse_uint_from_hex_string(string: &str) -> u64 {
+ if string.len() > 2 && string.starts_with("0x") {
+ u64::from_str_radix(&string[2..], 16).expect("Failed to parse address")
+ } else {
+ u64::from_str_radix(string, 16).expect("Failed to parse address")
+ }
+}
+
+enum Addrs<'a> {
+ Args(Values<'a>),
+ Stdin(Lines<StdinLock<'a>>),
+}
+
+impl<'a> Iterator for Addrs<'a> {
+ type Item = u64;
+
+ fn next(&mut self) -> Option<u64> {
+ let text = match *self {
+ Addrs::Args(ref mut vals) => vals.next().map(Cow::from),
+ Addrs::Stdin(ref mut lines) => lines.next().map(Result::unwrap).map(Cow::from),
+ };
+ text.as_ref()
+ .map(Cow::as_ref)
+ .map(parse_uint_from_hex_string)
+ }
+}
+
+fn print_loc(loc: &Option<Location>, basenames: bool, llvm: bool) {
+ if let Some(ref loc) = *loc {
+ let file = loc.file.as_ref().unwrap();
+ let path = if basenames {
+ Path::new(Path::new(file).file_name().unwrap())
+ } else {
+ Path::new(file)
+ };
+ print!("{}:", path.display());
+ if llvm {
+ print!("{}:{}", loc.line.unwrap_or(0), loc.column.unwrap_or(0));
+ } else if let Some(line) = loc.line {
+ print!("{}", line);
+ } else {
+ print!("?");
+ }
+ println!();
+ } else if llvm {
+ println!("??:0:0");
+ } else {
+ println!("??:?");
+ }
+}
+
+fn print_function(name: &str, language: Option<gimli::DwLang>, demangle: bool) {
+ if demangle {
+ print!("{}", addr2line::demangle_auto(Cow::from(name), language));
+ } else {
+ print!("{}", name);
+ }
+}
+
+fn load_file_section<'input, 'arena, Endian: gimli::Endianity>(
+ id: gimli::SectionId,
+ file: &object::File<'input>,
+ endian: Endian,
+ arena_data: &'arena Arena<Cow<'input, [u8]>>,
+) -> Result<gimli::EndianSlice<'arena, Endian>, ()> {
+ // TODO: Unify with dwarfdump.rs in gimli.
+ let name = id.name();
+ match file.section_by_name(name) {
+ Some(section) => match section.uncompressed_data().unwrap() {
+ Cow::Borrowed(b) => Ok(gimli::EndianSlice::new(b, endian)),
+ Cow::Owned(b) => Ok(gimli::EndianSlice::new(arena_data.alloc(b.into()), endian)),
+ },
+ None => Ok(gimli::EndianSlice::new(&[][..], endian)),
+ }
+}
+
+fn main() {
+ let matches = App::new("hardliner")
+ .version("0.1")
+ .about("A fast addr2line clone")
+ .arg(
+ Arg::with_name("exe")
+ .short("e")
+ .long("exe")
+ .value_name("filename")
+ .help(
+ "Specify the name of the executable for which addresses should be translated.",
+ )
+ .required(true),
+ )
+ .arg(
+ Arg::with_name("sup")
+ .long("sup")
+ .value_name("filename")
+ .help("Path to supplementary object file."),
+ )
+ .arg(
+ Arg::with_name("functions")
+ .short("f")
+ .long("functions")
+ .help("Display function names as well as file and line number information."),
+ )
+ .arg(
+ Arg::with_name("pretty")
+ .short("p")
+ .long("pretty-print")
+ .help(
+ "Make the output more human friendly: each location are printed on \
+ one line.",
+ ),
+ )
+ .arg(Arg::with_name("inlines").short("i").long("inlines").help(
+ "If the address belongs to a function that was inlined, the source \
+ information for all enclosing scopes back to the first non-inlined \
+ function will also be printed.",
+ ))
+ .arg(
+ Arg::with_name("addresses")
+ .short("a")
+ .long("addresses")
+ .help(
+ "Display the address before the function name, file and line \
+ number information.",
+ ),
+ )
+ .arg(
+ Arg::with_name("basenames")
+ .short("s")
+ .long("basenames")
+ .help("Display only the base of each file name."),
+ )
+ .arg(Arg::with_name("demangle").short("C").long("demangle").help(
+ "Demangle function names. \
+ Specifying a specific demangling style (like GNU addr2line) \
+ is not supported. (TODO)",
+ ))
+ .arg(
+ Arg::with_name("llvm")
+ .long("llvm")
+ .help("Display output in the same format as llvm-symbolizer."),
+ )
+ .arg(
+ Arg::with_name("addrs")
+ .takes_value(true)
+ .multiple(true)
+ .help("Addresses to use instead of reading from stdin."),
+ )
+ .get_matches();
+
+ let arena_data = Arena::new();
+
+ let do_functions = matches.is_present("functions");
+ let do_inlines = matches.is_present("inlines");
+ let pretty = matches.is_present("pretty");
+ let print_addrs = matches.is_present("addresses");
+ let basenames = matches.is_present("basenames");
+ let demangle = matches.is_present("demangle");
+ let llvm = matches.is_present("llvm");
+ let path = matches.value_of("exe").unwrap();
+
+ let file = File::open(path).unwrap();
+ let map = unsafe { memmap::Mmap::map(&file).unwrap() };
+ let object = &object::File::parse(&*map).unwrap();
+
+ let endian = if object.is_little_endian() {
+ gimli::RunTimeEndian::Little
+ } else {
+ gimli::RunTimeEndian::Big
+ };
+
+ let mut load_section = |id: gimli::SectionId| -> Result<_, _> {
+ load_file_section(id, object, endian, &arena_data)
+ };
+
+ let sup_map;
+ let sup_object = if let Some(sup_path) = matches.value_of("sup") {
+ let sup_file = File::open(sup_path).unwrap();
+ sup_map = unsafe { memmap::Mmap::map(&sup_file).unwrap() };
+ Some(object::File::parse(&*sup_map).unwrap())
+ } else {
+ None
+ };
+
+ let symbols = object.symbol_map();
+ let mut dwarf = gimli::Dwarf::load(&mut load_section).unwrap();
+ if let Some(ref sup_object) = sup_object {
+ let mut load_sup_section = |id: gimli::SectionId| -> Result<_, _> {
+ load_file_section(id, sup_object, endian, &arena_data)
+ };
+ dwarf.load_sup(&mut load_sup_section).unwrap();
+ }
+
+ let ctx = Context::from_dwarf(dwarf).unwrap();
+
+ let stdin = std::io::stdin();
+ let addrs = matches
+ .values_of("addrs")
+ .map(Addrs::Args)
+ .unwrap_or_else(|| Addrs::Stdin(stdin.lock().lines()));
+
+ for probe in addrs {
+ if print_addrs {
+ if llvm {
+ print!("0x{:x}", probe);
+ } else {
+ print!("0x{:016x}", probe);
+ }
+ if pretty {
+ print!(": ");
+ } else {
+ println!();
+ }
+ }
+
+ if do_functions || do_inlines {
+ let mut printed_anything = false;
+ let mut frames = ctx.find_frames(probe).unwrap().enumerate();
+ while let Some((i, frame)) = frames.next().unwrap() {
+ if pretty && i != 0 {
+ print!(" (inlined by) ");
+ }
+
+ if do_functions {
+ if let Some(func) = frame.function {
+ print_function(&func.raw_name().unwrap(), func.language, demangle);
+ } else if let Some(name) = symbols.get(probe).map(|x| x.name()) {
+ print_function(name, None, demangle);
+ } else {
+ print!("??");
+ }
+
+ if pretty {
+ print!(" at ");
+ } else {
+ println!();
+ }
+ }
+
+ print_loc(&frame.location, basenames, llvm);
+
+ printed_anything = true;
+
+ if !do_inlines {
+ break;
+ }
+ }
+
+ if !printed_anything {
+ if do_functions {
+ if let Some(name) = symbols.get(probe).map(|x| x.name()) {
+ print_function(name, None, demangle);
+ } else {
+ print!("??");
+ }
+
+ if pretty {
+ print!(" at ");
+ } else {
+ println!();
+ }
+ }
+
+ if llvm {
+ println!("??:0:0");
+ } else {
+ println!("??:?");
+ }
+ }
+ } else {
+ let loc = ctx.find_location(probe).unwrap();
+ print_loc(&loc, basenames, llvm);
+ }
+
+ if llvm {
+ println!();
+ }
+ std::io::stdout().flush().unwrap();
+ }
+}
diff --git a/vendor/addr2line-0.17.0/rustfmt.toml b/vendor/addr2line-0.17.0/rustfmt.toml
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++ b/vendor/addr2line-0.17.0/rustfmt.toml
@@ -0,0 +1 @@
+
diff --git a/vendor/addr2line-0.17.0/src/function.rs b/vendor/addr2line-0.17.0/src/function.rs
new file mode 100644
index 000000000..1589acdbe
--- /dev/null
+++ b/vendor/addr2line-0.17.0/src/function.rs
@@ -0,0 +1,520 @@
+use alloc::boxed::Box;
+use alloc::vec::Vec;
+use core::cmp::Ordering;
+use core::iter;
+
+use crate::lazy::LazyCell;
+use crate::maybe_small;
+use crate::{Error, RangeAttributes, ResDwarf};
+
+pub(crate) struct Functions<R: gimli::Reader> {
+ /// List of all `DW_TAG_subprogram` details in the unit.
+ pub(crate) functions: Box<
+ [(
+ gimli::UnitOffset<R::Offset>,
+ LazyCell<Result<Function<R>, Error>>,
+ )],
+ >,
+ /// List of `DW_TAG_subprogram` address ranges in the unit.
+ pub(crate) addresses: Box<[FunctionAddress]>,
+}
+
+/// A single address range for a function.
+///
+/// It is possible for a function to have multiple address ranges; this
+/// is handled by having multiple `FunctionAddress` entries with the same
+/// `function` field.
+pub(crate) struct FunctionAddress {
+ range: gimli::Range,
+ /// An index into `Functions::functions`.
+ pub(crate) function: usize,
+}
+
+pub(crate) struct Function<R: gimli::Reader> {
+ pub(crate) dw_die_offset: gimli::UnitOffset<R::Offset>,
+ pub(crate) name: Option<R>,
+ /// List of all `DW_TAG_inlined_subroutine` details in this function.
+ inlined_functions: Box<[InlinedFunction<R>]>,
+ /// List of `DW_TAG_inlined_subroutine` address ranges in this function.
+ inlined_addresses: Box<[InlinedFunctionAddress]>,
+}
+
+pub(crate) struct InlinedFunctionAddress {
+ range: gimli::Range,
+ call_depth: usize,
+ /// An index into `Function::inlined_functions`.
+ function: usize,
+}
+
+pub(crate) struct InlinedFunction<R: gimli::Reader> {
+ pub(crate) dw_die_offset: gimli::UnitOffset<R::Offset>,
+ pub(crate) name: Option<R>,
+ pub(crate) call_file: u64,
+ pub(crate) call_line: u32,
+ pub(crate) call_column: u32,
+}
+
+impl<R: gimli::Reader> Functions<R> {
+ pub(crate) fn parse(unit: &gimli::Unit<R>, dwarf: &ResDwarf<R>) -> Result<Functions<R>, Error> {
+ let mut functions = Vec::new();
+ let mut addresses = Vec::new();
+ let mut entries = unit.entries_raw(None)?;
+ while !entries.is_empty() {
+ let dw_die_offset = entries.next_offset();
+ if let Some(abbrev) = entries.read_abbreviation()? {
+ if abbrev.tag() == gimli::DW_TAG_subprogram {
+ let mut ranges = RangeAttributes::default();
+ for spec in abbrev.attributes() {
+ match entries.read_attribute(*spec) {
+ Ok(ref attr) => {
+ match attr.name() {
+ gimli::DW_AT_low_pc => {
+ if let gimli::AttributeValue::Addr(val) = attr.value() {
+ ranges.low_pc = Some(val);
+ }
+ }
+ gimli::DW_AT_high_pc => match attr.value() {
+ gimli::AttributeValue::Addr(val) => {
+ ranges.high_pc = Some(val)
+ }
+ gimli::AttributeValue::Udata(val) => {
+ ranges.size = Some(val)
+ }
+ _ => {}
+ },
+ gimli::DW_AT_ranges => {
+ ranges.ranges_offset = dwarf
+ .sections
+ .attr_ranges_offset(unit, attr.value())?;
+ }
+ _ => {}
+ };
+ }
+ Err(e) => return Err(e),
+ }
+ }
+
+ let function_index = functions.len();
+ if ranges.for_each_range(&dwarf.sections, unit, |range| {
+ addresses.push(FunctionAddress {
+ range,
+ function: function_index,
+ });
+ })? {
+ functions.push((dw_die_offset, LazyCell::new()));
+ }
+ } else {
+ entries.skip_attributes(abbrev.attributes())?;
+ }
+ }
+ }
+
+ // The binary search requires the addresses to be sorted.
+ //
+ // It also requires them to be non-overlapping. In practice, overlapping
+ // function ranges are unlikely, so we don't try to handle that yet.
+ //
+ // It's possible for multiple functions to have the same address range if the
+ // compiler can detect and remove functions with identical code. In that case
+ // we'll nondeterministically return one of them.
+ addresses.sort_by_key(|x| x.range.begin);
+
+ Ok(Functions {
+ functions: functions.into_boxed_slice(),
+ addresses: addresses.into_boxed_slice(),
+ })
+ }
+
+ pub(crate) fn find_address(&self, probe: u64) -> Option<usize> {
+ self.addresses
+ .binary_search_by(|address| {
+ if probe < address.range.begin {
+ Ordering::Greater
+ } else if probe >= address.range.end {
+ Ordering::Less
+ } else {
+ Ordering::Equal
+ }
+ })
+ .ok()
+ }
+
+ pub(crate) fn parse_inlined_functions(
+ &self,
+ unit: &gimli::Unit<R>,
+ dwarf: &ResDwarf<R>,
+ ) -> Result<(), Error> {
+ for function in &*self.functions {
+ function
+ .1
+ .borrow_with(|| Function::parse(function.0, unit, dwarf))
+ .as_ref()
+ .map_err(Error::clone)?;
+ }
+ Ok(())
+ }
+}
+
+impl<R: gimli::Reader> Function<R> {
+ pub(crate) fn parse(
+ dw_die_offset: gimli::UnitOffset<R::Offset>,
+ unit: &gimli::Unit<R>,
+ dwarf: &ResDwarf<R>,
+ ) -> Result<Self, Error> {
+ let mut entries = unit.entries_raw(Some(dw_die_offset))?;
+ let depth = entries.next_depth();
+ let abbrev = entries.read_abbreviation()?.unwrap();
+ debug_assert_eq!(abbrev.tag(), gimli::DW_TAG_subprogram);
+
+ let mut name = None;
+ for spec in abbrev.attributes() {
+ match entries.read_attribute(*spec) {
+ Ok(ref attr) => {
+ match attr.name() {
+ gimli::DW_AT_linkage_name | gimli::DW_AT_MIPS_linkage_name => {
+ if let Ok(val) = dwarf.sections.attr_string(unit, attr.value()) {
+ name = Some(val);
+ }
+ }
+ gimli::DW_AT_name => {
+ if name.is_none() {
+ name = dwarf.sections.attr_string(unit, attr.value()).ok();
+ }
+ }
+ gimli::DW_AT_abstract_origin | gimli::DW_AT_specification => {
+ if name.is_none() {
+ name = name_attr(attr.value(), unit, dwarf, 16)?;
+ }
+ }
+ _ => {}
+ };
+ }
+ Err(e) => return Err(e),
+ }
+ }
+
+ let mut inlined_functions = Vec::new();
+ let mut inlined_addresses = Vec::new();
+ Function::parse_children(
+ &mut entries,
+ depth,
+ unit,
+ dwarf,
+ &mut inlined_functions,
+ &mut inlined_addresses,
+ 0,
+ )?;
+
+ // Sort ranges in "breadth-first traversal order", i.e. first by call_depth
+ // and then by range.begin. This allows finding the range containing an
+ // address at a certain depth using binary search.
+ // Note: Using DFS order, i.e. ordering by range.begin first and then by
+ // call_depth, would not work! Consider the two examples
+ // "[0..10 at depth 0], [0..2 at depth 1], [6..8 at depth 1]" and
+ // "[0..5 at depth 0], [0..2 at depth 1], [5..10 at depth 0], [6..8 at depth 1]".
+ // In this example, if you want to look up address 7 at depth 0, and you
+ // encounter [0..2 at depth 1], are you before or after the target range?
+ // You don't know.
+ inlined_addresses.sort_by(|r1, r2| {
+ if r1.call_depth < r2.call_depth {
+ Ordering::Less
+ } else if r1.call_depth > r2.call_depth {
+ Ordering::Greater
+ } else if r1.range.begin < r2.range.begin {
+ Ordering::Less
+ } else if r1.range.begin > r2.range.begin {
+ Ordering::Greater
+ } else {
+ Ordering::Equal
+ }
+ });
+
+ Ok(Function {
+ dw_die_offset,
+ name,
+ inlined_functions: inlined_functions.into_boxed_slice(),
+ inlined_addresses: inlined_addresses.into_boxed_slice(),
+ })
+ }
+
+ fn parse_children(
+ entries: &mut gimli::EntriesRaw<R>,
+ depth: isize,
+ unit: &gimli::Unit<R>,
+ dwarf: &ResDwarf<R>,
+ inlined_functions: &mut Vec<InlinedFunction<R>>,
+ inlined_addresses: &mut Vec<InlinedFunctionAddress>,
+ inlined_depth: usize,
+ ) -> Result<(), Error> {
+ loop {
+ let dw_die_offset = entries.next_offset();
+ let next_depth = entries.next_depth();
+ if next_depth <= depth {
+ return Ok(());
+ }
+ if let Some(abbrev) = entries.read_abbreviation()? {
+ match abbrev.tag() {
+ gimli::DW_TAG_subprogram => {
+ Function::skip(entries, abbrev, next_depth)?;
+ }
+ gimli::DW_TAG_inlined_subroutine => {
+ InlinedFunction::parse(
+ dw_die_offset,
+ entries,
+ abbrev,
+ next_depth,
+ unit,
+ dwarf,
+ inlined_functions,
+ inlined_addresses,
+ inlined_depth,
+ )?;
+ }
+ _ => {
+ entries.skip_attributes(abbrev.attributes())?;
+ }
+ }
+ }
+ }
+ }
+
+ fn skip(
+ entries: &mut gimli::EntriesRaw<R>,
+ abbrev: &gimli::Abbreviation,
+ depth: isize,
+ ) -> Result<(), Error> {
+ // TODO: use DW_AT_sibling
+ entries.skip_attributes(abbrev.attributes())?;
+ while entries.next_depth() > depth {
+ if let Some(abbrev) = entries.read_abbreviation()? {
+ entries.skip_attributes(abbrev.attributes())?;
+ }
+ }
+ Ok(())
+ }
+
+ /// Build the list of inlined functions that contain `probe`.
+ pub(crate) fn find_inlined_functions(
+ &self,
+ probe: u64,
+ ) -> iter::Rev<maybe_small::IntoIter<&InlinedFunction<R>>> {
+ // `inlined_functions` is ordered from outside to inside.
+ let mut inlined_functions = maybe_small::Vec::new();
+ let mut inlined_addresses = &self.inlined_addresses[..];
+ loop {
+ let current_depth = inlined_functions.len();
+ // Look up (probe, current_depth) in inline_ranges.
+ // `inlined_addresses` is sorted in "breadth-first traversal order", i.e.
+ // by `call_depth` first, and then by `range.begin`. See the comment at
+ // the sort call for more information about why.
+ let search = inlined_addresses.binary_search_by(|range| {
+ if range.call_depth > current_depth {
+ Ordering::Greater
+ } else if range.call_depth < current_depth {
+ Ordering::Less
+ } else if range.range.begin > probe {
+ Ordering::Greater
+ } else if range.range.end <= probe {
+ Ordering::Less
+ } else {
+ Ordering::Equal
+ }
+ });
+ if let Ok(index) = search {
+ let function_index = inlined_addresses[index].function;
+ inlined_functions.push(&self.inlined_functions[function_index]);
+ inlined_addresses = &inlined_addresses[index + 1..];
+ } else {
+ break;
+ }
+ }
+ inlined_functions.into_iter().rev()
+ }
+}
+
+impl<R: gimli::Reader> InlinedFunction<R> {
+ fn parse(
+ dw_die_offset: gimli::UnitOffset<R::Offset>,
+ entries: &mut gimli::EntriesRaw<R>,
+ abbrev: &gimli::Abbreviation,
+ depth: isize,
+ unit: &gimli::Unit<R>,
+ dwarf: &ResDwarf<R>,
+ inlined_functions: &mut Vec<InlinedFunction<R>>,
+ inlined_addresses: &mut Vec<InlinedFunctionAddress>,
+ inlined_depth: usize,
+ ) -> Result<(), Error> {
+ let mut ranges = RangeAttributes::default();
+ let mut name = None;
+ let mut call_file = 0;
+ let mut call_line = 0;
+ let mut call_column = 0;
+ for spec in abbrev.attributes() {
+ match entries.read_attribute(*spec) {
+ Ok(ref attr) => match attr.name() {
+ gimli::DW_AT_low_pc => {
+ if let gimli::AttributeValue::Addr(val) = attr.value() {
+ ranges.low_pc = Some(val);
+ }
+ }
+ gimli::DW_AT_high_pc => match attr.value() {
+ gimli::AttributeValue::Addr(val) => ranges.high_pc = Some(val),
+ gimli::AttributeValue::Udata(val) => ranges.size = Some(val),
+ _ => {}
+ },
+ gimli::DW_AT_ranges => {
+ ranges.ranges_offset =
+ dwarf.sections.attr_ranges_offset(unit, attr.value())?;
+ }
+ gimli::DW_AT_linkage_name | gimli::DW_AT_MIPS_linkage_name => {
+ if let Ok(val) = dwarf.sections.attr_string(unit, attr.value()) {
+ name = Some(val);
+ }
+ }
+ gimli::DW_AT_name => {
+ if name.is_none() {
+ name = dwarf.sections.attr_string(unit, attr.value()).ok();
+ }
+ }
+ gimli::DW_AT_abstract_origin | gimli::DW_AT_specification => {
+ if name.is_none() {
+ name = name_attr(attr.value(), unit, dwarf, 16)?;
+ }
+ }
+ gimli::DW_AT_call_file => {
+ if let gimli::AttributeValue::FileIndex(fi) = attr.value() {
+ call_file = fi;
+ }
+ }
+ gimli::DW_AT_call_line => {
+ call_line = attr.udata_value().unwrap_or(0) as u32;
+ }
+ gimli::DW_AT_call_column => {
+ call_column = attr.udata_value().unwrap_or(0) as u32;
+ }
+ _ => {}
+ },
+ Err(e) => return Err(e),
+ }
+ }
+
+ let function_index = inlined_functions.len();
+ inlined_functions.push(InlinedFunction {
+ dw_die_offset,
+ name,
+ call_file,
+ call_line,
+ call_column,
+ });
+
+ ranges.for_each_range(&dwarf.sections, unit, |range| {
+ inlined_addresses.push(InlinedFunctionAddress {
+ range,
+ call_depth: inlined_depth,
+ function: function_index,
+ });
+ })?;
+
+ Function::parse_children(
+ entries,
+ depth,
+ unit,
+ dwarf,
+ inlined_functions,
+ inlined_addresses,
+ inlined_depth + 1,
+ )
+ }
+}
+
+fn name_attr<R>(
+ attr: gimli::AttributeValue<R>,
+ unit: &gimli::Unit<R>,
+ dwarf: &ResDwarf<R>,
+ recursion_limit: usize,
+) -> Result<Option<R>, Error>
+where
+ R: gimli::Reader,
+{
+ if recursion_limit == 0 {
+ return Ok(None);
+ }
+
+ match attr {
+ gimli::AttributeValue::UnitRef(offset) => name_entry(unit, offset, dwarf, recursion_limit),
+ gimli::AttributeValue::DebugInfoRef(dr) => {
+ let res_unit = dwarf.find_unit(dr)?;
+ name_entry(
+ &res_unit.dw_unit,
+ gimli::UnitOffset(dr.0 - res_unit.offset.0),
+ dwarf,
+ recursion_limit,
+ )
+ }
+ gimli::AttributeValue::DebugInfoRefSup(dr) => {
+ if let Some(sup_dwarf) = dwarf.sup.as_ref() {
+ let res_unit = sup_dwarf.find_unit(dr)?;
+ name_entry(
+ &res_unit.dw_unit,
+ gimli::UnitOffset(dr.0 - res_unit.offset.0),
+ sup_dwarf,
+ recursion_limit,
+ )
+ } else {
+ Ok(None)
+ }
+ }
+ _ => Ok(None),
+ }
+}
+
+fn name_entry<R>(
+ unit: &gimli::Unit<R>,
+ offset: gimli::UnitOffset<R::Offset>,
+ dwarf: &ResDwarf<R>,
+ recursion_limit: usize,
+) -> Result<Option<R>, Error>
+where
+ R: gimli::Reader,
+{
+ let mut entries = unit.entries_raw(Some(offset))?;
+ let abbrev = if let Some(abbrev) = entries.read_abbreviation()? {
+ abbrev
+ } else {
+ return Err(gimli::Error::NoEntryAtGivenOffset);
+ };
+
+ let mut name = None;
+ let mut next = None;
+ for spec in abbrev.attributes() {
+ match entries.read_attribute(*spec) {
+ Ok(ref attr) => match attr.name() {
+ gimli::DW_AT_linkage_name | gimli::DW_AT_MIPS_linkage_name => {
+ if let Ok(val) = dwarf.sections.attr_string(unit, attr.value()) {
+ return Ok(Some(val));
+ }
+ }
+ gimli::DW_AT_name => {
+ if let Ok(val) = dwarf.sections.attr_string(unit, attr.value()) {
+ name = Some(val);
+ }
+ }
+ gimli::DW_AT_abstract_origin | gimli::DW_AT_specification => {
+ next = Some(attr.value());
+ }
+ _ => {}
+ },
+ Err(e) => return Err(e),
+ }
+ }
+
+ if name.is_some() {
+ return Ok(name);
+ }
+
+ if let Some(next) = next {
+ return name_attr(next, unit, dwarf, recursion_limit - 1);
+ }
+
+ Ok(None)
+}
diff --git a/vendor/addr2line-0.17.0/src/lazy.rs b/vendor/addr2line-0.17.0/src/lazy.rs
new file mode 100644
index 000000000..280c76b46
--- /dev/null
+++ b/vendor/addr2line-0.17.0/src/lazy.rs
@@ -0,0 +1,29 @@
+use core::cell::UnsafeCell;
+
+pub struct LazyCell<T> {
+ contents: UnsafeCell<Option<T>>,
+}
+impl<T> LazyCell<T> {
+ pub fn new() -> LazyCell<T> {
+ LazyCell {
+ contents: UnsafeCell::new(None),
+ }
+ }
+
+ pub fn borrow_with(&self, closure: impl FnOnce() -> T) -> &T {
+ unsafe {
+ // First check if we're already initialized...
+ let ptr = self.contents.get();
+ if let Some(val) = &*ptr {
+ return val;
+ }
+ // Note that while we're executing `closure` our `borrow_with` may
+ // be called recursively. This means we need to check again after
+ // the closure has executed. For that we use the `get_or_insert`
+ // method which will only perform mutation if we aren't already
+ // `Some`.
+ let val = closure();
+ (*ptr).get_or_insert(val)
+ }
+ }
+}
diff --git a/vendor/addr2line-0.17.0/src/lib.rs b/vendor/addr2line-0.17.0/src/lib.rs
new file mode 100644
index 000000000..b46a98393
--- /dev/null
+++ b/vendor/addr2line-0.17.0/src/lib.rs
@@ -0,0 +1,1192 @@
+//! This crate provides a cross-platform library and binary for translating addresses into
+//! function names, file names and line numbers. Given an address in an executable or an
+//! offset in a section of a relocatable object, it uses the debugging information to
+//! figure out which file name and line number are associated with it.
+//!
+//! When used as a library, files must first be loaded using the
+//! [`object`](https://github.com/gimli-rs/object) crate.
+//! A context can then be created with [`Context::new`](./struct.Context.html#method.new).
+//! The context caches some of the parsed information so that multiple lookups are
+//! efficient.
+//! Location information is obtained with
+//! [`Context::find_location`](./struct.Context.html#method.find_location) or
+//! [`Context::find_location_range`](./struct.Context.html#method.find_location_range).
+//! Function information is obtained with
+//! [`Context::find_frames`](./struct.Context.html#method.find_frames), which returns
+//! a frame for each inline function. Each frame contains both name and location.
+//!
+//! The crate has an example CLI wrapper around the library which provides some of
+//! the functionality of the `addr2line` command line tool distributed with [GNU
+//! binutils](https://www.gnu.org/software/binutils/).
+//!
+//! Currently this library only provides information from the DWARF debugging information,
+//! which is parsed using [`gimli`](https://github.com/gimli-rs/gimli). The example CLI
+//! wrapper also uses symbol table information provided by the `object` crate.
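+//!
+//! A minimal sketch of a frame lookup (illustrative only; the path and the
+//! address below are placeholders):
+//!
+//! ```no_run
+//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
+//! let data = std::fs::read("/path/to/binary")?;
+//! let object = addr2line::object::File::parse(&*data)?;
+//! let ctx = addr2line::Context::new(&object)?;
+//!
+//! // One frame is returned for each inlined function at this address.
+//! let mut frames = ctx.find_frames(0x2370)?;
+//! while let Some(frame) = frames.next()? {
+//!     if let Some(func) = frame.function {
+//!         println!("{}", func.demangle()?);
+//!     }
+//!     if let Some(loc) = frame.location {
+//!         println!("  at {}:{}", loc.file.unwrap_or("?"), loc.line.unwrap_or(0));
+//!     }
+//! }
+//! # Ok(())
+//! # }
+//! ```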
+#![deny(missing_docs)]
+#![no_std]
+
+#[allow(unused_imports)]
+#[macro_use]
+extern crate alloc;
+
+#[cfg(feature = "cpp_demangle")]
+extern crate cpp_demangle;
+#[cfg(feature = "fallible-iterator")]
+pub extern crate fallible_iterator;
+pub extern crate gimli;
+#[cfg(feature = "object")]
+pub extern crate object;
+#[cfg(feature = "rustc-demangle")]
+extern crate rustc_demangle;
+
+use alloc::borrow::Cow;
+use alloc::boxed::Box;
+#[cfg(feature = "object")]
+use alloc::rc::Rc;
+use alloc::string::{String, ToString};
+use alloc::sync::Arc;
+use alloc::vec::Vec;
+
+use core::cmp::{self, Ordering};
+use core::iter;
+use core::mem;
+use core::num::NonZeroU64;
+use core::u64;
+
+use crate::function::{Function, Functions, InlinedFunction};
+use crate::lazy::LazyCell;
+
+#[cfg(feature = "smallvec")]
+mod maybe_small {
+ pub type Vec<T> = smallvec::SmallVec<[T; 16]>;
+ pub type IntoIter<T> = smallvec::IntoIter<[T; 16]>;
+}
+#[cfg(not(feature = "smallvec"))]
+mod maybe_small {
+ pub type Vec<T> = alloc::vec::Vec<T>;
+ pub type IntoIter<T> = alloc::vec::IntoIter<T>;
+}
+
+mod function;
+mod lazy;
+
+type Error = gimli::Error;
+
+/// The state necessary to perform address to line translation.
+///
+/// Constructing a `Context` is somewhat costly, so users should aim to reuse `Context`s
+/// when performing lookups for many addresses in the same executable.
+pub struct Context<R: gimli::Reader> {
+ dwarf: ResDwarf<R>,
+}
+
+/// The type of `Context` that supports the `new` method.
+#[cfg(feature = "std-object")]
+pub type ObjectContext = Context<gimli::EndianRcSlice<gimli::RunTimeEndian>>;
+
+#[cfg(feature = "std-object")]
+impl Context<gimli::EndianRcSlice<gimli::RunTimeEndian>> {
+ /// Construct a new `Context`.
+ ///
+ /// The resulting `Context` uses `gimli::EndianRcSlice<gimli::RunTimeEndian>`.
+ /// This means it is not thread safe, has no lifetime constraints (since it copies
+ /// the input data), and works for any endianity.
+ ///
+ /// Performance sensitive applications may want to use `Context::from_dwarf`
+ /// with a more specialised `gimli::Reader` implementation.
+ #[inline]
+ pub fn new<'data: 'file, 'file, O: object::Object<'data, 'file>>(
+ file: &'file O,
+ ) -> Result<Self, Error> {
+ Self::new_with_sup(file, None)
+ }
+
+ /// Construct a new `Context`.
+ ///
+ /// Optionally also use a supplementary object file.
+ ///
+ /// The resulting `Context` uses `gimli::EndianRcSlice<gimli::RunTimeEndian>`.
+ /// This means it is not thread safe, has no lifetime constraints (since it copies
+ /// the input data), and works for any endianity.
+ ///
+ /// Performance sensitive applications may want to use `Context::from_dwarf_with_sup`
+ /// with a more specialised `gimli::Reader` implementation.
+ pub fn new_with_sup<'data: 'file, 'file, O: object::Object<'data, 'file>>(
+ file: &'file O,
+ sup_file: Option<&'file O>,
+ ) -> Result<Self, Error> {
+ let endian = if file.is_little_endian() {
+ gimli::RunTimeEndian::Little
+ } else {
+ gimli::RunTimeEndian::Big
+ };
+
+ fn load_section<'data: 'file, 'file, O, Endian>(
+ id: gimli::SectionId,
+ file: &'file O,
+ endian: Endian,
+ ) -> Result<gimli::EndianRcSlice<Endian>, Error>
+ where
+ O: object::Object<'data, 'file>,
+ Endian: gimli::Endianity,
+ {
+ use object::ObjectSection;
+
+ let data = file
+ .section_by_name(id.name())
+ .and_then(|section| section.uncompressed_data().ok())
+ .unwrap_or(Cow::Borrowed(&[]));
+ Ok(gimli::EndianRcSlice::new(Rc::from(&*data), endian))
+ }
+
+ let mut dwarf = gimli::Dwarf::load(|id| load_section(id, file, endian))?;
+ if let Some(sup_file) = sup_file {
+ dwarf.load_sup(|id| load_section(id, sup_file, endian))?;
+ }
+ Context::from_dwarf(dwarf)
+ }
+}
+
+impl<R: gimli::Reader> Context<R> {
+ /// Construct a new `Context` from DWARF sections.
+ ///
+ /// This method does not support using a supplementary object file.
+ pub fn from_sections(
+ debug_abbrev: gimli::DebugAbbrev<R>,
+ debug_addr: gimli::DebugAddr<R>,
+ debug_aranges: gimli::DebugAranges<R>,
+ debug_info: gimli::DebugInfo<R>,
+ debug_line: gimli::DebugLine<R>,
+ debug_line_str: gimli::DebugLineStr<R>,
+ debug_ranges: gimli::DebugRanges<R>,
+ debug_rnglists: gimli::DebugRngLists<R>,
+ debug_str: gimli::DebugStr<R>,
+ debug_str_offsets: gimli::DebugStrOffsets<R>,
+ default_section: R,
+ ) -> Result<Self, Error> {
+ Self::from_dwarf(gimli::Dwarf {
+ debug_abbrev,
+ debug_addr,
+ debug_aranges,
+ debug_info,
+ debug_line,
+ debug_line_str,
+ debug_str,
+ debug_str_offsets,
+ debug_types: default_section.clone().into(),
+ locations: gimli::LocationLists::new(
+ default_section.clone().into(),
+ default_section.clone().into(),
+ ),
+ ranges: gimli::RangeLists::new(debug_ranges, debug_rnglists),
+ file_type: gimli::DwarfFileType::Main,
+ sup: None,
+ })
+ }
+
+ /// Construct a new `Context` from an existing [`gimli::Dwarf`] object.
+ #[inline]
+ pub fn from_dwarf(sections: gimli::Dwarf<R>) -> Result<Self, Error> {
+ let mut dwarf = ResDwarf::parse(Arc::new(sections))?;
+ dwarf.sup = match dwarf.sections.sup.clone() {
+ Some(sup_sections) => Some(Box::new(ResDwarf::parse(sup_sections)?)),
+ None => None,
+ };
+ Ok(Context { dwarf })
+ }
+
+ /// The dwarf sections associated with this `Context`.
+ pub fn dwarf(&self) -> &gimli::Dwarf<R> {
+ &self.dwarf.sections
+ }
+
+ /// Finds the CUs for the function address given.
+ ///
+ /// There might be multiple CUs whose range contains this address.
+ /// Weak symbols have shown up in the wild which cause this to happen,
+ /// but it can also happen if the CU has non-contiguous functions
+ /// and only reports a single range.
+ ///
+ /// Consequently we return an iterator for all CUs which may contain the
+ /// address, and the caller must check if there is actually a function or
+ /// location in the CU for that address.
+ fn find_units(&self, probe: u64) -> impl Iterator<Item = &ResUnit<R>> {
+ self.find_units_range(probe, probe + 1)
+ .map(|(unit, _range)| unit)
+ }
+
+ /// Finds the CUs covering the range of addresses given.
+ ///
+ /// The range is [low, high) (i.e., the upper bound is exclusive). This can return multiple
+ /// ranges for the same unit.
+ #[inline]
+ fn find_units_range(
+ &self,
+ probe_low: u64,
+ probe_high: u64,
+ ) -> impl Iterator<Item = (&ResUnit<R>, &gimli::Range)> {
+ // First, find the position in the array that could contain our function
+ // address.
+ let pos = match self
+ .dwarf
+ .unit_ranges
+ .binary_search_by_key(&probe_high, |i| i.range.begin)
+ {
+ // Although unlikely, we could find an exact match.
+ Ok(i) => i + 1,
+ // No exact match was found, but this probe would fit at slot `i`.
+ // This means that slot `i` is bigger than `probe`, along with all
+ // indices greater than `i`, so we need to search all previous
+ // entries.
+ Err(i) => i,
+ };
+
+ // Once we have our index we iterate backwards from that position
+ // looking for a matching CU.
+ self.dwarf.unit_ranges[..pos]
+ .iter()
+ .rev()
+ .take_while(move |i| {
+ // We know that this CU's start is beneath the probe already because
+ // of our sorted array.
+ debug_assert!(i.range.begin <= probe_high);
+
+ // Each entry keeps track of the maximum end address seen so far,
+ // starting from the beginning of the array of unit ranges. We're
+ // iterating in reverse so if our probe is beyond the maximum range
+ // of this entry, then it's guaranteed to not fit in any prior
+ // entries, so we break out.
+ probe_low < i.max_end
+ })
+ .filter_map(move |i| {
+ // If this CU doesn't actually contain this address, move to the
+ // next CU.
+ if probe_low >= i.range.end || probe_high <= i.range.begin {
+ return None;
+ }
+ Some((&self.dwarf.units[i.unit_id], &i.range))
+ })
+ }
+
+ /// Find the DWARF unit corresponding to the given virtual memory address.
+ pub fn find_dwarf_unit(&self, probe: u64) -> Option<&gimli::Unit<R>> {
+ for unit in self.find_units(probe) {
+ match unit.find_function_or_location(probe, &self.dwarf) {
+ Ok((Some(_), _)) | Ok((_, Some(_))) => return Some(&unit.dw_unit),
+ _ => {}
+ }
+ }
+ None
+ }
+
+ /// Find the source file and line corresponding to the given virtual memory address.
+ pub fn find_location(&self, probe: u64) -> Result<Option<Location<'_>>, Error> {
+ for unit in self.find_units(probe) {
+ if let Some(location) = unit.find_location(probe, &self.dwarf.sections)? {
+ return Ok(Some(location));
+ }
+ }
+ Ok(None)
+ }
+
+ /// Return source file and lines for a range of addresses. For each location it also
+ /// returns the address and size of the range of the underlying instructions.
+ pub fn find_location_range(
+ &self,
+ probe_low: u64,
+ probe_high: u64,
+ ) -> Result<LocationRangeIter<'_, R>, Error> {
+ LocationRangeIter::new(self, probe_low, probe_high)
+ }
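Editor's note: a hedged usage sketch for `find_location_range`, not part of the vendored diff; `ctx`, `low`, and `high` are assumed inputs.

fn dump_range<R: gimli::Reader>(
    ctx: &addr2line::Context<R>,
    low: u64,
    high: u64,
) -> Result<(), gimli::Error> {
    // Each item is (start address, size in bytes, Location) for one line-table row.
    for (addr, size, loc) in ctx.find_location_range(low, high)? {
        println!(
            "{:#x}+{:#x} -> {}:{}",
            addr,
            size,
            loc.file.unwrap_or("??"),
            loc.line.unwrap_or(0)
        );
    }
    Ok(())
}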
+
+ /// Return an iterator for the function frames corresponding to the given virtual
+ /// memory address.
+ ///
+ /// If the probe address is not for an inline function then only one frame is
+ /// returned.
+ ///
+ /// If the probe address is for an inline function then the first frame corresponds
+ /// to the innermost inline function. Subsequent frames contain the caller and call
+ /// location, until a non-inline caller is reached.
+ pub fn find_frames(&self, probe: u64) -> Result<FrameIter<R>, Error> {
+ for unit in self.find_units(probe) {
+ match unit.find_function_or_location(probe, &self.dwarf)? {
+ (Some(function), location) => {
+ let inlined_functions = function.find_inlined_functions(probe);
+ return Ok(FrameIter(FrameIterState::Frames(FrameIterFrames {
+ unit,
+ sections: &self.dwarf.sections,
+ function,
+ inlined_functions,
+ next: location,
+ })));
+ }
+ (None, Some(location)) => {
+ return Ok(FrameIter(FrameIterState::Location(Some(location))));
+ }
+ _ => {}
+ }
+ }
+ Ok(FrameIter(FrameIterState::Empty))
+ }
+
+ /// Initialize all line data structures. This is used for benchmarks.
+ #[doc(hidden)]
+ pub fn parse_lines(&self) -> Result<(), Error> {
+ for unit in &self.dwarf.units {
+ unit.parse_lines(&self.dwarf.sections)?;
+ }
+ Ok(())
+ }
+
+ /// Initialize all function data structures. This is used for benchmarks.
+ #[doc(hidden)]
+ pub fn parse_functions(&self) -> Result<(), Error> {
+ for unit in &self.dwarf.units {
+ unit.parse_functions(&self.dwarf)?;
+ }
+ Ok(())
+ }
+
+ /// Initialize all inlined function data structures. This is used for benchmarks.
+ #[doc(hidden)]
+ pub fn parse_inlined_functions(&self) -> Result<(), Error> {
+ for unit in &self.dwarf.units {
+ unit.parse_inlined_functions(&self.dwarf)?;
+ }
+ Ok(())
+ }
+}
+
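Editor's note: a sketch (not part of the vendored diff) of walking the frames returned by `find_frames` above, innermost inline frame first; `ctx` and `probe` are assumed inputs.

fn print_frames<R: gimli::Reader>(
    ctx: &addr2line::Context<R>,
    probe: u64,
) -> Result<(), gimli::Error> {
    let mut frames = ctx.find_frames(probe)?;
    while let Some(frame) = frames.next()? {
        // Demangle the function name when one is recorded for this frame.
        let name = match frame.function.as_ref() {
            Some(func) => func.demangle()?.into_owned(),
            None => String::from("??"),
        };
        match frame.location {
            Some(loc) => println!(
                "{} at {}:{}",
                name,
                loc.file.unwrap_or("??"),
                loc.line.unwrap_or(0)
            ),
            None => println!("{}", name),
        }
    }
    Ok(())
}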
+struct UnitRange {
+ unit_id: usize,
+ max_end: u64,
+ range: gimli::Range,
+}
+
+struct ResDwarf<R: gimli::Reader> {
+ unit_ranges: Vec<UnitRange>,
+ units: Vec<ResUnit<R>>,
+ sections: Arc<gimli::Dwarf<R>>,
+ sup: Option<Box<ResDwarf<R>>>,
+}
+
+impl<R: gimli::Reader> ResDwarf<R> {
+ fn parse(sections: Arc<gimli::Dwarf<R>>) -> Result<Self, Error> {
+ // Find all the references to compilation units in .debug_aranges.
+ // Note that we always also iterate through all of .debug_info to
+ // find compilation units, because .debug_aranges may be missing some.
+ let mut aranges = Vec::new();
+ let mut headers = sections.debug_aranges.headers();
+ while let Some(header) = headers.next()? {
+ aranges.push((header.debug_info_offset(), header.offset()));
+ }
+ aranges.sort_by_key(|i| i.0);
+
+ let mut unit_ranges = Vec::new();
+ let mut res_units = Vec::new();
+ let mut units = sections.units();
+ while let Some(header) = units.next()? {
+ let unit_id = res_units.len();
+ let offset = match header.offset().as_debug_info_offset() {
+ Some(offset) => offset,
+ None => continue,
+ };
+ // We mainly want compile units, but we may need to follow references to entries
+ // within other units for function names. We don't need anything from type units.
+ match header.type_() {
+ gimli::UnitType::Type { .. } | gimli::UnitType::SplitType { .. } => continue,
+ _ => {}
+ }
+ let dw_unit = match sections.unit(header) {
+ Ok(dw_unit) => dw_unit,
+ Err(_) => continue,
+ };
+
+ let mut lang = None;
+ {
+ let mut entries = dw_unit.entries_raw(None)?;
+
+ let abbrev = match entries.read_abbreviation()? {
+ Some(abbrev) => abbrev,
+ None => continue,
+ };
+
+ let mut ranges = RangeAttributes::default();
+ for spec in abbrev.attributes() {
+ let attr = entries.read_attribute(*spec)?;
+ match attr.name() {
+ gimli::DW_AT_low_pc => {
+ if let gimli::AttributeValue::Addr(val) = attr.value() {
+ ranges.low_pc = Some(val);
+ }
+ }
+ gimli::DW_AT_high_pc => match attr.value() {
+ gimli::AttributeValue::Addr(val) => ranges.high_pc = Some(val),
+ gimli::AttributeValue::Udata(val) => ranges.size = Some(val),
+ _ => {}
+ },
+ gimli::DW_AT_ranges => {
+ ranges.ranges_offset =
+ sections.attr_ranges_offset(&dw_unit, attr.value())?;
+ }
+ gimli::DW_AT_language => {
+ if let gimli::AttributeValue::Language(val) = attr.value() {
+ lang = Some(val);
+ }
+ }
+ _ => {}
+ }
+ }
+
+ // Find the address ranges for the CU, using in order of preference:
+ // - DW_AT_ranges
+ // - .debug_aranges
+ // - DW_AT_low_pc/DW_AT_high_pc
+ //
+ // Using DW_AT_ranges before .debug_aranges is possibly an arbitrary choice,
+ // but the feeling is that DW_AT_ranges is more likely to be reliable or complete
+ // if it is present.
+ //
+ // .debug_aranges must be used before DW_AT_low_pc/DW_AT_high_pc because
+ // it has been observed on macOS that DW_AT_ranges was not emitted even for
+ // discontiguous CUs.
+ let i = match ranges.ranges_offset {
+ Some(_) => None,
+ None => aranges.binary_search_by_key(&offset, |x| x.0).ok(),
+ };
+ if let Some(mut i) = i {
+ // There should be only one set per CU, but in practice multiple
+ // sets have been observed. This is probably a compiler bug, but
+ // either way we need to handle it.
+ while i > 0 && aranges[i - 1].0 == offset {
+ i -= 1;
+ }
+ for (_, aranges_offset) in aranges[i..].iter().take_while(|x| x.0 == offset) {
+ let aranges_header = sections.debug_aranges.header(*aranges_offset)?;
+ let mut aranges = aranges_header.entries();
+ while let Some(arange) = aranges.next()? {
+ if arange.length() != 0 {
+ unit_ranges.push(UnitRange {
+ range: arange.range(),
+ unit_id,
+ max_end: 0,
+ });
+ }
+ }
+ }
+ } else {
+ ranges.for_each_range(&sections, &dw_unit, |range| {
+ unit_ranges.push(UnitRange {
+ range,
+ unit_id,
+ max_end: 0,
+ });
+ })?;
+ }
+ }
+
+ res_units.push(ResUnit {
+ offset,
+ dw_unit,
+ lang,
+ lines: LazyCell::new(),
+ funcs: LazyCell::new(),
+ });
+ }
+
+ // Sort this for faster lookup in `find_unit_and_address` below.
+ unit_ranges.sort_by_key(|i| i.range.begin);
+
+ // Calculate the `max_end` field now that we've determined the order of
+ // CUs.
+ let mut max = 0;
+ for i in unit_ranges.iter_mut() {
+ max = max.max(i.range.end);
+ i.max_end = max;
+ }
+
+ Ok(ResDwarf {
+ units: res_units,
+ unit_ranges,
+ sections,
+ sup: None,
+ })
+ }
+
+ fn find_unit(&self, offset: gimli::DebugInfoOffset<R::Offset>) -> Result<&ResUnit<R>, Error> {
+ match self
+ .units
+ .binary_search_by_key(&offset.0, |unit| unit.offset.0)
+ {
+ // There is never a DIE at the unit offset or before the first unit.
+ Ok(_) | Err(0) => Err(gimli::Error::NoEntryAtGivenOffset),
+ Err(i) => Ok(&self.units[i - 1]),
+ }
+ }
+}
+
+struct Lines {
+ files: Box<[String]>,
+ sequences: Box<[LineSequence]>,
+}
+
+struct LineSequence {
+ start: u64,
+ end: u64,
+ rows: Box<[LineRow]>,
+}
+
+struct LineRow {
+ address: u64,
+ file_index: u64,
+ line: u32,
+ column: u32,
+}
+
+struct ResUnit<R: gimli::Reader> {
+ offset: gimli::DebugInfoOffset<R::Offset>,
+ dw_unit: gimli::Unit<R>,
+ lang: Option<gimli::DwLang>,
+ lines: LazyCell<Result<Lines, Error>>,
+ funcs: LazyCell<Result<Functions<R>, Error>>,
+}
+
+impl<R: gimli::Reader> ResUnit<R> {
+ fn parse_lines(&self, sections: &gimli::Dwarf<R>) -> Result<Option<&Lines>, Error> {
+ let ilnp = match self.dw_unit.line_program {
+ Some(ref ilnp) => ilnp,
+ None => return Ok(None),
+ };
+ self.lines
+ .borrow_with(|| {
+ let mut sequences = Vec::new();
+ let mut sequence_rows = Vec::<LineRow>::new();
+ let mut rows = ilnp.clone().rows();
+ while let Some((_, row)) = rows.next_row()? {
+ if row.end_sequence() {
+ if let Some(start) = sequence_rows.first().map(|x| x.address) {
+ let end = row.address();
+ let mut rows = Vec::new();
+ mem::swap(&mut rows, &mut sequence_rows);
+ sequences.push(LineSequence {
+ start,
+ end,
+ rows: rows.into_boxed_slice(),
+ });
+ }
+ continue;
+ }
+
+ let address = row.address();
+ let file_index = row.file_index();
+ let line = row.line().map(NonZeroU64::get).unwrap_or(0) as u32;
+ let column = match row.column() {
+ gimli::ColumnType::LeftEdge => 0,
+ gimli::ColumnType::Column(x) => x.get() as u32,
+ };
+
+ if let Some(last_row) = sequence_rows.last_mut() {
+ if last_row.address == address {
+ last_row.file_index = file_index;
+ last_row.line = line;
+ last_row.column = column;
+ continue;
+ }
+ }
+
+ sequence_rows.push(LineRow {
+ address,
+ file_index,
+ line,
+ column,
+ });
+ }
+ sequences.sort_by_key(|x| x.start);
+
+ let mut files = Vec::new();
+ let header = ilnp.header();
+ match header.file(0) {
+ Some(file) => files.push(self.render_file(file, header, sections)?),
+ None => files.push(String::from("")), // DWARF version <= 4 may not have 0th index
+ }
+ let mut index = 1;
+ while let Some(file) = header.file(index) {
+ files.push(self.render_file(file, header, sections)?);
+ index += 1;
+ }
+
+ Ok(Lines {
+ files: files.into_boxed_slice(),
+ sequences: sequences.into_boxed_slice(),
+ })
+ })
+ .as_ref()
+ .map(Some)
+ .map_err(Error::clone)
+ }
+
+ fn parse_functions(&self, dwarf: &ResDwarf<R>) -> Result<&Functions<R>, Error> {
+ self.funcs
+ .borrow_with(|| Functions::parse(&self.dw_unit, dwarf))
+ .as_ref()
+ .map_err(Error::clone)
+ }
+
+ fn parse_inlined_functions(&self, dwarf: &ResDwarf<R>) -> Result<(), Error> {
+ self.funcs
+ .borrow_with(|| Functions::parse(&self.dw_unit, dwarf))
+ .as_ref()
+ .map_err(Error::clone)?
+ .parse_inlined_functions(&self.dw_unit, dwarf)
+ }
+
+ fn find_location(
+ &self,
+ probe: u64,
+ sections: &gimli::Dwarf<R>,
+ ) -> Result<Option<Location<'_>>, Error> {
+ if let Some(mut iter) = LocationRangeUnitIter::new(self, sections, probe, probe + 1)? {
+ match iter.next() {
+ None => Ok(None),
+ Some((_addr, _len, loc)) => Ok(Some(loc)),
+ }
+ } else {
+ Ok(None)
+ }
+ }
+
+ #[inline]
+ fn find_location_range(
+ &self,
+ probe_low: u64,
+ probe_high: u64,
+ sections: &gimli::Dwarf<R>,
+ ) -> Result<Option<LocationRangeUnitIter<'_>>, Error> {
+ LocationRangeUnitIter::new(self, sections, probe_low, probe_high)
+ }
+
+ fn find_function_or_location(
+ &self,
+ probe: u64,
+ dwarf: &ResDwarf<R>,
+ ) -> Result<(Option<&Function<R>>, Option<Location<'_>>), Error> {
+ let functions = self.parse_functions(dwarf)?;
+ let function = match functions.find_address(probe) {
+ Some(address) => {
+ let function_index = functions.addresses[address].function;
+ let (offset, ref function) = functions.functions[function_index];
+ Some(
+ function
+ .borrow_with(|| Function::parse(offset, &self.dw_unit, dwarf))
+ .as_ref()
+ .map_err(Error::clone)?,
+ )
+ }
+ None => None,
+ };
+ let location = self.find_location(probe, &dwarf.sections)?;
+ Ok((function, location))
+ }
+
+ fn render_file(
+ &self,
+ file: &gimli::FileEntry<R, R::Offset>,
+ header: &gimli::LineProgramHeader<R, R::Offset>,
+ sections: &gimli::Dwarf<R>,
+ ) -> Result<String, gimli::Error> {
+ let mut path = if let Some(ref comp_dir) = self.dw_unit.comp_dir {
+ comp_dir.to_string_lossy()?.into_owned()
+ } else {
+ String::new()
+ };
+
+ if let Some(directory) = file.directory(header) {
+ path_push(
+ &mut path,
+ sections
+ .attr_string(&self.dw_unit, directory)?
+ .to_string_lossy()?
+ .as_ref(),
+ );
+ }
+
+ path_push(
+ &mut path,
+ sections
+ .attr_string(&self.dw_unit, file.path_name())?
+ .to_string_lossy()?
+ .as_ref(),
+ );
+
+ Ok(path)
+ }
+}
+
+/// Iterator over `Location`s in a range of addresses, returned by `Context::find_location_range`.
+pub struct LocationRangeIter<'ctx, R: gimli::Reader> {
+ unit_iter: Box<dyn Iterator<Item = (&'ctx ResUnit<R>, &'ctx gimli::Range)> + 'ctx>,
+ iter: Option<LocationRangeUnitIter<'ctx>>,
+
+ probe_low: u64,
+ probe_high: u64,
+ sections: &'ctx gimli::Dwarf<R>,
+}
+
+impl<'ctx, R: gimli::Reader> LocationRangeIter<'ctx, R> {
+ #[inline]
+ fn new(ctx: &'ctx Context<R>, probe_low: u64, probe_high: u64) -> Result<Self, Error> {
+ let sections = &ctx.dwarf.sections;
+ let unit_iter = ctx.find_units_range(probe_low, probe_high);
+ Ok(Self {
+ unit_iter: Box::new(unit_iter),
+ iter: None,
+ probe_low,
+ probe_high,
+ sections,
+ })
+ }
+
+ fn next_loc(&mut self) -> Result<Option<(u64, u64, Location<'ctx>)>, Error> {
+ loop {
+ let iter = self.iter.take();
+ match iter {
+ None => match self.unit_iter.next() {
+ Some((unit, range)) => {
+ self.iter = unit.find_location_range(
+ cmp::max(self.probe_low, range.begin),
+ cmp::min(self.probe_high, range.end),
+ self.sections,
+ )?;
+ }
+ None => return Ok(None),
+ },
+ Some(mut iter) => {
+ if let item @ Some(_) = iter.next() {
+ self.iter = Some(iter);
+ return Ok(item);
+ }
+ }
+ }
+ }
+ }
+}
+
+impl<'ctx, R> Iterator for LocationRangeIter<'ctx, R>
+where
+ R: gimli::Reader + 'ctx,
+{
+ type Item = (u64, u64, Location<'ctx>);
+
+ #[inline]
+ fn next(&mut self) -> Option<Self::Item> {
+ match self.next_loc() {
+ Err(_) => None,
+ Ok(loc) => loc,
+ }
+ }
+}
+
+#[cfg(feature = "fallible-iterator")]
+impl<'ctx, R> fallible_iterator::FallibleIterator for LocationRangeIter<'ctx, R>
+where
+ R: gimli::Reader + 'ctx,
+{
+ type Item = (u64, u64, Location<'ctx>);
+ type Error = Error;
+
+ #[inline]
+ fn next(&mut self) -> Result<Option<Self::Item>, Self::Error> {
+ self.next_loc()
+ }
+}
+
+struct LocationRangeUnitIter<'ctx> {
+ lines: &'ctx Lines,
+ seqs: &'ctx [LineSequence],
+ seq_idx: usize,
+ row_idx: usize,
+ probe_high: u64,
+}
+
+impl<'ctx> LocationRangeUnitIter<'ctx> {
+ fn new<R: gimli::Reader>(
+ resunit: &'ctx ResUnit<R>,
+ sections: &gimli::Dwarf<R>,
+ probe_low: u64,
+ probe_high: u64,
+ ) -> Result<Option<Self>, Error> {
+ let lines = resunit.parse_lines(sections)?;
+
+ if let Some(lines) = lines {
+ // Find index for probe_low.
+ let seq_idx = lines.sequences.binary_search_by(|sequence| {
+ if probe_low < sequence.start {
+ Ordering::Greater
+ } else if probe_low >= sequence.end {
+ Ordering::Less
+ } else {
+ Ordering::Equal
+ }
+ });
+ let seq_idx = match seq_idx {
+ Ok(x) => x,
+ Err(0) => 0, // probe below sequence, but range could overlap
+ Err(_) => lines.sequences.len(),
+ };
+
+ let row_idx = if let Some(seq) = lines.sequences.get(seq_idx) {
+ let idx = seq.rows.binary_search_by(|row| row.address.cmp(&probe_low));
+ let idx = match idx {
+ Ok(x) => x,
+ Err(0) => 0, // probe below sequence, but range could overlap
+ Err(x) => x - 1,
+ };
+ idx
+ } else {
+ 0
+ };
+
+ Ok(Some(Self {
+ lines,
+ seqs: &*lines.sequences,
+ seq_idx,
+ row_idx,
+ probe_high,
+ }))
+ } else {
+ Ok(None)
+ }
+ }
+}
+
+impl<'ctx> Iterator for LocationRangeUnitIter<'ctx> {
+ type Item = (u64, u64, Location<'ctx>);
+
+ fn next(&mut self) -> Option<(u64, u64, Location<'ctx>)> {
+ loop {
+ let seq = match self.seqs.get(self.seq_idx) {
+ Some(seq) => seq,
+ None => break,
+ };
+
+ if seq.start >= self.probe_high {
+ break;
+ }
+
+ match seq.rows.get(self.row_idx) {
+ Some(row) => {
+ if row.address >= self.probe_high {
+ break;
+ }
+
+ let file = self
+ .lines
+ .files
+ .get(row.file_index as usize)
+ .map(String::as_str);
+ let nextaddr = seq
+ .rows
+ .get(self.row_idx + 1)
+ .map(|row| row.address)
+ .unwrap_or(seq.end);
+
+ let item = (
+ row.address,
+ nextaddr - row.address,
+ Location {
+ file,
+ line: if row.line != 0 { Some(row.line) } else { None },
+ column: if row.column != 0 {
+ Some(row.column)
+ } else {
+ None
+ },
+ },
+ );
+ self.row_idx += 1;
+
+ return Some(item);
+ }
+ None => {
+ self.seq_idx += 1;
+ self.row_idx = 0;
+ }
+ }
+ }
+ None
+ }
+}
+
+fn path_push(path: &mut String, p: &str) {
+ if has_unix_root(p) || has_windows_root(p) {
+ *path = p.to_string();
+ } else {
+ let dir_separator = if has_windows_root(path.as_str()) {
+ '\\'
+ } else {
+ '/'
+ };
+
+ if !path.ends_with(dir_separator) {
+ path.push(dir_separator);
+ }
+ *path += p;
+ }
+}
+
+/// Check if the path in the given string has a unix style root
+fn has_unix_root(p: &str) -> bool {
+ p.starts_with('/')
+}
+
+/// Check if the path in the given string has a windows style root
+fn has_windows_root(p: &str) -> bool {
+ p.starts_with('\\') || p.get(1..3) == Some(":\\")
+}
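Editor's note: an illustrative in-module test (not part of the vendored source) showing the joining behaviour implemented by `path_push` and the root checks above.

#[cfg(test)]
mod path_push_sketch {
    use super::path_push;

    #[test]
    fn joins_and_replaces() {
        let mut p = String::from("/src");
        path_push(&mut p, "lib.rs"); // a relative component is appended
        assert_eq!(p, "/src/lib.rs");
        path_push(&mut p, "/abs/other"); // a rooted component replaces the path
        assert_eq!(p, "/abs/other");
    }
}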
+struct RangeAttributes<R: gimli::Reader> {
+ low_pc: Option<u64>,
+ high_pc: Option<u64>,
+ size: Option<u64>,
+ ranges_offset: Option<gimli::RangeListsOffset<<R as gimli::Reader>::Offset>>,
+}
+
+impl<R: gimli::Reader> Default for RangeAttributes<R> {
+ fn default() -> Self {
+ RangeAttributes {
+ low_pc: None,
+ high_pc: None,
+ size: None,
+ ranges_offset: None,
+ }
+ }
+}
+
+impl<R: gimli::Reader> RangeAttributes<R> {
+ fn for_each_range<F: FnMut(gimli::Range)>(
+ &self,
+ sections: &gimli::Dwarf<R>,
+ unit: &gimli::Unit<R>,
+ mut f: F,
+ ) -> Result<bool, Error> {
+ let mut added_any = false;
+ let mut add_range = |range: gimli::Range| {
+ if range.begin < range.end {
+ f(range);
+ added_any = true
+ }
+ };
+ if let Some(ranges_offset) = self.ranges_offset {
+ let mut range_list = sections.ranges(unit, ranges_offset)?;
+ while let Some(range) = range_list.next()? {
+ add_range(range);
+ }
+ } else if let (Some(begin), Some(end)) = (self.low_pc, self.high_pc) {
+ add_range(gimli::Range { begin, end });
+ } else if let (Some(begin), Some(size)) = (self.low_pc, self.size) {
+ add_range(gimli::Range {
+ begin,
+ end: begin + size,
+ });
+ }
+ Ok(added_any)
+ }
+}
+
+/// An iterator over function frames.
+pub struct FrameIter<'ctx, R>(FrameIterState<'ctx, R>)
+where
+ R: gimli::Reader + 'ctx;
+
+enum FrameIterState<'ctx, R>
+where
+ R: gimli::Reader + 'ctx,
+{
+ Empty,
+ Location(Option<Location<'ctx>>),
+ Frames(FrameIterFrames<'ctx, R>),
+}
+
+struct FrameIterFrames<'ctx, R>
+where
+ R: gimli::Reader + 'ctx,
+{
+ unit: &'ctx ResUnit<R>,
+ sections: &'ctx gimli::Dwarf<R>,
+ function: &'ctx Function<R>,
+ inlined_functions: iter::Rev<maybe_small::IntoIter<&'ctx InlinedFunction<R>>>,
+ next: Option<Location<'ctx>>,
+}
+
+impl<'ctx, R> FrameIter<'ctx, R>
+where
+ R: gimli::Reader + 'ctx,
+{
+ /// Advances the iterator and returns the next frame.
+ pub fn next(&mut self) -> Result<Option<Frame<'ctx, R>>, Error> {
+ let frames = match &mut self.0 {
+ FrameIterState::Empty => return Ok(None),
+ FrameIterState::Location(location) => {
+ // We can't move out of a mutable reference, so use `take` instead.
+ let location = location.take();
+ self.0 = FrameIterState::Empty;
+ return Ok(Some(Frame {
+ dw_die_offset: None,
+ function: None,
+ location,
+ }));
+ }
+ FrameIterState::Frames(frames) => frames,
+ };
+
+ let loc = frames.next.take();
+ let func = match frames.inlined_functions.next() {
+ Some(func) => func,
+ None => {
+ let frame = Frame {
+ dw_die_offset: Some(frames.function.dw_die_offset),
+ function: frames.function.name.clone().map(|name| FunctionName {
+ name,
+ language: frames.unit.lang,
+ }),
+ location: loc,
+ };
+ self.0 = FrameIterState::Empty;
+ return Ok(Some(frame));
+ }
+ };
+
+ let mut next = Location {
+ file: None,
+ line: if func.call_line != 0 {
+ Some(func.call_line)
+ } else {
+ None
+ },
+ column: if func.call_column != 0 {
+ Some(func.call_column)
+ } else {
+ None
+ },
+ };
+ if func.call_file != 0 {
+ if let Some(lines) = frames.unit.parse_lines(frames.sections)? {
+ next.file = lines.files.get(func.call_file as usize).map(String::as_str);
+ }
+ }
+ frames.next = Some(next);
+
+ Ok(Some(Frame {
+ dw_die_offset: Some(func.dw_die_offset),
+ function: func.name.clone().map(|name| FunctionName {
+ name,
+ language: frames.unit.lang,
+ }),
+ location: loc,
+ }))
+ }
+}
+
+#[cfg(feature = "fallible-iterator")]
+impl<'ctx, R> fallible_iterator::FallibleIterator for FrameIter<'ctx, R>
+where
+ R: gimli::Reader + 'ctx,
+{
+ type Item = Frame<'ctx, R>;
+ type Error = Error;
+
+ #[inline]
+ fn next(&mut self) -> Result<Option<Frame<'ctx, R>>, Error> {
+ self.next()
+ }
+}
+
+/// A function frame.
+pub struct Frame<'ctx, R: gimli::Reader> {
+ /// The DWARF unit offset corresponding to the DIE of the function.
+ pub dw_die_offset: Option<gimli::UnitOffset<R::Offset>>,
+ /// The name of the function.
+ pub function: Option<FunctionName<R>>,
+ /// The source location corresponding to this frame.
+ pub location: Option<Location<'ctx>>,
+}
+
+/// A function name.
+pub struct FunctionName<R: gimli::Reader> {
+ /// The name of the function.
+ pub name: R,
+ /// The language of the compilation unit containing this function.
+ pub language: Option<gimli::DwLang>,
+}
+
+impl<R: gimli::Reader> FunctionName<R> {
+ /// The raw name of this function before demangling.
+ pub fn raw_name(&self) -> Result<Cow<str>, Error> {
+ self.name.to_string_lossy()
+ }
+
+ /// The name of this function after demangling (if applicable).
+ pub fn demangle(&self) -> Result<Cow<str>, Error> {
+ self.raw_name().map(|x| demangle_auto(x, self.language))
+ }
+}
+
+/// Demangle a symbol name using the demangling scheme for the given language.
+///
+/// Returns `None` if demangling failed or is not required.
+#[allow(unused_variables)]
+pub fn demangle(name: &str, language: gimli::DwLang) -> Option<String> {
+ match language {
+ #[cfg(feature = "rustc-demangle")]
+ gimli::DW_LANG_Rust => rustc_demangle::try_demangle(name)
+ .ok()
+ .as_ref()
+ .map(|x| format!("{:#}", x)),
+ #[cfg(feature = "cpp_demangle")]
+ gimli::DW_LANG_C_plus_plus
+ | gimli::DW_LANG_C_plus_plus_03
+ | gimli::DW_LANG_C_plus_plus_11
+ | gimli::DW_LANG_C_plus_plus_14 => cpp_demangle::Symbol::new(name)
+ .ok()
+ .and_then(|x| x.demangle(&Default::default()).ok()),
+ _ => None,
+ }
+}
+
+/// Apply 'best effort' demangling of a symbol name.
+///
+/// If `language` is given, then only the demangling scheme for that language
+/// is used.
+///
+/// If `language` is `None`, then heuristics are used to determine how to
+/// demangle the name. Currently, these heuristics are very basic.
+///
+/// If demangling fails or is not required, then `name` is returned unchanged.
+pub fn demangle_auto(name: Cow<str>, language: Option<gimli::DwLang>) -> Cow<str> {
+ match language {
+ Some(language) => demangle(name.as_ref(), language),
+ None => demangle(name.as_ref(), gimli::DW_LANG_Rust)
+ .or_else(|| demangle(name.as_ref(), gimli::DW_LANG_C_plus_plus)),
+ }
+ .map(Cow::from)
+ .unwrap_or(name)
+}
+
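Editor's note: a small sketch (not part of the vendored diff) of `demangle_auto`'s fallback behaviour; the mangled C++ name and the plain symbol are made up for illustration.

fn demangle_sketch() {
    use std::borrow::Cow;
    // With an explicit language, only that demangling scheme is tried.
    let cpp = addr2line::demangle_auto(Cow::from("_ZN3foo3barEv"), Some(gimli::DW_LANG_C_plus_plus));
    println!("{}", cpp);
    // With `None`, Rust demangling is tried first, then C++; unknown names pass through unchanged.
    let unknown = addr2line::demangle_auto(Cow::from("plain_c_symbol"), None);
    assert_eq!(unknown, "plain_c_symbol");
}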
+/// A source location.
+pub struct Location<'a> {
+ /// The file name.
+ pub file: Option<&'a str>,
+ /// The line number.
+ pub line: Option<u32>,
+ /// The column number.
+ pub column: Option<u32>,
+}
+
+#[cfg(test)]
+mod tests {
+ #[test]
+ fn context_is_send() {
+ fn assert_is_send<T: Send>() {}
+ assert_is_send::<crate::Context<gimli::read::EndianSlice<gimli::LittleEndian>>>();
+ }
+}
diff --git a/vendor/addr2line-0.17.0/tests/correctness.rs b/vendor/addr2line-0.17.0/tests/correctness.rs
new file mode 100644
index 000000000..3f7b43373
--- /dev/null
+++ b/vendor/addr2line-0.17.0/tests/correctness.rs
@@ -0,0 +1,91 @@
+extern crate addr2line;
+extern crate fallible_iterator;
+extern crate findshlibs;
+extern crate gimli;
+extern crate memmap;
+extern crate object;
+
+use addr2line::Context;
+use fallible_iterator::FallibleIterator;
+use findshlibs::{IterationControl, SharedLibrary, TargetSharedLibrary};
+use object::Object;
+use std::fs::File;
+
+fn find_debuginfo() -> memmap::Mmap {
+ let path = std::env::current_exe().unwrap();
+ let file = File::open(&path).unwrap();
+ let map = unsafe { memmap::Mmap::map(&file).unwrap() };
+ let file = &object::File::parse(&*map).unwrap();
+ if let Ok(uuid) = file.mach_uuid() {
+ for candidate in path.parent().unwrap().read_dir().unwrap() {
+ let path = candidate.unwrap().path();
+ if !path.to_str().unwrap().ends_with(".dSYM") {
+ continue;
+ }
+ for candidate in path.join("Contents/Resources/DWARF").read_dir().unwrap() {
+ let path = candidate.unwrap().path();
+ let file = File::open(&path).unwrap();
+ let map = unsafe { memmap::Mmap::map(&file).unwrap() };
+ let file = &object::File::parse(&*map).unwrap();
+ if file.mach_uuid().unwrap() == uuid {
+ return map;
+ }
+ }
+ }
+ }
+
+ return map;
+}
+
+#[test]
+fn correctness() {
+ let map = find_debuginfo();
+ let file = &object::File::parse(&*map).unwrap();
+ let ctx = Context::new(file).unwrap();
+
+ let mut bias = None;
+ TargetSharedLibrary::each(|lib| {
+ bias = Some(lib.virtual_memory_bias().0 as u64);
+ IterationControl::Break
+ });
+
+ let test = |sym: u64, expected_prefix: &str| {
+ let ip = sym.wrapping_sub(bias.unwrap());
+
+ let frames = ctx.find_frames(ip).unwrap();
+ let frame = frames.last().unwrap().unwrap();
+ let name = frame.function.as_ref().unwrap().demangle().unwrap();
+ // Old Rust versions generate DWARF with the wrong linkage name,
+ // so only check the start.
+ if !name.starts_with(expected_prefix) {
+ panic!("incorrect name '{}', expected {:?}", name, expected_prefix);
+ }
+ };
+
+ test(test_function as u64, "correctness::test_function");
+ test(
+ small::test_function as u64,
+ "correctness::small::test_function",
+ );
+ test(auxiliary::foo as u64, "auxiliary::foo");
+}
+
+mod small {
+ pub fn test_function() {
+ println!("y");
+ }
+}
+
+fn test_function() {
+ println!("x");
+}
+
+#[test]
+fn zero_function() {
+ let map = find_debuginfo();
+ let file = &object::File::parse(&*map).unwrap();
+ let ctx = Context::new(file).unwrap();
+ for probe in 0..10 {
+ assert!(ctx.find_frames(probe).unwrap().count().unwrap() < 10);
+ }
+}
diff --git a/vendor/addr2line-0.17.0/tests/output_equivalence.rs b/vendor/addr2line-0.17.0/tests/output_equivalence.rs
new file mode 100644
index 000000000..9dc366672
--- /dev/null
+++ b/vendor/addr2line-0.17.0/tests/output_equivalence.rs
@@ -0,0 +1,145 @@
+extern crate backtrace;
+extern crate findshlibs;
+extern crate rustc_test as test;
+
+use std::env;
+use std::ffi::OsStr;
+use std::path::Path;
+use std::process::Command;
+
+use backtrace::Backtrace;
+use findshlibs::{IterationControl, SharedLibrary, TargetSharedLibrary};
+use test::{ShouldPanic, TestDesc, TestDescAndFn, TestFn, TestName};
+
+fn make_trace() -> Vec<String> {
+ fn foo() -> Backtrace {
+ bar()
+ }
+ #[inline(never)]
+ fn bar() -> Backtrace {
+ baz()
+ }
+ #[inline(always)]
+ fn baz() -> Backtrace {
+ Backtrace::new_unresolved()
+ }
+
+ let mut base_addr = None;
+ TargetSharedLibrary::each(|lib| {
+ base_addr = Some(lib.virtual_memory_bias().0 as isize);
+ IterationControl::Break
+ });
+ let addrfix = -base_addr.unwrap();
+
+ let trace = foo();
+ trace
+ .frames()
+ .iter()
+ .take(5)
+ .map(|x| format!("{:p}", (x.ip() as *const u8).wrapping_offset(addrfix)))
+ .collect()
+}
+
+fn run_cmd<P: AsRef<OsStr>>(exe: P, me: &Path, flags: Option<&str>, trace: &str) -> String {
+ let mut cmd = Command::new(exe);
+ cmd.env("LC_ALL", "C"); // GNU addr2line is localized, we aren't
+ cmd.env("RUST_BACKTRACE", "1"); // if a child crashes, we want to know why
+
+ if let Some(flags) = flags {
+ cmd.arg(flags);
+ }
+ cmd.arg("--exe").arg(me).arg(trace);
+
+ let output = cmd.output().unwrap();
+
+ assert!(output.status.success());
+ String::from_utf8(output.stdout).unwrap()
+}
+
+fn run_test(flags: Option<&str>) {
+ let me = env::current_exe().unwrap();
+ let mut exe = me.clone();
+ assert!(exe.pop());
+ if exe.file_name().unwrap().to_str().unwrap() == "deps" {
+ assert!(exe.pop());
+ }
+ exe.push("examples");
+ exe.push("addr2line");
+
+ assert!(exe.is_file());
+
+ let trace = make_trace();
+
+ // HACK: GNU addr2line has a bug where looking up multiple addresses can cause the second
+ // lookup to fail. Work around this by doing one address at a time.
+ for addr in &trace {
+ let theirs = run_cmd("addr2line", &me, flags, addr);
+ let ours = run_cmd(&exe, &me, flags, addr);
+
+ // HACK: GNU addr2line does not tidy up paths properly, causing double slashes to be printed.
+ // We consider our behavior to be correct, so we fix their output to match ours.
+ let theirs = theirs.replace("//", "/");
+
+ assert!(
+ theirs == ours,
+ "Output not equivalent:
+
+$ addr2line {0} --exe {1} {2}
+{4}
+$ {3} {0} --exe {1} {2}
+{5}
+
+
+",
+ flags.unwrap_or(""),
+ me.display(),
+ trace.join(" "),
+ exe.display(),
+ theirs,
+ ours
+ );
+ }
+}
+
+static FLAGS: &'static str = "aipsf";
+
+fn make_tests() -> Vec<TestDescAndFn> {
+ (0..(1 << FLAGS.len()))
+ .map(|bits| {
+ if bits == 0 {
+ None
+ } else {
+ let mut param = String::new();
+ param.push('-');
+ for (i, flag) in FLAGS.chars().enumerate() {
+ if (bits & (1 << i)) != 0 {
+ param.push(flag);
+ }
+ }
+ Some(param)
+ }
+ })
+ .map(|param| TestDescAndFn {
+ desc: TestDesc {
+ name: TestName::DynTestName(format!(
+ "addr2line {}",
+ param.as_ref().map_or("", String::as_str)
+ )),
+ ignore: false,
+ should_panic: ShouldPanic::No,
+ allow_fail: false,
+ },
+ testfn: TestFn::DynTestFn(Box::new(move || {
+ run_test(param.as_ref().map(String::as_str))
+ })),
+ })
+ .collect()
+}
+
+fn main() {
+ if !cfg!(target_os = "linux") {
+ return;
+ }
+ let args: Vec<_> = env::args().collect();
+ test::test_main(&args, make_tests());
+}
diff --git a/vendor/addr2line-0.17.0/tests/parse.rs b/vendor/addr2line-0.17.0/tests/parse.rs
new file mode 100644
index 000000000..91d66e382
--- /dev/null
+++ b/vendor/addr2line-0.17.0/tests/parse.rs
@@ -0,0 +1,118 @@
+extern crate addr2line;
+extern crate memmap;
+extern crate object;
+
+use std::borrow::Cow;
+use std::env;
+use std::fs::File;
+use std::path::{self, PathBuf};
+
+use object::Object;
+
+fn release_fixture_path() -> PathBuf {
+ if let Ok(p) = env::var("ADDR2LINE_FIXTURE_PATH") {
+ return p.into();
+ }
+
+ let mut path = PathBuf::new();
+ if let Ok(dir) = env::var("CARGO_MANIFEST_DIR") {
+ path.push(dir);
+ }
+ path.push("fixtures");
+ path.push("addr2line-release");
+ path
+}
+
+fn with_file<F: FnOnce(&object::File)>(target: &path::Path, f: F) {
+ let file = File::open(target).unwrap();
+ let map = unsafe { memmap::Mmap::map(&file).unwrap() };
+ let file = object::File::parse(&*map).unwrap();
+ f(&file)
+}
+
+fn dwarf_load<'a>(object: &object::File<'a>) -> gimli::Dwarf<Cow<'a, [u8]>> {
+ let load_section = |id: gimli::SectionId| -> Result<Cow<'a, [u8]>, gimli::Error> {
+ use object::ObjectSection;
+
+ let data = object
+ .section_by_name(id.name())
+ .and_then(|section| section.data().ok())
+ .unwrap_or(&[][..]);
+ Ok(Cow::Borrowed(data))
+ };
+ gimli::Dwarf::load(&load_section).unwrap()
+}
+
+fn dwarf_borrow<'a>(
+ dwarf: &'a gimli::Dwarf<Cow<[u8]>>,
+) -> gimli::Dwarf<gimli::EndianSlice<'a, gimli::LittleEndian>> {
+ let borrow_section: &dyn for<'b> Fn(
+ &'b Cow<[u8]>,
+ ) -> gimli::EndianSlice<'b, gimli::LittleEndian> =
+ &|section| gimli::EndianSlice::new(&*section, gimli::LittleEndian);
+ dwarf.borrow(&borrow_section)
+}
+
+#[test]
+fn parse_base_rc() {
+ let target = release_fixture_path();
+
+ with_file(&target, |file| {
+ addr2line::ObjectContext::new(file).unwrap();
+ });
+}
+
+#[test]
+fn parse_base_slice() {
+ let target = release_fixture_path();
+
+ with_file(&target, |file| {
+ let dwarf = dwarf_load(file);
+ let dwarf = dwarf_borrow(&dwarf);
+ addr2line::Context::from_dwarf(dwarf).unwrap();
+ });
+}
+
+#[test]
+fn parse_lines_rc() {
+ let target = release_fixture_path();
+
+ with_file(&target, |file| {
+ let context = addr2line::ObjectContext::new(file).unwrap();
+ context.parse_lines().unwrap();
+ });
+}
+
+#[test]
+fn parse_lines_slice() {
+ let target = release_fixture_path();
+
+ with_file(&target, |file| {
+ let dwarf = dwarf_load(file);
+ let dwarf = dwarf_borrow(&dwarf);
+ let context = addr2line::Context::from_dwarf(dwarf).unwrap();
+ context.parse_lines().unwrap();
+ });
+}
+
+#[test]
+fn parse_functions_rc() {
+ let target = release_fixture_path();
+
+ with_file(&target, |file| {
+ let context = addr2line::ObjectContext::new(file).unwrap();
+ context.parse_functions().unwrap();
+ });
+}
+
+#[test]
+fn parse_functions_slice() {
+ let target = release_fixture_path();
+
+ with_file(&target, |file| {
+ let dwarf = dwarf_load(file);
+ let dwarf = dwarf_borrow(&dwarf);
+ let context = addr2line::Context::from_dwarf(dwarf).unwrap();
+ context.parse_functions().unwrap();
+ });
+}
diff --git a/vendor/addr2line/.cargo-checksum.json b/vendor/addr2line/.cargo-checksum.json
index b43ad3bbf..43f25b3ba 100644
--- a/vendor/addr2line/.cargo-checksum.json
+++ b/vendor/addr2line/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"CHANGELOG.md":"d4ef249a0a4eff26a34a1f847a3c367dfd9988b4da972ac9c16b1d258b62ad87","Cargo.lock":"290a48d58d1ebfef0f5eaec66191f6c1a41080b89e10e931c6984052008479ab","Cargo.toml":"68243a813e2e6ba40d3e939b9ade5489b3f39a58d7dc391ae447a60591315f4a","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"e99d88d232bf57d70f0fb87f6b496d44b6653f99f8a63d250a54c61ea4bcde40","README.md":"76d28502bd2e83f6a9e3576bd45e9a7fe5308448c4b5384b0d249515b5f67a5c","bench.plot.r":"6a5d7a4d36ed6b3d9919be703a479bef47698bf947818b483ff03951df2d4e01","benchmark.sh":"b35f89b1ca2c1dc0476cdd07f0284b72d41920d1c7b6054072f50ffba296d78d","coverage.sh":"4677e81922d08a82e83068a911717a247c66af12e559f37b78b6be3337ac9f07","examples/addr2line.rs":"75ef29e1d07d49d247990ad970892d64f629766bafa36afddff5a88976e58060","rustfmt.toml":"01ba4719c80b6fe911b091a7c05124b64eeece964e09c058ef8f9805daca546b","src/function.rs":"395f37cdf03201d416d66bc11abeea627be0abb4585104acd927224a26cb9369","src/lazy.rs":"14ec61761369c21d426673f549c21394221533f444b68cd2a8370952eb19f345","src/lib.rs":"5696c0aee67df576f78935c66bb124f4e5fa19cbc9b25faf8f750e7e8dda113c","tests/correctness.rs":"c9325ffdec577bf5e56f5dd72fdff4927153d0a4c34c0fda5aefaeb44a8d26fd","tests/output_equivalence.rs":"38d7b585b7a2ca43b07eef6b34c11f489d1deae138a010123c33188dfb881c11","tests/parse.rs":"9e421ea9d9348721f6c6533cdba1db5b84287fc685f870c7905dea06b596b4db"},"package":"b9ecd88a8c8378ca913a680cd98f0f13ac67383d35993f86c90a70e3f137816b"} \ No newline at end of file
+{"files":{"CHANGELOG.md":"59733fc6186af0929ca4b6508e10ea1777f757c333a355d8154498332aee259f","Cargo.lock":"0b30594b7d3f093b44ca9c53366bbcb3f28a97ed24da65e56fe9961e7250c3c2","Cargo.toml":"c68ebf21efe63bb706716dd7b4bb7d33734629f13db36014d6be49fed6c8d731","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"e99d88d232bf57d70f0fb87f6b496d44b6653f99f8a63d250a54c61ea4bcde40","README.md":"76d28502bd2e83f6a9e3576bd45e9a7fe5308448c4b5384b0d249515b5f67a5c","bench.plot.r":"6a5d7a4d36ed6b3d9919be703a479bef47698bf947818b483ff03951df2d4e01","benchmark.sh":"b35f89b1ca2c1dc0476cdd07f0284b72d41920d1c7b6054072f50ffba296d78d","coverage.sh":"4677e81922d08a82e83068a911717a247c66af12e559f37b78b6be3337ac9f07","examples/addr2line.rs":"624548450eda1c8491fe4de60f0a96d20ef9c0d70770de2d76d803850319c876","rustfmt.toml":"01ba4719c80b6fe911b091a7c05124b64eeece964e09c058ef8f9805daca546b","src/function.rs":"caa2458f48509a0e0e2c4920719383997e9ccab44efceeabcf3019587f438a57","src/lazy.rs":"21bc1a7d8593c11e93577f1f17163705dbbadf752a007fae4401a4150ec9e473","src/lib.rs":"41d575590b9a683349552d7c4c845ef9a2739e7a4a9e01d53d4e0f4a4947fc45","tests/correctness.rs":"0bd7f9bc9d333cca0649d7bb85e07ebc14855ec2f2b9082f4ec752ccea77e1d6","tests/output_equivalence.rs":"9b637de957f4760ed8bdbfac9e1bacf57f0123c54ed0fbfeb8c2c3b7077f3d81","tests/parse.rs":"f0b2437d0c0b204f6527975b10015a62636a61e5b6e20661824c6ddbdfe3eefe"},"package":"a76fd60b23679b7d19bd066031410fb7e458ccc5e958eb5c325888ce4baedc97"} \ No newline at end of file
diff --git a/vendor/addr2line/CHANGELOG.md b/vendor/addr2line/CHANGELOG.md
index 914139400..ed47aa90d 100644
--- a/vendor/addr2line/CHANGELOG.md
+++ b/vendor/addr2line/CHANGELOG.md
@@ -1,3 +1,30 @@
+## 0.19.0 (2022/11/24)
+
+### Breaking changes
+
+* Updated `gimli` and `object` dependencies.
+
+--------------------------------------------------------------------------------
+
+## 0.18.0 (2022/07/16)
+
+### Breaking changes
+
+* Updated `object` dependency.
+
+### Changed
+
+* Fixed handling of relative path for `DW_AT_comp_dir`.
+ [#239](https://github.com/gimli-rs/addr2line/pull/239)
+
+* Fixed handling of `DW_FORM_addrx` for DWARF 5 support.
+ [#243](https://github.com/gimli-rs/addr2line/pull/243)
+
+* Fixed handling of units that are missing range information.
+ [#249](https://github.com/gimli-rs/addr2line/pull/249)
+
+--------------------------------------------------------------------------------
+
## 0.17.0 (2021/10/24)
### Breaking changes
diff --git a/vendor/addr2line/Cargo.lock b/vendor/addr2line/Cargo.lock
index 630d72438..330f3f544 100644
--- a/vendor/addr2line/Cargo.lock
+++ b/vendor/addr2line/Cargo.lock
@@ -4,16 +4,16 @@ version = 3
[[package]]
name = "addr2line"
-version = "0.16.0"
+version = "0.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3e61f2b7f93d2c7d2b08263acaa4a363b3e276806c68af6134c44f523bf1aacd"
+checksum = "b9ecd88a8c8378ca913a680cd98f0f13ac67383d35993f86c90a70e3f137816b"
dependencies = [
- "gimli 0.25.0",
+ "gimli 0.26.2",
]
[[package]]
name = "addr2line"
-version = "0.17.0"
+version = "0.19.0"
dependencies = [
"backtrace",
"clap",
@@ -21,9 +21,9 @@ dependencies = [
"cpp_demangle",
"fallible-iterator",
"findshlibs",
- "gimli 0.26.0",
- "memmap",
- "object",
+ "gimli 0.27.0",
+ "memmap2",
+ "object 0.30.0",
"rustc-demangle",
"rustc-std-workspace-alloc",
"rustc-std-workspace-core",
@@ -39,15 +39,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
[[package]]
-name = "ansi_term"
-version = "0.11.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b"
-dependencies = [
- "winapi 0.3.9",
-]
-
-[[package]]
name = "atty"
version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -60,22 +51,22 @@ dependencies = [
[[package]]
name = "autocfg"
-version = "1.0.1"
+version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a"
+checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
[[package]]
name = "backtrace"
-version = "0.3.62"
+version = "0.3.66"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "091bcdf2da9950f96aa522681ce805e6857f6ca8df73833d35736ab2dc78e152"
+checksum = "cab84319d616cfb654d03394f38ab7e6f0919e181b1b57e1fd15e7fb4077d9a7"
dependencies = [
- "addr2line 0.16.0",
+ "addr2line 0.17.0",
"cc",
"cfg-if",
"libc",
"miniz_oxide",
- "object",
+ "object 0.29.0",
"rustc-demangle",
]
@@ -87,9 +78,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
name = "cc"
-version = "1.0.71"
+version = "1.0.73"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "79c2681d6594606957bbb8631c4b90a7fcaaa72cdb714743a437b156d6a7eedd"
+checksum = "2fff2a6927b3bb87f9595d67196a70493f627687a71d87a0d692242c33f58c11"
[[package]]
name = "cfg-if"
@@ -99,39 +90,48 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "clap"
-version = "2.33.3"
+version = "3.2.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "37e58ac78573c40708d45522f0d80fa2f01cc4f9b4e2bf749807255454312002"
+checksum = "ab8b79fe3946ceb4a0b1c080b4018992b8d27e9ff363644c1c9b6387c854614d"
dependencies = [
- "ansi_term",
"atty",
"bitflags",
+ "clap_lex",
+ "indexmap",
"strsim",
+ "termcolor",
"textwrap",
- "unicode-width",
- "vec_map",
+]
+
+[[package]]
+name = "clap_lex"
+version = "0.2.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2850f2f5a82cbf437dd5af4d49848fbdfc27c157c3d010345776f952765261c5"
+dependencies = [
+ "os_str_bytes",
]
[[package]]
name = "compiler_builtins"
-version = "0.1.51"
+version = "0.1.75"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3587b3669d6f2c1cfd34c475272dabcfef29d52703933f6f72ebb36d6bd81a97"
+checksum = "c6e3183e88f659a862835db8f4b67dbeed3d93e44dd4927eef78edb1c149d784"
[[package]]
name = "cpp_demangle"
-version = "0.3.3"
+version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8ea47428dc9d2237f3c6bc134472edfd63ebba0af932e783506dcfd66f10d18a"
+checksum = "b446fd40bcc17eddd6a4a78f24315eb90afdb3334999ddfd4909985c47722442"
dependencies = [
"cfg-if",
]
[[package]]
name = "crc32fast"
-version = "1.2.1"
+version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "81156fece84ab6a9f2afdb109ce3ae577e42b1228441eded99bd77f627953b1a"
+checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d"
dependencies = [
"cfg-if",
]
@@ -144,9 +144,9 @@ checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7"
[[package]]
name = "findshlibs"
-version = "0.10.1"
+version = "0.10.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d691fdb3f817632d259d09220d4cf0991dbb2c9e59e044a02a59194bf6e14484"
+checksum = "40b9e59cd0f7e0806cca4be089683ecb6434e602038df21fe6bf6711b2f07f64"
dependencies = [
"cc",
"lazy_static",
@@ -156,13 +156,11 @@ dependencies = [
[[package]]
name = "flate2"
-version = "1.0.22"
+version = "1.0.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1e6988e897c1c9c485f43b47a529cef42fde0547f9d8d41a7062518f1d8fc53f"
+checksum = "f82b0f4c27ad9f8bfd1f3208d882da2b09c301bc1c828fd3a00d0216d2fbbff6"
dependencies = [
- "cfg-if",
"crc32fast",
- "libc",
"miniz_oxide",
]
@@ -177,15 +175,15 @@ dependencies = [
[[package]]
name = "gimli"
-version = "0.25.0"
+version = "0.26.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f0a01e0497841a3b2db4f8afa483cce65f7e96a3498bd6c541734792aeac8fe7"
+checksum = "22030e2c5a68ec659fde1e949a745124b48e6fa8b045b7ed5bd1fe4ccc5c4e5d"
[[package]]
name = "gimli"
-version = "0.26.0"
+version = "0.27.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "81a03ce013ffccead76c11a15751231f777d9295b845cc1266ed4d34fcbd7977"
+checksum = "dec7af912d60cdbd3677c1af9352ebae6fb8394d165568a2234df0fa00f87793"
dependencies = [
"compiler_builtins",
"fallible-iterator",
@@ -195,6 +193,12 @@ dependencies = [
]
[[package]]
+name = "hashbrown"
+version = "0.12.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "607c8a29735385251a339424dd462993c0fed8fa09d378f259377df08c126022"
+
+[[package]]
name = "hermit-abi"
version = "0.1.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -204,6 +208,16 @@ dependencies = [
]
[[package]]
+name = "indexmap"
+version = "1.9.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "10a35a97730320ffe8e2d410b5d3b69279b98d2c14bdb8b70ea89ecf7888d41e"
+dependencies = [
+ "autocfg",
+ "hashbrown",
+]
+
+[[package]]
name = "kernel32-sys"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -221,47 +235,60 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "libc"
-version = "0.2.105"
+version = "0.2.126"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "869d572136620d55835903746bcb5cdc54cb2851fd0aeec53220b4bb65ef3013"
+checksum = "349d5a591cd28b49e1d1037471617a32ddcda5731b99419008085f72d5a53836"
[[package]]
name = "memchr"
-version = "2.4.1"
+version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a"
+checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
[[package]]
-name = "memmap"
-version = "0.7.0"
+name = "memmap2"
+version = "0.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6585fd95e7bb50d6cc31e20d4cf9afb4e2ba16c5846fc76793f11218da9c475b"
+checksum = "3a79b39c93a7a5a27eeaf9a23b5ff43f1b9e0ad6b1cdd441140ae53c35613fc7"
dependencies = [
"libc",
- "winapi 0.3.9",
]
[[package]]
name = "miniz_oxide"
-version = "0.4.4"
+version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a92518e98c078586bc6c934028adcca4c92a53d6a958196de835170a01d84e4b"
+checksum = "6f5c75688da582b8ffc1f1799e9db273f32133c49e048f614d22ec3256773ccc"
dependencies = [
"adler",
- "autocfg",
]
[[package]]
name = "object"
-version = "0.27.1"
+version = "0.29.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "21158b2c33aa6d4561f1c0a6ea283ca92bc54802a93b263e910746d679a7eb53"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "object"
+version = "0.30.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "67ac1d3f9a1d3616fd9a60c8d74296f22406a238b6a72f5cc1e6f314df4ffbf9"
+checksum = "239da7f290cfa979f43f85a8efeee9a8a76d0827c356d37f9d3d7254d6b537fb"
dependencies = [
"flate2",
"memchr",
]
[[package]]
+name = "os_str_bytes"
+version = "6.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "648001efe5d5c0102d8cea768e348da85d90af8ba91f0bea908f157951493cd4"
+
+[[package]]
name = "rustc-demangle"
version = "0.1.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -325,9 +352,9 @@ checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"
[[package]]
name = "smallvec"
-version = "1.7.0"
+version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1ecab6c735a6bb4139c0caafd0cc3635748bbb3acf4550e8138122099251f309"
+checksum = "2fd0db749597d91ff862fd1d55ea87f7855a744a8425a64695b6fca237d1dad1"
[[package]]
name = "stable_deref_trait"
@@ -337,9 +364,9 @@ checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
[[package]]
name = "strsim"
-version = "0.8.0"
+version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a"
+checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623"
[[package]]
name = "term"
@@ -352,15 +379,21 @@ dependencies = [
]
[[package]]
-name = "textwrap"
-version = "0.11.0"
+name = "termcolor"
+version = "1.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060"
+checksum = "bab24d30b911b2376f3a13cc2cd443142f0c81dda04c118693e35b3835757755"
dependencies = [
- "unicode-width",
+ "winapi-util",
]
[[package]]
+name = "textwrap"
+version = "0.15.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b1141d4d61095b28419e22cb0bbf02755f5e54e0526f97f1e3d1d160e60885fb"
+
+[[package]]
name = "time"
version = "0.1.44"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -384,12 +417,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3ed742d4ea2bd1176e236172c8429aaf54486e7ac098db29ffe6529e0ce50973"
[[package]]
-name = "vec_map"
-version = "0.8.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191"
-
-[[package]]
name = "wasi"
version = "0.10.0+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -424,6 +451,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
[[package]]
+name = "winapi-util"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178"
+dependencies = [
+ "winapi 0.3.9",
+]
+
+[[package]]
name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
diff --git a/vendor/addr2line/Cargo.toml b/vendor/addr2line/Cargo.toml
index 358995e53..2b51239b8 100644
--- a/vendor/addr2line/Cargo.toml
+++ b/vendor/addr2line/Cargo.toml
@@ -3,38 +3,40 @@
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
-# to registry (e.g., crates.io) dependencies
+# to registry (e.g., crates.io) dependencies.
#
-# If you believe there's an error in this file please file an
-# issue against the rust-lang/cargo repository. If you're
-# editing this file be aware that the upstream Cargo.toml
-# will likely look very different (and much more reasonable)
+# If you are reading this file be aware that the original Cargo.toml
+# will likely look very different (and much more reasonable).
+# See Cargo.toml.orig for the original contents.
[package]
name = "addr2line"
-version = "0.17.0"
-exclude = ["/benches/*", "/fixtures/*", ".github"]
+version = "0.19.0"
+exclude = [
+ "/benches/*",
+ "/fixtures/*",
+ ".github",
+]
description = "A cross-platform symbolication library written in Rust, using `gimli`"
documentation = "https://docs.rs/addr2line"
readme = "./README.md"
-keywords = ["DWARF", "debug", "elf", "symbolicate", "atos"]
+keywords = [
+ "DWARF",
+ "debug",
+ "elf",
+ "symbolicate",
+ "atos",
+]
categories = ["development-tools::debugging"]
license = "Apache-2.0 OR MIT"
repository = "https://github.com/gimli-rs/addr2line"
+
[profile.bench]
codegen-units = 1
debug = true
-split-debuginfo = "packed"
-
-[profile.dev]
-split-debuginfo = "packed"
[profile.release]
debug = true
-split-debuginfo = "packed"
-
-[profile.test]
-split-debuginfo = "packed"
[[example]]
name = "addr2line"
@@ -52,6 +54,7 @@ required-features = ["default"]
[[test]]
name = "parse"
required-features = ["std-object"]
+
[dependencies.alloc]
version = "1.0.0"
optional = true
@@ -67,7 +70,8 @@ optional = true
package = "rustc-std-workspace-core"
[dependencies.cpp_demangle]
-version = "0.3"
+version = "0.4"
+features = ["alloc"]
optional = true
default-features = false
@@ -77,12 +81,12 @@ optional = true
default-features = false
[dependencies.gimli]
-version = "0.26"
+version = "0.27.0"
features = ["read"]
default-features = false
[dependencies.object]
-version = "0.27.1"
+version = "0.30.0"
features = ["read"]
optional = true
default-features = false
@@ -95,17 +99,18 @@ optional = true
version = "1"
optional = true
default-features = false
+
[dev-dependencies.backtrace]
version = "0.3.13"
[dev-dependencies.clap]
-version = "2"
+version = "3.1.6"
[dev-dependencies.findshlibs]
version = "0.10"
-[dev-dependencies.memmap]
-version = "0.7"
+[dev-dependencies.memmap2]
+version = "0.5.5"
[dev-dependencies.rustc-test]
version = "0.3"
@@ -114,7 +119,24 @@ version = "0.3"
version = "2"
[features]
-default = ["rustc-demangle", "cpp_demangle", "std-object", "fallible-iterator", "smallvec"]
-rustc-dep-of-std = ["core", "alloc", "compiler_builtins", "gimli/rustc-dep-of-std"]
+default = [
+ "rustc-demangle",
+ "cpp_demangle",
+ "std-object",
+ "fallible-iterator",
+ "smallvec",
+]
+rustc-dep-of-std = [
+ "core",
+ "alloc",
+ "compiler_builtins",
+ "gimli/rustc-dep-of-std",
+]
std = ["gimli/std"]
-std-object = ["std", "object", "object/std", "object/compression", "gimli/endian-reader"]
+std-object = [
+ "std",
+ "object",
+ "object/std",
+ "object/compression",
+ "gimli/endian-reader",
+]
diff --git a/vendor/addr2line/examples/addr2line.rs b/vendor/addr2line/examples/addr2line.rs
index 4b228a706..fa4d8e457 100644
--- a/vendor/addr2line/examples/addr2line.rs
+++ b/vendor/addr2line/examples/addr2line.rs
@@ -2,7 +2,7 @@ extern crate addr2line;
extern crate clap;
extern crate fallible_iterator;
extern crate gimli;
-extern crate memmap;
+extern crate memmap2;
extern crate object;
extern crate typed_arena;
@@ -11,18 +11,18 @@ use std::fs::File;
use std::io::{BufRead, Lines, StdinLock, Write};
use std::path::Path;
-use clap::{App, Arg, Values};
+use clap::{Arg, Command, Values};
use fallible_iterator::FallibleIterator;
-use object::{Object, ObjectSection};
+use object::{Object, ObjectSection, SymbolMap, SymbolMapName};
use typed_arena::Arena;
use addr2line::{Context, Location};
-fn parse_uint_from_hex_string(string: &str) -> u64 {
+fn parse_uint_from_hex_string(string: &str) -> Option<u64> {
if string.len() > 2 && string.starts_with("0x") {
- u64::from_str_radix(&string[2..], 16).expect("Failed to parse address")
+ u64::from_str_radix(&string[2..], 16).ok()
} else {
- u64::from_str_radix(string, 16).expect("Failed to parse address")
+ u64::from_str_radix(string, 16).ok()
}
}
@@ -32,9 +32,9 @@ enum Addrs<'a> {
}
impl<'a> Iterator for Addrs<'a> {
- type Item = u64;
+ type Item = Option<u64>;
- fn next(&mut self) -> Option<u64> {
+ fn next(&mut self) -> Option<Option<u64>> {
let text = match *self {
Addrs::Args(ref mut vals) => vals.next().map(Cow::from),
Addrs::Stdin(ref mut lines) => lines.next().map(Result::unwrap).map(Cow::from),
@@ -45,15 +45,18 @@ impl<'a> Iterator for Addrs<'a> {
}
}
-fn print_loc(loc: &Option<Location>, basenames: bool, llvm: bool) {
- if let Some(ref loc) = *loc {
- let file = loc.file.as_ref().unwrap();
- let path = if basenames {
- Path::new(Path::new(file).file_name().unwrap())
+fn print_loc(loc: Option<&Location>, basenames: bool, llvm: bool) {
+ if let Some(ref loc) = loc {
+ if let Some(ref file) = loc.file.as_ref() {
+ let path = if basenames {
+ Path::new(Path::new(file).file_name().unwrap())
+ } else {
+ Path::new(file)
+ };
+ print!("{}:", path.display());
} else {
- Path::new(file)
- };
- print!("{}:", path.display());
+ print!("??:");
+ }
if llvm {
print!("{}:{}", loc.line.unwrap_or(0), loc.column.unwrap_or(0));
} else if let Some(line) = loc.line {
@@ -65,15 +68,19 @@ fn print_loc(loc: &Option<Location>, basenames: bool, llvm: bool) {
} else if llvm {
println!("??:0:0");
} else {
- println!("??:?");
+ println!("??:0");
}
}
-fn print_function(name: &str, language: Option<gimli::DwLang>, demangle: bool) {
- if demangle {
- print!("{}", addr2line::demangle_auto(Cow::from(name), language));
+fn print_function(name: Option<&str>, language: Option<gimli::DwLang>, demangle: bool) {
+ if let Some(name) = name {
+ if demangle {
+ print!("{}", addr2line::demangle_auto(Cow::from(name), language));
+ } else {
+ print!("{}", name);
+ }
} else {
- print!("{}", name);
+ print!("??");
}
}
@@ -94,77 +101,61 @@ fn load_file_section<'input, 'arena, Endian: gimli::Endianity>(
}
}
+fn find_name_from_symbols<'a>(
+ symbols: &'a SymbolMap<SymbolMapName>,
+ probe: u64,
+) -> Option<&'a str> {
+ symbols.get(probe).map(|x| x.name())
+}
+
fn main() {
- let matches = App::new("hardliner")
+ let matches = Command::new("addr2line")
.version("0.1")
- .about("A fast addr2line clone")
- .arg(
- Arg::with_name("exe")
- .short("e")
+ .about("A fast addr2line Rust port")
+ .args(&[
+ Arg::new("exe")
+ .short('e')
.long("exe")
.value_name("filename")
.help(
"Specify the name of the executable for which addresses should be translated.",
)
.required(true),
- )
- .arg(
- Arg::with_name("sup")
+ Arg::new("sup")
.long("sup")
.value_name("filename")
.help("Path to supplementary object file."),
- )
- .arg(
- Arg::with_name("functions")
- .short("f")
+ Arg::new("functions")
+ .short('f')
.long("functions")
.help("Display function names as well as file and line number information."),
- )
- .arg(
- Arg::with_name("pretty")
- .short("p")
- .long("pretty-print")
- .help(
- "Make the output more human friendly: each location are printed on \
- one line.",
- ),
- )
- .arg(Arg::with_name("inlines").short("i").long("inlines").help(
- "If the address belongs to a function that was inlined, the source \
- information for all enclosing scopes back to the first non-inlined \
- function will also be printed.",
- ))
- .arg(
- Arg::with_name("addresses")
- .short("a")
- .long("addresses")
- .help(
- "Display the address before the function name, file and line \
- number information.",
- ),
- )
- .arg(
- Arg::with_name("basenames")
- .short("s")
+ Arg::new("pretty").short('p').long("pretty-print").help(
+ "Make the output more human friendly: each location are printed on one line.",
+ ),
+ Arg::new("inlines").short('i').long("inlines").help(
+ "If the address belongs to a function that was inlined, the source information for \
+ all enclosing scopes back to the first non-inlined function will also be printed.",
+ ),
+ Arg::new("addresses").short('a').long("addresses").help(
+ "Display the address before the function name, file and line number information.",
+ ),
+ Arg::new("basenames")
+ .short('s')
.long("basenames")
.help("Display only the base of each file name."),
- )
- .arg(Arg::with_name("demangle").short("C").long("demangle").help(
- "Demangle function names. \
- Specifying a specific demangling style (like GNU addr2line) \
- is not supported. (TODO)",
- ))
- .arg(
- Arg::with_name("llvm")
+ Arg::new("demangle").short('C').long("demangle").help(
+ "Demangle function names. \
+ Specifying a specific demangling style (like GNU addr2line) is not supported. \
+ (TODO)"
+ ),
+ Arg::new("llvm")
.long("llvm")
.help("Display output in the same format as llvm-symbolizer."),
- )
- .arg(
- Arg::with_name("addrs")
+ Arg::new("addrs")
.takes_value(true)
- .multiple(true)
+ .multiple_occurrences(true)
.help("Addresses to use instead of reading from stdin."),
- )
+ ])
.get_matches();
let arena_data = Arena::new();
@@ -179,7 +170,7 @@ fn main() {
let path = matches.value_of("exe").unwrap();
let file = File::open(path).unwrap();
- let map = unsafe { memmap::Mmap::map(&file).unwrap() };
+ let map = unsafe { memmap2::Mmap::map(&file).unwrap() };
let object = &object::File::parse(&*map).unwrap();
let endian = if object.is_little_endian() {
@@ -195,7 +186,7 @@ fn main() {
let sup_map;
let sup_object = if let Some(sup_path) = matches.value_of("sup") {
let sup_file = File::open(sup_path).unwrap();
- sup_map = unsafe { memmap::Mmap::map(&sup_file).unwrap() };
+ sup_map = unsafe { memmap2::Mmap::map(&sup_file).unwrap() };
Some(object::File::parse(&*sup_map).unwrap())
} else {
None
@@ -220,10 +211,11 @@ fn main() {
for probe in addrs {
if print_addrs {
+ let addr = probe.unwrap_or(0);
if llvm {
- print!("0x{:x}", probe);
+ print!("0x{:x}", addr);
} else {
- print!("0x{:016x}", probe);
+ print!("0x{:016x}", addr);
}
if pretty {
print!(": ");
@@ -234,44 +226,46 @@ fn main() {
if do_functions || do_inlines {
let mut printed_anything = false;
- let mut frames = ctx.find_frames(probe).unwrap().enumerate();
- while let Some((i, frame)) = frames.next().unwrap() {
- if pretty && i != 0 {
- print!(" (inlined by) ");
- }
-
- if do_functions {
- if let Some(func) = frame.function {
- print_function(&func.raw_name().unwrap(), func.language, demangle);
- } else if let Some(name) = symbols.get(probe).map(|x| x.name()) {
- print_function(name, None, demangle);
- } else {
- print!("??");
+ if let Some(probe) = probe {
+ let mut frames = ctx.find_frames(probe).unwrap().enumerate();
+ while let Some((i, frame)) = frames.next().unwrap() {
+ if pretty && i != 0 {
+ print!(" (inlined by) ");
}
- if pretty {
- print!(" at ");
- } else {
- println!();
+ if do_functions {
+ if let Some(func) = frame.function {
+ print_function(
+ func.raw_name().ok().as_ref().map(AsRef::as_ref),
+ func.language,
+ demangle,
+ );
+ } else {
+ let name = find_name_from_symbols(&symbols, probe);
+ print_function(name, None, demangle);
+ }
+
+ if pretty {
+ print!(" at ");
+ } else {
+ println!();
+ }
}
- }
- print_loc(&frame.location, basenames, llvm);
+ print_loc(frame.location.as_ref(), basenames, llvm);
- printed_anything = true;
+ printed_anything = true;
- if !do_inlines {
- break;
+ if !do_inlines {
+ break;
+ }
}
}
if !printed_anything {
if do_functions {
- if let Some(name) = symbols.get(probe).map(|x| x.name()) {
- print_function(name, None, demangle);
- } else {
- print!("??");
- }
+ let name = probe.and_then(|probe| find_name_from_symbols(&symbols, probe));
+ print_function(name, None, demangle);
if pretty {
print!(" at ");
@@ -280,15 +274,11 @@ fn main() {
}
}
- if llvm {
- println!("??:0:0");
- } else {
- println!("??:?");
- }
+ print_loc(None, basenames, llvm);
}
} else {
- let loc = ctx.find_location(probe).unwrap();
- print_loc(&loc, basenames, llvm);
+ let loc = probe.and_then(|probe| ctx.find_location(probe).unwrap());
+ print_loc(loc.as_ref(), basenames, llvm);
}
if llvm {
diff --git a/vendor/addr2line/src/function.rs b/vendor/addr2line/src/function.rs
index 1589acdbe..44dc73f24 100644
--- a/vendor/addr2line/src/function.rs
+++ b/vendor/addr2line/src/function.rs
@@ -68,15 +68,24 @@ impl<R: gimli::Reader> Functions<R> {
match entries.read_attribute(*spec) {
Ok(ref attr) => {
match attr.name() {
- gimli::DW_AT_low_pc => {
- if let gimli::AttributeValue::Addr(val) = attr.value() {
- ranges.low_pc = Some(val);
+ gimli::DW_AT_low_pc => match attr.value() {
+ gimli::AttributeValue::Addr(val) => {
+ ranges.low_pc = Some(val)
}
- }
+ gimli::AttributeValue::DebugAddrIndex(index) => {
+ ranges.low_pc =
+ Some(dwarf.sections.address(unit, index)?);
+ }
+ _ => {}
+ },
gimli::DW_AT_high_pc => match attr.value() {
gimli::AttributeValue::Addr(val) => {
ranges.high_pc = Some(val)
}
+ gimli::AttributeValue::DebugAddrIndex(index) => {
+ ranges.high_pc =
+ Some(dwarf.sections.address(unit, index)?);
+ }
gimli::AttributeValue::Udata(val) => {
ranges.size = Some(val)
}
@@ -352,13 +361,18 @@ impl<R: gimli::Reader> InlinedFunction<R> {
for spec in abbrev.attributes() {
match entries.read_attribute(*spec) {
Ok(ref attr) => match attr.name() {
- gimli::DW_AT_low_pc => {
- if let gimli::AttributeValue::Addr(val) = attr.value() {
- ranges.low_pc = Some(val);
+ gimli::DW_AT_low_pc => match attr.value() {
+ gimli::AttributeValue::Addr(val) => ranges.low_pc = Some(val),
+ gimli::AttributeValue::DebugAddrIndex(index) => {
+ ranges.low_pc = Some(dwarf.sections.address(unit, index)?);
}
- }
+ _ => {}
+ },
gimli::DW_AT_high_pc => match attr.value() {
gimli::AttributeValue::Addr(val) => ranges.high_pc = Some(val),
+ gimli::AttributeValue::DebugAddrIndex(index) => {
+ ranges.high_pc = Some(dwarf.sections.address(unit, index)?);
+ }
gimli::AttributeValue::Udata(val) => ranges.size = Some(val),
_ => {}
},
diff --git a/vendor/addr2line/src/lazy.rs b/vendor/addr2line/src/lazy.rs
index 280c76b46..a34ed176a 100644
--- a/vendor/addr2line/src/lazy.rs
+++ b/vendor/addr2line/src/lazy.rs
@@ -11,19 +11,17 @@ impl<T> LazyCell<T> {
}
pub fn borrow_with(&self, closure: impl FnOnce() -> T) -> &T {
- unsafe {
- // First check if we're already initialized...
- let ptr = self.contents.get();
- if let Some(val) = &*ptr {
- return val;
- }
- // Note that while we're executing `closure` our `borrow_with` may
- // be called recursively. This means we need to check again after
- // the closure has executed. For that we use the `get_or_insert`
- // method which will only perform mutation if we aren't already
- // `Some`.
- let val = closure();
- (*ptr).get_or_insert(val)
+ // First check if we're already initialized...
+ let ptr = self.contents.get();
+ if let Some(val) = unsafe { &*ptr } {
+ return val;
}
+ // Note that while we're executing `closure` our `borrow_with` may
+ // be called recursively. This means we need to check again after
+ // the closure has executed. For that we use the `get_or_insert`
+ // method which will only perform mutation if we aren't already
+ // `Some`.
+ let val = closure();
+ unsafe { (*ptr).get_or_insert(val) }
}
}
diff --git a/vendor/addr2line/src/lib.rs b/vendor/addr2line/src/lib.rs
index b46a98393..3afa37f8f 100644
--- a/vendor/addr2line/src/lib.rs
+++ b/vendor/addr2line/src/lib.rs
@@ -181,6 +181,7 @@ impl<R: gimli::Reader> Context<R> {
ranges: gimli::RangeLists::new(debug_ranges, debug_rnglists),
file_type: gimli::DwarfFileType::Main,
sup: None,
+ abbreviations_cache: gimli::AbbreviationsCache::new(),
})
}
@@ -404,6 +405,7 @@ impl<R: gimli::Reader> ResDwarf<R> {
};
let mut lang = None;
+ let mut have_unit_range = false;
{
let mut entries = dw_unit.entries_raw(None)?;
@@ -416,13 +418,18 @@ impl<R: gimli::Reader> ResDwarf<R> {
for spec in abbrev.attributes() {
let attr = entries.read_attribute(*spec)?;
match attr.name() {
- gimli::DW_AT_low_pc => {
- if let gimli::AttributeValue::Addr(val) = attr.value() {
- ranges.low_pc = Some(val);
+ gimli::DW_AT_low_pc => match attr.value() {
+ gimli::AttributeValue::Addr(val) => ranges.low_pc = Some(val),
+ gimli::AttributeValue::DebugAddrIndex(index) => {
+ ranges.low_pc = Some(sections.address(&dw_unit, index)?);
}
- }
+ _ => {}
+ },
gimli::DW_AT_high_pc => match attr.value() {
gimli::AttributeValue::Addr(val) => ranges.high_pc = Some(val),
+ gimli::AttributeValue::DebugAddrIndex(index) => {
+ ranges.high_pc = Some(sections.address(&dw_unit, index)?);
+ }
gimli::AttributeValue::Udata(val) => ranges.size = Some(val),
_ => {}
},
@@ -472,11 +479,12 @@ impl<R: gimli::Reader> ResDwarf<R> {
unit_id,
max_end: 0,
});
+ have_unit_range = true;
}
}
}
} else {
- ranges.for_each_range(&sections, &dw_unit, |range| {
+ have_unit_range |= ranges.for_each_range(&sections, &dw_unit, |range| {
unit_ranges.push(UnitRange {
range,
unit_id,
@@ -486,11 +494,34 @@ impl<R: gimli::Reader> ResDwarf<R> {
}
}
+ let lines = LazyCell::new();
+ if !have_unit_range {
+ // The unit did not declare any ranges.
+ // Try to get some ranges from the line program sequences.
+ if let Some(ref ilnp) = dw_unit.line_program {
+ if let Ok(lines) = lines
+ .borrow_with(|| Lines::parse(&dw_unit, ilnp.clone(), &*sections))
+ .as_ref()
+ {
+ for sequence in lines.sequences.iter() {
+ unit_ranges.push(UnitRange {
+ range: gimli::Range {
+ begin: sequence.start,
+ end: sequence.end,
+ },
+ unit_id,
+ max_end: 0,
+ })
+ }
+ }
+ }
+ }
+
res_units.push(ResUnit {
offset,
dw_unit,
lang,
- lines: LazyCell::new(),
+ lines,
funcs: LazyCell::new(),
});
}
@@ -531,6 +562,111 @@ struct Lines {
sequences: Box<[LineSequence]>,
}
+impl Lines {
+ fn parse<R: gimli::Reader>(
+ dw_unit: &gimli::Unit<R>,
+ ilnp: gimli::IncompleteLineProgram<R, R::Offset>,
+ sections: &gimli::Dwarf<R>,
+ ) -> Result<Self, Error> {
+ let mut sequences = Vec::new();
+ let mut sequence_rows = Vec::<LineRow>::new();
+ let mut rows = ilnp.rows();
+ while let Some((_, row)) = rows.next_row()? {
+ if row.end_sequence() {
+ if let Some(start) = sequence_rows.first().map(|x| x.address) {
+ let end = row.address();
+ let mut rows = Vec::new();
+ mem::swap(&mut rows, &mut sequence_rows);
+ sequences.push(LineSequence {
+ start,
+ end,
+ rows: rows.into_boxed_slice(),
+ });
+ }
+ continue;
+ }
+
+ let address = row.address();
+ let file_index = row.file_index();
+ let line = row.line().map(NonZeroU64::get).unwrap_or(0) as u32;
+ let column = match row.column() {
+ gimli::ColumnType::LeftEdge => 0,
+ gimli::ColumnType::Column(x) => x.get() as u32,
+ };
+
+ if let Some(last_row) = sequence_rows.last_mut() {
+ if last_row.address == address {
+ last_row.file_index = file_index;
+ last_row.line = line;
+ last_row.column = column;
+ continue;
+ }
+ }
+
+ sequence_rows.push(LineRow {
+ address,
+ file_index,
+ line,
+ column,
+ });
+ }
+ sequences.sort_by_key(|x| x.start);
+
+ let mut files = Vec::new();
+ let header = rows.header();
+ match header.file(0) {
+ Some(file) => files.push(render_file(dw_unit, file, header, sections)?),
+ None => files.push(String::from("")), // DWARF version <= 4 may not have 0th index
+ }
+ let mut index = 1;
+ while let Some(file) = header.file(index) {
+ files.push(render_file(dw_unit, file, header, sections)?);
+ index += 1;
+ }
+
+ Ok(Self {
+ files: files.into_boxed_slice(),
+ sequences: sequences.into_boxed_slice(),
+ })
+ }
+}
+
+fn render_file<R: gimli::Reader>(
+ dw_unit: &gimli::Unit<R>,
+ file: &gimli::FileEntry<R, R::Offset>,
+ header: &gimli::LineProgramHeader<R, R::Offset>,
+ sections: &gimli::Dwarf<R>,
+) -> Result<String, gimli::Error> {
+ let mut path = if let Some(ref comp_dir) = dw_unit.comp_dir {
+ comp_dir.to_string_lossy()?.into_owned()
+ } else {
+ String::new()
+ };
+
+ // The directory index 0 is defined to correspond to the compilation unit directory.
+ if file.directory_index() != 0 {
+ if let Some(directory) = file.directory(header) {
+ path_push(
+ &mut path,
+ sections
+ .attr_string(dw_unit, directory)?
+ .to_string_lossy()?
+ .as_ref(),
+ );
+ }
+ }
+
+ path_push(
+ &mut path,
+ sections
+ .attr_string(dw_unit, file.path_name())?
+ .to_string_lossy()?
+ .as_ref(),
+ );
+
+ Ok(path)
+}
+
struct LineSequence {
start: u64,
end: u64,
@@ -559,68 +695,7 @@ impl<R: gimli::Reader> ResUnit<R> {
None => return Ok(None),
};
self.lines
- .borrow_with(|| {
- let mut sequences = Vec::new();
- let mut sequence_rows = Vec::<LineRow>::new();
- let mut rows = ilnp.clone().rows();
- while let Some((_, row)) = rows.next_row()? {
- if row.end_sequence() {
- if let Some(start) = sequence_rows.first().map(|x| x.address) {
- let end = row.address();
- let mut rows = Vec::new();
- mem::swap(&mut rows, &mut sequence_rows);
- sequences.push(LineSequence {
- start,
- end,
- rows: rows.into_boxed_slice(),
- });
- }
- continue;
- }
-
- let address = row.address();
- let file_index = row.file_index();
- let line = row.line().map(NonZeroU64::get).unwrap_or(0) as u32;
- let column = match row.column() {
- gimli::ColumnType::LeftEdge => 0,
- gimli::ColumnType::Column(x) => x.get() as u32,
- };
-
- if let Some(last_row) = sequence_rows.last_mut() {
- if last_row.address == address {
- last_row.file_index = file_index;
- last_row.line = line;
- last_row.column = column;
- continue;
- }
- }
-
- sequence_rows.push(LineRow {
- address,
- file_index,
- line,
- column,
- });
- }
- sequences.sort_by_key(|x| x.start);
-
- let mut files = Vec::new();
- let header = ilnp.header();
- match header.file(0) {
- Some(file) => files.push(self.render_file(file, header, sections)?),
- None => files.push(String::from("")), // DWARF version <= 4 may not have 0th index
- }
- let mut index = 1;
- while let Some(file) = header.file(index) {
- files.push(self.render_file(file, header, sections)?);
- index += 1;
- }
-
- Ok(Lines {
- files: files.into_boxed_slice(),
- sequences: sequences.into_boxed_slice(),
- })
- })
+ .borrow_with(|| Lines::parse(&self.dw_unit, ilnp.clone(), sections))
.as_ref()
.map(Some)
.map_err(Error::clone)
@@ -688,39 +763,6 @@ impl<R: gimli::Reader> ResUnit<R> {
let location = self.find_location(probe, &dwarf.sections)?;
Ok((function, location))
}
-
- fn render_file(
- &self,
- file: &gimli::FileEntry<R, R::Offset>,
- header: &gimli::LineProgramHeader<R, R::Offset>,
- sections: &gimli::Dwarf<R>,
- ) -> Result<String, gimli::Error> {
- let mut path = if let Some(ref comp_dir) = self.dw_unit.comp_dir {
- comp_dir.to_string_lossy()?.into_owned()
- } else {
- String::new()
- };
-
- if let Some(directory) = file.directory(header) {
- path_push(
- &mut path,
- sections
- .attr_string(&self.dw_unit, directory)?
- .to_string_lossy()?
- .as_ref(),
- );
- }
-
- path_push(
- &mut path,
- sections
- .attr_string(&self.dw_unit, file.path_name())?
- .to_string_lossy()?
- .as_ref(),
- );
-
- Ok(path)
- }
}
/// Iterator over `Location`s in a range of addresses, returned by `Context::find_location_range`.
@@ -928,7 +970,7 @@ fn path_push(path: &mut String, p: &str) {
'/'
};
- if !path.ends_with(dir_separator) {
+ if !path.is_empty() && !path.ends_with(dir_separator) {
path.push(dir_separator);
}
*path += p;
diff --git a/vendor/addr2line/tests/correctness.rs b/vendor/addr2line/tests/correctness.rs
index 3f7b43373..955e2b831 100644
--- a/vendor/addr2line/tests/correctness.rs
+++ b/vendor/addr2line/tests/correctness.rs
@@ -2,7 +2,7 @@ extern crate addr2line;
extern crate fallible_iterator;
extern crate findshlibs;
extern crate gimli;
-extern crate memmap;
+extern crate memmap2;
extern crate object;
use addr2line::Context;
@@ -11,10 +11,10 @@ use findshlibs::{IterationControl, SharedLibrary, TargetSharedLibrary};
use object::Object;
use std::fs::File;
-fn find_debuginfo() -> memmap::Mmap {
+fn find_debuginfo() -> memmap2::Mmap {
let path = std::env::current_exe().unwrap();
let file = File::open(&path).unwrap();
- let map = unsafe { memmap::Mmap::map(&file).unwrap() };
+ let map = unsafe { memmap2::Mmap::map(&file).unwrap() };
let file = &object::File::parse(&*map).unwrap();
if let Ok(uuid) = file.mach_uuid() {
for candidate in path.parent().unwrap().read_dir().unwrap() {
@@ -25,7 +25,7 @@ fn find_debuginfo() -> memmap::Mmap {
for candidate in path.join("Contents/Resources/DWARF").read_dir().unwrap() {
let path = candidate.unwrap().path();
let file = File::open(&path).unwrap();
- let map = unsafe { memmap::Mmap::map(&file).unwrap() };
+ let map = unsafe { memmap2::Mmap::map(&file).unwrap() };
let file = &object::File::parse(&*map).unwrap();
if file.mach_uuid().unwrap() == uuid {
return map;
@@ -41,11 +41,12 @@ fn find_debuginfo() -> memmap::Mmap {
fn correctness() {
let map = find_debuginfo();
let file = &object::File::parse(&*map).unwrap();
+ let module_base = file.relative_address_base();
let ctx = Context::new(file).unwrap();
let mut bias = None;
TargetSharedLibrary::each(|lib| {
- bias = Some(lib.virtual_memory_bias().0 as u64);
+ bias = Some((lib.virtual_memory_bias().0 as u64).wrapping_sub(module_base));
IterationControl::Break
});
diff --git a/vendor/addr2line/tests/output_equivalence.rs b/vendor/addr2line/tests/output_equivalence.rs
index 9dc366672..c0e1f8335 100644
--- a/vendor/addr2line/tests/output_equivalence.rs
+++ b/vendor/addr2line/tests/output_equivalence.rs
@@ -11,6 +11,7 @@ use backtrace::Backtrace;
use findshlibs::{IterationControl, SharedLibrary, TargetSharedLibrary};
use test::{ShouldPanic, TestDesc, TestDescAndFn, TestFn, TestName};
+#[inline(never)]
fn make_trace() -> Vec<String> {
fn foo() -> Backtrace {
bar()
diff --git a/vendor/addr2line/tests/parse.rs b/vendor/addr2line/tests/parse.rs
index 91d66e382..60b2300b5 100644
--- a/vendor/addr2line/tests/parse.rs
+++ b/vendor/addr2line/tests/parse.rs
@@ -1,5 +1,5 @@
extern crate addr2line;
-extern crate memmap;
+extern crate memmap2;
extern crate object;
use std::borrow::Cow;
@@ -25,7 +25,7 @@ fn release_fixture_path() -> PathBuf {
fn with_file<F: FnOnce(&object::File)>(target: &path::Path, f: F) {
let file = File::open(target).unwrap();
- let map = unsafe { memmap::Mmap::map(&file).unwrap() };
+ let map = unsafe { memmap2::Mmap::map(&file).unwrap() };
let file = object::File::parse(&*map).unwrap();
f(&file)
}
diff --git a/vendor/ansi_term/.cargo-checksum.json b/vendor/ansi_term/.cargo-checksum.json
deleted file mode 100644
index ee41459db..000000000
--- a/vendor/ansi_term/.cargo-checksum.json
+++ /dev/null
@@ -1 +0,0 @@
-{"files":{"Cargo.lock":"31bb7b361278d99a00595cbd916c444e6fd193b5f0b1ea0cf2d9454440739501","Cargo.toml":"4ca681d6949661455ac88541ffa68ebc7db50cb2b6e9a2134e6d0687da4997c3","LICENCE":"2762990c7fbba9d550802a2593c1d857dcd52596bb0f9f192a97e9a7ac5f4f9e","README.md":"8d983e1bb3cc99724010d9073a5be6452cd49bd57a877525fd0a5dd41e6591d5","examples/256_colours.rs":"5f2845068bc2d93cff4a61f18ffa44fbbbc91be771dfd686d537d343f37041da","examples/basic_colours.rs":"d610795f3743d10d90ec4e5ab32cc09fb16640896cecd2f93fca434a0920397c","examples/rgb_colours.rs":"8399e5131e959a56c932036b790e601fb4ad658856112daf87f933889b443f2c","src/ansi.rs":"988fb87936064fa006fcc9474ac62099c8d6e98d38bb80cec2cd864066482a08","src/debug.rs":"61343f8bf13695020102c033aeaacd9ccd3ec830eacbf9011127e61829451d20","src/difference.rs":"9b4b8f91c72932bfda262abdceff0ec124a5a8dd27d07bd4d2e5e7889135c6c9","src/display.rs":"c04f2397d1d1d86a5e2188c2840c505cb0baeaf9706a88d4bbe56eadc67811b9","src/lib.rs":"b85df4b9b8832cda777db049efa2ec84b9847438fa3feaf8540e597ce2532a47","src/style.rs":"1042fc973f5ea8bbb2a2faec334aad530520b53edc9b3296174ae38c1060490b","src/util.rs":"07c127f732887573a1c9126fc0288e13e7a8f1f803513b95e50aac2905171b0d","src/windows.rs":"7ce7dd6738b9728fcd3908c284b6f29a9bdfb34af761b4c7385cf7e3e1b20e64","src/write.rs":"c9ec03764ad1ecea8b680243c9cafc5e70919fcea7500cc18246ffd8f6bb4b33"},"package":"d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2"} \ No newline at end of file
diff --git a/vendor/ansi_term/Cargo.lock b/vendor/ansi_term/Cargo.lock
deleted file mode 100644
index b21004554..000000000
--- a/vendor/ansi_term/Cargo.lock
+++ /dev/null
@@ -1,168 +0,0 @@
-# This file is automatically @generated by Cargo.
-# It is not intended for manual editing.
-[[package]]
-name = "aho-corasick"
-version = "0.7.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "ansi_term"
-version = "0.12.1"
-dependencies = [
- "doc-comment 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "regex 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 1.0.94 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde_json 1.0.40 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "doc-comment"
-version = "0.3.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "itoa"
-version = "0.4.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "lazy_static"
-version = "1.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "memchr"
-version = "2.2.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "proc-macro2"
-version = "0.4.30"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "quote"
-version = "0.6.13"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "regex"
-version = "1.2.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "aho-corasick 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "regex-syntax 0.6.11 (registry+https://github.com/rust-lang/crates.io-index)",
- "thread_local 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "regex-syntax"
-version = "0.6.11"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "ryu"
-version = "1.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "serde"
-version = "1.0.94"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "serde_derive 1.0.94 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "serde_derive"
-version = "1.0.94"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)",
- "syn 0.15.39 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "serde_json"
-version = "1.0.40"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "itoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)",
- "ryu 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 1.0.94 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "syn"
-version = "0.15.39"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)",
- "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "thread_local"
-version = "0.3.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "unicode-xid"
-version = "0.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "winapi"
-version = "0.3.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "winapi-i686-pc-windows-gnu"
-version = "0.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "winapi-x86_64-pc-windows-gnu"
-version = "0.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[metadata]
-"checksum aho-corasick 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)" = "58fb5e95d83b38284460a5fda7d6470aa0b8844d283a0b614b8535e880800d2d"
-"checksum doc-comment 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "923dea538cea0aa3025e8685b20d6ee21ef99c4f77e954a30febbaac5ec73a97"
-"checksum itoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "501266b7edd0174f8530248f87f99c88fbe60ca4ef3dd486835b8d8d53136f7f"
-"checksum lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
-"checksum memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "88579771288728879b57485cc7d6b07d648c9f0141eb955f8ab7f9d45394468e"
-"checksum proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)" = "cf3d2011ab5c909338f7887f4fc896d35932e29146c12c8d01da6b22a80ba759"
-"checksum quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)" = "6ce23b6b870e8f94f81fb0a363d65d86675884b34a09043c81e5562f11c1f8e1"
-"checksum regex 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "88c3d9193984285d544df4a30c23a4e62ead42edf70a4452ceb76dac1ce05c26"
-"checksum regex-syntax 0.6.11 (registry+https://github.com/rust-lang/crates.io-index)" = "b143cceb2ca5e56d5671988ef8b15615733e7ee16cd348e064333b251b89343f"
-"checksum ryu 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c92464b447c0ee8c4fb3824ecc8383b81717b9f1e74ba2e72540aef7b9f82997"
-"checksum serde 1.0.94 (registry+https://github.com/rust-lang/crates.io-index)" = "076a696fdea89c19d3baed462576b8f6d663064414b5c793642da8dfeb99475b"
-"checksum serde_derive 1.0.94 (registry+https://github.com/rust-lang/crates.io-index)" = "ef45eb79d6463b22f5f9e16d283798b7c0175ba6050bc25c1a946c122727fe7b"
-"checksum serde_json 1.0.40 (registry+https://github.com/rust-lang/crates.io-index)" = "051c49229f282f7c6f3813f8286cc1e3323e8051823fce42c7ea80fe13521704"
-"checksum syn 0.15.39 (registry+https://github.com/rust-lang/crates.io-index)" = "b4d960b829a55e56db167e861ddb43602c003c7be0bee1d345021703fac2fb7c"
-"checksum thread_local 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c6b53e329000edc2b34dbe8545fd20e55a333362d0a321909685a19bd28c3f1b"
-"checksum unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc"
-"checksum winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "92c1eb33641e276cfa214a0522acad57be5c56b10cb348b3c5117db75f3ac4b0"
-"checksum winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
-"checksum winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
diff --git a/vendor/ansi_term/Cargo.toml b/vendor/ansi_term/Cargo.toml
deleted file mode 100644
index 0e5febabd..000000000
--- a/vendor/ansi_term/Cargo.toml
+++ /dev/null
@@ -1,43 +0,0 @@
-# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
-#
-# When uploading crates to the registry Cargo will automatically
-# "normalize" Cargo.toml files for maximal compatibility
-# with all versions of Cargo and also rewrite `path` dependencies
-# to registry (e.g., crates.io) dependencies
-#
-# If you believe there's an error in this file please file an
-# issue against the rust-lang/cargo repository. If you're
-# editing this file be aware that the upstream Cargo.toml
-# will likely look very different (and much more reasonable)
-
-[package]
-name = "ansi_term"
-version = "0.12.1"
-authors = ["ogham@bsago.me", "Ryan Scheel (Havvy) <ryan.havvy@gmail.com>", "Josh Triplett <josh@joshtriplett.org>"]
-description = "Library for ANSI terminal colours and styles (bold, underline)"
-homepage = "https://github.com/ogham/rust-ansi-term"
-documentation = "https://docs.rs/ansi_term"
-readme = "README.md"
-license = "MIT"
-repository = "https://github.com/ogham/rust-ansi-term"
-
-[lib]
-name = "ansi_term"
-[dependencies.serde]
-version = "1.0.90"
-features = ["derive"]
-optional = true
-[dev-dependencies.doc-comment]
-version = "0.3"
-
-[dev-dependencies.regex]
-version = "1.1.9"
-
-[dev-dependencies.serde_json]
-version = "1.0.39"
-
-[features]
-derive_serde_style = ["serde"]
-[target."cfg(target_os=\"windows\")".dependencies.winapi]
-version = "0.3.4"
-features = ["consoleapi", "errhandlingapi", "fileapi", "handleapi", "processenv"]
diff --git a/vendor/ansi_term/examples/basic_colours.rs b/vendor/ansi_term/examples/basic_colours.rs
deleted file mode 100644
index ba8815621..000000000
--- a/vendor/ansi_term/examples/basic_colours.rs
+++ /dev/null
@@ -1,18 +0,0 @@
-extern crate ansi_term;
-use ansi_term::{Style, Colour::*};
-
-// This example prints out the 16 basic colours.
-
-fn main() {
- let normal = Style::default();
-
- println!("{} {}", normal.paint("Normal"), normal.bold().paint("bold"));
- println!("{} {}", Black.paint("Black"), Black.bold().paint("bold"));
- println!("{} {}", Red.paint("Red"), Red.bold().paint("bold"));
- println!("{} {}", Green.paint("Green"), Green.bold().paint("bold"));
- println!("{} {}", Yellow.paint("Yellow"), Yellow.bold().paint("bold"));
- println!("{} {}", Blue.paint("Blue"), Blue.bold().paint("bold"));
- println!("{} {}", Purple.paint("Purple"), Purple.bold().paint("bold"));
- println!("{} {}", Cyan.paint("Cyan"), Cyan.bold().paint("bold"));
- println!("{} {}", White.paint("White"), White.bold().paint("bold"));
-}
diff --git a/vendor/ansi_term/examples/rgb_colours.rs b/vendor/ansi_term/examples/rgb_colours.rs
deleted file mode 100644
index fd2cc7a85..000000000
--- a/vendor/ansi_term/examples/rgb_colours.rs
+++ /dev/null
@@ -1,23 +0,0 @@
-extern crate ansi_term;
-use ansi_term::{Style, Colour};
-
-// This example prints out a colour gradient in a grid by calculating each
-// character’s red, green, and blue components, and using 24-bit colour codes
-// to display them.
-
-const WIDTH: i32 = 80;
-const HEIGHT: i32 = 24;
-
-fn main() {
- for row in 0 .. HEIGHT {
- for col in 0 .. WIDTH {
- let r = (row * 255 / HEIGHT) as u8;
- let g = (col * 255 / WIDTH) as u8;
- let b = 128;
-
- print!("{}", Style::default().on(Colour::RGB(r, g, b)).paint(" "));
- }
-
- print!("\n");
- }
-}
diff --git a/vendor/ansi_term/src/style.rs b/vendor/ansi_term/src/style.rs
deleted file mode 100644
index 1bee4d91c..000000000
--- a/vendor/ansi_term/src/style.rs
+++ /dev/null
@@ -1,521 +0,0 @@
-/// A style is a collection of properties that can format a string
-/// using ANSI escape codes.
-///
-/// # Examples
-///
-/// ```
-/// use ansi_term::{Style, Colour};
-///
-/// let style = Style::new().bold().on(Colour::Black);
-/// println!("{}", style.paint("Bold on black"));
-/// ```
-#[derive(PartialEq, Clone, Copy)]
-#[cfg_attr(feature = "derive_serde_style", derive(serde::Deserialize, serde::Serialize))]
-pub struct Style {
-
- /// The style's foreground colour, if it has one.
- pub foreground: Option<Colour>,
-
- /// The style's background colour, if it has one.
- pub background: Option<Colour>,
-
- /// Whether this style is bold.
- pub is_bold: bool,
-
- /// Whether this style is dimmed.
- pub is_dimmed: bool,
-
- /// Whether this style is italic.
- pub is_italic: bool,
-
- /// Whether this style is underlined.
- pub is_underline: bool,
-
- /// Whether this style is blinking.
- pub is_blink: bool,
-
- /// Whether this style has reverse colours.
- pub is_reverse: bool,
-
- /// Whether this style is hidden.
- pub is_hidden: bool,
-
- /// Whether this style is struckthrough.
- pub is_strikethrough: bool
-}
-
-impl Style {
-
- /// Creates a new Style with no properties set.
- ///
- /// # Examples
- ///
- /// ```
- /// use ansi_term::Style;
- ///
- /// let style = Style::new();
- /// println!("{}", style.paint("hi"));
- /// ```
- pub fn new() -> Style {
- Style::default()
- }
-
- /// Returns a `Style` with the bold property set.
- ///
- /// # Examples
- ///
- /// ```
- /// use ansi_term::Style;
- ///
- /// let style = Style::new().bold();
- /// println!("{}", style.paint("hey"));
- /// ```
- pub fn bold(&self) -> Style {
- Style { is_bold: true, .. *self }
- }
-
- /// Returns a `Style` with the dimmed property set.
- ///
- /// # Examples
- ///
- /// ```
- /// use ansi_term::Style;
- ///
- /// let style = Style::new().dimmed();
- /// println!("{}", style.paint("sup"));
- /// ```
- pub fn dimmed(&self) -> Style {
- Style { is_dimmed: true, .. *self }
- }
-
- /// Returns a `Style` with the italic property set.
- ///
- /// # Examples
- ///
- /// ```
- /// use ansi_term::Style;
- ///
- /// let style = Style::new().italic();
- /// println!("{}", style.paint("greetings"));
- /// ```
- pub fn italic(&self) -> Style {
- Style { is_italic: true, .. *self }
- }
-
- /// Returns a `Style` with the underline property set.
- ///
- /// # Examples
- ///
- /// ```
- /// use ansi_term::Style;
- ///
- /// let style = Style::new().underline();
- /// println!("{}", style.paint("salutations"));
- /// ```
- pub fn underline(&self) -> Style {
- Style { is_underline: true, .. *self }
- }
-
- /// Returns a `Style` with the blink property set.
- /// # Examples
- ///
- /// ```
- /// use ansi_term::Style;
- ///
- /// let style = Style::new().blink();
- /// println!("{}", style.paint("wazzup"));
- /// ```
- pub fn blink(&self) -> Style {
- Style { is_blink: true, .. *self }
- }
-
- /// Returns a `Style` with the reverse property set.
- ///
- /// # Examples
- ///
- /// ```
- /// use ansi_term::Style;
- ///
- /// let style = Style::new().reverse();
- /// println!("{}", style.paint("aloha"));
- /// ```
- pub fn reverse(&self) -> Style {
- Style { is_reverse: true, .. *self }
- }
-
- /// Returns a `Style` with the hidden property set.
- ///
- /// # Examples
- ///
- /// ```
- /// use ansi_term::Style;
- ///
- /// let style = Style::new().hidden();
- /// println!("{}", style.paint("ahoy"));
- /// ```
- pub fn hidden(&self) -> Style {
- Style { is_hidden: true, .. *self }
- }
-
- /// Returns a `Style` with the strikethrough property set.
- ///
- /// # Examples
- ///
- /// ```
- /// use ansi_term::Style;
- ///
- /// let style = Style::new().strikethrough();
- /// println!("{}", style.paint("yo"));
- /// ```
- pub fn strikethrough(&self) -> Style {
- Style { is_strikethrough: true, .. *self }
- }
-
- /// Returns a `Style` with the foreground colour property set.
- ///
- /// # Examples
- ///
- /// ```
- /// use ansi_term::{Style, Colour};
- ///
- /// let style = Style::new().fg(Colour::Yellow);
- /// println!("{}", style.paint("hi"));
- /// ```
- pub fn fg(&self, foreground: Colour) -> Style {
- Style { foreground: Some(foreground), .. *self }
- }
-
- /// Returns a `Style` with the background colour property set.
- ///
- /// # Examples
- ///
- /// ```
- /// use ansi_term::{Style, Colour};
- ///
- /// let style = Style::new().on(Colour::Blue);
- /// println!("{}", style.paint("eyyyy"));
- /// ```
- pub fn on(&self, background: Colour) -> Style {
- Style { background: Some(background), .. *self }
- }
-
- /// Return true if this `Style` has no actual styles, and can be written
- /// without any control characters.
- ///
- /// # Examples
- ///
- /// ```
- /// use ansi_term::Style;
- ///
- /// assert_eq!(true, Style::default().is_plain());
- /// assert_eq!(false, Style::default().bold().is_plain());
- /// ```
- pub fn is_plain(self) -> bool {
- self == Style::default()
- }
-}
-
-impl Default for Style {
-
- /// Returns a style with *no* properties set. Formatting text using this
- /// style returns the exact same text.
- ///
- /// ```
- /// use ansi_term::Style;
- /// assert_eq!(None, Style::default().foreground);
- /// assert_eq!(None, Style::default().background);
- /// assert_eq!(false, Style::default().is_bold);
- /// assert_eq!("txt", Style::default().paint("txt").to_string());
- /// ```
- fn default() -> Style {
- Style {
- foreground: None,
- background: None,
- is_bold: false,
- is_dimmed: false,
- is_italic: false,
- is_underline: false,
- is_blink: false,
- is_reverse: false,
- is_hidden: false,
- is_strikethrough: false,
- }
- }
-}
-
-
-// ---- colours ----
-
-/// A colour is one specific type of ANSI escape code, and can refer
-/// to either the foreground or background colour.
-///
-/// These use the standard numeric sequences.
-/// See <http://invisible-island.net/xterm/ctlseqs/ctlseqs.html>
-#[derive(PartialEq, Clone, Copy, Debug)]
-#[cfg_attr(feature = "derive_serde_style", derive(serde::Deserialize, serde::Serialize))]
-pub enum Colour {
-
- /// Colour #0 (foreground code `30`, background code `40`).
- ///
- /// This is not necessarily the background colour, and using it as one may
- /// render the text hard to read on terminals with dark backgrounds.
- Black,
-
- /// Colour #1 (foreground code `31`, background code `41`).
- Red,
-
- /// Colour #2 (foreground code `32`, background code `42`).
- Green,
-
- /// Colour #3 (foreground code `33`, background code `43`).
- Yellow,
-
- /// Colour #4 (foreground code `34`, background code `44`).
- Blue,
-
- /// Colour #5 (foreground code `35`, background code `45`).
- Purple,
-
- /// Colour #6 (foreground code `36`, background code `46`).
- Cyan,
-
- /// Colour #7 (foreground code `37`, background code `47`).
- ///
- /// As above, this is not necessarily the foreground colour, and may be
- /// hard to read on terminals with light backgrounds.
- White,
-
- /// A colour number from 0 to 255, for use in 256-colour terminal
- /// environments.
- ///
- /// - Colours 0 to 7 are the `Black` to `White` variants respectively.
- /// These colours can usually be changed in the terminal emulator.
- /// - Colours 8 to 15 are brighter versions of the eight colours above.
- /// These can also usually be changed in the terminal emulator, or it
- /// could be configured to use the original colours and show the text in
- /// bold instead. It varies depending on the program.
- /// - Colours 16 to 231 contain several palettes of bright colours,
- /// arranged in six squares measuring six by six each.
- /// - Colours 232 to 255 are shades of grey from black to white.
- ///
- /// It might make more sense to look at a [colour chart][cc].
- ///
- /// [cc]: https://upload.wikimedia.org/wikipedia/commons/1/15/Xterm_256color_chart.svg
- Fixed(u8),
-
- /// A 24-bit RGB color, as specified by ISO-8613-3.
- RGB(u8, u8, u8),
-}
-
-
-impl Colour {
-
- /// Returns a `Style` with the foreground colour set to this colour.
- ///
- /// # Examples
- ///
- /// ```
- /// use ansi_term::Colour;
- ///
- /// let style = Colour::Red.normal();
- /// println!("{}", style.paint("hi"));
- /// ```
- pub fn normal(self) -> Style {
- Style { foreground: Some(self), .. Style::default() }
- }
-
- /// Returns a `Style` with the foreground colour set to this colour and the
- /// bold property set.
- ///
- /// # Examples
- ///
- /// ```
- /// use ansi_term::Colour;
- ///
- /// let style = Colour::Green.bold();
- /// println!("{}", style.paint("hey"));
- /// ```
- pub fn bold(self) -> Style {
- Style { foreground: Some(self), is_bold: true, .. Style::default() }
- }
-
- /// Returns a `Style` with the foreground colour set to this colour and the
- /// dimmed property set.
- ///
- /// # Examples
- ///
- /// ```
- /// use ansi_term::Colour;
- ///
- /// let style = Colour::Yellow.dimmed();
- /// println!("{}", style.paint("sup"));
- /// ```
- pub fn dimmed(self) -> Style {
- Style { foreground: Some(self), is_dimmed: true, .. Style::default() }
- }
-
- /// Returns a `Style` with the foreground colour set to this colour and the
- /// italic property set.
- ///
- /// # Examples
- ///
- /// ```
- /// use ansi_term::Colour;
- ///
- /// let style = Colour::Blue.italic();
- /// println!("{}", style.paint("greetings"));
- /// ```
- pub fn italic(self) -> Style {
- Style { foreground: Some(self), is_italic: true, .. Style::default() }
- }
-
- /// Returns a `Style` with the foreground colour set to this colour and the
- /// underline property set.
- ///
- /// # Examples
- ///
- /// ```
- /// use ansi_term::Colour;
- ///
- /// let style = Colour::Purple.underline();
- /// println!("{}", style.paint("salutations"));
- /// ```
- pub fn underline(self) -> Style {
- Style { foreground: Some(self), is_underline: true, .. Style::default() }
- }
-
- /// Returns a `Style` with the foreground colour set to this colour and the
- /// blink property set.
- ///
- /// # Examples
- ///
- /// ```
- /// use ansi_term::Colour;
- ///
- /// let style = Colour::Cyan.blink();
- /// println!("{}", style.paint("wazzup"));
- /// ```
- pub fn blink(self) -> Style {
- Style { foreground: Some(self), is_blink: true, .. Style::default() }
- }
-
- /// Returns a `Style` with the foreground colour set to this colour and the
- /// reverse property set.
- ///
- /// # Examples
- ///
- /// ```
- /// use ansi_term::Colour;
- ///
- /// let style = Colour::Black.reverse();
- /// println!("{}", style.paint("aloha"));
- /// ```
- pub fn reverse(self) -> Style {
- Style { foreground: Some(self), is_reverse: true, .. Style::default() }
- }
-
- /// Returns a `Style` with the foreground colour set to this colour and the
- /// hidden property set.
- ///
- /// # Examples
- ///
- /// ```
- /// use ansi_term::Colour;
- ///
- /// let style = Colour::White.hidden();
- /// println!("{}", style.paint("ahoy"));
- /// ```
- pub fn hidden(self) -> Style {
- Style { foreground: Some(self), is_hidden: true, .. Style::default() }
- }
-
- /// Returns a `Style` with the foreground colour set to this colour and the
- /// strikethrough property set.
- ///
- /// # Examples
- ///
- /// ```
- /// use ansi_term::Colour;
- ///
- /// let style = Colour::Fixed(244).strikethrough();
- /// println!("{}", style.paint("yo"));
- /// ```
- pub fn strikethrough(self) -> Style {
- Style { foreground: Some(self), is_strikethrough: true, .. Style::default() }
- }
-
- /// Returns a `Style` with the foreground colour set to this colour and the
- /// background colour property set to the given colour.
- ///
- /// # Examples
- ///
- /// ```
- /// use ansi_term::Colour;
- ///
- /// let style = Colour::RGB(31, 31, 31).on(Colour::White);
- /// println!("{}", style.paint("eyyyy"));
- /// ```
- pub fn on(self, background: Colour) -> Style {
- Style { foreground: Some(self), background: Some(background), .. Style::default() }
- }
-}
-
-impl From<Colour> for Style {
-
- /// You can turn a `Colour` into a `Style` with the foreground colour set
- /// with the `From` trait.
- ///
- /// ```
- /// use ansi_term::{Style, Colour};
- /// let green_foreground = Style::default().fg(Colour::Green);
- /// assert_eq!(green_foreground, Colour::Green.normal());
- /// assert_eq!(green_foreground, Colour::Green.into());
- /// assert_eq!(green_foreground, Style::from(Colour::Green));
- /// ```
- fn from(colour: Colour) -> Style {
- colour.normal()
- }
-}
-
-#[cfg(test)]
-#[cfg(feature = "derive_serde_style")]
-mod serde_json_tests {
- use super::{Style, Colour};
-
- #[test]
- fn colour_serialization() {
-
- let colours = &[
- Colour::Red,
- Colour::Blue,
- Colour::RGB(123, 123, 123),
- Colour::Fixed(255),
- ];
-
- assert_eq!(serde_json::to_string(&colours).unwrap(), String::from("[\"Red\",\"Blue\",{\"RGB\":[123,123,123]},{\"Fixed\":255}]"));
- }
-
- #[test]
- fn colour_deserialization() {
- let colours = &[
- Colour::Red,
- Colour::Blue,
- Colour::RGB(123, 123, 123),
- Colour::Fixed(255),
- ];
-
- for colour in colours.into_iter() {
- let serialized = serde_json::to_string(&colour).unwrap();
- let deserialized: Colour = serde_json::from_str(&serialized).unwrap();
-
- assert_eq!(colour, &deserialized);
- }
- }
-
- #[test]
- fn style_serialization() {
- let style = Style::default();
-
- assert_eq!(serde_json::to_string(&style).unwrap(), "{\"foreground\":null,\"background\":null,\"is_bold\":false,\"is_dimmed\":false,\"is_italic\":false,\"is_underline\":false,\"is_blink\":false,\"is_reverse\":false,\"is_hidden\":false,\"is_strikethrough\":false}".to_string());
- }
-}
diff --git a/vendor/anyhow/.cargo-checksum.json b/vendor/anyhow/.cargo-checksum.json
index e1190e628..7fef58ba8 100644
--- a/vendor/anyhow/.cargo-checksum.json
+++ b/vendor/anyhow/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.toml":"c8e64a8ca00738e01d382e5ec9ce52e3869df34371a2f9bbf008fc8c23817200","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"4bd4d352368ac0f5447031d82454490f6ac0c80d2fa4cb64ba0c23c614670d49","build.rs":"88bf7100143c79c0af683da7f28deaac031c9b9b213a6426560dc089b0ba45aa","rust-toolchain.toml":"6bbb61302978c736b2da03e4fb40e3beab908f85d533ab46fd541e637b5f3e0f","src/backtrace.rs":"5b4103a7d24d6f438a64b8cc0fafe28d55fc0ca090368174ce44d64e3940badd","src/chain.rs":"6edefc5f3c7d69683095862e54e3bb56faba5b3387bf2eeaed429da090007a0a","src/context.rs":"fe733dd36f34ee8d8fc26569cc94df0236a8c1600ed4c969a63afe06dcb7afeb","src/ensure.rs":"498bc9c7fb8b93168ed12f532cb97df6ccdda9ce25371586d7f5b1b1c98a14bf","src/error.rs":"e45d4dcfe64b1823b42fbf9bb260e6437987e8c2d51f92434db9d808b36e700a","src/fmt.rs":"c2d4aad6ce20625a70a7c091e3087b6a2c19a4a87c7a12edb4c98978307245ea","src/kind.rs":"332854c5eb07d44447c356a2e7dc585634b0da1ffbbfa81269c369deaefbc247","src/lib.rs":"dc32c43ef5a7d690f764af71198b7d14fefb0247ae43926aedecf605876c30dd","src/macros.rs":"dd35f2ec2a0a25e4504fb04bcd42f6d0963bc0035aaaefc412f5ee1d78945fe1","src/ptr.rs":"f4e28bc9feba1e84160ca9d185008a51b5d72e168e6546f3e942f4258c361e19","src/wrapper.rs":"ff3ad72065a30cc32e9acb0614a30703c49c57b941a335c348b6439af684316b","tests/common/mod.rs":"f9088c2d7afafa64ff730b629272045b776bfafc2f5957508242da630635f2e1","tests/compiletest.rs":"022a8e400ef813d7ea1875b944549cee5125f6a995dc33e93b48cba3e1b57bd1","tests/drop/mod.rs":"08c3e553c1cc0d2dbd936fc45f4b5b1105057186affd6865e8d261e05f0f0646","tests/test_autotrait.rs":"981e792db353be2f14c7a1cabe43b5f1329c168cb7679077cc2be786a0920d48","tests/test_backtrace.rs":"0e50edbb33b6bd07ba89ff3db72fb7c688ba2a4371fccdbbb20309ab02948b6a","tests/test_boxed.rs":"6b26db0e2eb72afe9af7352ea820837aab90f8d486294616dd5dc34c1b94038c","tests/test_chain.rs":"d5e90e3eba58abc60d241d3aade39e0b8d4006d9a14f3cf015d3d925160b5812","tests/test_context.rs":"8409c53b328562c11e822bd6c3cd17e0d4d50b9bbb8fc3617333fd77303a6a33","tests/test_convert.rs":"7e7a8b4772a427a911014ac4d1083f9519000e786177f898808980dd9bdfde61","tests/test_downcast.rs":"797e69a72d125758c4c4897e5dc776d549d52cc9a6a633e0a33193f588a62b88","tests/test_ensure.rs":"729ba5fb75959a511a0d1cda1b0f7f88af94aa88f8d251505af1c988e74ef8c6","tests/test_ffi.rs":"d0cb4c1d6d9154090982dee72ae3ebe05a5981f976058c3250f1c9da5a45edef","tests/test_fmt.rs":"17572596f257aac9aa2ec4620e292ca6a954128b94772bb948399fab53832e70","tests/test_macros.rs":"11f05010bc9b16319884c1286444100e30cddc2ecd1ffe5e0fd3fee5ffb32683","tests/test_repr.rs":"dbb9b04ddbe1ab31eb5331ea69f05bb3a147299da2275a3d4dcc92947b5591b9","tests/test_source.rs":"b80723cf635a4f8c4df21891b34bfab9ed2b2aa407e7a2f826d24e334cd5f88e","tests/ui/chained-comparison.rs":"6504b03d95b5acc232a7f4defc9f343b2be6733bf475fa0992e8e6545b912bd4","tests/ui/chained-comparison.stderr":"7f1d0a8c251b0ede2d30b3087ec157fc660945c97a642c4a5acf5a14ec58de34","tests/ui/empty-ensure.rs":"ab5bf37c846a0d689f26ce9257a27228411ed64154f9c950f1602d88a355d94b","tests/ui/empty-ensure.stderr":"345102cbef47310f2f4066a669199873a627ca4f1fcb885505c6e17d1fc95e88","tests/ui/must-use.rs":"fb59860b43f673bf4a430a6036ba463e95028844d8dd4243cfe5ebc7f2be582f","tests/ui/must-use.stderr":"c2848c5f254b4c061eea6714d9baf709924aba06619eaf2a8b3aee1266b75f9e","tests/ui/no-impl.rs":"fab6cbf2f6ea510b86f567dfb3b7c31250a9fd71ae5d110dbb9188be569ec593","tests/ui/no-impl.stderr":"d827fcf94a6ddc64
f9ad3bfda8018276c2b91f5be4b833af81815e1b3a8a367c","tests/ui/temporary-value.rs":"4dcc96271b2403e6372cf4cfc813445e5ce4365fc6e156b6bc38274098499a70","tests/ui/temporary-value.stderr":"64e448b6759cf51d41b1360307a638452bbe53ffa706f93e4a503b712d7b89a8","tests/ui/wrong-interpolation.rs":"9c44d4674c2dccd27b9dedd03341346ec02d993b41793ee89b5755202e7e367e","tests/ui/wrong-interpolation.stderr":"301e60e2eb9401782c7dc0b3580613a4cb2aafd4cc8065734a630a62e1161aa5"},"package":"216261ddc8289130e551ddcd5ce8a064710c0d064a4d2895c67151c92b5443f6"} \ No newline at end of file
+{"files":{"Cargo.toml":"08309b6f7ceb1c1d34eef18cf248e38e752124505448f5d76505f0e0544c35eb","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"62fc2a591c37e781f76fe4d89bcd964eca4fbde246bc43cd4e2fe9db2d30ee70","build.rs":"88bf7100143c79c0af683da7f28deaac031c9b9b213a6426560dc089b0ba45aa","rust-toolchain.toml":"6bbb61302978c736b2da03e4fb40e3beab908f85d533ab46fd541e637b5f3e0f","src/backtrace.rs":"5b4103a7d24d6f438a64b8cc0fafe28d55fc0ca090368174ce44d64e3940badd","src/chain.rs":"6edefc5f3c7d69683095862e54e3bb56faba5b3387bf2eeaed429da090007a0a","src/context.rs":"e129c580b5c2f3017dd977e4122a93f9fbc04b451e930d68f390f51e6be3bdcb","src/ensure.rs":"498bc9c7fb8b93168ed12f532cb97df6ccdda9ce25371586d7f5b1b1c98a14bf","src/error.rs":"e45d4dcfe64b1823b42fbf9bb260e6437987e8c2d51f92434db9d808b36e700a","src/fmt.rs":"c2d4aad6ce20625a70a7c091e3087b6a2c19a4a87c7a12edb4c98978307245ea","src/kind.rs":"332854c5eb07d44447c356a2e7dc585634b0da1ffbbfa81269c369deaefbc247","src/lib.rs":"ee210d5c7af74242360aa4651f9f58cf10784c2d77256bfed8ca6c796b0b54c2","src/macros.rs":"dd35f2ec2a0a25e4504fb04bcd42f6d0963bc0035aaaefc412f5ee1d78945fe1","src/ptr.rs":"f4e28bc9feba1e84160ca9d185008a51b5d72e168e6546f3e942f4258c361e19","src/wrapper.rs":"ff3ad72065a30cc32e9acb0614a30703c49c57b941a335c348b6439af684316b","tests/common/mod.rs":"f9088c2d7afafa64ff730b629272045b776bfafc2f5957508242da630635f2e1","tests/compiletest.rs":"022a8e400ef813d7ea1875b944549cee5125f6a995dc33e93b48cba3e1b57bd1","tests/drop/mod.rs":"08c3e553c1cc0d2dbd936fc45f4b5b1105057186affd6865e8d261e05f0f0646","tests/test_autotrait.rs":"981e792db353be2f14c7a1cabe43b5f1329c168cb7679077cc2be786a0920d48","tests/test_backtrace.rs":"0e50edbb33b6bd07ba89ff3db72fb7c688ba2a4371fccdbbb20309ab02948b6a","tests/test_boxed.rs":"6b26db0e2eb72afe9af7352ea820837aab90f8d486294616dd5dc34c1b94038c","tests/test_chain.rs":"d5e90e3eba58abc60d241d3aade39e0b8d4006d9a14f3cf015d3d925160b5812","tests/test_context.rs":"8409c53b328562c11e822bd6c3cd17e0d4d50b9bbb8fc3617333fd77303a6a33","tests/test_convert.rs":"7e7a8b4772a427a911014ac4d1083f9519000e786177f898808980dd9bdfde61","tests/test_downcast.rs":"797e69a72d125758c4c4897e5dc776d549d52cc9a6a633e0a33193f588a62b88","tests/test_ensure.rs":"c68ea8e3db9e887ce3a7314676e7ff5080aac0a37bc12cae9c6652dead93bcfa","tests/test_ffi.rs":"d0cb4c1d6d9154090982dee72ae3ebe05a5981f976058c3250f1c9da5a45edef","tests/test_fmt.rs":"17572596f257aac9aa2ec4620e292ca6a954128b94772bb948399fab53832e70","tests/test_macros.rs":"11f05010bc9b16319884c1286444100e30cddc2ecd1ffe5e0fd3fee5ffb32683","tests/test_repr.rs":"dbb9b04ddbe1ab31eb5331ea69f05bb3a147299da2275a3d4dcc92947b5591b9","tests/test_source.rs":"b80723cf635a4f8c4df21891b34bfab9ed2b2aa407e7a2f826d24e334cd5f88e","tests/ui/chained-comparison.rs":"6504b03d95b5acc232a7f4defc9f343b2be6733bf475fa0992e8e6545b912bd4","tests/ui/chained-comparison.stderr":"7f1d0a8c251b0ede2d30b3087ec157fc660945c97a642c4a5acf5a14ec58de34","tests/ui/empty-ensure.rs":"ab5bf37c846a0d689f26ce9257a27228411ed64154f9c950f1602d88a355d94b","tests/ui/empty-ensure.stderr":"315782f5f4246290fe190e3767b22c3dcaffaabc19c5ace0373537d53e765278","tests/ui/must-use.rs":"fb59860b43f673bf4a430a6036ba463e95028844d8dd4243cfe5ebc7f2be582f","tests/ui/must-use.stderr":"c2848c5f254b4c061eea6714d9baf709924aba06619eaf2a8b3aee1266b75f9e","tests/ui/no-impl.rs":"fab6cbf2f6ea510b86f567dfb3b7c31250a9fd71ae5d110dbb9188be569ec593","tests/ui/no-impl.stderr":"04415aeaa14995f4
7f06f35fb1f6971d332d2110aabca920c30ab0803d6a0a5e","tests/ui/temporary-value.rs":"4dcc96271b2403e6372cf4cfc813445e5ce4365fc6e156b6bc38274098499a70","tests/ui/temporary-value.stderr":"171f6c1c962503855480696e5d39e68946ec2a027b61a6f36ca1ad1b40265c5d","tests/ui/wrong-interpolation.rs":"9c44d4674c2dccd27b9dedd03341346ec02d993b41793ee89b5755202e7e367e","tests/ui/wrong-interpolation.stderr":"301e60e2eb9401782c7dc0b3580613a4cb2aafd4cc8065734a630a62e1161aa5"},"package":"2cb2f989d18dd141ab8ae82f64d1a8cdd37e0840f73a406896cf5e99502fab61"} \ No newline at end of file
diff --git a/vendor/anyhow/Cargo.toml b/vendor/anyhow/Cargo.toml
index b3d57a30f..6fca21780 100644
--- a/vendor/anyhow/Cargo.toml
+++ b/vendor/anyhow/Cargo.toml
@@ -11,9 +11,9 @@
[package]
edition = "2018"
-rust-version = "1.38"
+rust-version = "1.39"
name = "anyhow"
-version = "1.0.66"
+version = "1.0.68"
authors = ["David Tolnay <dtolnay@gmail.com>"]
description = "Flexible concrete Error type built on std::error::Error"
documentation = "https://docs.rs/anyhow"
@@ -36,6 +36,9 @@ rustdoc-args = [
"doc_cfg",
]
+[lib]
+doc-scrape-examples = false
+
[dependencies.backtrace]
version = "0.3.51"
optional = true
diff --git a/vendor/anyhow/README.md b/vendor/anyhow/README.md
index 77852792e..6380c1c04 100644
--- a/vendor/anyhow/README.md
+++ b/vendor/anyhow/README.md
@@ -4,7 +4,7 @@ Anyhow&ensp;¯\\\_(°ペ)\_/¯
[<img alt="github" src="https://img.shields.io/badge/github-dtolnay/anyhow-8da0cb?style=for-the-badge&labelColor=555555&logo=github" height="20">](https://github.com/dtolnay/anyhow)
[<img alt="crates.io" src="https://img.shields.io/crates/v/anyhow.svg?style=for-the-badge&color=fc8d62&logo=rust" height="20">](https://crates.io/crates/anyhow)
[<img alt="docs.rs" src="https://img.shields.io/badge/docs.rs-anyhow-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs" height="20">](https://docs.rs/anyhow)
-[<img alt="build status" src="https://img.shields.io/github/workflow/status/dtolnay/anyhow/CI/master?style=for-the-badge" height="20">](https://github.com/dtolnay/anyhow/actions?query=branch%3Amaster)
+[<img alt="build status" src="https://img.shields.io/github/actions/workflow/status/dtolnay/anyhow/ci.yml?branch=master&style=for-the-badge" height="20">](https://github.com/dtolnay/anyhow/actions?query=branch%3Amaster)
This library provides [`anyhow::Error`][Error], a trait object based error type
for easy idiomatic error handling in Rust applications.
@@ -16,7 +16,7 @@ for easy idiomatic error handling in Rust applications.
anyhow = "1.0"
```
-*Compiler support: requires rustc 1.38+*
+*Compiler support: requires rustc 1.39+*
<br>
diff --git a/vendor/anyhow/src/context.rs b/vendor/anyhow/src/context.rs
index 238473e5c..9df86937b 100644
--- a/vendor/anyhow/src/context.rs
+++ b/vendor/anyhow/src/context.rs
@@ -4,7 +4,7 @@ use core::convert::Infallible;
use core::fmt::{self, Debug, Display, Write};
#[cfg(backtrace)]
-use std::any::Demand;
+use std::any::{Demand, Provider};
mod ext {
use super::*;
@@ -92,7 +92,12 @@ impl<T> Context<T, Infallible> for Option<T> {
where
C: Display + Send + Sync + 'static,
{
- self.ok_or_else(|| Error::from_display(context, backtrace!()))
+ // Not using ok_or_else to save 2 useless frames off the captured
+ // backtrace.
+ match self {
+ Some(ok) => Ok(ok),
+ None => Err(Error::from_display(context, backtrace!())),
+ }
}
fn with_context<C, F>(self, context: F) -> Result<T, Error>
@@ -100,7 +105,10 @@ impl<T> Context<T, Infallible> for Option<T> {
C: Display + Send + Sync + 'static,
F: FnOnce() -> C,
{
- self.ok_or_else(|| Error::from_display(context(), backtrace!()))
+ match self {
+ Some(ok) => Ok(ok),
+ None => Err(Error::from_display(context(), backtrace!())),
+ }
}
}
@@ -137,7 +145,7 @@ where
#[cfg(backtrace)]
fn provide<'a>(&'a self, demand: &mut Demand<'a>) {
- self.error.provide(demand);
+ StdError::provide(&self.error, demand);
}
}
@@ -151,8 +159,7 @@ where
#[cfg(backtrace)]
fn provide<'a>(&'a self, demand: &mut Demand<'a>) {
- demand.provide_ref(self.error.backtrace());
- self.error.provide(demand);
+ Provider::provide(&self.error, demand);
}
}
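
A hedged aside on the `Context` hunks above: switching from `ok_or_else` to an explicit `match` means the error (and any captured backtrace) is constructed directly inside `context`/`with_context` rather than inside a closure, which is where the "2 useless frames" mentioned in the comment come from. A minimal stand-alone sketch of the pattern, using a plain `String` in place of `anyhow::Error`:

```rust
use std::fmt::Display;

// Sketch of converting Option to Result without `ok_or_else`: the error is
// built in this frame rather than in a closure, so a backtrace captured at
// construction time carries fewer frames. Not the vendored implementation.
fn context_sketch<T, C: Display>(opt: Option<T>, context: C) -> Result<T, String> {
    match opt {
        Some(ok) => Ok(ok),
        // anyhow builds `Error::from_display(context, backtrace!())` here.
        None => Err(format!("missing value: {context}")),
    }
}

fn main() {
    assert_eq!(context_sketch(Some(1), "id"), Ok(1));
    assert!(context_sketch::<i32, _>(None, "id").is_err());
}
```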
diff --git a/vendor/anyhow/src/lib.rs b/vendor/anyhow/src/lib.rs
index 583ab424e..3510d195e 100644
--- a/vendor/anyhow/src/lib.rs
+++ b/vendor/anyhow/src/lib.rs
@@ -210,7 +210,7 @@
//! will require an explicit `.map_err(Error::msg)` when working with a
//! non-Anyhow error type inside a function that returns Anyhow's error type.
-#![doc(html_root_url = "https://docs.rs/anyhow/1.0.66")]
+#![doc(html_root_url = "https://docs.rs/anyhow/1.0.68")]
#![cfg_attr(backtrace, feature(error_generic_member_access, provide_any))]
#![cfg_attr(doc_cfg, feature(doc_cfg))]
#![cfg_attr(not(feature = "std"), no_std)]
diff --git a/vendor/anyhow/tests/test_ensure.rs b/vendor/anyhow/tests/test_ensure.rs
index 6984c7d11..de867f7fe 100644
--- a/vendor/anyhow/tests/test_ensure.rs
+++ b/vendor/anyhow/tests/test_ensure.rs
@@ -5,7 +5,6 @@
clippy::ifs_same_cond,
clippy::items_after_statements,
clippy::let_and_return,
- clippy::let_underscore_drop,
clippy::match_bool,
clippy::never_loop,
clippy::overly_complex_bool_expr,
diff --git a/vendor/anyhow/tests/ui/empty-ensure.stderr b/vendor/anyhow/tests/ui/empty-ensure.stderr
index 91e0a9803..bf0229a2b 100644
--- a/vendor/anyhow/tests/ui/empty-ensure.stderr
+++ b/vendor/anyhow/tests/ui/empty-ensure.stderr
@@ -4,4 +4,9 @@ error: unexpected end of macro invocation
4 | ensure!();
| ^^^^^^^^^ missing tokens in macro arguments
|
+note: while trying to match meta-variable `$cond:expr`
+ --> src/ensure.rs
+ |
+ | ($cond:expr $(,)?) => {
+ | ^^^^^^^^^^
= note: this error originates in the macro `$crate::__parse_ensure` which comes from the expansion of the macro `ensure` (in Nightly builds, run with -Z macro-backtrace for more info)
diff --git a/vendor/anyhow/tests/ui/no-impl.stderr b/vendor/anyhow/tests/ui/no-impl.stderr
index dced9987e..1ddf76863 100644
--- a/vendor/anyhow/tests/ui/no-impl.stderr
+++ b/vendor/anyhow/tests/ui/no-impl.stderr
@@ -18,7 +18,7 @@ error[E0599]: the method `anyhow_kind` exists for reference `&Error`, but its tr
which is required by `&Error: anyhow::kind::AdhocKind`
`&Error: Into<anyhow::Error>`
which is required by `&Error: anyhow::kind::TraitKind`
-note: the following traits must be implemented
+note: the traits `Into` and `std::fmt::Display` must be implemented
--> $RUST/core/src/fmt/mod.rs
|
| pub trait Display {
diff --git a/vendor/anyhow/tests/ui/temporary-value.stderr b/vendor/anyhow/tests/ui/temporary-value.stderr
index 4e4115fc3..dc27c4981 100644
--- a/vendor/anyhow/tests/ui/temporary-value.stderr
+++ b/vendor/anyhow/tests/ui/temporary-value.stderr
@@ -4,6 +4,6 @@ error[E0716]: temporary value dropped while borrowed
4 | let _ = anyhow!(&String::new());
| ---------^^^^^^^^^^^^^-
| | |
- | | creates a temporary which is freed while still in use
+ | | creates a temporary value which is freed while still in use
| temporary value is freed at the end of this statement
| argument requires that borrow lasts for `'static`
diff --git a/vendor/backtrace/.cargo-checksum.json b/vendor/backtrace/.cargo-checksum.json
index 434618119..e94768666 100644
--- a/vendor/backtrace/.cargo-checksum.json
+++ b/vendor/backtrace/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.lock":"b0096194daaeffd2e4c7577e0549b74f3d40183ef2abc83d29b9fe2faca882a7","Cargo.toml":"94e809a91c1cae05980cc022d6449db2e9029f18aa33c8151d7fb8a738f443a0","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"6007ea91612793f8c77d499d2065acd2255fc5f3c3268fd8bd1ae5f7bb40d6de","benches/benchmarks.rs":"029b78bb79052ec940eecfd18067b743925189202fc16015d3c4c25b05eb6d67","build.rs":"8d5e860da109f86c67596b10b5613ff6d19f9d24c2970f491a55261fb1973692","ci/android-ndk.sh":"89fafa41d08ff477f949bfc163d04d1eb34fdee370f7a695cfba4ef34c164a55","ci/android-sdk.sh":"69a953f70f32064d1d2a57c7082a50336b90a12d10c75e5416dbb1d6d718016c","ci/debuglink-docker.sh":"3a16131df8c69fef37331cb6f01a6623d169177474f475159d05bab61df077a9","ci/debuglink.sh":"164a961b930de8c9aedf45a11076c3d41081846a8e6a9566ba2b6ad615179e0b","ci/docker/aarch64-linux-android/Dockerfile":"1058f2ee9cf74b4c51a489e62544bea94c6cd537ad5c1b056f3e4b262f7e09f2","ci/docker/aarch64-unknown-linux-gnu/Dockerfile":"a7b7aae0d8e2f826cf1c6c7c3160f8e5e9a30478b83c394b6575ce15b0ff0802","ci/docker/arm-linux-androideabi/Dockerfile":"12f8c62f0750d3581292b23309f5aef15492c946a73e55df13bc345de5ca576e","ci/docker/arm-unknown-linux-gnueabihf/Dockerfile":"5156382ff639b11801c1bd7ddc6e03e8834505a74ecf7160e92182603cd5d96f","ci/docker/armv7-linux-androideabi/Dockerfile":"7c582c2a4b162b147deada3194a30185ccb7a01215f97990cd1a5a3460c30fb9","ci/docker/armv7-unknown-linux-gnueabihf/Dockerfile":"4aaceef14ba700ea3719fe30fcb46f1bb154a47aa52cdb64fa6ed7eff96d6c85","ci/docker/i586-unknown-linux-gnu/Dockerfile":"0816c89b79a74be7ccfc34e95cd718ce29a8698a2ab56903b4b0712470f5c8bb","ci/docker/i686-linux-android/Dockerfile":"18957e8dad4c6d9c8ad561f846e20f6e2186ff7a8421f2a0089793b510f66fe2","ci/docker/i686-unknown-linux-gnu/Dockerfile":"0816c89b79a74be7ccfc34e95cd718ce29a8698a2ab56903b4b0712470f5c8bb","ci/docker/powerpc64-unknown-linux-gnu/Dockerfile":"270de99179c925e6284a8283fdb4e40a8c813a569b24930d6cfe79a4c470ab61","ci/docker/s390x-unknown-linux-gnu/Dockerfile":"4aa8ca641efd2f1937ef669eda2f3e357b2fb926911722b3afc6cf25ce4bcac2","ci/docker/x86_64-linux-android/Dockerfile":"e6d6fb37041a9d6fc6771be1ae8d1eaa506a1dc8796170c1ffcc3d6dd043bed2","ci/docker/x86_64-pc-windows-gnu/Dockerfile":"654988c5c008610f90d5159a8dd0ab6fd491e9c0d16ad65b03ef53f694a5400c","ci/docker/x86_64-unknown-linux-gnu/Dockerfile":"9f89f080551fff6678b1efcc0925fc8c16316f69cdd150e89f9e95cdab583510","ci/docker/x86_64-unknown-linux-musl/Dockerfile":"4db3cb7d315588f363eb9f377bf1c27d8e8886c07b6c0d0c5cf7ee91114a718b","ci/run-docker.sh":"70760696a608b0d89eb3dcc4b08f176d709dd9f98e50297f2b7e0bb9b0f3b458","ci/run.sh":"0bb5c8256019779f3e1db20fcc2c01416ffd4679428f3e395ef5f3e55d2d642c","ci/runtest-android.rs":"be2e49bb296b92a8e3643a6c0a70917fe7b130fa43b71a29c4e7514f45c00e7e","examples/backtrace.rs":"5da0c95ccebfaffbbe7b5a92b0e488017dc375cbb5b8fb2b7712dd65b2dfb2ba","examples/raw.rs":"575ec6037f597ba7ab0eaf9dd699fadfabef918ba2affea7dc20cdbde55de5ec","src/android-api.c":"b75f16de578451464f49b83dc817b76aa9a0be0d86ea71d1659cc78f99e94fbd","src/backtrace/dbghelp.rs":"ea2f175d6c62259d86e7e9bb04328e03657d7259d4459aab70734f1cf1cd9d72","src/backtrace/libunwind.rs":"65373ce7bd87abc411b4307bd41679f9176987170b3b627abe0f0bb1625ff685","src/backtrace/miri.rs":"630faf9919d3ced8d75095cea30fde6c1ed7f0ad135dee8c81a3d7614b55cc11","src/backtrace/mod.rs":"b301e6b7da4f3811c5255c2f1fdb83f4ab97acfa1647d998d8455614ae90ddaf","src/backtrace/no
op.rs":"a8550b70b3c83f6852a1dba83cf97b1325238b5aa3ce7b35437e9382cdafd924","src/capture.rs":"8701657803f04ea9e0b9dd6a4ca619761edb8a42cfd2f7c9c9e4ee31d9357159","src/dbghelp.rs":"40891588891fe48c16263374a36092ac3e67ddf4f756b880839f31dfcb80b7c4","src/lib.rs":"03f2a0f2524cb2078e2f28959c3eb1625ff400d3e23eeb7d3b73d86c012979b2","src/print.rs":"766affbd9d2242a81d85067413dd95b807425a6df2fffb17a4eacc5f606d4200","src/print/fuchsia.rs":"de45f55032e05fdc1fd55224910158f8c64a705494103a29c7e2680536e76e40","src/symbolize/dbghelp.rs":"58aeda764a27702e0abb3af62bae8a162d8b6cb8c80ffd141ee794d81a8ace15","src/symbolize/gimli.rs":"062afb232fdd1550ae613de79385365b03aa50a721794666d7856401b7f44e5a","src/symbolize/gimli/coff.rs":"d3f4a274bd3b2ed81d114a9326630c019f682ca91aac6ec31e660f420c35b064","src/symbolize/gimli/elf.rs":"3445558fc1feed60165af55e07847c59c3eca5e4031f50b04329fc5be36eb057","src/symbolize/gimli/libs_dl_iterate_phdr.rs":"8f7dabbdff97e5a24f0d5b3670469840666a91e9971ac352abeb4e96d98a2a6c","src/symbolize/gimli/libs_haiku.rs":"0a0d4b37145e898f7068cadacccf362d8216e463e7026af2ce38d75ebfd74bea","src/symbolize/gimli/libs_illumos.rs":"523e96272b46bdaab2abb0dd0201cb8032bf86558cbed986a20d7e2cc02fa8be","src/symbolize/gimli/libs_libnx.rs":"4116eceadb2d9916d4f5602712eacec647f185d4c53c723aced8de5fc471b14d","src/symbolize/gimli/libs_macos.rs":"a0d6edf8f3af23523d1a63a12ef6a6dd9ad1057b2cb20cc405da0544daba5389","src/symbolize/gimli/libs_windows.rs":"6459f8610ca1a0fd7456539ec604f5276c94b3d0d7331357eaed338e49220a02","src/symbolize/gimli/macho.rs":"a725d85566438499ecfb0ac06193c3153a2fa1b533f360d55c63dea386d1920d","src/symbolize/gimli/mmap_fake.rs":"9564fcf47000e70d521b31518e205c8e6ee09b7410fb1eb1e452721757ff54ba","src/symbolize/gimli/mmap_unix.rs":"8159a4a807bd5692412ba1a280bb36ab942c06e904f37a92e2545f0b4211308a","src/symbolize/gimli/mmap_windows.rs":"1ca715317c1054968d92350438b293f800bae2174f395b20bc43a633d757fe8f","src/symbolize/gimli/stash.rs":"67d01016b17ca4c0adbb0827da9b83fde5f79ccc89db3e4fd769ab03c1248d8e","src/symbolize/miri.rs":"f5201cc8a7de24ad3424d2472cb0af59cd28563d09cc0c21e998f4cee4367ade","src/symbolize/mod.rs":"a7177603810aca1cd9cd4a59027a1dd2c792dc9d345435b5dc866eb7c8b66baf","src/symbolize/noop.rs":"5d4432079b8ae2b9382945a57ae43df57bb4b7ed2e5956d4167e051a44567388","src/types.rs":"f43c94b99d57ca66a5cfe939a46016c95b2d69d82695fb52480f7a3e5b344fd9","src/windows.rs":"46f02837a2e6b404a035993f2e1f03cdfc1a5a872c1feb92926566a31138d273","tests/accuracy/auxiliary.rs":"71d2238da401042e007ef5ee20336d6834724bae96d93c8c52d11a5a332d7d34","tests/accuracy/main.rs":"f8e42aeeb50b35fca380db2e9fe52820cc0bc3133167289c5710e3290701e78e","tests/concurrent-panics.rs":"7696676e46a1c50a3a88446fdc59c0dedfb81ded24d77ec725f7cc101a5d8fe6","tests/long_fn_name.rs":"ebef58e34543ed4d47048faa9b6525f68fc71e12255af734523a513c5d4baa6f","tests/skip_inner_frames.rs":"7560fe59e83e4b234789c448da5504d3dd9065f9ad1b2615f12606f9112df4e0","tests/smoke.rs":"33014495f9158aea2d0ee2ee3335ffe82105c7ed894f96eaf7d23845a60f7439"},"package":"cab84319d616cfb654d03394f38ab7e6f0919e181b1b57e1fd15e7fb4077d9a7"} \ No newline at end of file
+{"files":{"Cargo.lock":"7d3b2926febbfe2d3ae525bf139703fa9d614cad0836d0e88a176352787a8151","Cargo.toml":"01600b69a4b1967a6f307a698f576a86308d39c4e571ff398235a88204b7b220","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"6007ea91612793f8c77d499d2065acd2255fc5f3c3268fd8bd1ae5f7bb40d6de","benches/benchmarks.rs":"029b78bb79052ec940eecfd18067b743925189202fc16015d3c4c25b05eb6d67","build.rs":"8d5e860da109f86c67596b10b5613ff6d19f9d24c2970f491a55261fb1973692","ci/android-ndk.sh":"89fafa41d08ff477f949bfc163d04d1eb34fdee370f7a695cfba4ef34c164a55","ci/android-sdk.sh":"69a953f70f32064d1d2a57c7082a50336b90a12d10c75e5416dbb1d6d718016c","ci/debuglink-docker.sh":"3a16131df8c69fef37331cb6f01a6623d169177474f475159d05bab61df077a9","ci/debuglink.sh":"164a961b930de8c9aedf45a11076c3d41081846a8e6a9566ba2b6ad615179e0b","ci/docker/aarch64-linux-android/Dockerfile":"1058f2ee9cf74b4c51a489e62544bea94c6cd537ad5c1b056f3e4b262f7e09f2","ci/docker/aarch64-unknown-linux-gnu/Dockerfile":"a7b7aae0d8e2f826cf1c6c7c3160f8e5e9a30478b83c394b6575ce15b0ff0802","ci/docker/arm-linux-androideabi/Dockerfile":"12f8c62f0750d3581292b23309f5aef15492c946a73e55df13bc345de5ca576e","ci/docker/arm-unknown-linux-gnueabihf/Dockerfile":"5156382ff639b11801c1bd7ddc6e03e8834505a74ecf7160e92182603cd5d96f","ci/docker/armv7-linux-androideabi/Dockerfile":"7c582c2a4b162b147deada3194a30185ccb7a01215f97990cd1a5a3460c30fb9","ci/docker/armv7-unknown-linux-gnueabihf/Dockerfile":"4aaceef14ba700ea3719fe30fcb46f1bb154a47aa52cdb64fa6ed7eff96d6c85","ci/docker/i586-unknown-linux-gnu/Dockerfile":"0816c89b79a74be7ccfc34e95cd718ce29a8698a2ab56903b4b0712470f5c8bb","ci/docker/i686-linux-android/Dockerfile":"18957e8dad4c6d9c8ad561f846e20f6e2186ff7a8421f2a0089793b510f66fe2","ci/docker/i686-unknown-linux-gnu/Dockerfile":"0816c89b79a74be7ccfc34e95cd718ce29a8698a2ab56903b4b0712470f5c8bb","ci/docker/powerpc64-unknown-linux-gnu/Dockerfile":"270de99179c925e6284a8283fdb4e40a8c813a569b24930d6cfe79a4c470ab61","ci/docker/s390x-unknown-linux-gnu/Dockerfile":"4aa8ca641efd2f1937ef669eda2f3e357b2fb926911722b3afc6cf25ce4bcac2","ci/docker/x86_64-linux-android/Dockerfile":"e6d6fb37041a9d6fc6771be1ae8d1eaa506a1dc8796170c1ffcc3d6dd043bed2","ci/docker/x86_64-pc-windows-gnu/Dockerfile":"654988c5c008610f90d5159a8dd0ab6fd491e9c0d16ad65b03ef53f694a5400c","ci/docker/x86_64-unknown-linux-gnu/Dockerfile":"9f89f080551fff6678b1efcc0925fc8c16316f69cdd150e89f9e95cdab583510","ci/docker/x86_64-unknown-linux-musl/Dockerfile":"4db3cb7d315588f363eb9f377bf1c27d8e8886c07b6c0d0c5cf7ee91114a718b","ci/run-docker.sh":"70760696a608b0d89eb3dcc4b08f176d709dd9f98e50297f2b7e0bb9b0f3b458","ci/run.sh":"0bb5c8256019779f3e1db20fcc2c01416ffd4679428f3e395ef5f3e55d2d642c","ci/runtest-android.rs":"be2e49bb296b92a8e3643a6c0a70917fe7b130fa43b71a29c4e7514f45c00e7e","examples/backtrace.rs":"5da0c95ccebfaffbbe7b5a92b0e488017dc375cbb5b8fb2b7712dd65b2dfb2ba","examples/raw.rs":"575ec6037f597ba7ab0eaf9dd699fadfabef918ba2affea7dc20cdbde55de5ec","src/android-api.c":"b75f16de578451464f49b83dc817b76aa9a0be0d86ea71d1659cc78f99e94fbd","src/backtrace/dbghelp.rs":"ea2f175d6c62259d86e7e9bb04328e03657d7259d4459aab70734f1cf1cd9d72","src/backtrace/libunwind.rs":"65373ce7bd87abc411b4307bd41679f9176987170b3b627abe0f0bb1625ff685","src/backtrace/miri.rs":"c0dcb1e430eea92fcbad3e293124010f39c1f8ff1bbbf1a0275c53b05880cd5f","src/backtrace/mod.rs":"b301e6b7da4f3811c5255c2f1fdb83f4ab97acfa1647d998d8455614ae90ddaf","src/backtrace/no
op.rs":"a8550b70b3c83f6852a1dba83cf97b1325238b5aa3ce7b35437e9382cdafd924","src/capture.rs":"8701657803f04ea9e0b9dd6a4ca619761edb8a42cfd2f7c9c9e4ee31d9357159","src/dbghelp.rs":"40891588891fe48c16263374a36092ac3e67ddf4f756b880839f31dfcb80b7c4","src/lib.rs":"03f2a0f2524cb2078e2f28959c3eb1625ff400d3e23eeb7d3b73d86c012979b2","src/print.rs":"7bae8abae6947d6d0e57ff9c834d1d4d0b528a6d7e6424e7ad9a5ac557cf3e08","src/print/fuchsia.rs":"de45f55032e05fdc1fd55224910158f8c64a705494103a29c7e2680536e76e40","src/symbolize/dbghelp.rs":"58aeda764a27702e0abb3af62bae8a162d8b6cb8c80ffd141ee794d81a8ace15","src/symbolize/gimli.rs":"f43374c89f12095a99431de0f758a3414dfd2f1e84bfa35897ce8afdaf006041","src/symbolize/gimli/coff.rs":"d3f4a274bd3b2ed81d114a9326630c019f682ca91aac6ec31e660f420c35b064","src/symbolize/gimli/elf.rs":"3445558fc1feed60165af55e07847c59c3eca5e4031f50b04329fc5be36eb057","src/symbolize/gimli/libs_dl_iterate_phdr.rs":"bc64242857dd82288d6e7d6b183e651d05ab7bc4f037f19b9e42719f04fc2c83","src/symbolize/gimli/libs_haiku.rs":"0a0d4b37145e898f7068cadacccf362d8216e463e7026af2ce38d75ebfd74bea","src/symbolize/gimli/libs_illumos.rs":"523e96272b46bdaab2abb0dd0201cb8032bf86558cbed986a20d7e2cc02fa8be","src/symbolize/gimli/libs_libnx.rs":"4116eceadb2d9916d4f5602712eacec647f185d4c53c723aced8de5fc471b14d","src/symbolize/gimli/libs_macos.rs":"c24cb480ae029b350325873ac4358104e5943ad61b62eb22bb6b65b2f05bbd29","src/symbolize/gimli/libs_windows.rs":"6459f8610ca1a0fd7456539ec604f5276c94b3d0d7331357eaed338e49220a02","src/symbolize/gimli/macho.rs":"47a970c23443f322e79bf54cf7f11d9990516b17fbad7c893f20bd56a22083af","src/symbolize/gimli/mmap_fake.rs":"9564fcf47000e70d521b31518e205c8e6ee09b7410fb1eb1e452721757ff54ba","src/symbolize/gimli/mmap_unix.rs":"8159a4a807bd5692412ba1a280bb36ab942c06e904f37a92e2545f0b4211308a","src/symbolize/gimli/mmap_windows.rs":"1ca715317c1054968d92350438b293f800bae2174f395b20bc43a633d757fe8f","src/symbolize/gimli/parse_running_mmaps_unix.rs":"1e25b842b979b745bf3e22e8ce6f13b242f2fb52a75d8e865bad9538e44b76a3","src/symbolize/gimli/stash.rs":"67d01016b17ca4c0adbb0827da9b83fde5f79ccc89db3e4fd769ab03c1248d8e","src/symbolize/miri.rs":"f5201cc8a7de24ad3424d2472cb0af59cd28563d09cc0c21e998f4cee4367ade","src/symbolize/mod.rs":"a7177603810aca1cd9cd4a59027a1dd2c792dc9d345435b5dc866eb7c8b66baf","src/symbolize/noop.rs":"5d4432079b8ae2b9382945a57ae43df57bb4b7ed2e5956d4167e051a44567388","src/types.rs":"f43c94b99d57ca66a5cfe939a46016c95b2d69d82695fb52480f7a3e5b344fd9","src/windows.rs":"b0bbbf4088021fd646024ad01de1a60bf12bf62b6b744f0f5952681f77cd49e6","tests/accuracy/auxiliary.rs":"71d2238da401042e007ef5ee20336d6834724bae96d93c8c52d11a5a332d7d34","tests/accuracy/main.rs":"f8e42aeeb50b35fca380db2e9fe52820cc0bc3133167289c5710e3290701e78e","tests/common/mod.rs":"733101288a48cf94d5a87a1957724deaf2650c3e4e8aa0190a4a7db62aa90d01","tests/concurrent-panics.rs":"b60279ad5c4fb9b2754807f35179cbc8fbd7acbe6e92ac6d0f416ae75db38705","tests/current-exe-mismatch.rs":"b44a885a655f761eb15d4a47bdfff4332f9a1f88105b7aed9ea0b052e385615f","tests/long_fn_name.rs":"ebef58e34543ed4d47048faa9b6525f68fc71e12255af734523a513c5d4baa6f","tests/skip_inner_frames.rs":"6c03cd0ad9facf0aa81e59cf970504785b6ada9993a2dfc2aea0b18b79419aeb","tests/smoke.rs":"33014495f9158aea2d0ee2ee3335ffe82105c7ed894f96eaf7d23845a60f7439"},"package":"233d376d6d185f2a3093e58f283f60f880315b6c60075b01f36b3b85154564ca"} \ No newline at end of file
diff --git a/vendor/backtrace/Cargo.lock b/vendor/backtrace/Cargo.lock
index 6eee92255..14da4f7d5 100644
--- a/vendor/backtrace/Cargo.lock
+++ b/vendor/backtrace/Cargo.lock
@@ -4,9 +4,9 @@ version = 3
[[package]]
name = "addr2line"
-version = "0.17.0"
+version = "0.19.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b9ecd88a8c8378ca913a680cd98f0f13ac67383d35993f86c90a70e3f137816b"
+checksum = "a76fd60b23679b7d19bd066031410fb7e458ccc5e958eb5c325888ce4baedc97"
dependencies = [
"gimli",
]
@@ -19,7 +19,7 @@ checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
[[package]]
name = "backtrace"
-version = "0.3.66"
+version = "0.3.67"
dependencies = [
"addr2line",
"cc",
@@ -37,9 +37,9 @@ dependencies = [
[[package]]
name = "cc"
-version = "1.0.73"
+version = "1.0.77"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2fff2a6927b3bb87f9595d67196a70493f627687a71d87a0d692242c33f58c11"
+checksum = "e9f73505338f7d905b19d18738976aae232eb46b8efc15554ffc56deb5d9ebe4"
[[package]]
name = "cfg-if"
@@ -49,30 +49,30 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "cpp_demangle"
-version = "0.3.5"
+version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eeaa953eaad386a53111e47172c2fedba671e5684c8dd601a5f474f4f118710f"
+checksum = "b446fd40bcc17eddd6a4a78f24315eb90afdb3334999ddfd4909985c47722442"
dependencies = [
"cfg-if",
]
[[package]]
name = "gimli"
-version = "0.26.1"
+version = "0.27.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "78cc372d058dcf6d5ecd98510e7fbc9e5aec4d21de70f65fea8fecebcd881bd4"
+checksum = "dec7af912d60cdbd3677c1af9352ebae6fb8394d165568a2234df0fa00f87793"
[[package]]
name = "libc"
-version = "0.2.126"
+version = "0.2.138"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "349d5a591cd28b49e1d1037471617a32ddcda5731b99419008085f72d5a53836"
+checksum = "db6d7e329c562c5dfab7a46a2afabc8b987ab9a4834c9d1ca04dc54c1546cef8"
[[package]]
name = "libloading"
-version = "0.7.3"
+version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "efbc0f03f9a775e9f6aed295c6a1ba2253c5757a9e03d55c6caa46a681abcddd"
+checksum = "b67380fd3b2fbe7527a606e18729d21c6f3951633d0500574c4dc22d2d638b9f"
dependencies = [
"cfg-if",
"winapi",
@@ -86,36 +86,36 @@ checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
[[package]]
name = "miniz_oxide"
-version = "0.5.3"
+version = "0.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6f5c75688da582b8ffc1f1799e9db273f32133c49e048f614d22ec3256773ccc"
+checksum = "b275950c28b37e794e8c55d88aeb5e139d0ce23fdbbeda68f8d7174abdf9e8fa"
dependencies = [
"adler",
]
[[package]]
name = "object"
-version = "0.29.0"
+version = "0.30.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "21158b2c33aa6d4561f1c0a6ea283ca92bc54802a93b263e910746d679a7eb53"
+checksum = "239da7f290cfa979f43f85a8efeee9a8a76d0827c356d37f9d3d7254d6b537fb"
dependencies = [
"memchr",
]
[[package]]
name = "proc-macro2"
-version = "1.0.40"
+version = "1.0.47"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dd96a1e8ed2596c337f8eae5f24924ec83f5ad5ab21ea8e455d3566c69fbcaf7"
+checksum = "5ea3d908b0e36316caf9e9e2c4625cdde190a7e6f440d794667ed17a1855e725"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
-version = "1.0.20"
+version = "1.0.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3bcdf212e9776fbcb2d23ab029360416bb1706b1aea2d1a5ba002727cbcab804"
+checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179"
dependencies = [
"proc-macro2",
]
@@ -134,18 +134,18 @@ checksum = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda"
[[package]]
name = "serde"
-version = "1.0.138"
+version = "1.0.150"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1578c6245786b9d168c5447eeacfb96856573ca56c9d68fdcf394be134882a47"
+checksum = "e326c9ec8042f1b5da33252c8a37e9ffbd2c9bef0155215b6e6c80c790e05f91"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
-version = "1.0.138"
+version = "1.0.150"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "023e9b1467aef8a10fb88f25611870ada9800ef7e22afce356bb0d2387b6f27c"
+checksum = "42a3df25b0713732468deadad63ab9da1f1fd75a48a15024b50363f128db627e"
dependencies = [
"proc-macro2",
"quote",
@@ -154,9 +154,9 @@ dependencies = [
[[package]]
name = "syn"
-version = "1.0.98"
+version = "1.0.105"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c50aef8a904de4c23c788f104b7dddc7d6f79c647c7c8ce4cc8f73eb0ca773dd"
+checksum = "60b9b43d45702de4c839cb9b51d9f529c5dd26a4aff255b42b1ebc03e88ee908"
dependencies = [
"proc-macro2",
"quote",
@@ -165,9 +165,9 @@ dependencies = [
[[package]]
name = "unicode-ident"
-version = "1.0.1"
+version = "1.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5bd2fe26506023ed7b5e1e315add59d6f584c621d037f9368fea9cfb988f368c"
+checksum = "6ceab39d59e4c9499d4e5a8ee0e2735b891bb7308ac83dfb4e80cad195c9f6f3"
[[package]]
name = "winapi"
diff --git a/vendor/backtrace/Cargo.toml b/vendor/backtrace/Cargo.toml
index 641181528..96f41466b 100644
--- a/vendor/backtrace/Cargo.toml
+++ b/vendor/backtrace/Cargo.toml
@@ -12,7 +12,7 @@
[package]
edition = "2018"
name = "backtrace"
-version = "0.3.66"
+version = "0.3.67"
authors = ["The Rust Project Developers"]
build = "build.rs"
autoexamples = true
@@ -23,7 +23,7 @@ A library to acquire a stack trace (backtrace) at runtime in a Rust program.
homepage = "https://github.com/rust-lang/backtrace-rs"
documentation = "https://docs.rs/backtrace"
readme = "README.md"
-license = "MIT/Apache-2.0"
+license = "MIT OR Apache-2.0"
repository = "https://github.com/rust-lang/backtrace-rs"
[[example]]
@@ -57,15 +57,21 @@ name = "concurrent-panics"
harness = false
required-features = ["std"]
+[[test]]
+name = "current-exe-mismatch"
+harness = false
+required-features = ["std"]
+
[dependencies.addr2line]
-version = "0.17.0"
+version = "0.19.0"
default-features = false
[dependencies.cfg-if]
version = "1.0"
[dependencies.cpp_demangle]
-version = "0.3.0"
+version = "0.4.0"
+features = ["alloc"]
optional = true
default-features = false
@@ -74,11 +80,11 @@ version = "0.2.94"
default-features = false
[dependencies.miniz_oxide]
-version = "0.5.0"
+version = "0.6.0"
default-features = false
[dependencies.object]
-version = "0.29.0"
+version = "0.30.0"
features = [
"read_core",
"elf",
diff --git a/vendor/backtrace/src/backtrace/miri.rs b/vendor/backtrace/src/backtrace/miri.rs
index 9a5f65b80..f8c496428 100644
--- a/vendor/backtrace/src/backtrace/miri.rs
+++ b/vendor/backtrace/src/backtrace/miri.rs
@@ -91,7 +91,7 @@ pub fn resolve_addr(ptr: *mut c_void) -> Frame {
}
}
-pub unsafe fn trace_unsynchronized<F: FnMut(&super::Frame) -> bool>(mut cb: F) {
+unsafe fn trace_unsynchronized<F: FnMut(&super::Frame) -> bool>(mut cb: F) {
let len = miri_backtrace_size(0);
let mut frames = Vec::with_capacity(len);
@@ -102,6 +102,8 @@ pub unsafe fn trace_unsynchronized<F: FnMut(&super::Frame) -> bool>(mut cb: F) {
for ptr in frames.iter() {
let frame = resolve_addr(*ptr as *mut c_void);
- cb(&super::Frame { inner: frame });
+ if !cb(&super::Frame { inner: frame }) {
+ return;
+ }
}
}
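
For context on the miri.rs hunk above: `backtrace::trace` documents that returning `false` from the callback stops the walk, and this fix makes the Miri backend respect that instead of always visiting every frame. A minimal illustrative model of the loop (not the vendored code):

```rust
// Simplified model of the fixed loop: stop as soon as the callback asks to.
fn trace_sketch<F: FnMut(usize) -> bool>(frames: &[usize], mut cb: F) {
    for &ip in frames {
        if !cb(ip) {
            return; // previously the Miri backend ignored this and kept going
        }
    }
}

fn main() {
    let mut seen = Vec::new();
    trace_sketch(&[0x10, 0x20, 0x30, 0x40], |ip| {
        seen.push(ip);
        seen.len() < 2 // keep only the first two frames
    });
    assert_eq!(seen, vec![0x10, 0x20]);
}
```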
diff --git a/vendor/backtrace/src/print.rs b/vendor/backtrace/src/print.rs
index cc677122a..174d8ae5c 100644
--- a/vendor/backtrace/src/print.rs
+++ b/vendor/backtrace/src/print.rs
@@ -135,7 +135,7 @@ impl BacktraceFrameFmt<'_, '_, '_> {
symbol.name(),
// TODO: this isn't great that we don't end up printing anything
// with non-utf8 filenames. Thankfully almost everything is utf8 so
- // this shouldn't be too too bad.
+ // this shouldn't be too bad.
symbol
.filename()
.and_then(|p| Some(BytesOrWideString::Bytes(p.to_str()?.as_bytes()))),
diff --git a/vendor/backtrace/src/symbolize/gimli.rs b/vendor/backtrace/src/symbolize/gimli.rs
index 5f10122dd..cd4cec58c 100644
--- a/vendor/backtrace/src/symbolize/gimli.rs
+++ b/vendor/backtrace/src/symbolize/gimli.rs
@@ -184,6 +184,8 @@ cfg_if::cfg_if! {
))] {
mod libs_dl_iterate_phdr;
use libs_dl_iterate_phdr::native_libraries;
+ #[path = "gimli/parse_running_mmaps_unix.rs"]
+ mod parse_running_mmaps;
} else if #[cfg(target_env = "libnx")] {
mod libs_libnx;
use libs_libnx::native_libraries;
diff --git a/vendor/backtrace/src/symbolize/gimli/libs_dl_iterate_phdr.rs b/vendor/backtrace/src/symbolize/gimli/libs_dl_iterate_phdr.rs
index a011e6080..9f0304ce8 100644
--- a/vendor/backtrace/src/symbolize/gimli/libs_dl_iterate_phdr.rs
+++ b/vendor/backtrace/src/symbolize/gimli/libs_dl_iterate_phdr.rs
@@ -17,6 +17,20 @@ pub(super) fn native_libraries() -> Vec<Library> {
return ret;
}
+fn infer_current_exe(base_addr: usize) -> OsString {
+ if let Ok(entries) = super::parse_running_mmaps::parse_maps() {
+ let opt_path = entries
+ .iter()
+ .find(|e| e.ip_matches(base_addr) && e.pathname().len() > 0)
+ .map(|e| e.pathname())
+ .cloned();
+ if let Some(path) = opt_path {
+ return path;
+ }
+ }
+ env::current_exe().map(|e| e.into()).unwrap_or_default()
+}
+
// `info` should be a valid pointer.
// `vec` should be a valid pointer to a `std::Vec`.
unsafe extern "C" fn callback(
@@ -28,8 +42,12 @@ unsafe extern "C" fn callback(
let libs = &mut *(vec as *mut Vec<Library>);
let is_main_prog = info.dlpi_name.is_null() || *info.dlpi_name == 0;
let name = if is_main_prog {
+ // The man page for dl_iterate_phdr says that the first object visited by
+ // callback is the main program; so the first time we encounter a
+        // nameless entry, we can assume it's the main program and try to infer its path.
+ // After that, we cannot continue that assumption, and we use an empty string.
if libs.is_empty() {
- env::current_exe().map(|e| e.into()).unwrap_or_default()
+ infer_current_exe(info.dlpi_addr as usize)
} else {
OsString::new()
}
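
The `infer_current_exe` addition above addresses rust-lang/rust#101913: when a program is launched explicitly through `ld.so`, `std::env::current_exe` reports the interpreter rather than the program, so the main object's path is instead recovered from `/proc/self/maps` by finding the mapping that contains its base address. A simplified sketch of that lookup, with made-up field names (the vendored code uses `MapsEntry::ip_matches` and `pathname` from the parser added later in this diff):

```rust
// Hypothetical, trimmed-down version of the lookup; purely illustrative.
struct MapEntry {
    start: usize,
    end: usize,
    pathname: String,
}

fn path_for_base(entries: &[MapEntry], base_addr: usize) -> Option<&str> {
    entries
        .iter()
        .find(|e| e.start <= base_addr && base_addr < e.end && !e.pathname.is_empty())
        .map(|e| e.pathname.as_str())
}

fn main() {
    let maps = [
        MapEntry { start: 0x1000, end: 0x2000, pathname: String::new() },
        MapEntry { start: 0x2000, end: 0x9000, pathname: "/usr/bin/app".into() },
    ];
    assert_eq!(path_for_base(&maps, 0x2500), Some("/usr/bin/app"));
}
```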
diff --git a/vendor/backtrace/src/symbolize/gimli/libs_macos.rs b/vendor/backtrace/src/symbolize/gimli/libs_macos.rs
index 17703b88a..438bbff6f 100644
--- a/vendor/backtrace/src/symbolize/gimli/libs_macos.rs
+++ b/vendor/backtrace/src/symbolize/gimli/libs_macos.rs
@@ -113,8 +113,8 @@ fn native_library(i: u32) -> Option<Library> {
// file offset 0 with a nonzero size. For whatever reason when this
// is present it appears to mean that the symbol table is relative
// to just the vmaddr slide for the library. If it's *not* present
- // then the symbol table is relative to the the vmaddr slide plus
- // the segment's stated address.
+ // then the symbol table is relative to the vmaddr slide plus the
+ // segment's stated address.
//
// To handle this situation if we *don't* find a text section at
// file offset zero then we increase the bias by the first text
diff --git a/vendor/backtrace/src/symbolize/gimli/macho.rs b/vendor/backtrace/src/symbolize/gimli/macho.rs
index ec5673843..adea97a09 100644
--- a/vendor/backtrace/src/symbolize/gimli/macho.rs
+++ b/vendor/backtrace/src/symbolize/gimli/macho.rs
@@ -13,8 +13,8 @@ type MachSection = <Mach as MachHeader>::Section;
type MachNlist = <Mach as MachHeader>::Nlist;
impl Mapping {
- // The loading path for OSX is is so different we just have a completely
- // different implementation of the function here. On OSX we need to go
+ // The loading path for macOS is so different we just have a completely
+ // different implementation of the function here. On macOS we need to go
// probing the filesystem for a bunch of files.
pub fn new(path: &Path) -> Option<Mapping> {
// First up we need to load the unique UUID which is stored in the macho
diff --git a/vendor/backtrace/src/symbolize/gimli/parse_running_mmaps_unix.rs b/vendor/backtrace/src/symbolize/gimli/parse_running_mmaps_unix.rs
new file mode 100644
index 000000000..a196ffcfb
--- /dev/null
+++ b/vendor/backtrace/src/symbolize/gimli/parse_running_mmaps_unix.rs
@@ -0,0 +1,242 @@
+// Note: This file is only currently used on targets that call out to the code
+// in `mod libs_dl_iterate_phdr` (e.g. linux, freebsd, ...); it may be more
+// general purpose, but it hasn't been tested elsewhere.
+
+use super::mystd::fs::File;
+use super::mystd::io::Read;
+use super::mystd::str::FromStr;
+use super::{OsString, String, Vec};
+
+#[derive(PartialEq, Eq, Debug)]
+pub(super) struct MapsEntry {
+ /// start (inclusive) and limit (exclusive) of address range.
+ address: (usize, usize),
+    /// The perms field gives the permissions for the entry
+ ///
+ /// r = read
+ /// w = write
+ /// x = execute
+ /// s = shared
+ /// p = private (copy on write)
+ perms: [char; 4],
+ /// Offset into the file (or "whatever").
+ offset: usize,
+ /// device (major, minor)
+ dev: (usize, usize),
+    /// inode on the device. 0 indicates that no inode is associated with the memory region (e.g. uninitialized data aka BSS).
+ inode: usize,
+ /// Usually the file backing the mapping.
+ ///
+ /// Note: The man page for proc includes a note about "coordination" by
+ /// using readelf to see the Offset field in ELF program headers. pnkfelix
+ /// is not yet sure if that is intended to be a comment on pathname, or what
+ /// form/purpose such coordination is meant to have.
+ ///
+ /// There are also some pseudo-paths:
+ /// "[stack]": The initial process's (aka main thread's) stack.
+    ///   "[stack:<tid>]": a specific thread's stack. (This was only present for a limited range of Linux versions; it was determined to be too expensive to provide.)
+ /// "[vdso]": Virtual dynamically linked shared object
+ /// "[heap]": The process's heap
+ ///
+ /// The pathname can be blank, which means it is an anonymous mapping
+ /// obtained via mmap.
+ ///
+ /// Newlines in pathname are replaced with an octal escape sequence.
+ ///
+ /// The pathname may have "(deleted)" appended onto it if the file-backed
+ /// path has been deleted.
+ ///
+ /// Note that modifications like the latter two indicated above imply that
+ /// in general the pathname may be ambiguous. (I.e. you cannot tell if the
+ /// denoted filename actually ended with the text "(deleted)", or if that
+    /// was added by the maps rendering.)
+ pathname: OsString,
+}
+
+pub(super) fn parse_maps() -> Result<Vec<MapsEntry>, &'static str> {
+ let mut v = Vec::new();
+ let mut proc_self_maps =
+ File::open("/proc/self/maps").map_err(|_| "Couldn't open /proc/self/maps")?;
+ let mut buf = String::new();
+ let _bytes_read = proc_self_maps
+ .read_to_string(&mut buf)
+ .map_err(|_| "Couldn't read /proc/self/maps")?;
+ for line in buf.lines() {
+ v.push(line.parse()?);
+ }
+
+ Ok(v)
+}
+
+impl MapsEntry {
+ pub(super) fn pathname(&self) -> &OsString {
+ &self.pathname
+ }
+
+ pub(super) fn ip_matches(&self, ip: usize) -> bool {
+ self.address.0 <= ip && ip < self.address.1
+ }
+}
+
+impl FromStr for MapsEntry {
+ type Err = &'static str;
+
+ // Format: address perms offset dev inode pathname
+ // e.g.: "ffffffffff600000-ffffffffff601000 --xp 00000000 00:00 0 [vsyscall]"
+ // e.g.: "7f5985f46000-7f5985f48000 rw-p 00039000 103:06 76021795 /usr/lib/x86_64-linux-gnu/ld-linux-x86-64.so.2"
+ // e.g.: "35b1a21000-35b1a22000 rw-p 00000000 00:00 0"
+ fn from_str(s: &str) -> Result<Self, Self::Err> {
+ let mut parts = s
+ .split(' ') // space-separated fields
+ .filter(|s| s.len() > 0); // multiple spaces implies empty strings that need to be skipped.
+ let range_str = parts.next().ok_or("Couldn't find address")?;
+ let perms_str = parts.next().ok_or("Couldn't find permissions")?;
+ let offset_str = parts.next().ok_or("Couldn't find offset")?;
+ let dev_str = parts.next().ok_or("Couldn't find dev")?;
+ let inode_str = parts.next().ok_or("Couldn't find inode")?;
+ let pathname_str = parts.next().unwrap_or(""); // pathname may be omitted.
+
+ let hex = |s| usize::from_str_radix(s, 16).map_err(|_| "Couldn't parse hex number");
+ let address = {
+ // This could use `range_str.split_once('-')` once the MSRV passes 1.52.
+ if let Some(idx) = range_str.find('-') {
+ let (start, rest) = range_str.split_at(idx);
+ let (_div, limit) = rest.split_at(1);
+ (hex(start)?, hex(limit)?)
+ } else {
+ return Err("Couldn't parse address range");
+ }
+ };
+ let perms: [char; 4] = {
+ let mut chars = perms_str.chars();
+ let mut c = || chars.next().ok_or("insufficient perms");
+ let perms = [c()?, c()?, c()?, c()?];
+ if chars.next().is_some() {
+ return Err("too many perms");
+ }
+ perms
+ };
+ let offset = hex(offset_str)?;
+ let dev = {
+ // This could use `dev_str.split_once(':')` once the MSRV passes 1.52.
+ if let Some(idx) = dev_str.find(':') {
+ let (major, rest) = dev_str.split_at(idx);
+ let (_div, minor) = rest.split_at(1);
+ (hex(major)?, hex(minor)?)
+ } else {
+ return Err("Couldn't parse dev")?;
+ }
+ };
+ let inode = hex(inode_str)?;
+ let pathname = pathname_str.into();
+
+ Ok(MapsEntry {
+ address,
+ perms,
+ offset,
+ dev,
+ inode,
+ pathname,
+ })
+ }
+}
+
+// Make sure we can parse 64-bit sample output if we're on a 64-bit target.
+#[cfg(target_pointer_width = "64")]
+#[test]
+fn check_maps_entry_parsing_64bit() {
+ assert_eq!(
+ "ffffffffff600000-ffffffffff601000 --xp 00000000 00:00 0 \
+ [vsyscall]"
+ .parse::<MapsEntry>()
+ .unwrap(),
+ MapsEntry {
+ address: (0xffffffffff600000, 0xffffffffff601000),
+ perms: ['-', '-', 'x', 'p'],
+ offset: 0x00000000,
+ dev: (0x00, 0x00),
+ inode: 0x0,
+ pathname: "[vsyscall]".into(),
+ }
+ );
+
+ assert_eq!(
+ "7f5985f46000-7f5985f48000 rw-p 00039000 103:06 76021795 \
+ /usr/lib/x86_64-linux-gnu/ld-linux-x86-64.so.2"
+ .parse::<MapsEntry>()
+ .unwrap(),
+ MapsEntry {
+ address: (0x7f5985f46000, 0x7f5985f48000),
+ perms: ['r', 'w', '-', 'p'],
+ offset: 0x00039000,
+ dev: (0x103, 0x06),
+ inode: 0x76021795,
+ pathname: "/usr/lib/x86_64-linux-gnu/ld-linux-x86-64.so.2".into(),
+ }
+ );
+ assert_eq!(
+ "35b1a21000-35b1a22000 rw-p 00000000 00:00 0"
+ .parse::<MapsEntry>()
+ .unwrap(),
+ MapsEntry {
+ address: (0x35b1a21000, 0x35b1a22000),
+ perms: ['r', 'w', '-', 'p'],
+ offset: 0x00000000,
+ dev: (0x00, 0x00),
+ inode: 0x0,
+ pathname: Default::default(),
+ }
+ );
+}
+
+// (This output was taken from a 32-bit machine, but will work on any target)
+#[test]
+fn check_maps_entry_parsing_32bit() {
+ /* Example snippet of output:
+ 08056000-08077000 rw-p 00000000 00:00 0 [heap]
+ b7c79000-b7e02000 r--p 00000000 08:01 60662705 /usr/lib/locale/locale-archive
+ b7e02000-b7e03000 rw-p 00000000 00:00 0
+ */
+ assert_eq!(
+ "08056000-08077000 rw-p 00000000 00:00 0 \
+ [heap]"
+ .parse::<MapsEntry>()
+ .unwrap(),
+ MapsEntry {
+ address: (0x08056000, 0x08077000),
+ perms: ['r', 'w', '-', 'p'],
+ offset: 0x00000000,
+ dev: (0x00, 0x00),
+ inode: 0x0,
+ pathname: "[heap]".into(),
+ }
+ );
+
+ assert_eq!(
+ "b7c79000-b7e02000 r--p 00000000 08:01 60662705 \
+ /usr/lib/locale/locale-archive"
+ .parse::<MapsEntry>()
+ .unwrap(),
+ MapsEntry {
+ address: (0xb7c79000, 0xb7e02000),
+ perms: ['r', '-', '-', 'p'],
+ offset: 0x00000000,
+ dev: (0x08, 0x01),
+ inode: 0x60662705,
+ pathname: "/usr/lib/locale/locale-archive".into(),
+ }
+ );
+ assert_eq!(
+ "b7e02000-b7e03000 rw-p 00000000 00:00 0"
+ .parse::<MapsEntry>()
+ .unwrap(),
+ MapsEntry {
+ address: (0xb7e02000, 0xb7e03000),
+ perms: ['r', 'w', '-', 'p'],
+ offset: 0x00000000,
+ dev: (0x00, 0x00),
+ inode: 0x0,
+ pathname: Default::default(),
+ }
+ );
+}
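
A small aside on the MSRV comments scattered through the parser above: they refer to `str::split_once`, stabilized in Rust 1.52, which would replace the manual `find`/`split_at` pairs. Purely as an illustration of what that would look like for the address range (not the vendored code):

```rust
// What the address-range parsing could become with str::split_once; the
// field names and error handling here are illustrative only.
fn parse_range(range: &str) -> Option<(u64, u64)> {
    let (start, end) = range.split_once('-')?;
    Some((
        u64::from_str_radix(start, 16).ok()?,
        u64::from_str_radix(end, 16).ok()?,
    ))
}

fn main() {
    assert_eq!(
        parse_range("7f5985f46000-7f5985f48000"),
        Some((0x7f59_85f4_6000, 0x7f59_85f4_8000))
    );
}
```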
diff --git a/vendor/backtrace/src/windows.rs b/vendor/backtrace/src/windows.rs
index d091874f1..9ec3ba99b 100644
--- a/vendor/backtrace/src/windows.rs
+++ b/vendor/backtrace/src/windows.rs
@@ -162,8 +162,8 @@ macro_rules! ffi {
ffi!($($rest)*);
);
- (extern "system" { $(pub fn $name:ident($($args:tt)*) -> $ret:ty;)* } $($rest:tt)*) => (
- extern "system" {
+ ($(#[$meta:meta])* extern "system" { $(pub fn $name:ident($($args:tt)*) -> $ret:ty;)* } $($rest:tt)*) => (
+ $(#[$meta])* extern "system" {
$(pub fn $name($($args)*) -> $ret;)*
}
@@ -371,6 +371,7 @@ ffi! {
pub type LPCVOID = *const c_void;
pub type LPMODULEENTRY32W = *mut MODULEENTRY32W;
+ #[link(name = "kernel32")]
extern "system" {
pub fn GetCurrentProcess() -> HANDLE;
pub fn GetCurrentThread() -> HANDLE;
@@ -438,6 +439,7 @@ ffi! {
#[cfg(target_pointer_width = "64")]
ffi! {
+ #[link(name = "kernel32")]
extern "system" {
pub fn RtlLookupFunctionEntry(
ControlPc: DWORD64,
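
The windows.rs hunks above extend the crate's `ffi!` wrapper so attributes such as `#[link(name = "kernel32")]` can be attached to the generated extern block. A toy macro showing only the attribute-forwarding piece (`$(#[$meta:meta])*`); nothing below is the real macro, and nothing is called, so it builds on any platform:

```rust
// Toy version of the attribute pass-through; the vendored ffi! macro handles
// much more (typedefs, structs, cfg'd blocks, and so on).
macro_rules! ffi_sketch {
    ($(#[$meta:meta])* extern "system" { $(pub fn $name:ident($($args:tt)*) -> $ret:ty;)* }) => {
        $(#[$meta])*
        extern "system" {
            $(pub fn $name($($args)*) -> $ret;)*
        }
    };
}

ffi_sketch! {
    // The real call sites pass #[link(name = "kernel32")] here; any outer
    // attribute is forwarded unchanged onto the expanded extern block.
    #[allow(non_snake_case, dead_code)]
    extern "system" {
        pub fn GetCurrentProcessId() -> u32;
    }
}

fn main() {
    // Declaration only: the symbol is never referenced, so no particular
    // system library has to be present at link time.
}
```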
diff --git a/vendor/backtrace/tests/common/mod.rs b/vendor/backtrace/tests/common/mod.rs
new file mode 100644
index 000000000..3c07934fd
--- /dev/null
+++ b/vendor/backtrace/tests/common/mod.rs
@@ -0,0 +1,14 @@
+/// Some tests only make sense in contexts where they can re-exec the test
+/// itself. Not all contexts support this, so you can call this method to find
+/// out which case you are in.
+pub fn cannot_reexec_the_test() -> bool {
+ // These run in docker containers on CI where they can't re-exec the test,
+ // so just skip these for CI. No other reason this can't run on those
+ // platforms though.
+ // Miri does not have support for re-execing a file
+ cfg!(unix)
+ && (cfg!(target_arch = "arm")
+ || cfg!(target_arch = "aarch64")
+ || cfg!(target_arch = "s390x"))
+ || cfg!(miri)
+}
diff --git a/vendor/backtrace/tests/concurrent-panics.rs b/vendor/backtrace/tests/concurrent-panics.rs
index 470245cc9..a44a26771 100644
--- a/vendor/backtrace/tests/concurrent-panics.rs
+++ b/vendor/backtrace/tests/concurrent-panics.rs
@@ -9,17 +9,11 @@ const PANICS: usize = 100;
const THREADS: usize = 8;
const VAR: &str = "__THE_TEST_YOU_ARE_LUKE";
+mod common;
+
fn main() {
- // These run in docker containers on CI where they can't re-exec the test,
- // so just skip these for CI. No other reason this can't run on those
- // platforms though.
- // Miri does not have support for re-execing a file
- if cfg!(unix)
- && (cfg!(target_arch = "arm")
- || cfg!(target_arch = "aarch64")
- || cfg!(target_arch = "s390x"))
- || cfg!(miri)
- {
+ // If we cannot re-exec this test, there's no point in trying to do it.
+ if common::cannot_reexec_the_test() {
println!("test result: ok");
return;
}
diff --git a/vendor/backtrace/tests/current-exe-mismatch.rs b/vendor/backtrace/tests/current-exe-mismatch.rs
new file mode 100644
index 000000000..21c67bcbf
--- /dev/null
+++ b/vendor/backtrace/tests/current-exe-mismatch.rs
@@ -0,0 +1,137 @@
+// rust-lang/rust#101913: when you run your program explicitly via `ld.so`,
+// `std::env::current_exe` will return the path of *that* program, and not
+// the Rust program itself.
+
+use std::io::{BufRead, BufReader};
+use std::path::{Path, PathBuf};
+use std::process::Command;
+
+mod common;
+
+fn main() {
+ if std::env::var(VAR).is_err() {
+        // the parent waits for the child; we then handle either printing
+ // "test result: ok", "test result: ignored", or panicking.
+ match parent() {
+ Ok(()) => {
+ println!("test result: ok");
+ }
+ Err(EarlyExit::IgnoreTest(_)) => {
+ println!("test result: ignored");
+ }
+ Err(EarlyExit::IoError(e)) => {
+                println!("{} parent encountered IoError: {:?}", file!(), e);
+ panic!();
+ }
+ }
+ } else {
+ // println!("{} running child", file!());
+ child().unwrap();
+ }
+}
+
+const VAR: &str = "__THE_TEST_YOU_ARE_LUKE";
+
+#[derive(Debug)]
+enum EarlyExit {
+ IgnoreTest(String),
+ IoError(std::io::Error),
+}
+
+impl From<std::io::Error> for EarlyExit {
+ fn from(e: std::io::Error) -> Self {
+ EarlyExit::IoError(e)
+ }
+}
+
+fn parent() -> Result<(), EarlyExit> {
+ // If we cannot re-exec this test, there's no point in trying to do it.
+ if common::cannot_reexec_the_test() {
+ return Err(EarlyExit::IgnoreTest("(cannot reexec)".into()));
+ }
+
+ let me = std::env::current_exe().unwrap();
+ let ld_so = find_interpreter(&me)?;
+
+ // use interp to invoke current exe, yielding child test.
+ //
+ // (if you're curious what you might compare this against, you can try
+ // swapping in the below definition for `result`, which is the easy case of
+ // not using the ld.so interpreter directly that Rust handled fine even
+ // prior to resolution of rust-lang/rust#101913.)
+ //
+ // let result = Command::new(me).env(VAR, "1").output()?;
+ let result = Command::new(ld_so).env(VAR, "1").arg(&me).output().unwrap();
+
+ if result.status.success() {
+ return Ok(());
+ }
+ println!("stdout:\n{}", String::from_utf8_lossy(&result.stdout));
+ println!("stderr:\n{}", String::from_utf8_lossy(&result.stderr));
+ println!("code: {}", result.status);
+ panic!();
+}
+
+fn child() -> Result<(), EarlyExit> {
+ let bt = backtrace::Backtrace::new();
+ println!("{:?}", bt);
+
+ let mut found_my_name = false;
+
+ let my_filename = file!();
+ 'frames: for frame in bt.frames() {
+ let symbols = frame.symbols();
+ if symbols.is_empty() {
+ continue;
+ }
+
+ for sym in symbols {
+ if let Some(filename) = sym.filename() {
+ if filename.ends_with(my_filename) {
+ // huzzah!
+ found_my_name = true;
+ break 'frames;
+ }
+ }
+ }
+ }
+
+ assert!(found_my_name);
+
+ Ok(())
+}
+
+// we use the `readelf` command to extract the path to the interpreter requested
+// by our binary.
+//
+// if we cannot `readelf` for some reason, or if we fail to parse its output,
+// then we will just give up on this test (and not treat it as a test failure).
+fn find_interpreter(me: &Path) -> Result<PathBuf, EarlyExit> {
+ let result = Command::new("readelf")
+ .arg("-l")
+ .arg(me)
+ .output()
+ .map_err(|_err| EarlyExit::IgnoreTest("readelf invocation failed".into()))?;
+ if result.status.success() {
+ let r = BufReader::new(&result.stdout[..]);
+ for line in r.lines() {
+ let line = line?;
+ let line = line.trim();
+ let prefix = "[Requesting program interpreter: ";
+ // This could use `line.split_once` and `suffix.rsplit_once` once the MSRV passes 1.52
+ if let Some(idx) = line.find(prefix) {
+ let (_, suffix) = line.split_at(idx + prefix.len());
+ if let Some(idx) = suffix.rfind("]") {
+ let (found_path, _ignore_remainder) = suffix.split_at(idx);
+ return Ok(found_path.into());
+ }
+ }
+ }
+
+ Err(EarlyExit::IgnoreTest(
+ "could not find interpreter from readelf output".into(),
+ ))
+ } else {
+ Err(EarlyExit::IgnoreTest("readelf returned non-success".into()))
+ }
+}
diff --git a/vendor/backtrace/tests/skip_inner_frames.rs b/vendor/backtrace/tests/skip_inner_frames.rs
index 8b57bef52..60bba35e6 100644
--- a/vendor/backtrace/tests/skip_inner_frames.rs
+++ b/vendor/backtrace/tests/skip_inner_frames.rs
@@ -4,7 +4,7 @@ use backtrace::Backtrace;
// function for frames which reports the starting address of a symbol. As a
// result it's only enabled on a few platforms.
const ENABLED: bool = cfg!(all(
- // Windows hasn't really been tested, and OSX doesn't support actually
+ // Windows hasn't really been tested, and macOS doesn't support actually
// finding an enclosing frame, so disable this
target_os = "linux",
// On ARM finding the enclosing function is simply returning the ip itself.
diff --git a/vendor/camino/.cargo-checksum.json b/vendor/camino/.cargo-checksum.json
index 81a1f0f60..8686980ae 100644
--- a/vendor/camino/.cargo-checksum.json
+++ b/vendor/camino/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"CHANGELOG.md":"2530b12d7ddf3395bbc4c10557e57531498039ba28528b91ffc3f3deefc76f77","CODE_OF_CONDUCT.md":"f51e207c2961ec061cac5c8aa9dd3098c3437de2c106d740c2aae90771bc0f86","Cargo.toml":"7bf7de17ab10cd95aadf102f37c0b8a58f0c31f90cfeee81126ead8ad3cb0a26","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"84762d717d0f2358c56f54ee46a6ca5f1582b7d3843f7a4d6e553ea04a57ca1b","build.rs":"ca6914ad35e69842b6fba6e436d0417f39dbe6ee18694d4dd89b372d31cbf715","clippy.toml":"818cba7332cc56b019d59e09805a3498f523da788f51454742905f1987c0b563","rustfmt.toml":"bf9776adb152b3fdc0d75c0929ede148c3e28c58f909a7d052865bc332e8958f","src/lib.rs":"3a744ca7df473bb8bd97e1f4a961d517cb7379b8756e38893dd9d53a169c41e1","src/proptest_impls.rs":"aa17bad810abe4a7b6c7a2c3163ae9749b03dd8bcef5043b0c4b9d00977f981c","src/serde_impls.rs":"eb7f00d1ceb7135506047dbefd7e6acee0364b5a9194111f49dbf2d1eb3661ac","src/tests.rs":"d6108c540dc93446b17d297b50372f799ef777c2cb0280fd37824a102ec24533","tests/integration_tests.rs":"b664a7555d2e5ac9ab71384e3ccfb73c01abe4c401f8de32e234c03b4d19d0f8"},"package":"88ad0e1e3e88dd237a156ab9f571021b8a158caa0ae44b1968a241efb5144c1e"} \ No newline at end of file
+{"files":{"CHANGELOG.md":"ea1979dff05f6bdd9e2c6c65b8fdb28395bf59f16e7e7fa2a9d398bdd6ae106f","CODE_OF_CONDUCT.md":"f51e207c2961ec061cac5c8aa9dd3098c3437de2c106d740c2aae90771bc0f86","Cargo.toml":"b024a45f39757c112132e6ae883c0052b8bd86bfc166b89b9615621744fd8583","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"84762d717d0f2358c56f54ee46a6ca5f1582b7d3843f7a4d6e553ea04a57ca1b","build.rs":"ca6914ad35e69842b6fba6e436d0417f39dbe6ee18694d4dd89b372d31cbf715","clippy.toml":"818cba7332cc56b019d59e09805a3498f523da788f51454742905f1987c0b563","release.toml":"287514631fde7a1d29a8e8027bc37c585c7e30c173c8254f551b042e38f4bb81","rustfmt.toml":"bf9776adb152b3fdc0d75c0929ede148c3e28c58f909a7d052865bc332e8958f","src/lib.rs":"52da457eae1c189a1728df994b478fbf37baa85b630541b7a6cbb48670c1c329","src/proptest_impls.rs":"4f36f5804bd3cbbf65177db4b20d808ed56405388b552bad375aab308a535236","src/serde_impls.rs":"eb7f00d1ceb7135506047dbefd7e6acee0364b5a9194111f49dbf2d1eb3661ac","src/tests.rs":"d6108c540dc93446b17d297b50372f799ef777c2cb0280fd37824a102ec24533","tests/integration_tests.rs":"56aa396a173d0c255369159b57e4a7db294e607aa6cfb65efebd95d79cc51a04"},"package":"c77df041dc383319cc661b428b6961a005db4d6808d5e12536931b1ca9556055"} \ No newline at end of file
diff --git a/vendor/camino/CHANGELOG.md b/vendor/camino/CHANGELOG.md
index 67c2ea8d3..03d672242 100644
--- a/vendor/camino/CHANGELOG.md
+++ b/vendor/camino/CHANGELOG.md
@@ -3,6 +3,13 @@
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+## [1.1.2] - 2022-08-12
+
+### Added
+
+- New convenience methods [`FromPathBufError::into_io_error`] and
+ [`FromPathError::into_io_error`].
+
## [1.1.1] - 2022-08-12
### Fixed
@@ -101,6 +108,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
Initial release.
+[1.1.2]: https://github.com/camino-rs/camino/releases/tag/camino-1.1.2
[1.1.1]: https://github.com/camino-rs/camino/releases/tag/camino-1.1.1
[1.1.0]: https://github.com/camino-rs/camino/releases/tag/camino-1.1.0
[1.0.9]: https://github.com/camino-rs/camino/releases/tag/camino-1.0.9
diff --git a/vendor/camino/Cargo.toml b/vendor/camino/Cargo.toml
index 28c007855..1b2eb24d8 100644
--- a/vendor/camino/Cargo.toml
+++ b/vendor/camino/Cargo.toml
@@ -12,7 +12,7 @@
[package]
edition = "2018"
name = "camino"
-version = "1.1.1"
+version = "1.1.2"
authors = [
"Without Boats <saoirse@without.boats>",
"Ashley Williams <ashley666ashley@gmail.com>",
diff --git a/vendor/camino/release.toml b/vendor/camino/release.toml
new file mode 100644
index 000000000..b7977bad5
--- /dev/null
+++ b/vendor/camino/release.toml
@@ -0,0 +1,8 @@
+sign-tag = true
+# Required for templates below to work
+consolidate-commits = false
+pre-release-commit-message = "[{{crate_name}}] version {{version}}"
+tag-message = "[{{crate_name}}] version {{version}}"
+tag-name = "camino-{{version}}"
+publish = false
+dependent-version = "upgrade"
diff --git a/vendor/camino/src/lib.rs b/vendor/camino/src/lib.rs
index fcfba3805..44684b096 100644
--- a/vendor/camino/src/lib.rs
+++ b/vendor/camino/src/lib.rs
@@ -222,7 +222,7 @@ impl Utf8PathBuf {
#[must_use]
pub fn as_path(&self) -> &Utf8Path {
// SAFETY: every Utf8PathBuf constructor ensures that self is valid UTF-8
- unsafe { Utf8Path::assume_utf8(&*self.0) }
+ unsafe { Utf8Path::assume_utf8(&self.0) }
}
/// Extends `self` with `path`.
@@ -1175,10 +1175,8 @@ impl Utf8Path {
/// assert_eq!(path.canonicalize_utf8().unwrap(), Utf8PathBuf::from("/foo/test/bar.rs"));
/// ```
pub fn canonicalize_utf8(&self) -> io::Result<Utf8PathBuf> {
- self.canonicalize().and_then(|path| {
- path.try_into()
- .map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))
- })
+ self.canonicalize()
+ .and_then(|path| path.try_into().map_err(FromPathBufError::into_io_error))
}
/// Reads a symbolic link, returning the file that the link points to.
@@ -1224,10 +1222,8 @@ impl Utf8Path {
/// let path_link = path.read_link_utf8().expect("read_link call failed");
/// ```
pub fn read_link_utf8(&self) -> io::Result<Utf8PathBuf> {
- self.read_link().and_then(|path| {
- path.try_into()
- .map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))
- })
+ self.read_link()
+ .and_then(|path| path.try_into().map_err(FromPathBufError::into_io_error))
}
/// Returns an iterator over the entries within a directory.
@@ -2488,13 +2484,24 @@ impl FromPathBufError {
self.path
}
- /// Fetch a [`FromPathError`] for more about the conversion failure.
+ /// Fetches a [`FromPathError`] for more about the conversion failure.
///
/// At the moment this struct does not contain any additional information, but is provided for
/// completeness.
pub fn from_path_error(&self) -> FromPathError {
self.error
}
+
+ /// Converts self into a [`std::io::Error`] with kind
+ /// [`InvalidData`](io::ErrorKind::InvalidData).
+ ///
+ /// Many users of `FromPathBufError` will want to convert it into an `io::Error`. This is a
+ /// convenience method to do that.
+ pub fn into_io_error(self) -> io::Error {
+ // NOTE: we don't currently implement `From<FromPathBufError> for io::Error` because we want
+ // to ensure the user actually desires that conversion.
+ io::Error::new(io::ErrorKind::InvalidData, self)
+ }
}
impl fmt::Display for FromPathBufError {
@@ -2539,6 +2546,19 @@ impl error::Error for FromPathBufError {
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct FromPathError(());
+impl FromPathError {
+ /// Converts self into a [`std::io::Error`] with kind
+ /// [`InvalidData`](io::ErrorKind::InvalidData).
+ ///
+ /// Many users of `FromPathError` will want to convert it into an `io::Error`. This is a
+ /// convenience method to do that.
+ pub fn into_io_error(self) -> io::Error {
+ // NOTE: we don't currently implement `From<FromPathBufError> for io::Error` because we want
+ // to ensure the user actually desires that conversion.
+ io::Error::new(io::ErrorKind::InvalidData, self)
+ }
+}
+
impl fmt::Display for FromPathError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Path contains invalid UTF-8")
@@ -2587,7 +2607,7 @@ impl AsRef<Path> for Utf8Path {
impl AsRef<Path> for Utf8PathBuf {
fn as_ref(&self) -> &Path {
- &*self.0
+ &self.0
}
}
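
A usage sketch for the new camino helpers above (assuming camino 1.1.2 as a dependency; not taken from the diff): `FromPathBufError::into_io_error` lets a caller map a failed `PathBuf` to `Utf8PathBuf` conversion into `io::ErrorKind::InvalidData`, which is exactly what `canonicalize_utf8` and `read_link_utf8` now do internally:

```rust
use camino::Utf8PathBuf;
use std::convert::TryFrom;
use std::io;
use std::path::PathBuf;

// Turn an arbitrary PathBuf into a Utf8PathBuf inside an io::Result-returning
// function, routing the UTF-8 failure through into_io_error.
fn require_utf8(path: PathBuf) -> io::Result<Utf8PathBuf> {
    Utf8PathBuf::try_from(path).map_err(|err| err.into_io_error())
}

fn main() -> io::Result<()> {
    let p = require_utf8(PathBuf::from("Cargo.toml"))?;
    println!("utf-8 path: {p}");
    Ok(())
}
```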
diff --git a/vendor/camino/src/proptest_impls.rs b/vendor/camino/src/proptest_impls.rs
index 997c88c8c..81776f226 100644
--- a/vendor/camino/src/proptest_impls.rs
+++ b/vendor/camino/src/proptest_impls.rs
@@ -8,9 +8,8 @@
// NOTE: #[cfg(feature = "proptest1")] is specified here to work with `doc_cfg`.
-use proptest::{arbitrary::StrategyFor, prelude::*, strategy::MapInto};
-
use crate::{Utf8Path, Utf8PathBuf};
+use proptest::{arbitrary::StrategyFor, prelude::*, strategy::MapInto};
/// The [`Arbitrary`] impl for `Utf8PathBuf` returns a path with between 0 and 8 components,
/// joined by the [`MAIN_SEPARATOR`](std::path::MAIN_SEPARATOR) for the platform. (Each component is
diff --git a/vendor/camino/tests/integration_tests.rs b/vendor/camino/tests/integration_tests.rs
index a8b00f2fe..190ba82ba 100644
--- a/vendor/camino/tests/integration_tests.rs
+++ b/vendor/camino/tests/integration_tests.rs
@@ -106,7 +106,7 @@ fn test_borrow_hash() {
let owned = Utf8PathBuf::from(path);
assert_eq!(
- hash_output(&owned),
+ hash_output(owned),
hash_output(borrowed),
"consistent Hash: {}",
borrowed
diff --git a/vendor/cargo_metadata/.cargo-checksum.json b/vendor/cargo_metadata/.cargo-checksum.json
index 2d183cd80..7838a4a29 100644
--- a/vendor/cargo_metadata/.cargo-checksum.json
+++ b/vendor/cargo_metadata/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.toml":"1ea11e47024269503340c3764c13c6e0d47f45eb9f2eef3ea75eedee64c02e98","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"d51a5b3347bed2441b20986be81bfd4611ca2c5614f950116b273199a9bcf2de","src/dependency.rs":"c593ddc73d863c5712e2aba58b5f4d9bd915a5ac0bc17df71642aa79aa93bfdc","src/diagnostic.rs":"fee47d27390f1026ff99ffade5dfd2ab3e9b9839c3f33ce91a7dcde875551374","src/errors.rs":"2d67e46ef8f29a9ae2dd00ce39cc50e6ccae4dec9a09a9bad7c36bd8be4f62cc","src/lib.rs":"41b0d89ec02f698b8fdeab9c1f76536a4ba34c2a4361b2c921340a10288e002d","src/messages.rs":"caaa7c906595768587007c72fcc3ac32880bbb02293b004f498a296e078fbbff","tests/selftest.rs":"73afd494c1bf7dd4e1a99971e9ff66a0e21fc7bf3e327663df15d2350dcdfc70","tests/test_samples.rs":"3374f4a054d440f8fc567b233c9d680be98aa481c622845ae1dc5cb28aa5f804"},"package":"3abb7553d5b9b8421c6de7cb02606ff15e0c6eea7d8eadd75ef013fd636bec36"} \ No newline at end of file
+{"files":{"CHANGELOG.md":"8b3e29799cdedf02f169bb519072ace2e2b6b9413f4ce8fa0666c2d1d964084e","Cargo.toml":"57d432cd172cc87ee4c31b0e4c21c52d06ba1a48da9decd34581b2671c47d71d","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"d51a5b3347bed2441b20986be81bfd4611ca2c5614f950116b273199a9bcf2de","src/dependency.rs":"c593ddc73d863c5712e2aba58b5f4d9bd915a5ac0bc17df71642aa79aa93bfdc","src/diagnostic.rs":"fee47d27390f1026ff99ffade5dfd2ab3e9b9839c3f33ce91a7dcde875551374","src/errors.rs":"797afd61efdd843ae570d9e972dd2425d33823d4a78c0c488028493dffb45c7a","src/lib.rs":"5ec701f3589c5d71c152b5abe7ad5f222aee4d4a5f9992bced1d357bad36e227","src/messages.rs":"a8e3ee31dc8cce5762b4b085be29fe4d7189a789f3a149ef2b6c17604d94528b","tests/selftest.rs":"73afd494c1bf7dd4e1a99971e9ff66a0e21fc7bf3e327663df15d2350dcdfc70","tests/test_samples.rs":"ee2b4737adfa1930c1610bb3ec0fc94b7f1a3691bb09545da69044eef2f5ba6b"},"package":"08a1ec454bc3eead8719cb56e15dbbfecdbc14e4b3a3ae4936cc6e31f5fc0d07"} \ No newline at end of file
diff --git a/vendor/cargo_metadata/CHANGELOG.md b/vendor/cargo_metadata/CHANGELOG.md
new file mode 100644
index 000000000..fcc0b2642
--- /dev/null
+++ b/vendor/cargo_metadata/CHANGELOG.md
@@ -0,0 +1,38 @@
+# Changelog
+
+## Unreleased
+
+### Added
+
+- Re-exported `semver` crate directly.
+
+### Changed
+
+- Made `parse_stream` more versatile by accepting anything that implements `Read`.
+
+### Removed
+
+- Removed re-exports for `BuildMetadata` and `Prerelease` from `semver` crate.
+
+### Fixed
+
+- Added missing `manifest_path` field to `Artifact`. Fixes #187.
+
+## [0.15.0] - 2022-06-22
+
+### Added
+
+- Re-exported `BuildMetadata` and `Prerelease` from `semver` crate.
+- Added `workspace_packages` function.
+- Added `Edition` enum to better parse edition field.
+- Added `rust-version` field to Cargo manifest.
+
+### Changed
+
+- Bumped msrv from `1.40.0` to `1.42.0`.
+
+### Internal Changes
+
+- Updated `derive_builder` to the latest version.
+- Made use of `matches!` macros where possible.
+- Fixed some tests
diff --git a/vendor/cargo_metadata/Cargo.toml b/vendor/cargo_metadata/Cargo.toml
index 90f1b1ce0..25a28ef4f 100644
--- a/vendor/cargo_metadata/Cargo.toml
+++ b/vendor/cargo_metadata/Cargo.toml
@@ -13,7 +13,7 @@
edition = "2018"
rust-version = "1.42.0"
name = "cargo_metadata"
-version = "0.15.0"
+version = "0.15.3"
authors = ["Oliver Schneider <git-spam-no-reply9815368754983@oli-obk.de>"]
description = "structured access to the output of `cargo metadata`"
readme = "README.md"
@@ -47,6 +47,9 @@ features = ["derive"]
version = "1.0.79"
features = ["unbounded_depth"]
+[dependencies.thiserror]
+version = "1.0.31"
+
[features]
builder = ["derive_builder"]
default = []
diff --git a/vendor/cargo_metadata/src/errors.rs b/vendor/cargo_metadata/src/errors.rs
index 7172057e7..4d08200c8 100644
--- a/vendor/cargo_metadata/src/errors.rs
+++ b/vendor/cargo_metadata/src/errors.rs
@@ -1,7 +1,4 @@
-use std::fmt;
-use std::io;
-use std::str::Utf8Error;
-use std::string::FromUtf8Error;
+use std::{io, str::Utf8Error, string::FromUtf8Error};
/// Custom result type for `cargo_metadata::Error`
pub type Result<T> = ::std::result::Result<T, Error>;
@@ -24,87 +21,32 @@ pub type Result<T> = ::std::result::Result<T, Error>;
/// really want to. (Either through foreign_links or by making it a field
/// value of a `ErrorKind` variant).
///
-#[derive(Debug)]
+#[derive(Debug, thiserror::Error)]
pub enum Error {
/// Error during execution of `cargo metadata`
+ #[error("`cargo metadata` exited with an error: {stderr}")]
CargoMetadata {
/// stderr returned by the `cargo metadata` command
stderr: String,
},
/// IO Error during execution of `cargo metadata`
- Io(io::Error),
+ #[error("failed to start `cargo metadata`: {0}")]
+ Io(#[from] io::Error),
/// Output of `cargo metadata` was not valid utf8
- Utf8(Utf8Error),
+ #[error("cannot convert the stdout of `cargo metadata`: {0}")]
+ Utf8(#[from] Utf8Error),
/// Error output of `cargo metadata` was not valid utf8
- ErrUtf8(FromUtf8Error),
+ #[error("cannot convert the stderr of `cargo metadata`: {0}")]
+ ErrUtf8(#[from] FromUtf8Error),
/// Deserialization error (structure of json did not match expected structure)
- Json(::serde_json::Error),
+ #[error("failed to interpret `cargo metadata`'s json: {0}")]
+ Json(#[from] ::serde_json::Error),
/// The output did not contain any json
+ #[error("could not find any json in the output of `cargo metadata`")]
NoJson,
}
-
-impl From<io::Error> for Error {
- fn from(v: io::Error) -> Self {
- Error::Io(v)
- }
-}
-
-impl From<Utf8Error> for Error {
- fn from(v: Utf8Error) -> Self {
- Error::Utf8(v)
- }
-}
-
-impl From<FromUtf8Error> for Error {
- fn from(v: FromUtf8Error) -> Self {
- Error::ErrUtf8(v)
- }
-}
-
-impl From<::serde_json::Error> for Error {
- fn from(v: ::serde_json::Error) -> Self {
- Error::Json(v)
- }
-}
-
-impl fmt::Display for Error {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- match self {
- Error::CargoMetadata { stderr } => {
- write!(
- f,
- "`cargo metadata` exited with an error: {}",
- stderr.trim_end()
- )
- }
- Error::Io(err) => write!(f, "failed to start `cargo metadata`: {}", err),
- Error::Utf8(err) => write!(f, "cannot convert the stdout of `cargo metadata`: {}", err),
- Error::ErrUtf8(err) => {
- write!(f, "cannot convert the stderr of `cargo metadata`: {}", err)
- }
- Error::Json(err) => write!(f, "failed to interpret `cargo metadata`'s json: {}", err),
- Error::NoJson => write!(
- f,
- "could not find any json in the output of `cargo metadata`"
- ),
- }
- }
-}
-
-impl ::std::error::Error for Error {
- fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
- match self {
- Error::CargoMetadata { .. } => None,
- Error::Io(err) => Some(err),
- Error::Utf8(err) => Some(err),
- Error::ErrUtf8(err) => Some(err),
- Error::Json(err) => Some(err),
- Error::NoJson => None,
- }
- }
-}
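
The replacement above swaps the hand-written `From`, `Display`, and `std::error::Error` impls for thiserror's derive: each `#[error(...)]` attribute supplies the `Display` text and each `#[from]` field supplies both the `From` conversion and `source()`. One behavioural nit worth noting is that the derived `CargoMetadata` message interpolates `stderr` verbatim, whereas the old impl trimmed trailing whitespace. A minimal sketch of the same pattern on a stand-in error type, assuming thiserror 1.x is available:

    use std::io;

    #[derive(Debug, thiserror::Error)]
    enum FetchError {
        // Display text comes from the #[error] attribute.
        #[error("command exited with an error: {stderr}")]
        CommandFailed { stderr: String },
        // #[from] generates both `From<io::Error>` and the `source()` chain.
        #[error("failed to start command: {0}")]
        Io(#[from] io::Error),
    }

    fn probe() -> Result<(), FetchError> {
        // `?` relies on the derived `From<io::Error>` conversion.
        std::fs::metadata("/nonexistent-placeholder-path")?;
        Ok(())
    }

    fn main() {
        if let Err(err) = probe() {
            eprintln!("{}", err); // Display text from the derive
        }
    }
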
diff --git a/vendor/cargo_metadata/src/lib.rs b/vendor/cargo_metadata/src/lib.rs
index a250aa176..27c72da0d 100644
--- a/vendor/cargo_metadata/src/lib.rs
+++ b/vendor/cargo_metadata/src/lib.rs
@@ -83,9 +83,11 @@ use camino::Utf8PathBuf;
use derive_builder::Builder;
use std::collections::HashMap;
use std::env;
+use std::ffi::OsString;
use std::fmt;
+use std::hash::Hash;
use std::path::PathBuf;
-use std::process::Command;
+use std::process::{Command, Stdio};
use std::str::from_utf8;
pub use camino;
@@ -154,10 +156,22 @@ pub struct Metadata {
}
impl Metadata {
- /// Get the root package of this metadata instance.
+ /// Get the workspace's root package of this metadata instance.
pub fn root_package(&self) -> Option<&Package> {
- let root = self.resolve.as_ref()?.root.as_ref()?;
- self.packages.iter().find(|pkg| &pkg.id == root)
+ match &self.resolve {
+ Some(resolve) => {
+ // if dependencies are resolved, use Cargo's answer
+ let root = resolve.root.as_ref()?;
+ self.packages.iter().find(|pkg| &pkg.id == root)
+ }
+ None => {
+ // if dependencies aren't resolved, check for a root package manually
+ let root_manifest_path = self.workspace_root.join("Cargo.toml");
+ self.packages
+ .iter()
+ .find(|pkg| pkg.manifest_path == root_manifest_path)
+ }
+ }
}
/// Get the workspace packages.
@@ -374,9 +388,12 @@ impl Package {
/// Full path to the readme file if one is present in the manifest
pub fn readme(&self) -> Option<Utf8PathBuf> {
- self.readme
- .as_ref()
- .map(|file| self.manifest_path.join(file))
+ self.readme.as_ref().map(|file| {
+ self.manifest_path
+ .parent()
+ .unwrap_or(&self.manifest_path)
+ .join(file)
+ })
}
}
@@ -409,7 +426,7 @@ impl std::fmt::Display for Source {
pub struct Target {
/// Name as given in the `Cargo.toml` or generated from the file name
pub name: String,
- /// Kind of target ("bin", "example", "test", "bench", "lib")
+ /// Kind of target ("bin", "example", "test", "bench", "lib", "custom-build")
pub kind: Vec<String>,
/// Almost the same as `kind`, except when an example is a library instead of an executable.
/// In that case `crate_types` contains things like `rlib` and `dylib` while `kind` is `example`
@@ -450,9 +467,47 @@ pub struct Target {
pub doc: bool,
}
-#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Hash)]
+impl Target {
+ fn is_kind(&self, name: &str) -> bool {
+ self.kind.iter().any(|kind| kind == name)
+ }
+
+ /// Return true if this target is of kind "lib".
+ pub fn is_lib(&self) -> bool {
+ self.is_kind("lib")
+ }
+
+ /// Return true if this target is of kind "bin".
+ pub fn is_bin(&self) -> bool {
+ self.is_kind("bin")
+ }
+
+ /// Return true if this target is of kind "example".
+ pub fn is_example(&self) -> bool {
+ self.is_kind("example")
+ }
+
+ /// Return true if this target is of kind "test".
+ pub fn is_test(&self) -> bool {
+ self.is_kind("test")
+ }
+
+ /// Return true if this target is of kind "bench".
+ pub fn is_bench(&self) -> bool {
+ self.is_kind("bench")
+ }
+
+ /// Return true if this target is of kind "custom-build".
+ pub fn is_custom_build(&self) -> bool {
+ self.is_kind("custom-build")
+ }
+}
+
+/// The Rust edition
+///
+/// As of writing this comment rust editions 2024, 2027 and 2030 are not actually a thing yet but are parsed nonetheless for future proofing.
+#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[non_exhaustive]
-/// The rust edition
pub enum Edition {
/// Edition 2015
#[serde(rename = "2015")]
@@ -463,6 +518,36 @@ pub enum Edition {
/// Edition 2021
#[serde(rename = "2021")]
E2021,
+ #[doc(hidden)]
+ #[serde(rename = "2024")]
+ _E2024,
+ #[doc(hidden)]
+ #[serde(rename = "2027")]
+ _E2027,
+ #[doc(hidden)]
+ #[serde(rename = "2030")]
+ _E2030,
+}
+
+impl Edition {
+ /// Return the string representation of the edition
+ pub fn as_str(&self) -> &'static str {
+ use Edition::*;
+ match self {
+ E2015 => "2015",
+ E2018 => "2018",
+ E2021 => "2021",
+ _E2024 => "2024",
+ _E2027 => "2027",
+ _E2030 => "2030",
+ }
+ }
+}
+
+impl fmt::Display for Edition {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(self.as_str())
+ }
}
impl Default for Edition {
@@ -497,7 +582,7 @@ pub struct MetadataCommand {
manifest_path: Option<PathBuf>,
/// Current directory of the `cargo metadata` process.
current_dir: Option<PathBuf>,
- /// Output information only about the root package and don't fetch dependencies.
+ /// Output information only about workspace members and don't fetch dependencies.
no_deps: bool,
/// Collections of `CargoOpt::SomeFeatures(..)`
features: Vec<String>,
@@ -508,6 +593,11 @@ pub struct MetadataCommand {
/// Arbitrary command line flags to pass to `cargo`. These will be added
/// to the end of the command line invocation.
other_options: Vec<String>,
+ /// Arbitrary environment variables to set when running `cargo`. These will be merged into
+ /// the calling environment, overriding any which clash.
+ env: HashMap<OsString, OsString>,
+ /// Show stderr
+ verbose: bool,
}
impl MetadataCommand {
@@ -533,7 +623,7 @@ impl MetadataCommand {
self.current_dir = Some(path.into());
self
}
- /// Output information only about the root package and don't fetch dependencies.
+ /// Output information only about workspace members and don't fetch dependencies.
pub fn no_deps(&mut self) -> &mut MetadataCommand {
self.no_deps = true;
self
@@ -603,6 +693,38 @@ impl MetadataCommand {
self
}
+ /// Arbitrary environment variables to set when running `cargo`. These will be merged into
+ /// the calling environment, overriding any which clash.
+ ///
+ /// Some examples of when you may want to use this:
+ /// 1. Setting cargo config values without needing a .cargo/config.toml file, e.g. to set
+ /// `CARGO_NET_GIT_FETCH_WITH_CLI=true`
+ /// 2. To specify a custom path to RUSTC if your rust toolchain components aren't laid out in
+ /// the way cargo expects by default.
+ ///
+ /// ```no_run
+ /// # use cargo_metadata::{CargoOpt, MetadataCommand};
+ /// MetadataCommand::new()
+ /// .env("CARGO_NET_GIT_FETCH_WITH_CLI", "true")
+ /// .env("RUSTC", "/path/to/rustc")
+ /// // ...
+ /// # ;
+ /// ```
+ pub fn env<K: Into<OsString>, V: Into<OsString>>(
+ &mut self,
+ key: K,
+ val: V,
+ ) -> &mut MetadataCommand {
+ self.env.insert(key.into(), val.into());
+ self
+ }
+
+ /// Set whether to show stderr
+ pub fn verbose(&mut self, verbose: bool) -> &mut MetadataCommand {
+ self.verbose = verbose;
+ self
+ }
+
/// Builds a command for `cargo metadata`. This is the first
/// part of the work of `exec`.
pub fn cargo_command(&self) -> Command {
@@ -637,6 +759,8 @@ impl MetadataCommand {
}
cmd.args(&self.other_options);
+ cmd.envs(&self.env);
+
cmd
}
@@ -649,7 +773,11 @@ impl MetadataCommand {
/// Runs configured `cargo metadata` and returns parsed `Metadata`.
pub fn exec(&self) -> Result<Metadata> {
- let output = self.cargo_command().output()?;
+ let mut command = self.cargo_command();
+ if self.verbose {
+ command.stderr(Stdio::inherit());
+ }
+ let output = command.output()?;
if !output.status.success() {
return Err(Error::CargoMetadata {
stderr: String::from_utf8(output.stderr)?,
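
The `root_package()` change above matters mainly when `no_deps` is in play: without a resolve graph the method now falls back to matching `<workspace_root>/Cargo.toml` against each package's `manifest_path`. A hedged sketch of how the new knobs compose with that fallback; the manifest path and environment value are placeholders, not fixtures from this crate:

    use cargo_metadata::MetadataCommand;

    fn main() -> Result<(), cargo_metadata::Error> {
        let metadata = MetadataCommand::new()
            .manifest_path("./Cargo.toml")                    // placeholder path
            .no_deps()                                        // skip dependency resolution
            .env("CARGO_NET_GIT_FETCH_WITH_CLI", "true")      // merged into cargo's environment
            .verbose(true)                                    // let cargo's stderr pass through
            .exec()?;

        // With no_deps, `resolve` is None, so this takes the manifest_path fallback.
        if let Some(root) = metadata.root_package() {
            println!("root package: {}", root.name);
        }
        Ok(())
    }
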
diff --git a/vendor/cargo_metadata/src/messages.rs b/vendor/cargo_metadata/src/messages.rs
index 0d086d3c3..ea2abd250 100644
--- a/vendor/cargo_metadata/src/messages.rs
+++ b/vendor/cargo_metadata/src/messages.rs
@@ -34,6 +34,9 @@ pub struct ArtifactProfile {
pub struct Artifact {
/// The package this artifact belongs to
pub package_id: PackageId,
+ /// Path to the `Cargo.toml` file
+ #[serde(default)]
+ pub manifest_path: Utf8PathBuf,
/// The target this artifact was compiled for
pub target: Target,
/// The profile this artifact was compiled with
diff --git a/vendor/cargo_metadata/tests/test_samples.rs b/vendor/cargo_metadata/tests/test_samples.rs
index 27a02f58f..3c747c595 100644
--- a/vendor/cargo_metadata/tests/test_samples.rs
+++ b/vendor/cargo_metadata/tests/test_samples.rs
@@ -344,6 +344,7 @@ fn all_the_fields() {
assert_eq!(all.categories, vec!["command-line-utilities"]);
assert_eq!(all.keywords, vec!["cli"]);
assert_eq!(all.readme, Some(Utf8PathBuf::from("README.md")));
+ assert!(all.readme().unwrap().ends_with("tests/all/README.md"));
assert_eq!(
all.repository,
Some("https://github.com/oli-obk/cargo_metadata/".to_string())
@@ -532,9 +533,9 @@ fn current_dir() {
fn parse_stream_is_robust() {
// Proc macros can print stuff to stdout, which naturally breaks JSON messages.
// Let's check that we don't die horribly in this case, and report an error.
- let json_output = r##"{"reason":"compiler-artifact","package_id":"chatty 0.1.0 (path+file:///chatty-macro/chatty)","target":{"kind":["proc-macro"],"crate_types":["proc-macro"],"name":"chatty","src_path":"/chatty-macro/chatty/src/lib.rs","edition":"2018","doctest":true},"profile":{"opt_level":"0","debuginfo":2,"debug_assertions":true,"overflow_checks":true,"test":false},"features":[],"filenames":["/chatty-macro/target/debug/deps/libchatty-f2adcff24cdf3bb2.so"],"executable":null,"fresh":false}
+ let json_output = r##"{"reason":"compiler-artifact","package_id":"chatty 0.1.0 (path+file:///chatty-macro/chatty)","manifest_path":"chatty-macro/Cargo.toml","target":{"kind":["proc-macro"],"crate_types":["proc-macro"],"name":"chatty","src_path":"/chatty-macro/chatty/src/lib.rs","edition":"2018","doctest":true},"profile":{"opt_level":"0","debuginfo":2,"debug_assertions":true,"overflow_checks":true,"test":false},"features":[],"filenames":["/chatty-macro/target/debug/deps/libchatty-f2adcff24cdf3bb2.so"],"executable":null,"fresh":false}
Evil proc macro was here!
-{"reason":"compiler-artifact","package_id":"chatty-macro 0.1.0 (path+file:///chatty-macro)","target":{"kind":["lib"],"crate_types":["lib"],"name":"chatty-macro","src_path":"/chatty-macro/src/lib.rs","edition":"2018","doctest":true},"profile":{"opt_level":"0","debuginfo":2,"debug_assertions":true,"overflow_checks":true,"test":false},"features":[],"filenames":["/chatty-macro/target/debug/libchatty_macro.rlib","/chatty-macro/target/debug/deps/libchatty_macro-cb5956ed52a11fb6.rmeta"],"executable":null,"fresh":false}
+{"reason":"compiler-artifact","package_id":"chatty-macro 0.1.0 (path+file:///chatty-macro)","manifest_path":"chatty-macro/Cargo.toml","target":{"kind":["lib"],"crate_types":["lib"],"name":"chatty-macro","src_path":"/chatty-macro/src/lib.rs","edition":"2018","doctest":true},"profile":{"opt_level":"0","debuginfo":2,"debug_assertions":true,"overflow_checks":true,"test":false},"features":[],"filenames":["/chatty-macro/target/debug/libchatty_macro.rlib","/chatty-macro/target/debug/deps/libchatty_macro-cb5956ed52a11fb6.rmeta"],"executable":null,"fresh":false}
"##;
let mut n_messages = 0;
let mut text = String::new();
@@ -622,3 +623,25 @@ fn depkind_to_string() {
assert_eq!(DependencyKind::Build.to_string(), "build");
assert_eq!(DependencyKind::Unknown.to_string(), "Unknown");
}
+
+#[test]
+fn basic_workspace_root_package_exists() {
+ // First try with dependencies
+ let meta = MetadataCommand::new()
+ .manifest_path("tests/basic_workspace/Cargo.toml")
+ .exec()
+ .unwrap();
+ assert_eq!(meta.root_package().unwrap().name, "ex_bin");
+ // Now with no_deps, it should still work exactly the same
+ let meta = MetadataCommand::new()
+ .manifest_path("tests/basic_workspace/Cargo.toml")
+ .no_deps()
+ .exec()
+ .unwrap();
+ assert_eq!(
+ meta.root_package()
+ .expect("workspace root still exists when no_deps used")
+ .name,
+ "ex_bin"
+ );
+}
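
The `readme()` fix exercised by the new assertion above joins a relative readme onto the manifest's parent directory rather than onto the `Cargo.toml` path itself, which used to produce paths ending in `Cargo.toml/README.md`. The same join in isolation, using camino directly with placeholder paths (Unix-style separators assumed):

    use camino::Utf8Path;

    fn main() {
        let manifest_path = Utf8Path::new("/work/my-crate/Cargo.toml"); // placeholder path
        let readme = "README.md";

        // Old behaviour: joining onto the manifest file itself.
        let wrong = manifest_path.join(readme);
        assert_eq!(wrong.as_str(), "/work/my-crate/Cargo.toml/README.md");

        // Fixed behaviour: join onto the manifest's directory, keeping the
        // manifest path itself as a fallback when there is no parent.
        let right = manifest_path.parent().unwrap_or(manifest_path).join(readme);
        assert_eq!(right.as_str(), "/work/my-crate/README.md");
    }
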
diff --git a/vendor/cc/.cargo-checksum.json b/vendor/cc/.cargo-checksum.json
index 1b81a54d2..4dc2fe239 100644
--- a/vendor/cc/.cargo-checksum.json
+++ b/vendor/cc/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.lock":"35e9b5c9b3f3c70765a2749ea977aa0b2c8f5ce8872afbd4a5ba7cd59befba6a","Cargo.toml":"129464bf762db9e7db00f3e80d4b702b8c69a6e5fab070db0c8e66d4f693765e","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"58af5106352aafa62175a90f8a5f25fa114028bf909220dc0735d79745999ec1","src/bin/gcc-shim.rs":"b77907875029494b6288841c3aed2e4939ed40708c7f597fca5c9e2570490ca6","src/com.rs":"29d0dee08a656ab1a4cc3e5fe24542e0fab5c1373cbc9b05059f7572cf9b8313","src/lib.rs":"2403bbe39ff511ea5a517c0841d825173a4fdc8a0899c64282bba49127f0dc33","src/registry.rs":"98ae2b71781acc49297e5544fa0cf059f735636f8f1338edef8dbf7232443945","src/setup_config.rs":"72deaf1927c0b713fd5c2b2d5b8f0ea3a303a00fda1579427895cac26a94122d","src/vs_instances.rs":"2d3f8278a803b0e7052f4eeb1979b29f963dd0143f4458e2cb5f33c4e5f0963b","src/winapi.rs":"e128e95b2d39ae7a02f54a7e25d33c488c14759b9f1a50a449e10545856950c3","src/windows_registry.rs":"c0340379c1f540cf96f45bbd4cf8fc28db555826f30ac937b75b87e4377b716b","tests/cc_env.rs":"e02b3b0824ad039b47e4462c5ef6dbe6c824c28e7953af94a0f28f7b5158042e","tests/cflags.rs":"57f06eb5ce1557e5b4a032d0c4673e18fbe6f8d26c1deb153126e368b96b41b3","tests/cxxflags.rs":"c2c6c6d8a0d7146616fa1caed26876ee7bc9fcfffd525eb4743593cade5f3371","tests/support/mod.rs":"a3c8d116973bb16066bf6ec4de5143183f97de7aad085d85f8118a2eaac3e1e0","tests/test.rs":"61fb35ae6dd5cf506ada000bdd82c92e9f8eac9cc053b63e83d3f897436fbf8f"},"package":"e9f73505338f7d905b19d18738976aae232eb46b8efc15554ffc56deb5d9ebe4"} \ No newline at end of file
+{"files":{"Cargo.lock":"23c26d62ba5114f5ac6e7ffa3ea233cea77e5cb7f98d9f056f40fe2c49971f67","Cargo.toml":"fd4b39488866b6717476fadc460ff91c89511628080769516eec452c0def8bc7","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"58af5106352aafa62175a90f8a5f25fa114028bf909220dc0735d79745999ec1","src/bin/gcc-shim.rs":"b77907875029494b6288841c3aed2e4939ed40708c7f597fca5c9e2570490ca6","src/com.rs":"29d0dee08a656ab1a4cc3e5fe24542e0fab5c1373cbc9b05059f7572cf9b8313","src/lib.rs":"e0cc228db97675d6a0d86b219a20e9e48925a1ccbfd9e9fd038ccf6ef129957e","src/registry.rs":"98ae2b71781acc49297e5544fa0cf059f735636f8f1338edef8dbf7232443945","src/setup_config.rs":"72deaf1927c0b713fd5c2b2d5b8f0ea3a303a00fda1579427895cac26a94122d","src/vs_instances.rs":"2d3f8278a803b0e7052f4eeb1979b29f963dd0143f4458e2cb5f33c4e5f0963b","src/winapi.rs":"e128e95b2d39ae7a02f54a7e25d33c488c14759b9f1a50a449e10545856950c3","src/windows_registry.rs":"c0340379c1f540cf96f45bbd4cf8fc28db555826f30ac937b75b87e4377b716b","tests/cc_env.rs":"e02b3b0824ad039b47e4462c5ef6dbe6c824c28e7953af94a0f28f7b5158042e","tests/cflags.rs":"57f06eb5ce1557e5b4a032d0c4673e18fbe6f8d26c1deb153126e368b96b41b3","tests/cxxflags.rs":"c2c6c6d8a0d7146616fa1caed26876ee7bc9fcfffd525eb4743593cade5f3371","tests/support/mod.rs":"a3c8d116973bb16066bf6ec4de5143183f97de7aad085d85f8118a2eaac3e1e0","tests/test.rs":"61fb35ae6dd5cf506ada000bdd82c92e9f8eac9cc053b63e83d3f897436fbf8f"},"package":"a20104e2335ce8a659d6dd92a51a767a0c062599c73b343fd152cb401e828c3d"} \ No newline at end of file
diff --git a/vendor/cc/Cargo.lock b/vendor/cc/Cargo.lock
index c9564008e..2d065bc6a 100644
--- a/vendor/cc/Cargo.lock
+++ b/vendor/cc/Cargo.lock
@@ -10,7 +10,7 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
name = "cc"
-version = "1.0.77"
+version = "1.0.78"
dependencies = [
"jobserver",
"tempfile",
@@ -51,9 +51,9 @@ dependencies = [
[[package]]
name = "libc"
-version = "0.2.137"
+version = "0.2.138"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fc7fcc620a3bff7cdd7a365be3376c97191aeaccc2a603e600951e452615bf89"
+checksum = "db6d7e329c562c5dfab7a46a2afabc8b987ab9a4834c9d1ca04dc54c1546cef8"
[[package]]
name = "redox_syscall"
diff --git a/vendor/cc/Cargo.toml b/vendor/cc/Cargo.toml
index 4ec5fa658..c4ec0bf79 100644
--- a/vendor/cc/Cargo.toml
+++ b/vendor/cc/Cargo.toml
@@ -12,7 +12,7 @@
[package]
edition = "2018"
name = "cc"
-version = "1.0.77"
+version = "1.0.78"
authors = ["Alex Crichton <alex@alexcrichton.com>"]
exclude = ["/.github"]
description = """
diff --git a/vendor/cc/src/lib.rs b/vendor/cc/src/lib.rs
index 486d67e0b..1ebd2cc7a 100644
--- a/vendor/cc/src/lib.rs
+++ b/vendor/cc/src/lib.rs
@@ -56,11 +56,12 @@
#![allow(deprecated)]
#![deny(missing_docs)]
-use std::collections::HashMap;
+use std::collections::{hash_map, HashMap};
use std::env;
use std::ffi::{OsStr, OsString};
use std::fmt::{self, Display, Formatter};
use std::fs;
+use std::hash::Hasher;
use std::io::{self, BufRead, BufReader, Read, Write};
use std::path::{Component, Path, PathBuf};
use std::process::{Child, Command, Stdio};
@@ -1023,7 +1024,24 @@ impl Build {
let mut objects = Vec::new();
for file in self.files.iter() {
- let obj = dst.join(file).with_extension("o");
+ let obj = if file.has_root() {
+ // If `file` is an absolute path, prefix the `basename`
+ // with the `dirname`'s hash to ensure name uniqueness.
+ let basename = file
+ .file_name()
+ .ok_or_else(|| Error::new(ErrorKind::InvalidArgument, "file_name() failure"))?
+ .to_string_lossy();
+ let dirname = file
+ .parent()
+ .ok_or_else(|| Error::new(ErrorKind::InvalidArgument, "parent() failure"))?
+ .to_string_lossy();
+ let mut hasher = hash_map::DefaultHasher::new();
+ hasher.write(dirname.to_string().as_bytes());
+ dst.join(format!("{:016x}-{}", hasher.finish(), basename))
+ .with_extension("o")
+ } else {
+ dst.join(file).with_extension("o")
+ };
let obj = if !obj.starts_with(&dst) {
dst.join(obj.file_name().ok_or_else(|| {
Error::new(ErrorKind::IOError, "Getting object file details failed.")
@@ -1339,12 +1357,14 @@ impl Build {
}
fn compile_object(&self, obj: &Object) -> Result<(), Error> {
- let is_asm = is_asm(&obj.src);
+ let asm_ext = AsmFileExt::from_path(&obj.src);
+ let is_asm = asm_ext.is_some();
let target = self.get_target()?;
let msvc = target.contains("msvc");
let compiler = self.try_get_compiler()?;
let clang = compiler.family == ToolFamily::Clang;
- let (mut cmd, name) = if msvc && is_asm {
+
+ let (mut cmd, name) = if msvc && asm_ext == Some(AsmFileExt::DotAsm) {
self.msvc_macro_assembler()?
} else {
let mut cmd = compiler.to_command();
@@ -1367,7 +1387,7 @@ impl Build {
if !msvc || !is_asm || !is_arm {
cmd.arg("-c");
}
- if self.cuda && self.files.len() > 1 {
+ if self.cuda && self.cuda_file_count() > 1 {
cmd.arg("--device-c");
}
if is_asm {
@@ -1690,7 +1710,7 @@ impl Build {
cmd.args.push("--target=aarch64-unknown-windows-gnu".into())
}
} else {
- cmd.args.push(format!("--target={}", target).into());
+ cmd.push_cc_arg(format!("--target={}", target).into());
}
}
}
@@ -2035,7 +2055,7 @@ impl Build {
self.assemble_progressive(dst, chunk)?;
}
- if self.cuda {
+ if self.cuda && self.cuda_file_count() > 0 {
// Link the device-side code and add it to the target library,
// so that non-CUDA linker can link the final binary.
@@ -2645,10 +2665,29 @@ impl Build {
"emar".to_string()
} else if target.contains("msvc") {
- match windows_registry::find(&target, "lib.exe") {
- Some(t) => return Ok((t, "lib.exe".to_string())),
- None => "lib.exe".to_string(),
+ let compiler = self.get_base_compiler()?;
+ let mut lib = String::new();
+ if compiler.family == (ToolFamily::Msvc { clang_cl: true }) {
+ // See if there is 'llvm-lib' next to 'clang-cl'
+ // Another possibility could be to see if there is 'clang'
+ // next to 'clang-cl' and use 'search_programs()' to locate
+ // 'llvm-lib'. This is because 'clang-cl' doesn't support
+ // the -print-search-dirs option.
+ if let Some(mut cmd) = which(&compiler.path) {
+ cmd.pop();
+ cmd.push("llvm-lib.exe");
+ if let Some(llvm_lib) = which(&cmd) {
+ lib = llvm_lib.to_str().unwrap().to_owned();
+ }
+ }
+ }
+ if lib.is_empty() {
+ lib = match windows_registry::find(&target, "lib.exe") {
+ Some(t) => return Ok((t, "lib.exe".to_string())),
+ None => "lib.exe".to_string(),
+ }
}
+ lib
} else if target.contains("illumos") {
// The default 'ar' on illumos uses a non-standard flags,
// but the OS comes bundled with a GNU-compatible variant.
@@ -3010,6 +3049,13 @@ impl Build {
cache.insert(sdk.into(), ret.clone());
Ok(ret)
}
+
+ fn cuda_file_count(&self) -> usize {
+ self.files
+ .iter()
+ .filter(|file| file.extension() == Some(OsStr::new("cu")))
+ .count()
+ }
}
impl Default for Build {
@@ -3496,14 +3542,27 @@ fn which(tool: &Path) -> Option<PathBuf> {
})
}
-/// Check if the file's extension is either "asm" or "s", case insensitive.
-fn is_asm(file: &Path) -> bool {
- if let Some(ext) = file.extension() {
- if let Some(ext) = ext.to_str() {
- let ext = ext.to_lowercase();
- return ext == "asm" || ext == "s";
+#[derive(Clone, Copy, PartialEq)]
+enum AsmFileExt {
+ /// `.asm` files. On MSVC targets, we assume these should be passed to MASM
+ /// (`ml{,64}.exe`).
+ DotAsm,
+ /// `.s` or `.S` files, which do not have the special handling on MSVC targets.
+ DotS,
+}
+
+impl AsmFileExt {
+ fn from_path(file: &Path) -> Option<Self> {
+ if let Some(ext) = file.extension() {
+ if let Some(ext) = ext.to_str() {
+ let ext = ext.to_lowercase();
+ match &*ext {
+ "asm" => return Some(AsmFileExt::DotAsm),
+ "s" => return Some(AsmFileExt::DotS),
+ _ => return None,
+ }
+ }
}
+ None
}
-
- false
}
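
The object-naming hunk near the top of this file's diff prefixes the basename of an absolute source path with a hash of its directory, so sources with identical file names in different directories no longer collide in the output directory. A self-contained sketch of that scheme; the paths are placeholders and cc-rs's own Error plumbing is elided:

    use std::collections::hash_map::DefaultHasher;
    use std::hash::Hasher;
    use std::path::{Path, PathBuf};

    // Mirror of the scheme in the diff: hash the dirname, keep the basename.
    fn object_path_for(dst: &Path, file: &Path) -> PathBuf {
        if file.has_root() {
            let basename = file
                .file_name()
                .expect("source file has a file name")
                .to_string_lossy();
            let dirname = file
                .parent()
                .expect("absolute source file has a parent")
                .to_string_lossy();
            let mut hasher = DefaultHasher::new();
            hasher.write(dirname.as_bytes());
            dst.join(format!("{:016x}-{}", hasher.finish(), basename))
                .with_extension("o")
        } else {
            // Relative paths keep the old layout under the output directory.
            dst.join(file).with_extension("o")
        }
    }

    fn main() {
        let dst = Path::new("target/build-out"); // placeholder output dir
        let a = object_path_for(dst, Path::new("/src/foo/util.c"));
        let b = object_path_for(dst, Path::new("/src/bar/util.c"));
        // Same basename, different directories, distinct object files.
        assert_ne!(a, b);
        println!("{}\n{}", a.display(), b.display());
    }
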
diff --git a/vendor/compiler_builtins/.cargo-checksum.json b/vendor/compiler_builtins/.cargo-checksum.json
index 54ae276f1..f32559335 100644
--- a/vendor/compiler_builtins/.cargo-checksum.json
+++ b/vendor/compiler_builtins/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.lock":"a9e7ce22f7353779f9059b6b92112460a3918f153eac67f97ef4480422fdea47","Cargo.toml":"ac23c639d992d31e4aa00259623a3a1bf2e28c14ae6eec9bc2c6fe61d4249cda","LICENSE.txt":"0e13fed90654e0bc677d624a2d770833a09541fe0c0bdb3d051b3d081207393a","README.md":"5eb36fbab30693dbbe9f0de54749c95bd06fd6e42013b5b9eff3c062b9fdd34f","build.rs":"eea8b74d2b7ad2d3b51df7900d9af31b37ee00faacd9deff1a486d7b557e228a","examples/intrinsics.rs":"a7aa69c17af3aa8f6edff32c214e80827d3cbe3aea386a2be42244444752d253","libm/src/math/acos.rs":"fb066ba84aba1372d706425ec14f35ff8d971756d15eeebd22ecf42a716493bb","libm/src/math/acosf.rs":"a112b82309bba1d35c4e3d6ad4d6c21ef305343d9ab601ddf4bc61d43bc9f1af","libm/src/math/acosh.rs":"99de01ded7922bb93a882ad5ad8b472b5cae0059dea0bdca2077f65e94483150","libm/src/math/acoshf.rs":"10750c4d39ef6717b20a15ef1ce43e15eb851682d2f820f7e94501adec98b9a5","libm/src/math/asin.rs":"095a1e98996daff45df0b154ca0ec35bbf31db964ee9fdda0207308cb20df441","libm/src/math/asinf.rs":"49cccb4db2881982643a4a7d5453f4f8daf527711bbb67313607a3c178856d61","libm/src/math/asinh.rs":"4dd51affa71cce34a192ad66154e248f8d1c4b40fb497f29052333e425bb740f","libm/src/math/asinhf.rs":"914bfecf449f5e2bce786aa12c056d419073c6011d41c1bab7c39ba765fa4c53","libm/src/math/atan.rs":"d4fe46e1c5739dd09997869dcfbc3c85f03c534af52e700d6c6bcf9c3fedda07","libm/src/math/atan2.rs":"2623bc8ca707d13a7092ce49adf68e9cbf4452ad1bf4a861dc40ca858606a747","libm/src/math/atan2f.rs":"dd01943e0e1f1955912e5c3ffc9467529cf64bd02ac0a6ad5ab31dbe6657f05d","libm/src/math/atanf.rs":"e41b41569474a59c970ede3538e00bda4072cf4d90040017101cc79d7dc28caa","libm/src/math/atanh.rs":"57a8fb3f0f116fa4a966ac6bc2abd5f80236ead8e79013f468bd3786921f7110","libm/src/math/atanhf.rs":"6f2e57aaec1b5fc7609cb3938b3d155f51b4237dbda530739c34a0448cd9beb9","libm/src/math/cbrt.rs":"f2c45612d2eecd93cfcdd9ebf824c754fc8f8dfd6d16862c0b9c4ccea78c2a0f","libm/src/math/cbrtf.rs":"ad0b483854aa9f17a44d36c049bf0e8ebab34c27e90b787c05f45cc230ec7d19","libm/src/math/ceil.rs":"57ba5b6e207a0ccbd34190d1aa544389ca12126be23821dfb5746497f620ce03","libm/src/math/ceilf.rs":"c922a0475a599b9ea5473e615f74700b99707cebd6927f24ea59cb2a3cb3bbc3","libm/src/math/copysign.rs":"8b6440a251f0f1509d87f18122f74d0d5c03d0b60517e89e441434a3c5d84591","libm/src/math/copysignf.rs":"87d35436d224852ada93a2e93f6730cf1a727b808dd10e7d49ab4585866e336b","libm/src/math/cos.rs":"74babdc13ede78e400c5ca1854c3e22d2e08cbdc5618aefa5bba6f9303ef65b6","libm/src/math/cosf.rs":"09c40f93c445b741e22477ceedf163ca33b6a47f973f7c9876cfba2692edb29c","libm/src/math/cosh.rs":"0d0a7cef18577f321996b8b87561963139f754ad7f2ea0a3b3883811f3f0693a","libm/src/math/coshf.rs":"be8ca8739e4cf1978425b349f941cb4838bba8c10cb559c7940b9fd4fdde21ad","libm/src/math/erf.rs":"52cc9d9d54074a692001fb2d8215cd6903b645d4291ea20482455bc7f6947726","libm/src/math/erff.rs":"d37af67007fe4e9bce994c8c9805dd8af1b0ada68a10db8d8db13424dce65d09","libm/src/math/exp.rs":"ca7405ad0d1993fffcf9aae96f9256307bed3c4916545aaebd1cf1d2df1807fa","libm/src/math/exp10.rs":"2e136c6ecedd8e57a6c31796f57fae4546fcfd8bc6be66c836f553df9c74b907","libm/src/math/exp10f.rs":"9a3ce506ec587066a355ab74e0eb69a03a214ac405718087ae9772365050b20b","libm/src/math/exp2.rs":"94a9304a2ce3bc81f6d2aefd3cde6faa30f13260d46cb13692863cdea1c9a3a1","libm/src/math/exp2f.rs":"785f2630accd35118ec07bf60273e219ed91a215b956b1552eeea5bc2a708cc8","libm/src/math/expf.rs":"ec14c18f891a9e37735ec39e6fc2e9bf674a2c2e083f22e2533b481177359c98","libm/src/math/expm1.rs":"124069f456c8ad331f265c7509d9e223b2a300e461bbfd3d6adfdcdd2ee5b8ac","libm/src/math/expm1f.
rs":"18e2116d31ea8410051cc709b9d04b754b0e3ba6758ee1bf0b48749f4999b840","libm/src/math/expo2.rs":"4f4f9fecfccb43f30c2784aa7c0bb656754a52b8ab431f7d1b551c673ab133f1","libm/src/math/fabs.rs":"e6c7db39f98508098cdf64ac0c2f53866c466149a7490afb9fe22b44c4dd81b3","libm/src/math/fabsf.rs":"83a1f5f4d9ca899ba2b701d7332e18b40258b83e111db4c5d8fab2cc1be58aa3","libm/src/math/fdim.rs":"8ec091996005207297c2389ae563e1b18dbc6a9eac951de29a976c5cd7bc32a7","libm/src/math/fdimf.rs":"c7f3f2269834d55be26b6580ddc07c42531577955fa4de35bad1e2a361085614","libm/src/math/fenv.rs":"916ae11e4763588518d64dee82afb41be9d1ee38ecc0679c821d4e7e22cd3dc5","libm/src/math/floor.rs":"5050804cae173af6775c0678d6c1aafb5ca2b744bc8a2f50d9d03b95dcee1fb0","libm/src/math/floorf.rs":"c903e0c57bc60a888c513eb7a873a87a4759ba68fc791b6b931652f8ee74cc03","libm/src/math/fma.rs":"d4995977bb2362efa5986002c904b28a63e0210c85758af37f9ef80278d46a07","libm/src/math/fmaf.rs":"1db6ee0d47ddbdb441cfe167edf89b431239f5805708fd0376cf5c01349a4bd6","libm/src/math/fmax.rs":"f6c8e96a8b1a170648d2fa3513e7b6b459085d708c839869f82e305fe58fac37","libm/src/math/fmaxf.rs":"dff0025433232e8a5ec7bd54d847ccf596d762ea4e35f5c54fbaac9404d732fd","libm/src/math/fmin.rs":"95b6cb66ca0e0e22276f0bf88dbe8fb69796a69a196a7491bd4802efbcf2e298","libm/src/math/fminf.rs":"304bc839b15ea3d84e68d2af9f40524ec120d30a36a667b22fcb98a6c258f4c7","libm/src/math/fmod.rs":"a1c0550fc7df8164733d914e222ff0966a2ab886d6e75a1098f24fe0283ae227","libm/src/math/fmodf.rs":"ee51ed092c0eeb8195f35735ff725cfd46612e0d689a7c483538bd92fbe61828","libm/src/math/frexp.rs":"28af70026922a8ab979744c7ad4d8faba6079c4743b7eeb6d14c983a982fbbcc","libm/src/math/frexpf.rs":"2e2593ae8002ba420809ebfaf737ef001cdc912354be3d978a8c0cb930350d4d","libm/src/math/hypot.rs":"841131c4a0cea75bc8a86e29f3f6d0815a61fc99731c9984651ce83d3050d218","libm/src/math/hypotf.rs":"5f317323edc2eb699580fe54b074b7e570a7734d51a0a149c0b49b54470a836c","libm/src/math/ilogb.rs":"d178ad7ca3439f82d565962b143f20448e45b2e2c51357b127abaec683297e32","libm/src/math/ilogbf.rs":"00f2b1b0496e21c6a42d68aea74d7156fa2ff0a735741b9051f3ca1cf0f57586","libm/src/math/j0.rs":"9572b6396c489927d332d0e717920e61ec0618e5e9c31f7eeeec70f5e4abab06","libm/src/math/j0f.rs":"802c8254bded9b3afb6eea8b9af240038a5a4a5d811396729f69ca509e3e7d87","libm/src/math/j1.rs":"97b1af1611fa3d110c2b349ee8e4176100132ea1391b619086b47ac063b81803","libm/src/math/j1f.rs":"9c9b128752e8ea2e7d81b637ba84907ab54a545e7602c49167b313743927930b","libm/src/math/jn.rs":"847d122334e5707ad9627146cddccc082a1f2f5bcd3e5ef54399013a7007ce88","libm/src/math/jnf.rs":"4045076f7d1a1b89882ed60d4dd60a4cbbc66b85cfb90491378c8015effcc476","libm/src/math/k_cos.rs":"f34a69e44d6b8901b03b578a75972f438ab20a7b98a0903fc1903d6fde3899be","libm/src/math/k_cosf.rs":"8f7117ff21cebf8e890a5bcfd7ea858a94172f4172b79a66d53824c2cb0888b1","libm/src/math/k_expo2.rs":"eb4ca9e6a525b7ea6da868c3cb136896682cc46f8396ba2a2ebc3ae9e9ba54b0","libm/src/math/k_expo2f.rs":"d51ad5df61cb5d1258bdb90c52bfed4572bb446a9337de9c04411ed9454ae0cb","libm/src/math/k_sin.rs":"14b2aba6ca07150c92768b5a72acaf5cde6a11d6619e14896512a7ba242e289a","libm/src/math/k_sinf.rs":"2775fcc710807164e6f37a4f8da3c8143cd5f16e19ce7c31c5591522151d7a96","libm/src/math/k_tan.rs":"a72beae4ccd9631eeeb61d6365bbeecae81c8411f3120a999c515cca0d5ea5c5","libm/src/math/k_tanf.rs":"6a794be56fa4b2f60452b9bab19af01c388f174560acbf829a351378ea39495d","libm/src/math/ldexp.rs":"b647f0096e80e4d926d8dd18d294c892ee2cb1778effe2c5e1b2664ae5cb1a4e","libm/src/math/ldexpf.rs":"98743fad2cd97a7be496f40ba3157ac1438fce0d0c25d5ab90c3b8c71c3fd0ed"
,"libm/src/math/lgamma.rs":"0edd18e4f96bfcbe8b1b5af3eeca5208cd6d2d479dfa5ad117c9dfeccecf614f","libm/src/math/lgamma_r.rs":"f44a37aeccd56559ef784ae8edf217d14ad5cc2d910f0a65e70ffc86d7dc23dd","libm/src/math/lgammaf.rs":"967845357758b868a571857ec001f9f9154001110b8e97c08b6d10586bed9c49","libm/src/math/lgammaf_r.rs":"7143016d60e11fa235d53968125e57231b1104ce52149b5e1eed39629e0d1ff0","libm/src/math/log.rs":"b5e0c5f30d9e94351488732801be3107c12b854c3f95ad37e256dd88eeca408f","libm/src/math/log10.rs":"3425ff8be001fd1646ba15e254eb6ef4bdc6ccaf0cbee27ddf1fa84e04178b90","libm/src/math/log10f.rs":"fee4f71879bc4c99259e68c0c641364901629fb29a8ebddfcc0d090102cceddd","libm/src/math/log1p.rs":"9cf400852f165e6be19b97036ae9521fb9ca857d0a9a91c117d9123221622185","libm/src/math/log1pf.rs":"2716e6d2afa271996b7c8f47fd9e4952c88f4c1fd8c07c3e8ce8c62794bf71d8","libm/src/math/log2.rs":"dbbbfbaaa8aa6a4dbefea554ea3983090a9691228b011910c751f6adca912c40","libm/src/math/log2f.rs":"92a90350d8edce21c31c285c3e620fca7c62a2366008921715945c2c73b5b79f","libm/src/math/logf.rs":"845342cffc34d3db1f5ec12d8e5b773cd5a79056e28662fcb9bcd80207596f50","libm/src/math/mod.rs":"d694260529d51d0bc17f88ad557d852b9bb0bc3f7466cf7f62b679dc95ebba42","libm/src/math/modf.rs":"d012ed5a708ef52b6d1313c22a46cadaf5764dde1220816e3df2f03a0fcc60ae","libm/src/math/modff.rs":"f8f1e4c27a85d2cdb3c8e74439d59ef64aa543b948f22c23227d02d8388d61c2","libm/src/math/nextafter.rs":"3282e7eef214a32736fb6928d490198ad394b26b402b45495115b104839eebfe","libm/src/math/nextafterf.rs":"0937dc8a8155c19842c12181e741cec1f7df1f7a00cee81fcb2475e2842761b7","libm/src/math/pow.rs":"17c38297c5bf99accd915f292b777f8716ecf328916297c8bb9dfde6fd8ce522","libm/src/math/powf.rs":"2c423a0ea57fdc4e20f3533f744c6e6288c998b4de8f2914fafaa0e78be81b04","libm/src/math/rem_pio2.rs":"3e53234977daf61c89c29c940791714aad2f676a6f38188c7d17543a2aa8806f","libm/src/math/rem_pio2_large.rs":"482f31ff4e4eacf885f6130ae26a1d59f76b382059d6c742f30e5036811d3ca8","libm/src/math/rem_pio2f.rs":"07fb48f6d5cbadfd32ce4124b2b74af98b8391a2a6f36ce2a7d32e4500cb65ac","libm/src/math/remainder.rs":"63865f4370853c476b45bb27a5c54a4072146aa4a626835ae5263871a4e7e5dc","libm/src/math/remainderf.rs":"dd3fa432dbda8f2135428198be7bd69c57f8d13df3f365b12f52bf6a82352ac4","libm/src/math/remquo.rs":"3cc0bf55069f165c4843f2c358b3a27279c01e8cdd99f9057a3f7f31f45408f2","libm/src/math/remquof.rs":"cc749e18ecb7e766b8b8eeabdbf89ac99087d3d587e71e30f690676a3d2c1f9b","libm/src/math/rint.rs":"2c17047bcfd0ccdca8669f7cf70c628154ae4abc142660f30e37f9c073928706","libm/src/math/rintf.rs":"3b54af9eaa1bb6808159ca435246acf6a4e7aebbc344e3f4a4c5636345155897","libm/src/math/round.rs":"f10797ef15dd34a74e912ba8621d60bc0200c87b94308c9de3cc88d7aec4feb4","libm/src/math/roundf.rs":"27e37cfcf82373709e7debf9c0c18f7ed00ae0f5d97a214c388041f7a6996d35","libm/src/math/scalbn.rs":"b5c9d6d4177fe393cbfe1c634d75ce14b754f6cbce87c5bf979a9661491748a2","libm/src/math/scalbnf.rs":"4f198d06db1896386256fb9a5ac5b805b16b836226c18780a475cf18d7c1449c","libm/src/math/sin.rs":"bb483a2138ca779e03a191222636f0c60fd75a77a2a12f263bda4b6aa9136317","libm/src/math/sincos.rs":"1cf62a16c215e367f51078a3ba23a3f257682032a8f3c657293029a886b18d82","libm/src/math/sincosf.rs":"b0f589e6ada8215944d7784f420c6721c90387d799e349ce7676674f3c475e75","libm/src/math/sinf.rs":"dcddac1d56b084cbb8d0e019433c9c5fe2201d9b257a7dcf2f85c9a8f14b79cf","libm/src/math/sinh.rs":"d8ee4c7af883a526f36c1a6da13bb81fba9181b477e2f2538161a2bee97edc35","libm/src/math/sinhf.rs":"d06eb030ba9dbf7094df127262bfe99f149b4db49fa8ab8c15499660f1e46b26","libm/src/math/sq
rt.rs":"824570a631c2542ccee68b65e3eb08fe79c037a29bbaaf54da5367e7b236124a","libm/src/math/sqrtf.rs":"4cf418d74f7751d522a642a9a8d6b86ee3472c6aaef44f0eb1bc26f4d8a90985","libm/src/math/tan.rs":"930ecedaadc60f704c2dfa4e15186f59713c1ba7d948529d215223b424827db5","libm/src/math/tanf.rs":"894156a3b107aee08461eb4e7e412fc049aa237d176ae705c6e3e2d7060d94e3","libm/src/math/tanh.rs":"f1f08eb98ed959a17370a7aaf0177be36e3764543424e78feb033ed3f5e8ec98","libm/src/math/tanhf.rs":"74027b0c672a4e64bdef6d7a3069b90caec50e1e7dbb2c12d2828f310502f41e","libm/src/math/tgamma.rs":"c889cfa49bbeb4dbb0941fe9fac3b4da7d5879dcf04a3b9bb6e56de529baf374","libm/src/math/tgammaf.rs":"0737b34777095d0e4d07fe533e8f105082dd4e8ece411bba6ae5993b45b9388c","libm/src/math/trunc.rs":"642264897cc1505e720c8cf313be81aa9fd53aae866644a2e988d01dbc77fd8a","libm/src/math/truncf.rs":"dee3607baf1af0f01deae46e429e097234c50b268eaefebbe716f19f38597900","src/arm.rs":"acf149932aa46a2755cf8cd2eb7d6ae249e46b1e10ad45ce5f924561945d1273","src/arm_linux.rs":"35a4cb7b75015543feb15b0c692da0faf0e6037d3b97a4a18067ba416eae1a70","src/float/add.rs":"3ec32ceaf470a89777b54f9cde61832fdadeade0f4894f268a949e968520bc57","src/float/cmp.rs":"79b1fdc8d5f943c4ad5ea4ad32623b18f63e17ac3852fbc64a4942228007e1fc","src/float/conv.rs":"e2b5e6fe398f35c7db4af62ba1fd79b39591fe1bfaf304ae825ed3c8cf902d9c","src/float/div.rs":"fe21115ecb1b3330569fd85cb51c650bf80683f152333db988d8e0d564a9ae11","src/float/extend.rs":"180b2e791c58e0526de0a798845c580ce3222c8a15c8665e6e6a4bf5cf1a34aa","src/float/mod.rs":"a91cf65abb6e715c5559e3e4bd87a69cd99a9552d54804d0b7137c02c513f158","src/float/mul.rs":"0d0c1f0c28c149ecadeafd459d3c4c9327e4cfcae2cba479957bb8010ef51a01","src/float/pow.rs":"2ada190738731eb6f24104f8fb8c4d6f03cfb16451536dbee32f2b33db0c4b19","src/float/sub.rs":"c2a87f4628f51d5d908d0f25b5d51ce0599dc559d5a72b20e131261f484d5848","src/float/trunc.rs":"d21d2a2f9a1918b4bbb594691e397972a7c04b74b2acf04016c55693abf6d24b","src/int/addsub.rs":"7ec45ce1ba15b56a5b7129d3e5722c4db764c6545306d3fa9090983bcabd6f17","src/int/leading_zeros.rs":"ccf5e9d098c80034dcf6e38437c9a2eb670fa8043558bbfb574f2293164729a6","src/int/mod.rs":"bab1b77535ceebdebb89fd4e59e7105f8c45347bb638351a626615b24544a0b1","src/int/mul.rs":"bb48d8fd42d8f9f5fe9271d8d0f7a92dbae320bf4346e19d1071eb2093cb8ed9","src/int/sdiv.rs":"ace4cb0ec388a38834e01cab2c5bc87182d31588dfc0b1ae117c11ed0c4781cf","src/int/shift.rs":"3967c28a8d61279546e91958d64745fec63f15aee9175eb0602cc6353830da6c","src/int/specialized_div_rem/asymmetric.rs":"27f5bf70a35109f9d4e4e1ad1e8003aa17da5a1e436bf3e63a493d7528a3a566","src/int/specialized_div_rem/binary_long.rs":"9f1ced81a394f000a21a329683144d68ee431a954136a3634eb55b1ee2cf6d51","src/int/specialized_div_rem/delegate.rs":"9df141af98e391361e25d71ae38d5e845a91d896edd2c041132fd46af8268e85","src/int/specialized_div_rem/mod.rs":"73c98b9f69cc9b101ae4c9081e82d66af1df4a58cf0c9bb2a8c8659265687f12","src/int/specialized_div_rem/norm_shift.rs":"3be7ee0dea545c1f702d9daf67dad2b624bf7b17b075c8b90d3d3f7b53df4c21","src/int/specialized_div_rem/trifecta.rs":"87eef69da255b809fd710b14f2eb3f9f59e3dac625f8564ebc8ba78f9763523b","src/int/udiv.rs":"3732b490a472505411577f008b92f489287745968ce6791665201201377d3475","src/lib.rs":"b1d55a4aa6ce37b086dd512060f380de4eb1944031eea4b4546403e007d38db2","src/macros.rs":"de690dffc59a5884ed06c67d38f06c41ed02fcd6318189397a0d4aafbd375ad8","src/math.rs":"3d7571ea68747f1e492e1fa5fe86512e0829654043f888892dbc0eb109fd0e69","src/mem/impls.rs":"a8d1c28a77d9b334872abbebfcba3fd1802175bef53c0b545e85242860698780","src/mem/mod.rs":"5034543d9631
49c14a6823bee32a1fb9dfd950c32153d37f97e9df1dc6c23129","src/mem/x86_64.rs":"9f740891f666acf384159128eef233d9e15c6120da8016370c6f9f05cc29d653","src/probestack.rs":"ef5c07e9b95de7b2b77a937789fcfefd9846274317489ad6d623e377c9888601","src/riscv.rs":"b43ede1713454c3e50b5a011964d336146155026cac6119767c96b70a165f10f","src/x86.rs":"117b50d6725ee0af0a7b3d197ea580655561f66a870ebc450d96af22bf7f39f6","src/x86_64.rs":"4f16bc9fad7757d48a6da3a078c715dd3a22154aadb4f1998d4c1b5d91396f9e"},"package":"13e81c6cd7ab79f51a0c927d22858d61ad12bd0b3865f0b13ece02a4486aeabb"} \ No newline at end of file
+{"files":{"Cargo.lock":"8875e3218be36f270df5da9194a8b49dd3762131ab5db87998fbf91a7e7a8ac7","Cargo.toml":"b7d74c9f0375b4ed4630c446b9a126e700821df6b4f4a2aa06054aea0d545269","LICENSE.txt":"0e13fed90654e0bc677d624a2d770833a09541fe0c0bdb3d051b3d081207393a","README.md":"5eb36fbab30693dbbe9f0de54749c95bd06fd6e42013b5b9eff3c062b9fdd34f","build.rs":"eea8b74d2b7ad2d3b51df7900d9af31b37ee00faacd9deff1a486d7b557e228a","examples/intrinsics.rs":"a7aa69c17af3aa8f6edff32c214e80827d3cbe3aea386a2be42244444752d253","libm/src/math/acos.rs":"fb066ba84aba1372d706425ec14f35ff8d971756d15eeebd22ecf42a716493bb","libm/src/math/acosf.rs":"a112b82309bba1d35c4e3d6ad4d6c21ef305343d9ab601ddf4bc61d43bc9f1af","libm/src/math/acosh.rs":"99de01ded7922bb93a882ad5ad8b472b5cae0059dea0bdca2077f65e94483150","libm/src/math/acoshf.rs":"10750c4d39ef6717b20a15ef1ce43e15eb851682d2f820f7e94501adec98b9a5","libm/src/math/asin.rs":"095a1e98996daff45df0b154ca0ec35bbf31db964ee9fdda0207308cb20df441","libm/src/math/asinf.rs":"49cccb4db2881982643a4a7d5453f4f8daf527711bbb67313607a3c178856d61","libm/src/math/asinh.rs":"4dd51affa71cce34a192ad66154e248f8d1c4b40fb497f29052333e425bb740f","libm/src/math/asinhf.rs":"914bfecf449f5e2bce786aa12c056d419073c6011d41c1bab7c39ba765fa4c53","libm/src/math/atan.rs":"d4fe46e1c5739dd09997869dcfbc3c85f03c534af52e700d6c6bcf9c3fedda07","libm/src/math/atan2.rs":"2623bc8ca707d13a7092ce49adf68e9cbf4452ad1bf4a861dc40ca858606a747","libm/src/math/atan2f.rs":"dd01943e0e1f1955912e5c3ffc9467529cf64bd02ac0a6ad5ab31dbe6657f05d","libm/src/math/atanf.rs":"e41b41569474a59c970ede3538e00bda4072cf4d90040017101cc79d7dc28caa","libm/src/math/atanh.rs":"57a8fb3f0f116fa4a966ac6bc2abd5f80236ead8e79013f468bd3786921f7110","libm/src/math/atanhf.rs":"6f2e57aaec1b5fc7609cb3938b3d155f51b4237dbda530739c34a0448cd9beb9","libm/src/math/cbrt.rs":"f2c45612d2eecd93cfcdd9ebf824c754fc8f8dfd6d16862c0b9c4ccea78c2a0f","libm/src/math/cbrtf.rs":"ad0b483854aa9f17a44d36c049bf0e8ebab34c27e90b787c05f45cc230ec7d19","libm/src/math/ceil.rs":"57ba5b6e207a0ccbd34190d1aa544389ca12126be23821dfb5746497f620ce03","libm/src/math/ceilf.rs":"c922a0475a599b9ea5473e615f74700b99707cebd6927f24ea59cb2a3cb3bbc3","libm/src/math/copysign.rs":"8b6440a251f0f1509d87f18122f74d0d5c03d0b60517e89e441434a3c5d84591","libm/src/math/copysignf.rs":"87d35436d224852ada93a2e93f6730cf1a727b808dd10e7d49ab4585866e336b","libm/src/math/cos.rs":"74babdc13ede78e400c5ca1854c3e22d2e08cbdc5618aefa5bba6f9303ef65b6","libm/src/math/cosf.rs":"09c40f93c445b741e22477ceedf163ca33b6a47f973f7c9876cfba2692edb29c","libm/src/math/cosh.rs":"0d0a7cef18577f321996b8b87561963139f754ad7f2ea0a3b3883811f3f0693a","libm/src/math/coshf.rs":"be8ca8739e4cf1978425b349f941cb4838bba8c10cb559c7940b9fd4fdde21ad","libm/src/math/erf.rs":"52cc9d9d54074a692001fb2d8215cd6903b645d4291ea20482455bc7f6947726","libm/src/math/erff.rs":"d37af67007fe4e9bce994c8c9805dd8af1b0ada68a10db8d8db13424dce65d09","libm/src/math/exp.rs":"ca7405ad0d1993fffcf9aae96f9256307bed3c4916545aaebd1cf1d2df1807fa","libm/src/math/exp10.rs":"2e136c6ecedd8e57a6c31796f57fae4546fcfd8bc6be66c836f553df9c74b907","libm/src/math/exp10f.rs":"9a3ce506ec587066a355ab74e0eb69a03a214ac405718087ae9772365050b20b","libm/src/math/exp2.rs":"94a9304a2ce3bc81f6d2aefd3cde6faa30f13260d46cb13692863cdea1c9a3a1","libm/src/math/exp2f.rs":"785f2630accd35118ec07bf60273e219ed91a215b956b1552eeea5bc2a708cc8","libm/src/math/expf.rs":"ec14c18f891a9e37735ec39e6fc2e9bf674a2c2e083f22e2533b481177359c98","libm/src/math/expm1.rs":"124069f456c8ad331f265c7509d9e223b2a300e461bbfd3d6adfdcdd2ee5b8ac","libm/src/math/expm1f.
rs":"18e2116d31ea8410051cc709b9d04b754b0e3ba6758ee1bf0b48749f4999b840","libm/src/math/expo2.rs":"4f4f9fecfccb43f30c2784aa7c0bb656754a52b8ab431f7d1b551c673ab133f1","libm/src/math/fabs.rs":"e6c7db39f98508098cdf64ac0c2f53866c466149a7490afb9fe22b44c4dd81b3","libm/src/math/fabsf.rs":"83a1f5f4d9ca899ba2b701d7332e18b40258b83e111db4c5d8fab2cc1be58aa3","libm/src/math/fdim.rs":"8ec091996005207297c2389ae563e1b18dbc6a9eac951de29a976c5cd7bc32a7","libm/src/math/fdimf.rs":"c7f3f2269834d55be26b6580ddc07c42531577955fa4de35bad1e2a361085614","libm/src/math/fenv.rs":"916ae11e4763588518d64dee82afb41be9d1ee38ecc0679c821d4e7e22cd3dc5","libm/src/math/floor.rs":"5050804cae173af6775c0678d6c1aafb5ca2b744bc8a2f50d9d03b95dcee1fb0","libm/src/math/floorf.rs":"c903e0c57bc60a888c513eb7a873a87a4759ba68fc791b6b931652f8ee74cc03","libm/src/math/fma.rs":"d4995977bb2362efa5986002c904b28a63e0210c85758af37f9ef80278d46a07","libm/src/math/fmaf.rs":"1db6ee0d47ddbdb441cfe167edf89b431239f5805708fd0376cf5c01349a4bd6","libm/src/math/fmax.rs":"f6c8e96a8b1a170648d2fa3513e7b6b459085d708c839869f82e305fe58fac37","libm/src/math/fmaxf.rs":"dff0025433232e8a5ec7bd54d847ccf596d762ea4e35f5c54fbaac9404d732fd","libm/src/math/fmin.rs":"95b6cb66ca0e0e22276f0bf88dbe8fb69796a69a196a7491bd4802efbcf2e298","libm/src/math/fminf.rs":"304bc839b15ea3d84e68d2af9f40524ec120d30a36a667b22fcb98a6c258f4c7","libm/src/math/fmod.rs":"a1c0550fc7df8164733d914e222ff0966a2ab886d6e75a1098f24fe0283ae227","libm/src/math/fmodf.rs":"ee51ed092c0eeb8195f35735ff725cfd46612e0d689a7c483538bd92fbe61828","libm/src/math/frexp.rs":"28af70026922a8ab979744c7ad4d8faba6079c4743b7eeb6d14c983a982fbbcc","libm/src/math/frexpf.rs":"2e2593ae8002ba420809ebfaf737ef001cdc912354be3d978a8c0cb930350d4d","libm/src/math/hypot.rs":"841131c4a0cea75bc8a86e29f3f6d0815a61fc99731c9984651ce83d3050d218","libm/src/math/hypotf.rs":"5f317323edc2eb699580fe54b074b7e570a7734d51a0a149c0b49b54470a836c","libm/src/math/ilogb.rs":"d178ad7ca3439f82d565962b143f20448e45b2e2c51357b127abaec683297e32","libm/src/math/ilogbf.rs":"00f2b1b0496e21c6a42d68aea74d7156fa2ff0a735741b9051f3ca1cf0f57586","libm/src/math/j0.rs":"9572b6396c489927d332d0e717920e61ec0618e5e9c31f7eeeec70f5e4abab06","libm/src/math/j0f.rs":"802c8254bded9b3afb6eea8b9af240038a5a4a5d811396729f69ca509e3e7d87","libm/src/math/j1.rs":"97b1af1611fa3d110c2b349ee8e4176100132ea1391b619086b47ac063b81803","libm/src/math/j1f.rs":"9c9b128752e8ea2e7d81b637ba84907ab54a545e7602c49167b313743927930b","libm/src/math/jn.rs":"847d122334e5707ad9627146cddccc082a1f2f5bcd3e5ef54399013a7007ce88","libm/src/math/jnf.rs":"4045076f7d1a1b89882ed60d4dd60a4cbbc66b85cfb90491378c8015effcc476","libm/src/math/k_cos.rs":"f34a69e44d6b8901b03b578a75972f438ab20a7b98a0903fc1903d6fde3899be","libm/src/math/k_cosf.rs":"8f7117ff21cebf8e890a5bcfd7ea858a94172f4172b79a66d53824c2cb0888b1","libm/src/math/k_expo2.rs":"eb4ca9e6a525b7ea6da868c3cb136896682cc46f8396ba2a2ebc3ae9e9ba54b0","libm/src/math/k_expo2f.rs":"d51ad5df61cb5d1258bdb90c52bfed4572bb446a9337de9c04411ed9454ae0cb","libm/src/math/k_sin.rs":"14b2aba6ca07150c92768b5a72acaf5cde6a11d6619e14896512a7ba242e289a","libm/src/math/k_sinf.rs":"2775fcc710807164e6f37a4f8da3c8143cd5f16e19ce7c31c5591522151d7a96","libm/src/math/k_tan.rs":"a72beae4ccd9631eeeb61d6365bbeecae81c8411f3120a999c515cca0d5ea5c5","libm/src/math/k_tanf.rs":"6a794be56fa4b2f60452b9bab19af01c388f174560acbf829a351378ea39495d","libm/src/math/ldexp.rs":"b647f0096e80e4d926d8dd18d294c892ee2cb1778effe2c5e1b2664ae5cb1a4e","libm/src/math/ldexpf.rs":"98743fad2cd97a7be496f40ba3157ac1438fce0d0c25d5ab90c3b8c71c3fd0ed"
,"libm/src/math/lgamma.rs":"0edd18e4f96bfcbe8b1b5af3eeca5208cd6d2d479dfa5ad117c9dfeccecf614f","libm/src/math/lgamma_r.rs":"f44a37aeccd56559ef784ae8edf217d14ad5cc2d910f0a65e70ffc86d7dc23dd","libm/src/math/lgammaf.rs":"967845357758b868a571857ec001f9f9154001110b8e97c08b6d10586bed9c49","libm/src/math/lgammaf_r.rs":"7143016d60e11fa235d53968125e57231b1104ce52149b5e1eed39629e0d1ff0","libm/src/math/log.rs":"b5e0c5f30d9e94351488732801be3107c12b854c3f95ad37e256dd88eeca408f","libm/src/math/log10.rs":"3425ff8be001fd1646ba15e254eb6ef4bdc6ccaf0cbee27ddf1fa84e04178b90","libm/src/math/log10f.rs":"fee4f71879bc4c99259e68c0c641364901629fb29a8ebddfcc0d090102cceddd","libm/src/math/log1p.rs":"9cf400852f165e6be19b97036ae9521fb9ca857d0a9a91c117d9123221622185","libm/src/math/log1pf.rs":"2716e6d2afa271996b7c8f47fd9e4952c88f4c1fd8c07c3e8ce8c62794bf71d8","libm/src/math/log2.rs":"dbbbfbaaa8aa6a4dbefea554ea3983090a9691228b011910c751f6adca912c40","libm/src/math/log2f.rs":"92a90350d8edce21c31c285c3e620fca7c62a2366008921715945c2c73b5b79f","libm/src/math/logf.rs":"845342cffc34d3db1f5ec12d8e5b773cd5a79056e28662fcb9bcd80207596f50","libm/src/math/mod.rs":"d694260529d51d0bc17f88ad557d852b9bb0bc3f7466cf7f62b679dc95ebba42","libm/src/math/modf.rs":"d012ed5a708ef52b6d1313c22a46cadaf5764dde1220816e3df2f03a0fcc60ae","libm/src/math/modff.rs":"f8f1e4c27a85d2cdb3c8e74439d59ef64aa543b948f22c23227d02d8388d61c2","libm/src/math/nextafter.rs":"3282e7eef214a32736fb6928d490198ad394b26b402b45495115b104839eebfe","libm/src/math/nextafterf.rs":"0937dc8a8155c19842c12181e741cec1f7df1f7a00cee81fcb2475e2842761b7","libm/src/math/pow.rs":"17c38297c5bf99accd915f292b777f8716ecf328916297c8bb9dfde6fd8ce522","libm/src/math/powf.rs":"2c423a0ea57fdc4e20f3533f744c6e6288c998b4de8f2914fafaa0e78be81b04","libm/src/math/rem_pio2.rs":"3e53234977daf61c89c29c940791714aad2f676a6f38188c7d17543a2aa8806f","libm/src/math/rem_pio2_large.rs":"482f31ff4e4eacf885f6130ae26a1d59f76b382059d6c742f30e5036811d3ca8","libm/src/math/rem_pio2f.rs":"07fb48f6d5cbadfd32ce4124b2b74af98b8391a2a6f36ce2a7d32e4500cb65ac","libm/src/math/remainder.rs":"63865f4370853c476b45bb27a5c54a4072146aa4a626835ae5263871a4e7e5dc","libm/src/math/remainderf.rs":"dd3fa432dbda8f2135428198be7bd69c57f8d13df3f365b12f52bf6a82352ac4","libm/src/math/remquo.rs":"3cc0bf55069f165c4843f2c358b3a27279c01e8cdd99f9057a3f7f31f45408f2","libm/src/math/remquof.rs":"cc749e18ecb7e766b8b8eeabdbf89ac99087d3d587e71e30f690676a3d2c1f9b","libm/src/math/rint.rs":"2c17047bcfd0ccdca8669f7cf70c628154ae4abc142660f30e37f9c073928706","libm/src/math/rintf.rs":"3b54af9eaa1bb6808159ca435246acf6a4e7aebbc344e3f4a4c5636345155897","libm/src/math/round.rs":"f10797ef15dd34a74e912ba8621d60bc0200c87b94308c9de3cc88d7aec4feb4","libm/src/math/roundf.rs":"27e37cfcf82373709e7debf9c0c18f7ed00ae0f5d97a214c388041f7a6996d35","libm/src/math/scalbn.rs":"b5c9d6d4177fe393cbfe1c634d75ce14b754f6cbce87c5bf979a9661491748a2","libm/src/math/scalbnf.rs":"4f198d06db1896386256fb9a5ac5b805b16b836226c18780a475cf18d7c1449c","libm/src/math/sin.rs":"bb483a2138ca779e03a191222636f0c60fd75a77a2a12f263bda4b6aa9136317","libm/src/math/sincos.rs":"1cf62a16c215e367f51078a3ba23a3f257682032a8f3c657293029a886b18d82","libm/src/math/sincosf.rs":"b0f589e6ada8215944d7784f420c6721c90387d799e349ce7676674f3c475e75","libm/src/math/sinf.rs":"dcddac1d56b084cbb8d0e019433c9c5fe2201d9b257a7dcf2f85c9a8f14b79cf","libm/src/math/sinh.rs":"d8ee4c7af883a526f36c1a6da13bb81fba9181b477e2f2538161a2bee97edc35","libm/src/math/sinhf.rs":"d06eb030ba9dbf7094df127262bfe99f149b4db49fa8ab8c15499660f1e46b26","libm/src/math/sq
rt.rs":"824570a631c2542ccee68b65e3eb08fe79c037a29bbaaf54da5367e7b236124a","libm/src/math/sqrtf.rs":"4cf418d74f7751d522a642a9a8d6b86ee3472c6aaef44f0eb1bc26f4d8a90985","libm/src/math/tan.rs":"930ecedaadc60f704c2dfa4e15186f59713c1ba7d948529d215223b424827db5","libm/src/math/tanf.rs":"894156a3b107aee08461eb4e7e412fc049aa237d176ae705c6e3e2d7060d94e3","libm/src/math/tanh.rs":"f1f08eb98ed959a17370a7aaf0177be36e3764543424e78feb033ed3f5e8ec98","libm/src/math/tanhf.rs":"74027b0c672a4e64bdef6d7a3069b90caec50e1e7dbb2c12d2828f310502f41e","libm/src/math/tgamma.rs":"c889cfa49bbeb4dbb0941fe9fac3b4da7d5879dcf04a3b9bb6e56de529baf374","libm/src/math/tgammaf.rs":"0737b34777095d0e4d07fe533e8f105082dd4e8ece411bba6ae5993b45b9388c","libm/src/math/trunc.rs":"642264897cc1505e720c8cf313be81aa9fd53aae866644a2e988d01dbc77fd8a","libm/src/math/truncf.rs":"dee3607baf1af0f01deae46e429e097234c50b268eaefebbe716f19f38597900","src/arm.rs":"acf149932aa46a2755cf8cd2eb7d6ae249e46b1e10ad45ce5f924561945d1273","src/arm_linux.rs":"35a4cb7b75015543feb15b0c692da0faf0e6037d3b97a4a18067ba416eae1a70","src/float/add.rs":"3ec32ceaf470a89777b54f9cde61832fdadeade0f4894f268a949e968520bc57","src/float/cmp.rs":"79b1fdc8d5f943c4ad5ea4ad32623b18f63e17ac3852fbc64a4942228007e1fc","src/float/conv.rs":"d95b386e483d2bc77b2d5c41b62d01a8cc791fb3fb18ce97317947ecd5a3c02b","src/float/div.rs":"fe21115ecb1b3330569fd85cb51c650bf80683f152333db988d8e0d564a9ae11","src/float/extend.rs":"180b2e791c58e0526de0a798845c580ce3222c8a15c8665e6e6a4bf5cf1a34aa","src/float/mod.rs":"a91cf65abb6e715c5559e3e4bd87a69cd99a9552d54804d0b7137c02c513f158","src/float/mul.rs":"0d0c1f0c28c149ecadeafd459d3c4c9327e4cfcae2cba479957bb8010ef51a01","src/float/pow.rs":"2ada190738731eb6f24104f8fb8c4d6f03cfb16451536dbee32f2b33db0c4b19","src/float/sub.rs":"c2a87f4628f51d5d908d0f25b5d51ce0599dc559d5a72b20e131261f484d5848","src/float/trunc.rs":"d21d2a2f9a1918b4bbb594691e397972a7c04b74b2acf04016c55693abf6d24b","src/int/addsub.rs":"7ec45ce1ba15b56a5b7129d3e5722c4db764c6545306d3fa9090983bcabd6f17","src/int/leading_zeros.rs":"ccf5e9d098c80034dcf6e38437c9a2eb670fa8043558bbfb574f2293164729a6","src/int/mod.rs":"bab1b77535ceebdebb89fd4e59e7105f8c45347bb638351a626615b24544a0b1","src/int/mul.rs":"bb48d8fd42d8f9f5fe9271d8d0f7a92dbae320bf4346e19d1071eb2093cb8ed9","src/int/sdiv.rs":"ace4cb0ec388a38834e01cab2c5bc87182d31588dfc0b1ae117c11ed0c4781cf","src/int/shift.rs":"40e213fe382a7a1a469fdea85a26f1b0b4b681345f0f8ccaed3e423f19a73633","src/int/specialized_div_rem/asymmetric.rs":"27f5bf70a35109f9d4e4e1ad1e8003aa17da5a1e436bf3e63a493d7528a3a566","src/int/specialized_div_rem/binary_long.rs":"9f1ced81a394f000a21a329683144d68ee431a954136a3634eb55b1ee2cf6d51","src/int/specialized_div_rem/delegate.rs":"9df141af98e391361e25d71ae38d5e845a91d896edd2c041132fd46af8268e85","src/int/specialized_div_rem/mod.rs":"73c98b9f69cc9b101ae4c9081e82d66af1df4a58cf0c9bb2a8c8659265687f12","src/int/specialized_div_rem/norm_shift.rs":"3be7ee0dea545c1f702d9daf67dad2b624bf7b17b075c8b90d3d3f7b53df4c21","src/int/specialized_div_rem/trifecta.rs":"87eef69da255b809fd710b14f2eb3f9f59e3dac625f8564ebc8ba78f9763523b","src/int/udiv.rs":"3732b490a472505411577f008b92f489287745968ce6791665201201377d3475","src/lib.rs":"b1d55a4aa6ce37b086dd512060f380de4eb1944031eea4b4546403e007d38db2","src/macros.rs":"de690dffc59a5884ed06c67d38f06c41ed02fcd6318189397a0d4aafbd375ad8","src/math.rs":"3d7571ea68747f1e492e1fa5fe86512e0829654043f888892dbc0eb109fd0e69","src/mem/impls.rs":"a8d1c28a77d9b334872abbebfcba3fd1802175bef53c0b545e85242860698780","src/mem/mod.rs":"5034543d9631
49c14a6823bee32a1fb9dfd950c32153d37f97e9df1dc6c23129","src/mem/x86_64.rs":"9f740891f666acf384159128eef233d9e15c6120da8016370c6f9f05cc29d653","src/probestack.rs":"ef5c07e9b95de7b2b77a937789fcfefd9846274317489ad6d623e377c9888601","src/riscv.rs":"b43ede1713454c3e50b5a011964d336146155026cac6119767c96b70a165f10f","src/x86.rs":"117b50d6725ee0af0a7b3d197ea580655561f66a870ebc450d96af22bf7f39f6","src/x86_64.rs":"4f16bc9fad7757d48a6da3a078c715dd3a22154aadb4f1998d4c1b5d91396f9e"},"package":"f867ce54c09855ccd135ad4a50c777182a0c7af5ff20a8f537617bd648b10d50"} \ No newline at end of file
diff --git a/vendor/compiler_builtins/Cargo.lock b/vendor/compiler_builtins/Cargo.lock
index 4784f2917..4e415bc2c 100644
--- a/vendor/compiler_builtins/Cargo.lock
+++ b/vendor/compiler_builtins/Cargo.lock
@@ -10,7 +10,7 @@ checksum = "7db2f146208d7e0fbee761b09cd65a7f51ccc38705d4e7262dad4d73b12a76b1"
[[package]]
name = "compiler_builtins"
-version = "0.1.85"
+version = "0.1.87"
dependencies = [
"cc",
"rustc-std-workspace-core",
diff --git a/vendor/compiler_builtins/Cargo.toml b/vendor/compiler_builtins/Cargo.toml
index 0a3bb6df3..df7d4f8e7 100644
--- a/vendor/compiler_builtins/Cargo.toml
+++ b/vendor/compiler_builtins/Cargo.toml
@@ -11,7 +11,7 @@
[package]
name = "compiler_builtins"
-version = "0.1.85"
+version = "0.1.87"
authors = ["Jorge Aparicio <japaricious@gmail.com>"]
links = "compiler-rt"
include = [
diff --git a/vendor/compiler_builtins/src/float/conv.rs b/vendor/compiler_builtins/src/float/conv.rs
index 19fdc2fdc..a27d542fa 100644
--- a/vendor/compiler_builtins/src/float/conv.rs
+++ b/vendor/compiler_builtins/src/float/conv.rs
@@ -92,12 +92,12 @@ intrinsics! {
f64::from_bits(int_to_float::u64_to_f64_bits(i))
}
- #[cfg_attr(any(not(target_feature = "llvm14-builtins-abi"), target_os = "uefi"), unadjusted_on_win64)]
+ #[cfg_attr(target_os = "uefi", unadjusted_on_win64)]
pub extern "C" fn __floatuntisf(i: u128) -> f32 {
f32::from_bits(int_to_float::u128_to_f32_bits(i))
}
- #[cfg_attr(any(not(target_feature = "llvm14-builtins-abi"), target_os = "uefi"), unadjusted_on_win64)]
+ #[cfg_attr(target_os = "uefi", unadjusted_on_win64)]
pub extern "C" fn __floatuntidf(i: u128) -> f64 {
f64::from_bits(int_to_float::u128_to_f64_bits(i))
}
@@ -129,13 +129,13 @@ intrinsics! {
f64::from_bits(int_to_float::u64_to_f64_bits(i.unsigned_abs()) | sign_bit)
}
- #[cfg_attr(any(not(target_feature = "llvm14-builtins-abi"), target_os = "uefi"), unadjusted_on_win64)]
+ #[cfg_attr(target_os = "uefi", unadjusted_on_win64)]
pub extern "C" fn __floattisf(i: i128) -> f32 {
let sign_bit = ((i >> 127) as u32) << 31;
f32::from_bits(int_to_float::u128_to_f32_bits(i.unsigned_abs()) | sign_bit)
}
- #[cfg_attr(any(not(target_feature = "llvm14-builtins-abi"), target_os = "uefi"), unadjusted_on_win64)]
+ #[cfg_attr(target_os = "uefi", unadjusted_on_win64)]
pub extern "C" fn __floattidf(i: i128) -> f64 {
let sign_bit = ((i >> 127) as u64) << 63;
f64::from_bits(int_to_float::u128_to_f64_bits(i.unsigned_abs()) | sign_bit)
@@ -176,8 +176,7 @@ intrinsics! {
}
}
- #[cfg_attr(target_feature = "llvm14-builtins-abi", win64_128bit_abi_hack)]
- #[cfg_attr(not(target_feature = "llvm14-builtins-abi"), unadjusted_on_win64)]
+ #[win64_128bit_abi_hack]
pub extern "C" fn __fixunssfti(f: f32) -> u128 {
let fbits = f.to_bits();
if fbits < 127 << 23 { // >= 0, < 1
@@ -225,8 +224,7 @@ intrinsics! {
}
}
- #[cfg_attr(target_feature = "llvm14-builtins-abi", win64_128bit_abi_hack)]
- #[cfg_attr(not(target_feature = "llvm14-builtins-abi"), unadjusted_on_win64)]
+ #[win64_128bit_abi_hack]
pub extern "C" fn __fixunsdfti(f: f64) -> u128 {
let fbits = f.to_bits();
if fbits < 1023 << 52 { // >= 0, < 1
@@ -279,8 +277,7 @@ intrinsics! {
}
}
- #[cfg_attr(target_feature = "llvm14-builtins-abi", win64_128bit_abi_hack)]
- #[cfg_attr(not(target_feature = "llvm14-builtins-abi"), unadjusted_on_win64)]
+ #[win64_128bit_abi_hack]
pub extern "C" fn __fixsfti(f: f32) -> i128 {
let fbits = f.to_bits() & !0 >> 1; // Remove sign bit.
if fbits < 127 << 23 { // >= 0, < 1
@@ -331,8 +328,7 @@ intrinsics! {
}
}
- #[cfg_attr(target_feature = "llvm14-builtins-abi", win64_128bit_abi_hack)]
- #[cfg_attr(not(target_feature = "llvm14-builtins-abi"), unadjusted_on_win64)]
+ #[win64_128bit_abi_hack]
pub extern "C" fn __fixdfti(f: f64) -> i128 {
let fbits = f.to_bits() & !0 >> 1; // Remove sign bit.
if fbits < 1023 << 52 { // >= 0, < 1
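The `// >= 0, < 1` fast paths in the hunks above lean on the IEEE 754 bit layout: 1.0f32 encodes as biased exponent 127 with a zero mantissa (`127 << 23`), and 1.0f64 as `1023 << 52`. A minimal standalone check of that assumption (not part of the vendored crate):

```rust
// Standalone sanity check of the bit-pattern comparisons used by
// __fixunssfti/__fixdfti above; not part of compiler_builtins itself.
fn main() {
    // 1.0f32 = sign 0, biased exponent 127, mantissa 0.
    assert_eq!(1.0f32.to_bits(), 127u32 << 23);
    // 1.0f64 = sign 0, biased exponent 1023, mantissa 0.
    assert_eq!(1.0f64.to_bits(), 1023u64 << 52);
    // So, for non-negative inputs, `fbits < 127 << 23` really means f < 1.0.
    assert!(0.999_f32.to_bits() < 127 << 23);
    assert!(1.5_f32.to_bits() >= 127 << 23);
    println!("bit-pattern checks hold");
}
```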
diff --git a/vendor/compiler_builtins/src/int/shift.rs b/vendor/compiler_builtins/src/int/shift.rs
index 908e619e1..2d2c081a6 100644
--- a/vendor/compiler_builtins/src/int/shift.rs
+++ b/vendor/compiler_builtins/src/int/shift.rs
@@ -69,47 +69,56 @@ impl Lshr for u64 {}
impl Lshr for u128 {}
intrinsics! {
+ #[avr_skip]
#[maybe_use_optimized_c_shim]
pub extern "C" fn __ashlsi3(a: u32, b: u32) -> u32 {
a.ashl(b)
}
+ #[avr_skip]
#[maybe_use_optimized_c_shim]
#[arm_aeabi_alias = __aeabi_llsl]
pub extern "C" fn __ashldi3(a: u64, b: u32) -> u64 {
a.ashl(b)
}
+ #[avr_skip]
pub extern "C" fn __ashlti3(a: u128, b: u32) -> u128 {
a.ashl(b)
}
+ #[avr_skip]
#[maybe_use_optimized_c_shim]
pub extern "C" fn __ashrsi3(a: i32, b: u32) -> i32 {
a.ashr(b)
}
+ #[avr_skip]
#[maybe_use_optimized_c_shim]
#[arm_aeabi_alias = __aeabi_lasr]
pub extern "C" fn __ashrdi3(a: i64, b: u32) -> i64 {
a.ashr(b)
}
+ #[avr_skip]
pub extern "C" fn __ashrti3(a: i128, b: u32) -> i128 {
a.ashr(b)
}
+ #[avr_skip]
#[maybe_use_optimized_c_shim]
pub extern "C" fn __lshrsi3(a: u32, b: u32) -> u32 {
a.lshr(b)
}
+ #[avr_skip]
#[maybe_use_optimized_c_shim]
#[arm_aeabi_alias = __aeabi_llsr]
pub extern "C" fn __lshrdi3(a: u64, b: u32) -> u64 {
a.lshr(b)
}
+ #[avr_skip]
pub extern "C" fn __lshrti3(a: u128, b: u32) -> u128 {
a.lshr(b)
}
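`__ashlsi3`, `__ashldi3`, `__ashlti3` and the other intrinsics in this hunk are the shift routines the compiler falls back to when a target lacks a native shift for the given width; the new `#[avr_skip]` attribute appears to exclude them on AVR targets. As a rough illustration only, and not compiler_builtins' actual implementation (which lives behind the `ashl`/`ashr`/`lshr` trait methods), a 64-bit shift can be composed from 32-bit halves like this:

```rust
// Conceptual sketch of a widening shift, the kind of operation __ashldi3
// provides on a 32-bit target. Illustration only; the crate's code differs.
fn shl64_from_u32_halves(a: u64, b: u32) -> u64 {
    let lo = a as u32;
    let hi = (a >> 32) as u32;
    let (new_lo, new_hi) = if b == 0 {
        (lo, hi)
    } else if b < 32 {
        (lo << b, (hi << b) | (lo >> (32 - b)))
    } else {
        // Shifting by 32..=63 moves the low half entirely into the high half.
        (0, lo << (b - 32))
    };
    ((new_hi as u64) << 32) | u64::from(new_lo)
}

fn main() {
    for b in [0u32, 1, 13, 31, 32, 45, 63] {
        assert_eq!(
            shl64_from_u32_halves(0x0123_4567_89ab_cdef, b),
            0x0123_4567_89ab_cdef_u64 << b
        );
    }
    println!("widening shift matches the native operator");
}
```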
diff --git a/vendor/dissimilar/.cargo-checksum.json b/vendor/dissimilar/.cargo-checksum.json
index 889c53a44..cfb8c46ea 100644
--- a/vendor/dissimilar/.cargo-checksum.json
+++ b/vendor/dissimilar/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.toml":"abe95bc027ce5fe4aae082e8560c12d43f015ea85453be0ca6df6ded8f29e4da","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"a9480cd29fe4eefae782c5ec20a05f88ca28d3ca1573a893fa423b931e3ca392","benches/bench.rs":"e62b50ebe922590a7251197b50047a3924be98c6193e1f0fbca552d66fd05f9d","benches/document1.txt":"92a6f5c3992d98632eea7a6c6261cf1a26ae484b34358778b774a2d58fd356d3","benches/document2.txt":"8d106ddba8bd4a85a8bb4b59e481b88f536de98046dc8e4f76f7551c265c5dd3","src/find.rs":"32f68fa18bd547f5c716895bc580c12d6fe8503f86044fc0334f1b1e3cd3ac97","src/lib.rs":"7c1bf347cb87d22dde987da421931644dea3ed84e5e48b8dad44cb5579cb9f04","src/range.rs":"8652a374da1f7959ed912891e610a1e72026ba5315362cfb529bd2724ce69fc6","src/tests.rs":"e2a68e2b724ec65a062b634b86114e3a2e445c4693e312c08d9648f6621ea9d2","tests/test.rs":"4dcc2007359d6bf6a48590fcdab9cc81787a18aac8dc9c1c4be1019d95ca690e"},"package":"8c97b9233581d84b8e1e689cdd3a47b6f69770084fc246e86a7f78b0d9c1d4a5"} \ No newline at end of file
+{"files":{"Cargo.toml":"b5579f3eb0d811d6ad116c247138911fee4404c80f49bb6020df06394d0467ec","LICENSE-APACHE":"62c7a1e35f56406896d7aa7ca52d0cc0d272ac022b5d2796e7d6905db8a3636a","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"e081b60ac4ad261ee3c57f32131634017044193b94ce2ebd64d134d22185e79a","benches/bench.rs":"e62b50ebe922590a7251197b50047a3924be98c6193e1f0fbca552d66fd05f9d","benches/document1.txt":"92a6f5c3992d98632eea7a6c6261cf1a26ae484b34358778b774a2d58fd356d3","benches/document2.txt":"8d106ddba8bd4a85a8bb4b59e481b88f536de98046dc8e4f76f7551c265c5dd3","src/find.rs":"4587b9fc3fc32149898c6daf50624c41ab9de9ec4de5baa3a4d3644436dccf5f","src/lib.rs":"9749cf7915c3f5682144f3eb0ec3d1b5d9c3457b740a7ced8e91211be6a8549c","src/range.rs":"9b4f5f0125d927f985cf5c3a92452e4c28273842d9ff7debc2d3584db5d7d0f6","src/tests.rs":"222464295b0558fe505f9d2d53f315dd164cc85e323e32e523f738eec771cdbe","tests/test.rs":"2a4ccfea35304fa2fc2b2b38efe9da6a1b5fd5f6c1247ba01e85232d19b70206"},"package":"210ec60ae7d710bed8683e333e9d2855a8a56a3e9892b38bad3bb0d4d29b0d5e"} \ No newline at end of file
diff --git a/vendor/dissimilar/Cargo.toml b/vendor/dissimilar/Cargo.toml
index d657b1742..3fdb995a5 100644
--- a/vendor/dissimilar/Cargo.toml
+++ b/vendor/dissimilar/Cargo.toml
@@ -11,9 +11,9 @@
[package]
edition = "2018"
-rust-version = "1.31"
+rust-version = "1.36"
name = "dissimilar"
-version = "1.0.4"
+version = "1.0.6"
authors = ["David Tolnay <dtolnay@gmail.com>"]
description = "Diff library with semantic cleanup, based on Google's diff-match-patch"
documentation = "https://docs.rs/dissimilar"
@@ -28,3 +28,9 @@ repository = "https://github.com/dtolnay/dissimilar"
[package.metadata.docs.rs]
targets = ["x86_64-unknown-linux-gnu"]
+
+[lib]
+doc-scrape-examples = false
+
+[dev-dependencies.once_cell]
+version = "1"
diff --git a/vendor/dissimilar/LICENSE-APACHE b/vendor/dissimilar/LICENSE-APACHE
index 16fe87b06..1b5ec8b78 100644
--- a/vendor/dissimilar/LICENSE-APACHE
+++ b/vendor/dissimilar/LICENSE-APACHE
@@ -174,28 +174,3 @@ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
-
-APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
-Copyright [yyyy] [name of copyright owner]
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
diff --git a/vendor/dissimilar/README.md b/vendor/dissimilar/README.md
index 492bced22..82ce66995 100644
--- a/vendor/dissimilar/README.md
+++ b/vendor/dissimilar/README.md
@@ -3,8 +3,8 @@ Dissimilar: diff library with semantic cleanup
[<img alt="github" src="https://img.shields.io/badge/github-dtolnay/dissimilar-8da0cb?style=for-the-badge&labelColor=555555&logo=github" height="20">](https://github.com/dtolnay/dissimilar)
[<img alt="crates.io" src="https://img.shields.io/crates/v/dissimilar.svg?style=for-the-badge&color=fc8d62&logo=rust" height="20">](https://crates.io/crates/dissimilar)
-[<img alt="docs.rs" src="https://img.shields.io/badge/docs.rs-dissimilar-66c2a5?style=for-the-badge&labelColor=555555&logoColor=white&logo=data:image/svg+xml;base64,PHN2ZyByb2xlPSJpbWciIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgdmlld0JveD0iMCAwIDUxMiA1MTIiPjxwYXRoIGZpbGw9IiNmNWY1ZjUiIGQ9Ik00ODguNiAyNTAuMkwzOTIgMjE0VjEwNS41YzAtMTUtOS4zLTI4LjQtMjMuNC0zMy43bC0xMDAtMzcuNWMtOC4xLTMuMS0xNy4xLTMuMS0yNS4zIDBsLTEwMCAzNy41Yy0xNC4xIDUuMy0yMy40IDE4LjctMjMuNCAzMy43VjIxNGwtOTYuNiAzNi4yQzkuMyAyNTUuNSAwIDI2OC45IDAgMjgzLjlWMzk0YzAgMTMuNiA3LjcgMjYuMSAxOS45IDMyLjJsMTAwIDUwYzEwLjEgNS4xIDIyLjEgNS4xIDMyLjIgMGwxMDMuOS01MiAxMDMuOSA1MmMxMC4xIDUuMSAyMi4xIDUuMSAzMi4yIDBsMTAwLTUwYzEyLjItNi4xIDE5LjktMTguNiAxOS45LTMyLjJWMjgzLjljMC0xNS05LjMtMjguNC0yMy40LTMzLjd6TTM1OCAyMTQuOGwtODUgMzEuOXYtNjguMmw4NS0zN3Y3My4zek0xNTQgMTA0LjFsMTAyLTM4LjIgMTAyIDM4LjJ2LjZsLTEwMiA0MS40LTEwMi00MS40di0uNnptODQgMjkxLjFsLTg1IDQyLjV2LTc5LjFsODUtMzguOHY3NS40em0wLTExMmwtMTAyIDQxLjQtMTAyLTQxLjR2LS42bDEwMi0zOC4yIDEwMiAzOC4ydi42em0yNDAgMTEybC04NSA0Mi41di03OS4xbDg1LTM4Ljh2NzUuNHptMC0xMTJsLTEwMiA0MS40LTEwMi00MS40di0uNmwxMDItMzguMiAxMDIgMzguMnYuNnoiPjwvcGF0aD48L3N2Zz4K" height="20">](https://docs.rs/dissimilar)
-[<img alt="build status" src="https://img.shields.io/github/workflow/status/dtolnay/dissimilar/CI/master?style=for-the-badge" height="20">](https://github.com/dtolnay/dissimilar/actions?query=branch%3Amaster)
+[<img alt="docs.rs" src="https://img.shields.io/badge/docs.rs-dissimilar-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs" height="20">](https://docs.rs/dissimilar)
+[<img alt="build status" src="https://img.shields.io/github/actions/workflow/status/dtolnay/dissimilar/ci.yml?branch=master&style=for-the-badge" height="20">](https://github.com/dtolnay/dissimilar/actions?query=branch%3Amaster)
This library is a port of the Diff component of [Diff Match Patch] to Rust. The
diff implementation is based on [Myers' diff algorithm] but includes some
@@ -22,7 +22,7 @@ Diff Match Patch was originally built in 2006 to power Google Docs.
dissimilar = "1.0"
```
-*Compiler support: requires rustc 1.31+*
+*Compiler support: requires rustc 1.36+*
<br>
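The public API exercised by the source and test changes further down in this diff is a single `diff` function returning `Chunk` values that borrow from the inputs. A minimal usage sketch, with inputs taken from the crate's own tests:

```rust
// Minimal use of the public API shown in src/lib.rs and tests/test.rs below:
// diff() returns chunks borrowing from the two input strings.
use dissimilar::{diff, Chunk};

fn main() {
    let chunks = diff("Apples are a fruit.", "Bananas are also fruit.");
    for chunk in &chunks {
        match chunk {
            Chunk::Equal(text) => print!("{}", text),
            Chunk::Delete(text) => print!("[-{}-]", text),
            Chunk::Insert(text) => print!("{{+{}+}}", text),
        }
    }
    println!();
}
```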
diff --git a/vendor/dissimilar/src/find.rs b/vendor/dissimilar/src/find.rs
index 90ca2c6c5..4af3b8bee 100644
--- a/vendor/dissimilar/src/find.rs
+++ b/vendor/dissimilar/src/find.rs
@@ -1,5 +1,5 @@
// The strstr implementation in this file is extracted from the Rust standard
-// library's str::find. The algorithm works for arbitrary &[u8] haystack and
+// library's str::find. The algorithm works for arbitrary &[T] haystack and
// needle but is only exposed by the standard library on UTF-8 strings.
//
// https://github.com/rust-lang/rust/blob/1.40.0/src/libcore/str/pattern.rs
@@ -80,7 +80,7 @@
use std::cmp;
use std::usize;
-pub fn find(haystack: &[u8], needle: &[u8]) -> Option<usize> {
+pub fn find(haystack: &[char], needle: &[char]) -> Option<usize> {
assert!(!needle.is_empty());
// crit_pos: critical factorization index
@@ -177,12 +177,12 @@ pub fn find(haystack: &[u8], needle: &[u8]) -> Option<usize> {
}
}
-fn byteset_create(bytes: &[u8]) -> u64 {
- bytes.iter().fold(0, |a, &b| (1 << (b & 0x3f)) | a)
+fn byteset_create(chars: &[char]) -> u64 {
+ chars.iter().fold(0, |a, &ch| (1 << (ch as u8 & 0x3f)) | a)
}
-fn byteset_contains(byteset: u64, byte: u8) -> bool {
- (byteset >> ((byte & 0x3f) as usize)) & 1 != 0
+fn byteset_contains(byteset: u64, ch: char) -> bool {
+ (byteset >> ((ch as u8 & 0x3f) as usize)) & 1 != 0
}
// Compute the maximal suffix of `arr`.
@@ -197,7 +197,7 @@ fn byteset_contains(byteset: u64, byte: u8) -> bool {
// a critical factorization.
//
// For long period cases, the resulting period is not exact (it is too short).
-fn maximal_suffix(arr: &[u8], order_greater: bool) -> (usize, usize) {
+fn maximal_suffix(arr: &[char], order_greater: bool) -> (usize, usize) {
let mut left = 0; // Corresponds to i in the paper
let mut right = 1; // Corresponds to j in the paper
let mut offset = 0; // Corresponds to k in the paper, but starting at 0
diff --git a/vendor/dissimilar/src/lib.rs b/vendor/dissimilar/src/lib.rs
index 8ce9faad3..b66434ade 100644
--- a/vendor/dissimilar/src/lib.rs
+++ b/vendor/dissimilar/src/lib.rs
@@ -2,7 +2,7 @@
//!
//! [github]: https://img.shields.io/badge/github-8da0cb?style=for-the-badge&labelColor=555555&logo=github
//! [crates-io]: https://img.shields.io/badge/crates.io-fc8d62?style=for-the-badge&labelColor=555555&logo=rust
-//! [docs-rs]: https://img.shields.io/badge/docs.rs-66c2a5?style=for-the-badge&labelColor=555555&logoColor=white&logo=data:image/svg+xml;base64,PHN2ZyByb2xlPSJpbWciIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgdmlld0JveD0iMCAwIDUxMiA1MTIiPjxwYXRoIGZpbGw9IiNmNWY1ZjUiIGQ9Ik00ODguNiAyNTAuMkwzOTIgMjE0VjEwNS41YzAtMTUtOS4zLTI4LjQtMjMuNC0zMy43bC0xMDAtMzcuNWMtOC4xLTMuMS0xNy4xLTMuMS0yNS4zIDBsLTEwMCAzNy41Yy0xNC4xIDUuMy0yMy40IDE4LjctMjMuNCAzMy43VjIxNGwtOTYuNiAzNi4yQzkuMyAyNTUuNSAwIDI2OC45IDAgMjgzLjlWMzk0YzAgMTMuNiA3LjcgMjYuMSAxOS45IDMyLjJsMTAwIDUwYzEwLjEgNS4xIDIyLjEgNS4xIDMyLjIgMGwxMDMuOS01MiAxMDMuOSA1MmMxMC4xIDUuMSAyMi4xIDUuMSAzMi4yIDBsMTAwLTUwYzEyLjItNi4xIDE5LjktMTguNiAxOS45LTMyLjJWMjgzLjljMC0xNS05LjMtMjguNC0yMy40LTMzLjd6TTM1OCAyMTQuOGwtODUgMzEuOXYtNjguMmw4NS0zN3Y3My4zek0xNTQgMTA0LjFsMTAyLTM4LjIgMTAyIDM4LjJ2LjZsLTEwMiA0MS40LTEwMi00MS40di0uNnptODQgMjkxLjFsLTg1IDQyLjV2LTc5LjFsODUtMzguOHY3NS40em0wLTExMmwtMTAyIDQxLjQtMTAyLTQxLjR2LS42bDEwMi0zOC4yIDEwMiAzOC4ydi42em0yNDAgMTEybC04NSA0Mi41di03OS4xbDg1LTM4Ljh2NzUuNHptMC0xMTJsLTEwMiA0MS40LTEwMi00MS40di0uNmwxMDItMzguMiAxMDIgMzguMnYuNnoiPjwvcGF0aD48L3N2Zz4K
+//! [docs-rs]: https://img.shields.io/badge/docs.rs-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs
//!
//! <br>
//!
@@ -37,9 +37,10 @@
//! [Myers' diff algorithm]: https://neil.fraser.name/writing/diff/myers.pdf
//! [semantic cleanups]: https://neil.fraser.name/writing/diff/
-#![doc(html_root_url = "https://docs.rs/dissimilar/1.0.4")]
+#![doc(html_root_url = "https://docs.rs/dissimilar/1.0.6")]
#![allow(
clippy::blocks_in_if_conditions,
+ clippy::bool_to_int_with_if,
clippy::cast_possible_wrap,
clippy::cast_sign_loss,
clippy::cloned_instead_of_copied, // https://github.com/rust-lang/rust-clippy/issues/7127
@@ -63,10 +64,10 @@ mod range;
#[cfg(test)]
mod tests;
-use crate::range::{bytes, str, Range};
+use crate::range::{slice, Range};
use std::cmp;
use std::collections::VecDeque;
-use std::fmt::{self, Debug};
+use std::fmt::{self, Debug, Display, Write};
#[derive(Copy, Clone, PartialEq, Eq)]
pub enum Chunk<'a> {
@@ -121,20 +122,49 @@ impl<'tmp, 'a: 'tmp, 'b: 'tmp> Diff<'a, 'b> {
}
pub fn diff<'a>(text1: &'a str, text2: &'a str) -> Vec<Chunk<'a>> {
- let text1 = Range::new(text1, ..);
- let text2 = Range::new(text2, ..);
- let mut solution = main(text1, text2);
+ let chars1: Vec<char> = text1.chars().collect();
+ let chars2: Vec<char> = text2.chars().collect();
+ let range1 = Range::new(&chars1, ..);
+ let range2 = Range::new(&chars2, ..);
+
+ let mut solution = main(range1, range2);
cleanup_char_boundary(&mut solution);
cleanup_semantic(&mut solution);
cleanup_merge(&mut solution);
- solution.diffs.into_iter().map(Chunk::from).collect()
+
+ let mut chunks = Vec::new();
+ let mut pos1 = 0;
+ let mut pos2 = 0;
+ for diff in solution.diffs {
+ chunks.push(match diff {
+ Diff::Equal(range, _) => {
+ let len = range.len_bytes();
+ let chunk = Chunk::Equal(&text1[pos1..pos1 + len]);
+ pos1 += len;
+ pos2 += len;
+ chunk
+ }
+ Diff::Delete(range) => {
+ let len = range.len_bytes();
+ let chunk = Chunk::Delete(&text1[pos1..pos1 + len]);
+ pos1 += len;
+ chunk
+ }
+ Diff::Insert(range) => {
+ let len = range.len_bytes();
+ let chunk = Chunk::Insert(&text2[pos2..pos2 + len]);
+ pos2 += len;
+ chunk
+ }
+ });
+ }
+ chunks
}
struct Solution<'a, 'b> {
text1: Range<'a>,
text2: Range<'b>,
diffs: Vec<Diff<'a, 'b>>,
- utf8: bool,
}
fn main<'a, 'b>(mut text1: Range<'a>, mut text2: Range<'b>) -> Solution<'a, 'b> {
@@ -142,7 +172,7 @@ fn main<'a, 'b>(mut text1: Range<'a>, mut text2: Range<'b>) -> Solution<'a, 'b>
let whole2 = text2;
// Trim off common prefix.
- let common_prefix_len = common_prefix_bytes(text1, text2);
+ let common_prefix_len = common_prefix(text1, text2);
let common_prefix = Diff::Equal(
text1.substring(..common_prefix_len),
text2.substring(..common_prefix_len),
@@ -151,7 +181,7 @@ fn main<'a, 'b>(mut text1: Range<'a>, mut text2: Range<'b>) -> Solution<'a, 'b>
text2 = text2.substring(common_prefix_len..);
// Trim off common suffix.
- let common_suffix_len = common_suffix_bytes(text1, text2);
+ let common_suffix_len = common_suffix(text1, text2);
let common_suffix = Diff::Equal(
text1.substring(text1.len - common_suffix_len..),
text2.substring(text2.len - common_suffix_len..),
@@ -164,7 +194,6 @@ fn main<'a, 'b>(mut text1: Range<'a>, mut text2: Range<'b>) -> Solution<'a, 'b>
text1: whole1,
text2: whole2,
diffs: compute(text1, text2),
- utf8: false,
};
// Restore the prefix and suffix.
@@ -252,7 +281,7 @@ fn bisect<'a, 'b>(text1: Range<'a>, text2: Range<'b>) -> Vec<Diff<'a, 'b>> {
} as usize;
let mut y1 = (x1 as isize - k1) as usize;
if let (Some(s1), Some(s2)) = (text1.get(x1..), text2.get(y1..)) {
- let advance = common_prefix_bytes(s1, s2);
+ let advance = common_prefix(s1, s2);
x1 += advance;
y1 += advance;
}
@@ -288,7 +317,7 @@ fn bisect<'a, 'b>(text1: Range<'a>, text2: Range<'b>) -> Vec<Diff<'a, 'b>> {
} as usize;
let mut y2 = (x2 as isize - k2) as usize;
if x2 < text1.len && y2 < text2.len {
- let advance = common_suffix_bytes(
+ let advance = common_suffix(
text1.substring(..text1.len - x2),
text2.substring(..text2.len - y2),
);
@@ -342,8 +371,8 @@ fn bisect_split<'a, 'b>(
// Determine the length of the common prefix of two strings.
fn common_prefix(text1: Range, text2: Range) -> usize {
- for ((i, ch1), ch2) in text1.char_indices().zip(text2.chars()) {
- if ch1 != ch2 {
+ for (i, (b1, b2)) in text1.chars().zip(text2.chars()).enumerate() {
+ if b1 != b2 {
return i;
}
}
@@ -352,25 +381,7 @@ fn common_prefix(text1: Range, text2: Range) -> usize {
// Determine the length of the common suffix of two strings.
fn common_suffix(text1: Range, text2: Range) -> usize {
- for ((i, ch1), ch2) in text1.char_indices().rev().zip(text2.chars().rev()) {
- if ch1 != ch2 {
- return text1.len - i - ch1.len_utf8();
- }
- }
- cmp::min(text1.len, text2.len)
-}
-
-fn common_prefix_bytes(text1: Range, text2: Range) -> usize {
- for (i, (b1, b2)) in text1.bytes().zip(text2.bytes()).enumerate() {
- if b1 != b2 {
- return i;
- }
- }
- cmp::min(text1.len, text2.len)
-}
-
-fn common_suffix_bytes(text1: Range, text2: Range) -> usize {
- for (i, (b1, b2)) in text1.bytes().rev().zip(text2.bytes().rev()).enumerate() {
+ for (i, (b1, b2)) in text1.chars().rev().zip(text2.chars().rev()).enumerate() {
if b1 != b2 {
return i;
}
@@ -394,7 +405,7 @@ fn common_overlap(mut text1: Range, mut text2: Range) -> usize {
text2 = text2.substring(..text1.len);
}
// Quick check for the worst case.
- if bytes(text1) == bytes(text2) {
+ if slice(text1) == slice(text2) {
return text1.len;
}
@@ -411,7 +422,7 @@ fn common_overlap(mut text1: Range, mut text2: Range) -> usize {
};
length += found;
if found == 0
- || bytes(text1.substring(text1.len - length..)) == bytes(text2.substring(..length))
+ || slice(text1.substring(text1.len - length..)) == slice(text2.substring(..length))
{
best = length;
length += 1;
@@ -420,17 +431,24 @@ fn common_overlap(mut text1: Range, mut text2: Range) -> usize {
}
fn cleanup_char_boundary(solution: &mut Solution) {
- fn boundary_down(doc: &str, pos: usize) -> usize {
+ fn is_segmentation_boundary(doc: &[char], pos: usize) -> bool {
+ // FIXME: use unicode-segmentation crate?
+ let _ = doc;
+ let _ = pos;
+ true
+ }
+
+ fn boundary_down(doc: &[char], pos: usize) -> usize {
let mut adjust = 0;
- while !doc.is_char_boundary(pos - adjust) {
+ while !is_segmentation_boundary(doc, pos - adjust) {
adjust += 1;
}
adjust
}
- fn boundary_up(doc: &str, pos: usize) -> usize {
+ fn boundary_up(doc: &[char], pos: usize) -> usize {
let mut adjust = 0;
- while !doc.is_char_boundary(pos + adjust) {
+ while !is_segmentation_boundary(doc, pos + adjust) {
adjust += 1;
}
adjust
@@ -498,7 +516,6 @@ fn cleanup_char_boundary(solution: &mut Solution) {
}
solution.diffs.truncate(retain);
- solution.utf8 = true;
}
// Reduce the number of edits by eliminating semantically trivial equalities.
@@ -658,14 +675,13 @@ fn cleanup_semantic_lossless(solution: &mut Solution) {
&& !next_equal1.is_empty()
&& edit.text().chars().next().unwrap() == next_equal1.chars().next().unwrap()
{
- let increment = edit.text().chars().next().unwrap().len_utf8();
- prev_equal1.len += increment;
- prev_equal2.len += increment;
- edit.shift_right(increment);
- next_equal1.offset += increment;
- next_equal1.len -= increment;
- next_equal2.offset += increment;
- next_equal2.len -= increment;
+ prev_equal1.len += 1;
+ prev_equal2.len += 1;
+ edit.shift_right(1);
+ next_equal1.offset += 1;
+ next_equal1.len -= 1;
+ next_equal2.offset += 1;
+ next_equal2.len -= 1;
let score = cleanup_semantic_score(prev_equal1, edit.text())
+ cleanup_semantic_score(edit.text(), next_equal1);
// The >= encourages trailing rather than leading whitespace on edits.
@@ -720,8 +736,10 @@ fn cleanup_semantic_score(one: Range, two: Range) -> usize {
let whitespace2 = non_alphanumeric2 && char2.is_ascii_whitespace();
let line_break1 = whitespace1 && char1.is_control();
let line_break2 = whitespace2 && char2.is_control();
- let blank_line1 = line_break1 && (one.ends_with("\n\n") || one.ends_with("\n\r\n"));
- let blank_line2 = line_break2 && (two.starts_with("\n\n") || two.starts_with("\r\n\r\n"));
+ let blank_line1 =
+ line_break1 && (one.ends_with(['\n', '\n']) || one.ends_with(['\n', '\r', '\n']));
+ let blank_line2 =
+ line_break2 && (two.starts_with(['\n', '\n']) || two.starts_with(['\r', '\n', '\r', '\n']));
if blank_line1 || blank_line2 {
// Five points for blank lines.
@@ -747,22 +765,7 @@ fn cleanup_semantic_score(one: Range, two: Range) -> usize {
// move as long as it doesn't cross an equality.
fn cleanup_merge(solution: &mut Solution) {
let diffs = &mut solution.diffs;
- let common_prefix = if solution.utf8 {
- common_prefix
- } else {
- common_prefix_bytes
- };
- let common_suffix = if solution.utf8 {
- common_suffix
- } else {
- common_suffix_bytes
- };
-
- loop {
- if diffs.is_empty() {
- return;
- }
-
+ while !diffs.is_empty() {
diffs.push(Diff::Equal(
solution.text1.substring(solution.text1.len..),
solution.text2.substring(solution.text2.len..),
@@ -911,22 +914,22 @@ impl Debug for Chunk<'_> {
impl Debug for Diff<'_, '_> {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
- let (name, bytes) = match *self {
- Diff::Equal(range, _) => ("Equal", bytes(range)),
- Diff::Delete(range) => ("Delete", bytes(range)),
- Diff::Insert(range) => ("Insert", bytes(range)),
+ let (name, range) = match *self {
+ Diff::Equal(range, _) => ("Equal", range),
+ Diff::Delete(range) => ("Delete", range),
+ Diff::Insert(range) => ("Insert", range),
};
- let text = String::from_utf8_lossy(bytes);
- write!(formatter, "{}({:?})", name, text)
- }
-}
-
-impl<'a> From<Diff<'a, 'a>> for Chunk<'a> {
- fn from(diff: Diff<'a, 'a>) -> Self {
- match diff {
- Diff::Equal(range, _) => Chunk::Equal(str(range)),
- Diff::Delete(range) => Chunk::Delete(str(range)),
- Diff::Insert(range) => Chunk::Insert(str(range)),
+ formatter.write_str(name)?;
+ formatter.write_str("(\"")?;
+ for ch in range.chars() {
+ if ch == '\'' {
+ // escape_debug turns this into "\'" which is unnecessary.
+ formatter.write_char(ch)?;
+ } else {
+ Display::fmt(&ch.escape_debug(), formatter)?;
+ }
}
+ formatter.write_str("\")")?;
+ Ok(())
}
}
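The rewritten `diff()` above works on `&[char]` internally and converts each chunk's character count back into a byte length (`Range::len_bytes`, the sum of `char::len_utf8`) in order to slice the original `&str` inputs. A small check of that mapping on the multibyte input from `test_issue15` below:

```rust
// Checks the char-count -> byte-length mapping the rewritten diff() relies on.
fn main() {
    let text = "A のダ"; // input used by test_issue15 below
    let chars: Vec<char> = text.chars().collect();
    assert_eq!(chars.len(), 4); // 'A', ' ', 'の', 'ダ'
    let byte_len: usize = chars.iter().map(|c| c.len_utf8()).sum();
    assert_eq!(byte_len, text.len()); // 1 + 1 + 3 + 3 = 8 bytes
    println!("{} chars span {} bytes", chars.len(), byte_len);
}
```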
diff --git a/vendor/dissimilar/src/range.rs b/vendor/dissimilar/src/range.rs
index 565a94c06..55cbc448c 100644
--- a/vendor/dissimilar/src/range.rs
+++ b/vendor/dissimilar/src/range.rs
@@ -1,11 +1,10 @@
use crate::find::find;
use std::fmt::Debug;
use std::ops::{self, RangeFrom, RangeFull, RangeTo};
-use std::str::{CharIndices, Chars};
#[derive(Copy, Clone)]
pub struct Range<'a> {
- pub doc: &'a str,
+ pub doc: &'a [char],
pub offset: usize,
pub len: usize,
}
@@ -13,13 +12,13 @@ pub struct Range<'a> {
impl<'a> Range<'a> {
pub fn empty() -> Self {
Range {
- doc: "",
+ doc: &[],
offset: 0,
len: 0,
}
}
- pub fn new(doc: &'a str, bounds: impl RangeBounds) -> Self {
+ pub fn new(doc: &'a [char], bounds: impl RangeBounds) -> Self {
let (offset, len) = bounds.index(doc.len());
Range { doc, offset, len }
}
@@ -28,6 +27,10 @@ impl<'a> Range<'a> {
self.len == 0
}
+ pub fn len_bytes(&self) -> usize {
+ self.chars().map(char::len_utf8).sum()
+ }
+
pub fn substring(&self, bounds: impl RangeBounds) -> Self {
let (offset, len) = bounds.index(self.len);
Range {
@@ -50,32 +53,26 @@ impl<'a> Range<'a> {
(self.substring(..mid), self.substring(mid..))
}
- pub fn chars(&self) -> Chars<'a> {
- str(*self).chars()
- }
-
- pub fn char_indices(&self) -> CharIndices<'a> {
- str(*self).char_indices()
+ pub fn chars(
+ &self,
+ ) -> impl Iterator<Item = char> + DoubleEndedIterator + ExactSizeIterator + 'a {
+ slice(*self).iter().copied()
}
- pub fn bytes(&self) -> impl Iterator<Item = u8> + DoubleEndedIterator + ExactSizeIterator + 'a {
- bytes(*self).iter().cloned()
+ pub fn starts_with(&self, prefix: impl AsRef<[char]>) -> bool {
+ slice(*self).starts_with(prefix.as_ref())
}
- pub fn starts_with(&self, prefix: impl AsRef<[u8]>) -> bool {
- bytes(*self).starts_with(prefix.as_ref())
+ pub fn ends_with(&self, suffix: impl AsRef<[char]>) -> bool {
+ slice(*self).ends_with(suffix.as_ref())
}
- pub fn ends_with(&self, suffix: impl AsRef<[u8]>) -> bool {
- bytes(*self).ends_with(suffix.as_ref())
- }
-
- pub fn find(&self, needle: impl AsRef<[u8]>) -> Option<usize> {
- find(bytes(*self), needle.as_ref())
+ pub fn find(&self, needle: impl AsRef<[char]>) -> Option<usize> {
+ find(slice(*self), needle.as_ref())
}
}
-pub fn str(range: Range) -> &str {
+pub fn slice(range: Range) -> &[char] {
if cfg!(debug)
&& range
.doc
@@ -90,13 +87,9 @@ pub fn str(range: Range) -> &str {
&range.doc[range.offset..range.offset + range.len]
}
-pub fn bytes(range: Range) -> &[u8] {
- &range.doc.as_bytes()[range.offset..range.offset + range.len]
-}
-
-impl AsRef<[u8]> for Range<'_> {
- fn as_ref(&self) -> &[u8] {
- bytes(*self)
+impl AsRef<[char]> for Range<'_> {
+ fn as_ref(&self) -> &[char] {
+ slice(*self)
}
}
diff --git a/vendor/dissimilar/src/tests.rs b/vendor/dissimilar/src/tests.rs
index 450d7f7e4..d2e3fd643 100644
--- a/vendor/dissimilar/src/tests.rs
+++ b/vendor/dissimilar/src/tests.rs
@@ -1,4 +1,13 @@
use super::*;
+use once_cell::sync::OnceCell;
+
+macro_rules! range {
+ ($text:expr) => {{
+ static CHARS: OnceCell<Vec<char>> = OnceCell::new();
+ let chars = CHARS.get_or_init(|| $text.chars().collect());
+ Range::new(chars, ..)
+ }};
+}
macro_rules! diff_list {
() => {
@@ -6,50 +15,52 @@ macro_rules! diff_list {
text1: Range::empty(),
text2: Range::empty(),
diffs: Vec::new(),
- utf8: true,
}
};
($($kind:ident($text:literal)),+ $(,)?) => {{
+ #[allow(unused_macro_rules)]
macro_rules! text1 {
(Insert, $s:literal) => { "" };
(Delete, $s:literal) => { $s };
(Equal, $s:literal) => { $s };
}
+ #[allow(unused_macro_rules)]
macro_rules! text2 {
(Insert, $s:literal) => { $s };
(Delete, $s:literal) => { "" };
(Equal, $s:literal) => { $s };
}
- let text1 = concat!($(text1!($kind, $text)),*);
- let text2 = concat!($(text2!($kind, $text)),*);
+ let text1 = range!(concat!($(text1!($kind, $text)),*));
+ let text2 = range!(concat!($(text2!($kind, $text)),*));
let (_i, _j) = (&mut 0, &mut 0);
+ #[allow(unused_macro_rules)]
macro_rules! range {
(Insert, $s:literal) => {
- Diff::Insert(range(text2, _j, $s))
+ Diff::Insert(range(text2.doc, _j, $s))
};
(Delete, $s:literal) => {
- Diff::Delete(range(text1, _i, $s))
+ Diff::Delete(range(text1.doc, _i, $s))
};
(Equal, $s:literal) => {
- Diff::Equal(range(text1, _i, $s), range(text2, _j, $s))
+ Diff::Equal(range(text1.doc, _i, $s), range(text2.doc, _j, $s))
};
}
Solution {
- text1: Range::new(text1, ..),
- text2: Range::new(text2, ..),
+ text1,
+ text2,
diffs: vec![$(range!($kind, $text)),*],
- utf8: true,
}
}};
}
-fn range<'a>(doc: &'a str, offset: &mut usize, text: &str) -> Range<'a> {
+fn range<'a>(doc: &'a [char], offset: &mut usize, text: &str) -> Range<'a> {
+ let len = text.chars().count();
let range = Range {
doc,
offset: *offset,
- len: text.len(),
+ len,
};
- *offset += text.len();
+ *offset += len;
range
}
@@ -65,12 +76,16 @@ macro_rules! assert_diffs {
}
fn same_diffs(expected: &[Chunk], actual: &[Diff]) -> bool {
+ fn eq(expected: &str, actual: &Range) -> bool {
+ expected.chars().eq(slice(*actual).iter().copied())
+ }
+
expected.len() == actual.len()
&& expected.iter().zip(actual).all(|pair| match pair {
- (Chunk::Insert(expected), Diff::Insert(actual)) => *expected == str(*actual),
- (Chunk::Delete(expected), Diff::Delete(actual)) => *expected == str(*actual),
+ (Chunk::Insert(expected), Diff::Insert(actual)) => eq(expected, actual),
+ (Chunk::Delete(expected), Diff::Delete(actual)) => eq(expected, actual),
(Chunk::Equal(expected), Diff::Equal(actual1, actual2)) => {
- *expected == str(*actual1) && *expected == str(*actual2)
+ eq(expected, actual1) && eq(expected, actual2)
}
(_, _) => false,
})
@@ -78,59 +93,56 @@ fn same_diffs(expected: &[Chunk], actual: &[Diff]) -> bool {
#[test]
fn test_common_prefix() {
- let text1 = Range::new("abc", ..);
- let text2 = Range::new("xyz", ..);
- assert_eq!(0, common_prefix_bytes(text1, text2), "Null case");
+ let text1 = range!("abc");
+ let text2 = range!("xyz");
+ assert_eq!(0, common_prefix(text1, text2), "Null case");
- let text1 = Range::new("1234abcdef", ..);
- let text2 = Range::new("1234xyz", ..);
- assert_eq!(4, common_prefix_bytes(text1, text2), "Non-null case");
+ let text1 = range!("1234abcdef");
+ let text2 = range!("1234xyz");
+ assert_eq!(4, common_prefix(text1, text2), "Non-null case");
- let text1 = Range::new("1234", ..);
- let text2 = Range::new("1234xyz", ..);
- assert_eq!(4, common_prefix_bytes(text1, text2), "Whole case");
+ let text1 = range!("1234");
+ let text2 = range!("1234xyz");
+ assert_eq!(4, common_prefix(text1, text2), "Whole case");
}
#[test]
fn test_common_suffix() {
- let text1 = Range::new("abc", ..);
- let text2 = Range::new("xyz", ..);
+ let text1 = range!("abc");
+ let text2 = range!("xyz");
assert_eq!(0, common_suffix(text1, text2), "Null case");
- assert_eq!(0, common_suffix_bytes(text1, text2), "Null case");
- let text1 = Range::new("abcdef1234", ..);
- let text2 = Range::new("xyz1234", ..);
+ let text1 = range!("abcdef1234");
+ let text2 = range!("xyz1234");
assert_eq!(4, common_suffix(text1, text2), "Non-null case");
- assert_eq!(4, common_suffix_bytes(text1, text2), "Non-null case");
- let text1 = Range::new("1234", ..);
- let text2 = Range::new("xyz1234", ..);
+ let text1 = range!("1234");
+ let text2 = range!("xyz1234");
assert_eq!(4, common_suffix(text1, text2), "Whole case");
- assert_eq!(4, common_suffix_bytes(text1, text2), "Whole case");
}
#[test]
fn test_common_overlap() {
let text1 = Range::empty();
- let text2 = Range::new("abcd", ..);
+ let text2 = range!("abcd");
assert_eq!(0, common_overlap(text1, text2), "Null case");
- let text1 = Range::new("abc", ..);
- let text2 = Range::new("abcd", ..);
+ let text1 = range!("abc");
+ let text2 = range!("abcd");
assert_eq!(3, common_overlap(text1, text2), "Whole case");
- let text1 = Range::new("123456", ..);
- let text2 = Range::new("abcd", ..);
+ let text1 = range!("123456");
+ let text2 = range!("abcd");
assert_eq!(0, common_overlap(text1, text2), "No overlap");
- let text1 = Range::new("123456xxx", ..);
- let text2 = Range::new("xxxabcd", ..);
+ let text1 = range!("123456xxx");
+ let text2 = range!("xxxabcd");
assert_eq!(3, common_overlap(text1, text2), "Overlap");
// Some overly clever languages (C#) may treat ligatures as equal to their
// component letters. E.g. U+FB01 == 'fi'
- let text1 = Range::new("fi", ..);
- let text2 = Range::new("\u{fb01}i", ..);
+ let text1 = range!("fi");
+ let text2 = range!("\u{fb01}i");
assert_eq!(0, common_overlap(text1, text2), "Unicode");
}
@@ -420,13 +432,12 @@ fn test_cleanup_semantic() {
#[test]
fn test_bisect() {
- let text1 = Range::new("cat", ..);
- let text2 = Range::new("map", ..);
+ let text1 = range!("cat");
+ let text2 = range!("map");
let solution = Solution {
text1,
text2,
diffs: bisect(text1, text2),
- utf8: false,
};
assert_diffs!(
[
@@ -446,24 +457,24 @@ fn test_main() {
let solution = main(Range::empty(), Range::empty());
assert_diffs!([], solution, "Null case");
- let solution = main(Range::new("abc", ..), Range::new("abc", ..));
+ let solution = main(range!("abc"), range!("abc"));
assert_diffs!([Equal("abc")], solution, "Equality");
- let solution = main(Range::new("abc", ..), Range::new("ab123c", ..));
+ let solution = main(range!("abc"), range!("ab123c"));
assert_diffs!(
[Equal("ab"), Insert("123"), Equal("c")],
solution,
"Simple insertion",
);
- let solution = main(Range::new("a123bc", ..), Range::new("abc", ..));
+ let solution = main(range!("a123bc"), range!("abc"));
assert_diffs!(
[Equal("a"), Delete("123"), Equal("bc")],
solution,
"Simple deletion",
);
- let solution = main(Range::new("abc", ..), Range::new("a123b456c", ..));
+ let solution = main(range!("abc"), range!("a123b456c"));
assert_diffs!(
[
Equal("a"),
@@ -476,7 +487,7 @@ fn test_main() {
"Two insertions",
);
- let solution = main(Range::new("a123b456c", ..), Range::new("abc", ..));
+ let solution = main(range!("a123b456c"), range!("abc"));
assert_diffs!(
[
Equal("a"),
@@ -489,12 +500,12 @@ fn test_main() {
"Two deletions",
);
- let solution = main(Range::new("a", ..), Range::new("b", ..));
+ let solution = main(range!("a"), range!("b"));
assert_diffs!([Delete("a"), Insert("b")], solution, "Simple case #1");
let solution = main(
- Range::new("Apples are a fruit.", ..),
- Range::new("Bananas are also fruit.", ..),
+ range!("Apples are a fruit."),
+ range!("Bananas are also fruit."),
);
assert_diffs!(
[
@@ -508,7 +519,7 @@ fn test_main() {
"Simple case #2",
);
- let solution = main(Range::new("ax\t", ..), Range::new("\u{0680}x\000", ..));
+ let solution = main(range!("ax\t"), range!("\u{0680}x\000"));
assert_diffs!(
[
Delete("a"),
@@ -521,7 +532,7 @@ fn test_main() {
"Simple case #3",
);
- let solution = main(Range::new("1ayb2", ..), Range::new("abxab", ..));
+ let solution = main(range!("1ayb2"), range!("abxab"));
assert_diffs!(
[
Delete("1"),
@@ -535,7 +546,7 @@ fn test_main() {
"Overlap #1",
);
- let solution = main(Range::new("abcy", ..), Range::new("xaxcxabc", ..));
+ let solution = main(range!("abcy"), range!("xaxcxabc"));
assert_diffs!(
[Insert("xaxcx"), Equal("abc"), Delete("y")],
solution,
@@ -543,8 +554,8 @@ fn test_main() {
);
let solution = main(
- Range::new("ABCDa=bcd=efghijklmnopqrsEFGHIJKLMNOefg", ..),
- Range::new("a-bcd-efghijklmnopqrs", ..),
+ range!("ABCDa=bcd=efghijklmnopqrsEFGHIJKLMNOefg"),
+ range!("a-bcd-efghijklmnopqrs"),
);
assert_diffs!(
[
@@ -563,8 +574,8 @@ fn test_main() {
);
let solution = main(
- Range::new("a [[Pennsylvania]] and [[New", ..),
- Range::new(" and [[Pennsylvania]]", ..),
+ range!("a [[Pennsylvania]] and [[New"),
+ range!(" and [[Pennsylvania]]"),
);
assert_diffs!(
[
diff --git a/vendor/dissimilar/tests/test.rs b/vendor/dissimilar/tests/test.rs
index e68fd4f11..7debb0593 100644
--- a/vendor/dissimilar/tests/test.rs
+++ b/vendor/dissimilar/tests/test.rs
@@ -21,7 +21,7 @@ fn test_unicode() {
}
#[test]
-fn test_unicode2() {
+fn test_issue9() {
let a = "[乀丁abcd一]";
let b = "[一abcd丁]";
let d = diff(a, b);
@@ -35,6 +35,18 @@ fn test_unicode2() {
Chunk::Delete("一"),
Chunk::Insert("丁"),
Chunk::Equal("]"),
- ]
+ ],
+ );
+}
+
+#[test]
+fn test_issue15() {
+ let a = "A のダ";
+ let b = "A ダ";
+ let d = diff(a, b);
+
+ assert_eq!(
+ d,
+ vec![Chunk::Equal("A "), Chunk::Delete("の"), Chunk::Equal("ダ")],
);
}
diff --git a/vendor/elsa/.cargo-checksum.json b/vendor/elsa/.cargo-checksum.json
new file mode 100644
index 000000000..a43c1ffd2
--- /dev/null
+++ b/vendor/elsa/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"Cargo.lock":"436a38effb1bc439febdcf0235758d480258326fb87c19c5cd482194e37d19f3","Cargo.toml":"3f3f154070e2d096c40b6080e233e0f081cd8c67b033fb5150badf7cb3f97a7f","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"15656cc11a8331f28c0986b8ab97220d3e76f98e60ed388b5ffad37dfac4710c","README.md":"72ec631a7cc4907ab80d87776b8eb1929d7cbd76b260f2fdb913714787f30dac","examples/arena.rs":"dd44f11e4b4e8b1eedca5ce5205aef3efface3c8888daa16b735dd476362b335","examples/fluentresource.rs":"d2bc2a1b02e6c92819bc608d91214591d8dc7d52f7f524c24e39ba5fe28ee6fe","examples/mutable_arena.rs":"553541b20ac97339cf89e2ef60810490f8362520911f3279f4129a30c2af6eb6","examples/string_interner.rs":"d8b427b71e6c340bf8ee01bc1245c82839fa6efb3dde6b91aab8791f0dfebbf9","examples/sync.rs":"bf9f395c029129fac6247068874b9514e0a174d342174dc4c65716acdbce3741","src/index_map.rs":"c265730dc36a49c8e7966226ebab0aa74f6c828fa6a5b3779a8df4bd34981c19","src/index_set.rs":"2173479eb3cd1009ed4e6b54ebc8aab8e0869d076b7e270dc937657e49838e01","src/lib.rs":"d26839bf88764445d2ec453b269b4359761186afda947101ee42c344cb0ad940","src/map.rs":"8f663631d817081dc8eac50d5235cac28f11dac0f8ebef6f57676d495b1aaab3","src/sync.rs":"8bc2e17981f58c9773e90d5ef40f66ebb1a13ee0f794294133486e6b4f5b50a0","src/vec.rs":"c9f053f0e22dc40ff1137d60b87650bf521f09a41111c4b8329b37af59893697"},"package":"f74077c3c3aedb99a2683919698285596662518ea13e5eedcf8bdd43b0d0453b"} \ No newline at end of file
diff --git a/vendor/elsa/Cargo.lock b/vendor/elsa/Cargo.lock
new file mode 100644
index 000000000..a03e9a806
--- /dev/null
+++ b/vendor/elsa/Cargo.lock
@@ -0,0 +1,39 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "autocfg"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
+
+[[package]]
+name = "elsa"
+version = "1.8.0"
+dependencies = [
+ "indexmap",
+ "stable_deref_trait",
+]
+
+[[package]]
+name = "hashbrown"
+version = "0.11.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e"
+
+[[package]]
+name = "indexmap"
+version = "1.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0f647032dfaa1f8b6dc29bd3edb7bbef4861b8b8007ebb118d6db284fd59f6ee"
+dependencies = [
+ "autocfg",
+ "hashbrown",
+]
+
+[[package]]
+name = "stable_deref_trait"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
diff --git a/vendor/elsa/Cargo.toml b/vendor/elsa/Cargo.toml
new file mode 100644
index 000000000..b9c473a20
--- /dev/null
+++ b/vendor/elsa/Cargo.toml
@@ -0,0 +1,47 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies.
+#
+# If you are reading this file be aware that the original Cargo.toml
+# will likely look very different (and much more reasonable).
+# See Cargo.toml.orig for the original contents.
+
+[package]
+edition = "2018"
+name = "elsa"
+version = "1.8.0"
+authors = ["Manish Goregaokar <manishsmail@gmail.com>"]
+description = "Append-only collections for Rust where borrows to entries can outlive insertions"
+documentation = "https://docs.rs/elsa/"
+readme = "README.md"
+keywords = [
+ "data-structure",
+ "map",
+ "frozen",
+ "cache",
+ "arena",
+]
+categories = [
+ "data-structures",
+ "caching",
+]
+license = "MIT/Apache-2.0"
+repository = "https://github.com/manishearth/elsa"
+
+[package.metadata.docs.rs]
+features = ["indexmap"]
+
+[[example]]
+name = "string_interner"
+path = "examples/string_interner.rs"
+required-features = ["indexmap"]
+
+[dependencies.indexmap]
+version = "1.6"
+optional = true
+
+[dependencies.stable_deref_trait]
+version = "1.1.1"
diff --git a/vendor/toml/LICENSE-APACHE b/vendor/elsa/LICENSE-APACHE
index 16fe87b06..16fe87b06 100644
--- a/vendor/toml/LICENSE-APACHE
+++ b/vendor/elsa/LICENSE-APACHE
diff --git a/vendor/elsa/LICENSE-MIT b/vendor/elsa/LICENSE-MIT
new file mode 100644
index 000000000..d74f9e93d
--- /dev/null
+++ b/vendor/elsa/LICENSE-MIT
@@ -0,0 +1,27 @@
+MIT License
+
+Copyright (c) 2019 Manish Goregaokar
+
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
diff --git a/vendor/elsa/README.md b/vendor/elsa/README.md
new file mode 100644
index 000000000..cd4a5b94a
--- /dev/null
+++ b/vendor/elsa/README.md
@@ -0,0 +1,19 @@
+## elsa
+
+[![Build Status](https://travis-ci.org/Manishearth/elsa.svg?branch=master)](https://travis-ci.org/Manishearth/elsa)
+[![Current Version](https://img.shields.io/crates/v/elsa.svg)](https://crates.io/crates/elsa)
+[![License: MIT/Apache-2.0](https://img.shields.io/crates/l/elsa.svg)](#license)
+
+_🎵 Immutability never bothered me anyway 🎶_
+
+This crate provides various "frozen" collections.
+
+These are append-only collections where references to entries can be held on to even across insertions. This is safe because these collections only support storing data that's present behind some indirection -- i.e. `String`, `Vec<T>`, `Box<T>`, etc, and they only yield references to the data behind the allocation (`&str`, `&[T]`, and `&T` respectively)
+
+The typical use case is having a global cache of strings or other data which the rest of the program borrows from.
+
+### Running all examples
+
+```bash
+cargo test --examples --features indexmap
+```
diff --git a/vendor/elsa/examples/arena.rs b/vendor/elsa/examples/arena.rs
new file mode 100644
index 000000000..79913c2e7
--- /dev/null
+++ b/vendor/elsa/examples/arena.rs
@@ -0,0 +1,56 @@
+use elsa::FrozenVec;
+
+fn main() {
+ let arena = Arena::new();
+ let lonely = arena.add_thing("lonely", vec![]);
+ let best_friend = arena.add_thing("best friend", vec![lonely]);
+ let threes_a_crowd = arena.add_thing("threes a crowd", vec![lonely, best_friend]);
+ let rando = arena.add_thing("rando", vec![]);
+ let _facebook = arena.add_thing("facebook", vec![rando, threes_a_crowd, lonely, best_friend]);
+
+ assert!(cmp_ref(lonely, best_friend.friends[0]));
+ assert!(cmp_ref(best_friend, threes_a_crowd.friends[1]));
+ arena.dump();
+}
+
+struct Arena<'arena> {
+ things: FrozenVec<Box<Thing<'arena>>>,
+}
+
+struct Thing<'arena> {
+ pub friends: Vec<ThingRef<'arena>>,
+ pub name: &'static str,
+}
+
+type ThingRef<'arena> = &'arena Thing<'arena>;
+
+impl<'arena> Arena<'arena> {
+ fn new() -> Arena<'arena> {
+ Arena {
+ things: FrozenVec::new(),
+ }
+ }
+
+ fn add_thing(
+ &'arena self,
+ name: &'static str,
+ friends: Vec<ThingRef<'arena>>,
+ ) -> ThingRef<'arena> {
+ let idx = self.things.len();
+ self.things.push(Box::new(Thing { name, friends }));
+ &self.things[idx]
+ }
+
+ fn dump(&'arena self) {
+ for thing in &self.things {
+ println!("friends of {}:", thing.name);
+ for friend in &thing.friends {
+ println!("\t{}", friend.name);
+ }
+ }
+ }
+}
+
+fn cmp_ref<T>(x: &T, y: &T) -> bool {
+ x as *const T as usize == y as *const T as usize
+}
diff --git a/vendor/elsa/examples/fluentresource.rs b/vendor/elsa/examples/fluentresource.rs
new file mode 100644
index 000000000..dba4aaed8
--- /dev/null
+++ b/vendor/elsa/examples/fluentresource.rs
@@ -0,0 +1,50 @@
+use elsa::FrozenMap;
+
+/// Stores some parsed AST representation of the file
+#[derive(Debug)]
+pub struct FluentResource<'mgr>(&'mgr str);
+
+impl<'mgr> FluentResource<'mgr> {
+ pub fn new(s: &'mgr str) -> Self {
+ // very simple parse step
+ FluentResource(&s[0..1])
+ }
+}
+
+/// Stores loaded files and parsed ASTs
+///
+/// Parsed ASTs are zero-copy and
+/// contain references to the files
+pub struct ResourceManager<'mgr> {
+ strings: FrozenMap<String, String>,
+ resources: FrozenMap<String, Box<FluentResource<'mgr>>>,
+}
+
+impl<'mgr> ResourceManager<'mgr> {
+ pub fn new() -> Self {
+ ResourceManager {
+ strings: FrozenMap::new(),
+ resources: FrozenMap::new(),
+ }
+ }
+
+ pub fn get_resource(&'mgr self, path: &str) -> &'mgr FluentResource<'mgr> {
+ let strings = &self.strings;
+
+ if strings.get(path).is_some() {
+ return self.resources.get(path).unwrap();
+ } else {
+ // pretend to load a file
+ let string = format!("file for {}", path);
+ let val = self.strings.insert(path.to_string(), string);
+ let res = FluentResource::new(val);
+ self.resources.insert(path.to_string(), Box::new(res))
+ }
+ }
+}
+
+fn main() {
+ let manager = ResourceManager::new();
+ let resource = manager.get_resource("somefile.ftl");
+ println!("{:?}", resource);
+}
diff --git a/vendor/elsa/examples/mutable_arena.rs b/vendor/elsa/examples/mutable_arena.rs
new file mode 100644
index 000000000..d5db2d331
--- /dev/null
+++ b/vendor/elsa/examples/mutable_arena.rs
@@ -0,0 +1,79 @@
+use elsa::FrozenVec;
+
+fn main() {
+ let arena = Arena::new();
+ let lonely = arena.add_person("lonely", vec![]);
+ let best_friend = arena.add_person("best friend", vec![lonely]);
+ let threes_a_crowd = arena.add_person("threes a crowd", vec![lonely, best_friend]);
+ let rando = arena.add_person("rando", vec![]);
+ let _everyone = arena.add_person(
+ "follows everyone",
+ vec![rando, threes_a_crowd, lonely, best_friend],
+ );
+ arena.dump();
+}
+
+struct Arena<'arena> {
+ people: FrozenVec<Box<Person<'arena>>>,
+}
+
+struct Person<'arena> {
+ pub follows: FrozenVec<PersonRef<'arena>>,
+ pub reverse_follows: FrozenVec<PersonRef<'arena>>,
+ pub name: &'static str,
+}
+
+type PersonRef<'arena> = &'arena Person<'arena>;
+
+impl<'arena> Arena<'arena> {
+ fn new() -> Arena<'arena> {
+ Arena {
+ people: FrozenVec::new(),
+ }
+ }
+
+ fn add_person(
+ &'arena self,
+ name: &'static str,
+ follows: Vec<PersonRef<'arena>>,
+ ) -> PersonRef<'arena> {
+ let idx = self.people.len();
+ self.people.push(Box::new(Person {
+ name,
+ follows: follows.into(),
+ reverse_follows: Default::default(),
+ }));
+ let me = &self.people[idx];
+ for friend in &me.follows {
+ friend.reverse_follows.push(me)
+ }
+ me
+ }
+
+ fn dump(&'arena self) {
+ for thing in &self.people {
+ println!("{} network:", thing.name);
+ println!("\tfollowing:");
+ for friend in &thing.follows {
+ println!("\t\t{}", friend.name);
+ }
+ println!("\tfollowers:");
+ for friend in &thing.reverse_follows {
+ println!("\t\t{}", friend.name);
+ }
+ }
+ }
+}
+
+// Note that the following will cause the above code to stop compiling
+// since non-eyepatched custom destructors can potentially
+// read deallocated data.
+//
+// impl<'arena> Drop for Person<'arena> {
+// fn drop(&mut self) {
+// println!("goodbye {:?}", self.name);
+// for friend in &self.follows {
+// println!("\t\t{}", friend.name);
+// }
+// }
+// }
diff --git a/vendor/elsa/examples/string_interner.rs b/vendor/elsa/examples/string_interner.rs
new file mode 100644
index 000000000..fd039f7ba
--- /dev/null
+++ b/vendor/elsa/examples/string_interner.rs
@@ -0,0 +1,61 @@
+use std::collections::BTreeSet;
+use std::convert::AsRef;
+
+use elsa::FrozenIndexSet;
+
+struct StringInterner {
+ set: FrozenIndexSet<String>,
+}
+
+impl StringInterner {
+ fn new() -> Self {
+ StringInterner {
+ set: FrozenIndexSet::new(),
+ }
+ }
+
+ fn get_or_intern<T>(&self, value: T) -> usize
+ where
+ T: AsRef<str>,
+ {
+ // TODO use Entry in case the standard Entry API gets improved
+ // (here to avoid premature allocation or double lookup)
+ self.set.insert_full(value.as_ref().to_string()).0
+ }
+
+ fn get<T>(&self, value: T) -> Option<usize>
+ where
+ T: AsRef<str>,
+ {
+ self.set.get_full(value.as_ref()).map(|(i, _r)| i)
+ }
+
+ fn resolve(&self, index: usize) -> Option<&str> {
+ self.set.get_index(index)
+ }
+}
+
+fn main() {
+ let interner = StringInterner::new();
+ let lonely = interner.get_or_intern("lonely");
+ let best_friend = interner.get_or_intern("best friend");
+ let threes_a_crowd = interner.get_or_intern("threes a crowd");
+ let rando = interner.get_or_intern("rando");
+ let _facebook = interner.get_or_intern("facebook");
+
+ let best_friend_2 = interner.get_or_intern("best friend");
+ let best_friend_3 = interner.get("best friend").unwrap();
+
+ let best_friend_ref = interner.resolve(best_friend).unwrap();
+
+ let mut set = BTreeSet::new();
+ set.insert(lonely);
+ set.insert(best_friend);
+ set.insert(threes_a_crowd);
+ set.insert(rando);
+ set.insert(best_friend_2);
+ assert_eq!(set.len(), 4);
+ assert_eq!(best_friend, best_friend_2);
+ assert_eq!(best_friend_2, best_friend_3);
+ assert_eq!(best_friend_ref, "best friend");
+}
diff --git a/vendor/elsa/examples/sync.rs b/vendor/elsa/examples/sync.rs
new file mode 100644
index 000000000..c6d9eb3cc
--- /dev/null
+++ b/vendor/elsa/examples/sync.rs
@@ -0,0 +1,26 @@
+use elsa::sync::*;
+
+use std::sync::Arc;
+use std::thread;
+use std::time::Duration;
+
+fn main() {
+ let a = Arc::new(FrozenMap::new());
+ for i in 1..10 {
+ let b = a.clone();
+ thread::spawn(move || {
+ b.insert(i, i.to_string());
+ thread::sleep(Duration::from_millis(300));
+ loop {
+ if let Some(opposite) = b.get(&(10 - i)) {
+ assert!(opposite.parse::<i32>().unwrap() == 10 - i);
+ break;
+ } else {
+ thread::sleep(Duration::from_millis(200));
+ }
+ }
+ });
+ }
+
+ thread::sleep(Duration::from_millis(1000));
+}
diff --git a/vendor/elsa/src/index_map.rs b/vendor/elsa/src/index_map.rs
new file mode 100644
index 000000000..3c97dfbaa
--- /dev/null
+++ b/vendor/elsa/src/index_map.rs
@@ -0,0 +1,215 @@
+use std::borrow::Borrow;
+use std::cell::{Cell, UnsafeCell};
+use std::collections::hash_map::RandomState;
+use std::hash::{BuildHasher, Hash};
+use std::iter::FromIterator;
+use std::ops::Index;
+
+use indexmap::IndexMap;
+use stable_deref_trait::StableDeref;
+
+/// Append-only version of `indexmap::IndexMap` where
+/// insertion does not require mutable access
+pub struct FrozenIndexMap<K, V, S = RandomState> {
+ map: UnsafeCell<IndexMap<K, V, S>>,
+ /// Eq/Hash implementations can have side-effects, and using Rc it is possible
+ /// for FrozenIndexMap::insert to be called on a key that itself contains the same
+ /// `FrozenIndexMap`, whose `eq` implementation also calls FrozenIndexMap::insert
+ ///
+ /// We use this `in_use` flag to guard against any reentrancy.
+ in_use: Cell<bool>,
+}
+
+// safety: UnsafeCell implies !Sync
+
+impl<K: Eq + Hash, V> FrozenIndexMap<K, V> {
+ pub fn new() -> Self {
+ Self {
+ map: UnsafeCell::new(Default::default()),
+ in_use: Cell::new(false),
+ }
+ }
+}
+
+impl<K: Eq + Hash, V: StableDeref, S: BuildHasher> FrozenIndexMap<K, V, S> {
+ // these should never return &K or &V
+ // these should never delete any entries
+ pub fn insert(&self, k: K, v: V) -> &V::Target {
+ assert!(!self.in_use.get());
+ self.in_use.set(true);
+ let ret = unsafe {
+ let map = self.map.get();
+ &*(*map).entry(k).or_insert(v)
+ };
+ self.in_use.set(false);
+ ret
+ }
+
+ // these should never return &K or &V
+ // these should never delete any entries
+ pub fn insert_full(&self, k: K, v: V) -> (usize, &V::Target) {
+ assert!(!self.in_use.get());
+ self.in_use.set(true);
+ let ret = unsafe {
+ let map = self.map.get();
+ let entry = (*map).entry(k);
+ let index = entry.index();
+ (index, &**entry.or_insert(v))
+ };
+ self.in_use.set(false);
+ ret
+ }
+
+ /// Returns a reference to the value corresponding to the key.
+ ///
+ /// The key may be any borrowed form of the map's key type, but
+ /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
+ /// the key type.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use elsa::FrozenIndexMap;
+ ///
+ /// let map = FrozenIndexMap::new();
+ /// map.insert(1, Box::new("a"));
+ /// assert_eq!(map.get(&1), Some(&"a"));
+ /// assert_eq!(map.get(&2), None);
+ /// ```
+ pub fn get<Q: ?Sized>(&self, k: &Q) -> Option<&V::Target>
+ where
+ K: Borrow<Q>,
+ Q: Hash + Eq,
+ {
+ assert!(!self.in_use.get());
+ self.in_use.set(true);
+ let ret = unsafe {
+ let map = self.map.get();
+ (*map).get(k).map(|x| &**x)
+ };
+ self.in_use.set(false);
+ ret
+ }
+
+ pub fn get_index(&self, index: usize) -> Option<(&K::Target, &V::Target)>
+ where
+ K: StableDeref,
+ {
+ assert!(!self.in_use.get());
+ self.in_use.set(true);
+ let ret = unsafe {
+ let map = self.map.get();
+ (*map).get_index(index).map(|(k, v)| (&**k, &**v))
+ };
+ self.in_use.set(false);
+ ret
+ }
+
+ /// Applies a function to the owner of the value corresponding to the key (if any).
+ ///
+ /// The key may be any borrowed form of the map's key type, but
+ /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
+ /// the key type.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use elsa::FrozenIndexMap;
+ ///
+ /// let map = FrozenIndexMap::new();
+ /// map.insert(1, Box::new("a"));
+ /// assert_eq!(map.map_get(&1, Clone::clone), Some(Box::new("a")));
+ /// assert_eq!(map.map_get(&2, Clone::clone), None);
+ /// ```
+ pub fn map_get<Q: ?Sized, T, F>(&self, k: &Q, f: F) -> Option<T>
+ where
+ K: Borrow<Q>,
+ Q: Hash + Eq,
+ F: FnOnce(&V) -> T,
+ {
+ assert!(!self.in_use.get());
+ self.in_use.set(true);
+ let ret = unsafe {
+ let map = self.map.get();
+ (*map).get(k).map(f)
+ };
+ self.in_use.set(false);
+ ret
+ }
+
+ pub fn into_map(self) -> IndexMap<K, V, S> {
+ self.map.into_inner()
+ }
+
+ /// Get mutable access to the underlying [`IndexMap`].
+ ///
+ /// This is safe, as it requires a `&mut self`, ensuring nothing is using
+ /// the 'frozen' contents.
+ pub fn as_mut(&mut self) -> &mut IndexMap<K, V, S> {
+ unsafe { &mut *self.map.get() }
+ }
+
+ /// Returns true if the map contains no elements.
+ pub fn is_empty(&self) -> bool {
+ assert!(!self.in_use.get());
+ self.in_use.set(true);
+ let ret = unsafe {
+ let map = self.map.get();
+ (*map).is_empty()
+ };
+ self.in_use.set(false);
+ ret
+ }
+}
+
+impl<K, V, S> From<IndexMap<K, V, S>> for FrozenIndexMap<K, V, S> {
+ fn from(map: IndexMap<K, V, S>) -> Self {
+ Self {
+ map: UnsafeCell::new(map),
+ in_use: Cell::new(false),
+ }
+ }
+}
+
+impl<Q: ?Sized, K: Eq + Hash, V: StableDeref, S: BuildHasher> Index<&Q> for FrozenIndexMap<K, V, S>
+ where
+ Q: Eq + Hash,
+ K: Eq + Hash + Borrow<Q>,
+ V: StableDeref,
+ S: BuildHasher
+{
+ type Output = V::Target;
+
+ /// # Examples
+ ///
+ /// ```
+ /// use elsa::FrozenIndexMap;
+ ///
+ /// let map = FrozenIndexMap::new();
+ /// map.insert(1, Box::new("a"));
+ /// assert_eq!(map[&1], "a");
+ /// ```
+ fn index(&self, idx: &Q) -> &V::Target {
+ self.get(&idx)
+ .expect("attempted to index FrozenIndexMap with unknown key")
+ }
+}
+
+impl<K: Eq + Hash, V, S: BuildHasher + Default> FromIterator<(K, V)> for FrozenIndexMap<K, V, S> {
+ fn from_iter<T>(iter: T) -> Self
+ where
+ T: IntoIterator<Item = (K, V)>,
+ {
+ let map: IndexMap<_, _, _> = iter.into_iter().collect();
+ map.into()
+ }
+}
+
+impl<K: Eq + Hash, V, S: Default> Default for FrozenIndexMap<K, V, S> {
+ fn default() -> Self {
+ Self {
+ map: UnsafeCell::new(Default::default()),
+ in_use: Cell::new(false),
+ }
+ }
+}
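
A minimal usage sketch of the `FrozenIndexMap` API added above (assuming the crate's `indexmap` feature is enabled); references handed out by `insert` and `insert_full` remain valid across later insertions:

```
use elsa::FrozenIndexMap;

fn main() {
    let map: FrozenIndexMap<String, Box<usize>> = FrozenIndexMap::new();

    // `insert` hands out a reference to the boxed target, never to the map's
    // own storage, so `a` stays valid across the later insertions.
    let a = map.insert("a".to_string(), Box::new(1));
    let (idx, b) = map.insert_full("b".to_string(), Box::new(2));

    assert_eq!((*a, *b), (1, 2));
    // Insertion order is preserved; the returned index can be used for lookup.
    assert_eq!(map.get_index(idx), Some(("b", &2)));
}
```
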
diff --git a/vendor/elsa/src/index_set.rs b/vendor/elsa/src/index_set.rs
new file mode 100644
index 000000000..1222cde05
--- /dev/null
+++ b/vendor/elsa/src/index_set.rs
@@ -0,0 +1,180 @@
+use std::borrow::Borrow;
+use std::cell::{Cell, UnsafeCell};
+use std::collections::hash_map::RandomState;
+use std::hash::{BuildHasher, Hash};
+use std::iter::FromIterator;
+use std::ops::Index;
+
+use indexmap::IndexSet;
+use stable_deref_trait::StableDeref;
+
+/// Append-only version of `indexmap::IndexSet` where
+/// insertion does not require mutable access
+pub struct FrozenIndexSet<T, S = RandomState> {
+ set: UnsafeCell<IndexSet<T, S>>,
+ /// Eq/Hash implementations can have side-effects, and using Rc it is possible
+ /// for FrozenIndexSet::insert to be called on a key that itself contains the same
+ /// `FrozenIndexSet`, whose `eq` implementation also calls FrozenIndexSet::insert
+ ///
+ /// We use this `in_use` flag to guard against any reentrancy.
+ in_use: Cell<bool>,
+}
+
+// safety: UnsafeCell implies !Sync
+
+impl<T: Eq + Hash> FrozenIndexSet<T> {
+ pub fn new() -> Self {
+ Self::from(IndexSet::new())
+ }
+}
+
+impl<T: Eq + Hash + StableDeref, S: BuildHasher> FrozenIndexSet<T, S> {
+ // these should never return &T
+ // these should never delete any entries
+ pub fn insert(&self, value: T) -> &T::Target {
+ assert!(!self.in_use.get());
+ self.in_use.set(true);
+ let ret = unsafe {
+ let set = self.set.get();
+ let (index, _was_vacant) = (*set).insert_full(value);
+ &*(*set)[index]
+ };
+ self.in_use.set(false);
+ ret
+ }
+
+ // these should never return &T
+ // these should never delete any entries
+ pub fn insert_full(&self, value: T) -> (usize, &T::Target) {
+ assert!(!self.in_use.get());
+ self.in_use.set(true);
+ let ret = unsafe {
+ let set = self.set.get();
+ let (index, _was_vacant) = (*set).insert_full(value);
+ (index, &*(*set)[index])
+ };
+ self.in_use.set(false);
+ ret
+ }
+
+ // TODO implement in case the standard Entry API gets improved
+ // // TODO avoid double lookup
+ // pub fn entry<Q: ?Sized>(&self, value: &Q) -> Entry<T, Q>
+ // where Q: Hash + Equivalent<T> + ToOwned<Owned = T>
+ // {
+ // assert!(!self.in_use.get());
+ // self.in_use.set(true);
+ // unsafe {
+ // let set = self.set.get();
+ // match (*set).get_full(value) {
+ // Some((index, reference)) => {
+ // Entry::Occupied(OccupiedEntry {
+ // index,
+ // reference,
+ // set: &*set,
+ // })
+ // }
+ // None => {
+ // Entry::Vacant(VacantEntry {
+ // value: Cow::Borrowed(value),
+ // set: &*set,
+ // })
+ // }
+ // }
+ // }
+ // }
+
+ pub fn get<Q: ?Sized>(&self, k: &Q) -> Option<&T::Target>
+ where
+ T: Borrow<Q>,
+ Q: Hash + Eq,
+ {
+ assert!(!self.in_use.get());
+ self.in_use.set(true);
+ let ret = unsafe {
+ let set = self.set.get();
+ (*set).get(k).map(|x| &**x)
+ };
+ self.in_use.set(false);
+ ret
+ }
+
+ pub fn get_full<Q: ?Sized>(&self, k: &Q) -> Option<(usize, &T::Target)>
+ where
+ T: Borrow<Q>,
+ Q: Hash + Eq,
+ {
+ assert!(!self.in_use.get());
+ self.in_use.set(true);
+ let ret = unsafe {
+ let set = self.set.get();
+ (*set).get_full(k).map(|(i, x)| (i, &**x))
+ };
+ self.in_use.set(false);
+ ret
+ }
+
+ pub fn get_index(&self, index: usize) -> Option<&T::Target> {
+ assert!(!self.in_use.get());
+ self.in_use.set(true);
+ let ret = unsafe {
+ let set = self.set.get();
+ (*set).get_index(index).map(|r| &**r)
+ };
+ self.in_use.set(false);
+ ret
+ }
+
+ pub fn into_set(self) -> IndexSet<T, S> {
+ self.set.into_inner()
+ }
+
+ /// Get mutable access to the underlying [`IndexSet`].
+ ///
+ /// This is safe, as it requires a `&mut self`, ensuring nothing is using
+ /// the 'frozen' contents.
+ pub fn as_mut(&mut self) -> &mut IndexSet<T, S> {
+ unsafe { &mut *self.set.get() }
+ }
+
+ // TODO add more
+}
+
+impl<T, S> From<IndexSet<T, S>> for FrozenIndexSet<T, S> {
+ fn from(set: IndexSet<T, S>) -> Self {
+ Self {
+ set: UnsafeCell::new(set),
+ in_use: Cell::new(false),
+ }
+ }
+}
+
+impl<T: Eq + Hash + StableDeref, S> Index<usize> for FrozenIndexSet<T, S> {
+ type Output = T::Target;
+ fn index(&self, idx: usize) -> &T::Target {
+ assert!(!self.in_use.get());
+ self.in_use.set(true);
+ let ret = unsafe {
+ let set = self.set.get();
+ &*(*set)[idx]
+ };
+ self.in_use.set(false);
+ ret
+ }
+}
+
+impl<T: Eq + Hash, S: Default + BuildHasher> FromIterator<T> for FrozenIndexSet<T, S> {
+ fn from_iter<U>(iter: U) -> Self
+ where
+ U: IntoIterator<Item = T>,
+ {
+ let set: IndexSet<_, _> = iter.into_iter().collect();
+ set.into()
+ }
+}
+
+impl<T: Eq + Hash, S: Default> Default for FrozenIndexSet<T, S> {
+ fn default() -> Self {
+ Self::from(IndexSet::default())
+ }
+}
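
A minimal sketch of using `FrozenIndexSet` as a small string interner, relying only on the `insert`, `insert_full`, and `get_full` methods shown above:

```
use elsa::FrozenIndexSet;

fn main() {
    // A tiny interner: equal strings share one slot, and the `&str` borrows
    // stay valid while new strings keep being inserted through `&self`.
    let set: FrozenIndexSet<String> = FrozenIndexSet::new();

    let (idx_a, a) = set.insert_full("hello".to_string());
    let b = set.insert("world".to_string());
    let a_again = set.insert("hello".to_string());

    assert_eq!(a, "hello");
    assert_eq!(b, "world");
    // Re-inserting an equal value returns the original entry.
    assert_eq!(set.get_full("hello"), Some((idx_a, a_again)));
}
```
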
diff --git a/vendor/elsa/src/lib.rs b/vendor/elsa/src/lib.rs
new file mode 100644
index 000000000..848dceb34
--- /dev/null
+++ b/vendor/elsa/src/lib.rs
@@ -0,0 +1,29 @@
+//! _🎵 Immutability never bothered me anyway 🎶_
+//!
+//! This crate provides various "Frozen" collections.
+//!
+//! These are append-only collections where references to entries can be held
+//! on to even across insertions. This is safe because these collections only
+//! support storing data that's present behind some indirection -- i.e. `String`,
+//! `Vec<T>`, `Box<T>`, etc, and they only yield references to the data behind the
+//! allocation (`&str`, `&[T]`, and `&T` respectively)
+//!
+//! The typical use case is having a global cache of strings or other data which the rest of the program borrows from.
+
+pub mod map;
+pub mod vec;
+
+#[cfg(feature = "indexmap")]
+pub mod index_map;
+#[cfg(feature = "indexmap")]
+pub mod index_set;
+
+pub mod sync;
+
+pub use map::{FrozenBTreeMap, FrozenMap};
+pub use vec::FrozenVec;
+
+#[cfg(feature = "indexmap")]
+pub use index_map::FrozenIndexMap;
+#[cfg(feature = "indexmap")]
+pub use index_set::FrozenIndexSet;
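
A minimal sketch of the crate-level use case described in the module docs above: a read-through cache that hands out borrows surviving later insertions. The `Cache` type and `get_or_load` helper are illustrative, not part of elsa:

```
use elsa::FrozenMap;

// A read-through cache: lookups return `&str` borrows that remain valid even
// though later lookups may insert new entries through a plain `&self`.
struct Cache {
    map: FrozenMap<u32, String>,
}

impl Cache {
    fn get_or_load(&self, id: u32) -> &str {
        if let Some(s) = self.map.get(&id) {
            return s;
        }
        // `insert` takes `&self`, so no mutable borrow is ever needed here.
        self.map.insert(id, format!("value-{}", id))
    }
}

fn main() {
    let cache = Cache { map: FrozenMap::new() };
    let first = cache.get_or_load(1);
    let second = cache.get_or_load(2); // does not invalidate `first`
    assert_eq!((first, second), ("value-1", "value-2"));
}
```
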
diff --git a/vendor/elsa/src/map.rs b/vendor/elsa/src/map.rs
new file mode 100644
index 000000000..2faa19ce2
--- /dev/null
+++ b/vendor/elsa/src/map.rs
@@ -0,0 +1,451 @@
+use std::borrow::Borrow;
+use std::cell::{Cell, UnsafeCell};
+use std::collections::hash_map::RandomState;
+use std::collections::BTreeMap;
+use std::collections::HashMap;
+use std::hash::{BuildHasher, Hash};
+use std::iter::FromIterator;
+use std::ops::Index;
+
+use stable_deref_trait::StableDeref;
+
+/// Append-only version of `std::collections::HashMap` where
+/// insertion does not require mutable access
+pub struct FrozenMap<K, V, S = RandomState> {
+ map: UnsafeCell<HashMap<K, V, S>>,
+ /// Eq/Hash implementations can have side-effects, and using Rc it is possible
+ /// for FrozenMap::insert to be called on a key that itself contains the same
+ /// `FrozenMap`, whose `eq` implementation also calls FrozenMap::insert
+ ///
+ /// We use this `in_use` flag to guard against any reentrancy.
+ in_use: Cell<bool>,
+}
+
+// safety: UnsafeCell implies !Sync
+
+impl<K: Eq + Hash, V> FrozenMap<K, V> {
+ pub fn new() -> Self {
+ Self {
+ map: UnsafeCell::new(Default::default()),
+ in_use: Cell::new(false),
+ }
+ }
+
+ /// # Examples
+ ///
+ /// ```
+ /// use elsa::FrozenMap;
+ ///
+ /// let map = FrozenMap::new();
+ /// assert_eq!(map.len(), 0);
+ /// map.insert(1, Box::new("a"));
+ /// assert_eq!(map.len(), 1);
+ /// ```
+ pub fn len(&self) -> usize {
+ assert!(!self.in_use.get());
+ self.in_use.set(true);
+ let len = unsafe {
+ let map = self.map.get();
+ (*map).len()
+ };
+ self.in_use.set(false);
+ len
+ }
+
+ /// # Examples
+ ///
+ /// ```
+ /// use elsa::FrozenMap;
+ ///
+ /// let map = FrozenMap::new();
+ /// assert_eq!(map.is_empty(), true);
+ /// map.insert(1, Box::new("a"));
+ /// assert_eq!(map.is_empty(), false);
+ /// ```
+ pub fn is_empty(&self) -> bool {
+ self.len() == 0
+ }
+}
+
+impl<K: Eq + Hash, V: StableDeref, S: BuildHasher> FrozenMap<K, V, S> {
+ // these should never return &K or &V
+ // these should never delete any entries
+ pub fn insert(&self, k: K, v: V) -> &V::Target {
+ assert!(!self.in_use.get());
+ self.in_use.set(true);
+ let ret = unsafe {
+ let map = self.map.get();
+ &*(*map).entry(k).or_insert(v)
+ };
+ self.in_use.set(false);
+ ret
+ }
+
+ /// Returns a reference to the value corresponding to the key.
+ ///
+ /// The key may be any borrowed form of the map's key type, but
+ /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
+ /// the key type.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use elsa::FrozenMap;
+ ///
+ /// let map = FrozenMap::new();
+ /// map.insert(1, Box::new("a"));
+ /// assert_eq!(map.get(&1), Some(&"a"));
+ /// assert_eq!(map.get(&2), None);
+ /// ```
+ pub fn get<Q: ?Sized>(&self, k: &Q) -> Option<&V::Target>
+ where
+ K: Borrow<Q>,
+ Q: Hash + Eq,
+ {
+ assert!(!self.in_use.get());
+ self.in_use.set(true);
+ let ret = unsafe {
+ let map = self.map.get();
+ (*map).get(k).map(|x| &**x)
+ };
+ self.in_use.set(false);
+ ret
+ }
+
+ /// Applies a function to the owner of the value corresponding to the key (if any).
+ ///
+ /// The key may be any borrowed form of the map's key type, but
+ /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
+ /// the key type.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use elsa::FrozenMap;
+ ///
+ /// let map = FrozenMap::new();
+ /// map.insert(1, Box::new("a"));
+ /// assert_eq!(map.map_get(&1, Clone::clone), Some(Box::new("a")));
+ /// assert_eq!(map.map_get(&2, Clone::clone), None);
+ /// ```
+ pub fn map_get<Q: ?Sized, T, F>(&self, k: &Q, f: F) -> Option<T>
+ where
+ K: Borrow<Q>,
+ Q: Hash + Eq,
+ F: FnOnce(&V) -> T,
+ {
+ assert!(!self.in_use.get());
+ self.in_use.set(true);
+ let ret = unsafe {
+ let map = self.map.get();
+ (*map).get(k).map(f)
+ };
+ self.in_use.set(false);
+ ret
+ }
+
+ pub fn into_map(self) -> HashMap<K, V, S> {
+ self.map.into_inner()
+ }
+
+ // TODO add more
+}
+
+impl<K: Eq + Hash + StableDeref, V: StableDeref, S: BuildHasher> FrozenMap<K, V, S> {
+ /// Returns a reference to the key and value matching a borrowed
+ /// key.
+ ///
+ /// The key argument may be any borrowed form of the map's key type,
+ /// but [`Hash`] and [`Eq`] on the borrowed form *must* match those
+ /// for the key type.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use elsa::FrozenMap;
+ ///
+ /// let map = FrozenMap::new();
+ /// map.insert(Box::new("1"), Box::new("a"));
+ /// assert_eq!(map.get_key_value(&"1"), Some((&"1", &"a")));
+ /// assert_eq!(map.get_key_value(&"2"), None);
+ /// ```
+ pub fn get_key_value<Q: ?Sized>(&self, k: &Q) -> Option<(&K::Target, &V::Target)>
+ where
+ K: Borrow<Q>,
+ Q: Hash + Eq,
+ {
+ assert!(!self.in_use.get());
+ self.in_use.set(true);
+ let ret = unsafe {
+ let map = self.map.get();
+ (*map).get_key_value(k).map(|(k, v)| (&**k, &**v))
+ };
+ self.in_use.set(false);
+ ret
+ }
+}
+
+impl<K, V, S> std::convert::AsMut<HashMap<K, V, S>> for FrozenMap<K, V, S> {
+ /// Get mutable access to the underlying [`HashMap`].
+ ///
+ /// This is safe, as it requires a `&mut self`, ensuring nothing is using
+ /// the 'frozen' contents.
+ fn as_mut(&mut self) -> &mut HashMap<K, V, S> {
+ unsafe { &mut *self.map.get() }
+ }
+}
+
+impl<K, V, S> From<HashMap<K, V, S>> for FrozenMap<K, V, S> {
+ fn from(map: HashMap<K, V, S>) -> Self {
+ Self {
+ map: UnsafeCell::new(map),
+ in_use: Cell::new(false),
+ }
+ }
+}
+
+impl<Q: ?Sized, K, V, S> Index<&Q> for FrozenMap<K, V, S>
+where
+ Q: Eq + Hash,
+ K: Eq + Hash + Borrow<Q>,
+ V: StableDeref,
+ S: BuildHasher,
+{
+ type Output = V::Target;
+
+ /// # Examples
+ ///
+ /// ```
+ /// use elsa::FrozenMap;
+ ///
+ /// let map = FrozenMap::new();
+ /// map.insert(1, Box::new("a"));
+ /// assert_eq!(map[&1], "a");
+ /// ```
+ fn index(&self, idx: &Q) -> &V::Target {
+ self.get(idx)
+ .expect("attempted to index FrozenMap with unknown key")
+ }
+}
+
+impl<K: Eq + Hash, V, S: BuildHasher + Default> FromIterator<(K, V)> for FrozenMap<K, V, S> {
+ fn from_iter<T>(iter: T) -> Self
+ where
+ T: IntoIterator<Item = (K, V)>,
+ {
+ let map: HashMap<_, _, _> = iter.into_iter().collect();
+ map.into()
+ }
+}
+
+impl<K: Eq + Hash, V, S: Default> Default for FrozenMap<K, V, S> {
+ fn default() -> Self {
+ Self {
+ map: UnsafeCell::new(Default::default()),
+ in_use: Cell::new(false),
+ }
+ }
+}
+
+/// Append-only version of `std::collections::BTreeMap` where
+/// insertion does not require mutable access
+pub struct FrozenBTreeMap<K, V> {
+ map: UnsafeCell<BTreeMap<K, V>>,
+ /// Eq/Hash implementations can have side-effects, and using Rc it is possible
+ /// for FrozenBTreeMap::insert to be called on a key that itself contains the same
+ /// `FrozenBTreeMap`, whose `eq` implementation also calls FrozenBTreeMap::insert
+ ///
+ /// We use this `in_use` flag to guard against any reentrancy.
+ in_use: Cell<bool>,
+}
+
+// safety: UnsafeCell implies !Sync
+
+impl<K: Clone + Ord, V: StableDeref> FrozenBTreeMap<K, V> {
+ pub fn new() -> Self {
+ Self {
+ map: UnsafeCell::new(Default::default()),
+ in_use: Cell::new(false),
+ }
+ }
+
+ /// # Examples
+ ///
+ /// ```
+ /// use elsa::FrozenBTreeMap;
+ ///
+ /// let map = FrozenBTreeMap::new();
+ /// assert_eq!(map.len(), 0);
+ /// map.insert(1, Box::new("a"));
+ /// assert_eq!(map.len(), 1);
+ /// ```
+ pub fn len(&self) -> usize {
+ assert!(!self.in_use.get());
+ self.in_use.set(true);
+ let len = unsafe {
+ let map = self.map.get();
+ (*map).len()
+ };
+ self.in_use.set(false);
+ len
+ }
+
+ /// # Examples
+ ///
+ /// ```
+ /// use elsa::FrozenBTreeMap;
+ ///
+ /// let map = FrozenBTreeMap::new();
+ /// assert_eq!(map.is_empty(), true);
+ /// map.insert(1, Box::new("a"));
+ /// assert_eq!(map.is_empty(), false);
+ /// ```
+ pub fn is_empty(&self) -> bool {
+ self.len() == 0
+ }
+}
+
+impl<K: Clone + Ord, V: StableDeref> FrozenBTreeMap<K, V> {
+ // these should never return &K or &V
+ // these should never delete any entries
+ pub fn insert(&self, k: K, v: V) -> &V::Target {
+ assert!(!self.in_use.get());
+ self.in_use.set(true);
+ let ret = unsafe {
+ let map = self.map.get();
+ &*(*map).entry(k).or_insert(v)
+ };
+ self.in_use.set(false);
+ ret
+ }
+
+ /// Returns a reference to the value corresponding to the key.
+ ///
+ /// The key may be any borrowed form of the map's key type, but
+    /// [`Ord`] on the borrowed form *must* match those for
+ /// the key type.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use elsa::FrozenBTreeMap;
+ ///
+ /// let map = FrozenBTreeMap::new();
+ /// map.insert(1, Box::new("a"));
+ /// assert_eq!(map.get(&1), Some(&"a"));
+ /// assert_eq!(map.get(&2), None);
+ /// ```
+ pub fn get<Q: ?Sized>(&self, k: &Q) -> Option<&V::Target>
+ where
+ K: Borrow<Q>,
+ Q: Ord,
+ {
+ assert!(!self.in_use.get());
+ self.in_use.set(true);
+ let ret = unsafe {
+ let map = self.map.get();
+ (*map).get(k).map(|x| &**x)
+ };
+ self.in_use.set(false);
+ ret
+ }
+
+ /// Applies a function to the owner of the value corresponding to the key (if any).
+ ///
+ /// The key may be any borrowed form of the map's key type, but
+    /// [`Ord`] on the borrowed form *must* match those for
+ /// the key type.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use elsa::FrozenBTreeMap;
+ ///
+ /// let map = FrozenBTreeMap::new();
+ /// map.insert(1, Box::new("a"));
+ /// assert_eq!(map.map_get(&1, Clone::clone), Some(Box::new("a")));
+ /// assert_eq!(map.map_get(&2, Clone::clone), None);
+ /// ```
+ pub fn map_get<Q: ?Sized, T, F>(&self, k: &Q, f: F) -> Option<T>
+ where
+ K: Borrow<Q>,
+ Q: Ord,
+ F: FnOnce(&V) -> T,
+ {
+ assert!(!self.in_use.get());
+ self.in_use.set(true);
+ let ret = unsafe {
+ let map = self.map.get();
+ (*map).get(k).map(f)
+ };
+ self.in_use.set(false);
+ ret
+ }
+
+ pub fn into_map(self) -> BTreeMap<K, V> {
+ self.map.into_inner()
+ }
+
+ // TODO add more
+}
+
+impl<K, V> std::convert::AsMut<BTreeMap<K, V>> for FrozenBTreeMap<K, V> {
+    /// Get mutable access to the underlying [`BTreeMap`].
+ ///
+ /// This is safe, as it requires a `&mut self`, ensuring nothing is using
+ /// the 'frozen' contents.
+ fn as_mut(&mut self) -> &mut BTreeMap<K, V> {
+ unsafe { &mut *self.map.get() }
+ }
+}
+
+impl<K: Clone + Ord, V: StableDeref> From<BTreeMap<K, V>> for FrozenBTreeMap<K, V> {
+ fn from(map: BTreeMap<K, V>) -> Self {
+ Self {
+ map: UnsafeCell::new(map),
+ in_use: Cell::new(false),
+ }
+ }
+}
+
+impl<Q: ?Sized, K, V> Index<&Q> for FrozenBTreeMap<K, V>
+where
+ Q: Ord,
+ K: Clone + Ord + Borrow<Q>,
+ V: StableDeref,
+{
+ type Output = V::Target;
+
+ /// # Examples
+ ///
+ /// ```
+ /// use elsa::FrozenBTreeMap;
+ ///
+ /// let map = FrozenBTreeMap::new();
+ /// map.insert(1, Box::new("a"));
+ /// assert_eq!(map[&1], "a");
+ /// ```
+ fn index(&self, idx: &Q) -> &V::Target {
+ self.get(idx)
+ .expect("attempted to index FrozenBTreeMap with unknown key")
+ }
+}
+
+impl<K: Clone + Ord, V: StableDeref> FromIterator<(K, V)> for FrozenBTreeMap<K, V> {
+ fn from_iter<T>(iter: T) -> Self
+ where
+ T: IntoIterator<Item = (K, V)>,
+ {
+ let map: BTreeMap<_, _> = iter.into_iter().collect();
+ map.into()
+ }
+}
+
+impl<K: Clone + Ord, V: StableDeref> Default for FrozenBTreeMap<K, V> {
+ fn default() -> Self {
+ Self {
+ map: UnsafeCell::new(Default::default()),
+ in_use: Cell::new(false),
+ }
+ }
+}
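
A minimal sketch combining the `FrozenBTreeMap` insertion API above with the `AsMut` escape hatch; removal is only reachable through `&mut self`, which statically guarantees no outstanding borrows into the map:

```
use elsa::FrozenBTreeMap;

fn main() {
    let mut map: FrozenBTreeMap<u32, Box<str>> = FrozenBTreeMap::new();

    // Shared-reference insertion; existing borrows are never invalidated.
    let a = map.insert(1, "a".into());
    map.insert(2, "b".into());
    assert_eq!(a, "a");
    assert_eq!(map.len(), 2);

    // Mutation (including removal) goes through `as_mut`, which requires
    // exclusive access and therefore ends all borrows like `a` first.
    assert!(map.as_mut().remove(&1).is_some());
    assert_eq!(map.get(&1), None);
}
```
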
diff --git a/vendor/elsa/src/sync.rs b/vendor/elsa/src/sync.rs
new file mode 100644
index 000000000..afa4bb7c7
--- /dev/null
+++ b/vendor/elsa/src/sync.rs
@@ -0,0 +1,624 @@
+//! **This module is experimental**
+//!
+//! This module provides threadsafe versions of FrozenMap and FrozenVec,
+//! ideal for use as a cache.
+//!
+//! These lock internally, however locks only last as long as the method calls
+//!
+
+use stable_deref_trait::StableDeref;
+use std::alloc::Layout;
+use std::borrow::Borrow;
+use std::collections::BTreeMap;
+use std::collections::HashMap;
+use std::hash::Hash;
+use std::iter::{FromIterator, IntoIterator};
+use std::mem::MaybeUninit;
+use std::ops::Index;
+
+use std::sync::atomic::AtomicPtr;
+use std::sync::atomic::AtomicUsize;
+use std::sync::atomic::Ordering;
+use std::sync::RwLock;
+
+/// Append-only threadsafe version of `std::collections::HashMap` where
+/// insertion does not require mutable access
+pub struct FrozenMap<K, V> {
+ map: RwLock<HashMap<K, V>>,
+}
+
+impl<K, V> Default for FrozenMap<K, V> {
+ fn default() -> Self {
+ Self {
+ map: Default::default(),
+ }
+ }
+}
+
+impl<K: Eq + Hash, V: StableDeref> FrozenMap<K, V> {
+ // these should never return &K or &V
+ // these should never delete any entries
+
+ pub fn new() -> Self {
+ Self::default()
+ }
+
+ /// If the key exists in the map, returns a reference
+ /// to the corresponding value, otherwise inserts a
+ /// new entry in the map for that key and returns a
+ /// reference to the given value.
+ ///
+ /// Existing values are never overwritten.
+ ///
+ /// The key may be any borrowed form of the map's key type, but
+ /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
+ /// the key type.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use elsa::sync::FrozenMap;
+ ///
+ /// let map = FrozenMap::new();
+ /// assert_eq!(map.insert(1, Box::new("a")), &"a");
+ /// assert_eq!(map.insert(1, Box::new("b")), &"a");
+ /// ```
+ pub fn insert(&self, k: K, v: V) -> &V::Target {
+ let mut map = self.map.write().unwrap();
+ let ret = unsafe {
+ let inserted = &**map.entry(k).or_insert(v);
+ &*(inserted as *const _)
+ };
+ ret
+ }
+
+ /// If the key exists in the map, returns a reference to the corresponding
+ /// value, otherwise inserts a new entry in the map for that key and the
+ /// value returned by the creation function, and returns a reference to the
+ /// generated value.
+ ///
+ /// Existing values are never overwritten.
+ ///
+ /// The key may be any borrowed form of the map's key type, but [`Hash`] and
+ /// [`Eq`] on the borrowed form *must* match those for the key type.
+ ///
+ /// **Note** that the write lock is held for the duration of this function’s
+ /// execution, even while the value creation function is executing (if
+ /// needed). This will block any concurrent `get` or `insert` calls.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use elsa::sync::FrozenMap;
+ ///
+ /// let map = FrozenMap::new();
+ /// assert_eq!(map.insert_with(1, || Box::new("a")), &"a");
+ /// assert_eq!(map.insert_with(1, || unreachable!()), &"a");
+ /// ```
+ pub fn insert_with(&self, k: K, f: impl FnOnce() -> V) -> &V::Target {
+ let mut map = self.map.write().unwrap();
+ let ret = unsafe {
+ let inserted = &**map.entry(k).or_insert_with(f);
+ &*(inserted as *const _)
+ };
+ ret
+ }
+
+ /// If the key exists in the map, returns a reference to the corresponding
+ /// value, otherwise inserts a new entry in the map for that key and the
+ /// value returned by the creation function, and returns a reference to the
+ /// generated value.
+ ///
+ /// Existing values are never overwritten.
+ ///
+ /// The key may be any borrowed form of the map's key type, but [`Hash`] and
+ /// [`Eq`] on the borrowed form *must* match those for the key type.
+ ///
+ /// **Note** that the write lock is held for the duration of this function’s
+ /// execution, even while the value creation function is executing (if
+ /// needed). This will block any concurrent `get` or `insert` calls.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use elsa::sync::FrozenMap;
+ ///
+ /// let map = FrozenMap::new();
+ /// assert_eq!(map.insert_with_key(1, |_| Box::new("a")), &"a");
+ /// assert_eq!(map.insert_with_key(1, |_| unreachable!()), &"a");
+ /// ```
+ pub fn insert_with_key(&self, k: K, f: impl FnOnce(&K) -> V) -> &V::Target {
+ let mut map = self.map.write().unwrap();
+ let ret = unsafe {
+ let inserted = &**map.entry(k).or_insert_with_key(f);
+ &*(inserted as *const _)
+ };
+ ret
+ }
+
+ /// Returns a reference to the value corresponding to the key.
+ ///
+ /// The key may be any borrowed form of the map's key type, but
+ /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
+ /// the key type.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use elsa::sync::FrozenMap;
+ ///
+ /// let map = FrozenMap::new();
+ /// map.insert(1, Box::new("a"));
+ /// assert_eq!(map.get(&1), Some(&"a"));
+ /// assert_eq!(map.get(&2), None);
+ /// ```
+ pub fn get<Q: ?Sized>(&self, k: &Q) -> Option<&V::Target>
+ where
+ K: Borrow<Q>,
+ Q: Hash + Eq,
+ {
+ let map = self.map.read().unwrap();
+ let ret = unsafe { map.get(k).map(|x| &*(&**x as *const V::Target)) };
+ ret
+ }
+
+ /// Applies a function to the owner of the value corresponding to the key (if any).
+ ///
+ /// The key may be any borrowed form of the map's key type, but
+ /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
+ /// the key type.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use elsa::sync::FrozenMap;
+ ///
+ /// let map = FrozenMap::new();
+ /// map.insert(1, Box::new("a"));
+ /// assert_eq!(map.map_get(&1, Clone::clone), Some(Box::new("a")));
+ /// assert_eq!(map.map_get(&2, Clone::clone), None);
+ /// ```
+ pub fn map_get<Q: ?Sized, T, F>(&self, k: &Q, f: F) -> Option<T>
+ where
+ K: Borrow<Q>,
+ Q: Hash + Eq,
+ F: FnOnce(&V) -> T,
+ {
+ let map = self.map.read().unwrap();
+ let ret = map.get(k).map(f);
+ ret
+ }
+
+ /// # Examples
+ ///
+ /// ```
+ /// use elsa::sync::FrozenMap;
+ ///
+ /// let map = FrozenMap::new();
+ /// assert_eq!(map.len(), 0);
+ /// map.insert(1, Box::new("a"));
+ /// assert_eq!(map.len(), 1);
+ /// ```
+ pub fn len(&self) -> usize {
+ let map = self.map.read().unwrap();
+ map.len()
+ }
+
+ /// # Examples
+ ///
+ /// ```
+ /// use elsa::sync::FrozenMap;
+ ///
+ /// let map = FrozenMap::new();
+ /// assert_eq!(map.is_empty(), true);
+ /// map.insert(1, Box::new("a"));
+ /// assert_eq!(map.is_empty(), false);
+ /// ```
+ pub fn is_empty(&self) -> bool {
+ let map = self.map.read().unwrap();
+ map.is_empty()
+ }
+
+ // TODO add more
+}
+
+/// Append-only threadsafe version of `std::vec::Vec` where
+/// insertion does not require mutable access
+pub struct FrozenVec<T> {
+ vec: RwLock<Vec<T>>,
+}
+
+impl<T> Default for FrozenVec<T> {
+ fn default() -> Self {
+ Self {
+ vec: Default::default(),
+ }
+ }
+}
+
+impl<T: StableDeref> FrozenVec<T> {
+ pub fn new() -> Self {
+ Default::default()
+ }
+
+ // these should never return &T
+ // these should never delete any entries
+
+ pub fn push(&self, val: T) {
+ let mut vec = self.vec.write().unwrap();
+ vec.push(val);
+ }
+
+ /// Push, immediately getting a reference to the element
+ pub fn push_get(&self, val: T) -> &T::Target {
+ let mut vec = self.vec.write().unwrap();
+ vec.push(val);
+ unsafe { &*(&**vec.get_unchecked(vec.len() - 1) as *const T::Target) }
+ }
+
+    /// Push, immediately getting an index of the element
+    ///
+    /// The index can then be used with the `get` method
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use elsa::sync::FrozenVec;
+ ///
+ /// let map = FrozenVec::new();
+ /// let idx = map.push_get_index(String::from("a"));
+ /// assert_eq!(map.get(idx), Some("a"));
+ /// assert_eq!(idx, 0);
+ /// assert_eq!(map.push_get_index(String::from("b")), 1);
+ /// ```
+ pub fn push_get_index(&self, val: T) -> usize {
+ let mut vec = self.vec.write().unwrap();
+ let index = vec.len();
+ vec.push(val);
+ return index;
+ }
+
+ pub fn get(&self, index: usize) -> Option<&T::Target> {
+ let vec = self.vec.read().unwrap();
+ unsafe { vec.get(index).map(|x| &*(&**x as *const T::Target)) }
+ }
+
+ // TODO add more
+}
+
+/// Append-only threadsafe version of `std::vec::Vec` where
+/// insertion does not require mutable access.
+/// Does not have locks, only allows `Copy` types and will
+/// spinlock on contention. The spinlocks are really rare as
+/// they only happen on reallocation due to a push going over
+/// the capacity.
+pub struct LockFreeFrozenVec<T: Copy> {
+ data: AtomicPtr<T>,
+ len: AtomicUsize,
+ cap: AtomicUsize,
+}
+
+impl<T: Copy> Drop for LockFreeFrozenVec<T> {
+ fn drop(&mut self) {
+ let cap = *self.cap.get_mut();
+ let layout = self.layout(cap);
+ unsafe {
+ std::alloc::dealloc((*self.data.get_mut()).cast(), layout);
+ }
+ }
+}
+
+impl<T: Copy> Default for LockFreeFrozenVec<T> {
+ fn default() -> Self {
+ Self {
+ // FIXME: use `std::ptr::invalid_mut()` once that is stable.
+ data: AtomicPtr::new(std::mem::align_of::<T>() as *mut T),
+ len: AtomicUsize::new(0),
+ cap: AtomicUsize::new(0),
+ }
+ }
+}
+
+impl<T: Copy> LockFreeFrozenVec<T> {
+ pub fn new() -> Self {
+ Default::default()
+ }
+
+ pub fn with_capacity(cap: usize) -> Self {
+ Self {
+ data: AtomicPtr::new(
+ Box::into_raw(vec![MaybeUninit::<T>::uninit(); cap].into_boxed_slice()).cast(),
+ ),
+ len: AtomicUsize::new(0),
+ cap: AtomicUsize::new(cap),
+ }
+ }
+
+ fn lock<U>(&self, f: impl FnOnce(&mut *mut T) -> U) -> U {
+ let mut ptr;
+ loop {
+ ptr = self.data.swap(std::ptr::null_mut(), Ordering::Acquire);
+ if !ptr.is_null() {
+ // Wheeeee spinlock
+ break;
+ }
+ }
+
+ let ret = f(&mut ptr);
+ self.data.store(ptr, Ordering::Release);
+ ret
+ }
+
+ fn layout(&self, cap: usize) -> Layout {
+ let num_bytes = std::mem::size_of::<T>() * cap;
+ let align = std::mem::align_of::<T>();
+ Layout::from_size_align(num_bytes, align).unwrap()
+ }
+
+ // these should never return &T
+ // these should never delete any entries
+
+ const NOT_ZST: () = if std::mem::size_of::<T>() == 0 {
+ panic!("`LockFreeFrozenVec` cannot be used with ZSTs");
+ };
+
+ pub fn push(&self, val: T) -> usize {
+ // This statement actually does something: it evaluates a constant.
+ #[allow(path_statements)]
+ {
+ Self::NOT_ZST
+ }
+ self.lock(|ptr| {
+ // These values must be consistent with the pointer we got.
+ let len = self.len.load(Ordering::Acquire);
+ let cap = self.cap.load(Ordering::Acquire);
+ if len >= cap {
+ if cap == 0 {
+ // No memory allocated yet
+ let layout = self.layout(128);
+ // SAFETY: `LockFreeFrozenVec` statically rejects zsts
+ unsafe {
+ *ptr = std::alloc::alloc(layout).cast::<T>();
+ }
+ // This is written before the end of the `lock` closure, so no one will observe this
+ // until the data pointer has been updated anyway.
+ self.cap.store(128, Ordering::Release);
+ } else {
+ // Out of memory, realloc with double the capacity
+ let layout = self.layout(cap);
+ let new_size = layout.size() * 2;
+ // SAFETY: `LockFreeFrozenVec` statically rejects zsts and the input `ptr` has always been
+ // allocated at the size stated in `cap`.
+ unsafe {
+ *ptr = std::alloc::realloc((*ptr).cast(), layout, new_size).cast::<T>();
+ }
+ // This is written before the end of the `lock` closure, so no one will observe this
+ // until the data pointer has been updated anyway.
+ self.cap.store(cap * 2, Ordering::Release);
+ }
+ assert!(!ptr.is_null());
+ }
+ unsafe {
+ ptr.add(len).write(val);
+ }
+ // This is written before updating the data pointer. Other `push` calls cannot observe this,
+            // because they are blocked on acquiring the data pointer before they ever read the `len`.
+            // `get` may read the length before actually acquiring the data pointer lock, but that is fine,
+            // as once it is able to acquire the lock, there will actually be the right number of elements
+ // stored.
+ self.len.store(len + 1, Ordering::Release);
+ len
+ })
+ }
+
+ pub fn get(&self, index: usize) -> Option<T> {
+ // The length can only grow, so just doing the length check
+ // independently of the `lock` and read is fine. Worst case we
+ // read an old length value and end up returning `None` even if
+ // another thread already inserted the value.
+ let len = self.len.load(Ordering::Relaxed);
+ if index >= len {
+ return None;
+ }
+ self.lock(|ptr| Some(unsafe { ptr.add(index).read() }))
+ }
+}
+
+#[test]
+fn test_non_lockfree() {
+ #[derive(Copy, Clone, Debug, PartialEq, Eq)]
+ struct Moo(i32);
+
+ for vec in [
+ LockFreeFrozenVec::new(),
+ LockFreeFrozenVec::with_capacity(1),
+ LockFreeFrozenVec::with_capacity(2),
+ LockFreeFrozenVec::with_capacity(1000),
+ ] {
+ assert_eq!(vec.get(1), None);
+
+ vec.push(Moo(1));
+ let i = vec.push(Moo(2));
+ vec.push(Moo(3));
+
+ assert_eq!(vec.get(i), Some(Moo(2)));
+
+ std::thread::scope(|s| {
+ s.spawn(|| {
+ for i in 0..1000 {
+ vec.push(Moo(i));
+ }
+ });
+ s.spawn(|| {
+ for i in 0..1000 {
+ vec.push(Moo(i));
+ }
+ });
+ for i in 0..2000 {
+ while vec.get(i).is_none() {}
+ }
+ });
+ }
+}
+
+/// Append-only threadsafe version of `std::collections::BTreeMap` where
+/// insertion does not require mutable access
+#[derive(Debug)]
+pub struct FrozenBTreeMap<K, V>(RwLock<BTreeMap<K, V>>);
+
+impl<K: Clone + Ord, V: StableDeref> FrozenBTreeMap<K, V> {
+ pub fn new() -> Self {
+ Self(RwLock::new(BTreeMap::new()))
+ }
+
+ // these should never return &K or &V
+ // these should never delete any entries
+
+ /// Returns a reference to the value corresponding to the key.
+ ///
+ /// The key may be any borrowed form of the map's key type, but
+ /// [`Ord`] on the borrowed form *must* match those for
+ /// the key type.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use elsa::sync::FrozenBTreeMap;
+ ///
+ /// let map = FrozenBTreeMap::new();
+ /// map.insert(1, Box::new("a"));
+ /// assert_eq!(map.get(&1), Some(&"a"));
+ /// assert_eq!(map.get(&2), None);
+ /// ```
+ pub fn get<Q: ?Sized>(&self, k: &Q) -> Option<&V::Target>
+ where
+ K: Borrow<Q>,
+ Q: Ord,
+ {
+ let map = self.0.read().unwrap();
+ let ret = unsafe { map.get(k).map(|x| &*(&**x as *const V::Target)) };
+ ret
+ }
+
+ /// Insert a new value into the map. Does nothing if the key is already occupied.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use elsa::sync::FrozenBTreeMap;
+ ///
+ /// let map = FrozenBTreeMap::new();
+ /// map.insert(1, Box::new("a"));
+ /// assert_eq!(map.get(&1), Some(&"a"));
+ /// ```
+ pub fn insert(&self, k: K, v: V) -> &V::Target {
+ let mut map = self.0.write().unwrap();
+ let ret = unsafe {
+ let inserted = &**map.entry(k).or_insert(v);
+ &*(inserted as *const _)
+ };
+ ret
+ }
+
+ /// Applies a function to the owner of the value corresponding to the key (if any).
+ ///
+ /// The key may be any borrowed form of the map's key type, but
+ /// [`Ord`] on the borrowed form *must* match those for
+ /// the key type.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use elsa::sync::FrozenBTreeMap;
+ ///
+ /// let map = FrozenBTreeMap::new();
+ /// map.insert(1, Box::new("a"));
+ /// assert_eq!(map.map_get(&1, Clone::clone), Some(Box::new("a")));
+ /// assert_eq!(map.map_get(&2, Clone::clone), None);
+ /// ```
+ pub fn map_get<Q: ?Sized, T, F>(&self, k: &Q, f: F) -> Option<T>
+ where
+ K: Borrow<Q>,
+ Q: Ord,
+ F: FnOnce(&V) -> T,
+ {
+ let map = self.0.read().unwrap();
+ let ret = map.get(k).map(f);
+ ret
+ }
+
+ /// # Examples
+ ///
+ /// ```
+ /// use elsa::sync::FrozenBTreeMap;
+ ///
+ /// let map = FrozenBTreeMap::new();
+ /// assert_eq!(map.len(), 0);
+ /// map.insert(1, Box::new("a"));
+ /// assert_eq!(map.len(), 1);
+ /// ```
+ pub fn len(&self) -> usize {
+ let map = self.0.read().unwrap();
+ map.len()
+ }
+
+ /// # Examples
+ ///
+ /// ```
+ /// use elsa::sync::FrozenBTreeMap;
+ ///
+ /// let map = FrozenBTreeMap::new();
+ /// assert_eq!(map.is_empty(), true);
+ /// map.insert(1, Box::new("a"));
+ /// assert_eq!(map.is_empty(), false);
+ /// ```
+ pub fn is_empty(&self) -> bool {
+ let map = self.0.read().unwrap();
+ map.is_empty()
+ }
+}
+
+impl<K: Clone + Ord, V: StableDeref> From<BTreeMap<K, V>> for FrozenBTreeMap<K, V> {
+ fn from(map: BTreeMap<K, V>) -> Self {
+ Self(RwLock::new(map))
+ }
+}
+
+impl<Q: ?Sized, K, V> Index<&Q> for FrozenBTreeMap<K, V>
+where
+ Q: Ord,
+ K: Clone + Ord + Borrow<Q>,
+ V: StableDeref,
+{
+ type Output = V::Target;
+
+ /// # Examples
+ ///
+ /// ```
+ /// use elsa::sync::FrozenBTreeMap;
+ ///
+ /// let map = FrozenBTreeMap::new();
+ /// map.insert(1, Box::new("a"));
+ /// assert_eq!(map[&1], "a");
+ /// ```
+ fn index(&self, idx: &Q) -> &V::Target {
+ self.get(idx)
+ .expect("attempted to index FrozenBTreeMap with unknown key")
+ }
+}
+
+impl<K: Clone + Ord, V: StableDeref> FromIterator<(K, V)> for FrozenBTreeMap<K, V> {
+ fn from_iter<T>(iter: T) -> Self
+ where
+ T: IntoIterator<Item = (K, V)>,
+ {
+ let map: BTreeMap<_, _> = iter.into_iter().collect();
+ map.into()
+ }
+}
+
+impl<K: Clone + Ord, V: StableDeref> Default for FrozenBTreeMap<K, V> {
+ fn default() -> Self {
+ Self::new()
+ }
+}
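
A minimal sketch of the threadsafe `sync::FrozenMap` above, using scoped threads much like the module's own test does:

```
use elsa::sync::FrozenMap;
use std::thread;

fn main() {
    let map: FrozenMap<u32, String> = FrozenMap::new();

    // Each method call takes the internal RwLock only for its own duration,
    // so plain shared references can insert from multiple threads.
    thread::scope(|s| {
        s.spawn(|| {
            map.insert(1, "one".to_string());
        });
        s.spawn(|| {
            map.insert(2, "two".to_string());
        });
    });

    assert_eq!(map.get(&1), Some("one"));
    assert_eq!(map.get(&2), Some("two"));
}
```
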
diff --git a/vendor/elsa/src/vec.rs b/vendor/elsa/src/vec.rs
new file mode 100644
index 000000000..33b6e6a50
--- /dev/null
+++ b/vendor/elsa/src/vec.rs
@@ -0,0 +1,347 @@
+use std::cell::UnsafeCell;
+use std::cmp::Ordering;
+use std::iter::FromIterator;
+use std::ops::Index;
+
+use stable_deref_trait::StableDeref;
+
+/// Append-only version of `std::vec::Vec` where
+/// insertion does not require mutable access
+pub struct FrozenVec<T> {
+ vec: UnsafeCell<Vec<T>>,
+ // XXXManishearth do we need a reentrancy guard here as well?
+ // StableDeref may not guarantee that there are no side effects
+}
+
+// safety: UnsafeCell implies !Sync
+
+impl<T> FrozenVec<T> {
+ /// Constructs a new, empty vector.
+ pub fn new() -> Self {
+ Self {
+ vec: UnsafeCell::new(Default::default()),
+ }
+ }
+}
+
+impl<T> FrozenVec<T> {
+ // these should never return &T
+ // these should never delete any entries
+
+ /// Appends an element to the back of the vector.
+ pub fn push(&self, val: T) {
+ unsafe {
+ let vec = self.vec.get();
+ (*vec).push(val)
+ }
+ }
+}
+
+impl<T: StableDeref> FrozenVec<T> {
+ /// Push, immediately getting a reference to the element
+ pub fn push_get(&self, val: T) -> &T::Target {
+ unsafe {
+ let vec = self.vec.get();
+ (*vec).push(val);
+ &*(&**(*vec).get_unchecked((*vec).len() - 1) as *const T::Target)
+ }
+ }
+
+ /// Returns a reference to an element.
+ pub fn get(&self, index: usize) -> Option<&T::Target> {
+ unsafe {
+ let vec = self.vec.get();
+ (*vec).get(index).map(|x| &**x)
+ }
+ }
+
+ /// Returns a reference to an element, without doing bounds checking.
+ ///
+ /// ## Safety
+ ///
+ /// `index` must be in bounds, i.e. it must be less than `self.len()`
+ pub unsafe fn get_unchecked(&self, index: usize) -> &T::Target {
+ let vec = self.vec.get();
+ &**(*vec).get_unchecked(index)
+ }
+}
+
+impl<T: Copy> FrozenVec<T> {
+ /// Returns a copy of an element.
+ pub fn get_copy(&self, index: usize) -> Option<T> {
+ unsafe {
+ let vec = self.vec.get();
+ (*vec).get(index).copied()
+ }
+ }
+}
+
+impl<T> FrozenVec<T> {
+ /// Returns the number of elements in the vector.
+ pub fn len(&self) -> usize {
+ unsafe {
+ let vec = self.vec.get();
+ (*vec).len()
+ }
+ }
+
+ /// Returns `true` if the vector contains no elements.
+ pub fn is_empty(&self) -> bool {
+ self.len() == 0
+ }
+}
+
+impl<T: StableDeref> FrozenVec<T> {
+ /// Returns the first element of the vector, or `None` if empty.
+ pub fn first(&self) -> Option<&T::Target> {
+ unsafe {
+ let vec = self.vec.get();
+ (*vec).first().map(|x| &**x)
+ }
+ }
+
+ /// Returns the last element of the vector, or `None` if empty.
+ pub fn last(&self) -> Option<&T::Target> {
+ unsafe {
+ let vec = self.vec.get();
+ (*vec).last().map(|x| &**x)
+ }
+ }
+ /// Returns an iterator over the vector.
+ pub fn iter(&self) -> Iter<T> {
+ self.into_iter()
+ }
+}
+
+impl<T: StableDeref> FrozenVec<T> {
+ /// Converts the frozen vector into a plain vector.
+ pub fn into_vec(self) -> Vec<T> {
+ self.vec.into_inner()
+ }
+}
+
+impl<T: StableDeref> FrozenVec<T> {
+ // binary search functions: they need to be reimplemented here to be safe (instead of calling
+ // their equivalents directly on the underlying Vec), as they run user callbacks that could
+ // reentrantly call other functions on this vector
+
+ /// Binary searches this sorted vector for a given element, analogous to [slice::binary_search].
+ pub fn binary_search(&self, x: &T::Target) -> Result<usize, usize>
+ where
+ T::Target: Ord,
+ {
+ self.binary_search_by(|p| p.cmp(x))
+ }
+
+ /// Binary searches this sorted vector with a comparator function, analogous to
+ /// [slice::binary_search_by].
+ pub fn binary_search_by<'a, F>(&'a self, mut f: F) -> Result<usize, usize>
+ where
+ F: FnMut(&'a T::Target) -> Ordering,
+ {
+ let mut size = self.len();
+ let mut left = 0;
+ let mut right = size;
+ while left < right {
+ let mid = left + size / 2;
+
+ // safety: like the core algorithm, mid is always within original vector len; in
+            // pathological cases, the user could push to the vector in the meantime, but this can only
+ // increase the length, keeping this safe
+ let cmp = f(unsafe { self.get_unchecked(mid) });
+
+ if cmp == Ordering::Less {
+ left = mid + 1;
+ } else if cmp == Ordering::Greater {
+ right = mid;
+ } else {
+ return Ok(mid);
+ }
+
+ size = right - left;
+ }
+ Err(left)
+ }
+
+ /// Binary searches this sorted vector with a key extraction function, analogous to
+ /// [slice::binary_search_by_key].
+ pub fn binary_search_by_key<'a, B, F>(&'a self, b: &B, mut f: F) -> Result<usize, usize>
+ where
+ F: FnMut(&'a T::Target) -> B,
+ B: Ord,
+ {
+ self.binary_search_by(|k| f(k).cmp(b))
+ }
+
+ /// Returns the index of the partition point according to the given predicate
+ /// (the index of the first element of the second partition), analogous to
+ /// [slice::partition_point].
+ pub fn partition_point<P>(&self, mut pred: P) -> usize
+ where
+ P: FnMut(&T::Target) -> bool,
+ {
+ let mut left = 0;
+ let mut right = self.len();
+
+ while left != right {
+ let mid = left + (right - left) / 2;
+ // safety: like in binary_search_by
+ let value = unsafe { self.get_unchecked(mid) };
+ if pred(value) {
+ left = mid + 1;
+ } else {
+ right = mid;
+ }
+ }
+
+ left
+ }
+
+ // TODO add more
+}
+
+impl<T> std::convert::AsMut<Vec<T>> for FrozenVec<T> {
+ /// Get mutable access to the underlying vector.
+ ///
+ /// This is safe, as it requires a `&mut self`, ensuring nothing is using
+ /// the 'frozen' contents.
+ fn as_mut(&mut self) -> &mut Vec<T> {
+ unsafe { &mut *self.vec.get() }
+ }
+}
+
+impl<T> Default for FrozenVec<T> {
+ fn default() -> Self {
+ FrozenVec::new()
+ }
+}
+
+impl<T> From<Vec<T>> for FrozenVec<T> {
+ fn from(vec: Vec<T>) -> Self {
+ Self {
+ vec: UnsafeCell::new(vec),
+ }
+ }
+}
+
+impl<T: StableDeref> Index<usize> for FrozenVec<T> {
+ type Output = T::Target;
+ fn index(&self, idx: usize) -> &T::Target {
+ self.get(idx).unwrap_or_else(|| {
+ panic!(
+ "index out of bounds: the len is {} but the index is {}",
+ self.len(),
+ idx
+ )
+ })
+ }
+}
+
+impl<A> FromIterator<A> for FrozenVec<A> {
+ fn from_iter<T>(iter: T) -> Self
+ where
+ T: IntoIterator<Item = A>,
+ {
+ let vec: Vec<_> = iter.into_iter().collect();
+ vec.into()
+ }
+}
+
+/// Iterator over FrozenVec, obtained via `.iter()`
+///
+/// It is safe to push to the vector during iteration
+pub struct Iter<'a, T> {
+ vec: &'a FrozenVec<T>,
+ idx: usize,
+}
+
+impl<'a, T: StableDeref> Iterator for Iter<'a, T> {
+ type Item = &'a T::Target;
+ fn next(&mut self) -> Option<&'a T::Target> {
+ if let Some(ret) = self.vec.get(self.idx) {
+ self.idx += 1;
+ Some(ret)
+ } else {
+ None
+ }
+ }
+}
+
+impl<'a, T: StableDeref> IntoIterator for &'a FrozenVec<T> {
+ type Item = &'a T::Target;
+ type IntoIter = Iter<'a, T>;
+ fn into_iter(self) -> Iter<'a, T> {
+ Iter { vec: self, idx: 0 }
+ }
+}
+
+#[test]
+fn test_iteration() {
+ let vec = vec!["a", "b", "c", "d"];
+ let frozen: FrozenVec<_> = vec.clone().into();
+
+ assert_eq!(vec, frozen.iter().collect::<Vec<_>>());
+ for (e1, e2) in vec.iter().zip(frozen.iter()) {
+ assert_eq!(*e1, e2);
+ }
+
+ assert_eq!(vec.len(), frozen.iter().count())
+}
+
+#[test]
+fn test_accessors() {
+ let vec: FrozenVec<String> = FrozenVec::new();
+
+ assert_eq!(vec.is_empty(), true);
+ assert_eq!(vec.len(), 0);
+ assert_eq!(vec.first(), None);
+ assert_eq!(vec.last(), None);
+ assert_eq!(vec.get(1), None);
+
+ vec.push("a".to_string());
+ vec.push("b".to_string());
+ vec.push("c".to_string());
+
+ assert_eq!(vec.is_empty(), false);
+ assert_eq!(vec.len(), 3);
+ assert_eq!(vec.first(), Some("a"));
+ assert_eq!(vec.last(), Some("c"));
+ assert_eq!(vec.get(1), Some("b"));
+}
+
+#[test]
+fn test_non_stable_deref() {
+ #[derive(Copy, Clone, Debug, PartialEq, Eq)]
+ struct Moo(i32);
+ let vec: FrozenVec<Moo> = FrozenVec::new();
+
+ assert_eq!(vec.is_empty(), true);
+ assert_eq!(vec.len(), 0);
+ assert_eq!(vec.get_copy(1), None);
+
+ vec.push(Moo(1));
+ vec.push(Moo(2));
+ vec.push(Moo(3));
+
+ assert_eq!(vec.is_empty(), false);
+ assert_eq!(vec.len(), 3);
+ assert_eq!(vec.get_copy(1), Some(Moo(2)));
+}
+
+#[test]
+fn test_binary_search() {
+ let vec: FrozenVec<_> = vec!["ab", "cde", "fghij"].into();
+
+ assert_eq!(vec.binary_search("cde"), Ok(1));
+ assert_eq!(vec.binary_search("cdf"), Err(2));
+ assert_eq!(vec.binary_search("a"), Err(0));
+ assert_eq!(vec.binary_search("g"), Err(3));
+
+ assert_eq!(vec.binary_search_by_key(&1, |x| x.len()), Err(0));
+ assert_eq!(vec.binary_search_by_key(&3, |x| x.len()), Ok(1));
+ assert_eq!(vec.binary_search_by_key(&4, |x| x.len()), Err(2));
+
+ assert_eq!(vec.partition_point(|x| x.len() < 4), 2);
+ assert_eq!(vec.partition_point(|_| false), 0);
+ assert_eq!(vec.partition_point(|_| true), 3);
+}
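
A minimal sketch of `FrozenVec` showing the core property the module documents: borrows obtained from `get` survive later pushes.

```
use elsa::FrozenVec;

fn main() {
    let vec: FrozenVec<String> = FrozenVec::new();

    vec.push("a".to_string());
    let first = vec.get(0).unwrap();

    // Later pushes only grow the inner Vec; `first` borrows the string's own
    // heap allocation, so it remains valid.
    for i in 0..100 {
        vec.push(i.to_string());
    }

    assert_eq!(first, "a");
    assert_eq!(vec.len(), 101);
}
```
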
diff --git a/vendor/ena/.cargo-checksum.json b/vendor/ena/.cargo-checksum.json
index 246be2f1d..cce0ccf73 100644
--- a/vendor/ena/.cargo-checksum.json
+++ b/vendor/ena/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.toml":"9cc278873c11103275c22c025d2170754767dc30ae109ddafd60b881b7d5a64b","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"0621878e61f0d0fda054bcbe02df75192c28bde1ecc8289cbd86aeba2dd72720","README.md":"c623c5a776782edc92b00e4934bd50b7754a861dc6cad02ee4c2a87f946a542f","measurements.txt":"b209f98f2bc696904a48829e86952f4f09b59e4e685f7c12087c59d05ed31829","src/bitvec.rs":"c6c66c348776ff480b7ff6e4a3e0f64554a4194266f614408b45b5e3c324ec0a","src/lib.rs":"9b94637cb53e882625d3fb714acac37bb5fe7762d2a583ad4fd43f276f849214","src/snapshot_vec.rs":"b9fce507e3eece42c742405aea870562f99fdea3a4e30a122cea64ef5634f197","src/undo_log.rs":"5c94971d95ae1dd2de04eae2ea1ec5b99c627fbe92b2ea40a4fa3c37d340e7b8","src/unify/backing_vec.rs":"97cc2cec917ad87bb59b9f08ab3e081758ab5632d4a2e35621ba68c175ab10e5","src/unify/mod.rs":"bffe4e412b7624cf67efb64e75ecb3f537050080c8aefa69e354c2d774906976","src/unify/tests.rs":"6ffe2de338f1c8014292fdc7e764451c7af3de344fd405a46b818447304bdd23","tests/external_undo_log.rs":"215645f44d90b22b6ff07f72157b285e9cc277b856c31a0b82526b1534bef240"},"package":"d7402b94a93c24e742487327a7cd839dc9d36fec9de9fb25b09f2dae459f36c3"} \ No newline at end of file
+{"files":{"Cargo.toml":"820aad545a6df3f8e1c17d0f4d3b9ad97b90c4d307dd7df17e4c375d378efcb2","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"0621878e61f0d0fda054bcbe02df75192c28bde1ecc8289cbd86aeba2dd72720","README.md":"1c15c9176a442a293a7fe046172e316f95b3acca6c01ee495162d4bc57e6841b","measurements.txt":"b209f98f2bc696904a48829e86952f4f09b59e4e685f7c12087c59d05ed31829","src/bitvec.rs":"c6c66c348776ff480b7ff6e4a3e0f64554a4194266f614408b45b5e3c324ec0a","src/lib.rs":"9b94637cb53e882625d3fb714acac37bb5fe7762d2a583ad4fd43f276f849214","src/snapshot_vec.rs":"b9fce507e3eece42c742405aea870562f99fdea3a4e30a122cea64ef5634f197","src/undo_log.rs":"5c94971d95ae1dd2de04eae2ea1ec5b99c627fbe92b2ea40a4fa3c37d340e7b8","src/unify/backing_vec.rs":"97cc2cec917ad87bb59b9f08ab3e081758ab5632d4a2e35621ba68c175ab10e5","src/unify/mod.rs":"986c502c11d0c921ef78916b0ccf17ffd18c5baf6e807be1295b237a2d3d87a1","src/unify/tests.rs":"6ffe2de338f1c8014292fdc7e764451c7af3de344fd405a46b818447304bdd23","tests/external_undo_log.rs":"215645f44d90b22b6ff07f72157b285e9cc277b856c31a0b82526b1534bef240"},"package":"b2e5d13ca2353ab7d0230988629def93914a8c4015f621f9b13ed2955614731d"} \ No newline at end of file
diff --git a/vendor/ena/Cargo.toml b/vendor/ena/Cargo.toml
index 620558398..d19260c80 100644
--- a/vendor/ena/Cargo.toml
+++ b/vendor/ena/Cargo.toml
@@ -3,23 +3,26 @@
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
-# to registry (e.g., crates.io) dependencies
+# to registry (e.g., crates.io) dependencies.
#
-# If you believe there's an error in this file please file an
-# issue against the rust-lang/cargo repository. If you're
-# editing this file be aware that the upstream Cargo.toml
-# will likely look very different (and much more reasonable)
+# If you are reading this file be aware that the original Cargo.toml
+# will likely look very different (and much more reasonable).
+# See Cargo.toml.orig for the original contents.
[package]
name = "ena"
-version = "0.14.0"
+version = "0.14.1"
authors = ["Niko Matsakis <niko@alum.mit.edu>"]
description = "Union-find, congruence closure, and other unification code. Based on code from rustc."
-homepage = "https://github.com/rust-lang-nursery/ena"
+homepage = "https://github.com/rust-lang/ena"
readme = "README.md"
-keywords = ["unification", "union-find"]
-license = "MIT/Apache-2.0"
-repository = "https://github.com/rust-lang-nursery/ena"
+keywords = [
+ "unification",
+ "union-find",
+]
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/rust-lang/ena"
+
[dependencies.dogged]
version = "0.2.0"
optional = true
diff --git a/vendor/ena/README.md b/vendor/ena/README.md
index afa6567c6..416598ba3 100644
--- a/vendor/ena/README.md
+++ b/vendor/ena/README.md
@@ -1,4 +1,4 @@
-[![Build Status](https://travis-ci.com/rust-lang-nursery/ena.svg?branch=master)](https://travis-ci.com/rust-lang-nursery/ena)
+[![Build Status](https://travis-ci.org/rust-lang/ena.svg?branch=master)](https://travis-ci.org/rust-lang/ena)
An implementation of union-find in Rust; extracted from (and used by)
rustc.
diff --git a/vendor/ena/src/unify/mod.rs b/vendor/ena/src/unify/mod.rs
index a26d699d8..5377177be 100644
--- a/vendor/ena/src/unify/mod.rs
+++ b/vendor/ena/src/unify/mod.rs
@@ -230,19 +230,8 @@ impl<K: UnifyKey> VarValue<K> {
self.rank = rank;
self.value = value;
}
-
- fn parent(&self, self_key: K) -> Option<K> {
- self.if_not_self(self.parent, self_key)
- }
-
- fn if_not_self(&self, key: K, self_key: K) -> Option<K> {
- if key == self_key {
- None
- } else {
- Some(key)
- }
- }
}
+
impl<K> UnificationTableStorage<K>
where
K: UnifyKey,
@@ -358,13 +347,12 @@ impl<S: UnificationStoreMut> UnificationTable<S> {
/// callsites. `uninlined_get_root_key` is the never-inlined version.
#[inline(always)]
fn inlined_get_root_key(&mut self, vid: S::Key) -> S::Key {
- let redirect = {
- match self.value(vid).parent(vid) {
- None => return vid,
- Some(redirect) => redirect,
- }
- };
+ let v = self.value(vid);
+ if v.parent == vid {
+ return vid;
+ }
+ let redirect = v.parent;
let root_key: S::Key = self.uninlined_get_root_key(redirect);
if root_key != redirect {
// Path compression
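
The hunk above simplifies `inlined_get_root_key` to a direct self-parent check. As a standalone illustration of the same root lookup with path compression (plain indices only, not ena's keyed, undo-log-aware store):

```
// Minimal union-find root lookup with path compression, using plain indices.
// A node is a root when it is its own parent (the "self-parent" encoding that
// the hunk above checks directly instead of going through an Option).
fn find(parent: &mut Vec<usize>, vid: usize) -> usize {
    if parent[vid] == vid {
        return vid;
    }
    let redirect = parent[vid];
    let root = find(parent, redirect);
    if root != redirect {
        // Path compression: point directly at the root for future lookups.
        parent[vid] = root;
    }
    root
}

fn main() {
    let mut parent = vec![0, 0, 1, 2]; // chain: 3 -> 2 -> 1 -> 0 (root)
    assert_eq!(find(&mut parent, 3), 0);
    assert_eq!(parent[3], 0); // compressed
}
```
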
diff --git a/vendor/filetime/.cargo-checksum.json b/vendor/filetime/.cargo-checksum.json
index 8d7b7ce7d..7bc0e0ce1 100644
--- a/vendor/filetime/.cargo-checksum.json
+++ b/vendor/filetime/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.toml":"c82074c47610281bf7c3de20e143b8795ac39543df6c52e86e96506f7747e93f","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"6cd295d9f4efe412971b1855ecaf7b19ef3956006a652ea205c456a5d33550c1","src/lib.rs":"eec6b5e3be2c1d5e068a9724b503e729a1e11dae4cd1bc816110c94a0bc14fa0","src/redox.rs":"898997b73c20818cf4601a1f4bcdf1b7dbf3b5d9d071cdc9fce5d1652f6bfc43","src/unix/android.rs":"d929826a9a92003b7aaa6435adc220efbb70c15308f9eed5ee517d88451fcaf6","src/unix/linux.rs":"061f0f7bd7a41f29b9d24b6487e302d650e9399f0dcc54595211e20ae4945da6","src/unix/macos.rs":"1b19a24bee240aba8d564405991bb0ee36ba72217461c8ac6829144d28f7e046","src/unix/mod.rs":"d6a4ae02099c46874e13c9fe71103cdecc394467e6c443950d27488ce9ec927b","src/unix/utimensat.rs":"0876aa32393689fcb5eec913c5d34954956a715913d611749b04243a2dbcf897","src/unix/utimes.rs":"d2d8ca5daed3d1ec0a0aed1fc03450deb4b7f1fcdb96ed40b8d2cfefd0e827ee","src/wasm.rs":"a82734259846a3349d42da011c1065d21f3981bd20d6eb43495e34808b83af9a","src/windows.rs":"3314a0def8028d8f2a4bc01377d5874ab9ddf8684aad13573c89b427bf2d9911"},"package":"4b9663d381d07ae25dc88dbdf27df458faa83a9b25336bcac83d5e452b5fc9d3"} \ No newline at end of file
+{"files":{"Cargo.toml":"47fb14a283873b721e1cafb0b68f25c761b513927ee6e203426de090e2299251","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"6cd295d9f4efe412971b1855ecaf7b19ef3956006a652ea205c456a5d33550c1","src/lib.rs":"eec6b5e3be2c1d5e068a9724b503e729a1e11dae4cd1bc816110c94a0bc14fa0","src/redox.rs":"898997b73c20818cf4601a1f4bcdf1b7dbf3b5d9d071cdc9fce5d1652f6bfc43","src/unix/android.rs":"d929826a9a92003b7aaa6435adc220efbb70c15308f9eed5ee517d88451fcaf6","src/unix/linux.rs":"648498bdf715766eeffdc2dff6a58db51cf6b244c431a6d53c33cbc73f1998d4","src/unix/macos.rs":"1b19a24bee240aba8d564405991bb0ee36ba72217461c8ac6829144d28f7e046","src/unix/mod.rs":"52512c922e1dbb47a6c7f0354c6198ca1ac61ea4d540bc0d73d3a3fd08908ba1","src/unix/utimensat.rs":"0876aa32393689fcb5eec913c5d34954956a715913d611749b04243a2dbcf897","src/unix/utimes.rs":"d2d8ca5daed3d1ec0a0aed1fc03450deb4b7f1fcdb96ed40b8d2cfefd0e827ee","src/wasm.rs":"a82734259846a3349d42da011c1065d21f3981bd20d6eb43495e34808b83af9a","src/windows.rs":"3314a0def8028d8f2a4bc01377d5874ab9ddf8684aad13573c89b427bf2d9911"},"package":"4e884668cd0c7480504233e951174ddc3b382f7c2666e3b7310b5c4e7b0c37f9"} \ No newline at end of file
diff --git a/vendor/filetime/Cargo.toml b/vendor/filetime/Cargo.toml
index ba219b3e5..2d41405ab 100644
--- a/vendor/filetime/Cargo.toml
+++ b/vendor/filetime/Cargo.toml
@@ -12,7 +12,7 @@
[package]
edition = "2018"
name = "filetime"
-version = "0.2.18"
+version = "0.2.19"
authors = ["Alex Crichton <alex@alexcrichton.com>"]
description = """
Platform-agnostic accessors of timestamps in File metadata
diff --git a/vendor/filetime/src/unix/linux.rs b/vendor/filetime/src/unix/linux.rs
index c803e0217..255fcfb61 100644
--- a/vendor/filetime/src/unix/linux.rs
+++ b/vendor/filetime/src/unix/linux.rs
@@ -34,6 +34,11 @@ pub fn set_file_handle_times(
static INVALID: AtomicBool = AtomicBool::new(false);
if !INVALID.load(SeqCst) {
let times = [super::to_timespec(&atime), super::to_timespec(&mtime)];
+
+ // We normally use a syscall because the `utimensat` function is documented
+ // as not accepting a file descriptor in the first argument (even though, on
+ // Linux, the syscall itself can accept a file descriptor there).
+ #[cfg(not(target_env = "musl"))]
let rc = unsafe {
libc::syscall(
libc::SYS_utimensat,
@@ -43,6 +48,24 @@ pub fn set_file_handle_times(
0,
)
};
+ // However, on musl, we call the musl libc function instead. This is because
+ // on newer musl versions starting with musl 1.2, `timespec` is always a 64-bit
+ // value even on 32-bit targets. As a result, musl internally converts their
+ // `timespec` values to the correct ABI before invoking the syscall. Since we
+ // use `timespec` from the libc crate, it matches musl's definition and not
+ // the Linux kernel's version (for some platforms) so we must use musl's
+ // `utimensat` function to properly convert the value. musl's `utimensat`
+ // function allows file descriptors in the path argument so this is fine.
+ #[cfg(target_env = "musl")]
+ let rc = unsafe {
+ libc::utimensat(
+ f.as_raw_fd(),
+ ptr::null::<libc::c_char>(),
+ times.as_ptr(),
+ 0,
+ )
+ };
+
if rc == 0 {
return Ok(());
}
@@ -78,15 +101,7 @@ fn set_times(
if !INVALID.load(SeqCst) {
let p = CString::new(p.as_os_str().as_bytes())?;
let times = [super::to_timespec(&atime), super::to_timespec(&mtime)];
- let rc = unsafe {
- libc::syscall(
- libc::SYS_utimensat,
- libc::AT_FDCWD,
- p.as_ptr(),
- times.as_ptr(),
- flags,
- )
- };
+ let rc = unsafe { libc::utimensat(libc::AT_FDCWD, p.as_ptr(), times.as_ptr(), flags) };
if rc == 0 {
return Ok(());
}
diff --git a/vendor/filetime/src/unix/mod.rs b/vendor/filetime/src/unix/mod.rs
index 8b7788837..df62de429 100644
--- a/vendor/filetime/src/unix/mod.rs
+++ b/vendor/filetime/src/unix/mod.rs
@@ -58,17 +58,16 @@ fn to_timespec(ft: &Option<FileTime>) -> timespec {
}
}
+ let mut ts: timespec = unsafe { std::mem::zeroed() };
if let &Some(ft) = ft {
- timespec {
- tv_sec: ft.seconds() as time_t,
- tv_nsec: ft.nanoseconds() as _,
- }
+ ts.tv_sec = ft.seconds() as time_t;
+ ts.tv_nsec = ft.nanoseconds() as _;
} else {
- timespec {
- tv_sec: 0,
- tv_nsec: UTIME_OMIT as _,
- }
+ ts.tv_sec = 0;
+ ts.tv_nsec = UTIME_OMIT as _;
}
+
+ ts
}
pub fn from_last_modification_time(meta: &fs::Metadata) -> FileTime {
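
For context, a minimal sketch of the public filetime API whose Linux backend the hunks above adjust; the file name is illustrative and must already exist when this runs:

```
use filetime::{set_file_times, FileTime};

fn main() -> std::io::Result<()> {
    let mtime = FileTime::from_unix_time(1_700_000_000, 0);
    let atime = FileTime::now();
    // On Linux this bottoms out in the utimensat-based paths patched above.
    set_file_times("example.txt", atime, mtime)?;
    Ok(())
}
```
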
diff --git a/vendor/flate2/.cargo-checksum.json b/vendor/flate2/.cargo-checksum.json
index 0cf4f26d8..287b01bdb 100644
--- a/vendor/flate2/.cargo-checksum.json
+++ b/vendor/flate2/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.lock":"2a6c871e5e5001d23934ab7a2e1e936dc1265d912d384e10c07bede2c1163d31","Cargo.toml":"fb0c03843e790a49015855ebc71b0805ec3647b55093876709a2ed1b1437e263","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"b9fa7ea44625cd15b942c9ae748f8f35197f9c19f5deb7087a5fc115064d19d3","examples/compress_file.rs":"65cac39f50420861cb86120b0e7edce4704eb5afd1e27f790a66b8138176fc4d","examples/deflatedecoder-bufread.rs":"59aaa1dcf999da7909ea0a01bef757b80e366f8adf60984cc2e2c70acc774e1a","examples/deflatedecoder-read.rs":"7784b03156600d9a0efadb830920439a8a8f7fe70250090ee8c637033c1338eb","examples/deflatedecoder-write.rs":"6e6952cef15adbf465d4985802d315a3b51f3137be36e62bbcaed0bb27a8d5a6","examples/deflateencoder-bufread.rs":"0cefcb1edef04834e684c9c3d4048a32c7831617e51a102a7bd6a7351d4799be","examples/deflateencoder-read.rs":"d1718c60bb527a2ca15fd208e22c1df51ed972ea48b9d506536635effdbc4847","examples/deflateencoder-write.rs":"79203d1db1128df2a3fa5cbd61d60776e02959c4da67a473f3b96dfb5f01f964","examples/gzbuilder.rs":"e7435090c86db3133d19f7a2bfb7d49bb60ebbd1e0acaf74c2d0d27375d0c41d","examples/gzdecoder-bufread.rs":"8ad927ad5761e4b31bec5c2b4e5429dd9370e25f35be6e60a29149ed834c97a1","examples/gzdecoder-read.rs":"e63ae23a6188ab9831758178a86a36b8693de27e291725d2d5274df4cb93566c","examples/gzdecoder-write.rs":"a8843c75fa725de84b8eb75b08525e61586f60f9f8bfd874fbfd44039d119c5f","examples/gzencoder-bufread.rs":"aaad311ad799c2e739b58fc4e570225d8f52d3bc407f9c71c28023b321e02100","examples/gzencoder-read.rs":"797522cd3d5815321bbb5c26fdfeae027807219e429e36a1b99fff50fcdf0a64","examples/gzencoder-write.rs":"2eea8a8c8bf22f060d8749d786835ce9454abe2cfd24bf0f7a23b5a8d1030121","examples/gzmultidecoder-bufread.rs":"1e4b31da4f1352e748b4516ba60dcd52e5d5e62733fa1c8e7da7bee4efcce1c4","examples/gzmultidecoder-read.rs":"3976274ac8b8190b3f7ed9fd855a193c38458bd40a2dba6a7de21986f78682f6","examples/hello_world.txt":"d2a84f4b8b650937ec8f73cd8be2c74add5a911ba64df27458ed8229da804a26","examples/zlibdecoder-bufread.rs":"e57a597030e6bd87e79329d87b3743b7fb0ff4c217846b0ba2c2d32264208ff1","examples/zlibdecoder-read.rs":"83b3abd1c12586f820d55e4d86c328fdccbdec963e90c44a44cd7d5f1caa4c8f","examples/zlibdecoder-write.rs":"e7cbec46414eb5d67fee95bf1e4a4e6a1ba2a41f4ce9542dd4481035d3f8070d","examples/zlibencoder-bufread.rs":"13d79baeffb6080b4b4f837e23092b52b92b85466ed9e42b0a33a864232e693f","examples/zlibencoder-read.rs":"7f95783b38e0005740f05dbcc03af3bbf28dfe7c9f0542d3fb015d09554163bd","examples/zlibencoder-write.rs":"47c25fb6711e22f95bdc89309c6e367d1060a0120d4259011e97908563e2b8bc","src/bufreader.rs":"2d6a59330e8f838eb6474d19104f21cb98236125c189f36bdff75a069a4cea8f","src/crc.rs":"bfc96a0df325e8fb0005b008a52f1227546c2a4f72f422d4dd40769a8317eb37","src/deflate/bufread.rs":"7dd1c1a5a74abbf32537285c286551c20b3ed62c99e28a67b7d4d7eaeee73570","src/deflate/mod.rs":"ebaa68e75d0c7470c7a2afd7af0f8cc5413654f7c35ac1cb6af3df0144eeb2cf","src/deflate/read.rs":"dc0d080b0a4ab3aa3f2870b69f7cb5380e49baab4eb5fdfe98f7a0a9fb6b32b9","src/deflate/write.rs":"fef4deebc0dc29d4ad481718613c694f6d1b733199e241a1f7f064e2707e4908","src/ffi/c.rs":"387bba48532bb90f13b38f685f68c0e0b3bb2b3a84e6e915abb2fd571ce9c1e9","src/ffi/mod.rs":"d90bfe6150d905c06755a3bf9355be69a7ffb664ca701d51ee1ffe93229abee1","src/ffi/rust.rs":"235cf72391d4077b569f88854b6468e371c19c417b4815468070e155268d83dd","src/gz/bufread.rs":"62881dd14a736cf5fc3d5006e83afd6e5462808f4e8eeb5844ae07d67ad28f16
","src/gz/mod.rs":"77d10acd6e923f35cc6434e70cc33002006a8cf5923ebb3105531d4f05542327","src/gz/read.rs":"275150a42ae3a39cc56a83ef212bffce32c74eae1bc6ede33945370bb555ad4d","src/gz/write.rs":"41b9c738bcf64dec44cef591b42e1cb5ef0316aa33d621816fbdd5484e51037b","src/lib.rs":"a1cdcc834b6cc7e470c397578142eca0872840ceb594603264b0aa92b73b94fa","src/mem.rs":"e66a546c83c723dc13320f791de5f43f6683f5549e6d8d0e53fda6108762cc8a","src/zio.rs":"066238023d9c931498d996b6bcd135ba58bc9e916dbaaeddec26a20ad07c698e","src/zlib/bufread.rs":"2c6440dd8958469bb07644a50d3170988a5d75f17788d5bfbf03adfd30e50f4a","src/zlib/mod.rs":"fffaea011fc774756f5392cffb1d2d06c05767e7bbbe306fa5fbe95108b9b06b","src/zlib/read.rs":"11dc6a4072b5d99490f7e5ea8eca9da67b4e77981b05eb3ed56067caaffb3a27","src/zlib/write.rs":"330d6ebe93939c8a045c72fb2c8106bfad92e0ed80c60f729ebba40d11090338","tests/corrupt-gz-file.bin":"083dd284aa1621916a2d0f66ea048c8d3ba7a722b22d0d618722633f51e7d39c","tests/early-flush.rs":"5ed4b0f8e66cab9209e079d5636e5f1b780606cd553a182c809cf73a24e77e8e","tests/empty-read.rs":"45477d316f77a048d747e9d18292abfec0cac667768385c8a061e18fd1240238","tests/good-file.gz":"87296963e53024a74752179ce7e54087565d358a85d3e65c3b37ef36eaa3d4a6","tests/good-file.txt":"bc4e03658a441fe2ad2df7cd2197144b87e41696f01e327b380e869cd9b485a0","tests/gunzip.rs":"a67f2e2dc2e6fbdc42f9ea003eae70f862478a5226d961658b30c8c7928fb7f5","tests/multi.gz":"efa3341da052f95056314cc6920e02a3da15bdef30234b2552fb407812db5cc6","tests/multi.txt":"dbea9325179efe46ea2add94f7b6b745ca983fabb208dc6d34aa064623d7ee23","tests/zero-write.rs":"ff8d0349a540b54363c55807c5fd7fbbdc363d08a536d35a3a40f0ce92c16489"},"package":"f82b0f4c27ad9f8bfd1f3208d882da2b09c301bc1c828fd3a00d0216d2fbbff6"} \ No newline at end of file
+{"files":{"Cargo.lock":"a70316ff2059a2809363ab44ebe3e80f443b5127bc772750313869381e349863","Cargo.toml":"4e65bf00fbe95389960ae6b847fa84f600405c620b53fe5b51b5862330f326d0","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"b9fa7ea44625cd15b942c9ae748f8f35197f9c19f5deb7087a5fc115064d19d3","examples/compress_file.rs":"65cac39f50420861cb86120b0e7edce4704eb5afd1e27f790a66b8138176fc4d","examples/deflatedecoder-bufread.rs":"59aaa1dcf999da7909ea0a01bef757b80e366f8adf60984cc2e2c70acc774e1a","examples/deflatedecoder-read.rs":"7784b03156600d9a0efadb830920439a8a8f7fe70250090ee8c637033c1338eb","examples/deflatedecoder-write.rs":"6e6952cef15adbf465d4985802d315a3b51f3137be36e62bbcaed0bb27a8d5a6","examples/deflateencoder-bufread.rs":"0cefcb1edef04834e684c9c3d4048a32c7831617e51a102a7bd6a7351d4799be","examples/deflateencoder-read.rs":"d1718c60bb527a2ca15fd208e22c1df51ed972ea48b9d506536635effdbc4847","examples/deflateencoder-write.rs":"79203d1db1128df2a3fa5cbd61d60776e02959c4da67a473f3b96dfb5f01f964","examples/gzbuilder.rs":"e7435090c86db3133d19f7a2bfb7d49bb60ebbd1e0acaf74c2d0d27375d0c41d","examples/gzdecoder-bufread.rs":"8ad927ad5761e4b31bec5c2b4e5429dd9370e25f35be6e60a29149ed834c97a1","examples/gzdecoder-read.rs":"e63ae23a6188ab9831758178a86a36b8693de27e291725d2d5274df4cb93566c","examples/gzdecoder-write.rs":"a8843c75fa725de84b8eb75b08525e61586f60f9f8bfd874fbfd44039d119c5f","examples/gzencoder-bufread.rs":"aaad311ad799c2e739b58fc4e570225d8f52d3bc407f9c71c28023b321e02100","examples/gzencoder-read.rs":"797522cd3d5815321bbb5c26fdfeae027807219e429e36a1b99fff50fcdf0a64","examples/gzencoder-write.rs":"2eea8a8c8bf22f060d8749d786835ce9454abe2cfd24bf0f7a23b5a8d1030121","examples/gzmultidecoder-bufread.rs":"1e4b31da4f1352e748b4516ba60dcd52e5d5e62733fa1c8e7da7bee4efcce1c4","examples/gzmultidecoder-read.rs":"3976274ac8b8190b3f7ed9fd855a193c38458bd40a2dba6a7de21986f78682f6","examples/hello_world.txt":"d2a84f4b8b650937ec8f73cd8be2c74add5a911ba64df27458ed8229da804a26","examples/zlibdecoder-bufread.rs":"e57a597030e6bd87e79329d87b3743b7fb0ff4c217846b0ba2c2d32264208ff1","examples/zlibdecoder-read.rs":"83b3abd1c12586f820d55e4d86c328fdccbdec963e90c44a44cd7d5f1caa4c8f","examples/zlibdecoder-write.rs":"e7cbec46414eb5d67fee95bf1e4a4e6a1ba2a41f4ce9542dd4481035d3f8070d","examples/zlibencoder-bufread.rs":"13d79baeffb6080b4b4f837e23092b52b92b85466ed9e42b0a33a864232e693f","examples/zlibencoder-read.rs":"7f95783b38e0005740f05dbcc03af3bbf28dfe7c9f0542d3fb015d09554163bd","examples/zlibencoder-write.rs":"47c25fb6711e22f95bdc89309c6e367d1060a0120d4259011e97908563e2b8bc","src/bufreader.rs":"2d6a59330e8f838eb6474d19104f21cb98236125c189f36bdff75a069a4cea8f","src/crc.rs":"bfc96a0df325e8fb0005b008a52f1227546c2a4f72f422d4dd40769a8317eb37","src/deflate/bufread.rs":"7dd1c1a5a74abbf32537285c286551c20b3ed62c99e28a67b7d4d7eaeee73570","src/deflate/mod.rs":"ebaa68e75d0c7470c7a2afd7af0f8cc5413654f7c35ac1cb6af3df0144eeb2cf","src/deflate/read.rs":"dc0d080b0a4ab3aa3f2870b69f7cb5380e49baab4eb5fdfe98f7a0a9fb6b32b9","src/deflate/write.rs":"fef4deebc0dc29d4ad481718613c694f6d1b733199e241a1f7f064e2707e4908","src/ffi/c.rs":"387bba48532bb90f13b38f685f68c0e0b3bb2b3a84e6e915abb2fd571ce9c1e9","src/ffi/mod.rs":"d90bfe6150d905c06755a3bf9355be69a7ffb664ca701d51ee1ffe93229abee1","src/ffi/rust.rs":"235cf72391d4077b569f88854b6468e371c19c417b4815468070e155268d83dd","src/gz/bufread.rs":"62881dd14a736cf5fc3d5006e83afd6e5462808f4e8eeb5844ae07d67ad28f16
","src/gz/mod.rs":"77d10acd6e923f35cc6434e70cc33002006a8cf5923ebb3105531d4f05542327","src/gz/read.rs":"275150a42ae3a39cc56a83ef212bffce32c74eae1bc6ede33945370bb555ad4d","src/gz/write.rs":"41b9c738bcf64dec44cef591b42e1cb5ef0316aa33d621816fbdd5484e51037b","src/lib.rs":"a1cdcc834b6cc7e470c397578142eca0872840ceb594603264b0aa92b73b94fa","src/mem.rs":"e66a546c83c723dc13320f791de5f43f6683f5549e6d8d0e53fda6108762cc8a","src/zio.rs":"066238023d9c931498d996b6bcd135ba58bc9e916dbaaeddec26a20ad07c698e","src/zlib/bufread.rs":"2c6440dd8958469bb07644a50d3170988a5d75f17788d5bfbf03adfd30e50f4a","src/zlib/mod.rs":"fffaea011fc774756f5392cffb1d2d06c05767e7bbbe306fa5fbe95108b9b06b","src/zlib/read.rs":"11dc6a4072b5d99490f7e5ea8eca9da67b4e77981b05eb3ed56067caaffb3a27","src/zlib/write.rs":"330d6ebe93939c8a045c72fb2c8106bfad92e0ed80c60f729ebba40d11090338","tests/corrupt-gz-file.bin":"083dd284aa1621916a2d0f66ea048c8d3ba7a722b22d0d618722633f51e7d39c","tests/early-flush.rs":"5ed4b0f8e66cab9209e079d5636e5f1b780606cd553a182c809cf73a24e77e8e","tests/empty-read.rs":"45477d316f77a048d747e9d18292abfec0cac667768385c8a061e18fd1240238","tests/good-file.gz":"87296963e53024a74752179ce7e54087565d358a85d3e65c3b37ef36eaa3d4a6","tests/good-file.txt":"bc4e03658a441fe2ad2df7cd2197144b87e41696f01e327b380e869cd9b485a0","tests/gunzip.rs":"a67f2e2dc2e6fbdc42f9ea003eae70f862478a5226d961658b30c8c7928fb7f5","tests/multi.gz":"efa3341da052f95056314cc6920e02a3da15bdef30234b2552fb407812db5cc6","tests/multi.txt":"dbea9325179efe46ea2add94f7b6b745ca983fabb208dc6d34aa064623d7ee23","tests/zero-write.rs":"ff8d0349a540b54363c55807c5fd7fbbdc363d08a536d35a3a40f0ce92c16489"},"package":"a8a2db397cb1c8772f31494cb8917e48cd1e64f0fa7efac59fbd741a0a8ce841"} \ No newline at end of file
diff --git a/vendor/flate2/Cargo.lock b/vendor/flate2/Cargo.lock
index 732ee01f0..5389eecb0 100644
--- a/vendor/flate2/Cargo.lock
+++ b/vendor/flate2/Cargo.lock
@@ -10,9 +10,9 @@ checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
[[package]]
name = "cc"
-version = "1.0.73"
+version = "1.0.77"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2fff2a6927b3bb87f9595d67196a70493f627687a71d87a0d692242c33f58c11"
+checksum = "e9f73505338f7d905b19d18738976aae232eb46b8efc15554ffc56deb5d9ebe4"
[[package]]
name = "cfg-if"
@@ -31,9 +31,9 @@ dependencies = [
[[package]]
name = "cmake"
-version = "0.1.48"
+version = "0.1.49"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e8ad8cef104ac57b68b89df3208164d228503abbdce70f6880ffa3d970e7443a"
+checksum = "db34956e100b30725f2eb215f90d4871051239535632f84fea3bc92722c66b7c"
dependencies = [
"cc",
]
@@ -49,7 +49,7 @@ dependencies = [
[[package]]
name = "flate2"
-version = "1.0.24"
+version = "1.0.25"
dependencies = [
"cloudflare-zlib-sys",
"crc32fast",
@@ -62,9 +62,9 @@ dependencies = [
[[package]]
name = "getrandom"
-version = "0.2.6"
+version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9be70c98951c83b8d2f8f60d7065fa6d5146873094452a1008da8c2f1e4205ad"
+checksum = "c05aeb6a22b8f62540c194aac980f2115af067bfe15a0734d7277a768d396b31"
dependencies = [
"cfg-if",
"libc",
@@ -73,9 +73,9 @@ dependencies = [
[[package]]
name = "libc"
-version = "0.2.124"
+version = "0.2.137"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "21a41fed9d98f27ab1c6d161da622a4fa35e8a54a8adc24bbf3ddd0ef70b0e50"
+checksum = "fc7fcc620a3bff7cdd7a365be3376c97191aeaccc2a603e600951e452615bf89"
[[package]]
name = "libz-ng-sys"
@@ -102,24 +102,24 @@ dependencies = [
[[package]]
name = "miniz_oxide"
-version = "0.5.1"
+version = "0.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d2b29bd4bc3f33391105ebee3589c19197c4271e3e5a9ec9bfe8127eeff8f082"
+checksum = "b275950c28b37e794e8c55d88aeb5e139d0ce23fdbbeda68f8d7174abdf9e8fa"
dependencies = [
"adler",
]
[[package]]
name = "pkg-config"
-version = "0.3.25"
+version = "0.3.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1df8c4ec4b0627e53bdf214615ad287367e482558cf84b109250b37464dc03ae"
+checksum = "6ac9a59f73473f1b8d852421e59e64809f025994837ef743615c6d0c5b305160"
[[package]]
name = "ppv-lite86"
-version = "0.2.16"
+version = "0.2.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eb9f9e6e233e5c4a35559a617bf40a4ec447db2e84c20b55a6f83167b7e57872"
+checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de"
[[package]]
name = "quickcheck"
@@ -153,9 +153,9 @@ dependencies = [
[[package]]
name = "rand_core"
-version = "0.6.3"
+version = "0.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7"
+checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
dependencies = [
"getrandom",
]
@@ -168,6 +168,6 @@ checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
[[package]]
name = "wasi"
-version = "0.10.2+wasi-snapshot-preview1"
+version = "0.11.0+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6"
+checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
diff --git a/vendor/flate2/Cargo.toml b/vendor/flate2/Cargo.toml
index 23b8b80d0..68516e5e7 100644
--- a/vendor/flate2/Cargo.toml
+++ b/vendor/flate2/Cargo.toml
@@ -12,7 +12,7 @@
[package]
edition = "2018"
name = "flate2"
-version = "1.0.24"
+version = "1.0.25"
authors = [
"Alex Crichton <alex@alexcrichton.com>",
"Josh Triplett <josh@joshtriplett.org>",
@@ -56,7 +56,8 @@ optional = true
default-features = false
[dependencies.miniz_oxide]
-version = "0.5.0"
+version = "0.6.0"
+features = ["with-alloc"]
optional = true
default-features = false
@@ -90,5 +91,6 @@ zlib-ng-compat = [
]
[target."cfg(all(target_arch = \"wasm32\", not(target_os = \"emscripten\")))".dependencies.miniz_oxide]
-version = "0.5.0"
+version = "0.6.0"
+features = ["with-alloc"]
default-features = false
diff --git a/vendor/gimli-0.26.2/.cargo-checksum.json b/vendor/gimli-0.26.2/.cargo-checksum.json
new file mode 100644
index 000000000..759ec93de
--- /dev/null
+++ b/vendor/gimli-0.26.2/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"CHANGELOG.md":"789a696803d3f1bed3ff3566cac8e7cf15c4bf9428242d637d0ce7f3a0ad57a3","CONTRIBUTING.md":"5f513ec06013e4f6f097e9c9492da5a47b9f25c94c6ecadfb655a77405fe912c","Cargo.lock":"284bff6b09ef0fd214c34492417778d6d5b9f75dc54557015af01a95696c752a","Cargo.toml":"92dccbeaa61bc8c65da53917fbf32900b3cb2549f90b67b67e1c67672bac205e","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"7b63ecd5f1902af1b63729947373683c32745c16a10e8e6292e2e2dcd7e90ae0","README.md":"57e36d344dabe1c52a9c81eafb28787c309b86c47437abf8589ef17bf383fc5f","benches/bench.rs":"e0045b989683794951563aa91b37069b2f6ae55f95e288d23f5c984b46e3a7eb","examples/dwarf-validate.rs":"4aac1045e3c08bf00878eeff75c0cfc30c06171c5eab2e71d757505786729687","examples/dwarfdump.rs":"d74323c037689b32825efa9bf69614ee26a444513b266e819ecf486956ee3299","examples/simple.rs":"4c3425e8bd1880d9522f5ed2581fb5ccd452d4be678eebc0e147c48722a7be1d","examples/simple_line.rs":"ac795f859a17650dde466b5b23b8c161b2e3b8eb57e32f5b6718a3072f6bfad0","fixtures/self/README.md":"7cfd76031ec5a4b38cc4eb56ccbfe1bb590fb54c333d037550bdeaaeacfc20cb","fixtures/self/debug_abbrev":"7c0faa940d9c68d196d03ad55a20e5c746040fa428ff323277fa381deff82bba","fixtures/self/debug_aranges":"8c2aeb2335f61d04ecb7b747070d24f83a6517cbee79dc5c96d97fb6c53d6b6d","fixtures/self/debug_info":"42028a5983006e0703f9ca9515cd27d891ae4af70279fae5011d547f581e2661","fixtures/self/debug_inlined":"89d9516f06ff835621936037f5884fc56712bf304c1dcde52251ddd510fe8710","fixtures/self/debug_line":"b29aebcca3b38bb2bb8aa708cbe74a0dce5a3b0c18916b63d6d17282c017bec7","fixtures/self/debug_loc":"8906ccb9c204f233eb74c1d069dee97a19d18c2051f9147795d7b5364a9266aa","fixtures/self/debug_pubnames":"cf58e237f89c68afba724597fa7e260448636b45f2e69dc6f1bfe34006e27c48","fixtures/self/debug_pubtypes":"d43c1bed71c9d14d1683294cdc1833f069cf131d6e95ee808547919b4f352d81","fixtures/self/debug_ranges":"6d765ac18d33accd89186d077eeb505cbdf97d990c9201d63d9463cd7787ce7a","fixtures/self/debug_str":"9ed904b68eee77b8558b80b3b7ca03e8527f6c64483e9d6d845f40270eb21183","fixtures/self/eh_frame":"6dc3d84351cac42cf73d03452fbb532470dd94d08715154c48417e3f62095f17","fixtures/self/eh_frame_hdr":"afba7a0aa233c9a8c81c986495bd2505164844adb93272d6bc0c9e592e684716","rustfmt.toml":"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855","src/arch.rs":"1c4cb3e2a322f3f42fe0b82875c9d0ce060d9af2388990139bdce9a4487c32da","src/common.rs":"392f52a58db6101187ca5525bbeafca9bda2342debd058cabca37350cd9db619","src/constants.rs":"358cf7924c79bc72de59d23d1fa02b2047d6c763c8fbd8be263ab8cd3e3ba7ec","src/endianity.rs":"1f7e62ae34f540c06bedf1e7948739211556eea7dd83731a5ca52c7d687ed0fc","src/leb128.rs":"996d5c79d027f97c010ca487bc4ff5f8265f4b9e63d62b4e4fa291383c259ee9","src/lib.rs":"6863b9a9d1eddf34b4095dfe60318aae56914fbf515ba5601b29024cc963f27c","src/read/abbrev.rs":"a3f550c32f1eb880d82bdb5257d35e10d32cfd039050e8131cbeedac346cc1d9","src/read/addr.rs":"f63f289edf889e87107bb2090fb1c50b48af7015f31b7c39c3d6ea09630a38e9","src/read/aranges.rs":"ba3302f87cffb7ee15f48b0530ebd707f45ad056934223078d25ae2a1b034f1c","src/read/cfi.rs":"b1064ed9b4b87169a148cc86adc7443c5a771dc2d1799129f7883f1ef6adc165","src/read/dwarf.rs":"a39c24429b437ae3a1cd17bae2f01c973c9ce39f7b5f2b3435982d6860944e0e","src/read/endian_reader.rs":"320983a859c2bb0dd44a3e6fae55ff0a84dba3fa80c2edbc64aa8135c44eddf0","src/read/endian_slice.rs":"ae1c52499728f6a85648f1bf87c02dcf43bebecb5ad4e835a1246938ba4338bf","src/read/index.rs":"e79b8d591b8e2007a37f5ea85a6d71b69d56ca3739a85cf7bf361724
c5b829fa","src/read/line.rs":"af7a1520777e56632970fc5fe7377fdcd12d078eb88eeb2b0f2cc95b73ff68a7","src/read/lists.rs":"67ca9e1a36a91feb4996d035211de845205212bfda02163685d217818567ff93","src/read/loclists.rs":"1b4ea85c0dd8c6eae492a60cb70810185d56ba579df7986cb8a36385031b10fd","src/read/lookup.rs":"0cf89ba12b9d48b1fe035dd3a497730323acb9427a9457abbc2f7c58c4c71165","src/read/mod.rs":"3bafc747c31a575bcc92d3e7d5ea5a15f5acc01918a4377cec1dced0f85b5d2b","src/read/op.rs":"e5dce6520dfc90ec74c3b070ca374b89fcf55ff23101471591458175a72c79e6","src/read/pubnames.rs":"ed752ee1a7017e6d3be42d81e4ddaaac960ef08081463a19106c9f041526d4a3","src/read/pubtypes.rs":"5e75b32c0923e827aff0bb2db456797a0e8d38ba46be992558a7990b3196bcf5","src/read/reader.rs":"b10ff3e77b54347e96b1f3cff30da104dfdd0c4d7a55b672950788f1f1ae3478","src/read/rnglists.rs":"af637d283d76514382ee0556463cccab4e6f0ea4d061db9a44a594b5d57d1fd7","src/read/str.rs":"4c2f50014451621fea45969cd313f6840fcd3a99d7a2d081bfa1f8e0e434133a","src/read/unit.rs":"6ed00ba004c329008bf295d9c7d724afe961750f0c7b08430fc213fd5d998003","src/read/util.rs":"0b7d0d2225a98618070dc472ccba49a5411aa8beed5ff6696da079d06156d363","src/read/value.rs":"5a91e03ad3d41f679b264753498434b91948c6b89955e4beb4522498386d9b1d","src/test_util.rs":"291eefa6b51c6d934ba2f4a4c9bc7c403046fc1cccf4d43487820f0154bb89e2","src/write/abbrev.rs":"fa02163389e92e804d139cf84f833ab6af932083f0eb2d74464b4a70bd3237ff","src/write/cfi.rs":"3b04b0ebd82363738199cc673f64e0ceb60506a67c4f18b435a109caa62840f3","src/write/dwarf.rs":"8a1a0893e31134ad68993994594f3024ad0c8af7c1188b29e0ffc26b42edef21","src/write/endian_vec.rs":"1d5811986648816a677580b22630f5059757a381487d73e9adbb3008c9ae0c58","src/write/line.rs":"df7d2082c71b5e523cd52745700aae3dcfa5800f0b280e831ef5d8eb8035d6a7","src/write/loc.rs":"bb5b750c04f6603e18225db72652ea00239234ba674a8a8627c99d4ab07b47a9","src/write/mod.rs":"d8aa1da854cdee629d470d00d87e00dc6998e4bec1ca951f8d2f277730ab9d69","src/write/op.rs":"7b1d49b10c8c92b2d5b259e83119ff7dc95bc552535bb7b1a82ca9556a35c589","src/write/range.rs":"5bac01e372c08e3cc19e1e07e40492d8214cdfa8881737920cb792f4aa2ba80b","src/write/section.rs":"3ce781d5e82ba365ff54fdd36e0ef58c58a2215b09a8861eb0b038efac82b77f","src/write/str.rs":"4850cc2fee55980f9cbb6b4169f9861ab9d05c2b28a85c2b790480b83a66f514","src/write/unit.rs":"213c881736f8c87fcb2f921e379791eaba2915e8d077139965a9c6211001fe44","src/write/writer.rs":"304181287f90445bbfb33349c26b34bd87002d6844fc5686bfc0756fd0a1ecd8","tests/convert_self.rs":"180909b562969e1691b64628ded8654e6e0b10b3357f39917bd8ac288c5826dd","tests/parse_self.rs":"f2da1c7daef7139545c9367c2f26199e8b4623b31d4ec6480ddd851e6980f2dc"},"package":"22030e2c5a68ec659fde1e949a745124b48e6fa8b045b7ed5bd1fe4ccc5c4e5d"} \ No newline at end of file
diff --git a/vendor/gimli-0.26.2/CHANGELOG.md b/vendor/gimli-0.26.2/CHANGELOG.md
new file mode 100644
index 000000000..9ca6d70a8
--- /dev/null
+++ b/vendor/gimli-0.26.2/CHANGELOG.md
@@ -0,0 +1,873 @@
+# `gimli` Change Log
+
+--------------------------------------------------------------------------------
+
+## 0.26.2
+
+Released 2022/07/16.
+
+### Changed
+
+* Fixed CFI personality encoding when writing.
+ [#609](https://github.com/gimli-rs/gimli/pull/609)
+
+* Fixed use of raw pointer for mutation, detected by Miri.
+ [#614](https://github.com/gimli-rs/gimli/pull/614)
+
+* Fixed `DW_OP_GNU_implicit_pointer` handling for DWARF version 2.
+ [#618](https://github.com/gimli-rs/gimli/pull/618)
+
+### Added
+
+* Added `read::EhHdrTable::iter`.
+ [#619](https://github.com/gimli-rs/gimli/pull/619)
+
+--------------------------------------------------------------------------------
+
+## 0.26.1
+
+Released 2021/11/02.
+
+### Changed
+
+* Fixed segmentation fault in `ArrayVec<Vec<T>>::into_vec`, which may be used by
+ `read::Evaluation::result`. This regression was introduced in 0.26.0.
+ [#601](https://github.com/gimli-rs/gimli/pull/601)
+
+--------------------------------------------------------------------------------
+
+## 0.26.0
+
+Released 2021/10/24.
+
+### Breaking changes
+
+* Removed `read::UninitializedUnwindContext`. Use `Box<UnwindContext>` instead.
+ [#593](https://github.com/gimli-rs/gimli/pull/593)
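+
+  For example, a minimal migration sketch (not from the upstream docs; it assumes
+  the default `read`/`std` features, and `EndianSlice<'static, LittleEndian>` is
+  just a placeholder reader type):
+
+  ```rust
+  use gimli::{EndianSlice, LittleEndian, UnwindContext};
+
+  fn main() {
+      // Previously: UninitializedUnwindContext::new(). Now the context is
+      // heap-allocated directly and reused across unwind lookups.
+      let mut ctx: Box<UnwindContext<EndianSlice<'static, LittleEndian>>> =
+          Box::new(UnwindContext::new());
+      // `ctx` would then be passed to unwind lookups such as
+      // `FrameDescriptionEntry::unwind_info_for_address`.
+      let _ = &mut ctx;
+  }
+  ```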
+
+* Renamed `read::Error::CfiStackFull` to `StackFull`.
+ [#595](https://github.com/gimli-rs/gimli/pull/595)
+
+* Added `UnwindContextStorage` type parameter to `read::UnwindContext`, `read::UnwindTable`,
+ `read::UnwindTableRow`, and `read::RegisterRuleMap`.
+ [#595](https://github.com/gimli-rs/gimli/pull/595)
+
+* Added `EvaluationStorage` type parameter to `read::Evaluation`.
+ [#595](https://github.com/gimli-rs/gimli/pull/595)
+
+* Added `read::SectionId::DebugCuIndex` and `read::SectionId::DebugTuIndex`.
+ [#588](https://github.com/gimli-rs/gimli/pull/588)
+
+### Changed
+
+* Fixed `DW_EH_PE_pcrel` handling in default `write::Writer::write_eh_pointer` implementation.
+ [#576](https://github.com/gimli-rs/gimli/pull/576)
+
+* Fixed `read::AttributeSpecification::size` for some forms.
+ [#597](https://github.com/gimli-rs/gimli/pull/597)
+
+* Display more unit details in dwarfdump.
+ [#584](https://github.com/gimli-rs/gimli/pull/584)
+
+### Added
+
+* Added `write::DebuggingInformationEntry::delete_child`.
+ [#570](https://github.com/gimli-rs/gimli/pull/570)
+
+* Added ARM and AArch64 register definitions.
+ [#574](https://github.com/gimli-rs/gimli/pull/574)
+ [#577](https://github.com/gimli-rs/gimli/pull/577)
+
+* Added RISC-V register definitions.
+ [#579](https://github.com/gimli-rs/gimli/pull/579)
+
+* Added `read::DwarfPackage`, `read::DebugCuIndex`, and `read::DebugTuIndex`.
+ [#588](https://github.com/gimli-rs/gimli/pull/588)
+
+* Added `read-core` feature to allow building without `liballoc`.
+ [#596](https://github.com/gimli-rs/gimli/pull/596)
+
+* Added `read::EntriesRaw::skip_attributes`.
+ [#597](https://github.com/gimli-rs/gimli/pull/597)
+
+--------------------------------------------------------------------------------
+
+## 0.25.0
+
+Released 2021/07/26.
+
+### Breaking changes
+
+* `read::FrameDescriptionEntry::unwind_info_for_address` now returns a reference
+ instead of cloning.
+ [#557](https://github.com/gimli-rs/gimli/pull/557)
+
+* `read::AttributeValue::RangeListsRef` now contains a `RawRangeListsOffset`
+ to allow handling of GNU split DWARF extensions.
+ Use `read::Dwarf::ranges_offset_from_raw` to handle it.
+ [#568](https://github.com/gimli-rs/gimli/pull/568)
+ [#569](https://github.com/gimli-rs/gimli/pull/569)
+
+* Added `read::Unit::dwo_id`.
+ [#569](https://github.com/gimli-rs/gimli/pull/569)
+
+### Changed
+
+* `.debug_aranges` parsing now accepts version 3.
+ [#560](https://github.com/gimli-rs/gimli/pull/560)
+
+* `read::Dwarf::attr_ranges_offset` and its callers now handle GNU split DWARF extensions.
+ [#568](https://github.com/gimli-rs/gimli/pull/568)
+ [#569](https://github.com/gimli-rs/gimli/pull/569)
+
+### Added
+
+* Added `read::DebugLineStr::new`.
+ [#556](https://github.com/gimli-rs/gimli/pull/556)
+
+* Added `read::UnwindTable::into_current_row`.
+ [#557](https://github.com/gimli-rs/gimli/pull/557)
+
+* Added more `DW_LANG` constants.
+ [#565](https://github.com/gimli-rs/gimli/pull/565)
+
+* dwarfdump: added DWO parent support.
+ [#568](https://github.com/gimli-rs/gimli/pull/568)
+
+* Added `read::Dwarf` methods: `ranges_offset_from_raw`, `raw_ranges`, and `raw_locations`.
+ [#568](https://github.com/gimli-rs/gimli/pull/568)
+ [#569](https://github.com/gimli-rs/gimli/pull/569)
+
+--------------------------------------------------------------------------------
+
+## 0.24.0
+
+Released 2021/05/01.
+
+### Breaking changes
+
+* Minimum Rust version increased to 1.42.0.
+
+* Added `read::Dwarf::debug_aranges`.
+ [#539](https://github.com/gimli-rs/gimli/pull/539)
+
+* Replaced `read::DebugAranges::items` with `read::DebugAranges::headers`.
+ [#539](https://github.com/gimli-rs/gimli/pull/539)
+
+* Added `read::Operation::Wasm*`.
+ [#546](https://github.com/gimli-rs/gimli/pull/546)
+
+* `read::LineRow::line` now returns `Option<NonZeroU64>`.
+ The `read::ColumnType::Column` variant now contains a `NonZeroU64`.
+ [#551](https://github.com/gimli-rs/gimli/pull/551)
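+
+  A small migration sketch (not from the upstream docs; `line_number` is a
+  hypothetical helper for callers that relied on the old `0 = no line`
+  convention):
+
+  ```rust
+  use std::num::NonZeroU64;
+
+  /// Hypothetical helper: map the new `Option<NonZeroU64>` value returned by
+  /// `LineRow::line` back to the old convention where 0 meant "no line".
+  fn line_number(line: Option<NonZeroU64>) -> u64 {
+      line.map(NonZeroU64::get).unwrap_or(0)
+  }
+
+  fn main() {
+      assert_eq!(line_number(NonZeroU64::new(42)), 42);
+      assert_eq!(line_number(None), 0);
+  }
+  ```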
+
+* Replaced `read::Dwarf::debug_str_sup` with `read::Dwarf::sup`.
+ Deleted `sup` parameter of `read::Dwarf::load`.
+ Added `read::Dwarf::load_sup`.
+ [#554](https://github.com/gimli-rs/gimli/pull/554)
+
+### Added
+
+* dwarfdump: Supplementary object file support.
+ [#552](https://github.com/gimli-rs/gimli/pull/552)
+
+### Changed
+
+* Support `DW_FORM_addrx*` for `DW_AT_low_pc`/`DW_AT_high_pc` in `read::Dwarf`.
+ [#541](https://github.com/gimli-rs/gimli/pull/541)
+
+* Performance improvement in `EndianReader`.
+ [#549](https://github.com/gimli-rs/gimli/pull/549)
+
+--------------------------------------------------------------------------------
+
+## 0.23.0
+
+Released 2020/10/27.
+
+### Breaking changes
+
+* Added more variants to `read::UnitType`.
+  Added `read::AttributeValue::DwoId`.
+ [#521](https://github.com/gimli-rs/gimli/pull/521)
+
+* Replaced `CompilationUnitHeader` and `TypeUnitHeader` with `UnitHeader`.
+ Replaced `CompilationUnitHeadersIter` with `DebugInfoUnitHeadersIter`.
+ Replaced `TypeUnitHeadersIter` with `DebugTypesUnitHeadersIter`.
+ [#523](https://github.com/gimli-rs/gimli/pull/523)
+
+
+### Added
+
+* Added read support for split DWARF.
+ [#527](https://github.com/gimli-rs/gimli/pull/527)
+ [#529](https://github.com/gimli-rs/gimli/pull/529)
+
+* Added `read::Dwarf::attr_address`.
+ [#524](https://github.com/gimli-rs/gimli/pull/524)
+
+* Added read support for `DW_AT_GNU_addr_base` and `DW_AT_GNU_ranges_base`.
+ [#525](https://github.com/gimli-rs/gimli/pull/525)
+
+* dwarfdump: Display index values for attributes.
+ [#526](https://github.com/gimli-rs/gimli/pull/526)
+
+* Added `name_to_register`.
+ [#532](https://github.com/gimli-rs/gimli/pull/532)
+
+--------------------------------------------------------------------------------
+
+## 0.22.0
+
+Released 2020/07/03.
+
+### Breaking changes
+
+* Fixed `UnitHeader::size_of_header` for DWARF 5 units.
+ [#518](https://github.com/gimli-rs/gimli/pull/518)
+
+### Added
+
+* Added fuzz targets in CI.
+ [#512](https://github.com/gimli-rs/gimli/pull/512)
+
+* Added read support for `DW_OP_GNU_addr_index` and `DW_OP_GNU_const_index`.
+ [#516](https://github.com/gimli-rs/gimli/pull/516)
+
+* Added `.dwo` support to dwarfdump.
+ [#516](https://github.com/gimli-rs/gimli/pull/516)
+
+* Added `SectionId::dwo_name` and `Section::dwo_section_name`.
+ [#517](https://github.com/gimli-rs/gimli/pull/517)
+
+### Fixed
+
+* Fixed panic when reading `DW_FORM_indirect` combined with `DW_FORM_implicit_const`.
+ [#502](https://github.com/gimli-rs/gimli/pull/502)
+
+* Fixed panic for `read::Abbreviations::get(0)`.
+ [#505](https://github.com/gimli-rs/gimli/pull/505)
+
+* Fixed arithmetic overflow when reading `.debug_line`.
+ [#508](https://github.com/gimli-rs/gimli/pull/508)
+
+* Fixed arithmetic overflow when reading CFI.
+ [#509](https://github.com/gimli-rs/gimli/pull/509)
+
+* Fixed arithmetic overflow and division by zero when reading `.debug_aranges`.
+ [#510](https://github.com/gimli-rs/gimli/pull/510)
+
+* Don't return an error from `read::Unit::new` when `DW_AT_name` or `DW_AT_comp_dir` is missing.
+ [#515](https://github.com/gimli-rs/gimli/pull/515)
+
+--------------------------------------------------------------------------------
+
+## 0.21.0
+
+Released 2020/05/12.
+
+### Breaking changes
+
+* Minimum Rust version increased to 1.38.0.
+
+* Replaced `read::Operation::Literal` with `Operation::UnsignedConstant` and `Operation::SignedConstant`.
+ Changed `read::Operation::Bra` and `read::Operation::Skip` to contain the target offset instead of the bytecode.
+ [#479](https://github.com/gimli-rs/gimli/pull/479)
+
+* Changed `write::Expression` to support references. Existing users can convert to use `Expression::raw`.
+ [#479](https://github.com/gimli-rs/gimli/pull/479)
+
+* Replaced `write::AttributeValue::AnyUnitEntryRef` with `DebugInfoRef`.
+ Renamed `write::AttributeValue::ThisUnitEntryRef` to `UnitRef`.
+ [#479](https://github.com/gimli-rs/gimli/pull/479)
+
+* Added more optional features: `endian-reader` and `fallible-iterator`.
+ [#495](https://github.com/gimli-rs/gimli/pull/495)
+ [#498](https://github.com/gimli-rs/gimli/pull/498)
+
+### Added
+
+* Added `read::Expression::operations`.
+ [#479](https://github.com/gimli-rs/gimli/pull/479)
+
+### Fixed
+
+* Fixed newlines in `dwarfdump` example.
+ [#470](https://github.com/gimli-rs/gimli/pull/470)
+
+* Ignore zero terminators when reading `.debug_frame` sections.
+ [#486](https://github.com/gimli-rs/gimli/pull/486)
+
+* Increase the number of CFI register rules supported by `read::UnwindContext`.
+ [#487](https://github.com/gimli-rs/gimli/pull/487)
+
+* Fixed version handling and return register encoding when reading `.eh_frame` sections.
+ [#493](https://github.com/gimli-rs/gimli/pull/493)
+
+### Changed
+
+* Added `EhFrame` and `DebugFrame` to `write::Sections`.
+ [#492](https://github.com/gimli-rs/gimli/pull/492)
+
+* Improved performance of `write::LineProgram::generate_row`.
+ [#476](https://github.com/gimli-rs/gimli/pull/476)
+
+* Removed use of the `byteorder`, `arrayvec` and `smallvec` crates.
+ [#494](https://github.com/gimli-rs/gimli/pull/494)
+ [#496](https://github.com/gimli-rs/gimli/pull/496)
+ [#497](https://github.com/gimli-rs/gimli/pull/497)
+
+--------------------------------------------------------------------------------
+
+## 0.20.0
+
+Released 2020/01/11.
+
+### Breaking changes
+
+* Changed type of `DwTag`, `DwAt`, and `DwForm` constants.
+ [#451](https://github.com/gimli-rs/gimli/pull/451)
+
+* Added `read/write::AttributeValue::DebugMacroRef`, which is now returned where
+  required by `read::Attribute::value`. Added `SectionId::DebugMacro`.
+ [#454](https://github.com/gimli-rs/gimli/pull/454)
+
+* Deleted `alloc` feature, and fixed `no-std` builds with stable rust.
+ [#459](https://github.com/gimli-rs/gimli/pull/459)
+
+* Deleted `read::Error::description`, and changed `<read::Error as Display>`
+ to display what was previously the description.
+ [#462](https://github.com/gimli-rs/gimli/pull/462)
+
+### Added
+
+* Added GNU view constants.
+ [#434](https://github.com/gimli-rs/gimli/pull/434)
+
+* Added `read::EntriesRaw` for low level DIE parsing.
+ [#455](https://github.com/gimli-rs/gimli/pull/455)
+
+* Added `examples/simple-line.rs`.
+ [#460](https://github.com/gimli-rs/gimli/pull/460)
+
+### Fixed
+
+* Fixed handling of CFI augmentations without data.
+ [#438](https://github.com/gimli-rs/gimli/pull/438)
+
+* dwarfdump: fix panic for malformed expressions.
+ [#447](https://github.com/gimli-rs/gimli/pull/447)
+
+* dwarfdump: fix handling of Mach-O relocations.
+ [#449](https://github.com/gimli-rs/gimli/pull/449)
+
+### Changed
+
+* Improved abbreviation parsing performance.
+ [#451](https://github.com/gimli-rs/gimli/pull/451)
+
+--------------------------------------------------------------------------------
+
+## 0.19.0
+
+Released 2019/07/08.
+
+### Breaking changes
+
+* Small API changes related to `.debug_loc` and `.debug_loclists`:
+ added `read::RawLocListEntry::AddressOrOffsetPair` enum variant,
+ added `write::Sections::debug_loc/debug_loclists` public members,
+ and replaced `write::AttributeValue::LocationListsRef` with `LocationListRef`.
+ [#425](https://github.com/gimli-rs/gimli/pull/425)
+
+### Added
+
+* Added `read::Attribute::exprloc_value` and `read::AttributeValue::exprloc_value`.
+ [#422](https://github.com/gimli-rs/gimli/pull/422)
+
+* Added support for writing `.debug_loc` and `.debug_loclists` sections.
+ [#425](https://github.com/gimli-rs/gimli/pull/425)
+
+* Added `-G` flag to `dwarfdump` example to display global offsets.
+ [#427](https://github.com/gimli-rs/gimli/pull/427)
+
+* Added `examples/simple.rs`.
+ [#429](https://github.com/gimli-rs/gimli/pull/429)
+
+### Fixed
+
+* `write::LineProgram::from` no longer requires `DW_AT_name` or `DW_AT_comp_dir`
+ attributes to be present in the unit DIE.
+ [#430](https://github.com/gimli-rs/gimli/pull/430)
+
+--------------------------------------------------------------------------------
+
+## 0.18.0
+
+Released 2019/04/25.
+
+The focus of this release has been on improving support for reading CFI,
+and adding support for writing CFI.
+
+### Breaking changes
+
+* For types which have an `Offset` type parameter, the default `Offset`
+ has changed from `usize` to `R::Offset`.
+ [#392](https://github.com/gimli-rs/gimli/pull/392)
+
+* Added an `Offset` type parameter to the `read::Unit` type to allow variance.
+ [#393](https://github.com/gimli-rs/gimli/pull/393)
+
+* Changed the `UninitializedUnwindContext::initialize` method to borrow `self`,
+ and return `&mut UnwindContext`. Deleted the `InitializedUnwindContext` type.
+ [#395](https://github.com/gimli-rs/gimli/pull/395)
+
+* Deleted the `UnwindSection` type parameters from the `CommonInformationEntry`,
+ `FrameDescriptionEntry`, `UninitializedUnwindContext`,
+ `UnwindContext`, and `UnwindTable` types.
+ [#399](https://github.com/gimli-rs/gimli/pull/399)
+
+* Changed the signature of the `get_cie` callback parameter for various functions.
+ The signature now matches the `UnwindSection::cie_from_offset` method, so
+ that method can be used as the parameter.
+ [#400](https://github.com/gimli-rs/gimli/pull/400)
+
+* Reduced the number of lifetime parameters for the `UnwindTable` type.
+ [#400](https://github.com/gimli-rs/gimli/pull/400)
+
+* Updated `fallible-iterator` to version 0.2.0.
+ [#407](https://github.com/gimli-rs/gimli/pull/407)
+
+* Added a parameter to the `Error::UnexpectedEof` enum variant.
+ [#408](https://github.com/gimli-rs/gimli/pull/408)
+
+### Added
+
+* Update to 2018 edition.
+ [#391](https://github.com/gimli-rs/gimli/pull/391)
+
+* Added the `FrameDescriptionEntry::unwind_info_for_address` method.
+ [#396](https://github.com/gimli-rs/gimli/pull/396)
+
+* Added the `FrameDescriptionEntry::rows` method.
+ [#396](https://github.com/gimli-rs/gimli/pull/396)
+
+* Added the `EhHdrTable::unwind_info_for_address` method.
+ [#400](https://github.com/gimli-rs/gimli/pull/400)
+
+* Added the `EhHdrTable::fde_for_address` method and deprecated the
+ `EhHdrTable::lookup_and_parse` method.
+ [#400](https://github.com/gimli-rs/gimli/pull/400)
+
+* Added the `EhHdrTable::pointer_to_offset` method.
+ [#400](https://github.com/gimli-rs/gimli/pull/400)
+
+* Added the `UnwindSection::fde_for_address` method.
+ [#396](https://github.com/gimli-rs/gimli/pull/396)
+
+* Added the `UnwindSection::fde_from_offset` method.
+ [#400](https://github.com/gimli-rs/gimli/pull/400)
+
+* Added the `UnwindSection::partial_fde_from_offset` method.
+ [#400](https://github.com/gimli-rs/gimli/pull/400)
+
+* Added the `Section::id` method.
+ [#406](https://github.com/gimli-rs/gimli/pull/406)
+
+* Added the `Dwarf::load` method, and corresponding methods for individual sections.
+ [#406](https://github.com/gimli-rs/gimli/pull/406)
+
+* Added the `Dwarf::borrow` method, and corresponding methods for individual sections.
+ [#406](https://github.com/gimli-rs/gimli/pull/406)
+
+* Added the `Dwarf::format_error` method.
+ [#408](https://github.com/gimli-rs/gimli/pull/408)
+
+* Added the `Dwarf::die_ranges` method.
+ [#417](https://github.com/gimli-rs/gimli/pull/417)
+
+* Added the `Dwarf::unit_ranges` method.
+ [#417](https://github.com/gimli-rs/gimli/pull/417)
+
+* Added support for writing `.debug_frame` and `.eh_frame` sections.
+ [#412](https://github.com/gimli-rs/gimli/pull/412)
+ [#419](https://github.com/gimli-rs/gimli/pull/419)
+
+### Fixed
+
+* The `code_alignment_factor` is now used when evaluating CFI instructions
+ that advance the location.
+ [#401](https://github.com/gimli-rs/gimli/pull/401)
+
+* Fixed parsing of pointers encoded with `DW_EH_PE_funcrel`.
+ [#402](https://github.com/gimli-rs/gimli/pull/402)
+
+* Use the FDE address encoding from the augmentation when parsing `DW_CFA_set_loc`.
+ [#403](https://github.com/gimli-rs/gimli/pull/403)
+
+* Fixed setting of `.eh_frame` base addresses in dwarfdump.
+ [#410](https://github.com/gimli-rs/gimli/pull/410)
+
+## 0.17.0
+
+Released 2019/02/21.
+
+The focus of this release has been on improving DWARF 5 support, and
+adding support for writing DWARF.
+
+### Breaking changes
+
+* Changed register values to a `Register` type instead of `u8`/`u64`.
+ [#328](https://github.com/gimli-rs/gimli/pull/328)
+
+* Replaced `BaseAddresses::set_cfi` with `set_eh_frame_hdr` and `set_eh_frame`.
+ Replaced `BaseAddresses::set_data` with `set_got`.
+ You should now use the same `BaseAddresses` value for parsing both
+ `.eh_frame` and `.eh_frame_hdr`.
+ [#351](https://github.com/gimli-rs/gimli/pull/351)
+
+* Renamed many types and functions related to `.debug_line`.
+ Renamed `LineNumberProgram` to `LineProgram`.
+ Renamed `IncompleteLineNumberProgram` to `IncompleteLineProgram`.
+ Renamed `CompleteLineNumberProgram` to `CompleteLineProgram`.
+ Renamed `LineNumberProgramHeader` to `LineProgramHeader`.
+ Renamed `LineNumberRow` to `LineRow`.
+ Renamed `StateMachine` to `LineRows`.
+ Renamed `Opcode` to `LineInstruction`.
+ Renamed `OpcodesIter` to `LineInstructions`.
+ Renamed `LineNumberSequence` to `LineSequence`.
+ [#359](https://github.com/gimli-rs/gimli/pull/359)
+
+* Added `Offset` type parameter to `AttributeValue`, `LineProgram`,
+ `IncompleteLineProgram`, `CompleteLineProgram`, `LineRows`, `LineInstruction`,
+ and `FileEntry`.
+ [#324](https://github.com/gimli-rs/gimli/pull/324)
+
+* Changed `FileEntry::path_name`, `FileEntry::directory`, and
+ `LineProgramHeader::directory` to return an `AttributeValue` instead
+ of a `Reader`.
+ [#366](https://github.com/gimli-rs/gimli/pull/366)
+
+* Renamed `FileEntry::last_modification` to `FileEntry::timestamp`
+ and renamed `FileEntry::length` to `FileEntry::size`.
+ [#366](https://github.com/gimli-rs/gimli/pull/366)
+
+* Added an `Encoding` type. Changed many functions that previously accepted
+ `Format`, version or address size parameters to accept an `Encoding`
+ parameter instead.
+ Notable changes are `LocationLists::locations`, `RangeLists::ranges`,
+ and `Expression::evaluation`.
+ [#364](https://github.com/gimli-rs/gimli/pull/364)
+
+* Changed return type of `LocationLists::new` and `RangeLists::new`.
+ [#370](https://github.com/gimli-rs/gimli/pull/370)
+
+* Added parameters to `LocationLists::locations` and `RangeLists::ranges`
+ to support `.debug_addr`.
+ [#358](https://github.com/gimli-rs/gimli/pull/358)
+
+* Added more `AttributeValue` variants: `DebugAddrBase`, `DebugAddrIndex`,
+ `DebugLocListsBase`, `DebugLocListsIndex`, `DebugRngListsBase`, `DebugRngListsIndex`,
+ `DebugStrOffsetsBase`, `DebugStrOffsetsIndex`, `DebugLineStrRef`.
+ [#358](https://github.com/gimli-rs/gimli/pull/358)
+
+* Changed `AttributeValue::Data*` attributes to native endian integers instead
+ of byte arrays.
+ [#365](https://github.com/gimli-rs/gimli/pull/365)
+
+* Replaced `EvaluationResult::TextBase` with
+ `EvaluationResult::RequiresRelocatedAddress`. The handling of `TextBase`
+ was incorrect.
+ [#335](https://github.com/gimli-rs/gimli/pull/335)
+
+* Added `EvaluationResult::IndexedAddress` for operations that require an
+ address from `.debug_addr`.
+ [#358](https://github.com/gimli-rs/gimli/pull/358)
+
+* Added `Reader::read_slice`. Added a default implementation of
+ `Reader::read_u8_array` which uses this.
+ [#358](https://github.com/gimli-rs/gimli/pull/358)
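+
+  A short sketch of the default implementation in use (not from the upstream
+  docs; it assumes the `read` feature and uses `EndianSlice` as the reader):
+
+  ```rust
+  use gimli::{EndianSlice, LittleEndian, Reader};
+
+  fn main() -> gimli::Result<()> {
+      let bytes = [1u8, 2, 3, 4, 5];
+      let mut reader = EndianSlice::new(&bytes, LittleEndian);
+      // read_u8_array has a default implementation in terms of read_slice.
+      let head: [u8; 4] = reader.read_u8_array()?;
+      assert_eq!(head, [1, 2, 3, 4]);
+      Ok(())
+  }
+  ```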
+
+### Added
+
+* Added initial support for writing DWARF. This is targeted at supporting
+ line number information only.
+ [#340](https://github.com/gimli-rs/gimli/pull/340)
+ [#344](https://github.com/gimli-rs/gimli/pull/344)
+ [#346](https://github.com/gimli-rs/gimli/pull/346)
+ [#361](https://github.com/gimli-rs/gimli/pull/361)
+ [#362](https://github.com/gimli-rs/gimli/pull/362)
+ [#365](https://github.com/gimli-rs/gimli/pull/365)
+ [#368](https://github.com/gimli-rs/gimli/pull/368)
+ [#382](https://github.com/gimli-rs/gimli/pull/382)
+
+* Added `read` and `write` Cargo features. Both are enabled by default.
+ [#343](https://github.com/gimli-rs/gimli/pull/343)
+
+* Added support for reading DWARF 5 `.debug_line` and `.debug_line_str` sections.
+ [#366](https://github.com/gimli-rs/gimli/pull/366)
+
+* Added support for reading DWARF 5 `.debug_str_offsets` sections, including
+ parsing `DW_FORM_strx*` attributes.
+ [#358](https://github.com/gimli-rs/gimli/pull/358)
+
+* Added support for reading DWARF 5 `.debug_addr` sections, including parsing
+ `DW_FORM_addrx*` attributes and evaluating `DW_OP_addrx` and `DW_OP_constx`
+ operations.
+ [#358](https://github.com/gimli-rs/gimli/pull/358)
+
+* Added support for reading DWARF 5 indexed addresses and offsets in
+ `.debug_loclists` and `.debug_rnglists`, including parsing `DW_FORM_rnglistx`
+ and `DW_FORM_loclistx` attributes.
+ [#358](https://github.com/gimli-rs/gimli/pull/358)
+
+* Added high level `Dwarf` and `Unit` types. Existing code does not need to
+ switch to using these types, but doing so will make DWARF 5 support simpler.
+ [#352](https://github.com/gimli-rs/gimli/pull/352)
+ [#380](https://github.com/gimli-rs/gimli/pull/380)
+ [#381](https://github.com/gimli-rs/gimli/pull/381)
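+
+  A minimal sketch of the high level API as it looks in this vendored 0.26.2
+  release (not from the upstream docs; empty sections stand in for data loaded
+  from a real object file):
+
+  ```rust
+  use gimli::{Dwarf, EndianSlice, LittleEndian};
+
+  fn main() -> gimli::Result<()> {
+      // Each section is loaded by a callback; empty slices stand in for real data.
+      let dwarf: Dwarf<EndianSlice<'static, LittleEndian>> =
+          Dwarf::load(|_id| Ok::<_, gimli::Error>(EndianSlice::new(&[], LittleEndian)))?;
+
+      // Iterate compilation units and their debugging information entries.
+      let mut units = dwarf.units();
+      while let Some(header) = units.next()? {
+          let unit = dwarf.unit(header)?;
+          let mut entries = unit.entries();
+          while let Some((_depth_delta, entry)) = entries.next_dfs()? {
+              println!("{}", entry.tag());
+          }
+      }
+      Ok(())
+  }
+  ```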
+
+* Added `EhFrame::set_address_size` and `DebugFrame::set_address_size` methods
+ to allow parsing non-native CFI sections. The default address size is still
+ the native size.
+ [#325](https://github.com/gimli-rs/gimli/pull/325)
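+
+  A small sketch (not from the upstream docs) of preparing to parse 32-bit CFI
+  on a 64-bit host; the empty section is only a placeholder:
+
+  ```rust
+  use gimli::{DebugFrame, LittleEndian};
+
+  fn main() {
+      let data = [];
+      let mut debug_frame = DebugFrame::new(&data, LittleEndian);
+      // Override the default (native) address size before parsing entries.
+      debug_frame.set_address_size(4);
+  }
+  ```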
+
+* Added architecture specific definitions for `Register` values and names.
+ Changed dwarfdump to print them.
+ [#328](https://github.com/gimli-rs/gimli/pull/328)
+
+* Added support for reading relocatable DWARF sections.
+ [#337](https://github.com/gimli-rs/gimli/pull/337)
+
+* Added parsing of `DW_FORM_data16`.
+ [#366](https://github.com/gimli-rs/gimli/pull/366)
+
+### Fixed
+
+* Fixed parsing DWARF 5 ranges with `start == end == 0`.
+ [#323](https://github.com/gimli-rs/gimli/pull/323)
+
+* Changed `LineRows` to be covariant in its `Reader` type parameter.
+ [#324](https://github.com/gimli-rs/gimli/pull/324)
+
+* Fixed handling of empty units in dwarfdump.
+ [#330](https://github.com/gimli-rs/gimli/pull/330)
+
+* Fixed `UnitHeader::length_including_self` for `Dwarf64`.
+ [#342](https://github.com/gimli-rs/gimli/pull/342)
+
+* Fixed parsing of `DW_CFA_set_loc`.
+ [#355](https://github.com/gimli-rs/gimli/pull/355)
+
+* Fixed handling of multiple headers in `.debug_loclists` and `.debug_rnglists`.
+ [#370](https://github.com/gimli-rs/gimli/pull/370)
+
+--------------------------------------------------------------------------------
+
+## 0.16.1
+
+Released 2018/08/28.
+
+### Added
+
+* Added `EhFrameHdr::lookup_and_parse`. [#316][]
+* Added support for `DW_CFA_GNU_args_size`. [#319][]
+
+### Fixed
+
+* Implement `Send`/`Sync` for `SubRange`. [#305][]
+* Fixed `alloc` support on nightly. [#306][] [#310][]
+
+[#305]: https://github.com/gimli-rs/gimli/pull/305
+[#306]: https://github.com/gimli-rs/gimli/pull/306
+[#310]: https://github.com/gimli-rs/gimli/pull/310
+[#316]: https://github.com/gimli-rs/gimli/pull/316
+[#319]: https://github.com/gimli-rs/gimli/pull/319
+
+--------------------------------------------------------------------------------
+
+## 0.16.0
+
+Released 2018/06/01.
+
+### Added
+
+* Added support for building in `#![no_std]` environments, when the `alloc`
+ crate is available. Disable the "std" feature and enable the "alloc"
+ feature. [#138][] [#271][]
+
+* Added support for DWARF 5 `.debug_rnglists` and `.debug_loclists`
+ sections. [#272][]
+
+* Added support for DWARF 5 `DW_FORM_ref_sup` and `DW_FORM_strp_sup` attribute
+ forms. [#288][]
+
+* Added support for DWARF 5 operations on typed values. [#293][]
+
+* A `dwarf-validate` example program that checks the integrity of the given
+ DWARF and its references between sections. [#290][]
+
+* Added the `EndianReader<T>` type, an easy way to define a custom `Reader`
+ implementation with a reference to a generic buffer of bytes and an associated
+ endianity. [#298][] [#302][]
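+
+  A short sketch (not from the upstream docs) using the `EndianRcSlice` alias,
+  as provided in this vendored release, for `EndianReader` over an `Rc<[u8]>`
+  buffer:
+
+  ```rust
+  use std::rc::Rc;
+
+  use gimli::{EndianRcSlice, LittleEndian, Reader};
+
+  fn main() -> gimli::Result<()> {
+      // A reference-counted, cheaply cloneable byte buffer wrapped as a Reader.
+      let buf: Rc<[u8]> = Rc::from(&[0x2a, 0x00, 0x00, 0x00][..]);
+      let mut reader = EndianRcSlice::new(buf, LittleEndian);
+      assert_eq!(reader.read_u32()?, 0x2a);
+      Ok(())
+  }
+  ```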
+
+### Changed
+
+* Various speed improvements for evaluating `.debug_line` line number
+ programs. [#276][]
+
+* The example `dwarfdump` clone is a [whole lot faster
+ now][dwarfdump-faster]. [#282][] [#284][] [#285][]
+
+### Deprecated
+
+* `EndianBuf` has been renamed to `EndianSlice`, use that name instead. [#295][]
+
+### Fixed
+
+* Evaluating the `DW_CFA_restore_state` opcode properly maintains the current
+ location. Previously it would incorrectly restore the old location when
+ popping from evaluation stack. [#274][]
+
+[#271]: https://github.com/gimli-rs/gimli/issues/271
+[#138]: https://github.com/gimli-rs/gimli/issues/138
+[#274]: https://github.com/gimli-rs/gimli/issues/274
+[#272]: https://github.com/gimli-rs/gimli/issues/272
+[#276]: https://github.com/gimli-rs/gimli/issues/276
+[#282]: https://github.com/gimli-rs/gimli/issues/282
+[#285]: https://github.com/gimli-rs/gimli/issues/285
+[#284]: https://github.com/gimli-rs/gimli/issues/284
+[#288]: https://github.com/gimli-rs/gimli/issues/288
+[#290]: https://github.com/gimli-rs/gimli/issues/290
+[#293]: https://github.com/gimli-rs/gimli/issues/293
+[#295]: https://github.com/gimli-rs/gimli/issues/295
+[#298]: https://github.com/gimli-rs/gimli/issues/298
+[#302]: https://github.com/gimli-rs/gimli/issues/302
+[dwarfdump-faster]: https://robert.ocallahan.org/2018/03/speeding-up-dwarfdump-with-rust.html
+
+--------------------------------------------------------------------------------
+
+## 0.15.0
+
+Released 2017/12/01.
+
+### Added
+
+* Added the `EndianBuf::to_string()` method. [#233][]
+
+* Added more robust error handling in our example `dwarfdump` clone. [#234][]
+
+* Added `FrameDescriptionEntry::initial_address` method. [#237][]
+
+* Added `FrameDescriptionEntry::len` method. [#237][]
+
+* Added the `FrameDescriptionEntry::entry_len` method. [#241][]
+
+* Added the `CommonInformationEntry::offset` method. [#241][]
+
+* Added the `CommonInformationEntry::entry_len` method. [#241][]
+
+* Added the `CommonInformationEntry::version` method. [#241][]
+
+* Added the `CommonInformationEntry::augmentation` method. [#241][]
+
+* Added the `CommonInformationEntry::code_alignment_factor` method. [#241][]
+
+* Added the `CommonInformationEntry::data_alignment_factor` method. [#241][]
+
+* Added the `CommonInformationEntry::return_address_register` method. [#241][]
+
+* Added support for printing `.eh_frame` sections to our example `dwarfdump`
+ clone. [#241][]
+
+* Added support for parsing the `.eh_frame_hdr` section. On Linux, the
+ `.eh_frame_hdr` section provides a pointer to the already-mapped-in-memory
+ `.eh_frame` data, so that it doesn't need to be duplicated, and a binary
+ search table of its entries for faster unwinding information lookups. [#250][]
+
+* Added support for parsing DWARF 5 compilation unit headers. [#257][]
+
+* Added support for DWARF 5's `DW_FORM_implicit_const`. [#257][]
+
+### Changed
+
+* Unwinding methods now give ownership of the unwinding context back to the
+ caller if errors are encountered, not just on the success path. This allows
+ recovering from errors in signal-safe code, where constructing a new unwinding
+ context is not an option because it requires allocation. This is a **breaking
+ change** affecting `UnwindSection::unwind_info_for_address` and
+ `UninitializedUnwindContext::initialize`. [#241][]
+
+* `CfaRule` and `RegisterRule` now expose their `DW_OP` expressions as
+ `Expression`. This is a minor **breaking change**. [#241][]
+
+* The `Error::UnknownVersion` variant now contains the unknown version
+ number. This is a minor **breaking change**. [#245][]
+
+* `EvaluationResult::RequiresEntryValue` requires an `Expression` instead of a
+ `Reader` now. This is a minor **breaking change**. [#256][]
+
+
+[#233]: https://github.com/gimli-rs/gimli/pull/233
+[#234]: https://github.com/gimli-rs/gimli/pull/234
+[#237]: https://github.com/gimli-rs/gimli/pull/237
+[#241]: https://github.com/gimli-rs/gimli/pull/241
+[#245]: https://github.com/gimli-rs/gimli/pull/245
+[#250]: https://github.com/gimli-rs/gimli/pull/250
+[#256]: https://github.com/gimli-rs/gimli/pull/256
+[#257]: https://github.com/gimli-rs/gimli/pull/257
+
+--------------------------------------------------------------------------------
+
+## 0.14.0
+
+Released 2017/08/08.
+
+### Added
+
+* All `pub` types now `derive(Hash)`. [#192][]
+
+* All the constants from DWARF 5 are now defined. [#193][]
+
+* Added support for the `DW_OP_GNU_parameter_ref` GNU extension when parsing and
+  evaluating DWARF opcodes. [#208][]
+
+* Improved LEB128 parsing performance. [#216][]
+
+* Improved `.debug_{aranges,pubnames,pubtypes}` parsing performance. [#218][]
+
+* Added the ability to choose endianity dynamically at run time, rather than
+ only statically at compile time. [#219][]
+
+### Changed
+
+* The biggest change of this release is that `gimli` no longer requires the
+  object file's sections to be fully loaded into memory. This enables using `gimli`
+ on 32 bit platforms where there often isn't enough contiguous virtual memory
+ address space to load debugging information into. The default behavior is
+ still geared for 64 bit platforms, where address space overfloweth, and you
+  can still load whole sections of the object file (or the entire object
+ file) into memory. This is abstracted over with the `gimli::Reader`
+ trait. This manifests as small (but many) breaking changes to much of the
+ public API. [#182][]
+
+### Fixed
+
+* The `DW_END_*` constants for defining endianity of a compilation unit were
+ previously incorrect. [#193][]
+
+* The `DW_OP_addr` opcode is relative to the base address of the `.text` section
+ of the binary, but we were incorrectly treating it as an absolute value. [#210][]
+
+[GitHub]: https://github.com/gimli-rs/gimli
+[crates.io]: https://crates.io/crates/gimli
+[contributing]: https://github.com/gimli-rs/gimli/blob/master/CONTRIBUTING.md
+[easy]: https://github.com/gimli-rs/gimli/issues?q=is%3Aopen+is%3Aissue+label%3Aeasy
+[#192]: https://github.com/gimli-rs/gimli/pull/192
+[#193]: https://github.com/gimli-rs/gimli/pull/193
+[#182]: https://github.com/gimli-rs/gimli/issues/182
+[#208]: https://github.com/gimli-rs/gimli/pull/208
+[#210]: https://github.com/gimli-rs/gimli/pull/210
+[#216]: https://github.com/gimli-rs/gimli/pull/216
+[#218]: https://github.com/gimli-rs/gimli/pull/218
+[#219]: https://github.com/gimli-rs/gimli/pull/219
diff --git a/vendor/gimli-0.26.2/CONTRIBUTING.md b/vendor/gimli-0.26.2/CONTRIBUTING.md
new file mode 100644
index 000000000..4f9e574ce
--- /dev/null
+++ b/vendor/gimli-0.26.2/CONTRIBUTING.md
@@ -0,0 +1,137 @@
+# Contributing to `gimli`
+
+Hi! We'd love to have your contributions! If you want help or mentorship, reach
+out to us in a GitHub issue, or ping `fitzgen` in `#rust` on `irc.mozilla.org`.
+
+* [Code of Conduct](#coc)
+* [Filing an Issue](#issues)
+* [Building `gimli`](#building)
+* [Testing `gimli`](#testing)
+ * [Test Coverage](#coverage)
+ * [Using `test-assembler`](#test-assembler)
+ * [Fuzzing](#fuzzing)
+* [Benchmarking](#benchmarking)
+* [Style](#style)
+
+## <a id="coc"></a> Code of Conduct
+
+We abide by the
+[Rust Code of Conduct](https://www.rust-lang.org/en-US/conduct.html) and ask
+that you do as well.
+
+## <a id="issues"></a> Filing an Issue
+
+Think you've found a bug? File an issue! To help us understand and reproduce the
+issue, provide us with:
+
+* The (preferably minimal) test case
+* Steps to reproduce the issue using the test case
+* The expected result of following those steps
+* The actual result of following those steps
+
+Definitely file an issue if you see an unexpected panic originating from within
+`gimli`! `gimli` should never panic unless it is explicitly documented to panic
+in the specific circumstances provided.
+
+## <a id="building"></a> Building `gimli`
+
+`gimli` should always build on stable `rustc`, but we recommend using
+[`rustup`](https://www.rustup.rs/) so you can switch to nightly `rustc` and run
+benchmarks.
+
+To build `gimli`:
+
+```
+$ cargo build
+```
+
+## <a id="testing"></a> Testing `gimli`
+
+Run the tests with `cargo`:
+
+```
+$ cargo test
+```
+
+### <a id="coverage"></a> Test Coverage
+
+If you have `kcov` installed on Linux, then you can generate code coverage
+results using the `coverage` script in the root of the repository, and view them
+at `target/kcov/index.html`. Otherwise you can create a pull request and view
+the coverage results on coveralls.io.
+
+```
+$ ./coverage
+```
+
+The ideal we aim to reach is having our unit tests exercise every branch in
+`gimli`. We allow an exception for branches which propagate errors inside a
+`try!(..)` invocation, but we *do* want to exercise the original error paths.
+
+Pull requests adding new code should ensure that this ideal is met.
+
+At the time of writing we have 94% test coverage according to our coveralls.io
+continuous integration. That number should generally stay the same or go up ;)
+This is a bit subjective, of course: a dip of a hundredth of a percent is just
+noise and doesn't matter.
+
+### <a id="test-assembler"></a> Using `test-assembler`
+
+We use the awesome
+[`test-assembler`](https://github.com/luser/rust-test-assembler) crate to
+construct binary test data. It makes building complex test cases readable.
+
+[Here is an example usage in `gimli`](https://github.com/gimli-rs/gimli/blob/156451f3fe6eeb2fa62b84b362c33fcb176e1171/src/loc.rs#L263)
+
+### <a id="fuzzing"></a> Fuzzing
+
+First, install `cargo fuzz`:
+
+```
+$ cargo install cargo-fuzz
+```
+
+Optionally, [set up the corpora for our fuzz targets by following these
+instructions](https://github.com/gimli-rs/gimli-libfuzzer-corpora/blob/master/README.md#using-these-corpora).
+
+Finally, run a fuzz target! In this case, we are running the `eh_frame` fuzz
+target:
+
+```
+$ cargo fuzz run eh_frame
+```
+
+The fuzz target definitions live in `fuzz/fuzz_targets/*`. You can add new ones
+via `cargo fuzz add <my_new_target>`.
+
+## <a id="benchmarking"></a> Benchmarking
+
+The benchmarks require nightly `rustc`, so use `rustup`:
+
+```
+$ rustup run nightly cargo bench
+```
+
+We aim to be the fastest DWARF library. Period.
+
+Please provide before and after benchmark results with your pull requests. You
+may also find [`cargo benchcmp`](https://github.com/BurntSushi/cargo-benchcmp)
+handy for comparing results.
+
+Pull requests adding `#[bench]` micro-benchmarks that exercise a new edge case
+are very welcome!
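+
+A bare-bones sketch of what such a benchmark might look like in a file under
+`benches/` (`parse_something` is only a stand-in for the code path being
+measured):
+
+```rust
+#![feature(test)]
+extern crate test;
+
+use test::Bencher;
+
+// Stand-in for the gimli code path you want to measure.
+fn parse_something() -> usize {
+    (0..1024usize).sum()
+}
+
+#[bench]
+fn bench_parse_something(b: &mut Bencher) {
+    // black_box keeps the optimizer from discarding the result.
+    b.iter(|| test::black_box(parse_something()));
+}
+```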
+
+## <a id="style"></a> Style
+
+We use `rustfmt` to automatically format and style all of our code.
+
+To install `rustfmt`:
+
+```
+$ rustup component add rustfmt-preview
+```
+
+To run `rustfmt` on `gimli`:
+
+```
+$ cargo fmt
+```
diff --git a/vendor/gimli-0.26.2/Cargo.lock b/vendor/gimli-0.26.2/Cargo.lock
new file mode 100644
index 000000000..b4a719a0c
--- /dev/null
+++ b/vendor/gimli-0.26.2/Cargo.lock
@@ -0,0 +1,358 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "adler"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
+
+[[package]]
+name = "aho-corasick"
+version = "0.7.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "autocfg"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a"
+
+[[package]]
+name = "byteorder"
+version = "1.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610"
+
+[[package]]
+name = "cfg-if"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
+
+[[package]]
+name = "compiler_builtins"
+version = "0.1.51"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3587b3669d6f2c1cfd34c475272dabcfef29d52703933f6f72ebb36d6bd81a97"
+
+[[package]]
+name = "crc32fast"
+version = "1.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "81156fece84ab6a9f2afdb109ce3ae577e42b1228441eded99bd77f627953b1a"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "crossbeam"
+version = "0.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4ae5588f6b3c3cb05239e90bd110f257254aecd01e4635400391aeae07497845"
+dependencies = [
+ "cfg-if",
+ "crossbeam-channel",
+ "crossbeam-deque",
+ "crossbeam-epoch",
+ "crossbeam-queue",
+ "crossbeam-utils",
+]
+
+[[package]]
+name = "crossbeam-channel"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "06ed27e177f16d65f0f0c22a213e17c696ace5dd64b14258b52f9417ccb52db4"
+dependencies = [
+ "cfg-if",
+ "crossbeam-utils",
+]
+
+[[package]]
+name = "crossbeam-deque"
+version = "0.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6455c0ca19f0d2fbf751b908d5c55c1f5cbc65e03c4225427254b46890bdde1e"
+dependencies = [
+ "cfg-if",
+ "crossbeam-epoch",
+ "crossbeam-utils",
+]
+
+[[package]]
+name = "crossbeam-epoch"
+version = "0.9.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4ec02e091aa634e2c3ada4a392989e7c3116673ef0ac5b72232439094d73b7fd"
+dependencies = [
+ "cfg-if",
+ "crossbeam-utils",
+ "lazy_static",
+ "memoffset",
+ "scopeguard",
+]
+
+[[package]]
+name = "crossbeam-queue"
+version = "0.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9b10ddc024425c88c2ad148c1b0fd53f4c6d38db9697c9f1588381212fa657c9"
+dependencies = [
+ "cfg-if",
+ "crossbeam-utils",
+]
+
+[[package]]
+name = "crossbeam-utils"
+version = "0.8.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d82cfc11ce7f2c3faef78d8a684447b40d503d9681acebed6cb728d45940c4db"
+dependencies = [
+ "cfg-if",
+ "lazy_static",
+]
+
+[[package]]
+name = "either"
+version = "1.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457"
+
+[[package]]
+name = "fallible-iterator"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7"
+
+[[package]]
+name = "flate2"
+version = "1.0.22"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e6988e897c1c9c485f43b47a529cef42fde0547f9d8d41a7062518f1d8fc53f"
+dependencies = [
+ "cfg-if",
+ "crc32fast",
+ "libc",
+ "miniz_oxide",
+]
+
+[[package]]
+name = "getopts"
+version = "0.2.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "14dbbfd5c71d70241ecf9e6f13737f7b5ce823821063188d7e46c41d371eebd5"
+dependencies = [
+ "unicode-width",
+]
+
+[[package]]
+name = "gimli"
+version = "0.26.2"
+dependencies = [
+ "compiler_builtins",
+ "crossbeam",
+ "fallible-iterator",
+ "getopts",
+ "indexmap",
+ "memmap2",
+ "num_cpus",
+ "object",
+ "rayon",
+ "regex",
+ "rustc-std-workspace-alloc",
+ "rustc-std-workspace-core",
+ "stable_deref_trait",
+ "test-assembler",
+ "typed-arena",
+]
+
+[[package]]
+name = "hashbrown"
+version = "0.11.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e"
+
+[[package]]
+name = "hermit-abi"
+version = "0.1.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "indexmap"
+version = "1.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bc633605454125dec4b66843673f01c7df2b89479b32e0ed634e43a91cff62a5"
+dependencies = [
+ "autocfg",
+ "hashbrown",
+]
+
+[[package]]
+name = "lazy_static"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
+
+[[package]]
+name = "libc"
+version = "0.2.105"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "869d572136620d55835903746bcb5cdc54cb2851fd0aeec53220b4bb65ef3013"
+
+[[package]]
+name = "memchr"
+version = "2.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a"
+
+[[package]]
+name = "memmap2"
+version = "0.5.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3a79b39c93a7a5a27eeaf9a23b5ff43f1b9e0ad6b1cdd441140ae53c35613fc7"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "memoffset"
+version = "0.6.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "59accc507f1338036a0477ef61afdae33cde60840f4dfe481319ce3ad116ddf9"
+dependencies = [
+ "autocfg",
+]
+
+[[package]]
+name = "miniz_oxide"
+version = "0.4.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a92518e98c078586bc6c934028adcca4c92a53d6a958196de835170a01d84e4b"
+dependencies = [
+ "adler",
+ "autocfg",
+]
+
+[[package]]
+name = "num_cpus"
+version = "1.13.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "05499f3756671c15885fee9034446956fff3f243d6077b91e5767df161f766b3"
+dependencies = [
+ "hermit-abi",
+ "libc",
+]
+
+[[package]]
+name = "object"
+version = "0.29.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "21158b2c33aa6d4561f1c0a6ea283ca92bc54802a93b263e910746d679a7eb53"
+dependencies = [
+ "flate2",
+ "memchr",
+ "wasmparser",
+]
+
+[[package]]
+name = "rayon"
+version = "1.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c06aca804d41dbc8ba42dfd964f0d01334eceb64314b9ecf7c5fad5188a06d90"
+dependencies = [
+ "autocfg",
+ "crossbeam-deque",
+ "either",
+ "rayon-core",
+]
+
+[[package]]
+name = "rayon-core"
+version = "1.9.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d78120e2c850279833f1dd3582f730c4ab53ed95aeaaaa862a2a5c71b1656d8e"
+dependencies = [
+ "crossbeam-channel",
+ "crossbeam-deque",
+ "crossbeam-utils",
+ "lazy_static",
+ "num_cpus",
+]
+
+[[package]]
+name = "regex"
+version = "1.5.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d07a8629359eb56f1e2fb1652bb04212c072a87ba68546a04065d525673ac461"
+dependencies = [
+ "aho-corasick",
+ "memchr",
+ "regex-syntax",
+]
+
+[[package]]
+name = "regex-syntax"
+version = "0.6.25"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b"
+
+[[package]]
+name = "rustc-std-workspace-alloc"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ff66d57013a5686e1917ed6a025d54dd591fcda71a41fe07edf4d16726aefa86"
+
+[[package]]
+name = "rustc-std-workspace-core"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1956f5517128a2b6f23ab2dadf1a976f4f5b27962e7724c2bf3d45e539ec098c"
+
+[[package]]
+name = "scopeguard"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
+
+[[package]]
+name = "stable_deref_trait"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
+
+[[package]]
+name = "test-assembler"
+version = "0.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9a6da51de149453f5c43fa67d5e73cccd75b3c5727a38a2f18c5f3c47f2db582"
+dependencies = [
+ "byteorder",
+]
+
+[[package]]
+name = "typed-arena"
+version = "2.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0685c84d5d54d1c26f7d3eb96cd41550adb97baed141a761cf335d3d33bcd0ae"
+
+[[package]]
+name = "unicode-width"
+version = "0.1.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3ed742d4ea2bd1176e236172c8429aaf54486e7ac098db29ffe6529e0ce50973"
+
+[[package]]
+name = "wasmparser"
+version = "0.57.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "32fddd575d477c6e9702484139cf9f23dcd554b06d185ed0f56c857dd3a47aa6"
diff --git a/vendor/gimli-0.26.2/Cargo.toml b/vendor/gimli-0.26.2/Cargo.toml
new file mode 100644
index 000000000..f36ccd936
--- /dev/null
+++ b/vendor/gimli-0.26.2/Cargo.toml
@@ -0,0 +1,146 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies.
+#
+# If you are reading this file be aware that the original Cargo.toml
+# will likely look very different (and much more reasonable).
+# See Cargo.toml.orig for the original contents.
+
+[package]
+edition = "2018"
+name = "gimli"
+version = "0.26.2"
+exclude = [
+ "/releases/*",
+ "/.github",
+]
+description = "A library for reading and writing the DWARF debugging format."
+documentation = "https://docs.rs/gimli"
+readme = "./README.md"
+keywords = [
+ "DWARF",
+ "debug",
+ "ELF",
+ "eh_frame",
+]
+categories = [
+ "development-tools::debugging",
+ "development-tools::profiling",
+ "parser-implementations",
+]
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/gimli-rs/gimli"
+
+[profile.bench]
+codegen-units = 1
+debug = true
+split-debuginfo = "packed"
+
+[profile.test]
+split-debuginfo = "packed"
+
+[[example]]
+name = "simple"
+required-features = ["read"]
+
+[[example]]
+name = "simple_line"
+required-features = ["read"]
+
+[[example]]
+name = "dwarfdump"
+required-features = [
+ "read",
+ "std",
+]
+
+[[example]]
+name = "dwarf-validate"
+required-features = [
+ "read",
+ "std",
+]
+
+[dependencies.alloc]
+version = "1.0.0"
+optional = true
+package = "rustc-std-workspace-alloc"
+
+[dependencies.compiler_builtins]
+version = "0.1.2"
+optional = true
+
+[dependencies.core]
+version = "1.0.0"
+optional = true
+package = "rustc-std-workspace-core"
+
+[dependencies.fallible-iterator]
+version = "0.2.0"
+optional = true
+default-features = false
+
+[dependencies.indexmap]
+version = "1.0.2"
+optional = true
+
+[dependencies.stable_deref_trait]
+version = "1.1.0"
+optional = true
+default-features = false
+
+[dev-dependencies.crossbeam]
+version = "0.8"
+
+[dev-dependencies.getopts]
+version = "0.2"
+
+[dev-dependencies.memmap2]
+version = "0.5.5"
+
+[dev-dependencies.num_cpus]
+version = "1"
+
+[dev-dependencies.object]
+version = "0.29.0"
+features = ["wasm"]
+
+[dev-dependencies.rayon]
+version = "1.0"
+
+[dev-dependencies.regex]
+version = "1"
+
+[dev-dependencies.test-assembler]
+version = "0.1.3"
+
+[dev-dependencies.typed-arena]
+version = "2"
+
+[features]
+default = [
+ "read",
+ "write",
+ "std",
+ "fallible-iterator",
+ "endian-reader",
+]
+endian-reader = [
+ "read",
+ "stable_deref_trait",
+]
+read = ["read-core"]
+read-core = []
+rustc-dep-of-std = [
+ "core",
+ "alloc",
+ "compiler_builtins",
+]
+std = [
+ "fallible-iterator/std",
+ "stable_deref_trait/std",
+]
+write = ["indexmap"]
diff --git a/vendor/gimli-0.26.2/LICENSE-APACHE b/vendor/gimli-0.26.2/LICENSE-APACHE
new file mode 100644
index 000000000..16fe87b06
--- /dev/null
+++ b/vendor/gimli-0.26.2/LICENSE-APACHE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+Copyright [yyyy] [name of copyright owner]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/vendor/gimli-0.26.2/LICENSE-MIT b/vendor/gimli-0.26.2/LICENSE-MIT
new file mode 100644
index 000000000..e69282e38
--- /dev/null
+++ b/vendor/gimli-0.26.2/LICENSE-MIT
@@ -0,0 +1,25 @@
+Copyright (c) 2015 The Rust Project Developers
+
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
diff --git a/vendor/gimli-0.26.2/README.md b/vendor/gimli-0.26.2/README.md
new file mode 100644
index 000000000..19e7bbd0e
--- /dev/null
+++ b/vendor/gimli-0.26.2/README.md
@@ -0,0 +1,78 @@
+# `gimli`
+
+[![](https://img.shields.io/crates/v/gimli.svg) ![](https://img.shields.io/crates/d/gimli.svg)](https://crates.io/crates/gimli)
+[![](https://docs.rs/gimli/badge.svg)](https://docs.rs/gimli/)
+[![Build Status](https://github.com/gimli-rs/gimli/workflows/Rust/badge.svg)](https://github.com/gimli-rs/gimli/actions)
+[![Coverage Status](https://coveralls.io/repos/github/gimli-rs/gimli/badge.svg?branch=master)](https://coveralls.io/github/gimli-rs/gimli?branch=master)
+
+`gimli` is a blazing fast library for consuming the
+[DWARF debugging format](https://dwarfstd.org/).
+
+* **Zero copy:** everything is just a reference to the original input buffer. No
+ copies of the input data get made.
+
+* **Lazy:** you can iterate compilation units without parsing their
+ contents. Parse only as many debugging information entry (DIE) trees as you
+ iterate over. `gimli` also uses `DW_AT_sibling` references to avoid parsing a
+ DIE's children to find its next sibling, when possible.
+
+* **Cross-platform:** `gimli` makes no assumptions about what kind of object
+  file you're working with. The flip side is that it's up to you to provide an
+  ELF loader on Linux or a Mach-O loader on macOS.
+
+ * Unsure which object file parser to use? Try the cross-platform
+ [`object`](https://github.com/gimli-rs/object) crate. See the
+    [`examples/`](./examples) directory for usage with `gimli`, or the sketch
+    below.
+
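+A condensed sketch of that combination, adapted from the bundled
+[`simple.rs`](./examples/simple.rs) example (error handling is simplified, and
+the `object` crate is assumed to be a dependency alongside `gimli`):
+
+```rust
+use object::{Object, ObjectSection};
+use std::{borrow, env, error::Error, fs};
+
+fn main() -> Result<(), Box<dyn Error>> {
+    // Read the file to inspect (first command-line argument).
+    let data = fs::read(env::args().nth(1).expect("usage: dump <file>"))?;
+    let object = object::File::parse(&*data)?;
+    let endian = if object.is_little_endian() {
+        gimli::RunTimeEndian::Little
+    } else {
+        gimli::RunTimeEndian::Big
+    };
+
+    // Load each DWARF section as a `Cow<[u8]>`, or an empty slice if missing.
+    let load_section = |id: gimli::SectionId| -> Result<borrow::Cow<[u8]>, gimli::Error> {
+        Ok(object
+            .section_by_name(id.name())
+            .and_then(|section| section.uncompressed_data().ok())
+            .unwrap_or(borrow::Cow::Borrowed(&[][..])))
+    };
+    let dwarf_cow = gimli::Dwarf::load(&load_section)?;
+
+    // Borrow the loaded sections as zero-copy `EndianSlice`s for parsing.
+    let dwarf = dwarf_cow.borrow(|section| gimli::EndianSlice::new(section, endian));
+
+    // Iterate compilation units lazily; DIEs are only parsed as we walk them.
+    let mut units = dwarf.units();
+    while let Some(header) = units.next()? {
+        let unit = dwarf.unit(header)?;
+        let mut entries = unit.entries();
+        while let Some((_, entry)) = entries.next_dfs()? {
+            println!("<0x{:x}> {}", entry.offset().0, entry.tag());
+        }
+    }
+    Ok(())
+}
+```
+
+Each `EndianSlice` above is just a view into the section bytes, so parsing
+itself makes no copies.
+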
+## Install
+
+Add this to your `Cargo.toml`:
+
+```toml
+[dependencies]
+gimli = "0.26.2"
+```
+
+The minimum supported Rust version is 1.42.0.
+
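+The reader, the writer, and `std` support are gated behind Cargo features (see
+`Cargo.toml`). For example, a crate that only needs the DWARF reader in a
+`no_std` setting might instead depend on `gimli` like this:
+
+```toml
+[dependencies.gimli]
+version = "0.26.2"
+default-features = false
+features = ["read"]
+```
+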
+## Documentation
+
+* [Documentation on docs.rs](https://docs.rs/gimli/)
+
+* Example programs:
+
+ * [A simple `.debug_info` parser](./examples/simple.rs)
+
+ * [A simple `.debug_line` parser](./examples/simple_line.rs)
+
+ * [A `dwarfdump` clone](./examples/dwarfdump.rs)
+
+ * [An `addr2line` clone](https://github.com/gimli-rs/addr2line)
+
+ * [`ddbug`](https://github.com/gimli-rs/ddbug), a utility giving insight into
+ code generation by making debugging information readable.
+
+ * [`dwprod`](https://github.com/fitzgen/dwprod), a tiny utility to list the
+ compilers used to create each compilation unit within a shared library or
+ executable (via `DW_AT_producer`).
+
+ * [`dwarf-validate`](./examples/dwarf-validate.rs), a program to validate the
+ integrity of some DWARF and its references between sections and compilation
+ units.
+
+## License
+
+Licensed under either of
+
+ * Apache License, Version 2.0 ([`LICENSE-APACHE`](./LICENSE-APACHE) or https://www.apache.org/licenses/LICENSE-2.0)
+ * MIT license ([`LICENSE-MIT`](./LICENSE-MIT) or https://opensource.org/licenses/MIT)
+
+at your option.
+
+## Contribution
+
+See [CONTRIBUTING.md](./CONTRIBUTING.md) for hacking.
+
+Unless you explicitly state otherwise, any contribution intentionally submitted
+for inclusion in the work by you, as defined in the Apache-2.0 license, shall be
+dual licensed as above, without any additional terms or conditions.
diff --git a/vendor/gimli-0.26.2/benches/bench.rs b/vendor/gimli-0.26.2/benches/bench.rs
new file mode 100644
index 000000000..fb29df77c
--- /dev/null
+++ b/vendor/gimli-0.26.2/benches/bench.rs
@@ -0,0 +1,807 @@
+#![feature(test)]
+
+extern crate test;
+
+use gimli::{
+ AttributeValue, DebugAbbrev, DebugAddr, DebugAddrBase, DebugAranges, DebugInfo, DebugLine,
+ DebugLineOffset, DebugLoc, DebugLocLists, DebugPubNames, DebugPubTypes, DebugRanges,
+ DebugRngLists, Encoding, EndianSlice, EntriesTreeNode, Expression, LittleEndian, LocationLists,
+ Operation, RangeLists, RangeListsOffset, Reader, ReaderOffset,
+};
+use std::env;
+use std::fs::File;
+use std::io::Read;
+use std::path::PathBuf;
+use std::rc::Rc;
+
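+/// Read a raw DWARF section (e.g. "debug_info") that has been dumped to the
+/// `fixtures/self` directory as a standalone file.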
+pub fn read_section(section: &str) -> Vec<u8> {
+ let mut path = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| ".".into()));
+ path.push("./fixtures/self/");
+ path.push(section);
+
+ assert!(path.is_file());
+ let mut file = File::open(path).unwrap();
+
+ let mut buf = Vec::new();
+ file.read_to_end(&mut buf).unwrap();
+ buf
+}
+
+#[bench]
+fn bench_parsing_debug_abbrev(b: &mut test::Bencher) {
+ let debug_info = read_section("debug_info");
+ let debug_info = DebugInfo::new(&debug_info, LittleEndian);
+ let unit = debug_info
+ .units()
+ .next()
+ .expect("Should have at least one compilation unit")
+ .expect("And it should parse OK");
+
+ let debug_abbrev = read_section("debug_abbrev");
+
+ b.iter(|| {
+ let debug_abbrev = DebugAbbrev::new(&debug_abbrev, LittleEndian);
+ test::black_box(
+ unit.abbreviations(&debug_abbrev)
+ .expect("Should parse abbreviations"),
+ );
+ });
+}
+
+#[inline]
+fn impl_bench_parsing_debug_info<R: Reader>(
+ debug_info: DebugInfo<R>,
+ debug_abbrev: DebugAbbrev<R>,
+) {
+ let mut iter = debug_info.units();
+ while let Some(unit) = iter.next().expect("Should parse compilation unit") {
+ let abbrevs = unit
+ .abbreviations(&debug_abbrev)
+ .expect("Should parse abbreviations");
+
+ let mut cursor = unit.entries(&abbrevs);
+ while let Some((_, entry)) = cursor.next_dfs().expect("Should parse next dfs") {
+ let mut attrs = entry.attrs();
+ loop {
+ match attrs.next() {
+ Ok(Some(ref attr)) => {
+ test::black_box(attr);
+ }
+ Ok(None) => break,
+ e @ Err(_) => {
+ e.expect("Should parse entry's attribute");
+ }
+ }
+ }
+ }
+ }
+}
+
+#[bench]
+fn bench_parsing_debug_info(b: &mut test::Bencher) {
+ let debug_info = read_section("debug_info");
+ let debug_info = DebugInfo::new(&debug_info, LittleEndian);
+
+ let debug_abbrev = read_section("debug_abbrev");
+ let debug_abbrev = DebugAbbrev::new(&debug_abbrev, LittleEndian);
+
+ b.iter(|| impl_bench_parsing_debug_info(debug_info, debug_abbrev));
+}
+
+#[bench]
+fn bench_parsing_debug_info_with_endian_rc_slice(b: &mut test::Bencher) {
+ let debug_info = read_section("debug_info");
+ let debug_info = Rc::from(&debug_info[..]);
+ let debug_info = gimli::EndianRcSlice::new(debug_info, LittleEndian);
+ let debug_info = DebugInfo::from(debug_info);
+
+ let debug_abbrev = read_section("debug_abbrev");
+ let debug_abbrev = Rc::from(&debug_abbrev[..]);
+ let debug_abbrev = gimli::EndianRcSlice::new(debug_abbrev, LittleEndian);
+ let debug_abbrev = DebugAbbrev::from(debug_abbrev);
+
+ b.iter(|| impl_bench_parsing_debug_info(debug_info.clone(), debug_abbrev.clone()));
+}
+
+#[bench]
+fn bench_parsing_debug_info_tree(b: &mut test::Bencher) {
+ let debug_abbrev = read_section("debug_abbrev");
+ let debug_abbrev = DebugAbbrev::new(&debug_abbrev, LittleEndian);
+
+ let debug_info = read_section("debug_info");
+
+ b.iter(|| {
+ let debug_info = DebugInfo::new(&debug_info, LittleEndian);
+
+ let mut iter = debug_info.units();
+ while let Some(unit) = iter.next().expect("Should parse compilation unit") {
+ let abbrevs = unit
+ .abbreviations(&debug_abbrev)
+ .expect("Should parse abbreviations");
+
+ let mut tree = unit
+ .entries_tree(&abbrevs, None)
+ .expect("Should have entries tree");
+ let root = tree.root().expect("Should parse root entry");
+ parse_debug_info_tree(root);
+ }
+ });
+}
+
+fn parse_debug_info_tree<R: Reader>(node: EntriesTreeNode<R>) {
+ {
+ let mut attrs = node.entry().attrs();
+ loop {
+ match attrs.next() {
+ Ok(Some(ref attr)) => {
+ test::black_box(attr);
+ }
+ Ok(None) => break,
+ e @ Err(_) => {
+ e.expect("Should parse entry's attribute");
+ }
+ }
+ }
+ }
+ let mut children = node.children();
+ while let Some(child) = children.next().expect("Should parse child entry") {
+ parse_debug_info_tree(child);
+ }
+}
+
+#[bench]
+fn bench_parsing_debug_info_raw(b: &mut test::Bencher) {
+ let debug_abbrev = read_section("debug_abbrev");
+ let debug_abbrev = DebugAbbrev::new(&debug_abbrev, LittleEndian);
+
+ let debug_info = read_section("debug_info");
+
+ b.iter(|| {
+ let debug_info = DebugInfo::new(&debug_info, LittleEndian);
+
+ let mut iter = debug_info.units();
+ while let Some(unit) = iter.next().expect("Should parse compilation unit") {
+ let abbrevs = unit
+ .abbreviations(&debug_abbrev)
+ .expect("Should parse abbreviations");
+
+ let mut raw = unit
+ .entries_raw(&abbrevs, None)
+ .expect("Should have entries");
+ while !raw.is_empty() {
+ if let Some(abbrev) = raw
+ .read_abbreviation()
+ .expect("Should parse abbreviation code")
+ {
+ for spec in abbrev.attributes().iter().cloned() {
+ match raw.read_attribute(spec) {
+ Ok(ref attr) => {
+ test::black_box(attr);
+ }
+ e @ Err(_) => {
+ e.expect("Should parse attribute");
+ }
+ }
+ }
+ }
+ }
+ }
+ });
+}
+
+#[bench]
+fn bench_parsing_debug_aranges(b: &mut test::Bencher) {
+ let debug_aranges = read_section("debug_aranges");
+ let debug_aranges = DebugAranges::new(&debug_aranges, LittleEndian);
+
+ b.iter(|| {
+ let mut headers = debug_aranges.headers();
+ while let Some(header) = headers.next().expect("Should parse arange header OK") {
+ let mut entries = header.entries();
+ while let Some(arange) = entries.next().expect("Should parse arange entry OK") {
+ test::black_box(arange);
+ }
+ }
+ });
+}
+
+#[bench]
+fn bench_parsing_debug_pubnames(b: &mut test::Bencher) {
+ let debug_pubnames = read_section("debug_pubnames");
+ let debug_pubnames = DebugPubNames::new(&debug_pubnames, LittleEndian);
+
+ b.iter(|| {
+ let mut pubnames = debug_pubnames.items();
+ while let Some(pubname) = pubnames.next().expect("Should parse pubname OK") {
+ test::black_box(pubname);
+ }
+ });
+}
+
+#[bench]
+fn bench_parsing_debug_pubtypes(b: &mut test::Bencher) {
+ let debug_pubtypes = read_section("debug_pubtypes");
+ let debug_pubtypes = DebugPubTypes::new(&debug_pubtypes, LittleEndian);
+
+ b.iter(|| {
+ let mut pubtypes = debug_pubtypes.items();
+ while let Some(pubtype) = pubtypes.next().expect("Should parse pubtype OK") {
+ test::black_box(pubtype);
+ }
+ });
+}
+
+// We happen to know that there is a line number program and header at
+// offset 0 and that address size is 8 bytes. No need to parse DIEs to grab
+// this info off of the compilation units.
+const OFFSET: DebugLineOffset = DebugLineOffset(0);
+const ADDRESS_SIZE: u8 = 8;
+
+#[bench]
+fn bench_parsing_line_number_program_opcodes(b: &mut test::Bencher) {
+ let debug_line = read_section("debug_line");
+ let debug_line = DebugLine::new(&debug_line, LittleEndian);
+
+ b.iter(|| {
+ let program = debug_line
+ .program(OFFSET, ADDRESS_SIZE, None, None)
+ .expect("Should parse line number program header");
+ let header = program.header();
+
+ let mut instructions = header.instructions();
+ while let Some(instruction) = instructions
+ .next_instruction(header)
+ .expect("Should parse instruction")
+ {
+ test::black_box(instruction);
+ }
+ });
+}
+
+#[bench]
+fn bench_executing_line_number_programs(b: &mut test::Bencher) {
+ let debug_line = read_section("debug_line");
+ let debug_line = DebugLine::new(&debug_line, LittleEndian);
+
+ b.iter(|| {
+ let program = debug_line
+ .program(OFFSET, ADDRESS_SIZE, None, None)
+ .expect("Should parse line number program header");
+
+ let mut rows = program.rows();
+ while let Some(row) = rows
+ .next_row()
+ .expect("Should parse and execute all rows in the line number program")
+ {
+ test::black_box(row);
+ }
+ });
+}
+
+#[bench]
+fn bench_parsing_debug_loc(b: &mut test::Bencher) {
+ let debug_info = read_section("debug_info");
+ let debug_info = DebugInfo::new(&debug_info, LittleEndian);
+
+ let debug_abbrev = read_section("debug_abbrev");
+ let debug_abbrev = DebugAbbrev::new(&debug_abbrev, LittleEndian);
+
+ let debug_addr = DebugAddr::from(EndianSlice::new(&[], LittleEndian));
+ let debug_addr_base = DebugAddrBase(0);
+
+ let debug_loc = read_section("debug_loc");
+ let debug_loc = DebugLoc::new(&debug_loc, LittleEndian);
+ let debug_loclists = DebugLocLists::new(&[], LittleEndian);
+ let loclists = LocationLists::new(debug_loc, debug_loclists);
+
+ let mut offsets = Vec::new();
+
+ let mut iter = debug_info.units();
+ while let Some(unit) = iter.next().expect("Should parse compilation unit") {
+ let abbrevs = unit
+ .abbreviations(&debug_abbrev)
+ .expect("Should parse abbreviations");
+
+ let mut cursor = unit.entries(&abbrevs);
+ cursor.next_dfs().expect("Should parse next dfs");
+
+ let mut low_pc = 0;
+
+ {
+ let unit_entry = cursor.current().expect("Should have a root entry");
+ let low_pc_attr = unit_entry
+ .attr_value(gimli::DW_AT_low_pc)
+ .expect("Should parse low_pc");
+ if let Some(gimli::AttributeValue::Addr(address)) = low_pc_attr {
+ low_pc = address;
+ }
+ }
+
+ while cursor.next_dfs().expect("Should parse next dfs").is_some() {
+ let entry = cursor.current().expect("Should have a current entry");
+ let mut attrs = entry.attrs();
+ while let Some(attr) = attrs.next().expect("Should parse entry's attribute") {
+ if let gimli::AttributeValue::LocationListsRef(offset) = attr.value() {
+ offsets.push((offset, unit.encoding(), low_pc));
+ }
+ }
+ }
+ }
+
+ b.iter(|| {
+ for &(offset, encoding, base_address) in &*offsets {
+ let mut locs = loclists
+ .locations(offset, encoding, base_address, &debug_addr, debug_addr_base)
+ .expect("Should parse locations OK");
+ while let Some(loc) = locs.next().expect("Should parse next location") {
+ test::black_box(loc);
+ }
+ }
+ });
+}
+
+#[bench]
+fn bench_parsing_debug_ranges(b: &mut test::Bencher) {
+ let debug_info = read_section("debug_info");
+ let debug_info = DebugInfo::new(&debug_info, LittleEndian);
+
+ let debug_abbrev = read_section("debug_abbrev");
+ let debug_abbrev = DebugAbbrev::new(&debug_abbrev, LittleEndian);
+
+ let debug_addr = DebugAddr::from(EndianSlice::new(&[], LittleEndian));
+ let debug_addr_base = DebugAddrBase(0);
+
+ let debug_ranges = read_section("debug_ranges");
+ let debug_ranges = DebugRanges::new(&debug_ranges, LittleEndian);
+ let debug_rnglists = DebugRngLists::new(&[], LittleEndian);
+ let rnglists = RangeLists::new(debug_ranges, debug_rnglists);
+
+ let mut offsets = Vec::new();
+
+ let mut iter = debug_info.units();
+ while let Some(unit) = iter.next().expect("Should parse compilation unit") {
+ let abbrevs = unit
+ .abbreviations(&debug_abbrev)
+ .expect("Should parse abbreviations");
+
+ let mut cursor = unit.entries(&abbrevs);
+ cursor.next_dfs().expect("Should parse next dfs");
+
+ let mut low_pc = 0;
+
+ {
+ let unit_entry = cursor.current().expect("Should have a root entry");
+ let low_pc_attr = unit_entry
+ .attr_value(gimli::DW_AT_low_pc)
+ .expect("Should parse low_pc");
+ if let Some(gimli::AttributeValue::Addr(address)) = low_pc_attr {
+ low_pc = address;
+ }
+ }
+
+ while cursor.next_dfs().expect("Should parse next dfs").is_some() {
+ let entry = cursor.current().expect("Should have a current entry");
+ let mut attrs = entry.attrs();
+ while let Some(attr) = attrs.next().expect("Should parse entry's attribute") {
+ if let gimli::AttributeValue::RangeListsRef(offset) = attr.value() {
+ offsets.push((RangeListsOffset(offset.0), unit.encoding(), low_pc));
+ }
+ }
+ }
+ }
+
+ b.iter(|| {
+ for &(offset, encoding, base_address) in &*offsets {
+ let mut ranges = rnglists
+ .ranges(offset, encoding, base_address, &debug_addr, debug_addr_base)
+ .expect("Should parse ranges OK");
+ while let Some(range) = ranges.next().expect("Should parse next range") {
+ test::black_box(range);
+ }
+ }
+ });
+}
+
+fn debug_info_expressions<R: Reader>(
+ debug_info: &DebugInfo<R>,
+ debug_abbrev: &DebugAbbrev<R>,
+) -> Vec<(Expression<R>, Encoding)> {
+ let mut expressions = Vec::new();
+
+ let mut iter = debug_info.units();
+ while let Some(unit) = iter.next().expect("Should parse compilation unit") {
+ let abbrevs = unit
+ .abbreviations(debug_abbrev)
+ .expect("Should parse abbreviations");
+
+ let mut cursor = unit.entries(&abbrevs);
+ while let Some((_, entry)) = cursor.next_dfs().expect("Should parse next dfs") {
+ let mut attrs = entry.attrs();
+ while let Some(attr) = attrs.next().expect("Should parse entry's attribute") {
+ if let AttributeValue::Exprloc(expression) = attr.value() {
+ expressions.push((expression, unit.encoding()));
+ }
+ }
+ }
+ }
+
+ expressions
+}
+
+#[bench]
+fn bench_parsing_debug_info_expressions(b: &mut test::Bencher) {
+ let debug_abbrev = read_section("debug_abbrev");
+ let debug_abbrev = DebugAbbrev::new(&debug_abbrev, LittleEndian);
+
+ let debug_info = read_section("debug_info");
+ let debug_info = DebugInfo::new(&debug_info, LittleEndian);
+
+ let expressions = debug_info_expressions(&debug_info, &debug_abbrev);
+
+ b.iter(|| {
+ for &(expression, encoding) in &*expressions {
+ let mut pc = expression.0;
+ while !pc.is_empty() {
+ Operation::parse(&mut pc, encoding).expect("Should parse operation");
+ }
+ }
+ });
+}
+
+#[bench]
+fn bench_evaluating_debug_info_expressions(b: &mut test::Bencher) {
+ let debug_abbrev = read_section("debug_abbrev");
+ let debug_abbrev = DebugAbbrev::new(&debug_abbrev, LittleEndian);
+
+ let debug_info = read_section("debug_info");
+ let debug_info = DebugInfo::new(&debug_info, LittleEndian);
+
+ let expressions = debug_info_expressions(&debug_info, &debug_abbrev);
+
+ b.iter(|| {
+ for &(expression, encoding) in &*expressions {
+ let mut eval = expression.evaluation(encoding);
+ eval.set_initial_value(0);
+ let result = eval.evaluate().expect("Should evaluate expression");
+ test::black_box(result);
+ }
+ });
+}
+
+fn debug_loc_expressions<R: Reader>(
+ debug_info: &DebugInfo<R>,
+ debug_abbrev: &DebugAbbrev<R>,
+ debug_addr: &DebugAddr<R>,
+ loclists: &LocationLists<R>,
+) -> Vec<(Expression<R>, Encoding)> {
+ let debug_addr_base = DebugAddrBase(R::Offset::from_u8(0));
+
+ let mut expressions = Vec::new();
+
+ let mut iter = debug_info.units();
+ while let Some(unit) = iter.next().expect("Should parse compilation unit") {
+ let abbrevs = unit
+ .abbreviations(debug_abbrev)
+ .expect("Should parse abbreviations");
+
+ let mut cursor = unit.entries(&abbrevs);
+ cursor.next_dfs().expect("Should parse next dfs");
+
+ let mut low_pc = 0;
+
+ {
+ let unit_entry = cursor.current().expect("Should have a root entry");
+ let low_pc_attr = unit_entry
+ .attr_value(gimli::DW_AT_low_pc)
+ .expect("Should parse low_pc");
+ if let Some(gimli::AttributeValue::Addr(address)) = low_pc_attr {
+ low_pc = address;
+ }
+ }
+
+ while cursor.next_dfs().expect("Should parse next dfs").is_some() {
+ let entry = cursor.current().expect("Should have a current entry");
+ let mut attrs = entry.attrs();
+ while let Some(attr) = attrs.next().expect("Should parse entry's attribute") {
+ if let gimli::AttributeValue::LocationListsRef(offset) = attr.value() {
+ let mut locs = loclists
+ .locations(offset, unit.encoding(), low_pc, debug_addr, debug_addr_base)
+ .expect("Should parse locations OK");
+ while let Some(loc) = locs.next().expect("Should parse next location") {
+ expressions.push((loc.data, unit.encoding()));
+ }
+ }
+ }
+ }
+ }
+
+ expressions
+}
+
+#[bench]
+fn bench_parsing_debug_loc_expressions(b: &mut test::Bencher) {
+ let debug_info = read_section("debug_info");
+ let debug_info = DebugInfo::new(&debug_info, LittleEndian);
+
+ let debug_abbrev = read_section("debug_abbrev");
+ let debug_abbrev = DebugAbbrev::new(&debug_abbrev, LittleEndian);
+
+ let debug_addr = DebugAddr::from(EndianSlice::new(&[], LittleEndian));
+
+ let debug_loc = read_section("debug_loc");
+ let debug_loc = DebugLoc::new(&debug_loc, LittleEndian);
+ let debug_loclists = DebugLocLists::new(&[], LittleEndian);
+ let loclists = LocationLists::new(debug_loc, debug_loclists);
+
+ let expressions = debug_loc_expressions(&debug_info, &debug_abbrev, &debug_addr, &loclists);
+
+ b.iter(|| {
+ for &(expression, encoding) in &*expressions {
+ let mut pc = expression.0;
+ while !pc.is_empty() {
+ Operation::parse(&mut pc, encoding).expect("Should parse operation");
+ }
+ }
+ });
+}
+
+#[bench]
+fn bench_evaluating_debug_loc_expressions(b: &mut test::Bencher) {
+ let debug_info = read_section("debug_info");
+ let debug_info = DebugInfo::new(&debug_info, LittleEndian);
+
+ let debug_abbrev = read_section("debug_abbrev");
+ let debug_abbrev = DebugAbbrev::new(&debug_abbrev, LittleEndian);
+
+ let debug_addr = DebugAddr::from(EndianSlice::new(&[], LittleEndian));
+
+ let debug_loc = read_section("debug_loc");
+ let debug_loc = DebugLoc::new(&debug_loc, LittleEndian);
+ let debug_loclists = DebugLocLists::new(&[], LittleEndian);
+ let loclists = LocationLists::new(debug_loc, debug_loclists);
+
+ let expressions = debug_loc_expressions(&debug_info, &debug_abbrev, &debug_addr, &loclists);
+
+ b.iter(|| {
+ for &(expression, encoding) in &*expressions {
+ let mut eval = expression.evaluation(encoding);
+ eval.set_initial_value(0);
+ let result = eval.evaluate().expect("Should evaluate expression");
+ test::black_box(result);
+ }
+ });
+}
+
+// See comment above `test_parse_self_eh_frame`.
+#[cfg(target_pointer_width = "64")]
+mod cfi {
+ use super::*;
+ use fallible_iterator::FallibleIterator;
+
+ use gimli::{
+ BaseAddresses, CieOrFde, EhFrame, FrameDescriptionEntry, LittleEndian, UnwindContext,
+ UnwindSection,
+ };
+
+ #[bench]
+ fn iterate_entries_and_do_not_parse_any_fde(b: &mut test::Bencher) {
+ let eh_frame = read_section("eh_frame");
+ let eh_frame = EhFrame::new(&eh_frame, LittleEndian);
+
+ let bases = BaseAddresses::default()
+ .set_eh_frame(0)
+ .set_got(0)
+ .set_text(0);
+
+ b.iter(|| {
+ let mut entries = eh_frame.entries(&bases);
+ while let Some(entry) = entries.next().expect("Should parse CFI entry OK") {
+ test::black_box(entry);
+ }
+ });
+ }
+
+ #[bench]
+ fn iterate_entries_and_parse_every_fde(b: &mut test::Bencher) {
+ let eh_frame = read_section("eh_frame");
+ let eh_frame = EhFrame::new(&eh_frame, LittleEndian);
+
+ let bases = BaseAddresses::default()
+ .set_eh_frame(0)
+ .set_got(0)
+ .set_text(0);
+
+ b.iter(|| {
+ let mut entries = eh_frame.entries(&bases);
+ while let Some(entry) = entries.next().expect("Should parse CFI entry OK") {
+ match entry {
+ CieOrFde::Cie(cie) => {
+ test::black_box(cie);
+ }
+ CieOrFde::Fde(partial) => {
+ let fde = partial
+ .parse(EhFrame::cie_from_offset)
+ .expect("Should be able to get CIE for FED");
+ test::black_box(fde);
+ }
+ };
+ }
+ });
+ }
+
+ #[bench]
+ fn iterate_entries_and_parse_every_fde_and_instructions(b: &mut test::Bencher) {
+ let eh_frame = read_section("eh_frame");
+ let eh_frame = EhFrame::new(&eh_frame, LittleEndian);
+
+ let bases = BaseAddresses::default()
+ .set_eh_frame(0)
+ .set_got(0)
+ .set_text(0);
+
+ b.iter(|| {
+ let mut entries = eh_frame.entries(&bases);
+ while let Some(entry) = entries.next().expect("Should parse CFI entry OK") {
+ match entry {
+ CieOrFde::Cie(cie) => {
+ let mut instrs = cie.instructions(&eh_frame, &bases);
+ while let Some(i) =
+ instrs.next().expect("Can parse next CFI instruction OK")
+ {
+ test::black_box(i);
+ }
+ }
+ CieOrFde::Fde(partial) => {
+ let fde = partial
+ .parse(EhFrame::cie_from_offset)
+ .expect("Should be able to get CIE for FED");
+ let mut instrs = fde.instructions(&eh_frame, &bases);
+ while let Some(i) =
+ instrs.next().expect("Can parse next CFI instruction OK")
+ {
+ test::black_box(i);
+ }
+ }
+ };
+ }
+ });
+ }
+
+ #[bench]
+ fn iterate_entries_evaluate_every_fde(b: &mut test::Bencher) {
+ let eh_frame = read_section("eh_frame");
+ let eh_frame = EhFrame::new(&eh_frame, LittleEndian);
+
+ let bases = BaseAddresses::default()
+ .set_eh_frame(0)
+ .set_got(0)
+ .set_text(0);
+
+ let mut ctx = Box::new(UnwindContext::new());
+
+ b.iter(|| {
+ let mut entries = eh_frame.entries(&bases);
+ while let Some(entry) = entries.next().expect("Should parse CFI entry OK") {
+ match entry {
+ CieOrFde::Cie(_) => {}
+ CieOrFde::Fde(partial) => {
+ let fde = partial
+ .parse(EhFrame::cie_from_offset)
+ .expect("Should be able to get CIE for FED");
+ let mut table = fde
+ .rows(&eh_frame, &bases, &mut ctx)
+ .expect("Should be able to initialize ctx");
+ while let Some(row) =
+ table.next_row().expect("Should get next unwind table row")
+ {
+ test::black_box(row);
+ }
+ }
+ };
+ }
+ });
+ }
+
+ fn instrs_len<R: Reader>(
+ eh_frame: &EhFrame<R>,
+ bases: &BaseAddresses,
+ fde: &FrameDescriptionEntry<R>,
+ ) -> usize {
+ fde.instructions(eh_frame, bases)
+ .fold(0, |count, _| Ok(count + 1))
+ .expect("fold over instructions OK")
+ }
+
+ fn get_fde_with_longest_cfi_instructions<R: Reader>(
+ eh_frame: &EhFrame<R>,
+ bases: &BaseAddresses,
+ ) -> FrameDescriptionEntry<R> {
+ let mut longest: Option<(usize, FrameDescriptionEntry<_>)> = None;
+
+ let mut entries = eh_frame.entries(bases);
+ while let Some(entry) = entries.next().expect("Should parse CFI entry OK") {
+ match entry {
+ CieOrFde::Cie(_) => {}
+ CieOrFde::Fde(partial) => {
+ let fde = partial
+ .parse(EhFrame::cie_from_offset)
+ .expect("Should be able to get CIE for FED");
+
+ let this_len = instrs_len(eh_frame, bases, &fde);
+
+ let found_new_longest = match longest {
+ None => true,
+ Some((longest_len, ref _fde)) => this_len > longest_len,
+ };
+
+ if found_new_longest {
+ longest = Some((this_len, fde));
+ }
+ }
+ };
+ }
+
+ longest.expect("At least one FDE in .eh_frame").1
+ }
+
+ #[bench]
+ fn parse_longest_fde_instructions(b: &mut test::Bencher) {
+ let eh_frame = read_section("eh_frame");
+ let eh_frame = EhFrame::new(&eh_frame, LittleEndian);
+ let bases = BaseAddresses::default()
+ .set_eh_frame(0)
+ .set_got(0)
+ .set_text(0);
+ let fde = get_fde_with_longest_cfi_instructions(&eh_frame, &bases);
+
+ b.iter(|| {
+ let mut instrs = fde.instructions(&eh_frame, &bases);
+ while let Some(i) = instrs.next().expect("Should parse instruction OK") {
+ test::black_box(i);
+ }
+ });
+ }
+
+ #[bench]
+ fn eval_longest_fde_instructions_new_ctx_everytime(b: &mut test::Bencher) {
+ let eh_frame = read_section("eh_frame");
+ let eh_frame = EhFrame::new(&eh_frame, LittleEndian);
+ let bases = BaseAddresses::default()
+ .set_eh_frame(0)
+ .set_got(0)
+ .set_text(0);
+ let fde = get_fde_with_longest_cfi_instructions(&eh_frame, &bases);
+
+ b.iter(|| {
+ let mut ctx = Box::new(UnwindContext::new());
+ let mut table = fde
+ .rows(&eh_frame, &bases, &mut ctx)
+ .expect("Should initialize the ctx OK");
+ while let Some(row) = table.next_row().expect("Should get next unwind table row") {
+ test::black_box(row);
+ }
+ });
+ }
+
+ #[bench]
+ fn eval_longest_fde_instructions_same_ctx(b: &mut test::Bencher) {
+ let eh_frame = read_section("eh_frame");
+ let eh_frame = EhFrame::new(&eh_frame, LittleEndian);
+ let bases = BaseAddresses::default()
+ .set_eh_frame(0)
+ .set_got(0)
+ .set_text(0);
+ let fde = get_fde_with_longest_cfi_instructions(&eh_frame, &bases);
+
+ let mut ctx = Box::new(UnwindContext::new());
+
+ b.iter(|| {
+ let mut table = fde
+ .rows(&eh_frame, &bases, &mut ctx)
+ .expect("Should initialize the ctx OK");
+ while let Some(row) = table.next_row().expect("Should get next unwind table row") {
+ test::black_box(row);
+ }
+ });
+ }
+}
diff --git a/vendor/gimli-0.26.2/examples/dwarf-validate.rs b/vendor/gimli-0.26.2/examples/dwarf-validate.rs
new file mode 100644
index 000000000..54d8f3a1d
--- /dev/null
+++ b/vendor/gimli-0.26.2/examples/dwarf-validate.rs
@@ -0,0 +1,267 @@
+// Allow clippy lints when building without clippy.
+#![allow(unknown_lints)]
+
+use gimli::{AttributeValue, UnitHeader};
+use object::{Object, ObjectSection};
+use rayon::prelude::*;
+use std::borrow::{Borrow, Cow};
+use std::env;
+use std::fs;
+use std::io::{self, BufWriter, Write};
+use std::iter::Iterator;
+use std::path::{Path, PathBuf};
+use std::process;
+use std::sync::Mutex;
+use typed_arena::Arena;
+
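+// A reader whose data can be shared across threads, so compilation units can
+// be validated in parallel with rayon; the concrete reader used below is
+// `gimli::EndianSlice`.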
+trait Reader: gimli::Reader<Offset = usize> + Send + Sync {
+ type SyncSendEndian: gimli::Endianity + Send + Sync;
+}
+
+impl<'input, Endian> Reader for gimli::EndianSlice<'input, Endian>
+where
+ Endian: gimli::Endianity + Send + Sync,
+{
+ type SyncSendEndian = Endian;
+}
+
+struct ErrorWriter<W: Write + Send> {
+ inner: Mutex<(W, usize)>,
+ path: PathBuf,
+}
+
+impl<W: Write + Send> ErrorWriter<W> {
+ #[allow(clippy::needless_pass_by_value)]
+ fn error(&self, s: String) {
+ let mut lock = self.inner.lock().unwrap();
+ writeln!(&mut lock.0, "DWARF error in {}: {}", self.path.display(), s).unwrap();
+ lock.1 += 1;
+ }
+}
+
+fn main() {
+ let mut w = BufWriter::new(io::stdout());
+ let mut errors = 0;
+ for arg in env::args_os().skip(1) {
+ let path = Path::new(&arg);
+ let file = match fs::File::open(&path) {
+ Ok(file) => file,
+ Err(err) => {
+ eprintln!("Failed to open file '{}': {}", path.display(), err);
+ errors += 1;
+ continue;
+ }
+ };
+ let file = match unsafe { memmap2::Mmap::map(&file) } {
+ Ok(mmap) => mmap,
+ Err(err) => {
+ eprintln!("Failed to map file '{}': {}", path.display(), &err);
+ errors += 1;
+ continue;
+ }
+ };
+ let file = match object::File::parse(&*file) {
+ Ok(file) => file,
+ Err(err) => {
+ eprintln!("Failed to parse file '{}': {}", path.display(), err);
+ errors += 1;
+ continue;
+ }
+ };
+
+ let endian = if file.is_little_endian() {
+ gimli::RunTimeEndian::Little
+ } else {
+ gimli::RunTimeEndian::Big
+ };
+ let mut error_writer = ErrorWriter {
+ inner: Mutex::new((&mut w, 0)),
+ path: path.to_owned(),
+ };
+ validate_file(&mut error_writer, &file, endian);
+ errors += error_writer.inner.into_inner().unwrap().1;
+ }
+ // Flush any errors.
+ drop(w);
+ if errors > 0 {
+ process::exit(1);
+ }
+}
+
+fn validate_file<W, Endian>(w: &mut ErrorWriter<W>, file: &object::File, endian: Endian)
+where
+ W: Write + Send,
+ Endian: gimli::Endianity + Send + Sync,
+{
+ let arena = Arena::new();
+
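+    // Load a section's bytes into the arena so that the returned
+    // `EndianSlice` can borrow them for as long as validation runs.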
+ fn load_section<'a, 'file, 'input, S, Endian>(
+ arena: &'a Arena<Cow<'file, [u8]>>,
+ file: &'file object::File<'input>,
+ endian: Endian,
+ ) -> S
+ where
+ S: gimli::Section<gimli::EndianSlice<'a, Endian>>,
+ Endian: gimli::Endianity + Send + Sync,
+ 'file: 'input,
+ 'a: 'file,
+ {
+ let data = match file.section_by_name(S::section_name()) {
+ Some(ref section) => section
+ .uncompressed_data()
+ .unwrap_or(Cow::Borrowed(&[][..])),
+ None => Cow::Borrowed(&[][..]),
+ };
+ let data_ref = (*arena.alloc(data)).borrow();
+ S::from(gimli::EndianSlice::new(data_ref, endian))
+ }
+
+ // Variables representing sections of the file. The type of each is inferred from its use in the
+ // validate_info function below.
+ let debug_abbrev = &load_section(&arena, file, endian);
+ let debug_info = &load_section(&arena, file, endian);
+
+ validate_info(w, debug_info, debug_abbrev);
+}
+
+struct UnitSummary {
+ // True if we successfully parsed all the DIEs and attributes in the compilation unit
+ internally_valid: bool,
+ offset: gimli::DebugInfoOffset,
+ die_offsets: Vec<gimli::UnitOffset>,
+ global_die_references: Vec<(gimli::UnitOffset, gimli::DebugInfoOffset)>,
+}
+
+fn validate_info<W, R>(
+ w: &mut ErrorWriter<W>,
+ debug_info: &gimli::DebugInfo<R>,
+ debug_abbrev: &gimli::DebugAbbrev<R>,
+) where
+ W: Write + Send,
+ R: Reader,
+{
+ let mut units = Vec::new();
+ let mut units_iter = debug_info.units();
+ let mut last_offset = 0;
+ loop {
+ let u = match units_iter.next() {
+ Err(err) => {
+ w.error(format!(
+ "Can't read unit header at offset {:#x}, stopping reading units: {}",
+ last_offset, err
+ ));
+ break;
+ }
+ Ok(None) => break,
+ Ok(Some(u)) => u,
+ };
+ last_offset = u.offset().as_debug_info_offset().unwrap().0 + u.length_including_self();
+ units.push(u);
+ }
+ let process_unit = |unit: UnitHeader<R>| -> UnitSummary {
+ let unit_offset = unit.offset().as_debug_info_offset().unwrap();
+ let mut ret = UnitSummary {
+ internally_valid: false,
+ offset: unit_offset,
+ die_offsets: Vec::new(),
+ global_die_references: Vec::new(),
+ };
+ let abbrevs = match unit.abbreviations(debug_abbrev) {
+ Ok(abbrevs) => abbrevs,
+ Err(err) => {
+ w.error(format!(
+ "Invalid abbrevs for unit {:#x}: {}",
+ unit_offset.0, &err
+ ));
+ return ret;
+ }
+ };
+ let mut entries = unit.entries(&abbrevs);
+ let mut unit_refs = Vec::new();
+ loop {
+ let (_, entry) = match entries.next_dfs() {
+ Err(err) => {
+ w.error(format!(
+ "Invalid DIE for unit {:#x}: {}",
+ unit_offset.0, &err
+ ));
+ return ret;
+ }
+ Ok(None) => break,
+ Ok(Some(entry)) => entry,
+ };
+ ret.die_offsets.push(entry.offset());
+
+ let mut attrs = entry.attrs();
+ loop {
+ let attr = match attrs.next() {
+ Err(err) => {
+ w.error(format!(
+ "Invalid attribute for unit {:#x} at DIE {:#x}: {}",
+ unit_offset.0,
+ entry.offset().0,
+ &err
+ ));
+ return ret;
+ }
+ Ok(None) => break,
+ Ok(Some(attr)) => attr,
+ };
+ match attr.value() {
+ AttributeValue::UnitRef(offset) => {
+ unit_refs.push((entry.offset(), offset));
+ }
+ AttributeValue::DebugInfoRef(offset) => {
+ ret.global_die_references.push((entry.offset(), offset));
+ }
+ _ => (),
+ }
+ }
+ }
+ ret.internally_valid = true;
+ ret.die_offsets.shrink_to_fit();
+ ret.global_die_references.shrink_to_fit();
+
+ // Check intra-unit references
+ for (from, to) in unit_refs {
+ if ret.die_offsets.binary_search(&to).is_err() {
+ w.error(format!(
+ "Invalid intra-unit reference in unit {:#x} from DIE {:#x} to {:#x}",
+ unit_offset.0, from.0, to.0
+ ));
+ }
+ }
+
+ ret
+ };
+ let processed_units = units.into_par_iter().map(process_unit).collect::<Vec<_>>();
+
+ let check_unit = |summary: &UnitSummary| {
+ if !summary.internally_valid {
+ return;
+ }
+ for &(from, to) in summary.global_die_references.iter() {
+ let u = match processed_units.binary_search_by_key(&to, |v| v.offset) {
+ Ok(i) => &processed_units[i],
+ Err(i) => {
+ if i > 0 {
+ &processed_units[i - 1]
+ } else {
+ w.error(format!("Invalid cross-unit reference in unit {:#x} from DIE {:#x} to global DIE {:#x}: no unit found",
+ summary.offset.0, from.0, to.0));
+ continue;
+ }
+ }
+ };
+ if !u.internally_valid {
+ continue;
+ }
+ let to_offset = gimli::UnitOffset(to.0 - u.offset.0);
+ if u.die_offsets.binary_search(&to_offset).is_err() {
+ w.error(format!("Invalid cross-unit reference in unit {:#x} from DIE {:#x} to global DIE {:#x}: unit at {:#x} contains no DIE {:#x}",
+ summary.offset.0, from.0, to.0, u.offset.0, to_offset.0));
+ }
+ }
+ };
+ processed_units.par_iter().for_each(check_unit);
+}
diff --git a/vendor/gimli-0.26.2/examples/dwarfdump.rs b/vendor/gimli-0.26.2/examples/dwarfdump.rs
new file mode 100644
index 000000000..4b61fd572
--- /dev/null
+++ b/vendor/gimli-0.26.2/examples/dwarfdump.rs
@@ -0,0 +1,2417 @@
+// Allow clippy lints when building without clippy.
+#![allow(unknown_lints)]
+
+use fallible_iterator::FallibleIterator;
+use gimli::{Section, UnitHeader, UnitOffset, UnitSectionOffset, UnitType, UnwindSection};
+use object::{Object, ObjectSection, ObjectSymbol};
+use regex::bytes::Regex;
+use std::borrow::{Borrow, Cow};
+use std::cmp::min;
+use std::collections::HashMap;
+use std::env;
+use std::fmt::{self, Debug};
+use std::fs;
+use std::io;
+use std::io::{BufWriter, Write};
+use std::iter::Iterator;
+use std::mem;
+use std::process;
+use std::result;
+use std::sync::{Condvar, Mutex};
+use typed_arena::Arena;
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum Error {
+ GimliError(gimli::Error),
+ ObjectError(object::read::Error),
+ IoError,
+}
+
+impl fmt::Display for Error {
+ #[inline]
+ fn fmt(&self, f: &mut fmt::Formatter) -> ::std::result::Result<(), fmt::Error> {
+ Debug::fmt(self, f)
+ }
+}
+
+fn writeln_error<W: Write, R: Reader>(
+ w: &mut W,
+ dwarf: &gimli::Dwarf<R>,
+ err: Error,
+ msg: &str,
+) -> io::Result<()> {
+ writeln!(
+ w,
+ "{}: {}",
+ msg,
+ match err {
+ Error::GimliError(err) => dwarf.format_error(err),
+ Error::ObjectError(err) =>
+ format!("An object error occurred while reading: {:?}", err),
+ Error::IoError => "An I/O error occurred while writing.".to_string(),
+ }
+ )
+}
+
+impl From<gimli::Error> for Error {
+ fn from(err: gimli::Error) -> Self {
+ Error::GimliError(err)
+ }
+}
+
+impl From<io::Error> for Error {
+ fn from(_: io::Error) -> Self {
+ Error::IoError
+ }
+}
+
+impl From<object::read::Error> for Error {
+ fn from(err: object::read::Error) -> Self {
+ Error::ObjectError(err)
+ }
+}
+
+pub type Result<T> = result::Result<T, Error>;
+
+fn parallel_output<W, II, F>(w: &mut W, max_workers: usize, iter: II, f: F) -> Result<()>
+where
+ W: Write + Send,
+ F: Sync + Fn(II::Item, &mut Vec<u8>) -> Result<()>,
+ II: IntoIterator,
+ II::IntoIter: Send,
+{
+ struct ParallelOutputState<I, W> {
+ iterator: I,
+ current_worker: usize,
+ result: Result<()>,
+ w: W,
+ }
+
+ let state = Mutex::new(ParallelOutputState {
+ iterator: iter.into_iter().fuse(),
+ current_worker: 0,
+ result: Ok(()),
+ w,
+ });
+ let workers = min(max_workers, num_cpus::get());
+ let mut condvars = Vec::new();
+ for _ in 0..workers {
+ condvars.push(Condvar::new());
+ }
+ {
+ let state_ref = &state;
+ let f_ref = &f;
+ let condvars_ref = &condvars;
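+ // Workers take turns in round-robin order: each one pulls the next item
+ // while holding the lock, formats it into a local buffer, then waits for
+ // its next turn to write the buffer, so output order matches input order.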
+ crossbeam::scope(|scope| {
+ for i in 0..workers {
+ scope.spawn(move |_| {
+ let mut v = Vec::new();
+ let mut lock = state_ref.lock().unwrap();
+ while lock.current_worker != i {
+ lock = condvars_ref[i].wait(lock).unwrap();
+ }
+ loop {
+ let item = if lock.result.is_ok() {
+ lock.iterator.next()
+ } else {
+ None
+ };
+ lock.current_worker = (i + 1) % workers;
+ condvars_ref[lock.current_worker].notify_one();
+ mem::drop(lock);
+
+ let ret = if let Some(item) = item {
+ v.clear();
+ f_ref(item, &mut v)
+ } else {
+ return;
+ };
+
+ lock = state_ref.lock().unwrap();
+ while lock.current_worker != i {
+ lock = condvars_ref[i].wait(lock).unwrap();
+ }
+ if lock.result.is_ok() {
+ let ret2 = lock.w.write_all(&v);
+ if ret.is_err() {
+ lock.result = ret;
+ } else {
+ lock.result = ret2.map_err(Error::from);
+ }
+ }
+ }
+ });
+ }
+ })
+ .unwrap();
+ }
+ state.into_inner().unwrap().result
+}
+
+trait Reader: gimli::Reader<Offset = usize> + Send + Sync {}
+
+impl<'input, Endian> Reader for gimli::EndianSlice<'input, Endian> where
+ Endian: gimli::Endianity + Send + Sync
+{
+}
+
+type RelocationMap = HashMap<usize, object::Relocation>;
+
+fn add_relocations(
+ relocations: &mut RelocationMap,
+ file: &object::File,
+ section: &object::Section,
+) {
+ for (offset64, mut relocation) in section.relocations() {
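+ // Skip any relocation whose offset does not fit in `usize`.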
+ let offset = offset64 as usize;
+ if offset as u64 != offset64 {
+ continue;
+ }
+ match relocation.kind() {
+ object::RelocationKind::Absolute => {
+ match relocation.target() {
+ object::RelocationTarget::Symbol(symbol_idx) => {
+ match file.symbol_by_index(symbol_idx) {
+ Ok(symbol) => {
+ let addend =
+ symbol.address().wrapping_add(relocation.addend() as u64);
+ relocation.set_addend(addend as i64);
+ }
+ Err(_) => {
+ eprintln!(
+ "Relocation with invalid symbol for section {} at offset 0x{:08x}",
+ section.name().unwrap(),
+ offset
+ );
+ }
+ }
+ }
+ _ => {}
+ }
+ if relocations.insert(offset, relocation).is_some() {
+ eprintln!(
+ "Multiple relocations for section {} at offset 0x{:08x}",
+ section.name().unwrap(),
+ offset
+ );
+ }
+ }
+ _ => {
+ eprintln!(
+ "Unsupported relocation for section {} at offset 0x{:08x}",
+ section.name().unwrap(),
+ offset
+ );
+ }
+ }
+ }
+}
+
+/// Apply relocations to addresses and offsets during parsing,
+/// instead of requiring the data to be fully relocated prior
+/// to parsing.
+///
+/// Pros:
+/// - allows read-only buffers; we don't need to implement writing values back to buffers
+/// - potentially allows us to handle addresses and offsets differently
+/// - potentially allows us to add metadata from the relocation (e.g. symbol names)
+///
+/// Cons:
+/// - maybe incomplete
+#[derive(Debug, Clone)]
+struct Relocate<'a, R: gimli::Reader<Offset = usize>> {
+ relocations: &'a RelocationMap,
+ section: R,
+ reader: R,
+}
+
+impl<'a, R: gimli::Reader<Offset = usize>> Relocate<'a, R> {
+ fn relocate(&self, offset: usize, value: u64) -> u64 {
+ if let Some(relocation) = self.relocations.get(&offset) {
+ match relocation.kind() {
+ object::RelocationKind::Absolute => {
+ if relocation.has_implicit_addend() {
+ // Use the explicit addend too, because it may have the symbol value.
+ return value.wrapping_add(relocation.addend() as u64);
+ } else {
+ return relocation.addend() as u64;
+ }
+ }
+ _ => {}
+ }
+ };
+ value
+ }
+}
+
+impl<'a, R: gimli::Reader<Offset = usize>> gimli::Reader for Relocate<'a, R> {
+ type Endian = R::Endian;
+ type Offset = R::Offset;
+
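+ // Offsets are measured from the start of `self.section`, matching the
+ // keys used in the relocation map.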
+ fn read_address(&mut self, address_size: u8) -> gimli::Result<u64> {
+ let offset = self.reader.offset_from(&self.section);
+ let value = self.reader.read_address(address_size)?;
+ Ok(self.relocate(offset, value))
+ }
+
+ fn read_length(&mut self, format: gimli::Format) -> gimli::Result<usize> {
+ let offset = self.reader.offset_from(&self.section);
+ let value = self.reader.read_length(format)?;
+ <usize as gimli::ReaderOffset>::from_u64(self.relocate(offset, value as u64))
+ }
+
+ fn read_offset(&mut self, format: gimli::Format) -> gimli::Result<usize> {
+ let offset = self.reader.offset_from(&self.section);
+ let value = self.reader.read_offset(format)?;
+ <usize as gimli::ReaderOffset>::from_u64(self.relocate(offset, value as u64))
+ }
+
+ fn read_sized_offset(&mut self, size: u8) -> gimli::Result<usize> {
+ let offset = self.reader.offset_from(&self.section);
+ let value = self.reader.read_sized_offset(size)?;
+ <usize as gimli::ReaderOffset>::from_u64(self.relocate(offset, value as u64))
+ }
+
+ #[inline]
+ fn split(&mut self, len: Self::Offset) -> gimli::Result<Self> {
+ let mut other = self.clone();
+ other.reader.truncate(len)?;
+ self.reader.skip(len)?;
+ Ok(other)
+ }
+
+ // All remaining methods simply delegate to `self.reader`.
+
+ #[inline]
+ fn endian(&self) -> Self::Endian {
+ self.reader.endian()
+ }
+
+ #[inline]
+ fn len(&self) -> Self::Offset {
+ self.reader.len()
+ }
+
+ #[inline]
+ fn empty(&mut self) {
+ self.reader.empty()
+ }
+
+ #[inline]
+ fn truncate(&mut self, len: Self::Offset) -> gimli::Result<()> {
+ self.reader.truncate(len)
+ }
+
+ #[inline]
+ fn offset_from(&self, base: &Self) -> Self::Offset {
+ self.reader.offset_from(&base.reader)
+ }
+
+ #[inline]
+ fn offset_id(&self) -> gimli::ReaderOffsetId {
+ self.reader.offset_id()
+ }
+
+ #[inline]
+ fn lookup_offset_id(&self, id: gimli::ReaderOffsetId) -> Option<Self::Offset> {
+ self.reader.lookup_offset_id(id)
+ }
+
+ #[inline]
+ fn find(&self, byte: u8) -> gimli::Result<Self::Offset> {
+ self.reader.find(byte)
+ }
+
+ #[inline]
+ fn skip(&mut self, len: Self::Offset) -> gimli::Result<()> {
+ self.reader.skip(len)
+ }
+
+ #[inline]
+ fn to_slice(&self) -> gimli::Result<Cow<[u8]>> {
+ self.reader.to_slice()
+ }
+
+ #[inline]
+ fn to_string(&self) -> gimli::Result<Cow<str>> {
+ self.reader.to_string()
+ }
+
+ #[inline]
+ fn to_string_lossy(&self) -> gimli::Result<Cow<str>> {
+ self.reader.to_string_lossy()
+ }
+
+ #[inline]
+ fn read_slice(&mut self, buf: &mut [u8]) -> gimli::Result<()> {
+ self.reader.read_slice(buf)
+ }
+}
+
+impl<'a, R: Reader> Reader for Relocate<'a, R> {}
+
+#[derive(Default)]
+struct Flags<'a> {
+ eh_frame: bool,
+ goff: bool,
+ info: bool,
+ line: bool,
+ pubnames: bool,
+ pubtypes: bool,
+ aranges: bool,
+ dwo: bool,
+ dwp: bool,
+ dwo_parent: Option<object::File<'a>>,
+ sup: Option<object::File<'a>>,
+ raw: bool,
+ match_units: Option<Regex>,
+}
+
+fn print_usage(opts: &getopts::Options) -> ! {
+ let brief = format!("Usage: {} <options> <file>", env::args().next().unwrap());
+ write!(&mut io::stderr(), "{}", opts.usage(&brief)).ok();
+ process::exit(1);
+}
+
+fn main() {
+ let mut opts = getopts::Options::new();
+ opts.optflag(
+ "",
+ "eh-frame",
+ "print .eh-frame exception handling frame information",
+ );
+ opts.optflag("G", "", "show global die offsets");
+ opts.optflag("i", "", "print .debug_info and .debug_types sections");
+ opts.optflag("l", "", "print .debug_line section");
+ opts.optflag("p", "", "print .debug_pubnames section");
+ opts.optflag("r", "", "print .debug_aranges section");
+ opts.optflag("y", "", "print .debug_pubtypes section");
+ opts.optflag(
+ "",
+ "dwo",
+ "print the .dwo versions of the selected sections",
+ );
+ opts.optflag(
+ "",
+ "dwp",
+ "print the .dwp versions of the selected sections",
+ );
+ opts.optopt(
+ "",
+ "dwo-parent",
+ "use the specified file as the parent of the dwo or dwp (e.g. for .debug_addr)",
+ "library path",
+ );
+ opts.optflag("", "raw", "print raw data values");
+ opts.optopt(
+ "u",
+ "match-units",
+ "print compilation units whose output matches a regex",
+ "REGEX",
+ );
+ opts.optopt("", "sup", "path to supplementary object file", "PATH");
+
+ let matches = match opts.parse(env::args().skip(1)) {
+ Ok(m) => m,
+ Err(e) => {
+ writeln!(&mut io::stderr(), "{:?}\n", e).ok();
+ print_usage(&opts);
+ }
+ };
+ if matches.free.is_empty() {
+ print_usage(&opts);
+ }
+
+ let mut all = true;
+ let mut flags = Flags::default();
+ if matches.opt_present("eh-frame") {
+ flags.eh_frame = true;
+ all = false;
+ }
+ if matches.opt_present("G") {
+ flags.goff = true;
+ }
+ if matches.opt_present("i") {
+ flags.info = true;
+ all = false;
+ }
+ if matches.opt_present("l") {
+ flags.line = true;
+ all = false;
+ }
+ if matches.opt_present("p") {
+ flags.pubnames = true;
+ all = false;
+ }
+ if matches.opt_present("y") {
+ flags.pubtypes = true;
+ all = false;
+ }
+ if matches.opt_present("r") {
+ flags.aranges = true;
+ all = false;
+ }
+ if matches.opt_present("dwo") {
+ flags.dwo = true;
+ }
+ if matches.opt_present("dwp") {
+ flags.dwp = true;
+ }
+ if matches.opt_present("raw") {
+ flags.raw = true;
+ }
+ if all {
+ // .eh_frame is excluded even when printing all information.
+ // Cosmetic flags like -G must also be set explicitly.
+ flags.info = true;
+ flags.line = true;
+ flags.pubnames = true;
+ flags.pubtypes = true;
+ flags.aranges = true;
+ }
+ flags.match_units = if let Some(r) = matches.opt_str("u") {
+ match Regex::new(&r) {
+ Ok(r) => Some(r),
+ Err(e) => {
+ eprintln!("Invalid regular expression {}: {}", r, e);
+ process::exit(1);
+ }
+ }
+ } else {
+ None
+ };
+
+ let arena_mmap = Arena::new();
+ let load_file = |path| {
+ let file = match fs::File::open(&path) {
+ Ok(file) => file,
+ Err(err) => {
+ eprintln!("Failed to open file '{}': {}", path, err);
+ process::exit(1);
+ }
+ };
+ let mmap = match unsafe { memmap2::Mmap::map(&file) } {
+ Ok(mmap) => mmap,
+ Err(err) => {
+ eprintln!("Failed to map file '{}': {}", path, err);
+ process::exit(1);
+ }
+ };
+ let mmap_ref = (*arena_mmap.alloc(mmap)).borrow();
+ match object::File::parse(&**mmap_ref) {
+ Ok(file) => Some(file),
+ Err(err) => {
+ eprintln!("Failed to parse file '{}': {}", path, err);
+ process::exit(1);
+ }
+ }
+ };
+
+ flags.sup = matches.opt_str("sup").and_then(load_file);
+ flags.dwo_parent = matches.opt_str("dwo-parent").and_then(load_file);
+ if flags.dwo_parent.is_some() && !flags.dwo && !flags.dwp {
+ eprintln!("--dwo-parent also requires --dwo or --dwp");
+ process::exit(1);
+ }
+ if flags.dwo_parent.is_none() && flags.dwp {
+ eprintln!("--dwp also requires --dwo-parent");
+ process::exit(1);
+ }
+
+ for file_path in &matches.free {
+ if matches.free.len() != 1 {
+ println!("{}", file_path);
+ println!();
+ }
+
+ let file = match fs::File::open(&file_path) {
+ Ok(file) => file,
+ Err(err) => {
+ eprintln!("Failed to open file '{}': {}", file_path, err);
+ continue;
+ }
+ };
+ let file = match unsafe { memmap2::Mmap::map(&file) } {
+ Ok(mmap) => mmap,
+ Err(err) => {
+ eprintln!("Failed to map file '{}': {}", file_path, err);
+ continue;
+ }
+ };
+ let file = match object::File::parse(&*file) {
+ Ok(file) => file,
+ Err(err) => {
+ eprintln!("Failed to parse file '{}': {}", file_path, err);
+ continue;
+ }
+ };
+
+ let endian = if file.is_little_endian() {
+ gimli::RunTimeEndian::Little
+ } else {
+ gimli::RunTimeEndian::Big
+ };
+ let ret = dump_file(&file, endian, &flags);
+ match ret {
+ Ok(_) => (),
+ Err(err) => eprintln!("Failed to dump '{}': {}", file_path, err,),
+ }
+ }
+}
+
+fn empty_file_section<'input, 'arena, Endian: gimli::Endianity>(
+ endian: Endian,
+ arena_relocations: &'arena Arena<RelocationMap>,
+) -> Relocate<'arena, gimli::EndianSlice<'arena, Endian>> {
+ let reader = gimli::EndianSlice::new(&[], endian);
+ let section = reader;
+ let relocations = RelocationMap::default();
+ let relocations = (*arena_relocations.alloc(relocations)).borrow();
+ Relocate {
+ relocations,
+ section,
+ reader,
+ }
+}
+
+fn load_file_section<'input, 'arena, Endian: gimli::Endianity>(
+ id: gimli::SectionId,
+ file: &object::File<'input>,
+ endian: Endian,
+ is_dwo: bool,
+ arena_data: &'arena Arena<Cow<'input, [u8]>>,
+ arena_relocations: &'arena Arena<RelocationMap>,
+) -> Result<Relocate<'arena, gimli::EndianSlice<'arena, Endian>>> {
+ let mut relocations = RelocationMap::default();
+ let name = if is_dwo {
+ id.dwo_name()
+ } else {
+ Some(id.name())
+ };
+
+ let data = match name.and_then(|name| file.section_by_name(&name)) {
+ Some(ref section) => {
+ // DWO sections never have relocations, so don't bother.
+ if !is_dwo {
+ add_relocations(&mut relocations, file, section);
+ }
+ section.uncompressed_data()?
+ }
+ // Use a non-zero capacity so that `ReaderOffsetId`s are unique.
+ None => Cow::Owned(Vec::with_capacity(1)),
+ };
+ let data_ref = (*arena_data.alloc(data)).borrow();
+ let reader = gimli::EndianSlice::new(data_ref, endian);
+ let section = reader;
+ let relocations = (*arena_relocations.alloc(relocations)).borrow();
+ Ok(Relocate {
+ relocations,
+ section,
+ reader,
+ })
+}
+
+fn dump_file<Endian>(file: &object::File, endian: Endian, flags: &Flags) -> Result<()>
+where
+ Endian: gimli::Endianity + Send + Sync,
+{
+ let arena_data = Arena::new();
+ let arena_relocations = Arena::new();
+
+ let dwo_parent = if let Some(dwo_parent_file) = flags.dwo_parent.as_ref() {
+ let mut load_dwo_parent_section = |id: gimli::SectionId| -> Result<_> {
+ load_file_section(
+ id,
+ dwo_parent_file,
+ endian,
+ false,
+ &arena_data,
+ &arena_relocations,
+ )
+ };
+ Some(gimli::Dwarf::load(&mut load_dwo_parent_section)?)
+ } else {
+ None
+ };
+ let dwo_parent = dwo_parent.as_ref();
+
+ let dwo_parent_units = if let Some(dwo_parent) = dwo_parent {
+ Some(
+ match dwo_parent
+ .units()
+ .map(|unit_header| dwo_parent.unit(unit_header))
+ .filter_map(|unit| Ok(unit.dwo_id.map(|dwo_id| (dwo_id, unit))))
+ .collect()
+ {
+ Ok(units) => units,
+ Err(err) => {
+ eprintln!("Failed to process --dwo-parent units: {}", err);
+ return Ok(());
+ }
+ },
+ )
+ } else {
+ None
+ };
+ let dwo_parent_units = dwo_parent_units.as_ref();
+
+ let mut load_section = |id: gimli::SectionId| -> Result<_> {
+ load_file_section(
+ id,
+ file,
+ endian,
+ flags.dwo || flags.dwp,
+ &arena_data,
+ &arena_relocations,
+ )
+ };
+
+ let w = &mut BufWriter::new(io::stdout());
+ if flags.dwp {
+ let empty = empty_file_section(endian, &arena_relocations);
+ let dwp = gimli::DwarfPackage::load(&mut load_section, empty)?;
+ dump_dwp(w, &dwp, dwo_parent.unwrap(), dwo_parent_units, flags)?;
+ w.flush()?;
+ return Ok(());
+ }
+
+ let mut dwarf = gimli::Dwarf::load(&mut load_section)?;
+ if flags.dwo {
+ dwarf.file_type = gimli::DwarfFileType::Dwo;
+ if let Some(dwo_parent) = dwo_parent {
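+ // .debug_addr and .debug_ranges are not part of the split file; take
+ // them from the parent object.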
+ dwarf.debug_addr = dwo_parent.debug_addr.clone();
+ dwarf
+ .ranges
+ .set_debug_ranges(dwo_parent.ranges.debug_ranges().clone());
+ }
+ }
+
+ if let Some(sup_file) = flags.sup.as_ref() {
+ let mut load_sup_section = |id: gimli::SectionId| -> Result<_> {
+ // Note: we really only need the `.debug_str` section,
+ // but for now we load them all.
+ load_file_section(id, sup_file, endian, false, &arena_data, &arena_relocations)
+ };
+ dwarf.load_sup(&mut load_sup_section)?;
+ }
+
+ if flags.eh_frame {
+ let eh_frame = gimli::EhFrame::load(&mut load_section).unwrap();
+ dump_eh_frame(w, file, eh_frame)?;
+ }
+ if flags.info {
+ dump_info(w, &dwarf, dwo_parent_units, flags)?;
+ dump_types(w, &dwarf, dwo_parent_units, flags)?;
+ }
+ if flags.line {
+ dump_line(w, &dwarf)?;
+ }
+ if flags.pubnames {
+ let debug_pubnames = &gimli::Section::load(&mut load_section).unwrap();
+ dump_pubnames(w, debug_pubnames, &dwarf.debug_info)?;
+ }
+ if flags.aranges {
+ let debug_aranges = &gimli::Section::load(&mut load_section).unwrap();
+ dump_aranges(w, debug_aranges)?;
+ }
+ if flags.pubtypes {
+ let debug_pubtypes = &gimli::Section::load(&mut load_section).unwrap();
+ dump_pubtypes(w, debug_pubtypes, &dwarf.debug_info)?;
+ }
+ w.flush()?;
+ Ok(())
+}
+
+fn dump_eh_frame<R: Reader, W: Write>(
+ w: &mut W,
+ file: &object::File,
+ mut eh_frame: gimli::EhFrame<R>,
+) -> Result<()> {
+ // TODO: this might be better based on the file format.
+ let address_size = file
+ .architecture()
+ .address_size()
+ .map(|w| w.bytes())
+ .unwrap_or(mem::size_of::<usize>() as u8);
+ eh_frame.set_address_size(address_size);
+
+ fn register_name_none(_: gimli::Register) -> Option<&'static str> {
+ None
+ }
+ let arch_register_name = match file.architecture() {
+ object::Architecture::Arm | object::Architecture::Aarch64 => gimli::Arm::register_name,
+ object::Architecture::I386 => gimli::X86::register_name,
+ object::Architecture::X86_64 => gimli::X86_64::register_name,
+ _ => register_name_none,
+ };
+ let register_name = &|register| match arch_register_name(register) {
+ Some(name) => Cow::Borrowed(name),
+ None => Cow::Owned(format!("{}", register.0)),
+ };
+
+ let mut bases = gimli::BaseAddresses::default();
+ if let Some(section) = file.section_by_name(".eh_frame_hdr") {
+ bases = bases.set_eh_frame_hdr(section.address());
+ }
+ if let Some(section) = file.section_by_name(".eh_frame") {
+ bases = bases.set_eh_frame(section.address());
+ }
+ if let Some(section) = file.section_by_name(".text") {
+ bases = bases.set_text(section.address());
+ }
+ if let Some(section) = file.section_by_name(".got") {
+ bases = bases.set_got(section.address());
+ }
+
+ // TODO: Print "__eh_frame" here on macOS, and more generally use the
+ // section that we're actually looking at, which is what the canonical
+ // dwarfdump does.
+ writeln!(
+ w,
+ "Exception handling frame information for section .eh_frame"
+ )?;
+
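+ // Cache CIEs by offset so that FDEs sharing a CIE only parse it once.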
+ let mut cies = HashMap::new();
+
+ let mut entries = eh_frame.entries(&bases);
+ loop {
+ match entries.next()? {
+ None => return Ok(()),
+ Some(gimli::CieOrFde::Cie(cie)) => {
+ writeln!(w)?;
+ writeln!(w, "{:#010x}: CIE", cie.offset())?;
+ writeln!(w, " length: {:#010x}", cie.entry_len())?;
+ // TODO: CIE_id
+ writeln!(w, " version: {:#04x}", cie.version())?;
+ // TODO: augmentation
+ writeln!(w, " code_align: {}", cie.code_alignment_factor())?;
+ writeln!(w, " data_align: {}", cie.data_alignment_factor())?;
+ writeln!(
+ w,
+ " ra_register: {}",
+ register_name(cie.return_address_register())
+ )?;
+ if let Some(encoding) = cie.lsda_encoding() {
+ writeln!(
+ w,
+ " lsda_encoding: {}/{}",
+ encoding.application(),
+ encoding.format()
+ )?;
+ }
+ if let Some((encoding, personality)) = cie.personality_with_encoding() {
+ write!(
+ w,
+ " personality: {}/{} ",
+ encoding.application(),
+ encoding.format()
+ )?;
+ dump_pointer(w, personality)?;
+ writeln!(w)?;
+ }
+ if let Some(encoding) = cie.fde_address_encoding() {
+ writeln!(
+ w,
+ " fde_encoding: {}/{}",
+ encoding.application(),
+ encoding.format()
+ )?;
+ }
+ let instructions = cie.instructions(&eh_frame, &bases);
+ dump_cfi_instructions(w, instructions, true, register_name)?;
+ writeln!(w)?;
+ }
+ Some(gimli::CieOrFde::Fde(partial)) => {
+ let mut offset = None;
+ let fde = partial.parse(|_, bases, o| {
+ offset = Some(o);
+ cies.entry(o)
+ .or_insert_with(|| eh_frame.cie_from_offset(bases, o))
+ .clone()
+ })?;
+
+ writeln!(w)?;
+ writeln!(w, "{:#010x}: FDE", fde.offset())?;
+ writeln!(w, " length: {:#010x}", fde.entry_len())?;
+ writeln!(w, " CIE_pointer: {:#010x}", offset.unwrap().0)?;
+ // TODO: symbolicate the start address like the canonical dwarfdump does.
+ writeln!(w, " start_addr: {:#018x}", fde.initial_address())?;
+ writeln!(
+ w,
+ " range_size: {:#018x} (end_addr = {:#018x})",
+ fde.len(),
+ fde.initial_address() + fde.len()
+ )?;
+ if let Some(lsda) = fde.lsda() {
+ write!(w, " lsda: ")?;
+ dump_pointer(w, lsda)?;
+ writeln!(w)?;
+ }
+ let instructions = fde.instructions(&eh_frame, &bases);
+ dump_cfi_instructions(w, instructions, false, register_name)?;
+ writeln!(w)?;
+ }
+ }
+ }
+}
+
+fn dump_pointer<W: Write>(w: &mut W, p: gimli::Pointer) -> Result<()> {
+ match p {
+ gimli::Pointer::Direct(p) => {
+ write!(w, "{:#018x}", p)?;
+ }
+ gimli::Pointer::Indirect(p) => {
+ write!(w, "({:#018x})", p)?;
+ }
+ }
+ Ok(())
+}
+
+#[allow(clippy::unneeded_field_pattern)]
+fn dump_cfi_instructions<R: Reader, W: Write>(
+ w: &mut W,
+ mut insns: gimli::CallFrameInstructionIter<R>,
+ is_initial: bool,
+ register_name: &dyn Fn(gimli::Register) -> Cow<'static, str>,
+) -> Result<()> {
+ use gimli::CallFrameInstruction::*;
+
+ // TODO: we need to actually evaluate these instructions as we iterate them
+ // so we can print the initialized state for CIEs, and each unwind row's
+ // registers for FDEs.
+ //
+ // TODO: We should print DWARF expressions for the CFI instructions that
+ // embed DWARF expressions within themselves.
+
+ if !is_initial {
+ writeln!(w, " Instructions:")?;
+ }
+
+ loop {
+ match insns.next() {
+ Err(e) => {
+ writeln!(w, "Failed to decode CFI instruction: {}", e)?;
+ return Ok(());
+ }
+ Ok(None) => {
+ if is_initial {
+ writeln!(w, " Instructions: Init State:")?;
+ }
+ return Ok(());
+ }
+ Ok(Some(op)) => match op {
+ SetLoc { address } => {
+ writeln!(w, " DW_CFA_set_loc ({:#x})", address)?;
+ }
+ AdvanceLoc { delta } => {
+ writeln!(w, " DW_CFA_advance_loc ({})", delta)?;
+ }
+ DefCfa { register, offset } => {
+ writeln!(
+ w,
+ " DW_CFA_def_cfa ({}, {})",
+ register_name(register),
+ offset
+ )?;
+ }
+ DefCfaSf {
+ register,
+ factored_offset,
+ } => {
+ writeln!(
+ w,
+ " DW_CFA_def_cfa_sf ({}, {})",
+ register_name(register),
+ factored_offset
+ )?;
+ }
+ DefCfaRegister { register } => {
+ writeln!(
+ w,
+ " DW_CFA_def_cfa_register ({})",
+ register_name(register)
+ )?;
+ }
+ DefCfaOffset { offset } => {
+ writeln!(w, " DW_CFA_def_cfa_offset ({})", offset)?;
+ }
+ DefCfaOffsetSf { factored_offset } => {
+ writeln!(
+ w,
+ " DW_CFA_def_cfa_offset_sf ({})",
+ factored_offset
+ )?;
+ }
+ DefCfaExpression { expression: _ } => {
+ writeln!(w, " DW_CFA_def_cfa_expression (...)")?;
+ }
+ Undefined { register } => {
+ writeln!(
+ w,
+ " DW_CFA_undefined ({})",
+ register_name(register)
+ )?;
+ }
+ SameValue { register } => {
+ writeln!(
+ w,
+ " DW_CFA_same_value ({})",
+ register_name(register)
+ )?;
+ }
+ Offset {
+ register,
+ factored_offset,
+ } => {
+ writeln!(
+ w,
+ " DW_CFA_offset ({}, {})",
+ register_name(register),
+ factored_offset
+ )?;
+ }
+ OffsetExtendedSf {
+ register,
+ factored_offset,
+ } => {
+ writeln!(
+ w,
+ " DW_CFA_offset_extended_sf ({}, {})",
+ register_name(register),
+ factored_offset
+ )?;
+ }
+ ValOffset {
+ register,
+ factored_offset,
+ } => {
+ writeln!(
+ w,
+ " DW_CFA_val_offset ({}, {})",
+ register_name(register),
+ factored_offset
+ )?;
+ }
+ ValOffsetSf {
+ register,
+ factored_offset,
+ } => {
+ writeln!(
+ w,
+ " DW_CFA_val_offset_sf ({}, {})",
+ register_name(register),
+ factored_offset
+ )?;
+ }
+ Register {
+ dest_register,
+ src_register,
+ } => {
+ writeln!(
+ w,
+ " DW_CFA_register ({}, {})",
+ register_name(dest_register),
+ register_name(src_register)
+ )?;
+ }
+ Expression {
+ register,
+ expression: _,
+ } => {
+ writeln!(
+ w,
+ " DW_CFA_expression ({}, ...)",
+ register_name(register)
+ )?;
+ }
+ ValExpression {
+ register,
+ expression: _,
+ } => {
+ writeln!(
+ w,
+ " DW_CFA_val_expression ({}, ...)",
+ register_name(register)
+ )?;
+ }
+ Restore { register } => {
+ writeln!(
+ w,
+ " DW_CFA_restore ({})",
+ register_name(register)
+ )?;
+ }
+ RememberState => {
+ writeln!(w, " DW_CFA_remember_state")?;
+ }
+ RestoreState => {
+ writeln!(w, " DW_CFA_restore_state")?;
+ }
+ ArgsSize { size } => {
+ writeln!(w, " DW_CFA_GNU_args_size ({})", size)?;
+ }
+ Nop => {
+ writeln!(w, " DW_CFA_nop")?;
+ }
+ },
+ }
+ }
+}
+
+fn dump_dwp<R: Reader, W: Write + Send>(
+ w: &mut W,
+ dwp: &gimli::DwarfPackage<R>,
+ dwo_parent: &gimli::Dwarf<R>,
+ dwo_parent_units: Option<&HashMap<gimli::DwoId, gimli::Unit<R>>>,
+ flags: &Flags,
+) -> Result<()>
+where
+ R::Endian: Send + Sync,
+{
+ if dwp.cu_index.unit_count() != 0 {
+ writeln!(
+ w,
+ "\n.debug_cu_index: version = {}, sections = {}, units = {}, slots = {}",
+ dwp.cu_index.version(),
+ dwp.cu_index.section_count(),
+ dwp.cu_index.unit_count(),
+ dwp.cu_index.slot_count(),
+ )?;
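+ // Rows in the package unit index are numbered starting at 1.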
+ for i in 1..=dwp.cu_index.unit_count() {
+ writeln!(w, "\nCU index {}", i)?;
+ dump_dwp_sections(
+ w,
+ &dwp,
+ dwo_parent,
+ dwo_parent_units,
+ flags,
+ dwp.cu_index.sections(i)?,
+ )?;
+ }
+ }
+
+ if dwp.tu_index.unit_count() != 0 {
+ writeln!(
+ w,
+ "\n.debug_tu_index: version = {}, sections = {}, units = {}, slots = {}",
+ dwp.tu_index.version(),
+ dwp.tu_index.section_count(),
+ dwp.tu_index.unit_count(),
+ dwp.tu_index.slot_count(),
+ )?;
+ for i in 1..=dwp.tu_index.unit_count() {
+ writeln!(w, "\nTU index {}", i)?;
+ dump_dwp_sections(
+ w,
+ &dwp,
+ dwo_parent,
+ dwo_parent_units,
+ flags,
+ dwp.tu_index.sections(i)?,
+ )?;
+ }
+ }
+
+ Ok(())
+}
+
+fn dump_dwp_sections<R: Reader, W: Write + Send>(
+ w: &mut W,
+ dwp: &gimli::DwarfPackage<R>,
+ dwo_parent: &gimli::Dwarf<R>,
+ dwo_parent_units: Option<&HashMap<gimli::DwoId, gimli::Unit<R>>>,
+ flags: &Flags,
+ sections: gimli::UnitIndexSectionIterator<R>,
+) -> Result<()>
+where
+ R::Endian: Send + Sync,
+{
+ for section in sections.clone() {
+ writeln!(
+ w,
+ " {}: offset = 0x{:x}, size = 0x{:x}",
+ section.section.dwo_name().unwrap(),
+ section.offset,
+ section.size
+ )?;
+ }
+ let dwarf = dwp.sections(sections, dwo_parent)?;
+ if flags.info {
+ dump_info(w, &dwarf, dwo_parent_units, flags)?;
+ dump_types(w, &dwarf, dwo_parent_units, flags)?;
+ }
+ if flags.line {
+ dump_line(w, &dwarf)?;
+ }
+ Ok(())
+}
+
+fn dump_info<R: Reader, W: Write + Send>(
+ w: &mut W,
+ dwarf: &gimli::Dwarf<R>,
+ dwo_parent_units: Option<&HashMap<gimli::DwoId, gimli::Unit<R>>>,
+ flags: &Flags,
+) -> Result<()>
+where
+ R::Endian: Send + Sync,
+{
+ writeln!(w, "\n.debug_info")?;
+
+ let units = match dwarf.units().collect::<Vec<_>>() {
+ Ok(units) => units,
+ Err(err) => {
+ writeln_error(
+ w,
+ dwarf,
+ Error::GimliError(err),
+ "Failed to read unit headers",
+ )?;
+ return Ok(());
+ }
+ };
+ let process_unit = |header: UnitHeader<R>, buf: &mut Vec<u8>| -> Result<()> {
+ dump_unit(buf, header, dwarf, dwo_parent_units, flags)?;
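+ // If --match-units was given and this unit's output does not match,
+ // drop the buffered output so nothing is printed for it.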
+ if !flags
+ .match_units
+ .as_ref()
+ .map(|r| r.is_match(&buf))
+ .unwrap_or(true)
+ {
+ buf.clear();
+ }
+ Ok(())
+ };
+ // Don't use more than 16 cores even if available. No point in soaking hundreds
+ // of cores if you happen to have them.
+ parallel_output(w, 16, units, process_unit)
+}
+
+fn dump_types<R: Reader, W: Write>(
+ w: &mut W,
+ dwarf: &gimli::Dwarf<R>,
+ dwo_parent_units: Option<&HashMap<gimli::DwoId, gimli::Unit<R>>>,
+ flags: &Flags,
+) -> Result<()> {
+ writeln!(w, "\n.debug_types")?;
+
+ let mut iter = dwarf.type_units();
+ while let Some(header) = iter.next()? {
+ dump_unit(w, header, dwarf, dwo_parent_units, flags)?;
+ }
+ Ok(())
+}
+
+fn dump_unit<R: Reader, W: Write>(
+ w: &mut W,
+ header: UnitHeader<R>,
+ dwarf: &gimli::Dwarf<R>,
+ dwo_parent_units: Option<&HashMap<gimli::DwoId, gimli::Unit<R>>>,
+ flags: &Flags,
+) -> Result<()> {
+ write!(w, "\nUNIT<")?;
+ match header.offset() {
+ UnitSectionOffset::DebugInfoOffset(o) => {
+ write!(w, ".debug_info+0x{:08x}", o.0)?;
+ }
+ UnitSectionOffset::DebugTypesOffset(o) => {
+ write!(w, ".debug_types+0x{:08x}", o.0)?;
+ }
+ }
+ writeln!(w, ">: length = 0x{:x}, format = {:?}, version = {}, address_size = {}, abbrev_offset = 0x{:x}",
+ header.unit_length(),
+ header.format(),
+ header.version(),
+ header.address_size(),
+ header.debug_abbrev_offset().0,
+ )?;
+
+ match header.type_() {
+ UnitType::Compilation | UnitType::Partial => (),
+ UnitType::Type {
+ type_signature,
+ type_offset,
+ }
+ | UnitType::SplitType {
+ type_signature,
+ type_offset,
+ } => {
+ write!(w, " signature = ")?;
+ dump_type_signature(w, type_signature)?;
+ writeln!(w)?;
+ writeln!(w, " type_offset = 0x{:x}", type_offset.0,)?;
+ }
+ UnitType::Skeleton(dwo_id) | UnitType::SplitCompilation(dwo_id) => {
+ write!(w, " dwo_id = ")?;
+ writeln!(w, "0x{:016x}", dwo_id.0)?;
+ }
+ }
+
+ let mut unit = match dwarf.unit(header) {
+ Ok(unit) => unit,
+ Err(err) => {
+ writeln_error(w, dwarf, err.into(), "Failed to parse unit root entry")?;
+ return Ok(());
+ }
+ };
+
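+ // For split DWARF, copy attributes that are relocated in the parent
+ // object (e.g. low_pc and base offsets) from the matching skeleton unit.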
+ if let Some(dwo_parent_units) = dwo_parent_units {
+ if let Some(dwo_id) = unit.dwo_id {
+ if let Some(parent_unit) = dwo_parent_units.get(&dwo_id) {
+ unit.copy_relocated_attributes(parent_unit);
+ }
+ }
+ }
+
+ let entries_result = dump_entries(w, unit, dwarf, flags);
+ if let Err(err) = entries_result {
+ writeln_error(w, dwarf, err, "Failed to dump entries")?;
+ }
+ Ok(())
+}
+
+fn spaces(buf: &mut String, len: usize) -> &str {
+ while buf.len() < len {
+ buf.push(' ');
+ }
+ &buf[..len]
+}
+
+// " GOFF=0x{:08x}" adds exactly 16 spaces.
+const GOFF_SPACES: usize = 16;
+
+fn write_offset<R: Reader, W: Write>(
+ w: &mut W,
+ unit: &gimli::Unit<R>,
+ offset: gimli::UnitOffset<R::Offset>,
+ flags: &Flags,
+) -> Result<()> {
+ write!(w, "<0x{:08x}", offset.0)?;
+ if flags.goff {
+ let goff = match offset.to_unit_section_offset(unit) {
+ UnitSectionOffset::DebugInfoOffset(o) => o.0,
+ UnitSectionOffset::DebugTypesOffset(o) => o.0,
+ };
+ write!(w, " GOFF=0x{:08x}", goff)?;
+ }
+ write!(w, ">")?;
+ Ok(())
+}
+
+fn dump_entries<R: Reader, W: Write>(
+ w: &mut W,
+ unit: gimli::Unit<R>,
+ dwarf: &gimli::Dwarf<R>,
+ flags: &Flags,
+) -> Result<()> {
+ let mut spaces_buf = String::new();
+
+ let mut entries = unit.entries_raw(None)?;
+ while !entries.is_empty() {
+ let offset = entries.next_offset();
+ let depth = entries.next_depth();
+ let abbrev = entries.read_abbreviation()?;
+
+ let mut indent = if depth >= 0 {
+ depth as usize * 2 + 2
+ } else {
+ 2
+ };
+ write!(w, "<{}{}>", if depth < 10 { " " } else { "" }, depth)?;
+ write_offset(w, &unit, offset, flags)?;
+ writeln!(
+ w,
+ "{}{}",
+ spaces(&mut spaces_buf, indent),
+ abbrev.map(|x| x.tag()).unwrap_or(gimli::DW_TAG_null)
+ )?;
+
+ indent += 18;
+ if flags.goff {
+ indent += GOFF_SPACES;
+ }
+
+ for spec in abbrev.map(|x| x.attributes()).unwrap_or(&[]) {
+ let attr = entries.read_attribute(*spec)?;
+ w.write_all(spaces(&mut spaces_buf, indent).as_bytes())?;
+ if let Some(n) = attr.name().static_string() {
+ let right_padding = 27 - std::cmp::min(27, n.len());
+ write!(w, "{}{} ", n, spaces(&mut spaces_buf, right_padding))?;
+ } else {
+ write!(w, "{:27} ", attr.name())?;
+ }
+ if flags.raw {
+ writeln!(w, "{:?}", attr.raw_value())?;
+ } else {
+ match dump_attr_value(w, &attr, &unit, dwarf) {
+ Ok(_) => (),
+ Err(err) => writeln_error(w, dwarf, err, "Failed to dump attribute value")?,
+ };
+ }
+ }
+ }
+ Ok(())
+}
+
+fn dump_attr_value<R: Reader, W: Write>(
+ w: &mut W,
+ attr: &gimli::Attribute<R>,
+ unit: &gimli::Unit<R>,
+ dwarf: &gimli::Dwarf<R>,
+) -> Result<()> {
+ let value = attr.value();
+ match value {
+ gimli::AttributeValue::Addr(address) => {
+ writeln!(w, "0x{:08x}", address)?;
+ }
+ gimli::AttributeValue::Block(data) => {
+ for byte in data.to_slice()?.iter() {
+ write!(w, "{:02x}", byte)?;
+ }
+ writeln!(w)?;
+ }
+ gimli::AttributeValue::Data1(_)
+ | gimli::AttributeValue::Data2(_)
+ | gimli::AttributeValue::Data4(_)
+ | gimli::AttributeValue::Data8(_) => {
+ if let (Some(udata), Some(sdata)) = (attr.udata_value(), attr.sdata_value()) {
+ if sdata >= 0 {
+ writeln!(w, "{}", udata)?;
+ } else {
+ writeln!(w, "{} ({})", udata, sdata)?;
+ }
+ } else {
+ writeln!(w, "{:?}", value)?;
+ }
+ }
+ gimli::AttributeValue::Sdata(data) => {
+ match attr.name() {
+ gimli::DW_AT_data_member_location => {
+ writeln!(w, "{}", data)?;
+ }
+ _ => {
+ if data >= 0 {
+ writeln!(w, "0x{:08x}", data)?;
+ } else {
+ writeln!(w, "0x{:08x} ({})", data, data)?;
+ }
+ }
+ };
+ }
+ gimli::AttributeValue::Udata(data) => {
+ match attr.name() {
+ gimli::DW_AT_high_pc => {
+ writeln!(w, "<offset-from-lowpc>{}", data)?;
+ }
+ gimli::DW_AT_data_member_location => {
+ if let Some(sdata) = attr.sdata_value() {
+ // This is a DW_FORM_data* value.
+ // libdwarf-dwarfdump displays this as signed too.
+ if sdata >= 0 {
+ writeln!(w, "{}", data)?;
+ } else {
+ writeln!(w, "{} ({})", data, sdata)?;
+ }
+ } else {
+ writeln!(w, "{}", data)?;
+ }
+ }
+ gimli::DW_AT_lower_bound | gimli::DW_AT_upper_bound => {
+ writeln!(w, "{}", data)?;
+ }
+ _ => {
+ writeln!(w, "0x{:08x}", data)?;
+ }
+ };
+ }
+ gimli::AttributeValue::Exprloc(ref data) => {
+ if let gimli::AttributeValue::Exprloc(_) = attr.raw_value() {
+ write!(w, "len 0x{:04x}: ", data.0.len())?;
+ for byte in data.0.to_slice()?.iter() {
+ write!(w, "{:02x}", byte)?;
+ }
+ write!(w, ": ")?;
+ }
+ dump_exprloc(w, unit.encoding(), data)?;
+ writeln!(w)?;
+ }
+ gimli::AttributeValue::Flag(true) => {
+ writeln!(w, "yes")?;
+ }
+ gimli::AttributeValue::Flag(false) => {
+ writeln!(w, "no")?;
+ }
+ gimli::AttributeValue::SecOffset(offset) => {
+ writeln!(w, "0x{:08x}", offset)?;
+ }
+ gimli::AttributeValue::DebugAddrBase(base) => {
+ writeln!(w, "<.debug_addr+0x{:08x}>", base.0)?;
+ }
+ gimli::AttributeValue::DebugAddrIndex(index) => {
+ write!(w, "(indirect address, index {:#x}): ", index.0)?;
+ let address = dwarf.address(unit, index)?;
+ writeln!(w, "0x{:08x}", address)?;
+ }
+ gimli::AttributeValue::UnitRef(offset) => {
+ write!(w, "0x{:08x}", offset.0)?;
+ match offset.to_unit_section_offset(unit) {
+ UnitSectionOffset::DebugInfoOffset(goff) => {
+ write!(w, "<.debug_info+0x{:08x}>", goff.0)?;
+ }
+ UnitSectionOffset::DebugTypesOffset(goff) => {
+ write!(w, "<.debug_types+0x{:08x}>", goff.0)?;
+ }
+ }
+ writeln!(w)?;
+ }
+ gimli::AttributeValue::DebugInfoRef(offset) => {
+ writeln!(w, "<.debug_info+0x{:08x}>", offset.0)?;
+ }
+ gimli::AttributeValue::DebugInfoRefSup(offset) => {
+ writeln!(w, "<.debug_info(sup)+0x{:08x}>", offset.0)?;
+ }
+ gimli::AttributeValue::DebugLineRef(offset) => {
+ writeln!(w, "<.debug_line+0x{:08x}>", offset.0)?;
+ }
+ gimli::AttributeValue::LocationListsRef(offset) => {
+ dump_loc_list(w, offset, unit, dwarf)?;
+ }
+ gimli::AttributeValue::DebugLocListsBase(base) => {
+ writeln!(w, "<.debug_loclists+0x{:08x}>", base.0)?;
+ }
+ gimli::AttributeValue::DebugLocListsIndex(index) => {
+ write!(w, "(indirect location list, index {:#x}): ", index.0)?;
+ let offset = dwarf.locations_offset(unit, index)?;
+ dump_loc_list(w, offset, unit, dwarf)?;
+ }
+ gimli::AttributeValue::DebugMacinfoRef(offset) => {
+ writeln!(w, "<.debug_macinfo+0x{:08x}>", offset.0)?;
+ }
+ gimli::AttributeValue::DebugMacroRef(offset) => {
+ writeln!(w, "<.debug_macro+0x{:08x}>", offset.0)?;
+ }
+ gimli::AttributeValue::RangeListsRef(offset) => {
+ let offset = dwarf.ranges_offset_from_raw(unit, offset);
+ dump_range_list(w, offset, unit, dwarf)?;
+ }
+ gimli::AttributeValue::DebugRngListsBase(base) => {
+ writeln!(w, "<.debug_rnglists+0x{:08x}>", base.0)?;
+ }
+ gimli::AttributeValue::DebugRngListsIndex(index) => {
+ write!(w, "(indirect range list, index {:#x}): ", index.0)?;
+ let offset = dwarf.ranges_offset(unit, index)?;
+ dump_range_list(w, offset, unit, dwarf)?;
+ }
+ gimli::AttributeValue::DebugTypesRef(signature) => {
+ dump_type_signature(w, signature)?;
+ writeln!(w, " <type signature>")?;
+ }
+ gimli::AttributeValue::DebugStrRef(offset) => {
+ if let Ok(s) = dwarf.debug_str.get_str(offset) {
+ writeln!(w, "{}", s.to_string_lossy()?)?;
+ } else {
+ writeln!(w, "<.debug_str+0x{:08x}>", offset.0)?;
+ }
+ }
+ gimli::AttributeValue::DebugStrRefSup(offset) => {
+ if let Some(s) = dwarf
+ .sup()
+ .and_then(|sup| sup.debug_str.get_str(offset).ok())
+ {
+ writeln!(w, "{}", s.to_string_lossy()?)?;
+ } else {
+ writeln!(w, "<.debug_str(sup)+0x{:08x}>", offset.0)?;
+ }
+ }
+ gimli::AttributeValue::DebugStrOffsetsBase(base) => {
+ writeln!(w, "<.debug_str_offsets+0x{:08x}>", base.0)?;
+ }
+ gimli::AttributeValue::DebugStrOffsetsIndex(index) => {
+ write!(w, "(indirect string, index {:#x}): ", index.0)?;
+ let offset = dwarf.debug_str_offsets.get_str_offset(
+ unit.encoding().format,
+ unit.str_offsets_base,
+ index,
+ )?;
+ if let Ok(s) = dwarf.debug_str.get_str(offset) {
+ writeln!(w, "{}", s.to_string_lossy()?)?;
+ } else {
+ writeln!(w, "<.debug_str+0x{:08x}>", offset.0)?;
+ }
+ }
+ gimli::AttributeValue::DebugLineStrRef(offset) => {
+ if let Ok(s) = dwarf.debug_line_str.get_str(offset) {
+ writeln!(w, "{}", s.to_string_lossy()?)?;
+ } else {
+ writeln!(w, "<.debug_line_str=0x{:08x}>", offset.0)?;
+ }
+ }
+ gimli::AttributeValue::String(s) => {
+ writeln!(w, "{}", s.to_string_lossy()?)?;
+ }
+ gimli::AttributeValue::Encoding(value) => {
+ writeln!(w, "{}", value)?;
+ }
+ gimli::AttributeValue::DecimalSign(value) => {
+ writeln!(w, "{}", value)?;
+ }
+ gimli::AttributeValue::Endianity(value) => {
+ writeln!(w, "{}", value)?;
+ }
+ gimli::AttributeValue::Accessibility(value) => {
+ writeln!(w, "{}", value)?;
+ }
+ gimli::AttributeValue::Visibility(value) => {
+ writeln!(w, "{}", value)?;
+ }
+ gimli::AttributeValue::Virtuality(value) => {
+ writeln!(w, "{}", value)?;
+ }
+ gimli::AttributeValue::Language(value) => {
+ writeln!(w, "{}", value)?;
+ }
+ gimli::AttributeValue::AddressClass(value) => {
+ writeln!(w, "{}", value)?;
+ }
+ gimli::AttributeValue::IdentifierCase(value) => {
+ writeln!(w, "{}", value)?;
+ }
+ gimli::AttributeValue::CallingConvention(value) => {
+ writeln!(w, "{}", value)?;
+ }
+ gimli::AttributeValue::Inline(value) => {
+ writeln!(w, "{}", value)?;
+ }
+ gimli::AttributeValue::Ordering(value) => {
+ writeln!(w, "{}", value)?;
+ }
+ gimli::AttributeValue::FileIndex(value) => {
+ write!(w, "0x{:08x}", value)?;
+ dump_file_index(w, value, unit, dwarf)?;
+ writeln!(w)?;
+ }
+ gimli::AttributeValue::DwoId(value) => {
+ writeln!(w, "0x{:016x}", value.0)?;
+ }
+ }
+
+ Ok(())
+}
+
+fn dump_type_signature<W: Write>(w: &mut W, signature: gimli::DebugTypeSignature) -> Result<()> {
+ write!(w, "0x{:016x}", signature.0)?;
+ Ok(())
+}
+
+fn dump_file_index<R: Reader, W: Write>(
+ w: &mut W,
+ file_index: u64,
+ unit: &gimli::Unit<R>,
+ dwarf: &gimli::Dwarf<R>,
+) -> Result<()> {
+ if file_index == 0 && unit.header.version() <= 4 {
+ return Ok(());
+ }
+ let header = match unit.line_program {
+ Some(ref program) => program.header(),
+ None => return Ok(()),
+ };
+ let file = match header.file(file_index) {
+ Some(file) => file,
+ None => {
+ writeln!(w, "Unable to get header for file {}", file_index)?;
+ return Ok(());
+ }
+ };
+ write!(w, " ")?;
+ if let Some(directory) = file.directory(header) {
+ let directory = dwarf.attr_string(unit, directory)?;
+ let directory = directory.to_string_lossy()?;
+ if file.directory_index() != 0 && !directory.starts_with('/') {
+ if let Some(ref comp_dir) = unit.comp_dir {
+ write!(w, "{}/", comp_dir.to_string_lossy()?,)?;
+ }
+ }
+ write!(w, "{}/", directory)?;
+ }
+ write!(
+ w,
+ "{}",
+ dwarf
+ .attr_string(unit, file.path_name())?
+ .to_string_lossy()?
+ )?;
+ Ok(())
+}
+
+fn dump_exprloc<R: Reader, W: Write>(
+ w: &mut W,
+ encoding: gimli::Encoding,
+ data: &gimli::Expression<R>,
+) -> Result<()> {
+ let mut pc = data.0.clone();
+ let mut space = false;
+ while pc.len() != 0 {
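+ // Keep a copy of the reader so `dump_op` can re-read the opcode byte
+ // that `Operation::parse` consumes.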
+ let pc_clone = pc.clone();
+ match gimli::Operation::parse(&mut pc, encoding) {
+ Ok(op) => {
+ if space {
+ write!(w, " ")?;
+ } else {
+ space = true;
+ }
+ dump_op(w, encoding, pc_clone, op)?;
+ }
+ Err(gimli::Error::InvalidExpression(op)) => {
+ writeln!(w, "WARNING: unsupported operation 0x{:02x}", op.0)?;
+ return Ok(());
+ }
+ Err(gimli::Error::UnsupportedRegister(register)) => {
+ writeln!(w, "WARNING: unsupported register {}", register)?;
+ return Ok(());
+ }
+ Err(gimli::Error::UnexpectedEof(_)) => {
+ writeln!(w, "WARNING: truncated or malformed expression")?;
+ return Ok(());
+ }
+ Err(e) => {
+ writeln!(w, "WARNING: unexpected operation parse error: {}", e)?;
+ return Ok(());
+ }
+ }
+ }
+ Ok(())
+}
+
+fn dump_op<R: Reader, W: Write>(
+ w: &mut W,
+ encoding: gimli::Encoding,
+ mut pc: R,
+ op: gimli::Operation<R>,
+) -> Result<()> {
+ let dwop = gimli::DwOp(pc.read_u8()?);
+ write!(w, "{}", dwop)?;
+ match op {
+ gimli::Operation::Deref {
+ base_type, size, ..
+ } => {
+ if dwop == gimli::DW_OP_deref_size || dwop == gimli::DW_OP_xderef_size {
+ write!(w, " {}", size)?;
+ }
+ if base_type != UnitOffset(0) {
+ write!(w, " type 0x{:08x}", base_type.0)?;
+ }
+ }
+ gimli::Operation::Pick { index } => {
+ if dwop == gimli::DW_OP_pick {
+ write!(w, " {}", index)?;
+ }
+ }
+ gimli::Operation::PlusConstant { value } => {
+ write!(w, " {}", value as i64)?;
+ }
+ gimli::Operation::Bra { target } => {
+ write!(w, " {}", target)?;
+ }
+ gimli::Operation::Skip { target } => {
+ write!(w, " {}", target)?;
+ }
+ gimli::Operation::SignedConstant { value } => match dwop {
+ gimli::DW_OP_const1s
+ | gimli::DW_OP_const2s
+ | gimli::DW_OP_const4s
+ | gimli::DW_OP_const8s
+ | gimli::DW_OP_consts => {
+ write!(w, " {}", value)?;
+ }
+ _ => {}
+ },
+ gimli::Operation::UnsignedConstant { value } => match dwop {
+ gimli::DW_OP_const1u
+ | gimli::DW_OP_const2u
+ | gimli::DW_OP_const4u
+ | gimli::DW_OP_const8u
+ | gimli::DW_OP_constu => {
+ write!(w, " {}", value)?;
+ }
+ _ => {
+ // These have the value encoded in the operation, e.g. DW_OP_lit0.
+ }
+ },
+ gimli::Operation::Register { register } => {
+ if dwop == gimli::DW_OP_regx {
+ write!(w, " {}", register.0)?;
+ }
+ }
+ gimli::Operation::RegisterOffset {
+ register,
+ offset,
+ base_type,
+ } => {
+ if dwop >= gimli::DW_OP_breg0 && dwop <= gimli::DW_OP_breg31 {
+ write!(w, "{:+}", offset)?;
+ } else {
+ write!(w, " {}", register.0)?;
+ if offset != 0 {
+ write!(w, "{:+}", offset)?;
+ }
+ if base_type != UnitOffset(0) {
+ write!(w, " type 0x{:08x}", base_type.0)?;
+ }
+ }
+ }
+ gimli::Operation::FrameOffset { offset } => {
+ write!(w, " {}", offset)?;
+ }
+ gimli::Operation::Call { offset } => match offset {
+ gimli::DieReference::UnitRef(gimli::UnitOffset(offset)) => {
+ write!(w, " 0x{:08x}", offset)?;
+ }
+ gimli::DieReference::DebugInfoRef(gimli::DebugInfoOffset(offset)) => {
+ write!(w, " 0x{:08x}", offset)?;
+ }
+ },
+ gimli::Operation::Piece {
+ size_in_bits,
+ bit_offset: None,
+ } => {
+ write!(w, " {}", size_in_bits / 8)?;
+ }
+ gimli::Operation::Piece {
+ size_in_bits,
+ bit_offset: Some(bit_offset),
+ } => {
+ write!(w, " 0x{:08x} offset 0x{:08x}", size_in_bits, bit_offset)?;
+ }
+ gimli::Operation::ImplicitValue { data } => {
+ let data = data.to_slice()?;
+ write!(w, " 0x{:08x} contents 0x", data.len())?;
+ for byte in data.iter() {
+ write!(w, "{:02x}", byte)?;
+ }
+ }
+ gimli::Operation::ImplicitPointer { value, byte_offset } => {
+ write!(w, " 0x{:08x} {}", value.0, byte_offset)?;
+ }
+ gimli::Operation::EntryValue { expression } => {
+ write!(w, "(")?;
+ dump_exprloc(w, encoding, &gimli::Expression(expression))?;
+ write!(w, ")")?;
+ }
+ gimli::Operation::ParameterRef { offset } => {
+ write!(w, " 0x{:08x}", offset.0)?;
+ }
+ gimli::Operation::Address { address } => {
+ write!(w, " 0x{:08x}", address)?;
+ }
+ gimli::Operation::AddressIndex { index } => {
+ write!(w, " 0x{:08x}", index.0)?;
+ }
+ gimli::Operation::ConstantIndex { index } => {
+ write!(w, " 0x{:08x}", index.0)?;
+ }
+ gimli::Operation::TypedLiteral { base_type, value } => {
+ write!(w, " type 0x{:08x} contents 0x", base_type.0)?;
+ for byte in value.to_slice()?.iter() {
+ write!(w, "{:02x}", byte)?;
+ }
+ }
+ gimli::Operation::Convert { base_type } => {
+ write!(w, " type 0x{:08x}", base_type.0)?;
+ }
+ gimli::Operation::Reinterpret { base_type } => {
+ write!(w, " type 0x{:08x}", base_type.0)?;
+ }
+ gimli::Operation::WasmLocal { index }
+ | gimli::Operation::WasmGlobal { index }
+ | gimli::Operation::WasmStack { index } => {
+ let wasmop = pc.read_u8()?;
+ write!(w, " 0x{:x} 0x{:x}", wasmop, index)?;
+ }
+ gimli::Operation::Drop
+ | gimli::Operation::Swap
+ | gimli::Operation::Rot
+ | gimli::Operation::Abs
+ | gimli::Operation::And
+ | gimli::Operation::Div
+ | gimli::Operation::Minus
+ | gimli::Operation::Mod
+ | gimli::Operation::Mul
+ | gimli::Operation::Neg
+ | gimli::Operation::Not
+ | gimli::Operation::Or
+ | gimli::Operation::Plus
+ | gimli::Operation::Shl
+ | gimli::Operation::Shr
+ | gimli::Operation::Shra
+ | gimli::Operation::Xor
+ | gimli::Operation::Eq
+ | gimli::Operation::Ge
+ | gimli::Operation::Gt
+ | gimli::Operation::Le
+ | gimli::Operation::Lt
+ | gimli::Operation::Ne
+ | gimli::Operation::Nop
+ | gimli::Operation::PushObjectAddress
+ | gimli::Operation::TLS
+ | gimli::Operation::CallFrameCFA
+ | gimli::Operation::StackValue => {}
+ };
+ Ok(())
+}
+
+fn dump_loc_list<R: Reader, W: Write>(
+ w: &mut W,
+ offset: gimli::LocationListsOffset<R::Offset>,
+ unit: &gimli::Unit<R>,
+ dwarf: &gimli::Dwarf<R>,
+) -> Result<()> {
+ let raw_locations = dwarf.raw_locations(unit, offset)?;
+ let raw_locations: Vec<_> = raw_locations.collect()?;
+ let mut locations = dwarf.locations(unit, offset)?;
+ writeln!(
+ w,
+ "<loclist at {}+0x{:08x} with {} entries>",
+ if unit.encoding().version < 5 {
+ ".debug_loc"
+ } else {
+ ".debug_loclists"
+ },
+ offset.0,
+ raw_locations.len()
+ )?;
+ for (i, raw) in raw_locations.iter().enumerate() {
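+ // Print each raw entry; for range-bearing entries the matching converted
+ // location is consumed from `locations` to show the resolved addresses.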
+ write!(w, "\t\t\t[{:2}]", i)?;
+ match *raw {
+ gimli::RawLocListEntry::BaseAddress { addr } => {
+ writeln!(w, "<new base address 0x{:08x}>", addr)?;
+ }
+ gimli::RawLocListEntry::BaseAddressx { addr } => {
+ let addr_val = dwarf.address(unit, addr)?;
+ writeln!(w, "<new base addressx [{}]0x{:08x}>", addr.0, addr_val)?;
+ }
+ gimli::RawLocListEntry::StartxEndx {
+ begin,
+ end,
+ ref data,
+ } => {
+ let begin_val = dwarf.address(unit, begin)?;
+ let end_val = dwarf.address(unit, end)?;
+ let location = locations.next()?.unwrap();
+ write!(
+ w,
+ "<startx-endx \
+ low-off: [{}]0x{:08x} addr 0x{:08x} \
+ high-off: [{}]0x{:08x} addr 0x{:08x}>",
+ begin.0, begin_val, location.range.begin, end.0, end_val, location.range.end
+ )?;
+ dump_exprloc(w, unit.encoding(), data)?;
+ writeln!(w)?;
+ }
+ gimli::RawLocListEntry::StartxLength {
+ begin,
+ length,
+ ref data,
+ } => {
+ let begin_val = dwarf.address(unit, begin)?;
+ let location = locations.next()?.unwrap();
+ write!(
+ w,
+ "<start-length \
+ low-off: [{}]0x{:08x} addr 0x{:08x} \
+ high-off: 0x{:08x} addr 0x{:08x}>",
+ begin.0, begin_val, location.range.begin, length, location.range.end
+ )?;
+ dump_exprloc(w, unit.encoding(), data)?;
+ writeln!(w)?;
+ }
+ gimli::RawLocListEntry::AddressOrOffsetPair {
+ begin,
+ end,
+ ref data,
+ }
+ | gimli::RawLocListEntry::OffsetPair {
+ begin,
+ end,
+ ref data,
+ } => {
+ let location = locations.next()?.unwrap();
+ write!(
+ w,
+ "<offset pair \
+ low-off: 0x{:08x} addr 0x{:08x} \
+ high-off: 0x{:08x} addr 0x{:08x}>",
+ begin, location.range.begin, end, location.range.end
+ )?;
+ dump_exprloc(w, unit.encoding(), data)?;
+ writeln!(w)?;
+ }
+ gimli::RawLocListEntry::DefaultLocation { ref data } => {
+ write!(w, "<default location>")?;
+ dump_exprloc(w, unit.encoding(), data)?;
+ writeln!(w)?;
+ }
+ gimli::RawLocListEntry::StartEnd {
+ begin,
+ end,
+ ref data,
+ } => {
+ let location = locations.next()?.unwrap();
+ write!(
+ w,
+ "<start-end \
+ low-off: 0x{:08x} addr 0x{:08x} \
+ high-off: 0x{:08x} addr 0x{:08x}>",
+ begin, location.range.begin, end, location.range.end
+ )?;
+ dump_exprloc(w, unit.encoding(), data)?;
+ writeln!(w)?;
+ }
+ gimli::RawLocListEntry::StartLength {
+ begin,
+ length,
+ ref data,
+ } => {
+ let location = locations.next()?.unwrap();
+ write!(
+ w,
+ "<start-length \
+ low-off: 0x{:08x} addr 0x{:08x} \
+ high-off: 0x{:08x} addr 0x{:08x}>",
+ begin, location.range.begin, length, location.range.end
+ )?;
+ dump_exprloc(w, unit.encoding(), data)?;
+ writeln!(w)?;
+ }
+ };
+ }
+ Ok(())
+}
+
+fn dump_range_list<R: Reader, W: Write>(
+ w: &mut W,
+ offset: gimli::RangeListsOffset<R::Offset>,
+ unit: &gimli::Unit<R>,
+ dwarf: &gimli::Dwarf<R>,
+) -> Result<()> {
+ let raw_ranges = dwarf.raw_ranges(unit, offset)?;
+ let raw_ranges: Vec<_> = raw_ranges.collect()?;
+ let mut ranges = dwarf.ranges(unit, offset)?;
+ writeln!(
+ w,
+ "<rnglist at {}+0x{:08x} with {} entries>",
+ if unit.encoding().version < 5 {
+ ".debug_ranges"
+ } else {
+ ".debug_rnglists"
+ },
+ offset.0,
+ raw_ranges.len()
+ )?;
+ for (i, raw) in raw_ranges.iter().enumerate() {
+ write!(w, "\t\t\t[{:2}] ", i)?;
+ match *raw {
+ gimli::RawRngListEntry::AddressOrOffsetPair { begin, end } => {
+ let range = ranges.next()?.unwrap();
+ writeln!(
+ w,
+ "<address pair \
+ low-off: 0x{:08x} addr 0x{:08x} \
+ high-off: 0x{:08x} addr 0x{:08x}>",
+ begin, range.begin, end, range.end
+ )?;
+ }
+ gimli::RawRngListEntry::BaseAddress { addr } => {
+ writeln!(w, "<new base address 0x{:08x}>", addr)?;
+ }
+ gimli::RawRngListEntry::BaseAddressx { addr } => {
+ let addr_val = dwarf.address(unit, addr)?;
+ writeln!(w, "<new base addressx [{}]0x{:08x}>", addr.0, addr_val)?;
+ }
+ gimli::RawRngListEntry::StartxEndx { begin, end } => {
+ let begin_val = dwarf.address(unit, begin)?;
+ let end_val = dwarf.address(unit, end)?;
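+ // Empty ranges are skipped by the converted iterator, so synthesize
+ // the entry here instead of consuming one from `ranges`.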
+ let range = if begin_val == end_val {
+ gimli::Range {
+ begin: begin_val,
+ end: end_val,
+ }
+ } else {
+ ranges.next()?.unwrap()
+ };
+ writeln!(
+ w,
+ "<startx-endx \
+ low-off: [{}]0x{:08x} addr 0x{:08x} \
+ high-off: [{}]0x{:08x} addr 0x{:08x}>",
+ begin.0, begin_val, range.begin, end.0, end_val, range.end
+ )?;
+ }
+ gimli::RawRngListEntry::StartxLength { begin, length } => {
+ let begin_val = dwarf.address(unit, begin)?;
+ let range = ranges.next()?.unwrap();
+ writeln!(
+ w,
+ "<startx-length \
+ low-off: [{}]0x{:08x} addr 0x{:08x} \
+ high-off: 0x{:08x} addr 0x{:08x}>",
+ begin.0, begin_val, range.begin, length, range.end
+ )?;
+ }
+ gimli::RawRngListEntry::OffsetPair { begin, end } => {
+ let range = ranges.next()?.unwrap();
+ writeln!(
+ w,
+ "<offset pair \
+ low-off: 0x{:08x} addr 0x{:08x} \
+ high-off: 0x{:08x} addr 0x{:08x}>",
+ begin, range.begin, end, range.end
+ )?;
+ }
+ gimli::RawRngListEntry::StartEnd { begin, end } => {
+ let range = if begin == end {
+ gimli::Range { begin, end }
+ } else {
+ ranges.next()?.unwrap()
+ };
+ writeln!(
+ w,
+ "<start-end \
+ low-off: 0x{:08x} addr 0x{:08x} \
+ high-off: 0x{:08x} addr 0x{:08x}>",
+ begin, range.begin, end, range.end
+ )?;
+ }
+ gimli::RawRngListEntry::StartLength { begin, length } => {
+ let range = ranges.next()?.unwrap();
+ writeln!(
+ w,
+ "<start-length \
+ low-off: 0x{:08x} addr 0x{:08x} \
+ high-off: 0x{:08x} addr 0x{:08x}>",
+ begin, range.begin, length, range.end
+ )?;
+ }
+ };
+ }
+ Ok(())
+}
+
+fn dump_line<R: Reader, W: Write>(w: &mut W, dwarf: &gimli::Dwarf<R>) -> Result<()> {
+ let mut iter = dwarf.units();
+ while let Some(header) = iter.next()? {
+ writeln!(
+ w,
+ "\n.debug_line: line number info for unit at .debug_info offset 0x{:08x}",
+ header.offset().as_debug_info_offset().unwrap().0
+ )?;
+ let unit = match dwarf.unit(header) {
+ Ok(unit) => unit,
+ Err(err) => {
+ writeln_error(
+ w,
+ dwarf,
+ err.into(),
+ "Failed to parse unit root entry for dump_line",
+ )?;
+ continue;
+ }
+ };
+ match dump_line_program(w, &unit, dwarf) {
+ Ok(_) => (),
+ Err(Error::IoError) => return Err(Error::IoError),
+ Err(err) => writeln_error(w, dwarf, err, "Failed to dump line program")?,
+ }
+ }
+ Ok(())
+}
+
+fn dump_line_program<R: Reader, W: Write>(
+ w: &mut W,
+ unit: &gimli::Unit<R>,
+ dwarf: &gimli::Dwarf<R>,
+) -> Result<()> {
+ if let Some(program) = unit.line_program.clone() {
+ {
+ let header = program.header();
+ writeln!(w)?;
+ writeln!(
+ w,
+ "Offset: 0x{:x}",
+ header.offset().0
+ )?;
+ writeln!(
+ w,
+ "Length: {}",
+ header.unit_length()
+ )?;
+ writeln!(
+ w,
+ "DWARF version: {}",
+ header.version()
+ )?;
+ writeln!(
+ w,
+ "Address size: {}",
+ header.address_size()
+ )?;
+ writeln!(
+ w,
+ "Prologue length: {}",
+ header.header_length()
+ )?;
+ writeln!(
+ w,
+ "Minimum instruction length: {}",
+ header.minimum_instruction_length()
+ )?;
+ writeln!(
+ w,
+ "Maximum operations per instruction: {}",
+ header.maximum_operations_per_instruction()
+ )?;
+ writeln!(
+ w,
+ "Default is_stmt: {}",
+ header.default_is_stmt()
+ )?;
+ writeln!(
+ w,
+ "Line base: {}",
+ header.line_base()
+ )?;
+ writeln!(
+ w,
+ "Line range: {}",
+ header.line_range()
+ )?;
+ writeln!(
+ w,
+ "Opcode base: {}",
+ header.opcode_base()
+ )?;
+
+ writeln!(w)?;
+ writeln!(w, "Opcodes:")?;
+ for (i, length) in header
+ .standard_opcode_lengths()
+ .to_slice()?
+ .iter()
+ .enumerate()
+ {
+ writeln!(w, " Opcode {} has {} args", i + 1, length)?;
+ }
+
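+ // DWARF 5 numbers directory and file entries from 0; earlier versions
+ // start at 1.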
+ let base = if header.version() >= 5 { 0 } else { 1 };
+ writeln!(w)?;
+ writeln!(w, "The Directory Table:")?;
+ for (i, dir) in header.include_directories().iter().enumerate() {
+ writeln!(
+ w,
+ " {} {}",
+ base + i,
+ dwarf.attr_string(unit, dir.clone())?.to_string_lossy()?
+ )?;
+ }
+
+ writeln!(w)?;
+ writeln!(w, "The File Name Table")?;
+ write!(w, " Entry\tDir\tTime\tSize")?;
+ if header.file_has_md5() {
+ write!(w, "\tMD5\t\t\t\t")?;
+ }
+ writeln!(w, "\tName")?;
+ for (i, file) in header.file_names().iter().enumerate() {
+ write!(
+ w,
+ " {}\t{}\t{}\t{}",
+ base + i,
+ file.directory_index(),
+ file.timestamp(),
+ file.size(),
+ )?;
+ if header.file_has_md5() {
+ let md5 = file.md5();
+ write!(w, "\t")?;
+ for i in 0..16 {
+ write!(w, "{:02X}", md5[i])?;
+ }
+ }
+ writeln!(
+ w,
+ "\t{}",
+ dwarf
+ .attr_string(unit, file.path_name())?
+ .to_string_lossy()?
+ )?;
+ }
+
+ writeln!(w)?;
+ writeln!(w, "Line Number Instructions:")?;
+ let mut instructions = header.instructions();
+ while let Some(instruction) = instructions.next_instruction(header)? {
+ writeln!(w, " {}", instruction)?;
+ }
+
+ writeln!(w)?;
+ writeln!(w, "Line Number Rows:")?;
+ writeln!(w, "<pc> [lno,col]")?;
+ }
+ let mut rows = program.rows();
+ let mut file_index = std::u64::MAX;
+ while let Some((header, row)) = rows.next_row()? {
+ let line = match row.line() {
+ Some(line) => line.get(),
+ None => 0,
+ };
+ let column = match row.column() {
+ gimli::ColumnType::Column(column) => column.get(),
+ gimli::ColumnType::LeftEdge => 0,
+ };
+ write!(w, "0x{:08x} [{:4},{:2}]", row.address(), line, column)?;
+ if row.is_stmt() {
+ write!(w, " NS")?;
+ }
+ if row.basic_block() {
+ write!(w, " BB")?;
+ }
+ if row.end_sequence() {
+ write!(w, " ET")?;
+ }
+ if row.prologue_end() {
+ write!(w, " PE")?;
+ }
+ if row.epilogue_begin() {
+ write!(w, " EB")?;
+ }
+ if row.isa() != 0 {
+ write!(w, " IS={}", row.isa())?;
+ }
+ if row.discriminator() != 0 {
+ write!(w, " DI={}", row.discriminator())?;
+ }
+ if file_index != row.file_index() {
+ file_index = row.file_index();
+ if let Some(file) = row.file(header) {
+ if let Some(directory) = file.directory(header) {
+ write!(
+ w,
+ " uri: \"{}/{}\"",
+ dwarf.attr_string(unit, directory)?.to_string_lossy()?,
+ dwarf
+ .attr_string(unit, file.path_name())?
+ .to_string_lossy()?
+ )?;
+ } else {
+ write!(
+ w,
+ " uri: \"{}\"",
+ dwarf
+ .attr_string(unit, file.path_name())?
+ .to_string_lossy()?
+ )?;
+ }
+ }
+ }
+ writeln!(w)?;
+ }
+ }
+ Ok(())
+}
+
+fn dump_pubnames<R: Reader, W: Write>(
+ w: &mut W,
+ debug_pubnames: &gimli::DebugPubNames<R>,
+ debug_info: &gimli::DebugInfo<R>,
+) -> Result<()> {
+ writeln!(w, "\n.debug_pubnames")?;
+
+ let mut cu_offset;
+ let mut cu_die_offset = gimli::DebugInfoOffset(0);
+ let mut prev_cu_offset = None;
+ let mut pubnames = debug_pubnames.items();
+ while let Some(pubname) = pubnames.next()? {
+ cu_offset = pubname.unit_header_offset();
+ if Some(cu_offset) != prev_cu_offset {
+ let cu = debug_info.header_from_offset(cu_offset)?;
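+ // The unit's root DIE begins immediately after the unit header.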
+ cu_die_offset = gimli::DebugInfoOffset(cu_offset.0 + cu.header_size());
+ prev_cu_offset = Some(cu_offset);
+ }
+ let die_in_cu = pubname.die_offset();
+ let die_in_sect = cu_offset.0 + die_in_cu.0;
+ writeln!(w,
+ "global die-in-sect 0x{:08x}, cu-in-sect 0x{:08x}, die-in-cu 0x{:08x}, cu-header-in-sect 0x{:08x} '{}'",
+ die_in_sect,
+ cu_die_offset.0,
+ die_in_cu.0,
+ cu_offset.0,
+ pubname.name().to_string_lossy()?
+ )?;
+ }
+ Ok(())
+}
+
+fn dump_pubtypes<R: Reader, W: Write>(
+ w: &mut W,
+ debug_pubtypes: &gimli::DebugPubTypes<R>,
+ debug_info: &gimli::DebugInfo<R>,
+) -> Result<()> {
+ writeln!(w, "\n.debug_pubtypes")?;
+
+ let mut cu_offset;
+ let mut cu_die_offset = gimli::DebugInfoOffset(0);
+ let mut prev_cu_offset = None;
+ let mut pubtypes = debug_pubtypes.items();
+ while let Some(pubtype) = pubtypes.next()? {
+ cu_offset = pubtype.unit_header_offset();
+ if Some(cu_offset) != prev_cu_offset {
+ let cu = debug_info.header_from_offset(cu_offset)?;
+ cu_die_offset = gimli::DebugInfoOffset(cu_offset.0 + cu.header_size());
+ prev_cu_offset = Some(cu_offset);
+ }
+ let die_in_cu = pubtype.die_offset();
+ let die_in_sect = cu_offset.0 + die_in_cu.0;
+ writeln!(w,
+ "pubtype die-in-sect 0x{:08x}, cu-in-sect 0x{:08x}, die-in-cu 0x{:08x}, cu-header-in-sect 0x{:08x} '{}'",
+ die_in_sect,
+ cu_die_offset.0,
+ die_in_cu.0,
+ cu_offset.0,
+ pubtype.name().to_string_lossy()?
+ )?;
+ }
+ Ok(())
+}
+
+fn dump_aranges<R: Reader, W: Write>(
+ w: &mut W,
+ debug_aranges: &gimli::DebugAranges<R>,
+) -> Result<()> {
+ writeln!(w, "\n.debug_aranges")?;
+
+ let mut headers = debug_aranges.headers();
+ while let Some(header) = headers.next()? {
+ writeln!(
+ w,
+ "Address Range Header: length = 0x{:08x}, version = 0x{:04x}, cu_offset = 0x{:08x}, addr_size = 0x{:02x}, seg_size = 0x{:02x}",
+ header.length(),
+ header.encoding().version,
+ header.debug_info_offset().0,
+ header.encoding().address_size,
+ header.segment_size(),
+ )?;
+ let mut aranges = header.entries();
+ while let Some(arange) = aranges.next()? {
+ let range = arange.range();
+ if let Some(segment) = arange.segment() {
+ writeln!(
+ w,
+ "[0x{:016x}, 0x{:016x}) segment 0x{:x}",
+ range.begin, range.end, segment
+ )?;
+ } else {
+ writeln!(w, "[0x{:016x}, 0x{:016x})", range.begin, range.end)?;
+ }
+ }
+ }
+ Ok(())
+}
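
The `dump_aranges` helper above drives the `.debug_aranges` API in this crate version (`headers()`, `entries()`, `range()`). A minimal standalone sketch of the same walk, assuming the raw section bytes are supplied on the command line and are little-endian (both assumptions, not part of the vendored example):

```rust
use std::{env, fs};

fn main() -> Result<(), gimli::Error> {
    // Hypothetical input: the path of a raw `.debug_aranges` section dump
    // (for example, one extracted with objcopy as described in the fixtures README).
    let path = env::args().nth(1).expect("path to a raw .debug_aranges section");
    let bytes = fs::read(path).expect("failed to read section");

    // Assume little-endian data; a real tool should detect this from the object file.
    let debug_aranges = gimli::DebugAranges::new(&bytes, gimli::LittleEndian);

    let mut headers = debug_aranges.headers();
    while let Some(header) = headers.next()? {
        println!("unit at .debug_info offset 0x{:08x}", header.debug_info_offset().0);
        let mut entries = header.entries();
        while let Some(arange) = entries.next()? {
            let range = arange.range();
            println!("  [0x{:016x}, 0x{:016x})", range.begin, range.end);
        }
    }
    Ok(())
}
```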
diff --git a/vendor/gimli-0.26.2/examples/simple.rs b/vendor/gimli-0.26.2/examples/simple.rs
new file mode 100644
index 000000000..7c958d45c
--- /dev/null
+++ b/vendor/gimli-0.26.2/examples/simple.rs
@@ -0,0 +1,67 @@
+//! A simple example of parsing `.debug_info`.
+
+use object::{Object, ObjectSection};
+use std::{borrow, env, fs};
+
+fn main() {
+ for path in env::args().skip(1) {
+ let file = fs::File::open(&path).unwrap();
+ let mmap = unsafe { memmap2::Mmap::map(&file).unwrap() };
+ let object = object::File::parse(&*mmap).unwrap();
+ let endian = if object.is_little_endian() {
+ gimli::RunTimeEndian::Little
+ } else {
+ gimli::RunTimeEndian::Big
+ };
+ dump_file(&object, endian).unwrap();
+ }
+}
+
+fn dump_file(object: &object::File, endian: gimli::RunTimeEndian) -> Result<(), gimli::Error> {
+ // Load a section and return as `Cow<[u8]>`.
+ let load_section = |id: gimli::SectionId| -> Result<borrow::Cow<[u8]>, gimli::Error> {
+ match object.section_by_name(id.name()) {
+ Some(ref section) => Ok(section
+ .uncompressed_data()
+ .unwrap_or(borrow::Cow::Borrowed(&[][..]))),
+ None => Ok(borrow::Cow::Borrowed(&[][..])),
+ }
+ };
+
+ // Load all of the sections.
+ let dwarf_cow = gimli::Dwarf::load(&load_section)?;
+
+ // Borrow a `Cow<[u8]>` to create an `EndianSlice`.
+ let borrow_section: &dyn for<'a> Fn(
+ &'a borrow::Cow<[u8]>,
+ ) -> gimli::EndianSlice<'a, gimli::RunTimeEndian> =
+ &|section| gimli::EndianSlice::new(&*section, endian);
+
+ // Create `EndianSlice`s for all of the sections.
+ let dwarf = dwarf_cow.borrow(&borrow_section);
+
+ // Iterate over the compilation units.
+ let mut iter = dwarf.units();
+ while let Some(header) = iter.next()? {
+ println!(
+ "Unit at <.debug_info+0x{:x}>",
+ header.offset().as_debug_info_offset().unwrap().0
+ );
+ let unit = dwarf.unit(header)?;
+
+ // Iterate over the Debugging Information Entries (DIEs) in the unit.
+ let mut depth = 0;
+ let mut entries = unit.entries();
+ while let Some((delta_depth, entry)) = entries.next_dfs()? {
+ depth += delta_depth;
+ println!("<{}><{:x}> {}", depth, entry.offset().0, entry.tag());
+
+ // Iterate over the attributes in the DIE.
+ let mut attrs = entry.attrs();
+ while let Some(attr) = attrs.next()? {
+ println!(" {}: {:?}", attr.name(), attr.value());
+ }
+ }
+ }
+ Ok(())
+}
diff --git a/vendor/gimli-0.26.2/examples/simple_line.rs b/vendor/gimli-0.26.2/examples/simple_line.rs
new file mode 100644
index 000000000..87b224cda
--- /dev/null
+++ b/vendor/gimli-0.26.2/examples/simple_line.rs
@@ -0,0 +1,106 @@
+//! A simple example of parsing `.debug_line`.
+
+use object::{Object, ObjectSection};
+use std::{borrow, env, fs, path};
+
+fn main() {
+ for path in env::args().skip(1) {
+ let file = fs::File::open(&path).unwrap();
+ let mmap = unsafe { memmap2::Mmap::map(&file).unwrap() };
+ let object = object::File::parse(&*mmap).unwrap();
+ let endian = if object.is_little_endian() {
+ gimli::RunTimeEndian::Little
+ } else {
+ gimli::RunTimeEndian::Big
+ };
+ dump_file(&object, endian).unwrap();
+ }
+}
+
+fn dump_file(object: &object::File, endian: gimli::RunTimeEndian) -> Result<(), gimli::Error> {
+ // Load a section and return as `Cow<[u8]>`.
+ let load_section = |id: gimli::SectionId| -> Result<borrow::Cow<[u8]>, gimli::Error> {
+ match object.section_by_name(id.name()) {
+ Some(ref section) => Ok(section
+ .uncompressed_data()
+ .unwrap_or(borrow::Cow::Borrowed(&[][..]))),
+ None => Ok(borrow::Cow::Borrowed(&[][..])),
+ }
+ };
+
+ // Load all of the sections.
+ let dwarf_cow = gimli::Dwarf::load(&load_section)?;
+
+ // Borrow a `Cow<[u8]>` to create an `EndianSlice`.
+ let borrow_section: &dyn for<'a> Fn(
+ &'a borrow::Cow<[u8]>,
+ ) -> gimli::EndianSlice<'a, gimli::RunTimeEndian> =
+ &|section| gimli::EndianSlice::new(&*section, endian);
+
+ // Create `EndianSlice`s for all of the sections.
+ let dwarf = dwarf_cow.borrow(&borrow_section);
+
+ // Iterate over the compilation units.
+ let mut iter = dwarf.units();
+ while let Some(header) = iter.next()? {
+ println!(
+ "Line number info for unit at <.debug_info+0x{:x}>",
+ header.offset().as_debug_info_offset().unwrap().0
+ );
+ let unit = dwarf.unit(header)?;
+
+ // Get the line program for the compilation unit.
+ if let Some(program) = unit.line_program.clone() {
+ let comp_dir = if let Some(ref dir) = unit.comp_dir {
+ path::PathBuf::from(dir.to_string_lossy().into_owned())
+ } else {
+ path::PathBuf::new()
+ };
+
+ // Iterate over the line program rows.
+ let mut rows = program.rows();
+ while let Some((header, row)) = rows.next_row()? {
+ if row.end_sequence() {
+ // End of sequence indicates a possible gap in addresses.
+ println!("{:x} end-sequence", row.address());
+ } else {
+ // Determine the path. Real applications should cache this for performance.
+ let mut path = path::PathBuf::new();
+ if let Some(file) = row.file(header) {
+ path = comp_dir.clone();
+
+ // The directory index 0 is defined to correspond to the compilation unit directory.
+ if file.directory_index() != 0 {
+ if let Some(dir) = file.directory(header) {
+ path.push(
+ dwarf.attr_string(&unit, dir)?.to_string_lossy().as_ref(),
+ );
+ }
+ }
+
+ path.push(
+ dwarf
+ .attr_string(&unit, file.path_name())?
+ .to_string_lossy()
+ .as_ref(),
+ );
+ }
+
+ // Determine line/column. DWARF line/column is never 0, so we use 0
+ // to mean "not present"; other applications may want to display this differently.
+ let line = match row.line() {
+ Some(line) => line.get(),
+ None => 0,
+ };
+ let column = match row.column() {
+ gimli::ColumnType::LeftEdge => 0,
+ gimli::ColumnType::Column(column) => column.get(),
+ };
+
+ println!("{:x} {}:{}:{}", row.address(), path.display(), line, column);
+ }
+ }
+ }
+ }
+ Ok(())
+}
diff --git a/vendor/gimli-0.26.2/fixtures/self/README.md b/vendor/gimli-0.26.2/fixtures/self/README.md
new file mode 100644
index 000000000..91053d9b4
--- /dev/null
+++ b/vendor/gimli-0.26.2/fixtures/self/README.md
@@ -0,0 +1,147 @@
+# What are these files?
+
+These files are the DWARF data generated for (an early version of) this
+library. Each file corresponds to a section from the built library's object
+file. By splitting the sections out to their own files, we don't need to worry
+about cross platform and cross object file format issues when running examples.
+
+# Updating and adding new sections
+
+## macOS
+
+Use `otool` to list the sections of a binary:
+
+```
+$ otool -l path/to/binary
+```
+
+You should see output similar to this:
+
+```
+Load command 0
+ cmd LC_SEGMENT_64
+ cmdsize 72
+ segname __PAGEZERO
+ vmaddr 0x0000000000000000
+ vmsize 0x0000000100000000
+ fileoff 0
+ filesize 0
+ maxprot 0x00000000
+ initprot 0x00000000
+ nsects 0
+ flags 0x0
+Load command 1
+ cmd LC_SEGMENT_64
+ cmdsize 712
+ segname __TEXT
+ vmaddr 0x0000000100000000
+ vmsize 0x00000000001b7000
+ fileoff 0
+ filesize 1798144
+ maxprot 0x00000007
+ initprot 0x00000005
+ nsects 8
+ flags 0x0
+Section
+ sectname __text
+ segname __TEXT
+ addr 0x0000000100000a50
+ size 0x0000000000170716
+ offset 2640
+ align 2^4 (16)
+ reloff 0
+ nreloc 0
+ flags 0x80000400
+ reserved1 0
+ reserved2 0
+```
+
+Etc.
+
+Find the `Section` entry of the section you'd like to isolate. For example, if
+you're looking for `eh_frame`, find an entry like this:
+
+```
+Section
+ sectname __eh_frame
+ segname __TEXT
+ addr 0x0000000100192f38
+ size 0x00000000000240c8
+ offset 1650488
+ align 2^3 (8)
+ reloff 0
+ nreloc 0
+ flags 0x00000000
+ reserved1 0
+ reserved2 0
+```
+
+Then use `dd` to copy `size` bytes starting from `offset`:
+
+```
+$ dd bs=1 skip=1650488 count=$(printf "%d" 0x00000000000240c8) if=path/to/binary of=fixtures/self/eh_frame
+```
+
+Finally, use `otool` and `hexdump` to verify that the isolated section has the
+same data as the section within the binary:
+
+```
+$ otool -s __TEXT __eh_frame path/to/binary | head
+path/to/binary:
+Contents of (__TEXT,__eh_frame) section
+0000000100192f38 14 00 00 00 00 00 00 00 01 7a 52 00 01 78 10 01
+0000000100192f48 10 0c 07 08 90 01 00 00 24 00 00 00 1c 00 00 00
+0000000100192f58 f8 da e6 ff ff ff ff ff 66 00 00 00 00 00 00 00
+0000000100192f68 00 41 0e 10 86 02 43 0d 06 00 00 00 00 00 00 00
+0000000100192f78 1c 00 00 00 00 00 00 00 01 7a 50 4c 52 00 01 78
+0000000100192f88 10 07 9b 9d 40 02 00 10 10 0c 07 08 90 01 00 00
+0000000100192f98 2c 00 00 00 24 00 00 00 20 db e6 ff ff ff ff ff
+0000000100192fa8 8d 00 00 00 00 00 00 00 08 37 e7 fd ff ff ff ff
+
+$ otool -s __TEXT __eh_frame path/to/binary | tail
+00000001001b6f68 9a 0a 00 00 00 00 00 00 00 41 0e 10 86 02 43 0d
+00000001001b6f78 06 50 83 07 8c 06 8d 05 8e 04 8f 03 00 00 00 00
+00000001001b6f88 24 00 00 00 7c 0e 00 00 30 a0 fb ff ff ff ff ff
+00000001001b6f98 15 00 00 00 00 00 00 00 00 41 0e 10 86 02 43 0d
+00000001001b6fa8 06 00 00 00 00 00 00 00 24 00 00 00 a4 0e 00 00
+00000001001b6fb8 28 a0 fb ff ff ff ff ff 1c 00 00 00 00 00 00 00
+00000001001b6fc8 00 41 0e 10 86 02 43 0d 06 00 00 00 00 00 00 00
+00000001001b6fd8 24 00 00 00 cc 0e 00 00 20 a0 fb ff ff ff ff ff
+00000001001b6fe8 66 01 00 00 00 00 00 00 00 41 0e 10 86 02 43 0d
+00000001001b6ff8 06 00 00 00 00 00 00 00
+```
+
+This should be the same, ignoring the leading offsets:
+
+```
+$ hexdump fixtures/self/eh_frame | head
+0000000 14 00 00 00 00 00 00 00 01 7a 52 00 01 78 10 01
+0000010 10 0c 07 08 90 01 00 00 24 00 00 00 1c 00 00 00
+0000020 f8 da e6 ff ff ff ff ff 66 00 00 00 00 00 00 00
+0000030 00 41 0e 10 86 02 43 0d 06 00 00 00 00 00 00 00
+0000040 1c 00 00 00 00 00 00 00 01 7a 50 4c 52 00 01 78
+0000050 10 07 9b 9d 40 02 00 10 10 0c 07 08 90 01 00 00
+0000060 2c 00 00 00 24 00 00 00 20 db e6 ff ff ff ff ff
+0000070 8d 00 00 00 00 00 00 00 08 37 e7 fd ff ff ff ff
+0000080 ff 41 0e 10 86 02 43 0d 06 00 00 00 00 00 00 00
+0000090 24 00 00 00 94 00 00 00 80 db e6 ff ff ff ff ff
+
+$ hexdump fixtures/self/eh_frame | tail
+0024040 06 50 83 07 8c 06 8d 05 8e 04 8f 03 00 00 00 00
+0024050 24 00 00 00 7c 0e 00 00 30 a0 fb ff ff ff ff ff
+0024060 15 00 00 00 00 00 00 00 00 41 0e 10 86 02 43 0d
+0024070 06 00 00 00 00 00 00 00 24 00 00 00 a4 0e 00 00
+0024080 28 a0 fb ff ff ff ff ff 1c 00 00 00 00 00 00 00
+0024090 00 41 0e 10 86 02 43 0d 06 00 00 00 00 00 00 00
+00240a0 24 00 00 00 cc 0e 00 00 20 a0 fb ff ff ff ff ff
+00240b0 66 01 00 00 00 00 00 00 00 41 0e 10 86 02 43 0d
+00240c0 06 00 00 00 00 00 00 00
+```
+
+## Linux
+
+Use `objcopy` to dump the section directly:
+
+```
+objcopy --dump-section .eh_frame=fixtures/self/eh_frame path/to/binary
+```
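
Once a section has been isolated as above, it can be fed straight into the parser without an object-file loader. A hedged sketch (the paths are the fixture files added in this diff; endianness is an assumption, as the fixtures come from a little-endian build):

```rust
use std::fs;

fn main() -> Result<(), gimli::Error> {
    // Load two of the fixture sections described above.
    let debug_info = fs::read("fixtures/self/debug_info").expect("read .debug_info fixture");
    let debug_abbrev = fs::read("fixtures/self/debug_abbrev").expect("read .debug_abbrev fixture");

    // Endianness is assumed to be little-endian here.
    let debug_info = gimli::DebugInfo::new(&debug_info, gimli::LittleEndian);
    let debug_abbrev = gimli::DebugAbbrev::new(&debug_abbrev, gimli::LittleEndian);

    // Walk the compilation unit headers in `.debug_info` and count DIEs per unit.
    let mut units = debug_info.units();
    while let Some(header) = units.next()? {
        let abbrevs = header.abbreviations(&debug_abbrev)?;
        let mut entries = header.entries(&abbrevs);
        let mut count = 0;
        while let Some((_delta_depth, _entry)) = entries.next_dfs()? {
            count += 1;
        }
        println!("unit (DWARF version {}): {} DIEs", header.version(), count);
    }
    Ok(())
}
```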
diff --git a/vendor/gimli-0.26.2/fixtures/self/debug_abbrev b/vendor/gimli-0.26.2/fixtures/self/debug_abbrev
new file mode 100644
index 000000000..809e61152
--- /dev/null
+++ b/vendor/gimli-0.26.2/fixtures/self/debug_abbrev
Binary files differ
diff --git a/vendor/gimli-0.26.2/fixtures/self/debug_aranges b/vendor/gimli-0.26.2/fixtures/self/debug_aranges
new file mode 100644
index 000000000..b2d983d78
--- /dev/null
+++ b/vendor/gimli-0.26.2/fixtures/self/debug_aranges
Binary files differ
diff --git a/vendor/gimli-0.26.2/fixtures/self/debug_info b/vendor/gimli-0.26.2/fixtures/self/debug_info
new file mode 100644
index 000000000..aa430a5ce
--- /dev/null
+++ b/vendor/gimli-0.26.2/fixtures/self/debug_info
Binary files differ
diff --git a/vendor/gimli-0.26.2/fixtures/self/debug_inlined b/vendor/gimli-0.26.2/fixtures/self/debug_inlined
new file mode 100644
index 000000000..949d18c93
--- /dev/null
+++ b/vendor/gimli-0.26.2/fixtures/self/debug_inlined
Binary files differ
diff --git a/vendor/gimli-0.26.2/fixtures/self/debug_line b/vendor/gimli-0.26.2/fixtures/self/debug_line
new file mode 100644
index 000000000..896a07364
--- /dev/null
+++ b/vendor/gimli-0.26.2/fixtures/self/debug_line
Binary files differ
diff --git a/vendor/gimli-0.26.2/fixtures/self/debug_loc b/vendor/gimli-0.26.2/fixtures/self/debug_loc
new file mode 100644
index 000000000..3fcdb32ba
--- /dev/null
+++ b/vendor/gimli-0.26.2/fixtures/self/debug_loc
Binary files differ
diff --git a/vendor/gimli-0.26.2/fixtures/self/debug_pubnames b/vendor/gimli-0.26.2/fixtures/self/debug_pubnames
new file mode 100644
index 000000000..bbcd62e24
--- /dev/null
+++ b/vendor/gimli-0.26.2/fixtures/self/debug_pubnames
Binary files differ
diff --git a/vendor/gimli-0.26.2/fixtures/self/debug_pubtypes b/vendor/gimli-0.26.2/fixtures/self/debug_pubtypes
new file mode 100644
index 000000000..68b4e0405
--- /dev/null
+++ b/vendor/gimli-0.26.2/fixtures/self/debug_pubtypes
Binary files differ
diff --git a/vendor/gimli-0.26.2/fixtures/self/debug_ranges b/vendor/gimli-0.26.2/fixtures/self/debug_ranges
new file mode 100644
index 000000000..a5f52ed4a
--- /dev/null
+++ b/vendor/gimli-0.26.2/fixtures/self/debug_ranges
Binary files differ
diff --git a/vendor/gimli-0.26.2/fixtures/self/debug_str b/vendor/gimli-0.26.2/fixtures/self/debug_str
new file mode 100644
index 000000000..da35ee574
--- /dev/null
+++ b/vendor/gimli-0.26.2/fixtures/self/debug_str
Binary files differ
diff --git a/vendor/gimli-0.26.2/fixtures/self/eh_frame b/vendor/gimli-0.26.2/fixtures/self/eh_frame
new file mode 100644
index 000000000..1d4df1a61
--- /dev/null
+++ b/vendor/gimli-0.26.2/fixtures/self/eh_frame
Binary files differ
diff --git a/vendor/gimli-0.26.2/fixtures/self/eh_frame_hdr b/vendor/gimli-0.26.2/fixtures/self/eh_frame_hdr
new file mode 100644
index 000000000..a590ba213
--- /dev/null
+++ b/vendor/gimli-0.26.2/fixtures/self/eh_frame_hdr
Binary files differ
diff --git a/vendor/gimli-0.26.2/rustfmt.toml b/vendor/gimli-0.26.2/rustfmt.toml
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/vendor/gimli-0.26.2/rustfmt.toml
diff --git a/vendor/gimli-0.26.2/src/arch.rs b/vendor/gimli-0.26.2/src/arch.rs
new file mode 100644
index 000000000..f5b2e5ed8
--- /dev/null
+++ b/vendor/gimli-0.26.2/src/arch.rs
@@ -0,0 +1,603 @@
+use crate::common::Register;
+
+macro_rules! registers {
+ ($struct_name:ident, { $($name:ident = ($val:expr, $disp:expr)),+ $(,)? }
+ $(, aliases { $($alias_name:ident = ($alias_val:expr, $alias_disp:expr)),+ $(,)? })?) => {
+ #[allow(missing_docs)]
+ impl $struct_name {
+ $(
+ pub const $name: Register = Register($val);
+ )+
+ $(
+ $(pub const $alias_name: Register = Register($alias_val);)+
+ )*
+ }
+
+ impl $struct_name {
+ /// The name of a register, or `None` if the register number is unknown.
+ ///
+ /// Only returns the primary name for registers that alias with others.
+ pub fn register_name(register: Register) -> Option<&'static str> {
+ match register {
+ $(
+ Self::$name => Some($disp),
+ )+
+ _ => return None,
+ }
+ }
+
+ /// Converts a register name into a register number.
+ pub fn name_to_register(value: &str) -> Option<Register> {
+ match value {
+ $(
+ $disp => Some(Self::$name),
+ )+
+ $(
+ $($alias_disp => Some(Self::$alias_name),)+
+ )*
+ _ => return None,
+ }
+ }
+ }
+ };
+}
+
+/// ARM architecture specific definitions.
+///
+/// See [DWARF for the ARM Architecture](https://developer.arm.com/documentation/ihi0040/c/).
+#[derive(Debug, Clone, Copy)]
+pub struct Arm;
+
+registers!(Arm, {
+ R0 = (0, "R0"),
+ R1 = (1, "R1"),
+ R2 = (2, "R2"),
+ R3 = (3, "R3"),
+ R4 = (4, "R4"),
+ R5 = (5, "R5"),
+ R6 = (6, "R6"),
+ R7 = (7, "R7"),
+ R8 = (8, "R8"),
+ R9 = (9, "R9"),
+ R10 = (10, "R10"),
+ R11 = (11, "R11"),
+ R12 = (12, "R12"),
+ R13 = (13, "R13"),
+ R14 = (14, "R14"),
+ R15 = (15, "R15"),
+
+ WCGR0 = (104, "wCGR0"),
+ WCGR1 = (105, "wCGR1"),
+ WCGR2 = (106, "wCGR2"),
+ WCGR3 = (107, "wCGR3"),
+ WCGR4 = (108, "wCGR4"),
+ WCGR5 = (109, "wCGR5"),
+ WCGR6 = (110, "wCGR6"),
+ WCGR7 = (111, "wCGR7"),
+
+ WR0 = (112, "wR0"),
+ WR1 = (113, "wR1"),
+ WR2 = (114, "wR2"),
+ WR3 = (115, "wR3"),
+ WR4 = (116, "wR4"),
+ WR5 = (117, "wR5"),
+ WR6 = (118, "wR6"),
+ WR7 = (119, "wR7"),
+ WR8 = (120, "wR8"),
+ WR9 = (121, "wR9"),
+ WR10 = (122, "wR10"),
+ WR11 = (123, "wR11"),
+ WR12 = (124, "wR12"),
+ WR13 = (125, "wR13"),
+ WR14 = (126, "wR14"),
+ WR15 = (127, "wR15"),
+
+ SPSR = (128, "SPSR"),
+ SPSR_FIQ = (129, "SPSR_FIQ"),
+ SPSR_IRQ = (130, "SPSR_IRQ"),
+ SPSR_ABT = (131, "SPSR_ABT"),
+ SPSR_UND = (132, "SPSR_UND"),
+ SPSR_SVC = (133, "SPSR_SVC"),
+
+ R8_USR = (144, "R8_USR"),
+ R9_USR = (145, "R9_USR"),
+ R10_USR = (146, "R10_USR"),
+ R11_USR = (147, "R11_USR"),
+ R12_USR = (148, "R12_USR"),
+ R13_USR = (149, "R13_USR"),
+ R14_USR = (150, "R14_USR"),
+
+ R8_FIQ = (151, "R8_FIQ"),
+ R9_FIQ = (152, "R9_FIQ"),
+ R10_FIQ = (153, "R10_FIQ"),
+ R11_FIQ = (154, "R11_FIQ"),
+ R12_FIQ = (155, "R12_FIQ"),
+ R13_FIQ = (156, "R13_FIQ"),
+ R14_FIQ = (157, "R14_FIQ"),
+
+ R13_IRQ = (158, "R13_IRQ"),
+ R14_IRQ = (159, "R14_IRQ"),
+
+ R13_ABT = (160, "R13_ABT"),
+ R14_ABT = (161, "R14_ABT"),
+
+ R13_UND = (162, "R13_UND"),
+ R14_UND = (163, "R14_UND"),
+
+ R13_SVC = (164, "R13_SVC"),
+ R14_SVC = (165, "R14_SVC"),
+
+ WC0 = (192, "wC0"),
+ WC1 = (193, "wC1"),
+ WC2 = (194, "wC2"),
+ WC3 = (195, "wC3"),
+ WC4 = (196, "wC4"),
+ WC5 = (197, "wC5"),
+ WC6 = (198, "wC6"),
+ WC7 = (199, "wC7"),
+
+ D0 = (256, "D0"),
+ D1 = (257, "D1"),
+ D2 = (258, "D2"),
+ D3 = (259, "D3"),
+ D4 = (260, "D4"),
+ D5 = (261, "D5"),
+ D6 = (262, "D6"),
+ D7 = (263, "D7"),
+ D8 = (264, "D8"),
+ D9 = (265, "D9"),
+ D10 = (266, "D10"),
+ D11 = (267, "D11"),
+ D12 = (268, "D12"),
+ D13 = (269, "D13"),
+ D14 = (270, "D14"),
+ D15 = (271, "D15"),
+ D16 = (272, "D16"),
+ D17 = (273, "D17"),
+ D18 = (274, "D18"),
+ D19 = (275, "D19"),
+ D20 = (276, "D20"),
+ D21 = (277, "D21"),
+ D22 = (278, "D22"),
+ D23 = (279, "D23"),
+ D24 = (280, "D24"),
+ D25 = (281, "D25"),
+ D26 = (282, "D26"),
+ D27 = (283, "D27"),
+ D28 = (284, "D28"),
+ D29 = (285, "D29"),
+ D30 = (286, "D30"),
+ D31 = (287, "D31"),
+},
+aliases {
+ SP = (13, "SP"),
+ LR = (14, "LR"),
+ PC = (15, "PC"),
+
+ ACC0 = (104, "ACC0"),
+ ACC1 = (105, "ACC1"),
+ ACC2 = (106, "ACC2"),
+ ACC3 = (107, "ACC3"),
+ ACC4 = (108, "ACC4"),
+ ACC5 = (109, "ACC5"),
+ ACC6 = (110, "ACC6"),
+ ACC7 = (111, "ACC7"),
+
+ S0 = (256, "S0"),
+ S1 = (256, "S1"),
+ S2 = (257, "S2"),
+ S3 = (257, "S3"),
+ S4 = (258, "S4"),
+ S5 = (258, "S5"),
+ S6 = (259, "S6"),
+ S7 = (259, "S7"),
+ S8 = (260, "S8"),
+ S9 = (260, "S9"),
+ S10 = (261, "S10"),
+ S11 = (261, "S11"),
+ S12 = (262, "S12"),
+ S13 = (262, "S13"),
+ S14 = (263, "S14"),
+ S15 = (263, "S15"),
+ S16 = (264, "S16"),
+ S17 = (264, "S17"),
+ S18 = (265, "S18"),
+ S19 = (265, "S19"),
+ S20 = (266, "S20"),
+ S21 = (266, "S21"),
+ S22 = (267, "S22"),
+ S23 = (267, "S23"),
+ S24 = (268, "S24"),
+ S25 = (268, "S25"),
+ S26 = (269, "S26"),
+ S27 = (269, "S27"),
+ S28 = (270, "S28"),
+ S29 = (270, "S29"),
+ S30 = (271, "S30"),
+ S31 = (271, "S31"),
+});
+
+/// ARM 64-bit (AArch64) architecture specific definitions.
+///
+/// See [DWARF for the ARM 64-bit Architecture](https://developer.arm.com/documentation/ihi0057/b/).
+#[derive(Debug, Clone, Copy)]
+pub struct AArch64;
+
+registers!(AArch64, {
+ X0 = (0, "X0"),
+ X1 = (1, "X1"),
+ X2 = (2, "X2"),
+ X3 = (3, "X3"),
+ X4 = (4, "X4"),
+ X5 = (5, "X5"),
+ X6 = (6, "X6"),
+ X7 = (7, "X7"),
+ X8 = (8, "X8"),
+ X9 = (9, "X9"),
+ X10 = (10, "X10"),
+ X11 = (11, "X11"),
+ X12 = (12, "X12"),
+ X13 = (13, "X13"),
+ X14 = (14, "X14"),
+ X15 = (15, "X15"),
+ X16 = (16, "X16"),
+ X17 = (17, "X17"),
+ X18 = (18, "X18"),
+ X19 = (19, "X19"),
+ X20 = (20, "X20"),
+ X21 = (21, "X21"),
+ X22 = (22, "X22"),
+ X23 = (23, "X23"),
+ X24 = (24, "X24"),
+ X25 = (25, "X25"),
+ X26 = (26, "X26"),
+ X27 = (27, "X27"),
+ X28 = (28, "X28"),
+ X29 = (29, "X29"),
+ X30 = (30, "X30"),
+ SP = (31, "SP"),
+
+ V0 = (64, "V0"),
+ V1 = (65, "V1"),
+ V2 = (66, "V2"),
+ V3 = (67, "V3"),
+ V4 = (68, "V4"),
+ V5 = (69, "V5"),
+ V6 = (70, "V6"),
+ V7 = (71, "V7"),
+ V8 = (72, "V8"),
+ V9 = (73, "V9"),
+ V10 = (74, "V10"),
+ V11 = (75, "V11"),
+ V12 = (76, "V12"),
+ V13 = (77, "V13"),
+ V14 = (78, "V14"),
+ V15 = (79, "V15"),
+ V16 = (80, "V16"),
+ V17 = (81, "V17"),
+ V18 = (82, "V18"),
+ V19 = (83, "V19"),
+ V20 = (84, "V20"),
+ V21 = (85, "V21"),
+ V22 = (86, "V22"),
+ V23 = (87, "V23"),
+ V24 = (88, "V24"),
+ V25 = (89, "V25"),
+ V26 = (90, "V26"),
+ V27 = (91, "V27"),
+ V28 = (92, "V28"),
+ V29 = (93, "V29"),
+ V30 = (94, "V30"),
+ V31 = (95, "V31"),
+});
+
+/// RISC-V architecture specific definitions.
+///
+/// See [RISC-V ELF psABI specification](https://github.com/riscv/riscv-elf-psabi-doc).
+#[derive(Debug, Clone, Copy)]
+pub struct RiscV;
+
+registers!(RiscV, {
+ X0 = (0, "x0"),
+ X1 = (1, "x1"),
+ X2 = (2, "x2"),
+ X3 = (3, "x3"),
+ X4 = (4, "x4"),
+ X5 = (5, "x5"),
+ X6 = (6, "x6"),
+ X7 = (7, "x7"),
+ X8 = (8, "x8"),
+ X9 = (9, "x9"),
+ X10 = (10, "x10"),
+ X11 = (11, "x11"),
+ X12 = (12, "x12"),
+ X13 = (13, "x13"),
+ X14 = (14, "x14"),
+ X15 = (15, "x15"),
+ X16 = (16, "x16"),
+ X17 = (17, "x17"),
+ X18 = (18, "x18"),
+ X19 = (19, "x19"),
+ X20 = (20, "x20"),
+ X21 = (21, "x21"),
+ X22 = (22, "x22"),
+ X23 = (23, "x23"),
+ X24 = (24, "x24"),
+ X25 = (25, "x25"),
+ X26 = (26, "x26"),
+ X27 = (27, "x27"),
+ X28 = (28, "x28"),
+ X29 = (29, "x29"),
+ X30 = (30, "x30"),
+ X31 = (31, "x31"),
+
+ F0 = (32, "f0"),
+ F1 = (33, "f1"),
+ F2 = (34, "f2"),
+ F3 = (35, "f3"),
+ F4 = (36, "f4"),
+ F5 = (37, "f5"),
+ F6 = (38, "f6"),
+ F7 = (39, "f7"),
+ F8 = (40, "f8"),
+ F9 = (41, "f9"),
+ F10 = (42, "f10"),
+ F11 = (43, "f11"),
+ F12 = (44, "f12"),
+ F13 = (45, "f13"),
+ F14 = (46, "f14"),
+ F15 = (47, "f15"),
+ F16 = (48, "f16"),
+ F17 = (49, "f17"),
+ F18 = (50, "f18"),
+ F19 = (51, "f19"),
+ F20 = (52, "f20"),
+ F21 = (53, "f21"),
+ F22 = (54, "f22"),
+ F23 = (55, "f23"),
+ F24 = (56, "f24"),
+ F25 = (57, "f25"),
+ F26 = (58, "f26"),
+ F27 = (59, "f27"),
+ F28 = (60, "f28"),
+ F29 = (61, "f29"),
+ F30 = (62, "f30"),
+ F31 = (63, "f31"),
+},
+aliases {
+ ZERO = (0, "zero"),
+ RA = (1, "ra"),
+ SP = (2, "sp"),
+ GP = (3, "gp"),
+ TP = (4, "tp"),
+ T0 = (5, "t0"),
+ T1 = (6, "t1"),
+ T2 = (7, "t2"),
+ S0 = (8, "s0"),
+ S1 = (9, "s1"),
+ A0 = (10, "a0"),
+ A1 = (11, "a1"),
+ A2 = (12, "a2"),
+ A3 = (13, "a3"),
+ A4 = (14, "a4"),
+ A5 = (15, "a5"),
+ A6 = (16, "a6"),
+ A7 = (17, "a7"),
+ S2 = (18, "s2"),
+ S3 = (19, "s3"),
+ S4 = (20, "s4"),
+ S5 = (21, "s5"),
+ S6 = (22, "s6"),
+ S7 = (23, "s7"),
+ S8 = (24, "s8"),
+ S9 = (25, "s9"),
+ S10 = (26, "s10"),
+ S11 = (27, "s11"),
+ T3 = (28, "t3"),
+ T4 = (29, "t4"),
+ T5 = (30, "t5"),
+ T6 = (31, "t6"),
+
+ FT0 = (32, "ft0"),
+ FT1 = (33, "ft1"),
+ FT2 = (34, "ft2"),
+ FT3 = (35, "ft3"),
+ FT4 = (36, "ft4"),
+ FT5 = (37, "ft5"),
+ FT6 = (38, "ft6"),
+ FT7 = (39, "ft7"),
+ FS0 = (40, "fs0"),
+ FS1 = (41, "fs1"),
+ FA0 = (42, "fa0"),
+ FA1 = (43, "fa1"),
+ FA2 = (44, "fa2"),
+ FA3 = (45, "fa3"),
+ FA4 = (46, "fa4"),
+ FA5 = (47, "fa5"),
+ FA6 = (48, "fa6"),
+ FA7 = (49, "fa7"),
+ FS2 = (50, "fs2"),
+ FS3 = (51, "fs3"),
+ FS4 = (52, "fs4"),
+ FS5 = (53, "fs5"),
+ FS6 = (54, "fs6"),
+ FS7 = (55, "fs7"),
+ FS8 = (56, "fs8"),
+ FS9 = (57, "fs9"),
+ FS10 = (58, "fs10"),
+ FS11 = (59, "fs11"),
+ FT8 = (60, "ft8"),
+ FT9 = (61, "ft9"),
+ FT10 = (62, "ft10"),
+ FT11 = (63, "ft11"),
+});
+
+/// Intel i386 architecture specific definitions.
+///
+/// See Intel386 psABi version 1.1 at the [X86 psABI wiki](https://github.com/hjl-tools/x86-psABI/wiki/X86-psABI).
+#[derive(Debug, Clone, Copy)]
+pub struct X86;
+
+registers!(X86, {
+ EAX = (0, "eax"),
+ ECX = (1, "ecx"),
+ EDX = (2, "edx"),
+ EBX = (3, "ebx"),
+ ESP = (4, "esp"),
+ EBP = (5, "ebp"),
+ ESI = (6, "esi"),
+ EDI = (7, "edi"),
+
+ // Return Address register. This is stored in `0(%esp)` and is not a physical register.
+ RA = (8, "RA"),
+
+ ST0 = (11, "st0"),
+ ST1 = (12, "st1"),
+ ST2 = (13, "st2"),
+ ST3 = (14, "st3"),
+ ST4 = (15, "st4"),
+ ST5 = (16, "st5"),
+ ST6 = (17, "st6"),
+ ST7 = (18, "st7"),
+
+ XMM0 = (21, "xmm0"),
+ XMM1 = (22, "xmm1"),
+ XMM2 = (23, "xmm2"),
+ XMM3 = (24, "xmm3"),
+ XMM4 = (25, "xmm4"),
+ XMM5 = (26, "xmm5"),
+ XMM6 = (27, "xmm6"),
+ XMM7 = (28, "xmm7"),
+
+ MM0 = (29, "mm0"),
+ MM1 = (30, "mm1"),
+ MM2 = (31, "mm2"),
+ MM3 = (32, "mm3"),
+ MM4 = (33, "mm4"),
+ MM5 = (34, "mm5"),
+ MM6 = (35, "mm6"),
+ MM7 = (36, "mm7"),
+
+ MXCSR = (39, "mxcsr"),
+
+ ES = (40, "es"),
+ CS = (41, "cs"),
+ SS = (42, "ss"),
+ DS = (43, "ds"),
+ FS = (44, "fs"),
+ GS = (45, "gs"),
+
+ TR = (48, "tr"),
+ LDTR = (49, "ldtr"),
+
+ FS_BASE = (93, "fs.base"),
+ GS_BASE = (94, "gs.base"),
+});
+
+/// AMD64 architecture specific definitions.
+///
+/// See x86-64 psABI version 1.0 at the [X86 psABI wiki](https://github.com/hjl-tools/x86-psABI/wiki/X86-psABI).
+#[derive(Debug, Clone, Copy)]
+pub struct X86_64;
+
+registers!(X86_64, {
+ RAX = (0, "rax"),
+ RDX = (1, "rdx"),
+ RCX = (2, "rcx"),
+ RBX = (3, "rbx"),
+ RSI = (4, "rsi"),
+ RDI = (5, "rdi"),
+ RBP = (6, "rbp"),
+ RSP = (7, "rsp"),
+
+ R8 = (8, "r8"),
+ R9 = (9, "r9"),
+ R10 = (10, "r10"),
+ R11 = (11, "r11"),
+ R12 = (12, "r12"),
+ R13 = (13, "r13"),
+ R14 = (14, "r14"),
+ R15 = (15, "r15"),
+
+ // Return Address register. This is stored in `0(%rsp)` and is not a physical register.
+ RA = (16, "RA"),
+
+ XMM0 = (17, "xmm0"),
+ XMM1 = (18, "xmm1"),
+ XMM2 = (19, "xmm2"),
+ XMM3 = (20, "xmm3"),
+ XMM4 = (21, "xmm4"),
+ XMM5 = (22, "xmm5"),
+ XMM6 = (23, "xmm6"),
+ XMM7 = (24, "xmm7"),
+
+ XMM8 = (25, "xmm8"),
+ XMM9 = (26, "xmm9"),
+ XMM10 = (27, "xmm10"),
+ XMM11 = (28, "xmm11"),
+ XMM12 = (29, "xmm12"),
+ XMM13 = (30, "xmm13"),
+ XMM14 = (31, "xmm14"),
+ XMM15 = (32, "xmm15"),
+
+ ST0 = (33, "st0"),
+ ST1 = (34, "st1"),
+ ST2 = (35, "st2"),
+ ST3 = (36, "st3"),
+ ST4 = (37, "st4"),
+ ST5 = (38, "st5"),
+ ST6 = (39, "st6"),
+ ST7 = (40, "st7"),
+
+ MM0 = (41, "mm0"),
+ MM1 = (42, "mm1"),
+ MM2 = (43, "mm2"),
+ MM3 = (44, "mm3"),
+ MM4 = (45, "mm4"),
+ MM5 = (46, "mm5"),
+ MM6 = (47, "mm6"),
+ MM7 = (48, "mm7"),
+
+ RFLAGS = (49, "rFLAGS"),
+ ES = (50, "es"),
+ CS = (51, "cs"),
+ SS = (52, "ss"),
+ DS = (53, "ds"),
+ FS = (54, "fs"),
+ GS = (55, "gs"),
+
+ FS_BASE = (58, "fs.base"),
+ GS_BASE = (59, "gs.base"),
+
+ TR = (62, "tr"),
+ LDTR = (63, "ldtr"),
+ MXCSR = (64, "mxcsr"),
+ FCW = (65, "fcw"),
+ FSW = (66, "fsw"),
+
+ XMM16 = (67, "xmm16"),
+ XMM17 = (68, "xmm17"),
+ XMM18 = (69, "xmm18"),
+ XMM19 = (70, "xmm19"),
+ XMM20 = (71, "xmm20"),
+ XMM21 = (72, "xmm21"),
+ XMM22 = (73, "xmm22"),
+ XMM23 = (74, "xmm23"),
+ XMM24 = (75, "xmm24"),
+ XMM25 = (76, "xmm25"),
+ XMM26 = (77, "xmm26"),
+ XMM27 = (78, "xmm27"),
+ XMM28 = (79, "xmm28"),
+ XMM29 = (80, "xmm29"),
+ XMM30 = (81, "xmm30"),
+ XMM31 = (82, "xmm31"),
+
+ K0 = (118, "k0"),
+ K1 = (119, "k1"),
+ K2 = (120, "k2"),
+ K3 = (121, "k3"),
+ K4 = (122, "k4"),
+ K5 = (123, "k5"),
+ K6 = (124, "k6"),
+ K7 = (125, "k7"),
+});
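
A short usage sketch for the lookups generated by `registers!` above (values taken from the tables in this file; the crate-root re-exports are assumed to be the way these types are reached):

```rust
use gimli::{Arm, Register, X86_64};

fn main() {
    // Primary names come straight from the tables above.
    assert_eq!(X86_64::register_name(Register(7)), Some("rsp"));
    assert_eq!(X86_64::register_name(Register(16)), Some("RA"));

    // Name lookup also accepts aliases, such as ARM's SP for R13...
    assert_eq!(Arm::name_to_register("SP"), Some(Register(13)));
    // ...but `register_name` only reports the primary name for aliased registers.
    assert_eq!(Arm::register_name(Register(13)), Some("R13"));

    // Unknown register numbers yield `None`.
    assert_eq!(X86_64::register_name(Register(1000)), None);
}
```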
diff --git a/vendor/gimli-0.26.2/src/common.rs b/vendor/gimli-0.26.2/src/common.rs
new file mode 100644
index 000000000..79cf76616
--- /dev/null
+++ b/vendor/gimli-0.26.2/src/common.rs
@@ -0,0 +1,363 @@
+/// Whether the format of a compilation unit is 32- or 64-bit.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum Format {
+ /// 64-bit DWARF
+ Dwarf64 = 8,
+ /// 32-bit DWARF
+ Dwarf32 = 4,
+}
+
+impl Format {
+ /// Return the serialized size of an initial length field for the format.
+ #[inline]
+ pub fn initial_length_size(self) -> u8 {
+ match self {
+ Format::Dwarf32 => 4,
+ Format::Dwarf64 => 12,
+ }
+ }
+
+ /// Return the natural word size for the format
+ #[inline]
+ pub fn word_size(self) -> u8 {
+ match self {
+ Format::Dwarf32 => 4,
+ Format::Dwarf64 => 8,
+ }
+ }
+}
+
+/// Encoding parameters that are commonly used for multiple DWARF sections.
+///
+/// This is intended to be small enough to pass by value.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+// `address_size` and `format` are used more often than `version`, so keep
+// them first.
+#[repr(C)]
+pub struct Encoding {
+ /// The size of an address.
+ pub address_size: u8,
+
+ // The size of a segment selector.
+ // TODO: pub segment_size: u8,
+ /// Whether the DWARF format is 32- or 64-bit.
+ pub format: Format,
+
+ /// The DWARF version of the header.
+ pub version: u16,
+}
+
+/// Encoding parameters for a line number program.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct LineEncoding {
+ /// The size in bytes of the smallest target machine instruction.
+ pub minimum_instruction_length: u8,
+
+ /// The maximum number of individual operations that may be encoded in an
+ /// instruction.
+ pub maximum_operations_per_instruction: u8,
+
+ /// The initial value of the `is_stmt` register.
+ pub default_is_stmt: bool,
+
+ /// The minimum value which a special opcode can add to the line register.
+ pub line_base: i8,
+
+ /// The range of values which a special opcode can add to the line register.
+ pub line_range: u8,
+}
+
+impl Default for LineEncoding {
+ fn default() -> Self {
+ // Values from LLVM.
+ LineEncoding {
+ minimum_instruction_length: 1,
+ maximum_operations_per_instruction: 1,
+ default_is_stmt: true,
+ line_base: -5,
+ line_range: 14,
+ }
+ }
+}
+
+/// A DWARF register number.
+///
+/// The meaning of this value is ABI dependent. This is generally encoded as
+/// a ULEB128, but supported architectures need 16 bits at most.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub struct Register(pub u16);
+
+/// An offset into the `.debug_abbrev` section.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct DebugAbbrevOffset<T = usize>(pub T);
+
+/// An offset to a set of entries in the `.debug_addr` section.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub struct DebugAddrBase<T = usize>(pub T);
+
+/// An index into a set of addresses in the `.debug_addr` section.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub struct DebugAddrIndex<T = usize>(pub T);
+
+/// An offset into the `.debug_aranges` section.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub struct DebugArangesOffset<T = usize>(pub T);
+
+/// An offset into the `.debug_info` section.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Hash)]
+pub struct DebugInfoOffset<T = usize>(pub T);
+
+/// An offset into the `.debug_line` section.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub struct DebugLineOffset<T = usize>(pub T);
+
+/// An offset into the `.debug_line_str` section.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub struct DebugLineStrOffset<T = usize>(pub T);
+
+/// An offset into either the `.debug_loc` section or the `.debug_loclists` section,
+/// depending on the version of the unit the offset was contained in.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct LocationListsOffset<T = usize>(pub T);
+
+/// An offset to a set of location list offsets in the `.debug_loclists` section.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub struct DebugLocListsBase<T = usize>(pub T);
+
+/// An index into a set of location list offsets in the `.debug_loclists` section.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub struct DebugLocListsIndex<T = usize>(pub T);
+
+/// An offset into the `.debug_macinfo` section.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct DebugMacinfoOffset<T = usize>(pub T);
+
+/// An offset into the `.debug_macro` section.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct DebugMacroOffset<T = usize>(pub T);
+
+/// An offset into either the `.debug_ranges` section or the `.debug_rnglists` section,
+/// depending on the version of the unit the offset was contained in.
+///
+/// If this is from a DWARF 4 DWO file, then it must additionally be offset by the
+/// value of `DW_AT_GNU_ranges_base`. You can use `Dwarf::ranges_offset_from_raw` to do this.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct RawRangeListsOffset<T = usize>(pub T);
+
+/// An offset into either the `.debug_ranges` section or the `.debug_rnglists` section,
+/// depending on the version of the unit the offset was contained in.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct RangeListsOffset<T = usize>(pub T);
+
+/// An offset to a set of range list offsets in the `.debug_rnglists` section.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub struct DebugRngListsBase<T = usize>(pub T);
+
+/// An index into a set of range list offsets in the `.debug_rnglists` section.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub struct DebugRngListsIndex<T = usize>(pub T);
+
+/// An offset into the `.debug_str` section.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub struct DebugStrOffset<T = usize>(pub T);
+
+/// An offset to a set of entries in the `.debug_str_offsets` section.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub struct DebugStrOffsetsBase<T = usize>(pub T);
+
+/// An index into a set of entries in the `.debug_str_offsets` section.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub struct DebugStrOffsetsIndex<T = usize>(pub T);
+
+/// An offset into the `.debug_types` section.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Hash)]
+pub struct DebugTypesOffset<T = usize>(pub T);
+
+/// A type signature as used in the `.debug_types` section.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct DebugTypeSignature(pub u64);
+
+/// An offset into the `.debug_frame` section.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct DebugFrameOffset<T = usize>(pub T);
+
+impl<T> From<T> for DebugFrameOffset<T> {
+ #[inline]
+ fn from(o: T) -> Self {
+ DebugFrameOffset(o)
+ }
+}
+
+/// An offset into the `.eh_frame` section.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct EhFrameOffset<T = usize>(pub T);
+
+impl<T> From<T> for EhFrameOffset<T> {
+ #[inline]
+ fn from(o: T) -> Self {
+ EhFrameOffset(o)
+ }
+}
+
+/// An offset into the `.debug_info` or `.debug_types` sections.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Hash)]
+pub enum UnitSectionOffset<T = usize> {
+ /// An offset into the `.debug_info` section.
+ DebugInfoOffset(DebugInfoOffset<T>),
+ /// An offset into the `.debug_types` section.
+ DebugTypesOffset(DebugTypesOffset<T>),
+}
+
+impl<T> From<DebugInfoOffset<T>> for UnitSectionOffset<T> {
+ fn from(offset: DebugInfoOffset<T>) -> Self {
+ UnitSectionOffset::DebugInfoOffset(offset)
+ }
+}
+
+impl<T> From<DebugTypesOffset<T>> for UnitSectionOffset<T> {
+ fn from(offset: DebugTypesOffset<T>) -> Self {
+ UnitSectionOffset::DebugTypesOffset(offset)
+ }
+}
+
+impl<T> UnitSectionOffset<T>
+where
+ T: Clone,
+{
+ /// Returns the `DebugInfoOffset` inside, or `None` otherwise.
+ pub fn as_debug_info_offset(&self) -> Option<DebugInfoOffset<T>> {
+ match self {
+ UnitSectionOffset::DebugInfoOffset(offset) => Some(offset.clone()),
+ UnitSectionOffset::DebugTypesOffset(_) => None,
+ }
+ }
+ /// Returns the `DebugTypesOffset` inside, or `None` otherwise.
+ pub fn as_debug_types_offset(&self) -> Option<DebugTypesOffset<T>> {
+ match self {
+ UnitSectionOffset::DebugInfoOffset(_) => None,
+ UnitSectionOffset::DebugTypesOffset(offset) => Some(offset.clone()),
+ }
+ }
+}
+
+/// An identifier for a DWARF section.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Hash)]
+pub enum SectionId {
+ /// The `.debug_abbrev` section.
+ DebugAbbrev,
+ /// The `.debug_addr` section.
+ DebugAddr,
+ /// The `.debug_aranges` section.
+ DebugAranges,
+ /// The `.debug_cu_index` section.
+ DebugCuIndex,
+ /// The `.debug_frame` section.
+ DebugFrame,
+ /// The `.eh_frame` section.
+ EhFrame,
+ /// The `.eh_frame_hdr` section.
+ EhFrameHdr,
+ /// The `.debug_info` section.
+ DebugInfo,
+ /// The `.debug_line` section.
+ DebugLine,
+ /// The `.debug_line_str` section.
+ DebugLineStr,
+ /// The `.debug_loc` section.
+ DebugLoc,
+ /// The `.debug_loclists` section.
+ DebugLocLists,
+ /// The `.debug_macinfo` section.
+ DebugMacinfo,
+ /// The `.debug_macro` section.
+ DebugMacro,
+ /// The `.debug_pubnames` section.
+ DebugPubNames,
+ /// The `.debug_pubtypes` section.
+ DebugPubTypes,
+ /// The `.debug_ranges` section.
+ DebugRanges,
+ /// The `.debug_rnglists` section.
+ DebugRngLists,
+ /// The `.debug_str` section.
+ DebugStr,
+ /// The `.debug_str_offsets` section.
+ DebugStrOffsets,
+ /// The `.debug_tu_index` section.
+ DebugTuIndex,
+ /// The `.debug_types` section.
+ DebugTypes,
+}
+
+impl SectionId {
+ /// Returns the ELF section name for this kind.
+ pub fn name(self) -> &'static str {
+ match self {
+ SectionId::DebugAbbrev => ".debug_abbrev",
+ SectionId::DebugAddr => ".debug_addr",
+ SectionId::DebugAranges => ".debug_aranges",
+ SectionId::DebugCuIndex => ".debug_cu_index",
+ SectionId::DebugFrame => ".debug_frame",
+ SectionId::EhFrame => ".eh_frame",
+ SectionId::EhFrameHdr => ".eh_frame_hdr",
+ SectionId::DebugInfo => ".debug_info",
+ SectionId::DebugLine => ".debug_line",
+ SectionId::DebugLineStr => ".debug_line_str",
+ SectionId::DebugLoc => ".debug_loc",
+ SectionId::DebugLocLists => ".debug_loclists",
+ SectionId::DebugMacinfo => ".debug_macinfo",
+ SectionId::DebugMacro => ".debug_macro",
+ SectionId::DebugPubNames => ".debug_pubnames",
+ SectionId::DebugPubTypes => ".debug_pubtypes",
+ SectionId::DebugRanges => ".debug_ranges",
+ SectionId::DebugRngLists => ".debug_rnglists",
+ SectionId::DebugStr => ".debug_str",
+ SectionId::DebugStrOffsets => ".debug_str_offsets",
+ SectionId::DebugTuIndex => ".debug_tu_index",
+ SectionId::DebugTypes => ".debug_types",
+ }
+ }
+
+ /// Returns the ELF section name for this kind, when found in a .dwo or .dwp file.
+ pub fn dwo_name(self) -> Option<&'static str> {
+ Some(match self {
+ SectionId::DebugAbbrev => ".debug_abbrev.dwo",
+ SectionId::DebugCuIndex => ".debug_cu_index",
+ SectionId::DebugInfo => ".debug_info.dwo",
+ SectionId::DebugLine => ".debug_line.dwo",
+ // The debug_loc section can be present in the dwo when using the
+ // GNU split-dwarf extension to DWARF4.
+ SectionId::DebugLoc => ".debug_loc.dwo",
+ SectionId::DebugLocLists => ".debug_loclists.dwo",
+ SectionId::DebugMacro => ".debug_macro.dwo",
+ SectionId::DebugRngLists => ".debug_rnglists.dwo",
+ SectionId::DebugStr => ".debug_str.dwo",
+ SectionId::DebugStrOffsets => ".debug_str_offsets.dwo",
+ SectionId::DebugTuIndex => ".debug_tu_index",
+ SectionId::DebugTypes => ".debug_types.dwo",
+ _ => return None,
+ })
+ }
+}
+
+/// An optionally-provided implementation-defined compilation unit ID to enable
+/// split DWARF and linking a split compilation unit back together.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct DwoId(pub u64);
+
+/// The "type" of file with DWARF debugging information. This determines, among other things,
+/// which files DWARF sections should be loaded from.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum DwarfFileType {
+ /// A normal executable or object file.
+ Main,
+ /// A .dwo split DWARF file.
+ Dwo,
+ // TODO: Supplementary files, .dwps?
+}
+
+impl Default for DwarfFileType {
+ fn default() -> Self {
+ DwarfFileType::Main
+ }
+}
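
A small sketch exercising a few of the definitions above (section name mapping, format sizes, and constructing an `Encoding` by hand), using the crate-root re-exports:

```rust
use gimli::{Encoding, Format, SectionId};

fn main() {
    // Section name lookups, per the `SectionId` impl above.
    assert_eq!(SectionId::DebugInfo.name(), ".debug_info");
    assert_eq!(SectionId::DebugInfo.dwo_name(), Some(".debug_info.dwo"));
    assert_eq!(SectionId::EhFrame.dwo_name(), None);

    // Initial length and word sizes, per `Format` above.
    assert_eq!(Format::Dwarf32.initial_length_size(), 4);
    assert_eq!(Format::Dwarf64.word_size(), 8);

    // `Encoding` is a small, copyable bundle of the parameters most readers need.
    let encoding = Encoding {
        address_size: 8,
        format: Format::Dwarf32,
        version: 5,
    };
    println!("{:?}", encoding);
}
```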
diff --git a/vendor/gimli-0.26.2/src/constants.rs b/vendor/gimli-0.26.2/src/constants.rs
new file mode 100644
index 000000000..c617f4e2e
--- /dev/null
+++ b/vendor/gimli-0.26.2/src/constants.rs
@@ -0,0 +1,1425 @@
+// This file originally from https://github.com/philipc/rust-dwarf/ and
+// distributed under either MIT or Apache 2.0 licenses.
+//
+// Copyright 2016 The rust-dwarf Developers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//! Constant definitions.
+//!
+//! The DWARF spec's `DW_AT_*` type is represented as `struct DwAt(u16)`,
+//! `DW_FORM_*` as `DwForm(u16)`, etc.
+//!
+//! There are also exported const definitions for each constant.
+
+#![allow(non_upper_case_globals)]
+#![allow(missing_docs)]
+
+use core::fmt;
+
+// The `dw!` macro turns this:
+//
+// dw!(DwFoo(u32) {
+// DW_FOO_bar = 0,
+// DW_FOO_baz = 1,
+// DW_FOO_bang = 2,
+// });
+//
+// into this:
+//
+// #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
+// pub struct DwFoo(pub u32);
+//
+// pub const DW_FOO_bar: DwFoo = DwFoo(0);
+// pub const DW_FOO_baz: DwFoo = DwFoo(1);
+// pub const DW_FOO_bang: DwFoo = DwFoo(2);
+//
+// impl DwFoo {
+// pub fn static_string(&self) -> Option<&'static str> {
+// ...
+// }
+// }
+//
+// impl fmt::Display for DwFoo {
+// fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
+// ...
+// }
+// }
+macro_rules! dw {
+ ($(#[$meta:meta])* $struct_name:ident($struct_type:ty) { $($name:ident = $val:expr),+ $(,)? }) => {
+ $(#[$meta])*
+ #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
+ pub struct $struct_name(pub $struct_type);
+
+ $(
+ pub const $name: $struct_name = $struct_name($val);
+ )+
+
+ impl $struct_name {
+ pub fn static_string(&self) -> Option<&'static str> {
+ Some(match *self {
+ $(
+ $name => stringify!($name),
+ )+
+ _ => return None,
+ })
+ }
+ }
+
+ impl fmt::Display for $struct_name {
+ fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
+ if let Some(s) = self.static_string() {
+ f.pad(s)
+ } else {
+ #[cfg(feature = "read")]
+ {
+ f.pad(&format!("Unknown {}: {}", stringify!($struct_name), self.0))
+ }
+ #[cfg(not(feature = "read"))]
+ {
+ write!(f, "Unknown {}: {}", stringify!($struct_name), self.0)
+ }
+ }
+ }
+ }
+ };
+}
+
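
As a usage note for the `dw!` expansion shown above: each generated type gets `static_string` and a `Display` impl. A minimal sketch with one of the constants defined below, accessed through the public `constants` module:

```rust
use gimli::constants::{DwTag, DW_TAG_compile_unit};

fn main() {
    // Known values map back to their spec names.
    assert_eq!(DW_TAG_compile_unit.static_string(), Some("DW_TAG_compile_unit"));
    assert_eq!(format!("{}", DW_TAG_compile_unit), "DW_TAG_compile_unit");

    // Unknown values have no static string and fall back to the
    // "Unknown ..." formatting shown in the macro above.
    let unknown = DwTag(0xfff0);
    assert_eq!(unknown.static_string(), None);
    println!("{}", unknown);
}
```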
+dw!(
+/// The section type field in a `.dwp` unit index.
+///
+/// This is used for version 5 and later.
+///
+/// See Section 7.3.5.
+DwSect(u32) {
+ DW_SECT_INFO = 1,
+ DW_SECT_ABBREV = 3,
+ DW_SECT_LINE = 4,
+ DW_SECT_LOCLISTS = 5,
+ DW_SECT_STR_OFFSETS = 6,
+ DW_SECT_MACRO = 7,
+ DW_SECT_RNGLISTS = 8,
+});
+
+dw!(
+/// The section type field in a `.dwp` unit index with version 2.
+DwSectV2(u32) {
+ DW_SECT_V2_INFO = 1,
+ DW_SECT_V2_TYPES = 2,
+ DW_SECT_V2_ABBREV = 3,
+ DW_SECT_V2_LINE = 4,
+ DW_SECT_V2_LOC = 5,
+ DW_SECT_V2_STR_OFFSETS = 6,
+ DW_SECT_V2_MACINFO = 7,
+ DW_SECT_V2_MACRO = 8,
+});
+
+dw!(
+/// The unit type field in a unit header.
+///
+/// See Section 7.5.1, Table 7.2.
+DwUt(u8) {
+ DW_UT_compile = 0x01,
+ DW_UT_type = 0x02,
+ DW_UT_partial = 0x03,
+ DW_UT_skeleton = 0x04,
+ DW_UT_split_compile = 0x05,
+ DW_UT_split_type = 0x06,
+ DW_UT_lo_user = 0x80,
+ DW_UT_hi_user = 0xff,
+});
+
+dw!(
+/// The opcode for a call frame instruction.
+///
+/// Section 7.24:
+/// > Call frame instructions are encoded in one or more bytes. The primary
+/// > opcode is encoded in the high order two bits of the first byte (that is,
+/// > opcode = byte >> 6). An operand or extended opcode may be encoded in the
+/// > low order 6 bits. Additional operands are encoded in subsequent bytes.
+DwCfa(u8) {
+ DW_CFA_advance_loc = 0x01 << 6,
+ DW_CFA_offset = 0x02 << 6,
+ DW_CFA_restore = 0x03 << 6,
+ DW_CFA_nop = 0,
+ DW_CFA_set_loc = 0x01,
+ DW_CFA_advance_loc1 = 0x02,
+ DW_CFA_advance_loc2 = 0x03,
+ DW_CFA_advance_loc4 = 0x04,
+ DW_CFA_offset_extended = 0x05,
+ DW_CFA_restore_extended = 0x06,
+ DW_CFA_undefined = 0x07,
+ DW_CFA_same_value = 0x08,
+ DW_CFA_register = 0x09,
+ DW_CFA_remember_state = 0x0a,
+ DW_CFA_restore_state = 0x0b,
+ DW_CFA_def_cfa = 0x0c,
+ DW_CFA_def_cfa_register = 0x0d,
+ DW_CFA_def_cfa_offset = 0x0e,
+ DW_CFA_def_cfa_expression = 0x0f,
+ DW_CFA_expression = 0x10,
+ DW_CFA_offset_extended_sf = 0x11,
+ DW_CFA_def_cfa_sf = 0x12,
+ DW_CFA_def_cfa_offset_sf = 0x13,
+ DW_CFA_val_offset = 0x14,
+ DW_CFA_val_offset_sf = 0x15,
+ DW_CFA_val_expression = 0x16,
+
+ DW_CFA_lo_user = 0x1c,
+ DW_CFA_hi_user = 0x3f,
+
+ DW_CFA_MIPS_advance_loc8 = 0x1d,
+ DW_CFA_GNU_window_save = 0x2d,
+ DW_CFA_GNU_args_size = 0x2e,
+ DW_CFA_GNU_negative_offset_extended = 0x2f,
+});
+
+dw!(
+/// The child determination encodings for DIE attributes.
+///
+/// See Section 7.5.3, Table 7.4.
+DwChildren(u8) {
+ DW_CHILDREN_no = 0,
+ DW_CHILDREN_yes = 1,
+});
+
+dw!(
+/// The tag encodings for DIE attributes.
+///
+/// See Section 7.5.3, Table 7.3.
+DwTag(u16) {
+ DW_TAG_null = 0x00,
+
+ DW_TAG_array_type = 0x01,
+ DW_TAG_class_type = 0x02,
+ DW_TAG_entry_point = 0x03,
+ DW_TAG_enumeration_type = 0x04,
+ DW_TAG_formal_parameter = 0x05,
+ DW_TAG_imported_declaration = 0x08,
+ DW_TAG_label = 0x0a,
+ DW_TAG_lexical_block = 0x0b,
+ DW_TAG_member = 0x0d,
+ DW_TAG_pointer_type = 0x0f,
+ DW_TAG_reference_type = 0x10,
+ DW_TAG_compile_unit = 0x11,
+ DW_TAG_string_type = 0x12,
+ DW_TAG_structure_type = 0x13,
+ DW_TAG_subroutine_type = 0x15,
+ DW_TAG_typedef = 0x16,
+ DW_TAG_union_type = 0x17,
+ DW_TAG_unspecified_parameters = 0x18,
+ DW_TAG_variant = 0x19,
+ DW_TAG_common_block = 0x1a,
+ DW_TAG_common_inclusion = 0x1b,
+ DW_TAG_inheritance = 0x1c,
+ DW_TAG_inlined_subroutine = 0x1d,
+ DW_TAG_module = 0x1e,
+ DW_TAG_ptr_to_member_type = 0x1f,
+ DW_TAG_set_type = 0x20,
+ DW_TAG_subrange_type = 0x21,
+ DW_TAG_with_stmt = 0x22,
+ DW_TAG_access_declaration = 0x23,
+ DW_TAG_base_type = 0x24,
+ DW_TAG_catch_block = 0x25,
+ DW_TAG_const_type = 0x26,
+ DW_TAG_constant = 0x27,
+ DW_TAG_enumerator = 0x28,
+ DW_TAG_file_type = 0x29,
+ DW_TAG_friend = 0x2a,
+ DW_TAG_namelist = 0x2b,
+ DW_TAG_namelist_item = 0x2c,
+ DW_TAG_packed_type = 0x2d,
+ DW_TAG_subprogram = 0x2e,
+ DW_TAG_template_type_parameter = 0x2f,
+ DW_TAG_template_value_parameter = 0x30,
+ DW_TAG_thrown_type = 0x31,
+ DW_TAG_try_block = 0x32,
+ DW_TAG_variant_part = 0x33,
+ DW_TAG_variable = 0x34,
+ DW_TAG_volatile_type = 0x35,
+
+// DWARF 3.
+ DW_TAG_dwarf_procedure = 0x36,
+ DW_TAG_restrict_type = 0x37,
+ DW_TAG_interface_type = 0x38,
+ DW_TAG_namespace = 0x39,
+ DW_TAG_imported_module = 0x3a,
+ DW_TAG_unspecified_type = 0x3b,
+ DW_TAG_partial_unit = 0x3c,
+ DW_TAG_imported_unit = 0x3d,
+ DW_TAG_condition = 0x3f,
+ DW_TAG_shared_type = 0x40,
+
+// DWARF 4.
+ DW_TAG_type_unit = 0x41,
+ DW_TAG_rvalue_reference_type = 0x42,
+ DW_TAG_template_alias = 0x43,
+
+// DWARF 5.
+ DW_TAG_coarray_type = 0x44,
+ DW_TAG_generic_subrange = 0x45,
+ DW_TAG_dynamic_type = 0x46,
+ DW_TAG_atomic_type = 0x47,
+ DW_TAG_call_site = 0x48,
+ DW_TAG_call_site_parameter = 0x49,
+ DW_TAG_skeleton_unit = 0x4a,
+ DW_TAG_immutable_type = 0x4b,
+
+ DW_TAG_lo_user = 0x4080,
+ DW_TAG_hi_user = 0xffff,
+
+// SGI/MIPS extensions.
+ DW_TAG_MIPS_loop = 0x4081,
+
+// HP extensions.
+ DW_TAG_HP_array_descriptor = 0x4090,
+ DW_TAG_HP_Bliss_field = 0x4091,
+ DW_TAG_HP_Bliss_field_set = 0x4092,
+
+// GNU extensions.
+ DW_TAG_format_label = 0x4101,
+ DW_TAG_function_template = 0x4102,
+ DW_TAG_class_template = 0x4103,
+ DW_TAG_GNU_BINCL = 0x4104,
+ DW_TAG_GNU_EINCL = 0x4105,
+ DW_TAG_GNU_template_template_param = 0x4106,
+ DW_TAG_GNU_template_parameter_pack = 0x4107,
+ DW_TAG_GNU_formal_parameter_pack = 0x4108,
+ DW_TAG_GNU_call_site = 0x4109,
+ DW_TAG_GNU_call_site_parameter = 0x410a,
+
+ DW_TAG_APPLE_property = 0x4200,
+
+// SUN extensions.
+ DW_TAG_SUN_function_template = 0x4201,
+ DW_TAG_SUN_class_template = 0x4202,
+ DW_TAG_SUN_struct_template = 0x4203,
+ DW_TAG_SUN_union_template = 0x4204,
+ DW_TAG_SUN_indirect_inheritance = 0x4205,
+ DW_TAG_SUN_codeflags = 0x4206,
+ DW_TAG_SUN_memop_info = 0x4207,
+ DW_TAG_SUN_omp_child_func = 0x4208,
+ DW_TAG_SUN_rtti_descriptor = 0x4209,
+ DW_TAG_SUN_dtor_info = 0x420a,
+ DW_TAG_SUN_dtor = 0x420b,
+ DW_TAG_SUN_f90_interface = 0x420c,
+ DW_TAG_SUN_fortran_vax_structure = 0x420d,
+
+// ALTIUM extensions.
+ DW_TAG_ALTIUM_circ_type = 0x5101,
+ DW_TAG_ALTIUM_mwa_circ_type = 0x5102,
+ DW_TAG_ALTIUM_rev_carry_type = 0x5103,
+ DW_TAG_ALTIUM_rom = 0x5111,
+
+// Extensions for UPC.
+ DW_TAG_upc_shared_type = 0x8765,
+ DW_TAG_upc_strict_type = 0x8766,
+ DW_TAG_upc_relaxed_type = 0x8767,
+
+// PGI (STMicroelectronics) extensions.
+ DW_TAG_PGI_kanji_type = 0xa000,
+ DW_TAG_PGI_interface_block = 0xa020,
+
+// Borland extensions.
+ DW_TAG_BORLAND_property = 0xb000,
+ DW_TAG_BORLAND_Delphi_string = 0xb001,
+ DW_TAG_BORLAND_Delphi_dynamic_array = 0xb002,
+ DW_TAG_BORLAND_Delphi_set = 0xb003,
+ DW_TAG_BORLAND_Delphi_variant = 0xb004,
+});
+
+dw!(
+/// The attribute encodings for DIE attributes.
+///
+/// See Section 7.5.4, Table 7.5.
+DwAt(u16) {
+ DW_AT_null = 0x00,
+
+ DW_AT_sibling = 0x01,
+ DW_AT_location = 0x02,
+ DW_AT_name = 0x03,
+ DW_AT_ordering = 0x09,
+ DW_AT_byte_size = 0x0b,
+ DW_AT_bit_offset = 0x0c,
+ DW_AT_bit_size = 0x0d,
+ DW_AT_stmt_list = 0x10,
+ DW_AT_low_pc = 0x11,
+ DW_AT_high_pc = 0x12,
+ DW_AT_language = 0x13,
+ DW_AT_discr = 0x15,
+ DW_AT_discr_value = 0x16,
+ DW_AT_visibility = 0x17,
+ DW_AT_import = 0x18,
+ DW_AT_string_length = 0x19,
+ DW_AT_common_reference = 0x1a,
+ DW_AT_comp_dir = 0x1b,
+ DW_AT_const_value = 0x1c,
+ DW_AT_containing_type = 0x1d,
+ DW_AT_default_value = 0x1e,
+ DW_AT_inline = 0x20,
+ DW_AT_is_optional = 0x21,
+ DW_AT_lower_bound = 0x22,
+ DW_AT_producer = 0x25,
+ DW_AT_prototyped = 0x27,
+ DW_AT_return_addr = 0x2a,
+ DW_AT_start_scope = 0x2c,
+ DW_AT_bit_stride = 0x2e,
+ DW_AT_upper_bound = 0x2f,
+ DW_AT_abstract_origin = 0x31,
+ DW_AT_accessibility = 0x32,
+ DW_AT_address_class = 0x33,
+ DW_AT_artificial = 0x34,
+ DW_AT_base_types = 0x35,
+ DW_AT_calling_convention = 0x36,
+ DW_AT_count = 0x37,
+ DW_AT_data_member_location = 0x38,
+ DW_AT_decl_column = 0x39,
+ DW_AT_decl_file = 0x3a,
+ DW_AT_decl_line = 0x3b,
+ DW_AT_declaration = 0x3c,
+ DW_AT_discr_list = 0x3d,
+ DW_AT_encoding = 0x3e,
+ DW_AT_external = 0x3f,
+ DW_AT_frame_base = 0x40,
+ DW_AT_friend = 0x41,
+ DW_AT_identifier_case = 0x42,
+ DW_AT_macro_info = 0x43,
+ DW_AT_namelist_item = 0x44,
+ DW_AT_priority = 0x45,
+ DW_AT_segment = 0x46,
+ DW_AT_specification = 0x47,
+ DW_AT_static_link = 0x48,
+ DW_AT_type = 0x49,
+ DW_AT_use_location = 0x4a,
+ DW_AT_variable_parameter = 0x4b,
+ DW_AT_virtuality = 0x4c,
+ DW_AT_vtable_elem_location = 0x4d,
+
+// DWARF 3.
+ DW_AT_allocated = 0x4e,
+ DW_AT_associated = 0x4f,
+ DW_AT_data_location = 0x50,
+ DW_AT_byte_stride = 0x51,
+ DW_AT_entry_pc = 0x52,
+ DW_AT_use_UTF8 = 0x53,
+ DW_AT_extension = 0x54,
+ DW_AT_ranges = 0x55,
+ DW_AT_trampoline = 0x56,
+ DW_AT_call_column = 0x57,
+ DW_AT_call_file = 0x58,
+ DW_AT_call_line = 0x59,
+ DW_AT_description = 0x5a,
+ DW_AT_binary_scale = 0x5b,
+ DW_AT_decimal_scale = 0x5c,
+ DW_AT_small = 0x5d,
+ DW_AT_decimal_sign = 0x5e,
+ DW_AT_digit_count = 0x5f,
+ DW_AT_picture_string = 0x60,
+ DW_AT_mutable = 0x61,
+ DW_AT_threads_scaled = 0x62,
+ DW_AT_explicit = 0x63,
+ DW_AT_object_pointer = 0x64,
+ DW_AT_endianity = 0x65,
+ DW_AT_elemental = 0x66,
+ DW_AT_pure = 0x67,
+ DW_AT_recursive = 0x68,
+
+// DWARF 4.
+ DW_AT_signature = 0x69,
+ DW_AT_main_subprogram = 0x6a,
+ DW_AT_data_bit_offset = 0x6b,
+ DW_AT_const_expr = 0x6c,
+ DW_AT_enum_class = 0x6d,
+ DW_AT_linkage_name = 0x6e,
+
+// DWARF 5.
+ DW_AT_string_length_bit_size = 0x6f,
+ DW_AT_string_length_byte_size = 0x70,
+ DW_AT_rank = 0x71,
+ DW_AT_str_offsets_base = 0x72,
+ DW_AT_addr_base = 0x73,
+ DW_AT_rnglists_base = 0x74,
+ DW_AT_dwo_name = 0x76,
+ DW_AT_reference = 0x77,
+ DW_AT_rvalue_reference = 0x78,
+ DW_AT_macros = 0x79,
+ DW_AT_call_all_calls = 0x7a,
+ DW_AT_call_all_source_calls = 0x7b,
+ DW_AT_call_all_tail_calls = 0x7c,
+ DW_AT_call_return_pc = 0x7d,
+ DW_AT_call_value = 0x7e,
+ DW_AT_call_origin = 0x7f,
+ DW_AT_call_parameter = 0x80,
+ DW_AT_call_pc = 0x81,
+ DW_AT_call_tail_call = 0x82,
+ DW_AT_call_target = 0x83,
+ DW_AT_call_target_clobbered = 0x84,
+ DW_AT_call_data_location = 0x85,
+ DW_AT_call_data_value = 0x86,
+ DW_AT_noreturn = 0x87,
+ DW_AT_alignment = 0x88,
+ DW_AT_export_symbols = 0x89,
+ DW_AT_deleted = 0x8a,
+ DW_AT_defaulted = 0x8b,
+ DW_AT_loclists_base = 0x8c,
+
+ DW_AT_lo_user = 0x2000,
+ DW_AT_hi_user = 0x3fff,
+
+// SGI/MIPS extensions.
+ DW_AT_MIPS_fde = 0x2001,
+ DW_AT_MIPS_loop_begin = 0x2002,
+ DW_AT_MIPS_tail_loop_begin = 0x2003,
+ DW_AT_MIPS_epilog_begin = 0x2004,
+ DW_AT_MIPS_loop_unroll_factor = 0x2005,
+ DW_AT_MIPS_software_pipeline_depth = 0x2006,
+ DW_AT_MIPS_linkage_name = 0x2007,
+ DW_AT_MIPS_stride = 0x2008,
+ DW_AT_MIPS_abstract_name = 0x2009,
+ DW_AT_MIPS_clone_origin = 0x200a,
+ DW_AT_MIPS_has_inlines = 0x200b,
+ DW_AT_MIPS_stride_byte = 0x200c,
+ DW_AT_MIPS_stride_elem = 0x200d,
+ DW_AT_MIPS_ptr_dopetype = 0x200e,
+ DW_AT_MIPS_allocatable_dopetype = 0x200f,
+ DW_AT_MIPS_assumed_shape_dopetype = 0x2010,
+
+// This one appears to have only been implemented by Open64 for
+// fortran and may conflict with other extensions.
+ DW_AT_MIPS_assumed_size = 0x2011,
+
+// TODO: HP/CPQ extensions.
+// These conflict with the MIPS extensions.
+
+ DW_AT_INTEL_other_endian = 0x2026,
+
+// GNU extensions
+ DW_AT_sf_names = 0x2101,
+ DW_AT_src_info = 0x2102,
+ DW_AT_mac_info = 0x2103,
+ DW_AT_src_coords = 0x2104,
+ DW_AT_body_begin = 0x2105,
+ DW_AT_body_end = 0x2106,
+ DW_AT_GNU_vector = 0x2107,
+ DW_AT_GNU_guarded_by = 0x2108,
+ DW_AT_GNU_pt_guarded_by = 0x2109,
+ DW_AT_GNU_guarded = 0x210a,
+ DW_AT_GNU_pt_guarded = 0x210b,
+ DW_AT_GNU_locks_excluded = 0x210c,
+ DW_AT_GNU_exclusive_locks_required = 0x210d,
+ DW_AT_GNU_shared_locks_required = 0x210e,
+ DW_AT_GNU_odr_signature = 0x210f,
+ DW_AT_GNU_template_name = 0x2110,
+ DW_AT_GNU_call_site_value = 0x2111,
+ DW_AT_GNU_call_site_data_value = 0x2112,
+ DW_AT_GNU_call_site_target = 0x2113,
+ DW_AT_GNU_call_site_target_clobbered = 0x2114,
+ DW_AT_GNU_tail_call = 0x2115,
+ DW_AT_GNU_all_tail_call_sites = 0x2116,
+ DW_AT_GNU_all_call_sites = 0x2117,
+ DW_AT_GNU_all_source_call_sites = 0x2118,
+ DW_AT_GNU_macros = 0x2119,
+
+// Extensions for Fission proposal.
+ DW_AT_GNU_dwo_name = 0x2130,
+ DW_AT_GNU_dwo_id = 0x2131,
+ DW_AT_GNU_ranges_base = 0x2132,
+ DW_AT_GNU_addr_base = 0x2133,
+ DW_AT_GNU_pubnames = 0x2134,
+ DW_AT_GNU_pubtypes = 0x2135,
+ DW_AT_GNU_discriminator = 0x2136,
+ DW_AT_GNU_locviews = 0x2137,
+ DW_AT_GNU_entry_view = 0x2138,
+
+// Conflict with Sun.
+// DW_AT_VMS_rtnbeg_pd_address = 0x2201,
+
+// Sun extensions.
+ DW_AT_SUN_template = 0x2201,
+ DW_AT_SUN_alignment = 0x2202,
+ DW_AT_SUN_vtable = 0x2203,
+ DW_AT_SUN_count_guarantee = 0x2204,
+ DW_AT_SUN_command_line = 0x2205,
+ DW_AT_SUN_vbase = 0x2206,
+ DW_AT_SUN_compile_options = 0x2207,
+ DW_AT_SUN_language = 0x2208,
+ DW_AT_SUN_browser_file = 0x2209,
+ DW_AT_SUN_vtable_abi = 0x2210,
+ DW_AT_SUN_func_offsets = 0x2211,
+ DW_AT_SUN_cf_kind = 0x2212,
+ DW_AT_SUN_vtable_index = 0x2213,
+ DW_AT_SUN_omp_tpriv_addr = 0x2214,
+ DW_AT_SUN_omp_child_func = 0x2215,
+ DW_AT_SUN_func_offset = 0x2216,
+ DW_AT_SUN_memop_type_ref = 0x2217,
+ DW_AT_SUN_profile_id = 0x2218,
+ DW_AT_SUN_memop_signature = 0x2219,
+ DW_AT_SUN_obj_dir = 0x2220,
+ DW_AT_SUN_obj_file = 0x2221,
+ DW_AT_SUN_original_name = 0x2222,
+ DW_AT_SUN_hwcprof_signature = 0x2223,
+ DW_AT_SUN_amd64_parmdump = 0x2224,
+ DW_AT_SUN_part_link_name = 0x2225,
+ DW_AT_SUN_link_name = 0x2226,
+ DW_AT_SUN_pass_with_const = 0x2227,
+ DW_AT_SUN_return_with_const = 0x2228,
+ DW_AT_SUN_import_by_name = 0x2229,
+ DW_AT_SUN_f90_pointer = 0x222a,
+ DW_AT_SUN_pass_by_ref = 0x222b,
+ DW_AT_SUN_f90_allocatable = 0x222c,
+ DW_AT_SUN_f90_assumed_shape_array = 0x222d,
+ DW_AT_SUN_c_vla = 0x222e,
+ DW_AT_SUN_return_value_ptr = 0x2230,
+ DW_AT_SUN_dtor_start = 0x2231,
+ DW_AT_SUN_dtor_length = 0x2232,
+ DW_AT_SUN_dtor_state_initial = 0x2233,
+ DW_AT_SUN_dtor_state_final = 0x2234,
+ DW_AT_SUN_dtor_state_deltas = 0x2235,
+ DW_AT_SUN_import_by_lname = 0x2236,
+ DW_AT_SUN_f90_use_only = 0x2237,
+ DW_AT_SUN_namelist_spec = 0x2238,
+ DW_AT_SUN_is_omp_child_func = 0x2239,
+ DW_AT_SUN_fortran_main_alias = 0x223a,
+ DW_AT_SUN_fortran_based = 0x223b,
+
+ DW_AT_ALTIUM_loclist = 0x2300,
+
+ DW_AT_use_GNAT_descriptive_type = 0x2301,
+ DW_AT_GNAT_descriptive_type = 0x2302,
+ DW_AT_GNU_numerator = 0x2303,
+ DW_AT_GNU_denominator = 0x2304,
+ DW_AT_GNU_bias = 0x2305,
+
+ DW_AT_upc_threads_scaled = 0x3210,
+
+// PGI (STMicroelectronics) extensions.
+ DW_AT_PGI_lbase = 0x3a00,
+ DW_AT_PGI_soffset = 0x3a01,
+ DW_AT_PGI_lstride = 0x3a02,
+
+// Borland extensions.
+ DW_AT_BORLAND_property_read = 0x3b11,
+ DW_AT_BORLAND_property_write = 0x3b12,
+ DW_AT_BORLAND_property_implements = 0x3b13,
+ DW_AT_BORLAND_property_index = 0x3b14,
+ DW_AT_BORLAND_property_default = 0x3b15,
+ DW_AT_BORLAND_Delphi_unit = 0x3b20,
+ DW_AT_BORLAND_Delphi_class = 0x3b21,
+ DW_AT_BORLAND_Delphi_record = 0x3b22,
+ DW_AT_BORLAND_Delphi_metaclass = 0x3b23,
+ DW_AT_BORLAND_Delphi_constructor = 0x3b24,
+ DW_AT_BORLAND_Delphi_destructor = 0x3b25,
+ DW_AT_BORLAND_Delphi_anonymous_method = 0x3b26,
+ DW_AT_BORLAND_Delphi_interface = 0x3b27,
+ DW_AT_BORLAND_Delphi_ABI = 0x3b28,
+ DW_AT_BORLAND_Delphi_return = 0x3b29,
+ DW_AT_BORLAND_Delphi_frameptr = 0x3b30,
+ DW_AT_BORLAND_closure = 0x3b31,
+
+// LLVM project extensions.
+ DW_AT_LLVM_include_path = 0x3e00,
+ DW_AT_LLVM_config_macros = 0x3e01,
+ DW_AT_LLVM_isysroot = 0x3e02,
+
+// Apple extensions.
+ DW_AT_APPLE_optimized = 0x3fe1,
+ DW_AT_APPLE_flags = 0x3fe2,
+ DW_AT_APPLE_isa = 0x3fe3,
+ DW_AT_APPLE_block = 0x3fe4,
+ DW_AT_APPLE_major_runtime_vers = 0x3fe5,
+ DW_AT_APPLE_runtime_class = 0x3fe6,
+ DW_AT_APPLE_omit_frame_ptr = 0x3fe7,
+ DW_AT_APPLE_property_name = 0x3fe8,
+ DW_AT_APPLE_property_getter = 0x3fe9,
+ DW_AT_APPLE_property_setter = 0x3fea,
+ DW_AT_APPLE_property_attribute = 0x3feb,
+ DW_AT_APPLE_objc_complete_type = 0x3fec,
+ DW_AT_APPLE_property = 0x3fed
+});
+
+dw!(
+/// The attribute form encodings for DIE attributes.
+///
+/// See Section 7.5.6, Table 7.6.
+DwForm(u16) {
+ DW_FORM_null = 0x00,
+
+ DW_FORM_addr = 0x01,
+ DW_FORM_block2 = 0x03,
+ DW_FORM_block4 = 0x04,
+ DW_FORM_data2 = 0x05,
+ DW_FORM_data4 = 0x06,
+ DW_FORM_data8 = 0x07,
+ DW_FORM_string = 0x08,
+ DW_FORM_block = 0x09,
+ DW_FORM_block1 = 0x0a,
+ DW_FORM_data1 = 0x0b,
+ DW_FORM_flag = 0x0c,
+ DW_FORM_sdata = 0x0d,
+ DW_FORM_strp = 0x0e,
+ DW_FORM_udata = 0x0f,
+ DW_FORM_ref_addr = 0x10,
+ DW_FORM_ref1 = 0x11,
+ DW_FORM_ref2 = 0x12,
+ DW_FORM_ref4 = 0x13,
+ DW_FORM_ref8 = 0x14,
+ DW_FORM_ref_udata = 0x15,
+ DW_FORM_indirect = 0x16,
+
+// DWARF 4.
+ DW_FORM_sec_offset = 0x17,
+ DW_FORM_exprloc = 0x18,
+ DW_FORM_flag_present = 0x19,
+ DW_FORM_ref_sig8 = 0x20,
+
+// DWARF 5.
+ DW_FORM_strx = 0x1a,
+ DW_FORM_addrx = 0x1b,
+ DW_FORM_ref_sup4 = 0x1c,
+ DW_FORM_strp_sup = 0x1d,
+ DW_FORM_data16 = 0x1e,
+ DW_FORM_line_strp = 0x1f,
+ DW_FORM_implicit_const = 0x21,
+ DW_FORM_loclistx = 0x22,
+ DW_FORM_rnglistx = 0x23,
+ DW_FORM_ref_sup8 = 0x24,
+ DW_FORM_strx1 = 0x25,
+ DW_FORM_strx2 = 0x26,
+ DW_FORM_strx3 = 0x27,
+ DW_FORM_strx4 = 0x28,
+ DW_FORM_addrx1 = 0x29,
+ DW_FORM_addrx2 = 0x2a,
+ DW_FORM_addrx3 = 0x2b,
+ DW_FORM_addrx4 = 0x2c,
+
+// Extensions for Fission proposal
+ DW_FORM_GNU_addr_index = 0x1f01,
+ DW_FORM_GNU_str_index = 0x1f02,
+
+// Alternate debug sections proposal (output of "dwz" tool).
+ DW_FORM_GNU_ref_alt = 0x1f20,
+ DW_FORM_GNU_strp_alt = 0x1f21
+});
+
+dw!(
+/// The encodings of the constants used in the `DW_AT_encoding` attribute.
+///
+/// See Section 7.8, Table 7.11.
+DwAte(u8) {
+ DW_ATE_address = 0x01,
+ DW_ATE_boolean = 0x02,
+ DW_ATE_complex_float = 0x03,
+ DW_ATE_float = 0x04,
+ DW_ATE_signed = 0x05,
+ DW_ATE_signed_char = 0x06,
+ DW_ATE_unsigned = 0x07,
+ DW_ATE_unsigned_char = 0x08,
+
+// DWARF 3.
+ DW_ATE_imaginary_float = 0x09,
+ DW_ATE_packed_decimal = 0x0a,
+ DW_ATE_numeric_string = 0x0b,
+ DW_ATE_edited = 0x0c,
+ DW_ATE_signed_fixed = 0x0d,
+ DW_ATE_unsigned_fixed = 0x0e,
+ DW_ATE_decimal_float = 0x0f,
+
+// DWARF 4.
+ DW_ATE_UTF = 0x10,
+ DW_ATE_UCS = 0x11,
+ DW_ATE_ASCII = 0x12,
+
+ DW_ATE_lo_user = 0x80,
+ DW_ATE_hi_user = 0xff,
+});
+
+dw!(
+/// The encodings of the constants used in location list entries.
+///
+/// See Section 7.7.3, Table 7.10.
+DwLle(u8) {
+ DW_LLE_end_of_list = 0x00,
+ DW_LLE_base_addressx = 0x01,
+ DW_LLE_startx_endx = 0x02,
+ DW_LLE_startx_length = 0x03,
+ DW_LLE_offset_pair = 0x04,
+ DW_LLE_default_location = 0x05,
+ DW_LLE_base_address = 0x06,
+ DW_LLE_start_end = 0x07,
+ DW_LLE_start_length = 0x08,
+ DW_LLE_GNU_view_pair = 0x09,
+});
+
+dw!(
+/// The encodings of the constants used in the `DW_AT_decimal_sign` attribute.
+///
+/// See Section 7.8, Table 7.12.
+DwDs(u8) {
+ DW_DS_unsigned = 0x01,
+ DW_DS_leading_overpunch = 0x02,
+ DW_DS_trailing_overpunch = 0x03,
+ DW_DS_leading_separate = 0x04,
+ DW_DS_trailing_separate = 0x05,
+});
+
+dw!(
+/// The encodings of the constants used in the `DW_AT_endianity` attribute.
+///
+/// See Section 7.8, Table 7.13.
+DwEnd(u8) {
+ DW_END_default = 0x00,
+ DW_END_big = 0x01,
+ DW_END_little = 0x02,
+ DW_END_lo_user = 0x40,
+ DW_END_hi_user = 0xff,
+});
+
+dw!(
+/// The encodings of the constants used in the `DW_AT_accessibility` attribute.
+///
+/// See Section 7.9, Table 7.14.
+DwAccess(u8) {
+ DW_ACCESS_public = 0x01,
+ DW_ACCESS_protected = 0x02,
+ DW_ACCESS_private = 0x03,
+});
+
+dw!(
+/// The encodings of the constants used in the `DW_AT_visibility` attribute.
+///
+/// See Section 7.10, Table 7.15.
+DwVis(u8) {
+ DW_VIS_local = 0x01,
+ DW_VIS_exported = 0x02,
+ DW_VIS_qualified = 0x03,
+});
+
+dw!(
+/// The encodings of the constants used in the `DW_AT_virtuality` attribute.
+///
+/// See Section 7.11, Table 7.16.
+DwVirtuality(u8) {
+ DW_VIRTUALITY_none = 0x00,
+ DW_VIRTUALITY_virtual = 0x01,
+ DW_VIRTUALITY_pure_virtual = 0x02,
+});
+
+dw!(
+/// The encodings of the constants used in the `DW_AT_language` attribute.
+///
+/// See Section 7.12, Table 7.17.
+DwLang(u16) {
+ DW_LANG_C89 = 0x0001,
+ DW_LANG_C = 0x0002,
+ DW_LANG_Ada83 = 0x0003,
+ DW_LANG_C_plus_plus = 0x0004,
+ DW_LANG_Cobol74 = 0x0005,
+ DW_LANG_Cobol85 = 0x0006,
+ DW_LANG_Fortran77 = 0x0007,
+ DW_LANG_Fortran90 = 0x0008,
+ DW_LANG_Pascal83 = 0x0009,
+ DW_LANG_Modula2 = 0x000a,
+ DW_LANG_Java = 0x000b,
+ DW_LANG_C99 = 0x000c,
+ DW_LANG_Ada95 = 0x000d,
+ DW_LANG_Fortran95 = 0x000e,
+ DW_LANG_PLI = 0x000f,
+ DW_LANG_ObjC = 0x0010,
+ DW_LANG_ObjC_plus_plus = 0x0011,
+ DW_LANG_UPC = 0x0012,
+ DW_LANG_D = 0x0013,
+ DW_LANG_Python = 0x0014,
+ DW_LANG_OpenCL = 0x0015,
+ DW_LANG_Go = 0x0016,
+ DW_LANG_Modula3 = 0x0017,
+ DW_LANG_Haskell = 0x0018,
+ DW_LANG_C_plus_plus_03 = 0x0019,
+ DW_LANG_C_plus_plus_11 = 0x001a,
+ DW_LANG_OCaml = 0x001b,
+ DW_LANG_Rust = 0x001c,
+ DW_LANG_C11 = 0x001d,
+ DW_LANG_Swift = 0x001e,
+ DW_LANG_Julia = 0x001f,
+ DW_LANG_Dylan = 0x0020,
+ DW_LANG_C_plus_plus_14 = 0x0021,
+ DW_LANG_Fortran03 = 0x0022,
+ DW_LANG_Fortran08 = 0x0023,
+ DW_LANG_RenderScript = 0x0024,
+ DW_LANG_BLISS = 0x0025,
+ DW_LANG_Kotlin = 0x0026,
+ DW_LANG_Zig = 0x0027,
+ DW_LANG_Crystal = 0x0028,
+ DW_LANG_C_plus_plus_17 = 0x002a,
+ DW_LANG_C_plus_plus_20 = 0x002b,
+ DW_LANG_C17 = 0x002c,
+ DW_LANG_Fortran18 = 0x002d,
+ DW_LANG_Ada2005 = 0x002e,
+ DW_LANG_Ada2012 = 0x002f,
+
+ DW_LANG_lo_user = 0x8000,
+ DW_LANG_hi_user = 0xffff,
+
+ DW_LANG_Mips_Assembler = 0x8001,
+ DW_LANG_GOOGLE_RenderScript = 0x8e57,
+ DW_LANG_SUN_Assembler = 0x9001,
+ DW_LANG_ALTIUM_Assembler = 0x9101,
+ DW_LANG_BORLAND_Delphi = 0xb000,
+});
+
+impl DwLang {
+ /// Get the default DW_AT_lower_bound for this language.
+ pub fn default_lower_bound(self) -> Option<usize> {
+ match self {
+ DW_LANG_C89
+ | DW_LANG_C
+ | DW_LANG_C_plus_plus
+ | DW_LANG_Java
+ | DW_LANG_C99
+ | DW_LANG_ObjC
+ | DW_LANG_ObjC_plus_plus
+ | DW_LANG_UPC
+ | DW_LANG_D
+ | DW_LANG_Python
+ | DW_LANG_OpenCL
+ | DW_LANG_Go
+ | DW_LANG_Haskell
+ | DW_LANG_C_plus_plus_03
+ | DW_LANG_C_plus_plus_11
+ | DW_LANG_OCaml
+ | DW_LANG_Rust
+ | DW_LANG_C11
+ | DW_LANG_Swift
+ | DW_LANG_Dylan
+ | DW_LANG_C_plus_plus_14
+ | DW_LANG_RenderScript
+ | DW_LANG_BLISS => Some(0),
+ DW_LANG_Ada83 | DW_LANG_Cobol74 | DW_LANG_Cobol85 | DW_LANG_Fortran77
+ | DW_LANG_Fortran90 | DW_LANG_Pascal83 | DW_LANG_Modula2 | DW_LANG_Ada95
+ | DW_LANG_Fortran95 | DW_LANG_PLI | DW_LANG_Modula3 | DW_LANG_Julia
+ | DW_LANG_Fortran03 | DW_LANG_Fortran08 => Some(1),
+ _ => None,
+ }
+ }
+}
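A minimal sketch (not part of the vendored patch) of how `default_lower_bound` behaves for a few language codes; it assumes the crate-root re-exports that `lib.rs` sets up with `pub use crate::constants::*`:

```rust
use gimli::{DwLang, DW_LANG_Fortran90, DW_LANG_Rust};

fn main() {
    // C-family and Rust arrays index from 0, Fortran arrays from 1.
    assert_eq!(DW_LANG_Rust.default_lower_bound(), Some(0));
    assert_eq!(DW_LANG_Fortran90.default_lower_bound(), Some(1));
    // Unknown or vendor-specific language codes have no defined default.
    assert_eq!(DwLang(0x9999).default_lower_bound(), None);
}
```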
+
+dw!(
+/// The encodings of the constants used in the `DW_AT_address_class` attribute.
+///
+/// There is only one value that is common to all target architectures.
+/// See Section 7.13.
+DwAddr(u64) {
+ DW_ADDR_none = 0x00,
+});
+
+dw!(
+/// The encodings of the constants used in the `DW_AT_identifier_case` attribute.
+///
+/// See Section 7.14, Table 7.18.
+DwId(u8) {
+ DW_ID_case_sensitive = 0x00,
+ DW_ID_up_case = 0x01,
+ DW_ID_down_case = 0x02,
+ DW_ID_case_insensitive = 0x03,
+});
+
+dw!(
+/// The encodings of the constants used in the `DW_AT_calling_convention` attribute.
+///
+/// See Section 7.15, Table 7.19.
+DwCc(u8) {
+ DW_CC_normal = 0x01,
+ DW_CC_program = 0x02,
+ DW_CC_nocall = 0x03,
+ DW_CC_pass_by_reference = 0x04,
+ DW_CC_pass_by_value = 0x05,
+ DW_CC_lo_user = 0x40,
+ DW_CC_hi_user = 0xff,
+});
+
+dw!(
+/// The encodings of the constants used in the `DW_AT_inline` attribute.
+///
+/// See Section 7.16, Table 7.20.
+DwInl(u8) {
+ DW_INL_not_inlined = 0x00,
+ DW_INL_inlined = 0x01,
+ DW_INL_declared_not_inlined = 0x02,
+ DW_INL_declared_inlined = 0x03,
+});
+
+dw!(
+/// The encodings of the constants used in the `DW_AT_ordering` attribute.
+///
+/// See Section 7.17, Table 7.21.
+DwOrd(u8) {
+ DW_ORD_row_major = 0x00,
+ DW_ORD_col_major = 0x01,
+});
+
+dw!(
+/// The encodings of the constants used in the `DW_AT_discr_list` attribute.
+///
+/// See Section 7.18, Table 7.22.
+DwDsc(u8) {
+ DW_DSC_label = 0x00,
+ DW_DSC_range = 0x01,
+});
+
+dw!(
+/// Name index attribute encodings.
+///
+/// See Section 7.19, Table 7.23.
+DwIdx(u16) {
+ DW_IDX_compile_unit = 1,
+ DW_IDX_type_unit = 2,
+ DW_IDX_die_offset = 3,
+ DW_IDX_parent = 4,
+ DW_IDX_type_hash = 5,
+ DW_IDX_lo_user = 0x2000,
+ DW_IDX_hi_user = 0x3fff,
+});
+
+dw!(
+/// The encodings of the constants used in the `DW_AT_defaulted` attribute.
+///
+/// See Section 7.20, Table 7.24.
+DwDefaulted(u8) {
+ DW_DEFAULTED_no = 0x00,
+ DW_DEFAULTED_in_class = 0x01,
+ DW_DEFAULTED_out_of_class = 0x02,
+});
+
+dw!(
+/// The encodings for the standard opcodes for line number information.
+///
+/// See Section 7.22, Table 7.25.
+DwLns(u8) {
+ DW_LNS_copy = 0x01,
+ DW_LNS_advance_pc = 0x02,
+ DW_LNS_advance_line = 0x03,
+ DW_LNS_set_file = 0x04,
+ DW_LNS_set_column = 0x05,
+ DW_LNS_negate_stmt = 0x06,
+ DW_LNS_set_basic_block = 0x07,
+ DW_LNS_const_add_pc = 0x08,
+ DW_LNS_fixed_advance_pc = 0x09,
+ DW_LNS_set_prologue_end = 0x0a,
+ DW_LNS_set_epilogue_begin = 0x0b,
+ DW_LNS_set_isa = 0x0c,
+});
+
+dw!(
+/// The encodings for the extended opcodes for line number information.
+///
+/// See Section 7.22, Table 7.26.
+DwLne(u8) {
+ DW_LNE_end_sequence = 0x01,
+ DW_LNE_set_address = 0x02,
+ DW_LNE_define_file = 0x03,
+ DW_LNE_set_discriminator = 0x04,
+
+ DW_LNE_lo_user = 0x80,
+ DW_LNE_hi_user = 0xff,
+});
+
+dw!(
+/// The encodings for the line number header entry formats.
+///
+/// See Section 7.22, Table 7.27.
+DwLnct(u16) {
+ DW_LNCT_path = 0x1,
+ DW_LNCT_directory_index = 0x2,
+ DW_LNCT_timestamp = 0x3,
+ DW_LNCT_size = 0x4,
+ DW_LNCT_MD5 = 0x5,
+ DW_LNCT_lo_user = 0x2000,
+ DW_LNCT_hi_user = 0x3fff,
+});
+
+dw!(
+/// The encodings for macro information entry types.
+///
+/// See Section 7.23, Table 7.28.
+DwMacro(u8) {
+ DW_MACRO_define = 0x01,
+ DW_MACRO_undef = 0x02,
+ DW_MACRO_start_file = 0x03,
+ DW_MACRO_end_file = 0x04,
+ DW_MACRO_define_strp = 0x05,
+ DW_MACRO_undef_strp = 0x06,
+ DW_MACRO_import = 0x07,
+ DW_MACRO_define_sup = 0x08,
+ DW_MACRO_undef_sup = 0x09,
+ DW_MACRO_import_sup = 0x0a,
+ DW_MACRO_define_strx = 0x0b,
+ DW_MACRO_undef_strx = 0x0c,
+ DW_MACRO_lo_user = 0xe0,
+ DW_MACRO_hi_user = 0xff,
+});
+
+dw!(
+/// Range list entry encoding values.
+///
+/// See Section 7.25, Table 7.30.
+DwRle(u8) {
+ DW_RLE_end_of_list = 0x00,
+ DW_RLE_base_addressx = 0x01,
+ DW_RLE_startx_endx = 0x02,
+ DW_RLE_startx_length = 0x03,
+ DW_RLE_offset_pair = 0x04,
+ DW_RLE_base_address = 0x05,
+ DW_RLE_start_end = 0x06,
+ DW_RLE_start_length = 0x07,
+});
+
+dw!(
+/// The encodings for DWARF expression operations.
+///
+/// See Section 7.7.1, Table 7.9.
+DwOp(u8) {
+ DW_OP_addr = 0x03,
+ DW_OP_deref = 0x06,
+ DW_OP_const1u = 0x08,
+ DW_OP_const1s = 0x09,
+ DW_OP_const2u = 0x0a,
+ DW_OP_const2s = 0x0b,
+ DW_OP_const4u = 0x0c,
+ DW_OP_const4s = 0x0d,
+ DW_OP_const8u = 0x0e,
+ DW_OP_const8s = 0x0f,
+ DW_OP_constu = 0x10,
+ DW_OP_consts = 0x11,
+ DW_OP_dup = 0x12,
+ DW_OP_drop = 0x13,
+ DW_OP_over = 0x14,
+ DW_OP_pick = 0x15,
+ DW_OP_swap = 0x16,
+ DW_OP_rot = 0x17,
+ DW_OP_xderef = 0x18,
+ DW_OP_abs = 0x19,
+ DW_OP_and = 0x1a,
+ DW_OP_div = 0x1b,
+ DW_OP_minus = 0x1c,
+ DW_OP_mod = 0x1d,
+ DW_OP_mul = 0x1e,
+ DW_OP_neg = 0x1f,
+ DW_OP_not = 0x20,
+ DW_OP_or = 0x21,
+ DW_OP_plus = 0x22,
+ DW_OP_plus_uconst = 0x23,
+ DW_OP_shl = 0x24,
+ DW_OP_shr = 0x25,
+ DW_OP_shra = 0x26,
+ DW_OP_xor = 0x27,
+ DW_OP_bra = 0x28,
+ DW_OP_eq = 0x29,
+ DW_OP_ge = 0x2a,
+ DW_OP_gt = 0x2b,
+ DW_OP_le = 0x2c,
+ DW_OP_lt = 0x2d,
+ DW_OP_ne = 0x2e,
+ DW_OP_skip = 0x2f,
+ DW_OP_lit0 = 0x30,
+ DW_OP_lit1 = 0x31,
+ DW_OP_lit2 = 0x32,
+ DW_OP_lit3 = 0x33,
+ DW_OP_lit4 = 0x34,
+ DW_OP_lit5 = 0x35,
+ DW_OP_lit6 = 0x36,
+ DW_OP_lit7 = 0x37,
+ DW_OP_lit8 = 0x38,
+ DW_OP_lit9 = 0x39,
+ DW_OP_lit10 = 0x3a,
+ DW_OP_lit11 = 0x3b,
+ DW_OP_lit12 = 0x3c,
+ DW_OP_lit13 = 0x3d,
+ DW_OP_lit14 = 0x3e,
+ DW_OP_lit15 = 0x3f,
+ DW_OP_lit16 = 0x40,
+ DW_OP_lit17 = 0x41,
+ DW_OP_lit18 = 0x42,
+ DW_OP_lit19 = 0x43,
+ DW_OP_lit20 = 0x44,
+ DW_OP_lit21 = 0x45,
+ DW_OP_lit22 = 0x46,
+ DW_OP_lit23 = 0x47,
+ DW_OP_lit24 = 0x48,
+ DW_OP_lit25 = 0x49,
+ DW_OP_lit26 = 0x4a,
+ DW_OP_lit27 = 0x4b,
+ DW_OP_lit28 = 0x4c,
+ DW_OP_lit29 = 0x4d,
+ DW_OP_lit30 = 0x4e,
+ DW_OP_lit31 = 0x4f,
+ DW_OP_reg0 = 0x50,
+ DW_OP_reg1 = 0x51,
+ DW_OP_reg2 = 0x52,
+ DW_OP_reg3 = 0x53,
+ DW_OP_reg4 = 0x54,
+ DW_OP_reg5 = 0x55,
+ DW_OP_reg6 = 0x56,
+ DW_OP_reg7 = 0x57,
+ DW_OP_reg8 = 0x58,
+ DW_OP_reg9 = 0x59,
+ DW_OP_reg10 = 0x5a,
+ DW_OP_reg11 = 0x5b,
+ DW_OP_reg12 = 0x5c,
+ DW_OP_reg13 = 0x5d,
+ DW_OP_reg14 = 0x5e,
+ DW_OP_reg15 = 0x5f,
+ DW_OP_reg16 = 0x60,
+ DW_OP_reg17 = 0x61,
+ DW_OP_reg18 = 0x62,
+ DW_OP_reg19 = 0x63,
+ DW_OP_reg20 = 0x64,
+ DW_OP_reg21 = 0x65,
+ DW_OP_reg22 = 0x66,
+ DW_OP_reg23 = 0x67,
+ DW_OP_reg24 = 0x68,
+ DW_OP_reg25 = 0x69,
+ DW_OP_reg26 = 0x6a,
+ DW_OP_reg27 = 0x6b,
+ DW_OP_reg28 = 0x6c,
+ DW_OP_reg29 = 0x6d,
+ DW_OP_reg30 = 0x6e,
+ DW_OP_reg31 = 0x6f,
+ DW_OP_breg0 = 0x70,
+ DW_OP_breg1 = 0x71,
+ DW_OP_breg2 = 0x72,
+ DW_OP_breg3 = 0x73,
+ DW_OP_breg4 = 0x74,
+ DW_OP_breg5 = 0x75,
+ DW_OP_breg6 = 0x76,
+ DW_OP_breg7 = 0x77,
+ DW_OP_breg8 = 0x78,
+ DW_OP_breg9 = 0x79,
+ DW_OP_breg10 = 0x7a,
+ DW_OP_breg11 = 0x7b,
+ DW_OP_breg12 = 0x7c,
+ DW_OP_breg13 = 0x7d,
+ DW_OP_breg14 = 0x7e,
+ DW_OP_breg15 = 0x7f,
+ DW_OP_breg16 = 0x80,
+ DW_OP_breg17 = 0x81,
+ DW_OP_breg18 = 0x82,
+ DW_OP_breg19 = 0x83,
+ DW_OP_breg20 = 0x84,
+ DW_OP_breg21 = 0x85,
+ DW_OP_breg22 = 0x86,
+ DW_OP_breg23 = 0x87,
+ DW_OP_breg24 = 0x88,
+ DW_OP_breg25 = 0x89,
+ DW_OP_breg26 = 0x8a,
+ DW_OP_breg27 = 0x8b,
+ DW_OP_breg28 = 0x8c,
+ DW_OP_breg29 = 0x8d,
+ DW_OP_breg30 = 0x8e,
+ DW_OP_breg31 = 0x8f,
+ DW_OP_regx = 0x90,
+ DW_OP_fbreg = 0x91,
+ DW_OP_bregx = 0x92,
+ DW_OP_piece = 0x93,
+ DW_OP_deref_size = 0x94,
+ DW_OP_xderef_size = 0x95,
+ DW_OP_nop = 0x96,
+ DW_OP_push_object_address = 0x97,
+ DW_OP_call2 = 0x98,
+ DW_OP_call4 = 0x99,
+ DW_OP_call_ref = 0x9a,
+ DW_OP_form_tls_address = 0x9b,
+ DW_OP_call_frame_cfa = 0x9c,
+ DW_OP_bit_piece = 0x9d,
+ DW_OP_implicit_value = 0x9e,
+ DW_OP_stack_value = 0x9f,
+ DW_OP_implicit_pointer = 0xa0,
+ DW_OP_addrx = 0xa1,
+ DW_OP_constx = 0xa2,
+ DW_OP_entry_value = 0xa3,
+ DW_OP_const_type = 0xa4,
+ DW_OP_regval_type = 0xa5,
+ DW_OP_deref_type = 0xa6,
+ DW_OP_xderef_type = 0xa7,
+ DW_OP_convert = 0xa8,
+ DW_OP_reinterpret = 0xa9,
+
+ // GNU extensions
+ DW_OP_GNU_push_tls_address = 0xe0,
+ DW_OP_GNU_implicit_pointer = 0xf2,
+ DW_OP_GNU_entry_value = 0xf3,
+ DW_OP_GNU_const_type = 0xf4,
+ DW_OP_GNU_regval_type = 0xf5,
+ DW_OP_GNU_deref_type = 0xf6,
+ DW_OP_GNU_convert = 0xf7,
+ DW_OP_GNU_reinterpret = 0xf9,
+ DW_OP_GNU_parameter_ref = 0xfa,
+ DW_OP_GNU_addr_index = 0xfb,
+ DW_OP_GNU_const_index = 0xfc,
+
+ // Wasm extensions
+ DW_OP_WASM_location = 0xed,
+});
+
+dw!(
+/// Pointer encoding used by `.eh_frame`.
+///
+/// The four lower bits describe the format of the pointer, and the upper
+/// four bits describe how the encoding should be applied.
+///
+/// Defined in https://refspecs.linuxfoundation.org/LSB_4.0.0/LSB-Core-generic/LSB-Core-generic/dwarfext.html
+DwEhPe(u8) {
+// Format of pointer encoding.
+
+// "Unsigned value is encoded using the Little Endian Base 128"
+ DW_EH_PE_uleb128 = 0x1,
+// "A 2 bytes unsigned value."
+ DW_EH_PE_udata2 = 0x2,
+// "A 4 bytes unsigned value."
+ DW_EH_PE_udata4 = 0x3,
+// "An 8 bytes unsigned value."
+ DW_EH_PE_udata8 = 0x4,
+// "Signed value is encoded using the Little Endian Base 128"
+ DW_EH_PE_sleb128 = 0x9,
+// "A 2 bytes signed value."
+ DW_EH_PE_sdata2 = 0x0a,
+// "A 4 bytes signed value."
+ DW_EH_PE_sdata4 = 0x0b,
+// "An 8 bytes signed value."
+ DW_EH_PE_sdata8 = 0x0c,
+
+// How the pointer encoding should be applied.
+
+// `DW_EH_PE_pcrel` pointers are relative to their own location.
+ DW_EH_PE_pcrel = 0x10,
+// "Value is relative to the beginning of the .text section."
+ DW_EH_PE_textrel = 0x20,
+// "Value is relative to the beginning of the .got or .eh_frame_hdr section."
+ DW_EH_PE_datarel = 0x30,
+// "Value is relative to the beginning of the function."
+ DW_EH_PE_funcrel = 0x40,
+// "Value is aligned to an address unit sized boundary."
+ DW_EH_PE_aligned = 0x50,
+
+// This bit can be set for any of the above encoding applications. When set,
+// the encoded value is the address of the real pointer result, not the
+// pointer result itself.
+//
+// This isn't defined in the DWARF or the `.eh_frame` standards, but is
+// generated by both GNU/Linux and macOS tooling.
+ DW_EH_PE_indirect = 0x80,
+
+// These constants apply to both the lower and upper bits.
+
+// "The Value is a literal pointer whose size is determined by the
+// architecture."
+ DW_EH_PE_absptr = 0x0,
+// The absence of a pointer and encoding.
+ DW_EH_PE_omit = 0xff,
+});
+
+const DW_EH_PE_FORMAT_MASK: u8 = 0b0000_1111;
+
+// Ignores indirection bit.
+const DW_EH_PE_APPLICATION_MASK: u8 = 0b0111_0000;
+
+impl DwEhPe {
+ /// Get the pointer encoding's format.
+ #[inline]
+ pub fn format(self) -> DwEhPe {
+ DwEhPe(self.0 & DW_EH_PE_FORMAT_MASK)
+ }
+
+ /// Get the pointer encoding's application.
+ #[inline]
+ pub fn application(self) -> DwEhPe {
+ DwEhPe(self.0 & DW_EH_PE_APPLICATION_MASK)
+ }
+
+ /// Is this encoding the absent pointer encoding?
+ #[inline]
+ pub fn is_absent(self) -> bool {
+ self == DW_EH_PE_omit
+ }
+
+ /// Is this coding indirect? If so, its encoded value is the address of the
+ /// real pointer result, not the pointer result itself.
+ #[inline]
+ pub fn is_indirect(self) -> bool {
+ self.0 & DW_EH_PE_indirect.0 != 0
+ }
+
+ /// Is this a known, valid pointer encoding?
+ pub fn is_valid_encoding(self) -> bool {
+ if self.is_absent() {
+ return true;
+ }
+
+ match self.format() {
+ DW_EH_PE_absptr | DW_EH_PE_uleb128 | DW_EH_PE_udata2 | DW_EH_PE_udata4
+ | DW_EH_PE_udata8 | DW_EH_PE_sleb128 | DW_EH_PE_sdata2 | DW_EH_PE_sdata4
+ | DW_EH_PE_sdata8 => {}
+ _ => return false,
+ }
+
+ match self.application() {
+ DW_EH_PE_absptr | DW_EH_PE_pcrel | DW_EH_PE_textrel | DW_EH_PE_datarel
+ | DW_EH_PE_funcrel | DW_EH_PE_aligned => {}
+ _ => return false,
+ }
+
+ true
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn test_dw_eh_pe_format() {
+ let encoding = DwEhPe(DW_EH_PE_pcrel.0 | DW_EH_PE_uleb128.0);
+ assert_eq!(encoding.format(), DW_EH_PE_uleb128);
+ }
+
+ #[test]
+ fn test_dw_eh_pe_application() {
+ let encoding = DwEhPe(DW_EH_PE_pcrel.0 | DW_EH_PE_uleb128.0);
+ assert_eq!(encoding.application(), DW_EH_PE_pcrel);
+ }
+
+ #[test]
+ fn test_dw_eh_pe_is_absent() {
+ assert_eq!(DW_EH_PE_absptr.is_absent(), false);
+ assert_eq!(DW_EH_PE_omit.is_absent(), true);
+ }
+
+ #[test]
+ fn test_dw_eh_pe_is_valid_encoding_ok() {
+ let encoding = DwEhPe(DW_EH_PE_uleb128.0 | DW_EH_PE_pcrel.0);
+ assert!(encoding.is_valid_encoding());
+ assert!(DW_EH_PE_absptr.is_valid_encoding());
+ assert!(DW_EH_PE_omit.is_valid_encoding());
+ }
+
+ #[test]
+ fn test_dw_eh_pe_is_valid_encoding_bad_format() {
+ let encoding = DwEhPe((DW_EH_PE_sdata8.0 + 1) | DW_EH_PE_pcrel.0);
+ assert_eq!(encoding.is_valid_encoding(), false);
+ }
+
+ #[test]
+ fn test_dw_eh_pe_is_valid_encoding_bad_application() {
+ let encoding = DwEhPe(DW_EH_PE_sdata8.0 | (DW_EH_PE_aligned.0 + 1));
+ assert_eq!(encoding.is_valid_encoding(), false);
+ }
+}
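A brief usage sketch (illustrative only, assuming the crate-root re-exports) showing how the mask helpers above decompose a composite `.eh_frame` pointer encoding such as the common PC-relative, signed 4-byte form:

```rust
use gimli::{DwEhPe, DW_EH_PE_omit, DW_EH_PE_pcrel, DW_EH_PE_sdata4};

fn main() {
    // Combine a format (low nibble) with an application (high nibble).
    let enc = DwEhPe(DW_EH_PE_pcrel.0 | DW_EH_PE_sdata4.0);
    assert_eq!(enc.format(), DW_EH_PE_sdata4);
    assert_eq!(enc.application(), DW_EH_PE_pcrel);
    assert!(enc.is_valid_encoding());
    assert!(!enc.is_indirect());
    // 0xff means "no pointer present".
    assert!(DW_EH_PE_omit.is_absent());
}
```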
diff --git a/vendor/gimli-0.26.2/src/endianity.rs b/vendor/gimli-0.26.2/src/endianity.rs
new file mode 100644
index 000000000..3201551f1
--- /dev/null
+++ b/vendor/gimli-0.26.2/src/endianity.rs
@@ -0,0 +1,256 @@
+//! Types for compile-time and run-time endianity.
+
+use core::convert::TryInto;
+use core::fmt::Debug;
+
+/// A trait describing the endianity of some buffer.
+pub trait Endianity: Debug + Default + Clone + Copy + PartialEq + Eq {
+ /// Return true for big endian byte order.
+ fn is_big_endian(self) -> bool;
+
+ /// Return true for little endian byte order.
+ #[inline]
+ fn is_little_endian(self) -> bool {
+ !self.is_big_endian()
+ }
+
+ /// Reads an unsigned 16 bit integer from `buf`.
+ ///
+ /// # Panics
+ ///
+ /// Panics when `buf.len() < 2`.
+ #[inline]
+ fn read_u16(self, buf: &[u8]) -> u16 {
+ let bytes: &[u8; 2] = buf[..2].try_into().unwrap();
+ if self.is_big_endian() {
+ u16::from_be_bytes(*bytes)
+ } else {
+ u16::from_le_bytes(*bytes)
+ }
+ }
+
+ /// Reads an unsigned 32 bit integer from `buf`.
+ ///
+ /// # Panics
+ ///
+ /// Panics when `buf.len() < 4`.
+ #[inline]
+ fn read_u32(self, buf: &[u8]) -> u32 {
+ let bytes: &[u8; 4] = buf[..4].try_into().unwrap();
+ if self.is_big_endian() {
+ u32::from_be_bytes(*bytes)
+ } else {
+ u32::from_le_bytes(*bytes)
+ }
+ }
+
+ /// Reads an unsigned 64 bit integer from `buf`.
+ ///
+ /// # Panics
+ ///
+ /// Panics when `buf.len() < 8`.
+ #[inline]
+ fn read_u64(self, buf: &[u8]) -> u64 {
+ let bytes: &[u8; 8] = buf[..8].try_into().unwrap();
+ if self.is_big_endian() {
+ u64::from_be_bytes(*bytes)
+ } else {
+ u64::from_le_bytes(*bytes)
+ }
+ }
+
+ /// Reads an unsigned n-byte integer (where 1 <= n <= 8) into a `u64`.
+ ///
+ /// # Panics
+ ///
+ /// Panics when `buf.len() < 1` or `buf.len() > 8`.
+ #[inline]
+ fn read_uint(&mut self, buf: &[u8]) -> u64 {
+ let mut tmp = [0; 8];
+ if self.is_big_endian() {
+ tmp[8 - buf.len()..].copy_from_slice(buf);
+ } else {
+ tmp[..buf.len()].copy_from_slice(buf);
+ }
+ self.read_u64(&tmp)
+ }
+
+ /// Reads a signed 16 bit integer from `buf`.
+ ///
+ /// # Panics
+ ///
+ /// Panics when `buf.len() < 2`.
+ #[inline]
+ fn read_i16(self, buf: &[u8]) -> i16 {
+ self.read_u16(buf) as i16
+ }
+
+ /// Reads a signed 32 bit integer from `buf`.
+ ///
+ /// # Panics
+ ///
+ /// Panics when `buf.len() < 4`.
+ #[inline]
+ fn read_i32(self, buf: &[u8]) -> i32 {
+ self.read_u32(buf) as i32
+ }
+
+ /// Reads a signed 64 bit integer from `buf`.
+ ///
+ /// # Panics
+ ///
+ /// Panics when `buf.len() < 8`.
+ #[inline]
+ fn read_i64(self, buf: &[u8]) -> i64 {
+ self.read_u64(buf) as i64
+ }
+
+ /// Reads a 32 bit floating point number from `buf`.
+ ///
+ /// # Panics
+ ///
+ /// Panics when `buf.len() < 4`.
+ #[inline]
+ fn read_f32(self, buf: &[u8]) -> f32 {
+ f32::from_bits(self.read_u32(buf))
+ }
+
+ /// Reads a 64 bit floating point number from `buf`.
+ ///
+ /// # Panics
+ ///
+ /// Panics when `buf.len() < 8`.
+ #[inline]
+ fn read_f64(self, buf: &[u8]) -> f64 {
+ f64::from_bits(self.read_u64(buf))
+ }
+
+ /// Writes an unsigned 16 bit integer `n` to `buf`.
+ ///
+ /// # Panics
+ ///
+ /// Panics when `buf.len() < 2`.
+ #[inline]
+ fn write_u16(self, buf: &mut [u8], n: u16) {
+ let bytes = if self.is_big_endian() {
+ n.to_be_bytes()
+ } else {
+ n.to_le_bytes()
+ };
+ buf[..2].copy_from_slice(&bytes);
+ }
+
+ /// Writes an unsigned 32 bit integer `n` to `buf`.
+ ///
+ /// # Panics
+ ///
+ /// Panics when `buf.len() < 4`.
+ #[inline]
+ fn write_u32(self, buf: &mut [u8], n: u32) {
+ let bytes = if self.is_big_endian() {
+ n.to_be_bytes()
+ } else {
+ n.to_le_bytes()
+ };
+ buf[..4].copy_from_slice(&bytes);
+ }
+
+ /// Writes an unsigned 64 bit integer `n` to `buf`.
+ ///
+ /// # Panics
+ ///
+ /// Panics when `buf.len() < 8`.
+ #[inline]
+ fn write_u64(self, buf: &mut [u8], n: u64) {
+ let bytes = if self.is_big_endian() {
+ n.to_be_bytes()
+ } else {
+ n.to_le_bytes()
+ };
+ buf[..8].copy_from_slice(&bytes);
+ }
+}
+
+/// Byte order that is selectable at runtime.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum RunTimeEndian {
+ /// Little endian byte order.
+ Little,
+ /// Big endian byte order.
+ Big,
+}
+
+impl Default for RunTimeEndian {
+ #[cfg(target_endian = "little")]
+ #[inline]
+ fn default() -> RunTimeEndian {
+ RunTimeEndian::Little
+ }
+
+ #[cfg(target_endian = "big")]
+ #[inline]
+ fn default() -> RunTimeEndian {
+ RunTimeEndian::Big
+ }
+}
+
+impl Endianity for RunTimeEndian {
+ #[inline]
+ fn is_big_endian(self) -> bool {
+ self != RunTimeEndian::Little
+ }
+}
+
+/// Little endian byte order.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct LittleEndian;
+
+impl Default for LittleEndian {
+ #[inline]
+ fn default() -> LittleEndian {
+ LittleEndian
+ }
+}
+
+impl Endianity for LittleEndian {
+ #[inline]
+ fn is_big_endian(self) -> bool {
+ false
+ }
+}
+
+/// Big endian byte order.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct BigEndian;
+
+impl Default for BigEndian {
+ #[inline]
+ fn default() -> BigEndian {
+ BigEndian
+ }
+}
+
+impl Endianity for BigEndian {
+ #[inline]
+ fn is_big_endian(self) -> bool {
+ true
+ }
+}
+
+/// The native endianity for the target platform.
+#[cfg(target_endian = "little")]
+pub type NativeEndian = LittleEndian;
+
+#[cfg(target_endian = "little")]
+#[allow(non_upper_case_globals)]
+#[doc(hidden)]
+pub const NativeEndian: LittleEndian = LittleEndian;
+
+/// The native endianity for the target platform.
+#[cfg(target_endian = "big")]
+pub type NativeEndian = BigEndian;
+
+#[cfg(target_endian = "big")]
+#[allow(non_upper_case_globals)]
+#[doc(hidden)]
+pub const NativeEndian: BigEndian = BigEndian;
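A short sketch of the trait in use (assuming the crate's default features), contrasting the compile-time endianities with `RunTimeEndian`; the names are the crate-root re-exports listed in `lib.rs`:

```rust
use gimli::{Endianity, LittleEndian, RunTimeEndian};

fn main() {
    let buf = [0x78, 0x56, 0x34, 0x12];

    // Compile-time endianity: the is_big_endian() branch is resolved statically.
    assert_eq!(LittleEndian.read_u32(&buf), 0x1234_5678);

    // Run-time endianity, e.g. chosen from an object file header.
    let endian = RunTimeEndian::Big;
    assert_eq!(endian.read_u32(&buf), 0x7856_3412);

    // read_uint handles odd-sized fields of 1 to 8 bytes.
    let mut le = LittleEndian;
    assert_eq!(le.read_uint(&buf[..3]), 0x0034_5678);
}
```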
diff --git a/vendor/gimli-0.26.2/src/leb128.rs b/vendor/gimli-0.26.2/src/leb128.rs
new file mode 100644
index 000000000..de81cfdcf
--- /dev/null
+++ b/vendor/gimli-0.26.2/src/leb128.rs
@@ -0,0 +1,612 @@
+//! Read and write DWARF's "Little Endian Base 128" (LEB128) variable length
+//! integer encoding.
+//!
+//! The implementation is a direct translation of the pseudocode in the DWARF 4
+//! standard's appendix C.
+//!
+//! Read and write signed integers:
+//!
+//! ```
+//! # #[cfg(all(feature = "read", feature = "write"))] {
+//! use gimli::{EndianSlice, NativeEndian, leb128};
+//!
+//! let mut buf = [0; 1024];
+//!
+//! // Write to anything that implements `std::io::Write`.
+//! {
+//! let mut writable = &mut buf[..];
+//! leb128::write::signed(&mut writable, -12345).expect("Should write number");
+//! }
+//!
+//! // Read from anything that implements `gimli::Reader`.
+//! let mut readable = EndianSlice::new(&buf[..], NativeEndian);
+//! let val = leb128::read::signed(&mut readable).expect("Should read number");
+//! assert_eq!(val, -12345);
+//! # }
+//! ```
+//!
+//! Or read and write unsigned integers:
+//!
+//! ```
+//! # #[cfg(all(feature = "read", feature = "write"))] {
+//! use gimli::{EndianSlice, NativeEndian, leb128};
+//!
+//! let mut buf = [0; 1024];
+//!
+//! {
+//! let mut writable = &mut buf[..];
+//! leb128::write::unsigned(&mut writable, 98765).expect("Should write number");
+//! }
+//!
+//! let mut readable = EndianSlice::new(&buf[..], NativeEndian);
+//! let val = leb128::read::unsigned(&mut readable).expect("Should read number");
+//! assert_eq!(val, 98765);
+//! # }
+//! ```
+
+const CONTINUATION_BIT: u8 = 1 << 7;
+#[cfg(feature = "read-core")]
+const SIGN_BIT: u8 = 1 << 6;
+
+#[inline]
+fn low_bits_of_byte(byte: u8) -> u8 {
+ byte & !CONTINUATION_BIT
+}
+
+#[inline]
+#[allow(dead_code)]
+fn low_bits_of_u64(val: u64) -> u8 {
+ let byte = val & u64::from(core::u8::MAX);
+ low_bits_of_byte(byte as u8)
+}
+
+/// A module for reading signed and unsigned integers that have been LEB128
+/// encoded.
+#[cfg(feature = "read-core")]
+pub mod read {
+ use super::{low_bits_of_byte, CONTINUATION_BIT, SIGN_BIT};
+ use crate::read::{Error, Reader, Result};
+
+ /// Read bytes until the LEB128 continuation bit is not set.
+ pub fn skip<R: Reader>(r: &mut R) -> Result<()> {
+ loop {
+ let byte = r.read_u8()?;
+ if byte & CONTINUATION_BIT == 0 {
+ return Ok(());
+ }
+ }
+ }
+
+ /// Read an unsigned LEB128 number from the given `Reader` and
+ /// return it or an error if reading failed.
+ pub fn unsigned<R: Reader>(r: &mut R) -> Result<u64> {
+ let mut result = 0;
+ let mut shift = 0;
+
+ loop {
+ let byte = r.read_u8()?;
+ if shift == 63 && byte != 0x00 && byte != 0x01 {
+ return Err(Error::BadUnsignedLeb128);
+ }
+
+ let low_bits = u64::from(low_bits_of_byte(byte));
+ result |= low_bits << shift;
+
+ if byte & CONTINUATION_BIT == 0 {
+ return Ok(result);
+ }
+
+ shift += 7;
+ }
+ }
+
+ /// Read an LEB128 u16 from the given `Reader` and
+ /// return it or an error if reading failed.
+ pub fn u16<R: Reader>(r: &mut R) -> Result<u16> {
+ let byte = r.read_u8()?;
+ let mut result = u16::from(low_bits_of_byte(byte));
+ if byte & CONTINUATION_BIT == 0 {
+ return Ok(result);
+ }
+
+ let byte = r.read_u8()?;
+ result |= u16::from(low_bits_of_byte(byte)) << 7;
+ if byte & CONTINUATION_BIT == 0 {
+ return Ok(result);
+ }
+
+ let byte = r.read_u8()?;
+ if byte > 0x03 {
+ return Err(Error::BadUnsignedLeb128);
+ }
+ result += u16::from(byte) << 14;
+ Ok(result)
+ }
+
+ /// Read a signed LEB128 number from the given `Reader` and
+ /// return it or an error if reading failed.
+ pub fn signed<R: Reader>(r: &mut R) -> Result<i64> {
+ let mut result = 0;
+ let mut shift = 0;
+ let size = 64;
+ let mut byte;
+
+ loop {
+ byte = r.read_u8()?;
+ if shift == 63 && byte != 0x00 && byte != 0x7f {
+ return Err(Error::BadSignedLeb128);
+ }
+
+ let low_bits = i64::from(low_bits_of_byte(byte));
+ result |= low_bits << shift;
+ shift += 7;
+
+ if byte & CONTINUATION_BIT == 0 {
+ break;
+ }
+ }
+
+ if shift < size && (SIGN_BIT & byte) == SIGN_BIT {
+ // Sign extend the result.
+ result |= !0 << shift;
+ }
+
+ Ok(result)
+ }
+}
+
+/// A module for writing integers encoded as LEB128.
+#[cfg(feature = "write")]
+pub mod write {
+ use super::{low_bits_of_u64, CONTINUATION_BIT};
+ use std::io;
+
+ /// Write the given unsigned number using the LEB128 encoding to the given
+ /// `std::io::Write`able. Returns the number of bytes written to `w`, or an
+ /// error if writing failed.
+ pub fn unsigned<W>(w: &mut W, mut val: u64) -> Result<usize, io::Error>
+ where
+ W: io::Write,
+ {
+ let mut bytes_written = 0;
+ loop {
+ let mut byte = low_bits_of_u64(val);
+ val >>= 7;
+ if val != 0 {
+ // More bytes to come, so set the continuation bit.
+ byte |= CONTINUATION_BIT;
+ }
+
+ let buf = [byte];
+ w.write_all(&buf)?;
+ bytes_written += 1;
+
+ if val == 0 {
+ return Ok(bytes_written);
+ }
+ }
+ }
+
+ /// Return the size of the LEB128 encoding of the given unsigned number.
+ pub fn uleb128_size(mut val: u64) -> usize {
+ let mut size = 0;
+ loop {
+ val >>= 7;
+ size += 1;
+ if val == 0 {
+ return size;
+ }
+ }
+ }
+
+ /// Write the given signed number using the LEB128 encoding to the given
+ /// `std::io::Write`able. Returns the number of bytes written to `w`, or an
+ /// error if writing failed.
+ pub fn signed<W>(w: &mut W, mut val: i64) -> Result<usize, io::Error>
+ where
+ W: io::Write,
+ {
+ let mut bytes_written = 0;
+ loop {
+ let mut byte = val as u8;
+ // Keep the sign bit for testing
+ val >>= 6;
+ let done = val == 0 || val == -1;
+ if done {
+ byte &= !CONTINUATION_BIT;
+ } else {
+ // Remove the sign bit
+ val >>= 1;
+ // More bytes to come, so set the continuation bit.
+ byte |= CONTINUATION_BIT;
+ }
+
+ let buf = [byte];
+ w.write_all(&buf)?;
+ bytes_written += 1;
+
+ if done {
+ return Ok(bytes_written);
+ }
+ }
+ }
+
+ /// Return the size of the LEB128 encoding of the given signed number.
+ pub fn sleb128_size(mut val: i64) -> usize {
+ let mut size = 0;
+ loop {
+ val >>= 6;
+ let done = val == 0 || val == -1;
+ val >>= 1;
+ size += 1;
+ if done {
+ return size;
+ }
+ }
+ }
+}
+
+#[cfg(test)]
+#[cfg(all(feature = "read", feature = "write"))]
+mod tests {
+ use super::{low_bits_of_byte, low_bits_of_u64, read, write, CONTINUATION_BIT};
+ use crate::endianity::NativeEndian;
+ use crate::read::{EndianSlice, Error, ReaderOffsetId};
+
+ trait ResultExt {
+ fn map_eof(self, input: &[u8]) -> Self;
+ }
+
+ impl<T> ResultExt for Result<T, Error> {
+ fn map_eof(self, input: &[u8]) -> Self {
+ match self {
+ Err(Error::UnexpectedEof(id)) => {
+ let id = ReaderOffsetId(id.0 - input.as_ptr() as u64);
+ Err(Error::UnexpectedEof(id))
+ }
+ r => r,
+ }
+ }
+ }
+
+ #[test]
+ fn test_low_bits_of_byte() {
+ for i in 0..127 {
+ assert_eq!(i, low_bits_of_byte(i));
+ assert_eq!(i, low_bits_of_byte(i | CONTINUATION_BIT));
+ }
+ }
+
+ #[test]
+ fn test_low_bits_of_u64() {
+ for i in 0u64..127 {
+ assert_eq!(i as u8, low_bits_of_u64(1 << 16 | i));
+ assert_eq!(
+ i as u8,
+ low_bits_of_u64(i << 16 | i | (u64::from(CONTINUATION_BIT)))
+ );
+ }
+ }
+
+ // Examples from the DWARF 4 standard, section 7.6, figure 22.
+ #[test]
+ fn test_read_unsigned() {
+ let buf = [2u8];
+ let mut readable = EndianSlice::new(&buf[..], NativeEndian);
+ assert_eq!(
+ 2,
+ read::unsigned(&mut readable).expect("Should read number")
+ );
+
+ let buf = [127u8];
+ let mut readable = EndianSlice::new(&buf[..], NativeEndian);
+ assert_eq!(
+ 127,
+ read::unsigned(&mut readable).expect("Should read number")
+ );
+
+ let buf = [CONTINUATION_BIT, 1];
+ let mut readable = EndianSlice::new(&buf[..], NativeEndian);
+ assert_eq!(
+ 128,
+ read::unsigned(&mut readable).expect("Should read number")
+ );
+
+ let buf = [1u8 | CONTINUATION_BIT, 1];
+ let mut readable = EndianSlice::new(&buf[..], NativeEndian);
+ assert_eq!(
+ 129,
+ read::unsigned(&mut readable).expect("Should read number")
+ );
+
+ let buf = [2u8 | CONTINUATION_BIT, 1];
+ let mut readable = EndianSlice::new(&buf[..], NativeEndian);
+ assert_eq!(
+ 130,
+ read::unsigned(&mut readable).expect("Should read number")
+ );
+
+ let buf = [57u8 | CONTINUATION_BIT, 100];
+ let mut readable = EndianSlice::new(&buf[..], NativeEndian);
+ assert_eq!(
+ 12857,
+ read::unsigned(&mut readable).expect("Should read number")
+ );
+ }
+
+ // Examples from the DWARF 4 standard, section 7.6, figure 23.
+ #[test]
+ fn test_read_signed() {
+ let buf = [2u8];
+ let mut readable = EndianSlice::new(&buf[..], NativeEndian);
+ assert_eq!(2, read::signed(&mut readable).expect("Should read number"));
+
+ let buf = [0x7eu8];
+ let mut readable = EndianSlice::new(&buf[..], NativeEndian);
+ assert_eq!(-2, read::signed(&mut readable).expect("Should read number"));
+
+ let buf = [127u8 | CONTINUATION_BIT, 0];
+ let mut readable = EndianSlice::new(&buf[..], NativeEndian);
+ assert_eq!(
+ 127,
+ read::signed(&mut readable).expect("Should read number")
+ );
+
+ let buf = [1u8 | CONTINUATION_BIT, 0x7f];
+ let mut readable = EndianSlice::new(&buf[..], NativeEndian);
+ assert_eq!(
+ -127,
+ read::signed(&mut readable).expect("Should read number")
+ );
+
+ let buf = [CONTINUATION_BIT, 1];
+ let mut readable = EndianSlice::new(&buf[..], NativeEndian);
+ assert_eq!(
+ 128,
+ read::signed(&mut readable).expect("Should read number")
+ );
+
+ let buf = [CONTINUATION_BIT, 0x7f];
+ let mut readable = EndianSlice::new(&buf[..], NativeEndian);
+ assert_eq!(
+ -128,
+ read::signed(&mut readable).expect("Should read number")
+ );
+
+ let buf = [1u8 | CONTINUATION_BIT, 1];
+ let mut readable = EndianSlice::new(&buf[..], NativeEndian);
+ assert_eq!(
+ 129,
+ read::signed(&mut readable).expect("Should read number")
+ );
+
+ let buf = [0x7fu8 | CONTINUATION_BIT, 0x7e];
+ let mut readable = EndianSlice::new(&buf[..], NativeEndian);
+ assert_eq!(
+ -129,
+ read::signed(&mut readable).expect("Should read number")
+ );
+ }
+
+ #[test]
+ fn test_read_signed_63_bits() {
+ let buf = [
+ CONTINUATION_BIT,
+ CONTINUATION_BIT,
+ CONTINUATION_BIT,
+ CONTINUATION_BIT,
+ CONTINUATION_BIT,
+ CONTINUATION_BIT,
+ CONTINUATION_BIT,
+ CONTINUATION_BIT,
+ 0x40,
+ ];
+ let mut readable = EndianSlice::new(&buf[..], NativeEndian);
+ assert_eq!(
+ -0x4000_0000_0000_0000,
+ read::signed(&mut readable).expect("Should read number")
+ );
+ }
+
+ #[test]
+ fn test_read_unsigned_not_enough_data() {
+ let buf = [CONTINUATION_BIT];
+ let mut readable = EndianSlice::new(&buf[..], NativeEndian);
+ assert_eq!(
+ read::unsigned(&mut readable).map_eof(&buf),
+ Err(Error::UnexpectedEof(ReaderOffsetId(1)))
+ );
+ }
+
+ #[test]
+ fn test_read_signed_not_enough_data() {
+ let buf = [CONTINUATION_BIT];
+ let mut readable = EndianSlice::new(&buf[..], NativeEndian);
+ assert_eq!(
+ read::signed(&mut readable).map_eof(&buf),
+ Err(Error::UnexpectedEof(ReaderOffsetId(1)))
+ );
+ }
+
+ #[test]
+ fn test_write_unsigned_not_enough_space() {
+ let mut buf = [0; 1];
+ let mut writable = &mut buf[..];
+ match write::unsigned(&mut writable, 128) {
+ Err(e) => assert_eq!(e.kind(), std::io::ErrorKind::WriteZero),
+ otherwise => panic!("Unexpected: {:?}", otherwise),
+ }
+ }
+
+ #[test]
+ fn test_write_signed_not_enough_space() {
+ let mut buf = [0; 1];
+ let mut writable = &mut buf[..];
+ match write::signed(&mut writable, 128) {
+ Err(e) => assert_eq!(e.kind(), std::io::ErrorKind::WriteZero),
+ otherwise => panic!("Unexpected: {:?}", otherwise),
+ }
+ }
+
+ #[test]
+ fn dogfood_signed() {
+ fn inner(i: i64) {
+ let mut buf = [0u8; 1024];
+
+ {
+ let mut writable = &mut buf[..];
+ write::signed(&mut writable, i).expect("Should write signed number");
+ }
+
+ let mut readable = EndianSlice::new(&buf[..], NativeEndian);
+ let result = read::signed(&mut readable).expect("Should be able to read it back again");
+ assert_eq!(i, result);
+ }
+ for i in -513..513 {
+ inner(i);
+ }
+ inner(core::i64::MIN);
+ }
+
+ #[test]
+ fn dogfood_unsigned() {
+ for i in 0..1025 {
+ let mut buf = [0u8; 1024];
+
+ {
+ let mut writable = &mut buf[..];
+ write::unsigned(&mut writable, i).expect("Should write unsigned number");
+ }
+
+ let mut readable = EndianSlice::new(&buf[..], NativeEndian);
+ let result =
+ read::unsigned(&mut readable).expect("Should be able to read it back again");
+ assert_eq!(i, result);
+ }
+ }
+
+ #[test]
+ fn test_read_unsigned_overflow() {
+ let buf = [
+ 2u8 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 1,
+ ];
+ let mut readable = EndianSlice::new(&buf[..], NativeEndian);
+ assert!(read::unsigned(&mut readable).is_err());
+ }
+
+ #[test]
+ fn test_read_signed_overflow() {
+ let buf = [
+ 2u8 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 2 | CONTINUATION_BIT,
+ 1,
+ ];
+ let mut readable = EndianSlice::new(&buf[..], NativeEndian);
+ assert!(read::signed(&mut readable).is_err());
+ }
+
+ #[test]
+ fn test_read_multiple() {
+ let buf = [2u8 | CONTINUATION_BIT, 1u8, 1u8];
+
+ let mut readable = EndianSlice::new(&buf[..], NativeEndian);
+ assert_eq!(
+ read::unsigned(&mut readable).expect("Should read first number"),
+ 130u64
+ );
+ assert_eq!(
+ read::unsigned(&mut readable).expect("Should read second number"),
+ 1u64
+ );
+ }
+
+ #[test]
+ fn test_read_u16() {
+ for (buf, val) in [
+ (&[2][..], 2),
+ (&[0x7f][..], 0x7f),
+ (&[0x80, 1][..], 0x80),
+ (&[0x81, 1][..], 0x81),
+ (&[0x82, 1][..], 0x82),
+ (&[0xff, 0x7f][..], 0x3fff),
+ (&[0x80, 0x80, 1][..], 0x4000),
+ (&[0xff, 0xff, 1][..], 0x7fff),
+ (&[0xff, 0xff, 3][..], 0xffff),
+ ]
+ .iter()
+ {
+ let mut readable = EndianSlice::new(buf, NativeEndian);
+ assert_eq!(*val, read::u16(&mut readable).expect("Should read number"));
+ }
+
+ for buf in [
+ &[0x80][..],
+ &[0x80, 0x80][..],
+ &[0x80, 0x80, 4][..],
+ &[0x80, 0x80, 0x80, 3][..],
+ ]
+ .iter()
+ {
+ let mut readable = EndianSlice::new(buf, NativeEndian);
+ assert!(read::u16(&mut readable).is_err(), "{:?}", buf);
+ }
+ }
+}
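Beyond the round-trip examples in the module docs, here is a small sketch (assuming the default `read` and `write` features) of the byte-level encoding and of the `uleb128_size` helper:

```rust
use gimli::{leb128, EndianSlice, NativeEndian};

fn main() {
    // 624485 is the classic ULEB128 example: it encodes to 0xE5 0x8E 0x26.
    let mut buf = [0u8; 8];
    {
        let mut writable = &mut buf[..];
        leb128::write::unsigned(&mut writable, 624_485).expect("should write");
    }
    assert_eq!(&buf[..3], &[0xE5u8, 0x8E, 0x26][..]);
    // The size helper agrees without doing any writing.
    assert_eq!(leb128::write::uleb128_size(624_485), 3);

    // Reading stops at the first byte without the continuation bit set.
    let mut readable = EndianSlice::new(&buf[..], NativeEndian);
    assert_eq!(
        leb128::read::unsigned(&mut readable).expect("should read"),
        624_485
    );
}
```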
diff --git a/vendor/gimli-0.26.2/src/lib.rs b/vendor/gimli-0.26.2/src/lib.rs
new file mode 100644
index 000000000..ed1af9cbd
--- /dev/null
+++ b/vendor/gimli-0.26.2/src/lib.rs
@@ -0,0 +1,76 @@
+//! `gimli` is a library for reading and writing the
+//! [DWARF debugging format](https://dwarfstd.org/).
+//!
+//! See the [read](./read/index.html) and [write](./write/index.html) modules
+//! for examples and API documentation.
+//!
+//! ## Cargo Features
+//!
+//! Cargo features that can be enabled with `gimli`:
+//!
+//! * `std`: Enabled by default. Use the `std` library. Disabling this feature
+//! allows using `gimli` in embedded environments that do not have access to
+//! `std`. Note that even when `std` is disabled, `gimli` still requires an
+//! implementation of the `alloc` crate.
+//!
+//! * `read`: Enabled by default. Enables the `read` module. Use of `std` is
+//! optional.
+//!
+//! * `write`: Enabled by default. Enables the `write` module. Always uses
+//! the `std` library.
+#![deny(missing_docs)]
+#![deny(missing_debug_implementations)]
+// Selectively enable rust 2018 warnings
+#![warn(bare_trait_objects)]
+#![warn(unused_extern_crates)]
+#![warn(ellipsis_inclusive_range_patterns)]
+//#![warn(elided_lifetimes_in_paths)]
+#![warn(explicit_outlives_requirements)]
+// Allow clippy warnings when we aren't building with clippy.
+#![allow(unknown_lints)]
+// False positives with `fallible_iterator`.
+#![allow(clippy::should_implement_trait)]
+// Many false positives involving `continue`.
+#![allow(clippy::never_loop)]
+// False positives when block expressions are used inside an assertion.
+#![allow(clippy::panic_params)]
+#![no_std]
+
+#[allow(unused_imports)]
+#[cfg(any(feature = "read", feature = "write"))]
+#[macro_use]
+extern crate alloc;
+
+#[cfg(any(feature = "std", feature = "write"))]
+#[macro_use]
+extern crate std;
+
+#[cfg(feature = "stable_deref_trait")]
+pub use stable_deref_trait::{CloneStableDeref, StableDeref};
+
+mod common;
+pub use crate::common::*;
+
+mod arch;
+pub use crate::arch::*;
+
+pub mod constants;
+// For backwards compat.
+pub use crate::constants::*;
+
+mod endianity;
+pub use crate::endianity::{BigEndian, Endianity, LittleEndian, NativeEndian, RunTimeEndian};
+
+pub mod leb128;
+
+#[cfg(feature = "read-core")]
+pub mod read;
+// For backwards compat.
+#[cfg(feature = "read-core")]
+pub use crate::read::*;
+
+#[cfg(feature = "write")]
+pub mod write;
+
+#[cfg(test)]
+mod test_util;
diff --git a/vendor/gimli-0.26.2/src/read/abbrev.rs b/vendor/gimli-0.26.2/src/read/abbrev.rs
new file mode 100644
index 000000000..1a24835a7
--- /dev/null
+++ b/vendor/gimli-0.26.2/src/read/abbrev.rs
@@ -0,0 +1,996 @@
+//! Functions for parsing DWARF debugging abbreviations.
+
+use alloc::collections::btree_map;
+use alloc::vec::Vec;
+use core::convert::TryFrom;
+use core::fmt::{self, Debug};
+use core::iter::FromIterator;
+use core::ops::Deref;
+
+use crate::common::{DebugAbbrevOffset, Encoding, SectionId};
+use crate::constants;
+use crate::endianity::Endianity;
+use crate::read::{EndianSlice, Error, Reader, Result, Section, UnitHeader};
+
+/// The `DebugAbbrev` struct represents the abbreviations describing
+/// `DebuggingInformationEntry`s' attribute names and forms found in the
+/// `.debug_abbrev` section.
+#[derive(Debug, Default, Clone, Copy)]
+pub struct DebugAbbrev<R> {
+ debug_abbrev_section: R,
+}
+
+impl<'input, Endian> DebugAbbrev<EndianSlice<'input, Endian>>
+where
+ Endian: Endianity,
+{
+ /// Construct a new `DebugAbbrev` instance from the data in the `.debug_abbrev`
+ /// section.
+ ///
+ /// It is the caller's responsibility to read the `.debug_abbrev` section and
+ /// present it as a `&[u8]` slice. That means using some ELF loader on
+ /// Linux, a Mach-O loader on macOS, etc.
+ ///
+ /// ```
+ /// use gimli::{DebugAbbrev, LittleEndian};
+ ///
+ /// # let buf = [0x00, 0x01, 0x02, 0x03];
+ /// # let read_debug_abbrev_section_somehow = || &buf;
+ /// let debug_abbrev = DebugAbbrev::new(read_debug_abbrev_section_somehow(), LittleEndian);
+ /// ```
+ pub fn new(debug_abbrev_section: &'input [u8], endian: Endian) -> Self {
+ Self::from(EndianSlice::new(debug_abbrev_section, endian))
+ }
+}
+
+impl<R: Reader> DebugAbbrev<R> {
+ /// Parse the abbreviations at the given `offset` within this
+ /// `.debug_abbrev` section.
+ ///
+ /// The `offset` should generally be retrieved from a unit header.
+ pub fn abbreviations(
+ &self,
+ debug_abbrev_offset: DebugAbbrevOffset<R::Offset>,
+ ) -> Result<Abbreviations> {
+ let input = &mut self.debug_abbrev_section.clone();
+ input.skip(debug_abbrev_offset.0)?;
+ Abbreviations::parse(input)
+ }
+}
+
+impl<T> DebugAbbrev<T> {
+ /// Create a `DebugAbbrev` section that references the data in `self`.
+ ///
+ /// This is useful when `R` implements `Reader` but `T` does not.
+ ///
+ /// ## Example Usage
+ ///
+ /// ```rust,no_run
+ /// # let load_section = || unimplemented!();
+ /// // Read the DWARF section into a `Vec` with whatever object loader you're using.
+ /// let owned_section: gimli::DebugAbbrev<Vec<u8>> = load_section();
+ /// // Create a reference to the DWARF section.
+ /// let section = owned_section.borrow(|section| {
+ /// gimli::EndianSlice::new(&section, gimli::LittleEndian)
+ /// });
+ /// ```
+ pub fn borrow<'a, F, R>(&'a self, mut borrow: F) -> DebugAbbrev<R>
+ where
+ F: FnMut(&'a T) -> R,
+ {
+ borrow(&self.debug_abbrev_section).into()
+ }
+}
+
+impl<R> Section<R> for DebugAbbrev<R> {
+ fn id() -> SectionId {
+ SectionId::DebugAbbrev
+ }
+
+ fn reader(&self) -> &R {
+ &self.debug_abbrev_section
+ }
+}
+
+impl<R> From<R> for DebugAbbrev<R> {
+ fn from(debug_abbrev_section: R) -> Self {
+ DebugAbbrev {
+ debug_abbrev_section,
+ }
+ }
+}
+
+/// A set of type abbreviations.
+///
+/// Construct an `Abbreviations` instance with the
+/// [`abbreviations()`](struct.UnitHeader.html#method.abbreviations)
+/// method.
+#[derive(Debug, Default, Clone)]
+pub struct Abbreviations {
+ vec: Vec<Abbreviation>,
+ map: btree_map::BTreeMap<u64, Abbreviation>,
+}
+
+impl Abbreviations {
+ /// Construct a new, empty set of abbreviations.
+ fn empty() -> Abbreviations {
+ Abbreviations {
+ vec: Vec::new(),
+ map: btree_map::BTreeMap::new(),
+ }
+ }
+
+ /// Insert an abbreviation into the set.
+ ///
+ /// Returns `Ok` if it is the first abbreviation in the set with its code,
+ /// `Err` if the code is a duplicate and there already exists an
+ /// abbreviation in the set with the given abbreviation's code.
+ fn insert(&mut self, abbrev: Abbreviation) -> ::core::result::Result<(), ()> {
+ let code_usize = abbrev.code as usize;
+ if code_usize as u64 == abbrev.code {
+ // Optimize for sequential abbreviation codes by storing them
+ // in a Vec, as long as the map doesn't already contain them.
+ // A potential further optimization would be to allow some
+ // holes in the Vec, but there's no need for that yet.
+ if code_usize - 1 < self.vec.len() {
+ return Err(());
+ } else if code_usize - 1 == self.vec.len() {
+ if !self.map.is_empty() && self.map.contains_key(&abbrev.code) {
+ return Err(());
+ } else {
+ self.vec.push(abbrev);
+ return Ok(());
+ }
+ }
+ }
+ match self.map.entry(abbrev.code) {
+ btree_map::Entry::Occupied(_) => Err(()),
+ btree_map::Entry::Vacant(entry) => {
+ entry.insert(abbrev);
+ Ok(())
+ }
+ }
+ }
+
+ /// Get the abbreviation associated with the given code.
+ #[inline]
+ pub fn get(&self, code: u64) -> Option<&Abbreviation> {
+ if let Ok(code) = usize::try_from(code) {
+ let index = code.checked_sub(1)?;
+ if index < self.vec.len() {
+ return Some(&self.vec[index]);
+ }
+ }
+
+ self.map.get(&code)
+ }
+
+ /// Parse a series of abbreviations, terminated by a null abbreviation.
+ fn parse<R: Reader>(input: &mut R) -> Result<Abbreviations> {
+ let mut abbrevs = Abbreviations::empty();
+
+ while let Some(abbrev) = Abbreviation::parse(input)? {
+ if abbrevs.insert(abbrev).is_err() {
+ return Err(Error::DuplicateAbbreviationCode);
+ }
+ }
+
+ Ok(abbrevs)
+ }
+}
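A hand-assembled sketch (illustrative only, assuming the default `read` feature and the crate-root re-exports) of the byte layout that `Abbreviations::parse` consumes: a ULEB128 code, a ULEB128 tag, a children byte, then (name, form) ULEB128 pairs ended by a zero pair, with a zero code terminating the whole set:

```rust
use gimli::constants::*;
use gimli::{DebugAbbrev, DebugAbbrevOffset, LittleEndian};

fn main() {
    // One abbreviation: code 1, DW_TAG_compile_unit, has children, and a
    // single DW_AT_name/DW_FORM_string attribute. The values all fit in a
    // single ULEB128 byte, so they can be written out directly.
    let buf = [
        0x01,                                       // abbreviation code 1
        DW_TAG_compile_unit.0 as u8,                // tag
        DW_CHILDREN_yes.0,                          // has children
        DW_AT_name.0 as u8, DW_FORM_string.0 as u8, // one attribute
        0x00, 0x00,                                 // null attribute pair
        0x00,                                       // null abbreviation
    ];

    let debug_abbrev = DebugAbbrev::new(&buf, LittleEndian);
    let abbrevs = debug_abbrev
        .abbreviations(DebugAbbrevOffset(0))
        .expect("should parse");
    let abbrev = abbrevs.get(1).expect("code 1 should exist");
    assert_eq!(abbrev.tag(), DW_TAG_compile_unit);
    assert!(abbrev.has_children());
    assert_eq!(abbrev.attributes().len(), 1);
}
```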
+
+/// An abbreviation describes the shape of a `DebuggingInformationEntry`'s type:
+/// its code, tag type, whether it has children, and its set of attributes.
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct Abbreviation {
+ code: u64,
+ tag: constants::DwTag,
+ has_children: constants::DwChildren,
+ attributes: Attributes,
+}
+
+impl Abbreviation {
+ /// Construct a new `Abbreviation`.
+ ///
+ /// ### Panics
+ ///
+ /// Panics if `code` is `0`.
+ pub(crate) fn new(
+ code: u64,
+ tag: constants::DwTag,
+ has_children: constants::DwChildren,
+ attributes: Attributes,
+ ) -> Abbreviation {
+ assert_ne!(code, 0);
+ Abbreviation {
+ code,
+ tag,
+ has_children,
+ attributes,
+ }
+ }
+
+ /// Get this abbreviation's code.
+ #[inline]
+ pub fn code(&self) -> u64 {
+ self.code
+ }
+
+ /// Get this abbreviation's tag.
+ #[inline]
+ pub fn tag(&self) -> constants::DwTag {
+ self.tag
+ }
+
+ /// Return true if this abbreviation's type has children, false otherwise.
+ #[inline]
+ pub fn has_children(&self) -> bool {
+ self.has_children == constants::DW_CHILDREN_yes
+ }
+
+ /// Get this abbreviation's attributes.
+ #[inline]
+ pub fn attributes(&self) -> &[AttributeSpecification] {
+ &self.attributes[..]
+ }
+
+ /// Parse an abbreviation's tag.
+ fn parse_tag<R: Reader>(input: &mut R) -> Result<constants::DwTag> {
+ let val = input.read_uleb128_u16()?;
+ if val == 0 {
+ Err(Error::AbbreviationTagZero)
+ } else {
+ Ok(constants::DwTag(val))
+ }
+ }
+
+ /// Parse an abbreviation's "does the type have children?" byte.
+ fn parse_has_children<R: Reader>(input: &mut R) -> Result<constants::DwChildren> {
+ let val = input.read_u8()?;
+ let val = constants::DwChildren(val);
+ if val == constants::DW_CHILDREN_no || val == constants::DW_CHILDREN_yes {
+ Ok(val)
+ } else {
+ Err(Error::BadHasChildren)
+ }
+ }
+
+ /// Parse a series of attribute specifications, terminated by a null attribute
+ /// specification.
+ fn parse_attributes<R: Reader>(input: &mut R) -> Result<Attributes> {
+ let mut attrs = Attributes::new();
+
+ while let Some(attr) = AttributeSpecification::parse(input)? {
+ attrs.push(attr);
+ }
+
+ Ok(attrs)
+ }
+
+ /// Parse an abbreviation. Return `None` for the null abbreviation, `Some`
+ /// for an actual abbreviation.
+ fn parse<R: Reader>(input: &mut R) -> Result<Option<Abbreviation>> {
+ let code = input.read_uleb128()?;
+ if code == 0 {
+ return Ok(None);
+ }
+
+ let tag = Self::parse_tag(input)?;
+ let has_children = Self::parse_has_children(input)?;
+ let attributes = Self::parse_attributes(input)?;
+ let abbrev = Abbreviation::new(code, tag, has_children, attributes);
+ Ok(Some(abbrev))
+ }
+}
+
+/// A list of attributes found in an `Abbreviation`
+#[derive(Clone)]
+pub(crate) enum Attributes {
+ Inline {
+ buf: [AttributeSpecification; MAX_ATTRIBUTES_INLINE],
+ len: usize,
+ },
+ Heap(Vec<AttributeSpecification>),
+}
+
+// Length of 5 based on benchmark results for both x86-64 and i686.
+const MAX_ATTRIBUTES_INLINE: usize = 5;
+
+impl Attributes {
+ /// Returns a new empty list of attributes
+ fn new() -> Attributes {
+ let default =
+ AttributeSpecification::new(constants::DW_AT_null, constants::DW_FORM_null, None);
+ Attributes::Inline {
+ buf: [default; MAX_ATTRIBUTES_INLINE],
+ len: 0,
+ }
+ }
+
+ /// Pushes a new value onto this list of attributes.
+ fn push(&mut self, attr: AttributeSpecification) {
+ match self {
+ Attributes::Heap(list) => return list.push(attr),
+ Attributes::Inline {
+ buf,
+ len: MAX_ATTRIBUTES_INLINE,
+ } => {
+ let mut list = buf.to_vec();
+ list.push(attr);
+ *self = Attributes::Heap(list);
+ }
+ Attributes::Inline { buf, len } => {
+ buf[*len] = attr;
+ *len += 1;
+ }
+ }
+ }
+}
+
+impl Debug for Attributes {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ (&**self).fmt(f)
+ }
+}
+
+impl PartialEq for Attributes {
+ fn eq(&self, other: &Attributes) -> bool {
+ &**self == &**other
+ }
+}
+
+impl Eq for Attributes {}
+
+impl Deref for Attributes {
+ type Target = [AttributeSpecification];
+ fn deref(&self) -> &[AttributeSpecification] {
+ match self {
+ Attributes::Inline { buf, len } => &buf[..*len],
+ Attributes::Heap(list) => list,
+ }
+ }
+}
+
+impl FromIterator<AttributeSpecification> for Attributes {
+ fn from_iter<I>(iter: I) -> Attributes
+ where
+ I: IntoIterator<Item = AttributeSpecification>,
+ {
+ let mut list = Attributes::new();
+ for item in iter {
+ list.push(item);
+ }
+ return list;
+ }
+}
+
+impl From<Vec<AttributeSpecification>> for Attributes {
+ fn from(list: Vec<AttributeSpecification>) -> Attributes {
+ Attributes::Heap(list)
+ }
+}
+
+/// The description of an attribute in an abbreviated type. It is a pair of name
+/// and form.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub struct AttributeSpecification {
+ name: constants::DwAt,
+ form: constants::DwForm,
+ implicit_const_value: i64,
+}
+
+impl AttributeSpecification {
+ /// Construct a new `AttributeSpecification` from the given name and form
+ /// and implicit const value.
+ #[inline]
+ pub fn new(
+ name: constants::DwAt,
+ form: constants::DwForm,
+ implicit_const_value: Option<i64>,
+ ) -> AttributeSpecification {
+ debug_assert!(
+ (form == constants::DW_FORM_implicit_const && implicit_const_value.is_some())
+ || (form != constants::DW_FORM_implicit_const && implicit_const_value.is_none())
+ );
+ AttributeSpecification {
+ name,
+ form,
+ implicit_const_value: implicit_const_value.unwrap_or(0),
+ }
+ }
+
+ /// Get the attribute's name.
+ #[inline]
+ pub fn name(&self) -> constants::DwAt {
+ self.name
+ }
+
+ /// Get the attribute's form.
+ #[inline]
+ pub fn form(&self) -> constants::DwForm {
+ self.form
+ }
+
+ /// Get the attribute's implicit const value.
+ #[inline]
+ pub fn implicit_const_value(&self) -> Option<i64> {
+ if self.form == constants::DW_FORM_implicit_const {
+ Some(self.implicit_const_value)
+ } else {
+ None
+ }
+ }
+
+ /// Return the size of the attribute, in bytes.
+ ///
+ /// Note that because some attributes are variably sized, the size cannot
+ /// always be known without parsing, in which case we return `None`.
+ pub fn size<R: Reader>(&self, header: &UnitHeader<R>) -> Option<usize> {
+ get_attribute_size(self.form, header.encoding()).map(usize::from)
+ }
+
+ /// Parse an attribute's form.
+ fn parse_form<R: Reader>(input: &mut R) -> Result<constants::DwForm> {
+ let val = input.read_uleb128_u16()?;
+ if val == 0 {
+ Err(Error::AttributeFormZero)
+ } else {
+ Ok(constants::DwForm(val))
+ }
+ }
+
+ /// Parse an attribute specification. Returns `None` for the null attribute
+ /// specification, `Some` for an actual attribute specification.
+ fn parse<R: Reader>(input: &mut R) -> Result<Option<AttributeSpecification>> {
+ let name = input.read_uleb128_u16()?;
+ if name == 0 {
+ // Parse the null attribute specification.
+ let form = input.read_uleb128_u16()?;
+ return if form == 0 {
+ Ok(None)
+ } else {
+ Err(Error::ExpectedZero)
+ };
+ }
+
+ let name = constants::DwAt(name);
+ let form = Self::parse_form(input)?;
+ let implicit_const_value = if form == constants::DW_FORM_implicit_const {
+ Some(input.read_sleb128()?)
+ } else {
+ None
+ };
+ let spec = AttributeSpecification::new(name, form, implicit_const_value);
+ Ok(Some(spec))
+ }
+}
+
+#[inline]
+pub(crate) fn get_attribute_size(form: constants::DwForm, encoding: Encoding) -> Option<u8> {
+ match form {
+ constants::DW_FORM_addr => Some(encoding.address_size),
+
+ constants::DW_FORM_implicit_const |
+ constants::DW_FORM_flag_present => Some(0),
+
+ constants::DW_FORM_data1
+ | constants::DW_FORM_flag
+ | constants::DW_FORM_strx1
+ | constants::DW_FORM_ref1
+ | constants::DW_FORM_addrx1 => Some(1),
+
+ constants::DW_FORM_data2
+ | constants::DW_FORM_ref2
+ | constants::DW_FORM_addrx2
+ | constants::DW_FORM_strx2 => Some(2),
+
+ constants::DW_FORM_addrx3 | constants::DW_FORM_strx3 => Some(3),
+
+ constants::DW_FORM_data4
+ | constants::DW_FORM_ref_sup4
+ | constants::DW_FORM_ref4
+ | constants::DW_FORM_strx4
+ | constants::DW_FORM_addrx4 => Some(4),
+
+ constants::DW_FORM_data8
+ | constants::DW_FORM_ref8
+ | constants::DW_FORM_ref_sig8
+ | constants::DW_FORM_ref_sup8 => Some(8),
+
+ constants::DW_FORM_data16 => Some(16),
+
+ constants::DW_FORM_sec_offset
+ | constants::DW_FORM_GNU_ref_alt
+ | constants::DW_FORM_strp
+ | constants::DW_FORM_strp_sup
+ | constants::DW_FORM_GNU_strp_alt
+ | constants::DW_FORM_line_strp => Some(encoding.format.word_size()),
+
+ constants::DW_FORM_ref_addr => {
+ // This is an offset, but DWARF version 2 specifies that DW_FORM_ref_addr
+ // has the same size as an address on the target system. This was changed
+ // in DWARF version 3.
+ Some(if encoding.version == 2 {
+ encoding.address_size
+ } else {
+ encoding.format.word_size()
+ })
+ }
+
+ // Variably sized forms.
+ constants::DW_FORM_block |
+ constants::DW_FORM_block1 |
+ constants::DW_FORM_block2 |
+ constants::DW_FORM_block4 |
+ constants::DW_FORM_exprloc |
+ constants::DW_FORM_ref_udata |
+ constants::DW_FORM_string |
+ constants::DW_FORM_sdata |
+ constants::DW_FORM_udata |
+ constants::DW_FORM_indirect |
+
+ // We don't know the size of unknown forms.
+ _ => None,
+ }
+}
+
+#[cfg(test)]
+pub mod tests {
+ use super::*;
+ use crate::constants;
+ use crate::endianity::LittleEndian;
+ use crate::read::{EndianSlice, Error};
+ use crate::test_util::GimliSectionMethods;
+ #[cfg(target_pointer_width = "32")]
+ use core::u32;
+ use test_assembler::Section;
+
+ pub trait AbbrevSectionMethods {
+ fn abbrev(self, code: u64, tag: constants::DwTag, children: constants::DwChildren) -> Self;
+ fn abbrev_null(self) -> Self;
+ fn abbrev_attr(self, name: constants::DwAt, form: constants::DwForm) -> Self;
+ fn abbrev_attr_implicit_const(self, name: constants::DwAt, value: i64) -> Self;
+ fn abbrev_attr_null(self) -> Self;
+ }
+
+ impl AbbrevSectionMethods for Section {
+ fn abbrev(self, code: u64, tag: constants::DwTag, children: constants::DwChildren) -> Self {
+ self.uleb(code).uleb(tag.0.into()).D8(children.0)
+ }
+
+ fn abbrev_null(self) -> Self {
+ self.D8(0)
+ }
+
+ fn abbrev_attr(self, name: constants::DwAt, form: constants::DwForm) -> Self {
+ self.uleb(name.0.into()).uleb(form.0.into())
+ }
+
+ fn abbrev_attr_implicit_const(self, name: constants::DwAt, value: i64) -> Self {
+ self.uleb(name.0.into())
+ .uleb(constants::DW_FORM_implicit_const.0.into())
+ .sleb(value)
+ }
+
+ fn abbrev_attr_null(self) -> Self {
+ self.D8(0).D8(0)
+ }
+ }
+
+ #[test]
+ fn test_debug_abbrev_ok() {
+ let extra_start = [1, 2, 3, 4];
+ let expected_rest = [5, 6, 7, 8];
+ #[rustfmt::skip]
+ let buf = Section::new()
+ .append_bytes(&extra_start)
+ .abbrev(2, constants::DW_TAG_subprogram, constants::DW_CHILDREN_no)
+ .abbrev_attr(constants::DW_AT_name, constants::DW_FORM_string)
+ .abbrev_attr_null()
+ .abbrev(1, constants::DW_TAG_compile_unit, constants::DW_CHILDREN_yes)
+ .abbrev_attr(constants::DW_AT_producer, constants::DW_FORM_strp)
+ .abbrev_attr(constants::DW_AT_language, constants::DW_FORM_data2)
+ .abbrev_attr_null()
+ .abbrev_null()
+ .append_bytes(&expected_rest)
+ .get_contents()
+ .unwrap();
+
+ let abbrev1 = Abbreviation::new(
+ 1,
+ constants::DW_TAG_compile_unit,
+ constants::DW_CHILDREN_yes,
+ vec![
+ AttributeSpecification::new(
+ constants::DW_AT_producer,
+ constants::DW_FORM_strp,
+ None,
+ ),
+ AttributeSpecification::new(
+ constants::DW_AT_language,
+ constants::DW_FORM_data2,
+ None,
+ ),
+ ]
+ .into(),
+ );
+
+ let abbrev2 = Abbreviation::new(
+ 2,
+ constants::DW_TAG_subprogram,
+ constants::DW_CHILDREN_no,
+ vec![AttributeSpecification::new(
+ constants::DW_AT_name,
+ constants::DW_FORM_string,
+ None,
+ )]
+ .into(),
+ );
+
+ let debug_abbrev = DebugAbbrev::new(&buf, LittleEndian);
+ let debug_abbrev_offset = DebugAbbrevOffset(extra_start.len());
+ let abbrevs = debug_abbrev
+ .abbreviations(debug_abbrev_offset)
+ .expect("Should parse abbreviations");
+ assert_eq!(abbrevs.get(1), Some(&abbrev1));
+ assert_eq!(abbrevs.get(2), Some(&abbrev2));
+ }
+
+ #[test]
+ fn test_abbreviations_insert() {
+ fn abbrev(code: u16) -> Abbreviation {
+ Abbreviation::new(
+ code.into(),
+ constants::DwTag(code),
+ constants::DW_CHILDREN_no,
+ vec![].into(),
+ )
+ }
+
+ fn assert_abbrev(abbrevs: &Abbreviations, code: u16) {
+ let abbrev = abbrevs.get(code.into()).unwrap();
+ assert_eq!(abbrev.tag(), constants::DwTag(code));
+ }
+
+ // Sequential insert.
+ let mut abbrevs = Abbreviations::empty();
+ abbrevs.insert(abbrev(1)).unwrap();
+ abbrevs.insert(abbrev(2)).unwrap();
+ assert_eq!(abbrevs.vec.len(), 2);
+ assert!(abbrevs.map.is_empty());
+ assert_abbrev(&abbrevs, 1);
+ assert_abbrev(&abbrevs, 2);
+
+ // Out of order insert.
+ let mut abbrevs = Abbreviations::empty();
+ abbrevs.insert(abbrev(2)).unwrap();
+ abbrevs.insert(abbrev(3)).unwrap();
+ assert!(abbrevs.vec.is_empty());
+ assert_abbrev(&abbrevs, 2);
+ assert_abbrev(&abbrevs, 3);
+
+ // Mixed order insert.
+ let mut abbrevs = Abbreviations::empty();
+ abbrevs.insert(abbrev(1)).unwrap();
+ abbrevs.insert(abbrev(3)).unwrap();
+ abbrevs.insert(abbrev(2)).unwrap();
+ assert_eq!(abbrevs.vec.len(), 2);
+ assert_abbrev(&abbrevs, 1);
+ assert_abbrev(&abbrevs, 2);
+ assert_abbrev(&abbrevs, 3);
+
+ // Duplicate code in vec.
+ let mut abbrevs = Abbreviations::empty();
+ abbrevs.insert(abbrev(1)).unwrap();
+ abbrevs.insert(abbrev(2)).unwrap();
+ assert_eq!(abbrevs.insert(abbrev(1)), Err(()));
+ assert_eq!(abbrevs.insert(abbrev(2)), Err(()));
+
+ // Duplicate code in map when adding to map.
+ let mut abbrevs = Abbreviations::empty();
+ abbrevs.insert(abbrev(2)).unwrap();
+ assert_eq!(abbrevs.insert(abbrev(2)), Err(()));
+
+ // Duplicate code in map when adding to vec.
+ let mut abbrevs = Abbreviations::empty();
+ abbrevs.insert(abbrev(2)).unwrap();
+ abbrevs.insert(abbrev(1)).unwrap();
+ assert_eq!(abbrevs.insert(abbrev(2)), Err(()));
+
+ // 32-bit usize conversions.
+ let mut abbrevs = Abbreviations::empty();
+ abbrevs.insert(abbrev(2)).unwrap();
+ }
+
+ #[test]
+ #[cfg(target_pointer_width = "32")]
+ fn test_abbreviations_insert_32() {
+ fn abbrev(code: u64) -> Abbreviation {
+ Abbreviation::new(
+ code,
+ constants::DwTag(code as u16),
+ constants::DW_CHILDREN_no,
+ vec![].into(),
+ )
+ }
+
+ fn assert_abbrev(abbrevs: &Abbreviations, code: u64) {
+ let abbrev = abbrevs.get(code).unwrap();
+ assert_eq!(abbrev.tag(), constants::DwTag(code as u16));
+ }
+
+ let mut abbrevs = Abbreviations::empty();
+ abbrevs.insert(abbrev(1)).unwrap();
+
+ let wrap_code = (u32::MAX as u64 + 1) + 1;
+ // `get` should not treat the wrapped code as `1`.
+ assert_eq!(abbrevs.get(wrap_code), None);
+ // `insert` should not treat the wrapped code as `1`.
+ abbrevs.insert(abbrev(wrap_code)).unwrap();
+ assert_abbrev(&abbrevs, 1);
+ assert_abbrev(&abbrevs, wrap_code);
+ }
+
+ #[test]
+ fn test_parse_abbreviations_ok() {
+ let expected_rest = [1, 2, 3, 4];
+ #[rustfmt::skip]
+ let buf = Section::new()
+ .abbrev(2, constants::DW_TAG_subprogram, constants::DW_CHILDREN_no)
+ .abbrev_attr(constants::DW_AT_name, constants::DW_FORM_string)
+ .abbrev_attr_null()
+ .abbrev(1, constants::DW_TAG_compile_unit, constants::DW_CHILDREN_yes)
+ .abbrev_attr(constants::DW_AT_producer, constants::DW_FORM_strp)
+ .abbrev_attr(constants::DW_AT_language, constants::DW_FORM_data2)
+ .abbrev_attr_null()
+ .abbrev_null()
+ .append_bytes(&expected_rest)
+ .get_contents()
+ .unwrap();
+ let rest = &mut EndianSlice::new(&*buf, LittleEndian);
+
+ let abbrev1 = Abbreviation::new(
+ 1,
+ constants::DW_TAG_compile_unit,
+ constants::DW_CHILDREN_yes,
+ vec![
+ AttributeSpecification::new(
+ constants::DW_AT_producer,
+ constants::DW_FORM_strp,
+ None,
+ ),
+ AttributeSpecification::new(
+ constants::DW_AT_language,
+ constants::DW_FORM_data2,
+ None,
+ ),
+ ]
+ .into(),
+ );
+
+ let abbrev2 = Abbreviation::new(
+ 2,
+ constants::DW_TAG_subprogram,
+ constants::DW_CHILDREN_no,
+ vec![AttributeSpecification::new(
+ constants::DW_AT_name,
+ constants::DW_FORM_string,
+ None,
+ )]
+ .into(),
+ );
+
+ let abbrevs = Abbreviations::parse(rest).expect("Should parse abbreviations");
+ assert_eq!(abbrevs.get(1), Some(&abbrev1));
+ assert_eq!(abbrevs.get(2), Some(&abbrev2));
+ assert_eq!(*rest, EndianSlice::new(&expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_abbreviations_duplicate() {
+ let expected_rest = [1, 2, 3, 4];
+ #[rustfmt::skip]
+ let buf = Section::new()
+ .abbrev(1, constants::DW_TAG_subprogram, constants::DW_CHILDREN_no)
+ .abbrev_attr(constants::DW_AT_name, constants::DW_FORM_string)
+ .abbrev_attr_null()
+ .abbrev(1, constants::DW_TAG_compile_unit, constants::DW_CHILDREN_yes)
+ .abbrev_attr(constants::DW_AT_producer, constants::DW_FORM_strp)
+ .abbrev_attr(constants::DW_AT_language, constants::DW_FORM_data2)
+ .abbrev_attr_null()
+ .abbrev_null()
+ .append_bytes(&expected_rest)
+ .get_contents()
+ .unwrap();
+ let buf = &mut EndianSlice::new(&*buf, LittleEndian);
+
+ match Abbreviations::parse(buf) {
+ Err(Error::DuplicateAbbreviationCode) => {}
+ otherwise => panic!("Unexpected result: {:?}", otherwise),
+ };
+ }
+
+ #[test]
+ fn test_parse_abbreviation_tag_ok() {
+ let buf = [0x01, 0x02];
+ let rest = &mut EndianSlice::new(&buf, LittleEndian);
+ let tag = Abbreviation::parse_tag(rest).expect("Should parse tag");
+ assert_eq!(tag, constants::DW_TAG_array_type);
+ assert_eq!(*rest, EndianSlice::new(&buf[1..], LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_abbreviation_tag_zero() {
+ let buf = [0x00];
+ let buf = &mut EndianSlice::new(&buf, LittleEndian);
+ match Abbreviation::parse_tag(buf) {
+ Err(Error::AbbreviationTagZero) => {}
+ otherwise => panic!("Unexpected result: {:?}", otherwise),
+ };
+ }
+
+ #[test]
+ fn test_parse_abbreviation_has_children() {
+ let buf = [0x00, 0x01, 0x02];
+ let rest = &mut EndianSlice::new(&buf, LittleEndian);
+ let val = Abbreviation::parse_has_children(rest).expect("Should parse children");
+ assert_eq!(val, constants::DW_CHILDREN_no);
+ let val = Abbreviation::parse_has_children(rest).expect("Should parse children");
+ assert_eq!(val, constants::DW_CHILDREN_yes);
+ match Abbreviation::parse_has_children(rest) {
+ Err(Error::BadHasChildren) => {}
+ otherwise => panic!("Unexpected result: {:?}", otherwise),
+ };
+ }
+
+ #[test]
+ fn test_parse_abbreviation_ok() {
+ let expected_rest = [0x01, 0x02, 0x03, 0x04];
+ let buf = Section::new()
+ .abbrev(1, constants::DW_TAG_subprogram, constants::DW_CHILDREN_no)
+ .abbrev_attr(constants::DW_AT_name, constants::DW_FORM_string)
+ .abbrev_attr_null()
+ .append_bytes(&expected_rest)
+ .get_contents()
+ .unwrap();
+ let rest = &mut EndianSlice::new(&*buf, LittleEndian);
+
+ let expect = Some(Abbreviation::new(
+ 1,
+ constants::DW_TAG_subprogram,
+ constants::DW_CHILDREN_no,
+ vec![AttributeSpecification::new(
+ constants::DW_AT_name,
+ constants::DW_FORM_string,
+ None,
+ )]
+ .into(),
+ ));
+
+ let abbrev = Abbreviation::parse(rest).expect("Should parse abbreviation");
+ assert_eq!(abbrev, expect);
+ assert_eq!(*rest, EndianSlice::new(&expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_abbreviation_implicit_const_ok() {
+ let expected_rest = [0x01, 0x02, 0x03, 0x04];
+ let buf = Section::new()
+ .abbrev(1, constants::DW_TAG_subprogram, constants::DW_CHILDREN_no)
+ .abbrev_attr_implicit_const(constants::DW_AT_name, -42)
+ .abbrev_attr_null()
+ .append_bytes(&expected_rest)
+ .get_contents()
+ .unwrap();
+ let rest = &mut EndianSlice::new(&*buf, LittleEndian);
+
+ let expect = Some(Abbreviation::new(
+ 1,
+ constants::DW_TAG_subprogram,
+ constants::DW_CHILDREN_no,
+ vec![AttributeSpecification::new(
+ constants::DW_AT_name,
+ constants::DW_FORM_implicit_const,
+ Some(-42),
+ )]
+ .into(),
+ ));
+
+ let abbrev = Abbreviation::parse(rest).expect("Should parse abbreviation");
+ assert_eq!(abbrev, expect);
+ assert_eq!(*rest, EndianSlice::new(&expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_abbreviation_implicit_const_no_const() {
+ let buf = Section::new()
+ .abbrev(1, constants::DW_TAG_subprogram, constants::DW_CHILDREN_no)
+ .abbrev_attr(constants::DW_AT_name, constants::DW_FORM_implicit_const)
+ .get_contents()
+ .unwrap();
+ let buf = &mut EndianSlice::new(&*buf, LittleEndian);
+
+ match Abbreviation::parse(buf) {
+ Err(Error::UnexpectedEof(_)) => {}
+ otherwise => panic!("Unexpected result: {:?}", otherwise),
+ }
+ }
+
+ #[test]
+ fn test_parse_null_abbreviation_ok() {
+ let expected_rest = [0x01, 0x02, 0x03, 0x04];
+ let buf = Section::new()
+ .abbrev_null()
+ .append_bytes(&expected_rest)
+ .get_contents()
+ .unwrap();
+ let rest = &mut EndianSlice::new(&*buf, LittleEndian);
+
+ let abbrev = Abbreviation::parse(rest).expect("Should parse null abbreviation");
+ assert!(abbrev.is_none());
+ assert_eq!(*rest, EndianSlice::new(&expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_attribute_form_ok() {
+ let buf = [0x01, 0x02];
+ let rest = &mut EndianSlice::new(&buf, LittleEndian);
+ let tag = AttributeSpecification::parse_form(rest).expect("Should parse form");
+ assert_eq!(tag, constants::DW_FORM_addr);
+ assert_eq!(*rest, EndianSlice::new(&buf[1..], LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_attribute_form_zero() {
+ let buf = [0x00];
+ let buf = &mut EndianSlice::new(&buf, LittleEndian);
+ match AttributeSpecification::parse_form(buf) {
+ Err(Error::AttributeFormZero) => {}
+ otherwise => panic!("Unexpected result: {:?}", otherwise),
+ };
+ }
+
+ #[test]
+ fn test_parse_null_attribute_specification_ok() {
+ let buf = [0x00, 0x00, 0x01];
+ let rest = &mut EndianSlice::new(&buf, LittleEndian);
+ let attr =
+ AttributeSpecification::parse(rest).expect("Should parse null attribute specification");
+ assert!(attr.is_none());
+ assert_eq!(*rest, EndianSlice::new(&buf[2..], LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_attribute_specifications_name_zero() {
+ let buf = [0x00, 0x01, 0x00, 0x00];
+ let buf = &mut EndianSlice::new(&buf, LittleEndian);
+ match AttributeSpecification::parse(buf) {
+ Err(Error::ExpectedZero) => {}
+ otherwise => panic!("Unexpected result: {:?}", otherwise),
+ };
+ }
+
+ #[test]
+ fn test_parse_attribute_specifications_form_zero() {
+ let buf = [0x01, 0x00, 0x00, 0x00];
+ let buf = &mut EndianSlice::new(&buf, LittleEndian);
+ match AttributeSpecification::parse(buf) {
+ Err(Error::AttributeFormZero) => {}
+ otherwise => panic!("Unexpected result: {:?}", otherwise),
+ };
+ }
+
+ #[test]
+ fn test_get_abbrev_zero() {
+ let mut abbrevs = Abbreviations::empty();
+ abbrevs
+ .insert(Abbreviation::new(
+ 1,
+ constants::DwTag(1),
+ constants::DW_CHILDREN_no,
+ vec![].into(),
+ ))
+ .unwrap();
+ assert!(abbrevs.get(0).is_none());
+ }
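+
+ // A small sketch of `Attributes` behavior: a few specifications are stored
+ // inline, and pushing more spills them to a heap `Vec`; either representation
+ // compares equal through its `Deref` slice. The count of 8 is arbitrary.
+ #[test]
+ fn test_attributes_inline_and_heap() {
+ let spec =
+ AttributeSpecification::new(constants::DW_AT_name, constants::DW_FORM_string, None);
+ let collected: Attributes = (0..8).map(|_| spec).collect();
+ let heap: Attributes = vec![spec; 8].into();
+ assert_eq!(collected.len(), 8);
+ assert_eq!(collected, heap);
+ }
+
+ // A small sketch of `get_attribute_size`: fixed-size forms report their size
+ // for the given encoding, while variably sized forms such as `DW_FORM_udata`
+ // report `None` and must be parsed to find their length. The encoding values
+ // are arbitrary.
+ #[test]
+ fn test_attribute_size_fixed_and_variable() {
+ use crate::common::{Encoding, Format};
+ let encoding = Encoding {
+ format: Format::Dwarf32,
+ version: 4,
+ address_size: 8,
+ };
+ assert_eq!(get_attribute_size(constants::DW_FORM_addr, encoding), Some(8));
+ assert_eq!(get_attribute_size(constants::DW_FORM_strp, encoding), Some(4));
+ assert_eq!(get_attribute_size(constants::DW_FORM_udata, encoding), None);
+ }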
+}
diff --git a/vendor/gimli-0.26.2/src/read/addr.rs b/vendor/gimli-0.26.2/src/read/addr.rs
new file mode 100644
index 000000000..593f9fe3c
--- /dev/null
+++ b/vendor/gimli-0.26.2/src/read/addr.rs
@@ -0,0 +1,128 @@
+use crate::common::{DebugAddrBase, DebugAddrIndex, SectionId};
+use crate::read::{Reader, ReaderOffset, Result, Section};
+
+/// The raw contents of the `.debug_addr` section.
+#[derive(Debug, Default, Clone, Copy)]
+pub struct DebugAddr<R> {
+ section: R,
+}
+
+impl<R: Reader> DebugAddr<R> {
+ // TODO: add an iterator over the sets of addresses in the section.
+ // This is not needed for common usage of the section though.
+
+ /// Returns the address at the given `base` and `index`.
+ ///
+ /// A set of addresses in the `.debug_addr` section consists of a header
+ /// followed by a series of addresses.
+ ///
+ /// The `base` must be the `DW_AT_addr_base` value from the compilation unit DIE.
+ /// This is an offset that points to the first address following the header.
+ ///
+ /// The `index` is the value of a `DW_FORM_addrx` attribute.
+ ///
+ /// The `address_size` must be the size of the address for the compilation unit.
+ /// This value must also match the header. However, note that we do not parse the
+ /// header to validate this, since locating the header is unreliable, and the GNU
+ /// extensions do not emit it.
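+ ///
+ /// # Example
+ ///
+ /// A minimal sketch: the empty buffer and the zero `base`/`index` values
+ /// below are placeholders for a real `.debug_addr` section, the unit's
+ /// `DW_AT_addr_base`, and the value of a `DW_FORM_addrx` attribute.
+ ///
+ /// ```rust,no_run
+ /// # fn foo() -> Result<(), gimli::Error> {
+ /// # let section_data = [0u8; 0];
+ /// let debug_addr = gimli::DebugAddr::from(gimli::EndianSlice::new(&section_data, gimli::LittleEndian));
+ /// // 8 is the unit's address size in bytes.
+ /// let address = debug_addr.get_address(8, gimli::DebugAddrBase(0), gimli::DebugAddrIndex(0))?;
+ /// # let _ = address;
+ /// # Ok(())
+ /// # }
+ /// ```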
+ pub fn get_address(
+ &self,
+ address_size: u8,
+ base: DebugAddrBase<R::Offset>,
+ index: DebugAddrIndex<R::Offset>,
+ ) -> Result<u64> {
+ let input = &mut self.section.clone();
+ input.skip(base.0)?;
+ input.skip(R::Offset::from_u64(
+ index.0.into_u64() * u64::from(address_size),
+ )?)?;
+ input.read_address(address_size)
+ }
+}
+
+impl<T> DebugAddr<T> {
+ /// Create a `DebugAddr` section that references the data in `self`.
+ ///
+ /// This is useful when `R` implements `Reader` but `T` does not.
+ ///
+ /// ## Example Usage
+ ///
+ /// ```rust,no_run
+ /// # let load_section = || unimplemented!();
+ /// // Read the DWARF section into a `Vec` with whatever object loader you're using.
+ /// let owned_section: gimli::DebugAddr<Vec<u8>> = load_section();
+ /// // Create a reference to the DWARF section.
+ /// let section = owned_section.borrow(|section| {
+ /// gimli::EndianSlice::new(&section, gimli::LittleEndian)
+ /// });
+ /// ```
+ pub fn borrow<'a, F, R>(&'a self, mut borrow: F) -> DebugAddr<R>
+ where
+ F: FnMut(&'a T) -> R,
+ {
+ borrow(&self.section).into()
+ }
+}
+
+impl<R> Section<R> for DebugAddr<R> {
+ fn id() -> SectionId {
+ SectionId::DebugAddr
+ }
+
+ fn reader(&self) -> &R {
+ &self.section
+ }
+}
+
+impl<R> From<R> for DebugAddr<R> {
+ fn from(section: R) -> Self {
+ DebugAddr { section }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::read::EndianSlice;
+ use crate::test_util::GimliSectionMethods;
+ use crate::{Format, LittleEndian};
+ use test_assembler::{Endian, Label, LabelMaker, Section};
+
+ #[test]
+ fn test_get_address() {
+ for format in vec![Format::Dwarf32, Format::Dwarf64] {
+ for address_size in vec![4, 8] {
+ let zero = Label::new();
+ let length = Label::new();
+ let start = Label::new();
+ let first = Label::new();
+ let end = Label::new();
+ let mut section = Section::with_endian(Endian::Little)
+ .mark(&zero)
+ .initial_length(format, &length, &start)
+ .D16(5)
+ .D8(address_size)
+ .D8(0)
+ .mark(&first);
+ for i in 0..20 {
+ section = section.word(address_size, 1000 + i);
+ }
+ section = section.mark(&end);
+ length.set_const((&end - &start) as u64);
+
+ let section = section.get_contents().unwrap();
+ let debug_addr = DebugAddr::from(EndianSlice::new(&section, LittleEndian));
+ let base = DebugAddrBase((&first - &zero) as usize);
+
+ assert_eq!(
+ debug_addr.get_address(address_size, base, DebugAddrIndex(0)),
+ Ok(1000)
+ );
+ assert_eq!(
+ debug_addr.get_address(address_size, base, DebugAddrIndex(19)),
+ Ok(1019)
+ );
+ }
+ }
+ }
+}
diff --git a/vendor/gimli-0.26.2/src/read/aranges.rs b/vendor/gimli-0.26.2/src/read/aranges.rs
new file mode 100644
index 000000000..83159b69b
--- /dev/null
+++ b/vendor/gimli-0.26.2/src/read/aranges.rs
@@ -0,0 +1,660 @@
+use crate::common::{DebugArangesOffset, DebugInfoOffset, Encoding, SectionId};
+use crate::endianity::Endianity;
+use crate::read::{EndianSlice, Error, Range, Reader, ReaderOffset, Result, Section};
+
+/// The `DebugAranges` struct represents the DWARF address range information
+/// found in the `.debug_aranges` section.
+#[derive(Debug, Default, Clone, Copy)]
+pub struct DebugAranges<R> {
+ section: R,
+}
+
+impl<'input, Endian> DebugAranges<EndianSlice<'input, Endian>>
+where
+ Endian: Endianity,
+{
+ /// Construct a new `DebugAranges` instance from the data in the `.debug_aranges`
+ /// section.
+ ///
+ /// It is the caller's responsibility to read the `.debug_aranges` section and
+ /// present it as a `&[u8]` slice. That means using some ELF loader on
+ /// Linux, a Mach-O loader on macOS, etc.
+ ///
+ /// ```
+ /// use gimli::{DebugAranges, LittleEndian};
+ ///
+ /// # let buf = [];
+ /// # let read_debug_aranges_section = || &buf;
+ /// let debug_aranges =
+ /// DebugAranges::new(read_debug_aranges_section(), LittleEndian);
+ /// ```
+ pub fn new(section: &'input [u8], endian: Endian) -> Self {
+ DebugAranges {
+ section: EndianSlice::new(section, endian),
+ }
+ }
+}
+
+impl<R: Reader> DebugAranges<R> {
+ /// Iterate the sets of entries in the `.debug_aranges` section.
+ ///
+ /// Each set of entries belongs to a single unit.
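+ ///
+ /// A minimal sketch of walking every header and its entries; the empty
+ /// buffer below is a placeholder for a real `.debug_aranges` section.
+ ///
+ /// ```rust,no_run
+ /// # fn foo() -> Result<(), gimli::Error> {
+ /// # let buf = [];
+ /// # let debug_aranges = gimli::DebugAranges::new(&buf, gimli::LittleEndian);
+ /// let mut headers = debug_aranges.headers();
+ /// while let Some(header) = headers.next()? {
+ /// // Each header identifies the unit that its address ranges describe.
+ /// let _unit = header.debug_info_offset();
+ /// let mut entries = header.entries();
+ /// while let Some(entry) = entries.next()? {
+ /// let _range = entry.range();
+ /// }
+ /// }
+ /// # Ok(())
+ /// # }
+ /// ```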
+ pub fn headers(&self) -> ArangeHeaderIter<R> {
+ ArangeHeaderIter {
+ input: self.section.clone(),
+ offset: DebugArangesOffset(R::Offset::from_u8(0)),
+ }
+ }
+
+ /// Get the header at the given offset.
+ pub fn header(&self, offset: DebugArangesOffset<R::Offset>) -> Result<ArangeHeader<R>> {
+ let mut input = self.section.clone();
+ input.skip(offset.0)?;
+ ArangeHeader::parse(&mut input, offset)
+ }
+}
+
+impl<T> DebugAranges<T> {
+ /// Create a `DebugAranges` section that references the data in `self`.
+ ///
+ /// This is useful when `R` implements `Reader` but `T` does not.
+ ///
+ /// ## Example Usage
+ ///
+ /// ```rust,no_run
+ /// # let load_section = || unimplemented!();
+ /// // Read the DWARF section into a `Vec` with whatever object loader you're using.
+ /// let owned_section: gimli::DebugAranges<Vec<u8>> = load_section();
+ /// // Create a reference to the DWARF section.
+ /// let section = owned_section.borrow(|section| {
+ /// gimli::EndianSlice::new(&section, gimli::LittleEndian)
+ /// });
+ /// ```
+ pub fn borrow<'a, F, R>(&'a self, mut borrow: F) -> DebugAranges<R>
+ where
+ F: FnMut(&'a T) -> R,
+ {
+ borrow(&self.section).into()
+ }
+}
+
+impl<R> Section<R> for DebugAranges<R> {
+ fn id() -> SectionId {
+ SectionId::DebugAranges
+ }
+
+ fn reader(&self) -> &R {
+ &self.section
+ }
+}
+
+impl<R> From<R> for DebugAranges<R> {
+ fn from(section: R) -> Self {
+ DebugAranges { section }
+ }
+}
+
+/// An iterator over the headers of a `.debug_aranges` section.
+#[derive(Clone, Debug)]
+pub struct ArangeHeaderIter<R: Reader> {
+ input: R,
+ offset: DebugArangesOffset<R::Offset>,
+}
+
+impl<R: Reader> ArangeHeaderIter<R> {
+ /// Advance the iterator to the next header.
+ pub fn next(&mut self) -> Result<Option<ArangeHeader<R>>> {
+ if self.input.is_empty() {
+ return Ok(None);
+ }
+
+ let len = self.input.len();
+ match ArangeHeader::parse(&mut self.input, self.offset) {
+ Ok(header) => {
+ self.offset.0 += len - self.input.len();
+ Ok(Some(header))
+ }
+ Err(e) => {
+ self.input.empty();
+ Err(e)
+ }
+ }
+ }
+}
+
+#[cfg(feature = "fallible-iterator")]
+impl<R: Reader> fallible_iterator::FallibleIterator for ArangeHeaderIter<R> {
+ type Item = ArangeHeader<R>;
+ type Error = Error;
+
+ fn next(&mut self) -> ::core::result::Result<Option<Self::Item>, Self::Error> {
+ ArangeHeaderIter::next(self)
+ }
+}
+
+/// A header for a set of entries in the `.debug_aranges` section.
+///
+/// These entries all belong to a single unit.
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct ArangeHeader<R, Offset = <R as Reader>::Offset>
+where
+ R: Reader<Offset = Offset>,
+ Offset: ReaderOffset,
+{
+ offset: DebugArangesOffset<Offset>,
+ encoding: Encoding,
+ length: Offset,
+ debug_info_offset: DebugInfoOffset<Offset>,
+ segment_size: u8,
+ entries: R,
+}
+
+impl<R, Offset> ArangeHeader<R, Offset>
+where
+ R: Reader<Offset = Offset>,
+ Offset: ReaderOffset,
+{
+ fn parse(input: &mut R, offset: DebugArangesOffset<Offset>) -> Result<Self> {
+ let (length, format) = input.read_initial_length()?;
+ let mut rest = input.split(length)?;
+
+ // Check the version. The DWARF 5 spec says that this is always 2, but version 3
+ // has been observed in the wild, potentially due to a bug; see
+ // https://github.com/gimli-rs/gimli/issues/559 for more information.
+ // lldb allows versions 2 through 5, possibly by mistake.
+ let version = rest.read_u16()?;
+ if version != 2 && version != 3 {
+ return Err(Error::UnknownVersion(u64::from(version)));
+ }
+
+ let debug_info_offset = rest.read_offset(format).map(DebugInfoOffset)?;
+ let address_size = rest.read_u8()?;
+ let segment_size = rest.read_u8()?;
+
+ // unit_length + version + offset + address_size + segment_size
+ let header_length = format.initial_length_size() + 2 + format.word_size() + 1 + 1;
+
+ // The first tuple following the header in each set begins at an offset that is
+ // a multiple of the size of a single tuple (that is, the size of a segment selector
+ // plus twice the size of an address).
+ let tuple_length = address_size
+ .checked_mul(2)
+ .and_then(|x| x.checked_add(segment_size))
+ .ok_or(Error::InvalidAddressRange)?;
+ if tuple_length == 0 {
+ return Err(Error::InvalidAddressRange);
+ }
+ let padding = if header_length % tuple_length == 0 {
+ 0
+ } else {
+ tuple_length - header_length % tuple_length
+ };
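+ // For example, for a 32-bit DWARF header with an 8-byte address size and a
+ // 4-byte segment size: header_length = 4 + 2 + 4 + 1 + 1 = 12 and
+ // tuple_length = 2 * 8 + 4 = 20, so 8 bytes of padding are skipped and the
+ // first tuple starts 20 bytes into the set.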
+ rest.skip(R::Offset::from_u8(padding))?;
+
+ let encoding = Encoding {
+ format,
+ version,
+ address_size,
+ // TODO: segment_size
+ };
+ Ok(ArangeHeader {
+ offset,
+ encoding,
+ length,
+ debug_info_offset,
+ segment_size,
+ entries: rest,
+ })
+ }
+
+ /// Return the offset of this header within the `.debug_aranges` section.
+ #[inline]
+ pub fn offset(&self) -> DebugArangesOffset<Offset> {
+ self.offset
+ }
+
+ /// Return the length of this set of entries, including the header.
+ #[inline]
+ pub fn length(&self) -> Offset {
+ self.length
+ }
+
+ /// Return the encoding parameters for this set of entries.
+ #[inline]
+ pub fn encoding(&self) -> Encoding {
+ self.encoding
+ }
+
+ /// Return the segment size for this set of entries.
+ #[inline]
+ pub fn segment_size(&self) -> u8 {
+ self.segment_size
+ }
+
+ /// Return the offset into the .debug_info section for this set of arange entries.
+ #[inline]
+ pub fn debug_info_offset(&self) -> DebugInfoOffset<Offset> {
+ self.debug_info_offset
+ }
+
+ /// Return the arange entries in this set.
+ #[inline]
+ pub fn entries(&self) -> ArangeEntryIter<R> {
+ ArangeEntryIter {
+ input: self.entries.clone(),
+ encoding: self.encoding,
+ segment_size: self.segment_size,
+ }
+ }
+}
+
+/// An iterator over the aranges from a `.debug_aranges` section.
+///
+/// Can be [used with
+/// `FallibleIterator`](./index.html#using-with-fallibleiterator).
+#[derive(Debug, Clone)]
+pub struct ArangeEntryIter<R: Reader> {
+ input: R,
+ encoding: Encoding,
+ segment_size: u8,
+}
+
+impl<R: Reader> ArangeEntryIter<R> {
+ /// Advance the iterator and return the next arange.
+ ///
+ /// Returns the newly parsed arange as `Ok(Some(arange))`. Returns `Ok(None)`
+ /// when iteration is complete and all aranges have already been parsed and
+ /// yielded. If an error occurs while parsing the next arange, then this error
+ /// is returned as `Err(e)`, and all subsequent calls return `Ok(None)`.
+ pub fn next(&mut self) -> Result<Option<ArangeEntry>> {
+ if self.input.is_empty() {
+ return Ok(None);
+ }
+
+ match ArangeEntry::parse(&mut self.input, self.encoding, self.segment_size) {
+ Ok(Some(entry)) => Ok(Some(entry)),
+ Ok(None) => {
+ self.input.empty();
+ Ok(None)
+ }
+ Err(e) => {
+ self.input.empty();
+ Err(e)
+ }
+ }
+ }
+}
+
+#[cfg(feature = "fallible-iterator")]
+impl<R: Reader> fallible_iterator::FallibleIterator for ArangeEntryIter<R> {
+ type Item = ArangeEntry;
+ type Error = Error;
+
+ fn next(&mut self) -> ::core::result::Result<Option<Self::Item>, Self::Error> {
+ ArangeEntryIter::next(self)
+ }
+}
+
+/// A single parsed arange.
+#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
+pub struct ArangeEntry {
+ segment: Option<u64>,
+ address: u64,
+ length: u64,
+}
+
+impl ArangeEntry {
+ /// Parse a single arange. Return `None` for the null arange, `Some` for an actual arange.
+ fn parse<R: Reader>(
+ input: &mut R,
+ encoding: Encoding,
+ segment_size: u8,
+ ) -> Result<Option<Self>> {
+ let address_size = encoding.address_size;
+
+ let tuple_length = R::Offset::from_u8(2 * address_size + segment_size);
+ if tuple_length > input.len() {
+ input.empty();
+ return Ok(None);
+ }
+
+ let segment = if segment_size != 0 {
+ input.read_address(segment_size)?
+ } else {
+ 0
+ };
+ let address = input.read_address(address_size)?;
+ let length = input.read_address(address_size)?;
+
+ match (segment, address, length) {
+ // This is meant to be a null terminator, but in practice it can occur
+ // before the end, possibly due to a linker omitting a function and
+ // leaving an unrelocated entry.
+ (0, 0, 0) => Self::parse(input, encoding, segment_size),
+ _ => Ok(Some(ArangeEntry {
+ segment: if segment_size != 0 {
+ Some(segment)
+ } else {
+ None
+ },
+ address,
+ length,
+ })),
+ }
+ }
+
+ /// Return the segment selector of this arange.
+ #[inline]
+ pub fn segment(&self) -> Option<u64> {
+ self.segment
+ }
+
+ /// Return the beginning address of this arange.
+ #[inline]
+ pub fn address(&self) -> u64 {
+ self.address
+ }
+
+ /// Return the length of this arange.
+ #[inline]
+ pub fn length(&self) -> u64 {
+ self.length
+ }
+
+ /// Return the range.
+ #[inline]
+ pub fn range(&self) -> Range {
+ Range {
+ begin: self.address,
+ end: self.address.wrapping_add(self.length),
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::common::{DebugInfoOffset, Format};
+ use crate::endianity::LittleEndian;
+ use crate::read::EndianSlice;
+
+ #[test]
+ fn test_iterate_headers() {
+ #[rustfmt::skip]
+ let buf = [
+ // 32-bit length = 28.
+ 0x1c, 0x00, 0x00, 0x00,
+ // Version.
+ 0x02, 0x00,
+ // Offset.
+ 0x01, 0x02, 0x03, 0x04,
+ // Address size.
+ 0x04,
+ // Segment size.
+ 0x00,
+ // Dummy padding and arange tuples.
+ 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+
+ // 32-bit length = 36.
+ 0x24, 0x00, 0x00, 0x00,
+ // Version.
+ 0x02, 0x00,
+ // Offset.
+ 0x11, 0x12, 0x13, 0x14,
+ // Address size.
+ 0x04,
+ // Segment size.
+ 0x00,
+ // Dummy padding and arange tuples.
+ 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ ];
+
+ let debug_aranges = DebugAranges::new(&buf, LittleEndian);
+ let mut headers = debug_aranges.headers();
+
+ let header = headers
+ .next()
+ .expect("should parse header ok")
+ .expect("should have a header");
+ assert_eq!(header.offset(), DebugArangesOffset(0));
+ assert_eq!(header.debug_info_offset(), DebugInfoOffset(0x0403_0201));
+
+ let header = headers
+ .next()
+ .expect("should parse header ok")
+ .expect("should have a header");
+ assert_eq!(header.offset(), DebugArangesOffset(0x20));
+ assert_eq!(header.debug_info_offset(), DebugInfoOffset(0x1413_1211));
+ }
+
+ #[test]
+ fn test_parse_header_ok() {
+ #[rustfmt::skip]
+ let buf = [
+ // 32-bit length = 32.
+ 0x20, 0x00, 0x00, 0x00,
+ // Version.
+ 0x02, 0x00,
+ // Offset.
+ 0x01, 0x02, 0x03, 0x04,
+ // Address size.
+ 0x08,
+ // Segment size.
+ 0x04,
+ // Length to here = 12, tuple length = 2 * 8 + 4 = 20.
+ // Padding to the next multiple of the tuple length = 8 bytes.
+ 0x10, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+
+ // Dummy arange tuple data.
+ 0x20, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+
+ // Dummy next arange.
+ 0x30, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+ ];
+
+ let rest = &mut EndianSlice::new(&buf, LittleEndian);
+
+ let header =
+ ArangeHeader::parse(rest, DebugArangesOffset(0x10)).expect("should parse header ok");
+
+ assert_eq!(
+ *rest,
+ EndianSlice::new(&buf[buf.len() - 16..], LittleEndian)
+ );
+ assert_eq!(
+ header,
+ ArangeHeader {
+ offset: DebugArangesOffset(0x10),
+ encoding: Encoding {
+ format: Format::Dwarf32,
+ version: 2,
+ address_size: 8,
+ },
+ length: 0x20,
+ debug_info_offset: DebugInfoOffset(0x0403_0201),
+ segment_size: 4,
+ entries: EndianSlice::new(&buf[buf.len() - 32..buf.len() - 16], LittleEndian),
+ }
+ );
+ }
+
+ #[test]
+ fn test_parse_header_overflow_error() {
+ #[rustfmt::skip]
+ let buf = [
+ // 32-bit length = 32.
+ 0x20, 0x00, 0x00, 0x00,
+ // Version.
+ 0x02, 0x00,
+ // Offset.
+ 0x01, 0x02, 0x03, 0x04,
+ // Address size.
+ 0xff,
+ // Segment size.
+ 0xff,
+ // The remaining bytes mirror the previous test; parsing rejects this
+ // header (the tuple length overflows a `u8`) before any padding is consumed.
+ 0x10, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+
+ // Dummy arange tuple data.
+ 0x20, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+
+ // Dummy next arange.
+ 0x30, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+ ];
+
+ let rest = &mut EndianSlice::new(&buf, LittleEndian);
+
+ let error = ArangeHeader::parse(rest, DebugArangesOffset(0x10))
+ .expect_err("should fail to parse header");
+ assert_eq!(error, Error::InvalidAddressRange);
+ }
+
+ #[test]
+ fn test_parse_header_div_by_zero_error() {
+ #[rustfmt::skip]
+ let buf = [
+ // 32-bit length = 32.
+ 0x20, 0x00, 0x00, 0x00,
+ // Version.
+ 0x02, 0x00,
+ // Offset.
+ 0x01, 0x02, 0x03, 0x04,
+ // Address size = 0. Could cause a division by zero if we aren't
+ // careful.
+ 0x00,
+ // Segment size.
+ 0x00,
+ // The remaining bytes mirror the earlier tests; parsing rejects this
+ // header (the tuple length is zero) before any padding is consumed.
+ 0x10, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+
+ // Dummy arange tuple data.
+ 0x20, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+
+ // Dummy next arange.
+ 0x30, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+ ];
+
+ let rest = &mut EndianSlice::new(&buf, LittleEndian);
+
+ let error = ArangeHeader::parse(rest, DebugArangesOffset(0x10))
+ .expect_err("should fail to parse header");
+ assert_eq!(error, Error::InvalidAddressRange);
+ }
+
+ #[test]
+ fn test_parse_entry_ok() {
+ let encoding = Encoding {
+ format: Format::Dwarf32,
+ version: 2,
+ address_size: 4,
+ };
+ let segment_size = 0;
+ let buf = [0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09];
+ let rest = &mut EndianSlice::new(&buf, LittleEndian);
+ let entry =
+ ArangeEntry::parse(rest, encoding, segment_size).expect("should parse entry ok");
+ assert_eq!(*rest, EndianSlice::new(&buf[buf.len() - 1..], LittleEndian));
+ assert_eq!(
+ entry,
+ Some(ArangeEntry {
+ segment: None,
+ address: 0x0403_0201,
+ length: 0x0807_0605,
+ })
+ );
+ }
+
+ #[test]
+ fn test_parse_entry_segment() {
+ let encoding = Encoding {
+ format: Format::Dwarf32,
+ version: 2,
+ address_size: 4,
+ };
+ let segment_size = 8;
+ #[rustfmt::skip]
+ let buf = [
+ // Segment.
+ 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18,
+ // Address.
+ 0x01, 0x02, 0x03, 0x04,
+ // Length.
+ 0x05, 0x06, 0x07, 0x08,
+ // Next tuple.
+ 0x09
+ ];
+ let rest = &mut EndianSlice::new(&buf, LittleEndian);
+ let entry =
+ ArangeEntry::parse(rest, encoding, segment_size).expect("should parse entry ok");
+ assert_eq!(*rest, EndianSlice::new(&buf[buf.len() - 1..], LittleEndian));
+ assert_eq!(
+ entry,
+ Some(ArangeEntry {
+ segment: Some(0x1817_1615_1413_1211),
+ address: 0x0403_0201,
+ length: 0x0807_0605,
+ })
+ );
+ }
+
+ #[test]
+ fn test_parse_entry_zero() {
+ let encoding = Encoding {
+ format: Format::Dwarf32,
+ version: 2,
+ address_size: 4,
+ };
+ let segment_size = 0;
+ #[rustfmt::skip]
+ let buf = [
+ // Zero tuple.
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ // Address.
+ 0x01, 0x02, 0x03, 0x04,
+ // Length.
+ 0x05, 0x06, 0x07, 0x08,
+ // Next tuple.
+ 0x09
+ ];
+ let rest = &mut EndianSlice::new(&buf, LittleEndian);
+ let entry =
+ ArangeEntry::parse(rest, encoding, segment_size).expect("should parse entry ok");
+ assert_eq!(*rest, EndianSlice::new(&buf[buf.len() - 1..], LittleEndian));
+ assert_eq!(
+ entry,
+ Some(ArangeEntry {
+ segment: None,
+ address: 0x0403_0201,
+ length: 0x0807_0605,
+ })
+ );
+ }
+}
diff --git a/vendor/gimli-0.26.2/src/read/cfi.rs b/vendor/gimli-0.26.2/src/read/cfi.rs
new file mode 100644
index 000000000..2e5167349
--- /dev/null
+++ b/vendor/gimli-0.26.2/src/read/cfi.rs
@@ -0,0 +1,7585 @@
+#[cfg(feature = "read")]
+use alloc::vec::Vec;
+
+use core::cmp::{Ord, Ordering};
+use core::fmt::{self, Debug};
+use core::iter::FromIterator;
+use core::mem;
+use core::num::Wrapping;
+
+use super::util::{ArrayLike, ArrayVec};
+use crate::common::{DebugFrameOffset, EhFrameOffset, Encoding, Format, Register, SectionId};
+use crate::constants::{self, DwEhPe};
+use crate::endianity::Endianity;
+use crate::read::{
+ EndianSlice, Error, Expression, Reader, ReaderOffset, Result, Section, StoreOnHeap,
+};
+
+/// `DebugFrame` contains the `.debug_frame` section's frame unwinding
+/// information required to unwind to and recover registers from older frames on
+/// the stack. For example, this is useful for a debugger that wants to print
+/// locals in a backtrace.
+///
+/// Most interesting methods are defined in the
+/// [`UnwindSection`](trait.UnwindSection.html) trait.
+///
+/// ### Differences between `.debug_frame` and `.eh_frame`
+///
+/// While the `.debug_frame` section's information has a lot of overlap with the
+/// `.eh_frame` section's information, the `.eh_frame` information tends to only
+/// encode the subset of information needed for exception handling. Often, only
+/// one of `.eh_frame` or `.debug_frame` will be present in an object file.
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+pub struct DebugFrame<R: Reader> {
+ section: R,
+ address_size: u8,
+ segment_size: u8,
+}
+
+impl<R: Reader> DebugFrame<R> {
+ /// Set the size of a target address in bytes.
+ ///
+ /// This defaults to the native word size.
+ /// This is only used if the CIE version is less than 4.
+ pub fn set_address_size(&mut self, address_size: u8) {
+ self.address_size = address_size
+ }
+
+ /// Set the size of a segment selector in bytes.
+ ///
+ /// This defaults to 0.
+ /// This is only used if the CIE version is less than 4.
+ pub fn set_segment_size(&mut self, segment_size: u8) {
+ self.segment_size = segment_size
+ }
+}
+
+impl<'input, Endian> DebugFrame<EndianSlice<'input, Endian>>
+where
+ Endian: Endianity,
+{
+ /// Construct a new `DebugFrame` instance from the data in the
+ /// `.debug_frame` section.
+ ///
+ /// It is the caller's responsibility to read the section and present it as
+ /// a `&[u8]` slice. That means using some ELF loader on Linux, a Mach-O
+ /// loader on macOS, etc.
+ ///
+ /// ```
+ /// use gimli::{DebugFrame, NativeEndian};
+ ///
+ /// // Use with `.debug_frame`
+ /// # let buf = [0x00, 0x01, 0x02, 0x03];
+ /// # let read_debug_frame_section_somehow = || &buf;
+ /// let debug_frame = DebugFrame::new(read_debug_frame_section_somehow(), NativeEndian);
+ /// ```
+ pub fn new(section: &'input [u8], endian: Endian) -> Self {
+ Self::from(EndianSlice::new(section, endian))
+ }
+}
+
+impl<R: Reader> Section<R> for DebugFrame<R> {
+ fn id() -> SectionId {
+ SectionId::DebugFrame
+ }
+
+ fn reader(&self) -> &R {
+ &self.section
+ }
+}
+
+impl<R: Reader> From<R> for DebugFrame<R> {
+ fn from(section: R) -> Self {
+ // Default to no segments and native word size.
+ DebugFrame {
+ section,
+ address_size: mem::size_of::<usize>() as u8,
+ segment_size: 0,
+ }
+ }
+}
+
+/// `EhFrameHdr` contains the information about the `.eh_frame_hdr` section.
+///
+/// A pointer to the start of the `.eh_frame` data, and optionally, a binary
+/// search table of pointers to the `.eh_frame` records that are found in this section.
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+pub struct EhFrameHdr<R: Reader>(R);
+
+/// `ParsedEhFrameHdr` contains the parsed information from the `.eh_frame_hdr` section.
+#[derive(Clone, Debug)]
+pub struct ParsedEhFrameHdr<R: Reader> {
+ address_size: u8,
+ section: R,
+
+ eh_frame_ptr: Pointer,
+ fde_count: u64,
+ table_enc: DwEhPe,
+ table: R,
+}
+
+impl<'input, Endian> EhFrameHdr<EndianSlice<'input, Endian>>
+where
+ Endian: Endianity,
+{
+ /// Constructs a new `EhFrameHdr` instance from the data in the `.eh_frame_hdr` section.
+ pub fn new(section: &'input [u8], endian: Endian) -> Self {
+ Self::from(EndianSlice::new(section, endian))
+ }
+}
+
+impl<R: Reader> EhFrameHdr<R> {
+ /// Parses this `EhFrameHdr` to a `ParsedEhFrameHdr`.
+ pub fn parse(&self, bases: &BaseAddresses, address_size: u8) -> Result<ParsedEhFrameHdr<R>> {
+ let mut reader = self.0.clone();
+ let version = reader.read_u8()?;
+ if version != 1 {
+ return Err(Error::UnknownVersion(u64::from(version)));
+ }
+
+ let eh_frame_ptr_enc = parse_pointer_encoding(&mut reader)?;
+ let fde_count_enc = parse_pointer_encoding(&mut reader)?;
+ let table_enc = parse_pointer_encoding(&mut reader)?;
+
+ let parameters = PointerEncodingParameters {
+ bases: &bases.eh_frame_hdr,
+ func_base: None,
+ address_size,
+ section: &self.0,
+ };
+
+ // Omitting this pointer is not valid (defeats the purpose of .eh_frame_hdr entirely)
+ if eh_frame_ptr_enc == constants::DW_EH_PE_omit {
+ return Err(Error::CannotParseOmitPointerEncoding);
+ }
+ let eh_frame_ptr = parse_encoded_pointer(eh_frame_ptr_enc, &parameters, &mut reader)?;
+
+ let fde_count;
+ if fde_count_enc == constants::DW_EH_PE_omit || table_enc == constants::DW_EH_PE_omit {
+ fde_count = 0
+ } else {
+ let ptr = parse_encoded_pointer(fde_count_enc, &parameters, &mut reader)?;
+ fde_count = match ptr {
+ Pointer::Direct(c) => c,
+ Pointer::Indirect(_) => return Err(Error::UnsupportedPointerEncoding),
+ }
+ }
+
+ Ok(ParsedEhFrameHdr {
+ address_size,
+ section: self.0.clone(),
+
+ eh_frame_ptr,
+ fde_count,
+ table_enc,
+ table: reader,
+ })
+ }
+}
+
+impl<R: Reader> Section<R> for EhFrameHdr<R> {
+ fn id() -> SectionId {
+ SectionId::EhFrameHdr
+ }
+
+ fn reader(&self) -> &R {
+ &self.0
+ }
+}
+
+impl<R: Reader> From<R> for EhFrameHdr<R> {
+ fn from(section: R) -> Self {
+ EhFrameHdr(section)
+ }
+}
+
+impl<R: Reader> ParsedEhFrameHdr<R> {
+ /// Returns the address of the binary's `.eh_frame` section.
+ pub fn eh_frame_ptr(&self) -> Pointer {
+ self.eh_frame_ptr
+ }
+
+ /// Retrieves the CFI binary search table, if there is one.
+ pub fn table(&self) -> Option<EhHdrTable<R>> {
+ // There are two big edge cases here:
+ // * You search the table for an invalid address. As this is just a binary
+ // search table, we always have to return a valid result for that (unless
+ // you specify an address that is lower than the first address in the
+ // table). Since this means that you have to recheck that the FDE contains
+ // your address anyways, we just return the first FDE even when the address
+ // is too low. After all, we're just doing a normal binary search.
+ // * This falls apart when the table is empty - there is no entry we could
+ // return. We conclude that an empty table is not really a table at all.
+ if self.fde_count == 0 {
+ None
+ } else {
+ Some(EhHdrTable { hdr: self })
+ }
+ }
+}
+
+/// An iterator over the `.eh_frame_hdr` section's binary search table.
+///
+/// Each table entry is a tuple of an `initial_location` and an `address`.
+/// The `initial_location` is the first address covered by the corresponding FDE,
+/// and the `address` is the address of that FDE in the `.eh_frame` section.
+/// The `address` can be converted to an FDE with `EhHdrTable::pointer_to_offset`
+/// and `EhFrame::fde_from_offset`.
+#[derive(Debug)]
+pub struct EhHdrTableIter<'a, 'bases, R: Reader> {
+ hdr: &'a ParsedEhFrameHdr<R>,
+ table: R,
+ bases: &'bases BaseAddresses,
+ remain: u64,
+}
+
+impl<'a, 'bases, R: Reader> EhHdrTableIter<'a, 'bases, R> {
+ /// Yield the next entry in the `EhHdrTableIter`.
+ pub fn next(&mut self) -> Result<Option<(Pointer, Pointer)>> {
+ if self.remain == 0 {
+ return Ok(None);
+ }
+
+ let parameters = PointerEncodingParameters {
+ bases: &self.bases.eh_frame_hdr,
+ func_base: None,
+ address_size: self.hdr.address_size,
+ section: &self.hdr.section,
+ };
+
+ self.remain -= 1;
+ let from = parse_encoded_pointer(self.hdr.table_enc, &parameters, &mut self.table)?;
+ let to = parse_encoded_pointer(self.hdr.table_enc, &parameters, &mut self.table)?;
+ Ok(Some((from, to)))
+ }
+ /// Yield the nth entry in the `EhHdrTableIter`
+ pub fn nth(&mut self, n: usize) -> Result<Option<(Pointer, Pointer)>> {
+ use core::convert::TryFrom;
+ let size = match self.hdr.table_enc.format() {
+ constants::DW_EH_PE_uleb128 | constants::DW_EH_PE_sleb128 => {
+ return Err(Error::VariableLengthSearchTable);
+ }
+ constants::DW_EH_PE_sdata2 | constants::DW_EH_PE_udata2 => 2,
+ constants::DW_EH_PE_sdata4 | constants::DW_EH_PE_udata4 => 4,
+ constants::DW_EH_PE_sdata8 | constants::DW_EH_PE_udata8 => 8,
+ _ => return Err(Error::UnknownPointerEncoding),
+ };
+
+ let row_size = size * 2;
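+ // Each table row is a pair of encoded pointers (initial location, FDE
+ // address), so skipping `n` rows means skipping `n * row_size` bytes.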
+ let n = u64::try_from(n).map_err(|_| Error::UnsupportedOffset)?;
+ self.remain = self.remain.saturating_sub(n);
+ self.table.skip(R::Offset::from_u64(n * row_size)?)?;
+ self.next()
+ }
+}
+
+#[cfg(feature = "fallible-iterator")]
+impl<'a, 'bases, R: Reader> fallible_iterator::FallibleIterator for EhHdrTableIter<'a, 'bases, R> {
+ type Item = (Pointer, Pointer);
+ type Error = Error;
+ fn next(&mut self) -> Result<Option<Self::Item>> {
+ EhHdrTableIter::next(self)
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ use core::convert::TryInto;
+ (
+ self.remain.try_into().unwrap_or(0),
+ self.remain.try_into().ok(),
+ )
+ }
+
+ fn nth(&mut self, n: usize) -> Result<Option<Self::Item>> {
+ EhHdrTableIter::nth(self, n)
+ }
+}
+
+/// The CFI binary search table that is an optional part of the `.eh_frame_hdr` section.
+#[derive(Debug, Clone)]
+pub struct EhHdrTable<'a, R: Reader> {
+ hdr: &'a ParsedEhFrameHdr<R>,
+}
+
+impl<'a, R: Reader + 'a> EhHdrTable<'a, R> {
+ /// Return an iterator that can walk the `.eh_frame_hdr` table.
+ ///
+ /// Each table entry is a tuple of an `initial_location` and an `address`.
+ /// The `initial_location` is the first address covered by the corresponding FDE,
+ /// and the `address` is the address of that FDE in the `.eh_frame` section.
+ /// The `address` can be converted to an FDE with `EhHdrTable::pointer_to_offset` and `EhFrame::fde_from_offset`.
+ pub fn iter<'bases>(&self, bases: &'bases BaseAddresses) -> EhHdrTableIter<'_, 'bases, R> {
+ EhHdrTableIter {
+ hdr: self.hdr,
+ bases,
+ remain: self.hdr.fde_count,
+ table: self.hdr.table.clone(),
+ }
+ }
+ /// *Probably* returns a pointer to the FDE for the given address.
+ ///
+ /// This performs a binary search, so if there is no FDE for the given address,
+ /// this function **will** return a pointer to any other FDE that's close by.
+ ///
+ /// To be sure, you **must** call `contains` on the FDE.
+ pub fn lookup(&self, address: u64, bases: &BaseAddresses) -> Result<Pointer> {
+ let size = match self.hdr.table_enc.format() {
+ constants::DW_EH_PE_uleb128 | constants::DW_EH_PE_sleb128 => {
+ return Err(Error::VariableLengthSearchTable);
+ }
+ constants::DW_EH_PE_sdata2 | constants::DW_EH_PE_udata2 => 2,
+ constants::DW_EH_PE_sdata4 | constants::DW_EH_PE_udata4 => 4,
+ constants::DW_EH_PE_sdata8 | constants::DW_EH_PE_udata8 => 8,
+ _ => return Err(Error::UnknownPointerEncoding),
+ };
+
+ let row_size = size * 2;
+
+ let mut len = self.hdr.fde_count;
+
+ let mut reader = self.hdr.table.clone();
+
+ let parameters = PointerEncodingParameters {
+ bases: &bases.eh_frame_hdr,
+ func_base: None,
+ address_size: self.hdr.address_size,
+ section: &self.hdr.section,
+ };
+
+ while len > 1 {
+ let head = reader.split(R::Offset::from_u64((len / 2) * row_size)?)?;
+ let tail = reader.clone();
+
+ let pivot = parse_encoded_pointer(self.hdr.table_enc, &parameters, &mut reader)?;
+ let pivot = match pivot {
+ Pointer::Direct(x) => x,
+ Pointer::Indirect(_) => return Err(Error::UnsupportedPointerEncoding),
+ };
+
+ match pivot.cmp(&address) {
+ Ordering::Equal => {
+ reader = tail;
+ break;
+ }
+ Ordering::Less => {
+ reader = tail;
+ len = len - (len / 2);
+ }
+ Ordering::Greater => {
+ reader = head;
+ len /= 2;
+ }
+ }
+ }
+
+ reader.skip(R::Offset::from_u64(size)?)?;
+
+ parse_encoded_pointer(self.hdr.table_enc, &parameters, &mut reader)
+ }
+
+ /// Convert a `Pointer` to a section offset.
+ ///
+ /// This does not support indirect pointers.
+ pub fn pointer_to_offset(&self, ptr: Pointer) -> Result<EhFrameOffset<R::Offset>> {
+ let ptr = match ptr {
+ Pointer::Direct(x) => x,
+ _ => return Err(Error::UnsupportedPointerEncoding),
+ };
+
+ let eh_frame_ptr = match self.hdr.eh_frame_ptr() {
+ Pointer::Direct(x) => x,
+ _ => return Err(Error::UnsupportedPointerEncoding),
+ };
+
+ // Calculate the offset in the EhFrame section
+ R::Offset::from_u64(ptr - eh_frame_ptr).map(EhFrameOffset)
+ }
+
+ /// Returns a parsed FDE for the given address, or `NoUnwindInfoForAddress`
+ /// if there are none.
+ ///
+ /// You must provide a function to get its associated CIE. See
+ /// `PartialFrameDescriptionEntry::parse` for more information.
+ ///
+ /// # Example
+ ///
+ /// ```
+ /// # use gimli::{BaseAddresses, EhFrame, ParsedEhFrameHdr, EndianSlice, NativeEndian, Error, UnwindSection};
+ /// # fn foo() -> Result<(), Error> {
+ /// # let eh_frame: EhFrame<EndianSlice<NativeEndian>> = unreachable!();
+ /// # let eh_frame_hdr: ParsedEhFrameHdr<EndianSlice<NativeEndian>> = unimplemented!();
+ /// # let addr = 0;
+ /// # let bases = unimplemented!();
+ /// let table = eh_frame_hdr.table().unwrap();
+ /// let fde = table.fde_for_address(&eh_frame, &bases, addr, EhFrame::cie_from_offset)?;
+ /// # Ok(())
+ /// # }
+ /// ```
+ pub fn fde_for_address<F>(
+ &self,
+ frame: &EhFrame<R>,
+ bases: &BaseAddresses,
+ address: u64,
+ get_cie: F,
+ ) -> Result<FrameDescriptionEntry<R>>
+ where
+ F: FnMut(
+ &EhFrame<R>,
+ &BaseAddresses,
+ EhFrameOffset<R::Offset>,
+ ) -> Result<CommonInformationEntry<R>>,
+ {
+ let fdeptr = self.lookup(address, bases)?;
+ let offset = self.pointer_to_offset(fdeptr)?;
+ let entry = frame.fde_from_offset(bases, offset, get_cie)?;
+ if entry.contains(address) {
+ Ok(entry)
+ } else {
+ Err(Error::NoUnwindInfoForAddress)
+ }
+ }
+
+ #[inline]
+ #[doc(hidden)]
+ #[deprecated(note = "Method renamed to fde_for_address; use that instead.")]
+ pub fn lookup_and_parse<F>(
+ &self,
+ address: u64,
+ bases: &BaseAddresses,
+ frame: EhFrame<R>,
+ get_cie: F,
+ ) -> Result<FrameDescriptionEntry<R>>
+ where
+ F: FnMut(
+ &EhFrame<R>,
+ &BaseAddresses,
+ EhFrameOffset<R::Offset>,
+ ) -> Result<CommonInformationEntry<R>>,
+ {
+ self.fde_for_address(&frame, bases, address, get_cie)
+ }
+
+ /// Returns the frame unwind information for the given address,
+ /// or `NoUnwindInfoForAddress` if there are none.
+ ///
+ /// You must provide a function to get the associated CIE. See
+ /// `PartialFrameDescriptionEntry::parse` for more information.
+ pub fn unwind_info_for_address<'ctx, F, A: UnwindContextStorage<R>>(
+ &self,
+ frame: &EhFrame<R>,
+ bases: &BaseAddresses,
+ ctx: &'ctx mut UnwindContext<R, A>,
+ address: u64,
+ get_cie: F,
+ ) -> Result<&'ctx UnwindTableRow<R, A>>
+ where
+ F: FnMut(
+ &EhFrame<R>,
+ &BaseAddresses,
+ EhFrameOffset<R::Offset>,
+ ) -> Result<CommonInformationEntry<R>>,
+ {
+ let fde = self.fde_for_address(frame, bases, address, get_cie)?;
+ fde.unwind_info_for_address(frame, bases, ctx, address)
+ }
+}
+
+/// `EhFrame` contains the frame unwinding information needed during exception
+/// handling found in the `.eh_frame` section.
+///
+/// Most interesting methods are defined in the
+/// [`UnwindSection`](trait.UnwindSection.html) trait.
+///
+/// See
+/// [`DebugFrame`](./struct.DebugFrame.html#differences-between-debug_frame-and-eh_frame)
+/// for some discussion on the differences between `.debug_frame` and
+/// `.eh_frame`.
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+pub struct EhFrame<R: Reader> {
+ section: R,
+ address_size: u8,
+}
+
+impl<R: Reader> EhFrame<R> {
+ /// Set the size of a target address in bytes.
+ ///
+ /// This defaults to the native word size.
+ pub fn set_address_size(&mut self, address_size: u8) {
+ self.address_size = address_size
+ }
+}
+
+impl<'input, Endian> EhFrame<EndianSlice<'input, Endian>>
+where
+ Endian: Endianity,
+{
+ /// Construct a new `EhFrame` instance from the data in the
+ /// `.eh_frame` section.
+ ///
+ /// It is the caller's responsibility to read the section and present it as
+ /// a `&[u8]` slice. That means using some ELF loader on Linux, a Mach-O
+ /// loader on macOS, etc.
+ ///
+ /// ```
+ /// use gimli::{EhFrame, EndianSlice, NativeEndian};
+ ///
+ /// // Use with `.eh_frame`
+ /// # let buf = [0x00, 0x01, 0x02, 0x03];
+ /// # let read_eh_frame_section_somehow = || &buf;
+ /// let eh_frame = EhFrame::new(read_eh_frame_section_somehow(), NativeEndian);
+ /// ```
+ pub fn new(section: &'input [u8], endian: Endian) -> Self {
+ Self::from(EndianSlice::new(section, endian))
+ }
+}
+
+impl<R: Reader> Section<R> for EhFrame<R> {
+ fn id() -> SectionId {
+ SectionId::EhFrame
+ }
+
+ fn reader(&self) -> &R {
+ &self.section
+ }
+}
+
+impl<R: Reader> From<R> for EhFrame<R> {
+ fn from(section: R) -> Self {
+ // Default to native word size.
+ EhFrame {
+ section,
+ address_size: mem::size_of::<usize>() as u8,
+ }
+ }
+}
+
+// This has to be `pub` to silence a warning (that is deny(..)'d by default) in
+// rustc. Eventually, not having this `pub` will become a hard error.
+#[doc(hidden)]
+#[allow(missing_docs)]
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+pub enum CieOffsetEncoding {
+ U32,
+ U64,
+}
+
+/// An offset into an `UnwindSection`.
+//
+// Needed to avoid conflicting implementations of `Into<T>`.
+pub trait UnwindOffset<T = usize>: Copy + Debug + Eq + From<T>
+where
+ T: ReaderOffset,
+{
+ /// Convert an `UnwindOffset<T>` into a `T`.
+ fn into(self) -> T;
+}
+
+impl<T> UnwindOffset<T> for DebugFrameOffset<T>
+where
+ T: ReaderOffset,
+{
+ #[inline]
+ fn into(self) -> T {
+ self.0
+ }
+}
+
+impl<T> UnwindOffset<T> for EhFrameOffset<T>
+where
+ T: ReaderOffset,
+{
+ #[inline]
+ fn into(self) -> T {
+ self.0
+ }
+}
+
+/// This trait completely encapsulates everything that is different between
+/// `.eh_frame` and `.debug_frame`, as well as all the bits that can change
+/// between DWARF versions.
+#[doc(hidden)]
+pub trait _UnwindSectionPrivate<R: Reader> {
+ /// Get the underlying section data.
+ fn section(&self) -> &R;
+
+ /// Returns true if the given length value should be considered an
+ /// end-of-entries sentinel.
+ fn length_value_is_end_of_entries(length: R::Offset) -> bool;
+
+ /// Return true if the given id is the CIE sentinel, false otherwise.
+ fn is_cie(format: Format, id: u64) -> bool;
+
+ /// Return the CIE offset/ID encoding used by this unwind section with the
+ /// given DWARF format.
+ fn cie_offset_encoding(format: Format) -> CieOffsetEncoding;
+
+ /// For `.eh_frame`, CIE offsets are relative to the current position. For
+ /// `.debug_frame`, they are relative to the start of the section. We always
+ /// internally store them relative to the section, so we handle translating
+ /// `.eh_frame`'s relative offsets in this method. If the offset calculation
+ /// underflows, return `None`.
+ fn resolve_cie_offset(&self, base: R::Offset, offset: R::Offset) -> Option<R::Offset>;
+
+ /// Does this version of this unwind section encode address and segment
+ /// sizes in its CIEs?
+ fn has_address_and_segment_sizes(version: u8) -> bool;
+
+ /// The address size to use if `has_address_and_segment_sizes` returns false.
+ fn address_size(&self) -> u8;
+
+ /// The segment size to use if `has_address_and_segment_sizes` returns false.
+ fn segment_size(&self) -> u8;
+}
+
+/// A section holding unwind information: either `.debug_frame` or
+/// `.eh_frame`. See [`DebugFrame`](./struct.DebugFrame.html) and
+/// [`EhFrame`](./struct.EhFrame.html) respectively.
+pub trait UnwindSection<R: Reader>: Clone + Debug + _UnwindSectionPrivate<R> {
+ /// The offset type associated with this CFI section. Either
+ /// `DebugFrameOffset` or `EhFrameOffset`.
+ type Offset: UnwindOffset<R::Offset>;
+
+ /// Iterate over the `CommonInformationEntry`s and `FrameDescriptionEntry`s
+ /// in this unwind section (either `.debug_frame` or `.eh_frame`).
+ ///
+ /// Can be [used with
+ /// `FallibleIterator`](./index.html#using-with-fallibleiterator).
+ fn entries<'bases>(&self, bases: &'bases BaseAddresses) -> CfiEntriesIter<'bases, Self, R> {
+ CfiEntriesIter {
+ section: self.clone(),
+ bases,
+ input: self.section().clone(),
+ }
+ }
+
+ /// Parse the `CommonInformationEntry` at the given offset.
+ fn cie_from_offset(
+ &self,
+ bases: &BaseAddresses,
+ offset: Self::Offset,
+ ) -> Result<CommonInformationEntry<R>> {
+ let offset = UnwindOffset::into(offset);
+ let input = &mut self.section().clone();
+ input.skip(offset)?;
+ CommonInformationEntry::parse(bases, self, input)
+ }
+
+ /// Parse the `PartialFrameDescriptionEntry` at the given offset.
+ fn partial_fde_from_offset<'bases>(
+ &self,
+ bases: &'bases BaseAddresses,
+ offset: Self::Offset,
+ ) -> Result<PartialFrameDescriptionEntry<'bases, Self, R>> {
+ let offset = UnwindOffset::into(offset);
+ let input = &mut self.section().clone();
+ input.skip(offset)?;
+ PartialFrameDescriptionEntry::parse_partial(self, bases, input)
+ }
+
+ /// Parse the `FrameDescriptionEntry` at the given offset.
+ fn fde_from_offset<F>(
+ &self,
+ bases: &BaseAddresses,
+ offset: Self::Offset,
+ get_cie: F,
+ ) -> Result<FrameDescriptionEntry<R>>
+ where
+ F: FnMut(&Self, &BaseAddresses, Self::Offset) -> Result<CommonInformationEntry<R>>,
+ {
+ let partial = self.partial_fde_from_offset(bases, offset)?;
+ partial.parse(get_cie)
+ }
+
+ /// Find the `FrameDescriptionEntry` for the given address.
+ ///
+ /// If found, the FDE is returned. If not found,
+ /// `Err(gimli::Error::NoUnwindInfoForAddress)` is returned.
+ /// If parsing fails, the error is returned.
+ ///
+ /// You must provide a function to get its associated CIE. See
+ /// `PartialFrameDescriptionEntry::parse` for more information.
+ ///
+ /// Note: this iterates over all FDEs. If an `.eh_frame_hdr` section is
+ /// available, it is possible to do a binary search with
+ /// `EhFrameHdr::fde_for_address` instead.
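+ ///
+ /// A minimal sketch of a lookup (the section contents, base addresses, and
+ /// probe address are assumed to come from the caller):
+ ///
+ /// ```
+ /// use gimli::{BaseAddresses, EhFrame, NativeEndian, UnwindSection};
+ ///
+ /// # fn foo() -> gimli::Result<()> {
+ /// # let read_eh_frame_section = || unimplemented!();
+ /// let eh_frame = EhFrame::new(read_eh_frame_section(), NativeEndian);
+ ///
+ /// # let get_frame_pc = || unimplemented!();
+ /// let address = get_frame_pc();
+ ///
+ /// # let address_of_text_section_in_memory = unimplemented!();
+ /// # let address_of_got_section_in_memory = unimplemented!();
+ /// let bases = BaseAddresses::default()
+ /// .set_text(address_of_text_section_in_memory)
+ /// .set_got(address_of_got_section_in_memory);
+ ///
+ /// let fde = eh_frame.fde_for_address(&bases, address, EhFrame::cie_from_offset)?;
+ /// # let _ = fde;
+ /// # unreachable!()
+ /// # }
+ /// ```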
+ fn fde_for_address<F>(
+ &self,
+ bases: &BaseAddresses,
+ address: u64,
+ mut get_cie: F,
+ ) -> Result<FrameDescriptionEntry<R>>
+ where
+ F: FnMut(&Self, &BaseAddresses, Self::Offset) -> Result<CommonInformationEntry<R>>,
+ {
+ let mut entries = self.entries(bases);
+ while let Some(entry) = entries.next()? {
+ match entry {
+ CieOrFde::Cie(_) => {}
+ CieOrFde::Fde(partial) => {
+ let fde = partial.parse(&mut get_cie)?;
+ if fde.contains(address) {
+ return Ok(fde);
+ }
+ }
+ }
+ }
+ Err(Error::NoUnwindInfoForAddress)
+ }
+
+ /// Find the frame unwind information for the given address.
+ ///
+ /// If found, the unwind information is returned. If not found,
+ /// `Err(gimli::Error::NoUnwindInfoForAddress)` is returned. If parsing or
+ /// CFI evaluation fails, the error is returned.
+ ///
+ /// ```
+ /// use gimli::{BaseAddresses, EhFrame, EndianSlice, NativeEndian, UnwindContext,
+ /// UnwindSection};
+ ///
+ /// # fn foo() -> gimli::Result<()> {
+ /// # let read_eh_frame_section = || unimplemented!();
+ /// // Get the `.eh_frame` section from the object file. Alternatively,
+ /// // use `DebugFrame` with the `.debug_frame` section of the object file.
+ /// let eh_frame = EhFrame::new(read_eh_frame_section(), NativeEndian);
+ ///
+ /// # let get_frame_pc = || unimplemented!();
+ /// // Get the address of the PC for a frame you'd like to unwind.
+ /// let address = get_frame_pc();
+ ///
+ /// // This context is reusable, which cuts down on heap allocations.
+ /// let mut ctx = UnwindContext::new();
+ ///
+ /// // Optionally provide base addresses for any relative pointers. If a
+ /// // base address isn't provided and a pointer is found that is relative to
+ /// // it, we will return an `Err`.
+ /// # let address_of_text_section_in_memory = unimplemented!();
+ /// # let address_of_got_section_in_memory = unimplemented!();
+ /// let bases = BaseAddresses::default()
+ /// .set_text(address_of_text_section_in_memory)
+ /// .set_got(address_of_got_section_in_memory);
+ ///
+ /// let unwind_info = eh_frame.unwind_info_for_address(
+ /// &bases,
+ /// &mut ctx,
+ /// address,
+ /// EhFrame::cie_from_offset,
+ /// )?;
+ ///
+ /// # let do_stuff_with = |_| unimplemented!();
+ /// do_stuff_with(unwind_info);
+ /// # let _ = ctx;
+ /// # unreachable!()
+ /// # }
+ /// ```
+ #[inline]
+ fn unwind_info_for_address<'ctx, F, A: UnwindContextStorage<R>>(
+ &self,
+ bases: &BaseAddresses,
+ ctx: &'ctx mut UnwindContext<R, A>,
+ address: u64,
+ get_cie: F,
+ ) -> Result<&'ctx UnwindTableRow<R, A>>
+ where
+ F: FnMut(&Self, &BaseAddresses, Self::Offset) -> Result<CommonInformationEntry<R>>,
+ {
+ let fde = self.fde_for_address(bases, address, get_cie)?;
+ fde.unwind_info_for_address(self, bases, ctx, address)
+ }
+}
+
+impl<R: Reader> _UnwindSectionPrivate<R> for DebugFrame<R> {
+ fn section(&self) -> &R {
+ &self.section
+ }
+
+ fn length_value_is_end_of_entries(_: R::Offset) -> bool {
+ false
+ }
+
+ fn is_cie(format: Format, id: u64) -> bool {
+ match format {
+ Format::Dwarf32 => id == 0xffff_ffff,
+ Format::Dwarf64 => id == 0xffff_ffff_ffff_ffff,
+ }
+ }
+
+ fn cie_offset_encoding(format: Format) -> CieOffsetEncoding {
+ match format {
+ Format::Dwarf32 => CieOffsetEncoding::U32,
+ Format::Dwarf64 => CieOffsetEncoding::U64,
+ }
+ }
+
+ fn resolve_cie_offset(&self, _: R::Offset, offset: R::Offset) -> Option<R::Offset> {
+ Some(offset)
+ }
+
+ fn has_address_and_segment_sizes(version: u8) -> bool {
+ version == 4
+ }
+
+ fn address_size(&self) -> u8 {
+ self.address_size
+ }
+
+ fn segment_size(&self) -> u8 {
+ self.segment_size
+ }
+}
+
+impl<R: Reader> UnwindSection<R> for DebugFrame<R> {
+ type Offset = DebugFrameOffset<R::Offset>;
+}
+
+impl<R: Reader> _UnwindSectionPrivate<R> for EhFrame<R> {
+ fn section(&self) -> &R {
+ &self.section
+ }
+
+ fn length_value_is_end_of_entries(length: R::Offset) -> bool {
+ length.into_u64() == 0
+ }
+
+ fn is_cie(_: Format, id: u64) -> bool {
+ id == 0
+ }
+
+ fn cie_offset_encoding(_format: Format) -> CieOffsetEncoding {
+ // `.eh_frame` offsets are always 4 bytes, regardless of the DWARF
+ // format.
+ CieOffsetEncoding::U32
+ }
+
+ fn resolve_cie_offset(&self, base: R::Offset, offset: R::Offset) -> Option<R::Offset> {
+ base.checked_sub(offset)
+ }
+
+ fn has_address_and_segment_sizes(_version: u8) -> bool {
+ false
+ }
+
+ fn address_size(&self) -> u8 {
+ self.address_size
+ }
+
+ fn segment_size(&self) -> u8 {
+ 0
+ }
+}
+
+impl<R: Reader> UnwindSection<R> for EhFrame<R> {
+ type Offset = EhFrameOffset<R::Offset>;
+}
+
+/// Optional base addresses for the relative `DW_EH_PE_*` encoded pointers.
+///
+/// During CIE/FDE parsing, if a relative pointer is encountered for a base
+/// address that is unknown, an `Err` will be returned.
+///
+/// ```
+/// use gimli::BaseAddresses;
+///
+/// # fn foo() {
+/// # let address_of_eh_frame_hdr_section_in_memory = unimplemented!();
+/// # let address_of_eh_frame_section_in_memory = unimplemented!();
+/// # let address_of_text_section_in_memory = unimplemented!();
+/// # let address_of_got_section_in_memory = unimplemented!();
+/// # let address_of_the_start_of_current_func = unimplemented!();
+/// let bases = BaseAddresses::default()
+/// .set_eh_frame_hdr(address_of_eh_frame_hdr_section_in_memory)
+/// .set_eh_frame(address_of_eh_frame_section_in_memory)
+/// .set_text(address_of_text_section_in_memory)
+/// .set_got(address_of_got_section_in_memory);
+/// # let _ = bases;
+/// # }
+/// ```
+#[derive(Clone, Default, Debug, PartialEq, Eq)]
+pub struct BaseAddresses {
+ /// The base addresses to use for pointers in the `.eh_frame_hdr` section.
+ pub eh_frame_hdr: SectionBaseAddresses,
+
+ /// The base addresses to use for pointers in the `.eh_frame` section.
+ pub eh_frame: SectionBaseAddresses,
+}
+
+/// Optional base addresses for the relative `DW_EH_PE_*` encoded pointers
+/// in a particular section.
+///
+/// See `BaseAddresses` for methods that are helpful in setting these addresses.
+#[derive(Clone, Default, Debug, PartialEq, Eq)]
+pub struct SectionBaseAddresses {
+ /// The address of the section containing the pointer.
+ pub section: Option<u64>,
+
+ /// The base address for text relative pointers.
+ /// This is generally the address of the `.text` section.
+ pub text: Option<u64>,
+
+ /// The base address for data relative pointers.
+ ///
+ /// For pointers in the `.eh_frame_hdr` section, this is the address
+ /// of the `.eh_frame_hdr` section.
+ ///
+ /// For pointers in the `.eh_frame` section, this is generally the
+ /// global pointer, such as the address of the `.got` section.
+ pub data: Option<u64>,
+}
+
+impl BaseAddresses {
+ /// Set the `.eh_frame_hdr` section base address.
+ #[inline]
+ pub fn set_eh_frame_hdr(mut self, addr: u64) -> Self {
+ self.eh_frame_hdr.section = Some(addr);
+ self.eh_frame_hdr.data = Some(addr);
+ self
+ }
+
+ /// Set the `.eh_frame` section base address.
+ #[inline]
+ pub fn set_eh_frame(mut self, addr: u64) -> Self {
+ self.eh_frame.section = Some(addr);
+ self
+ }
+
+ /// Set the `.text` section base address.
+ #[inline]
+ pub fn set_text(mut self, addr: u64) -> Self {
+ self.eh_frame_hdr.text = Some(addr);
+ self.eh_frame.text = Some(addr);
+ self
+ }
+
+ /// Set the `.got` section base address.
+ #[inline]
+ pub fn set_got(mut self, addr: u64) -> Self {
+ self.eh_frame.data = Some(addr);
+ self
+ }
+}
+
+/// An iterator over CIE and FDE entries in a `.debug_frame` or `.eh_frame`
+/// section.
+///
+/// Some pointers may be encoded relative to various base addresses. Use the
+/// [`BaseAddresses`](./struct.BaseAddresses.html) parameter to provide them. By
+/// default, none are provided. If a relative pointer is encountered for a base
+/// address that is unknown, an `Err` will be returned and iteration will abort.
+///
+/// Can be [used with
+/// `FallibleIterator`](./index.html#using-with-fallibleiterator).
+///
+/// ```
+/// use gimli::{BaseAddresses, EhFrame, EndianSlice, NativeEndian, UnwindSection};
+///
+/// # fn foo() -> gimli::Result<()> {
+/// # let read_eh_frame_somehow = || unimplemented!();
+/// let eh_frame = EhFrame::new(read_eh_frame_somehow(), NativeEndian);
+///
+/// # let address_of_eh_frame_hdr_section_in_memory = unimplemented!();
+/// # let address_of_eh_frame_section_in_memory = unimplemented!();
+/// # let address_of_text_section_in_memory = unimplemented!();
+/// # let address_of_got_section_in_memory = unimplemented!();
+/// # let address_of_the_start_of_current_func = unimplemented!();
+/// // Provide base addresses for relative pointers.
+/// let bases = BaseAddresses::default()
+/// .set_eh_frame_hdr(address_of_eh_frame_hdr_section_in_memory)
+/// .set_eh_frame(address_of_eh_frame_section_in_memory)
+/// .set_text(address_of_text_section_in_memory)
+/// .set_got(address_of_got_section_in_memory);
+///
+/// let mut entries = eh_frame.entries(&bases);
+///
+/// # let do_stuff_with = |_| unimplemented!();
+/// while let Some(entry) = entries.next()? {
+/// do_stuff_with(entry)
+/// }
+/// # unreachable!()
+/// # }
+/// ```
+#[derive(Clone, Debug)]
+pub struct CfiEntriesIter<'bases, Section, R>
+where
+ R: Reader,
+ Section: UnwindSection<R>,
+{
+ section: Section,
+ bases: &'bases BaseAddresses,
+ input: R,
+}
+
+impl<'bases, Section, R> CfiEntriesIter<'bases, Section, R>
+where
+ R: Reader,
+ Section: UnwindSection<R>,
+{
+ /// Advance the iterator to the next entry.
+ pub fn next(&mut self) -> Result<Option<CieOrFde<'bases, Section, R>>> {
+ if self.input.is_empty() {
+ return Ok(None);
+ }
+
+ match parse_cfi_entry(self.bases, &self.section, &mut self.input) {
+ Err(e) => {
+ self.input.empty();
+ Err(e)
+ }
+ Ok(None) => {
+ self.input.empty();
+ Ok(None)
+ }
+ Ok(Some(entry)) => Ok(Some(entry)),
+ }
+ }
+}
+
+#[cfg(feature = "fallible-iterator")]
+impl<'bases, Section, R> fallible_iterator::FallibleIterator for CfiEntriesIter<'bases, Section, R>
+where
+ R: Reader,
+ Section: UnwindSection<R>,
+{
+ type Item = CieOrFde<'bases, Section, R>;
+ type Error = Error;
+
+ fn next(&mut self) -> ::core::result::Result<Option<Self::Item>, Self::Error> {
+ CfiEntriesIter::next(self)
+ }
+}
+
+/// Either a `CommonInformationEntry` (CIE) or a `FrameDescriptionEntry` (FDE).
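+///
+/// A sketch of handling an entry yielded by `CfiEntriesIter` (the `entry`
+/// value is assumed to come from `CfiEntriesIter::next`, and the section is
+/// assumed to be an `EhFrame`):
+///
+/// ```ignore
+/// match entry {
+/// CieOrFde::Cie(cie) => {
+/// // CIEs carry information shared by their FDEs.
+/// let _ = cie.code_alignment_factor();
+/// }
+/// CieOrFde::Fde(partial) => {
+/// // Fully parse the FDE by resolving its CIE on demand.
+/// let fde = partial.parse(EhFrame::cie_from_offset)?;
+/// let _ = fde.initial_address();
+/// }
+/// }
+/// ```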
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum CieOrFde<'bases, Section, R>
+where
+ R: Reader,
+ Section: UnwindSection<R>,
+{
+ /// This CFI entry is a `CommonInformationEntry`.
+ Cie(CommonInformationEntry<R>),
+ /// This CFI entry is a `FrameDescriptionEntry`, however fully parsing it
+ /// requires parsing its CIE first, so it is left in a partially parsed
+ /// state.
+ Fde(PartialFrameDescriptionEntry<'bases, Section, R>),
+}
+
+#[allow(clippy::type_complexity)]
+fn parse_cfi_entry<'bases, Section, R>(
+ bases: &'bases BaseAddresses,
+ section: &Section,
+ input: &mut R,
+) -> Result<Option<CieOrFde<'bases, Section, R>>>
+where
+ R: Reader,
+ Section: UnwindSection<R>,
+{
+ let (offset, length, format) = loop {
+ let offset = input.offset_from(section.section());
+ let (length, format) = input.read_initial_length()?;
+
+ if Section::length_value_is_end_of_entries(length) {
+ return Ok(None);
+ }
+
+ // Hack: skip zero padding inserted by buggy compilers/linkers.
+ // We require that the padding is a multiple of 32-bits, otherwise
+ // there is no reliable way to determine when the padding ends. This
+ // should be okay since CFI entries must be aligned to the address size.
+
+ if length.into_u64() != 0 || format != Format::Dwarf32 {
+ break (offset, length, format);
+ }
+ };
+
+ let mut rest = input.split(length)?;
+ let cie_offset_base = rest.offset_from(section.section());
+ let cie_id_or_offset = match Section::cie_offset_encoding(format) {
+ CieOffsetEncoding::U32 => rest.read_u32().map(u64::from)?,
+ CieOffsetEncoding::U64 => rest.read_u64()?,
+ };
+
+ if Section::is_cie(format, cie_id_or_offset) {
+ let cie = CommonInformationEntry::parse_rest(offset, length, format, bases, section, rest)?;
+ Ok(Some(CieOrFde::Cie(cie)))
+ } else {
+ let cie_offset = R::Offset::from_u64(cie_id_or_offset)?;
+ let cie_offset = match section.resolve_cie_offset(cie_offset_base, cie_offset) {
+ None => return Err(Error::OffsetOutOfBounds),
+ Some(cie_offset) => cie_offset,
+ };
+
+ let fde = PartialFrameDescriptionEntry {
+ offset,
+ length,
+ format,
+ cie_offset: cie_offset.into(),
+ rest,
+ section: section.clone(),
+ bases,
+ };
+
+ Ok(Some(CieOrFde::Fde(fde)))
+ }
+}
+
+/// We support the z-style augmentation [defined by `.eh_frame`][ehframe].
+///
+/// [ehframe]: https://refspecs.linuxfoundation.org/LSB_3.0.0/LSB-Core-generic/LSB-Core-generic/ehframechpt.html
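+///
+/// For example, an augmentation string of `"zR"` (common in compiler output)
+/// indicates that the CIE's augmentation data contains only a pointer
+/// encoding for the addresses in its FDEs.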
+#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
+pub struct Augmentation {
+ /// > A 'L' may be present at any position after the first character of the
+ /// > string. This character may only be present if 'z' is the first character
+ /// > of the string. If present, it indicates the presence of one argument in
+ /// > the Augmentation Data of the CIE, and a corresponding argument in the
+ /// > Augmentation Data of the FDE. The argument in the Augmentation Data of
+ /// > the CIE is 1-byte and represents the pointer encoding used for the
+ /// > argument in the Augmentation Data of the FDE, which is the address of a
+ /// > language-specific data area (LSDA). The size of the LSDA pointer is
+ /// > specified by the pointer encoding used.
+ lsda: Option<constants::DwEhPe>,
+
+ /// > A 'P' may be present at any position after the first character of the
+ /// > string. This character may only be present if 'z' is the first character
+ /// > of the string. If present, it indicates the presence of two arguments in
+ /// > the Augmentation Data of the CIE. The first argument is 1-byte and
+ /// > represents the pointer encoding used for the second argument, which is
+ /// > the address of a personality routine handler. The size of the
+ /// > personality routine pointer is specified by the pointer encoding used.
+ personality: Option<(constants::DwEhPe, Pointer)>,
+
+ /// > A 'R' may be present at any position after the first character of the
+ /// > string. This character may only be present if 'z' is the first character
+ /// > of the string. If present, The Augmentation Data shall include a 1 byte
+ /// > argument that represents the pointer encoding for the address pointers
+ /// > used in the FDE.
+ fde_address_encoding: Option<constants::DwEhPe>,
+
+ /// True if this CIE's FDEs are trampolines for signal handlers.
+ is_signal_trampoline: bool,
+}
+
+impl Augmentation {
+ fn parse<Section, R>(
+ augmentation_str: &mut R,
+ bases: &BaseAddresses,
+ address_size: u8,
+ section: &Section,
+ input: &mut R,
+ ) -> Result<Augmentation>
+ where
+ R: Reader,
+ Section: UnwindSection<R>,
+ {
+ debug_assert!(
+ !augmentation_str.is_empty(),
+ "Augmentation::parse should only be called if we have an augmentation"
+ );
+
+ let mut augmentation = Augmentation::default();
+
+ let mut parsed_first = false;
+ let mut data = None;
+
+ while !augmentation_str.is_empty() {
+ let ch = augmentation_str.read_u8()?;
+ match ch {
+ b'z' => {
+ if parsed_first {
+ return Err(Error::UnknownAugmentation);
+ }
+
+ let augmentation_length = input.read_uleb128().and_then(R::Offset::from_u64)?;
+ data = Some(input.split(augmentation_length)?);
+ }
+ b'L' => {
+ let rest = data.as_mut().ok_or(Error::UnknownAugmentation)?;
+ let encoding = parse_pointer_encoding(rest)?;
+ augmentation.lsda = Some(encoding);
+ }
+ b'P' => {
+ let rest = data.as_mut().ok_or(Error::UnknownAugmentation)?;
+ let encoding = parse_pointer_encoding(rest)?;
+ let parameters = PointerEncodingParameters {
+ bases: &bases.eh_frame,
+ func_base: None,
+ address_size,
+ section: section.section(),
+ };
+
+ let personality = parse_encoded_pointer(encoding, &parameters, rest)?;
+ augmentation.personality = Some((encoding, personality));
+ }
+ b'R' => {
+ let rest = data.as_mut().ok_or(Error::UnknownAugmentation)?;
+ let encoding = parse_pointer_encoding(rest)?;
+ augmentation.fde_address_encoding = Some(encoding);
+ }
+ b'S' => augmentation.is_signal_trampoline = true,
+ _ => return Err(Error::UnknownAugmentation),
+ }
+
+ parsed_first = true;
+ }
+
+ Ok(augmentation)
+ }
+}
+
+/// Parsed augmentation data for a `FrameDescriptionEntry`.
+#[derive(Clone, Debug, Default, PartialEq, Eq)]
+struct AugmentationData {
+ lsda: Option<Pointer>,
+}
+
+impl AugmentationData {
+ fn parse<R: Reader>(
+ augmentation: &Augmentation,
+ encoding_parameters: &PointerEncodingParameters<R>,
+ input: &mut R,
+ ) -> Result<AugmentationData> {
+ // In theory, we should be iterating over the original augmentation
+ // string, interpreting each character, and reading the appropriate bits
+ // out of the augmentation data as we go. However, the only character
+ // that defines augmentation data in the FDE is the 'L' character, so we
+ // can just check for its presence directly.
+
+ let aug_data_len = input.read_uleb128().and_then(R::Offset::from_u64)?;
+ let rest = &mut input.split(aug_data_len)?;
+ let mut augmentation_data = AugmentationData::default();
+ if let Some(encoding) = augmentation.lsda {
+ let lsda = parse_encoded_pointer(encoding, encoding_parameters, rest)?;
+ augmentation_data.lsda = Some(lsda);
+ }
+ Ok(augmentation_data)
+ }
+}
+
+/// > A Common Information Entry holds information that is shared among many
+/// > Frame Description Entries. There is at least one CIE in every non-empty
+/// > `.debug_frame` section.
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct CommonInformationEntry<R, Offset = <R as Reader>::Offset>
+where
+ R: Reader<Offset = Offset>,
+ Offset: ReaderOffset,
+{
+ /// The offset of this entry from the start of its containing section.
+ offset: Offset,
+
+ /// > A constant that gives the number of bytes of the CIE structure, not
+ /// > including the length field itself (see Section 7.2.2). The size of the
+ /// > length field plus the value of length must be an integral multiple of
+ /// > the address size.
+ length: Offset,
+
+ format: Format,
+
+ /// > A version number (see Section 7.23). This number is specific to the
+ /// > call frame information and is independent of the DWARF version number.
+ version: u8,
+
+ /// The parsed augmentation, if any.
+ augmentation: Option<Augmentation>,
+
+ /// > The size of a target address in this CIE and any FDEs that use it, in
+ /// > bytes. If a compilation unit exists for this frame, its address size
+ /// > must match the address size here.
+ address_size: u8,
+
+ /// "The size of a segment selector in this CIE and any FDEs that use it, in
+ /// bytes."
+ segment_size: u8,
+
+ /// "A constant that is factored out of all advance location instructions
+ /// (see Section 6.4.2.1)."
+ code_alignment_factor: u64,
+
+ /// > A constant that is factored out of certain offset instructions (see
+ /// > below). The resulting value is (operand * data_alignment_factor).
+ data_alignment_factor: i64,
+
+ /// > An unsigned LEB128 constant that indicates which column in the rule
+ /// > table represents the return address of the function. Note that this
+ /// > column might not correspond to an actual machine register.
+ return_address_register: Register,
+
+ /// > A sequence of rules that are interpreted to create the initial setting
+ /// > of each column in the table.
+ ///
+ /// > The default rule for all columns before interpretation of the initial
+ /// > instructions is the undefined rule. However, an ABI authoring body or a
+ /// > compilation system authoring body may specify an alternate default
+ /// > value for any or all columns.
+ ///
+ /// This is followed by `DW_CFA_nop` padding until the end of `length` bytes
+ /// in the input.
+ initial_instructions: R,
+}
+
+impl<R: Reader> CommonInformationEntry<R> {
+ fn parse<Section: UnwindSection<R>>(
+ bases: &BaseAddresses,
+ section: &Section,
+ input: &mut R,
+ ) -> Result<CommonInformationEntry<R>> {
+ match parse_cfi_entry(bases, section, input)? {
+ Some(CieOrFde::Cie(cie)) => Ok(cie),
+ Some(CieOrFde::Fde(_)) => Err(Error::NotCieId),
+ None => Err(Error::NoEntryAtGivenOffset),
+ }
+ }
+
+ fn parse_rest<Section: UnwindSection<R>>(
+ offset: R::Offset,
+ length: R::Offset,
+ format: Format,
+ bases: &BaseAddresses,
+ section: &Section,
+ mut rest: R,
+ ) -> Result<CommonInformationEntry<R>> {
+ let version = rest.read_u8()?;
+
+ // Version 1 of `.debug_frame` corresponds to DWARF 2. For DWARF 3 and 4,
+ // the CIE version number matches the standard's version number.
+ match version {
+ 1 | 3 | 4 => (),
+ _ => return Err(Error::UnknownVersion(u64::from(version))),
+ }
+
+ let mut augmentation_string = rest.read_null_terminated_slice()?;
+
+ let (address_size, segment_size) = if Section::has_address_and_segment_sizes(version) {
+ let address_size = rest.read_u8()?;
+ let segment_size = rest.read_u8()?;
+ (address_size, segment_size)
+ } else {
+ (section.address_size(), section.segment_size())
+ };
+
+ let code_alignment_factor = rest.read_uleb128()?;
+ let data_alignment_factor = rest.read_sleb128()?;
+
+ let return_address_register = if version == 1 {
+ Register(rest.read_u8()?.into())
+ } else {
+ rest.read_uleb128().and_then(Register::from_u64)?
+ };
+
+ let augmentation = if augmentation_string.is_empty() {
+ None
+ } else {
+ Some(Augmentation::parse(
+ &mut augmentation_string,
+ bases,
+ address_size,
+ section,
+ &mut rest,
+ )?)
+ };
+
+ let entry = CommonInformationEntry {
+ offset,
+ length,
+ format,
+ version,
+ augmentation,
+ address_size,
+ segment_size,
+ code_alignment_factor,
+ data_alignment_factor,
+ return_address_register,
+ initial_instructions: rest,
+ };
+
+ Ok(entry)
+ }
+}
+
+/// # Signal Safe Methods
+///
+/// These methods are guaranteed not to allocate, acquire locks, or perform any
+/// other signal-unsafe operations.
+impl<R: Reader> CommonInformationEntry<R> {
+ /// Get the offset of this entry from the start of its containing section.
+ pub fn offset(&self) -> R::Offset {
+ self.offset
+ }
+
+ /// Return the encoding parameters for this CIE.
+ pub fn encoding(&self) -> Encoding {
+ Encoding {
+ format: self.format,
+ version: u16::from(self.version),
+ address_size: self.address_size,
+ }
+ }
+
+ /// The size of addresses (in bytes) in this CIE.
+ pub fn address_size(&self) -> u8 {
+ self.address_size
+ }
+
+ /// Iterate over this CIE's initial instructions.
+ ///
+ /// Can be [used with
+ /// `FallibleIterator`](./index.html#using-with-fallibleiterator).
+ pub fn instructions<'a, Section>(
+ &self,
+ section: &'a Section,
+ bases: &'a BaseAddresses,
+ ) -> CallFrameInstructionIter<'a, R>
+ where
+ Section: UnwindSection<R>,
+ {
+ CallFrameInstructionIter {
+ input: self.initial_instructions.clone(),
+ address_encoding: None,
+ parameters: PointerEncodingParameters {
+ bases: &bases.eh_frame,
+ func_base: None,
+ address_size: self.address_size,
+ section: section.section(),
+ },
+ }
+ }
+
+ /// > A constant that gives the number of bytes of the CIE structure, not
+ /// > including the length field itself (see Section 7.2.2). The size of the
+ /// > length field plus the value of length must be an integral multiple of
+ /// > the address size.
+ pub fn entry_len(&self) -> R::Offset {
+ self.length
+ }
+
+ /// > A version number (see Section 7.23). This number is specific to the
+ /// > call frame information and is independent of the DWARF version number.
+ pub fn version(&self) -> u8 {
+ self.version
+ }
+
+ /// Get the augmentation data, if any exists.
+ ///
+ /// The only augmentation understood by `gimli` is that which is defined by
+ /// `.eh_frame`.
+ pub fn augmentation(&self) -> Option<&Augmentation> {
+ self.augmentation.as_ref()
+ }
+
+ /// True if this CIE's FDEs have a LSDA.
+ pub fn has_lsda(&self) -> bool {
+ self.augmentation.map_or(false, |a| a.lsda.is_some())
+ }
+
+ /// Return the encoding of the LSDA address for this CIE's FDEs.
+ pub fn lsda_encoding(&self) -> Option<constants::DwEhPe> {
+ self.augmentation.and_then(|a| a.lsda)
+ }
+
+ /// Return the encoding and address of the personality routine handler
+ /// for this CIE's FDEs.
+ pub fn personality_with_encoding(&self) -> Option<(constants::DwEhPe, Pointer)> {
+ self.augmentation.as_ref().and_then(|a| a.personality)
+ }
+
+ /// Return the address of the personality routine handler
+ /// for this CIE's FDEs.
+ pub fn personality(&self) -> Option<Pointer> {
+ self.augmentation
+ .as_ref()
+ .and_then(|a| a.personality)
+ .map(|(_, p)| p)
+ }
+
+ /// Return the encoding of the addresses for this CIE's FDEs.
+ pub fn fde_address_encoding(&self) -> Option<constants::DwEhPe> {
+ self.augmentation.and_then(|a| a.fde_address_encoding)
+ }
+
+ /// True if this CIE's FDEs are trampolines for signal handlers.
+ pub fn is_signal_trampoline(&self) -> bool {
+ self.augmentation.map_or(false, |a| a.is_signal_trampoline)
+ }
+
+ /// > A constant that is factored out of all advance location instructions
+ /// > (see Section 6.4.2.1).
+ pub fn code_alignment_factor(&self) -> u64 {
+ self.code_alignment_factor
+ }
+
+ /// > A constant that is factored out of certain offset instructions (see
+ /// > below). The resulting value is (operand * data_alignment_factor).
+ pub fn data_alignment_factor(&self) -> i64 {
+ self.data_alignment_factor
+ }
+
+ /// > An unsigned ... constant that indicates which column in the rule
+ /// > table represents the return address of the function. Note that this
+ /// > column might not correspond to an actual machine register.
+ pub fn return_address_register(&self) -> Register {
+ self.return_address_register
+ }
+}
+
+/// A partially parsed `FrameDescriptionEntry`.
+///
+/// Fully parsing this FDE requires first parsing its CIE.
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct PartialFrameDescriptionEntry<'bases, Section, R>
+where
+ R: Reader,
+ Section: UnwindSection<R>,
+{
+ offset: R::Offset,
+ length: R::Offset,
+ format: Format,
+ cie_offset: Section::Offset,
+ rest: R,
+ section: Section,
+ bases: &'bases BaseAddresses,
+}
+
+impl<'bases, Section, R> PartialFrameDescriptionEntry<'bases, Section, R>
+where
+ R: Reader,
+ Section: UnwindSection<R>,
+{
+ fn parse_partial(
+ section: &Section,
+ bases: &'bases BaseAddresses,
+ input: &mut R,
+ ) -> Result<PartialFrameDescriptionEntry<'bases, Section, R>> {
+ match parse_cfi_entry(bases, section, input)? {
+ Some(CieOrFde::Cie(_)) => Err(Error::NotFdePointer),
+ Some(CieOrFde::Fde(partial)) => Ok(partial),
+ None => Err(Error::NoEntryAtGivenOffset),
+ }
+ }
+
+ /// Fully parse this FDE.
+ ///
+ /// You must provide a function to get its associated CIE (either by parsing
+ /// it on demand, or by looking it up in some table mapping offsets to CIEs
+ /// that you've already parsed, etc.).
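+ ///
+ /// For example, `partial.parse(EhFrame::cie_from_offset)` parses the CIE on
+ /// demand when iterating an `EhFrame` section (a sketch; a real unwinder
+ /// would typically cache parsed CIEs by offset rather than re-parse them for
+ /// every FDE).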
+ pub fn parse<F>(&self, get_cie: F) -> Result<FrameDescriptionEntry<R>>
+ where
+ F: FnMut(&Section, &BaseAddresses, Section::Offset) -> Result<CommonInformationEntry<R>>,
+ {
+ FrameDescriptionEntry::parse_rest(
+ self.offset,
+ self.length,
+ self.format,
+ self.cie_offset,
+ self.rest.clone(),
+ &self.section,
+ self.bases,
+ get_cie,
+ )
+ }
+}
+
+/// A `FrameDescriptionEntry` is a set of CFA instructions for an address range.
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct FrameDescriptionEntry<R, Offset = <R as Reader>::Offset>
+where
+ R: Reader<Offset = Offset>,
+ Offset: ReaderOffset,
+{
+ /// The start of this entry within its containing section.
+ offset: Offset,
+
+ /// > A constant that gives the number of bytes of the header and
+ /// > instruction stream for this function, not including the length field
+ /// > itself (see Section 7.2.2). The size of the length field plus the value
+ /// > of length must be an integral multiple of the address size.
+ length: Offset,
+
+ format: Format,
+
+ /// "A constant offset into the .debug_frame section that denotes the CIE
+ /// that is associated with this FDE."
+ ///
+ /// This is the CIE at that offset.
+ cie: CommonInformationEntry<R, Offset>,
+
+ /// > The address of the first location associated with this table entry. If
+ /// > the segment_size field of this FDE's CIE is non-zero, the initial
+ /// > location is preceded by a segment selector of the given length.
+ initial_segment: u64,
+ initial_address: u64,
+
+ /// "The number of bytes of program instructions described by this entry."
+ address_range: u64,
+
+ /// The parsed augmentation data, if we have any.
+ augmentation: Option<AugmentationData>,
+
+ /// "A sequence of table defining instructions that are described below."
+ ///
+ /// This is followed by `DW_CFA_nop` padding until `length` bytes of the
+ /// input are consumed.
+ instructions: R,
+}
+
+impl<R: Reader> FrameDescriptionEntry<R> {
+ #[allow(clippy::too_many_arguments)]
+ fn parse_rest<Section, F>(
+ offset: R::Offset,
+ length: R::Offset,
+ format: Format,
+ cie_pointer: Section::Offset,
+ mut rest: R,
+ section: &Section,
+ bases: &BaseAddresses,
+ mut get_cie: F,
+ ) -> Result<FrameDescriptionEntry<R>>
+ where
+ Section: UnwindSection<R>,
+ F: FnMut(&Section, &BaseAddresses, Section::Offset) -> Result<CommonInformationEntry<R>>,
+ {
+ let cie = get_cie(section, bases, cie_pointer)?;
+
+ let initial_segment = if cie.segment_size > 0 {
+ rest.read_address(cie.segment_size)?
+ } else {
+ 0
+ };
+
+ let mut parameters = PointerEncodingParameters {
+ bases: &bases.eh_frame,
+ func_base: None,
+ address_size: cie.address_size,
+ section: section.section(),
+ };
+
+ let (initial_address, address_range) = Self::parse_addresses(&mut rest, &cie, &parameters)?;
+ parameters.func_base = Some(initial_address);
+
+ let aug_data = if let Some(ref augmentation) = cie.augmentation {
+ Some(AugmentationData::parse(
+ augmentation,
+ &parameters,
+ &mut rest,
+ )?)
+ } else {
+ None
+ };
+
+ let entry = FrameDescriptionEntry {
+ offset,
+ length,
+ format,
+ cie,
+ initial_segment,
+ initial_address,
+ address_range,
+ augmentation: aug_data,
+ instructions: rest,
+ };
+
+ Ok(entry)
+ }
+
+ fn parse_addresses(
+ input: &mut R,
+ cie: &CommonInformationEntry<R>,
+ parameters: &PointerEncodingParameters<R>,
+ ) -> Result<(u64, u64)> {
+ let encoding = cie.augmentation().and_then(|a| a.fde_address_encoding);
+ if let Some(encoding) = encoding {
+ let initial_address = parse_encoded_pointer(encoding, parameters, input)?;
+
+ // Ignore indirection.
+ let initial_address = initial_address.into();
+
+ // Address ranges cannot be relative to anything, so just grab the
+ // data format bits from the encoding.
+ let address_range = parse_encoded_pointer(encoding.format(), parameters, input)?;
+ Ok((initial_address, address_range.into()))
+ } else {
+ let initial_address = input.read_address(cie.address_size)?;
+ let address_range = input.read_address(cie.address_size)?;
+ Ok((initial_address, address_range))
+ }
+ }
+
+ /// Return the table of unwind information for this FDE.
+ #[inline]
+ pub fn rows<'a, 'ctx, Section: UnwindSection<R>, A: UnwindContextStorage<R>>(
+ &self,
+ section: &'a Section,
+ bases: &'a BaseAddresses,
+ ctx: &'ctx mut UnwindContext<R, A>,
+ ) -> Result<UnwindTable<'a, 'ctx, R, A>> {
+ UnwindTable::new(section, bases, ctx, self)
+ }
+
+ /// Find the frame unwind information for the given address.
+ ///
+ /// If found, the unwind information is returned. If not found,
+ /// `Err(gimli::Error::NoUnwindInfoForAddress)` is returned. If parsing or
+ /// CFI evaluation fails, the error is returned.
+ pub fn unwind_info_for_address<'ctx, Section: UnwindSection<R>, A: UnwindContextStorage<R>>(
+ &self,
+ section: &Section,
+ bases: &BaseAddresses,
+ ctx: &'ctx mut UnwindContext<R, A>,
+ address: u64,
+ ) -> Result<&'ctx UnwindTableRow<R, A>> {
+ let mut table = self.rows(section, bases, ctx)?;
+ while let Some(row) = table.next_row()? {
+ if row.contains(address) {
+ return Ok(table.ctx.row());
+ }
+ }
+ Err(Error::NoUnwindInfoForAddress)
+ }
+}
+
+/// # Signal Safe Methods
+///
+/// These methods are guaranteed not to allocate, acquire locks, or perform any
+/// other signal-unsafe operations.
+#[allow(clippy::len_without_is_empty)]
+impl<R: Reader> FrameDescriptionEntry<R> {
+ /// Get the offset of this entry from the start of its containing section.
+ pub fn offset(&self) -> R::Offset {
+ self.offset
+ }
+
+ /// Get a reference to this FDE's CIE.
+ pub fn cie(&self) -> &CommonInformationEntry<R> {
+ &self.cie
+ }
+
+ /// > A constant that gives the number of bytes of the header and
+ /// > instruction stream for this function, not including the length field
+ /// > itself (see Section 7.2.2). The size of the length field plus the value
+ /// > of length must be an integral multiple of the address size.
+ pub fn entry_len(&self) -> R::Offset {
+ self.length
+ }
+
+ /// Iterate over this FDE's instructions.
+ ///
+ /// This will not include the CIE's initial instructions; if you want those,
+ /// iterate over `fde.cie().instructions()` first.
+ ///
+ /// Can be [used with
+ /// `FallibleIterator`](./index.html#using-with-fallibleiterator).
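+ ///
+ /// A sketch of iterating the instructions (the `fde`, `section`, and `bases`
+ /// values are assumed to come from the caller):
+ ///
+ /// ```ignore
+ /// let mut insns = fde.instructions(&section, &bases);
+ /// while let Some(insn) = insns.next()? {
+ /// // Each item is a `CallFrameInstruction`.
+ /// let _ = insn;
+ /// }
+ /// ```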
+ pub fn instructions<'a, Section>(
+ &self,
+ section: &'a Section,
+ bases: &'a BaseAddresses,
+ ) -> CallFrameInstructionIter<'a, R>
+ where
+ Section: UnwindSection<R>,
+ {
+ CallFrameInstructionIter {
+ input: self.instructions.clone(),
+ address_encoding: self.cie.augmentation().and_then(|a| a.fde_address_encoding),
+ parameters: PointerEncodingParameters {
+ bases: &bases.eh_frame,
+ func_base: None,
+ address_size: self.cie.address_size,
+ section: section.section(),
+ },
+ }
+ }
+
+ /// The first address for which this entry has unwind information.
+ pub fn initial_address(&self) -> u64 {
+ self.initial_address
+ }
+
+ /// The number of bytes of instructions that this entry has unwind
+ /// information for.
+ pub fn len(&self) -> u64 {
+ self.address_range
+ }
+
+ /// Return `true` if the given address is within this FDE, `false`
+ /// otherwise.
+ ///
+ /// This is equivalent to `entry.initial_address() <= address <
+ /// entry.initial_address() + entry.len()`.
+ pub fn contains(&self, address: u64) -> bool {
+ let start = self.initial_address();
+ let end = start + self.len();
+ start <= address && address < end
+ }
+
+ /// The address of this FDE's language-specific data area (LSDA), if it has
+ /// any.
+ pub fn lsda(&self) -> Option<Pointer> {
+ self.augmentation.as_ref().and_then(|a| a.lsda)
+ }
+
+ /// Return true if this FDE's function is a trampoline for a signal handler.
+ #[inline]
+ pub fn is_signal_trampoline(&self) -> bool {
+ self.cie().is_signal_trampoline()
+ }
+
+ /// Return the address of the FDE's function's personality routine
+ /// handler. The personality routine performs language-specific cleanup when
+ /// unwinding stack frames that will not be run again.
+ #[inline]
+ pub fn personality(&self) -> Option<Pointer> {
+ self.cie().personality()
+ }
+}
+
+/// Specification of what storage should be used for [`UnwindContext`].
+///
+#[cfg_attr(
+ feature = "read",
+ doc = "
+Normally you would only need to use [`StoreOnHeap`], which places the stack
+on the heap using [`Vec`]. This is the default storage type parameter for [`UnwindContext`].
+"
+)]
+///
+/// If you need to prevent [`UnwindContext`] from allocating memory, e.g. for
+/// signal safety, you can provide your own storage specification:
+/// ```rust,no_run
+/// # use gimli::*;
+/// #
+/// # fn foo<'a>(some_fde: gimli::FrameDescriptionEntry<gimli::EndianSlice<'a, gimli::LittleEndian>>)
+/// # -> gimli::Result<()> {
+/// # let eh_frame: gimli::EhFrame<_> = unreachable!();
+/// # let bases = unimplemented!();
+/// #
+/// struct StoreOnStack;
+///
+/// impl<R: Reader> UnwindContextStorage<R> for StoreOnStack {
+/// type Rules = [(Register, RegisterRule<R>); 192];
+/// type Stack = [UnwindTableRow<R, Self>; 4];
+/// }
+///
+/// let mut ctx = UnwindContext::<_, StoreOnStack>::new_in();
+///
+/// // Initialize the context by evaluating the CIE's initial instruction program,
+/// // and generate the unwind table.
+/// let mut table = some_fde.rows(&eh_frame, &bases, &mut ctx)?;
+/// while let Some(row) = table.next_row()? {
+/// // Do stuff with each row...
+/// # let _ = row;
+/// }
+/// # unreachable!()
+/// # }
+/// ```
+pub trait UnwindContextStorage<R: Reader>: Sized {
+ /// The storage used for register rules in an unwind table row.
+ ///
+ /// Note that this is nested within the stack.
+ type Rules: ArrayLike<Item = (Register, RegisterRule<R>)>;
+
+ /// The storage used for unwind table row stack.
+ type Stack: ArrayLike<Item = UnwindTableRow<R, Self>>;
+}
+
+#[cfg(feature = "read")]
+const MAX_RULES: usize = 192;
+
+#[cfg(feature = "read")]
+impl<R: Reader> UnwindContextStorage<R> for StoreOnHeap {
+ type Rules = [(Register, RegisterRule<R>); MAX_RULES];
+ type Stack = Vec<UnwindTableRow<R, Self>>;
+}
+
+/// Common context needed when evaluating the call frame unwinding information.
+///
+/// This structure can be large so it is advisable to place it on the heap.
+/// To avoid re-allocating the context multiple times when evaluating multiple
+/// CFI programs, it can be reused.
+///
+/// ```
+/// use gimli::{UnwindContext, UnwindTable};
+///
+/// # fn foo<'a>(some_fde: gimli::FrameDescriptionEntry<gimli::EndianSlice<'a, gimli::LittleEndian>>)
+/// # -> gimli::Result<()> {
+/// # let eh_frame: gimli::EhFrame<_> = unreachable!();
+/// # let bases = unimplemented!();
+/// // An uninitialized context.
+/// let mut ctx = Box::new(UnwindContext::new());
+///
+/// // Initialize the context by evaluating the CIE's initial instruction program,
+/// // and generate the unwind table.
+/// let mut table = some_fde.rows(&eh_frame, &bases, &mut ctx)?;
+/// while let Some(row) = table.next_row()? {
+/// // Do stuff with each row...
+/// # let _ = row;
+/// }
+/// # unreachable!()
+/// # }
+/// ```
+#[derive(Clone, PartialEq, Eq)]
+pub struct UnwindContext<R: Reader, A: UnwindContextStorage<R> = StoreOnHeap> {
+ // Stack of rows. The last row is the row currently being built by the
+ // program. There is always at least one row. The vast majority of CFI
+ // programs will only ever have one row on the stack.
+ stack: ArrayVec<A::Stack>,
+
+ // If we are evaluating an FDE's instructions, then `is_initialized` will be
+ // `true`. If `initial_rule` is `Some`, then the initial register rules are either
+ // all default rules or have just 1 non-default rule, stored in `initial_rule`.
+ // If it's `None`, `stack[0]` will contain the initial register rules
+ // described by the CIE's initial instructions. These rules are used by
+ // `DW_CFA_restore`. Otherwise, when we are currently evaluating a CIE's
+ // initial instructions, `is_initialized` will be `false` and initial rules
+ // cannot be read.
+ initial_rule: Option<(Register, RegisterRule<R>)>,
+
+ is_initialized: bool,
+}
+
+impl<R: Reader, S: UnwindContextStorage<R>> Debug for UnwindContext<R, S> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.debug_struct("UnwindContext")
+ .field("stack", &self.stack)
+ .field("initial_rule", &self.initial_rule)
+ .field("is_initialized", &self.is_initialized)
+ .finish()
+ }
+}
+
+impl<R: Reader, A: UnwindContextStorage<R>> Default for UnwindContext<R, A> {
+ fn default() -> Self {
+ Self::new_in()
+ }
+}
+
+#[cfg(feature = "read")]
+impl<R: Reader> UnwindContext<R> {
+ /// Construct a new call frame unwinding context.
+ pub fn new() -> Self {
+ Self::new_in()
+ }
+}
+
+/// # Signal Safe Methods
+///
+/// These methods are guaranteed not to allocate, acquire locks, or perform any
+/// other signal-unsafe operations, if a non-allocating storage is used.
+impl<R: Reader, A: UnwindContextStorage<R>> UnwindContext<R, A> {
+ /// Construct a new call frame unwinding context.
+ pub fn new_in() -> Self {
+ let mut ctx = UnwindContext {
+ stack: Default::default(),
+ initial_rule: None,
+ is_initialized: false,
+ };
+ ctx.reset();
+ ctx
+ }
+
+ /// Run the CIE's initial instructions and initialize this `UnwindContext`.
+ fn initialize<Section: UnwindSection<R>>(
+ &mut self,
+ section: &Section,
+ bases: &BaseAddresses,
+ cie: &CommonInformationEntry<R>,
+ ) -> Result<()> {
+ if self.is_initialized {
+ self.reset();
+ }
+
+ let mut table = UnwindTable::new_for_cie(section, bases, self, cie);
+ while let Some(_) = table.next_row()? {}
+
+ self.save_initial_rules()?;
+ Ok(())
+ }
+
+ fn reset(&mut self) {
+ self.stack.clear();
+ self.stack.try_push(UnwindTableRow::default()).unwrap();
+ debug_assert!(self.stack[0].is_default());
+ self.initial_rule = None;
+ self.is_initialized = false;
+ }
+
+ fn row(&self) -> &UnwindTableRow<R, A> {
+ self.stack.last().unwrap()
+ }
+
+ fn row_mut(&mut self) -> &mut UnwindTableRow<R, A> {
+ self.stack.last_mut().unwrap()
+ }
+
+ fn save_initial_rules(&mut self) -> Result<()> {
+ assert_eq!(self.is_initialized, false);
+ self.initial_rule = match *self.stack.last().unwrap().registers.rules {
+ // All rules are default (undefined). In this case just synthesize
+ // an undefined rule.
+ [] => Some((Register(0), RegisterRule::Undefined)),
+ [ref rule] => Some(rule.clone()),
+ _ => {
+ let rules = self.stack.last().unwrap().clone();
+ self.stack
+ .try_insert(0, rules)
+ .map_err(|_| Error::StackFull)?;
+ None
+ }
+ };
+ self.is_initialized = true;
+ Ok(())
+ }
+
+ fn start_address(&self) -> u64 {
+ self.row().start_address
+ }
+
+ fn set_start_address(&mut self, start_address: u64) {
+ let row = self.row_mut();
+ row.start_address = start_address;
+ }
+
+ fn set_register_rule(&mut self, register: Register, rule: RegisterRule<R>) -> Result<()> {
+ let row = self.row_mut();
+ row.registers.set(register, rule)
+ }
+
+ /// Returns `None` if we have not completed evaluation of a CIE's initial
+ /// instructions.
+ fn get_initial_rule(&self, register: Register) -> Option<RegisterRule<R>> {
+ if !self.is_initialized {
+ return None;
+ }
+ Some(match self.initial_rule {
+ None => self.stack[0].registers.get(register),
+ Some((r, ref rule)) if r == register => rule.clone(),
+ _ => RegisterRule::Undefined,
+ })
+ }
+
+ fn set_cfa(&mut self, cfa: CfaRule<R>) {
+ self.row_mut().cfa = cfa;
+ }
+
+ fn cfa_mut(&mut self) -> &mut CfaRule<R> {
+ &mut self.row_mut().cfa
+ }
+
+ fn push_row(&mut self) -> Result<()> {
+ let new_row = self.row().clone();
+ self.stack.try_push(new_row).map_err(|_| Error::StackFull)
+ }
+
+ fn pop_row(&mut self) -> Result<()> {
+ let min_size = if self.is_initialized && self.initial_rule.is_none() {
+ 2
+ } else {
+ 1
+ };
+ if self.stack.len() <= min_size {
+ return Err(Error::PopWithEmptyStack);
+ }
+ self.stack.pop().unwrap();
+ Ok(())
+ }
+}
+
+/// The `UnwindTable` iteratively evaluates a `FrameDescriptionEntry`'s
+/// `CallFrameInstruction` program, yielding each row one at a time.
+///
+/// > 6.4.1 Structure of Call Frame Information
+/// >
+/// > DWARF supports virtual unwinding by defining an architecture independent
+/// > basis for recording how procedures save and restore registers during their
+/// > lifetimes. This basis must be augmented on some machines with specific
+/// > information that is defined by an architecture specific ABI authoring
+/// > committee, a hardware vendor, or a compiler producer. The body defining a
+/// > specific augmentation is referred to below as the “augmenter.”
+/// >
+/// > Abstractly, this mechanism describes a very large table that has the
+/// > following structure:
+/// >
+/// > <table>
+/// > <tr>
+/// > <th>LOC</th><th>CFA</th><th>R0</th><th>R1</th><td>...</td><th>RN</th>
+/// > </tr>
+/// > <tr>
+/// > <th>L0</th> <td></td> <td></td> <td></td> <td></td> <td></td>
+/// > </tr>
+/// > <tr>
+/// > <th>L1</th> <td></td> <td></td> <td></td> <td></td> <td></td>
+/// > </tr>
+/// > <tr>
+/// > <td>...</td><td></td> <td></td> <td></td> <td></td> <td></td>
+/// > </tr>
+/// > <tr>
+/// > <th>LN</th> <td></td> <td></td> <td></td> <td></td> <td></td>
+/// > </tr>
+/// > </table>
+/// >
+/// > The first column indicates an address for every location that contains code
+/// > in a program. (In shared objects, this is an object-relative offset.) The
+/// > remaining columns contain virtual unwinding rules that are associated with
+/// > the indicated location.
+/// >
+/// > The CFA column defines the rule which computes the Canonical Frame Address
+/// > value; it may be either a register and a signed offset that are added
+/// > together, or a DWARF expression that is evaluated.
+/// >
+/// > The remaining columns are labeled by register number. This includes some
+/// > registers that have special designation on some architectures such as the PC
+/// > and the stack pointer register. (The actual mapping of registers for a
+/// > particular architecture is defined by the augmenter.) The register columns
+/// > contain rules that describe whether a given register has been saved and the
+/// > rule to find the value for the register in the previous frame.
+/// >
+/// > ...
+/// >
+/// > This table would be extremely large if actually constructed as
+/// > described. Most of the entries at any point in the table are identical to
+/// > the ones above them. The whole table can be represented quite compactly by
+/// > recording just the differences starting at the beginning address of each
+/// > subroutine in the program.
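+///
+/// In practice, an `UnwindTable` is usually obtained via
+/// `FrameDescriptionEntry::rows` (or indirectly through
+/// `UnwindSection::unwind_info_for_address`); see the examples on
+/// [`UnwindContext`](./struct.UnwindContext.html).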
+#[derive(Debug)]
+pub struct UnwindTable<'a, 'ctx, R: Reader, A: UnwindContextStorage<R> = StoreOnHeap> {
+ code_alignment_factor: Wrapping<u64>,
+ data_alignment_factor: Wrapping<i64>,
+ next_start_address: u64,
+ last_end_address: u64,
+ returned_last_row: bool,
+ current_row_valid: bool,
+ instructions: CallFrameInstructionIter<'a, R>,
+ ctx: &'ctx mut UnwindContext<R, A>,
+}
+
+/// # Signal Safe Methods
+///
+/// These methods are guaranteed not to allocate, acquire locks, or perform any
+/// other signal-unsafe operations.
+impl<'a, 'ctx, R: Reader, A: UnwindContextStorage<R>> UnwindTable<'a, 'ctx, R, A> {
+ /// Construct a new `UnwindTable` for the given
+ /// `FrameDescriptionEntry`'s CFI unwinding program.
+ pub fn new<Section: UnwindSection<R>>(
+ section: &'a Section,
+ bases: &'a BaseAddresses,
+ ctx: &'ctx mut UnwindContext<R, A>,
+ fde: &FrameDescriptionEntry<R>,
+ ) -> Result<Self> {
+ ctx.initialize(section, bases, fde.cie())?;
+ Ok(Self::new_for_fde(section, bases, ctx, fde))
+ }
+
+ fn new_for_fde<Section: UnwindSection<R>>(
+ section: &'a Section,
+ bases: &'a BaseAddresses,
+ ctx: &'ctx mut UnwindContext<R, A>,
+ fde: &FrameDescriptionEntry<R>,
+ ) -> Self {
+ assert!(ctx.stack.len() >= 1);
+ UnwindTable {
+ code_alignment_factor: Wrapping(fde.cie().code_alignment_factor()),
+ data_alignment_factor: Wrapping(fde.cie().data_alignment_factor()),
+ next_start_address: fde.initial_address(),
+ last_end_address: fde.initial_address().wrapping_add(fde.len()),
+ returned_last_row: false,
+ current_row_valid: false,
+ instructions: fde.instructions(section, bases),
+ ctx,
+ }
+ }
+
+ fn new_for_cie<Section: UnwindSection<R>>(
+ section: &'a Section,
+ bases: &'a BaseAddresses,
+ ctx: &'ctx mut UnwindContext<R, A>,
+ cie: &CommonInformationEntry<R>,
+ ) -> Self {
+ assert!(ctx.stack.len() >= 1);
+ UnwindTable {
+ code_alignment_factor: Wrapping(cie.code_alignment_factor()),
+ data_alignment_factor: Wrapping(cie.data_alignment_factor()),
+ next_start_address: 0,
+ last_end_address: 0,
+ returned_last_row: false,
+ current_row_valid: false,
+ instructions: cie.instructions(section, bases),
+ ctx,
+ }
+ }
+
+ /// Evaluate call frame instructions until the next row of the table is
+ /// completed, and return it.
+ ///
+ /// Unfortunately, this cannot be used with `FallibleIterator` because of
+ /// the restricted lifetime of the yielded item.
+ pub fn next_row(&mut self) -> Result<Option<&UnwindTableRow<R, A>>> {
+ assert!(self.ctx.stack.len() >= 1);
+ self.ctx.set_start_address(self.next_start_address);
+ self.current_row_valid = false;
+
+ loop {
+ match self.instructions.next() {
+ Err(e) => return Err(e),
+
+ Ok(None) => {
+ if self.returned_last_row {
+ return Ok(None);
+ }
+
+ let row = self.ctx.row_mut();
+ row.end_address = self.last_end_address;
+
+ self.returned_last_row = true;
+ self.current_row_valid = true;
+ return Ok(Some(row));
+ }
+
+ Ok(Some(instruction)) => {
+ if self.evaluate(instruction)? {
+ self.current_row_valid = true;
+ return Ok(Some(self.ctx.row()));
+ }
+ }
+ };
+ }
+ }
+
+ /// Returns the current row with the lifetime of the context.
+ pub fn into_current_row(self) -> Option<&'ctx UnwindTableRow<R, A>> {
+ if self.current_row_valid {
+ Some(self.ctx.row())
+ } else {
+ None
+ }
+ }
+
+ /// Evaluate one call frame instruction. Return `Ok(true)` if the row is
+ /// complete, `Ok(false)` otherwise.
+ fn evaluate(&mut self, instruction: CallFrameInstruction<R>) -> Result<bool> {
+ use crate::CallFrameInstruction::*;
+
+ match instruction {
+ // Instructions that complete the current row and advance the
+ // address for the next row.
+ SetLoc { address } => {
+ if address < self.ctx.start_address() {
+ return Err(Error::InvalidAddressRange);
+ }
+
+ self.next_start_address = address;
+ self.ctx.row_mut().end_address = self.next_start_address;
+ return Ok(true);
+ }
+ AdvanceLoc { delta } => {
+ let delta = Wrapping(u64::from(delta)) * self.code_alignment_factor;
+ self.next_start_address = (Wrapping(self.ctx.start_address()) + delta).0;
+ self.ctx.row_mut().end_address = self.next_start_address;
+ return Ok(true);
+ }
+
+ // Instructions that modify the CFA.
+ DefCfa { register, offset } => {
+ self.ctx.set_cfa(CfaRule::RegisterAndOffset {
+ register,
+ offset: offset as i64,
+ });
+ }
+ DefCfaSf {
+ register,
+ factored_offset,
+ } => {
+ let data_align = self.data_alignment_factor;
+ self.ctx.set_cfa(CfaRule::RegisterAndOffset {
+ register,
+ offset: (Wrapping(factored_offset) * data_align).0,
+ });
+ }
+ DefCfaRegister { register } => {
+ if let CfaRule::RegisterAndOffset {
+ register: ref mut reg,
+ ..
+ } = *self.ctx.cfa_mut()
+ {
+ *reg = register;
+ } else {
+ return Err(Error::CfiInstructionInInvalidContext);
+ }
+ }
+ DefCfaOffset { offset } => {
+ if let CfaRule::RegisterAndOffset {
+ offset: ref mut off,
+ ..
+ } = *self.ctx.cfa_mut()
+ {
+ *off = offset as i64;
+ } else {
+ return Err(Error::CfiInstructionInInvalidContext);
+ }
+ }
+ DefCfaOffsetSf { factored_offset } => {
+ if let CfaRule::RegisterAndOffset {
+ offset: ref mut off,
+ ..
+ } = *self.ctx.cfa_mut()
+ {
+ let data_align = self.data_alignment_factor;
+ *off = (Wrapping(factored_offset) * data_align).0;
+ } else {
+ return Err(Error::CfiInstructionInInvalidContext);
+ }
+ }
+ DefCfaExpression { expression } => {
+ self.ctx.set_cfa(CfaRule::Expression(expression));
+ }
+
+ // Instructions that define register rules.
+ Undefined { register } => {
+ self.ctx
+ .set_register_rule(register, RegisterRule::Undefined)?;
+ }
+ SameValue { register } => {
+ self.ctx
+ .set_register_rule(register, RegisterRule::SameValue)?;
+ }
+ Offset {
+ register,
+ factored_offset,
+ } => {
+ let offset = Wrapping(factored_offset as i64) * self.data_alignment_factor;
+ self.ctx
+ .set_register_rule(register, RegisterRule::Offset(offset.0))?;
+ }
+ OffsetExtendedSf {
+ register,
+ factored_offset,
+ } => {
+ let offset = Wrapping(factored_offset) * self.data_alignment_factor;
+ self.ctx
+ .set_register_rule(register, RegisterRule::Offset(offset.0))?;
+ }
+ ValOffset {
+ register,
+ factored_offset,
+ } => {
+ let offset = Wrapping(factored_offset as i64) * self.data_alignment_factor;
+ self.ctx
+ .set_register_rule(register, RegisterRule::ValOffset(offset.0))?;
+ }
+ ValOffsetSf {
+ register,
+ factored_offset,
+ } => {
+ let offset = Wrapping(factored_offset) * self.data_alignment_factor;
+ self.ctx
+ .set_register_rule(register, RegisterRule::ValOffset(offset.0))?;
+ }
+ Register {
+ dest_register,
+ src_register,
+ } => {
+ self.ctx
+ .set_register_rule(dest_register, RegisterRule::Register(src_register))?;
+ }
+ Expression {
+ register,
+ expression,
+ } => {
+ let expression = RegisterRule::Expression(expression);
+ self.ctx.set_register_rule(register, expression)?;
+ }
+ ValExpression {
+ register,
+ expression,
+ } => {
+ let expression = RegisterRule::ValExpression(expression);
+ self.ctx.set_register_rule(register, expression)?;
+ }
+ Restore { register } => {
+ let initial_rule = if let Some(rule) = self.ctx.get_initial_rule(register) {
+ rule
+ } else {
+ // Can't restore the initial rule when we are
+ // evaluating the initial rules!
+ return Err(Error::CfiInstructionInInvalidContext);
+ };
+
+ self.ctx.set_register_rule(register, initial_rule)?;
+ }
+
+ // Row push and pop instructions.
+ RememberState => {
+ self.ctx.push_row()?;
+ }
+ RestoreState => {
+ // Pop state while preserving current location.
+ let start_address = self.ctx.start_address();
+ self.ctx.pop_row()?;
+ self.ctx.set_start_address(start_address);
+ }
+
+ // GNU Extension. Save the size somewhere so the unwinder can use
+ // it when restoring IP
+ ArgsSize { size } => {
+ self.ctx.row_mut().saved_args_size = size;
+ }
+
+ // No operation.
+ Nop => {}
+ };
+
+ Ok(false)
+ }
+}
+
+// We tend to have very few register rules: usually only a couple. Even if we
+// have a rule for every register, on x86-64 with SSE and everything we're
+// talking about ~100 rules. So rather than keeping the rules in a hash map, or
+// a vector indexed by register number (which would lead to filling lots of
+// empty entries), we store them as a vec of (register number, register rule)
+// pairs.
+//
+// Additionally, because every register's default rule is implicitly
+// `RegisterRule::Undefined`, we never store a register's rule in this vec if
+// it is undefined; that saves a little more space and means slightly fewer
+// comparisons.
+//
+// The maximum number of rules preallocated by libunwind is 97 for AArch64, 128
+// for ARM, and even 188 for MIPS. It is extremely unlikely to encounter this
+// many register rules in practice.
+//
+// See:
+// - https://github.com/libunwind/libunwind/blob/11fd461095ea98f4b3e3a361f5a8a558519363fa/include/tdep-x86_64/dwarf-config.h#L36
+// - https://github.com/libunwind/libunwind/blob/11fd461095ea98f4b3e3a361f5a8a558519363fa/include/tdep-aarch64/dwarf-config.h#L32
+// - https://github.com/libunwind/libunwind/blob/11fd461095ea98f4b3e3a361f5a8a558519363fa/include/tdep-arm/dwarf-config.h#L31
+// - https://github.com/libunwind/libunwind/blob/11fd461095ea98f4b3e3a361f5a8a558519363fa/include/tdep-mips/dwarf-config.h#L31
+struct RegisterRuleMap<R: Reader, S: UnwindContextStorage<R> = StoreOnHeap> {
+ rules: ArrayVec<S::Rules>,
+}
+
+impl<R: Reader, S: UnwindContextStorage<R>> Debug for RegisterRuleMap<R, S> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.debug_struct("RegisterRuleMap")
+ .field("rules", &self.rules)
+ .finish()
+ }
+}
+
+impl<R: Reader, S: UnwindContextStorage<R>> Clone for RegisterRuleMap<R, S> {
+ fn clone(&self) -> Self {
+ Self {
+ rules: self.rules.clone(),
+ }
+ }
+}
+
+impl<R: Reader, S: UnwindContextStorage<R>> Default for RegisterRuleMap<R, S> {
+ fn default() -> Self {
+ RegisterRuleMap {
+ rules: Default::default(),
+ }
+ }
+}
+
+/// # Signal Safe Methods
+///
+/// These methods are guaranteed not to allocate, acquire locks, or perform any
+/// other signal-unsafe operations.
+impl<R: Reader, S: UnwindContextStorage<R>> RegisterRuleMap<R, S> {
+ fn is_default(&self) -> bool {
+ self.rules.is_empty()
+ }
+
+ fn get(&self, register: Register) -> RegisterRule<R> {
+ self.rules
+ .iter()
+ .find(|rule| rule.0 == register)
+ .map(|r| {
+ debug_assert!(r.1.is_defined());
+ r.1.clone()
+ })
+ .unwrap_or(RegisterRule::Undefined)
+ }
+
+ fn set(&mut self, register: Register, rule: RegisterRule<R>) -> Result<()> {
+ if !rule.is_defined() {
+ let idx = self
+ .rules
+ .iter()
+ .enumerate()
+ .find(|&(_, r)| r.0 == register)
+ .map(|(i, _)| i);
+ if let Some(idx) = idx {
+ self.rules.swap_remove(idx);
+ }
+ return Ok(());
+ }
+
+ for &mut (reg, ref mut old_rule) in &mut *self.rules {
+ debug_assert!(old_rule.is_defined());
+ if reg == register {
+ *old_rule = rule;
+ return Ok(());
+ }
+ }
+
+ self.rules
+ .try_push((register, rule))
+ .map_err(|_| Error::TooManyRegisterRules)
+ }
+
+ fn iter(&self) -> RegisterRuleIter<R> {
+ RegisterRuleIter(self.rules.iter())
+ }
+}
+
+impl<'a, R, S: UnwindContextStorage<R>> FromIterator<&'a (Register, RegisterRule<R>)>
+ for RegisterRuleMap<R, S>
+where
+ R: 'a + Reader,
+{
+ fn from_iter<T>(iter: T) -> Self
+ where
+ T: IntoIterator<Item = &'a (Register, RegisterRule<R>)>,
+ {
+ let iter = iter.into_iter();
+ let mut rules = RegisterRuleMap::default();
+ for &(reg, ref rule) in iter.filter(|r| r.1.is_defined()) {
+ rules.set(reg, rule.clone()).expect(
+ "This is only used in tests, impl isn't exposed publicly.
+ If you trip this, fix your test",
+ );
+ }
+ rules
+ }
+}
+
+impl<R, S: UnwindContextStorage<R>> PartialEq for RegisterRuleMap<R, S>
+where
+ R: Reader + PartialEq,
+{
+ fn eq(&self, rhs: &Self) -> bool {
+ for &(reg, ref rule) in &*self.rules {
+ debug_assert!(rule.is_defined());
+ if *rule != rhs.get(reg) {
+ return false;
+ }
+ }
+
+ for &(reg, ref rhs_rule) in &*rhs.rules {
+ debug_assert!(rhs_rule.is_defined());
+ if *rhs_rule != self.get(reg) {
+ return false;
+ }
+ }
+
+ true
+ }
+}
+
+impl<R, S: UnwindContextStorage<R>> Eq for RegisterRuleMap<R, S> where R: Reader + Eq {}
+
+/// An unordered iterator for register rules.
+#[derive(Debug, Clone)]
+pub struct RegisterRuleIter<'iter, R>(::core::slice::Iter<'iter, (Register, RegisterRule<R>)>)
+where
+ R: Reader;
+
+impl<'iter, R: Reader> Iterator for RegisterRuleIter<'iter, R> {
+ type Item = &'iter (Register, RegisterRule<R>);
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.0.next()
+ }
+}
+
+/// A row in the virtual unwind table that describes how to find the values of
+/// the registers in the *previous* frame for a range of PC addresses.
+#[derive(PartialEq, Eq)]
+pub struct UnwindTableRow<R: Reader, S: UnwindContextStorage<R> = StoreOnHeap> {
+ start_address: u64,
+ end_address: u64,
+ saved_args_size: u64,
+ cfa: CfaRule<R>,
+ registers: RegisterRuleMap<R, S>,
+}
+
+impl<R: Reader, S: UnwindContextStorage<R>> Debug for UnwindTableRow<R, S> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.debug_struct("UnwindTableRow")
+ .field("start_address", &self.start_address)
+ .field("end_address", &self.end_address)
+ .field("saved_args_size", &self.saved_args_size)
+ .field("cfa", &self.cfa)
+ .field("registers", &self.registers)
+ .finish()
+ }
+}
+
+impl<R: Reader, S: UnwindContextStorage<R>> Clone for UnwindTableRow<R, S> {
+ fn clone(&self) -> Self {
+ Self {
+ start_address: self.start_address,
+ end_address: self.end_address,
+ saved_args_size: self.saved_args_size,
+ cfa: self.cfa.clone(),
+ registers: self.registers.clone(),
+ }
+ }
+}
+
+impl<R: Reader, S: UnwindContextStorage<R>> Default for UnwindTableRow<R, S> {
+ fn default() -> Self {
+ UnwindTableRow {
+ start_address: 0,
+ end_address: 0,
+ saved_args_size: 0,
+ cfa: Default::default(),
+ registers: Default::default(),
+ }
+ }
+}
+
+impl<R: Reader, S: UnwindContextStorage<R>> UnwindTableRow<R, S> {
+ fn is_default(&self) -> bool {
+ self.start_address == 0
+ && self.end_address == 0
+ && self.cfa.is_default()
+ && self.registers.is_default()
+ }
+
+ /// Get the starting PC address that this row applies to.
+ pub fn start_address(&self) -> u64 {
+ self.start_address
+ }
+
+ /// Get the end PC address where this row's register rules no longer
+ /// apply.
+ ///
+ /// In other words, this row describes how to recover the last frame's
+ /// registers for all PCs where `row.start_address() <= PC <
+ /// row.end_address()`. This row does NOT describe how to recover registers
+ /// when `PC == row.end_address()`.
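+ ///
+ /// A minimal sketch of the half-open range check (assuming an `EndianSlice`
+ /// reader, as in the other examples in this module):
+ ///
+ /// ```
+ /// # use gimli::{EndianSlice, LittleEndian, UnwindTableRow};
+ /// # fn foo<'input>(row: UnwindTableRow<EndianSlice<'input, LittleEndian>>) {
+ /// // `end_address` itself is never part of the row's range.
+ /// assert!(!row.contains(row.end_address()));
+ /// # }
+ /// ```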
+ pub fn end_address(&self) -> u64 {
+ self.end_address
+ }
+
+ /// Return `true` if the given `address` is within this row's address range,
+ /// `false` otherwise.
+ pub fn contains(&self, address: u64) -> bool {
+ self.start_address <= address && address < self.end_address
+ }
+
+ /// Returns the total size of the arguments currently pushed onto the stack.
+ ///
+ /// When unwinding, if the personality function requested a change in IP,
+ /// the SP needs to be adjusted by saved_args_size.
+ pub fn saved_args_size(&self) -> u64 {
+ self.saved_args_size
+ }
+
+ /// Get the canonical frame address (CFA) recovery rule for this row.
+ pub fn cfa(&self) -> &CfaRule<R> {
+ &self.cfa
+ }
+
+ /// Get the register recovery rule for the given register number.
+ ///
+ /// The register number mapping is architecture dependent. For example, in
+ /// the x86-64 ABI the register number mapping is defined in Figure 3.36:
+ ///
+ /// > Figure 3.36: DWARF Register Number Mapping
+ /// >
+ /// > <table>
+ /// > <tr><th>Register Name</th> <th>Number</th> <th>Abbreviation</th></tr>
+ /// > <tr><td>General Purpose Register RAX</td> <td>0</td> <td>%rax</td></tr>
+ /// > <tr><td>General Purpose Register RDX</td> <td>1</td> <td>%rdx</td></tr>
+ /// > <tr><td>General Purpose Register RCX</td> <td>2</td> <td>%rcx</td></tr>
+ /// > <tr><td>General Purpose Register RBX</td> <td>3</td> <td>%rbx</td></tr>
+ /// > <tr><td>General Purpose Register RSI</td> <td>4</td> <td>%rsi</td></tr>
+ /// > <tr><td>General Purpose Register RDI</td> <td>5</td> <td>%rdi</td></tr>
+ /// > <tr><td>General Purpose Register RBP</td> <td>6</td> <td>%rbp</td></tr>
+ /// > <tr><td>Stack Pointer Register RSP</td> <td>7</td> <td>%rsp</td></tr>
+ /// > <tr><td>Extended Integer Registers 8-15</td> <td>8-15</td> <td>%r8-%r15</td></tr>
+ /// > <tr><td>Return Address RA</td> <td>16</td> <td></td></tr>
+ /// > <tr><td>Vector Registers 0–7</td> <td>17-24</td> <td>%xmm0–%xmm7</td></tr>
+ /// > <tr><td>Extended Vector Registers 8–15</td> <td>25-32</td> <td>%xmm8–%xmm15</td></tr>
+ /// > <tr><td>Floating Point Registers 0–7</td> <td>33-40</td> <td>%st0–%st7</td></tr>
+ /// > <tr><td>MMX Registers 0–7</td> <td>41-48</td> <td>%mm0–%mm7</td></tr>
+ /// > <tr><td>Flag Register</td> <td>49</td> <td>%rFLAGS</td></tr>
+ /// > <tr><td>Segment Register ES</td> <td>50</td> <td>%es</td></tr>
+ /// > <tr><td>Segment Register CS</td> <td>51</td> <td>%cs</td></tr>
+ /// > <tr><td>Segment Register SS</td> <td>52</td> <td>%ss</td></tr>
+ /// > <tr><td>Segment Register DS</td> <td>53</td> <td>%ds</td></tr>
+ /// > <tr><td>Segment Register FS</td> <td>54</td> <td>%fs</td></tr>
+ /// > <tr><td>Segment Register GS</td> <td>55</td> <td>%gs</td></tr>
+ /// > <tr><td>Reserved</td> <td>56-57</td> <td></td></tr>
+ /// > <tr><td>FS Base address</td> <td>58</td> <td>%fs.base</td></tr>
+ /// > <tr><td>GS Base address</td> <td>59</td> <td>%gs.base</td></tr>
+ /// > <tr><td>Reserved</td> <td>60-61</td> <td></td></tr>
+ /// > <tr><td>Task Register</td> <td>62</td> <td>%tr</td></tr>
+ /// > <tr><td>LDT Register</td> <td>63</td> <td>%ldtr</td></tr>
+ /// > <tr><td>128-bit Media Control and Status</td> <td>64</td> <td>%mxcsr</td></tr>
+ /// > <tr><td>x87 Control Word</td> <td>65</td> <td>%fcw</td></tr>
+ /// > <tr><td>x87 Status Word</td> <td>66</td> <td>%fsw</td></tr>
+ /// > <tr><td>Upper Vector Registers 16–31</td> <td>67-82</td> <td>%xmm16–%xmm31</td></tr>
+ /// > <tr><td>Reserved</td> <td>83-117</td> <td></td></tr>
+ /// > <tr><td>Vector Mask Registers 0–7</td> <td>118-125</td> <td>%k0–%k7</td></tr>
+ /// > <tr><td>Reserved</td> <td>126-129</td> <td></td></tr>
+ /// > </table>
+ pub fn register(&self, register: Register) -> RegisterRule<R> {
+ self.registers.get(register)
+ }
+
+ /// Iterate over all defined register `(number, rule)` pairs.
+ ///
+ /// The rules are not iterated in any guaranteed order. Any register that
+ /// does not make an appearance in the iterator implicitly has the rule
+ /// `RegisterRule::Undefined`.
+ ///
+ /// ```
+ /// # use gimli::{EndianSlice, LittleEndian, UnwindTableRow};
+ /// # fn foo<'input>(unwind_table_row: UnwindTableRow<EndianSlice<'input, LittleEndian>>) {
+ /// for &(register, ref rule) in unwind_table_row.registers() {
+ /// // ...
+ /// # drop(register); drop(rule);
+ /// }
+ /// # }
+ /// ```
+ pub fn registers(&self) -> RegisterRuleIter<R> {
+ self.registers.iter()
+ }
+}
+
+/// The canonical frame address (CFA) recovery rules.
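+///
+/// A minimal sketch of consuming a rule (`register_value` below is a
+/// hypothetical helper for reading a register in the current frame, not part
+/// of this crate):
+///
+/// ```
+/// # use gimli::{CfaRule, EndianSlice, LittleEndian, Register};
+/// # fn register_value(_register: Register) -> u64 { 0 }
+/// # fn compute_cfa<'input>(rule: &CfaRule<EndianSlice<'input, LittleEndian>>) -> Option<u64> {
+/// match rule {
+///     CfaRule::RegisterAndOffset { register, offset } => {
+///         // CFA = value of `register` in this frame, plus `offset`.
+///         Some(register_value(*register).wrapping_add(*offset as u64))
+///     }
+///     // Evaluating the DWARF expression is out of scope for this sketch.
+///     CfaRule::Expression(_) => None,
+/// }
+/// # }
+/// ```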
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum CfaRule<R: Reader> {
+ /// The CFA is given offset from the given register's value.
+ RegisterAndOffset {
+ /// The register containing the base value.
+ register: Register,
+ /// The offset from the register's base value.
+ offset: i64,
+ },
+ /// The CFA is obtained by evaluating this `Reader` as a DWARF expression
+ /// program.
+ Expression(Expression<R>),
+}
+
+impl<R: Reader> Default for CfaRule<R> {
+ fn default() -> Self {
+ CfaRule::RegisterAndOffset {
+ register: Register(0),
+ offset: 0,
+ }
+ }
+}
+
+impl<R: Reader> CfaRule<R> {
+ fn is_default(&self) -> bool {
+ match *self {
+ CfaRule::RegisterAndOffset { register, offset } => {
+ register == Register(0) && offset == 0
+ }
+ _ => false,
+ }
+ }
+}
+
+/// An entry in the abstract CFI table that describes how to find the value of a
+/// register.
+///
+/// "The register columns contain rules that describe whether a given register
+/// has been saved and the rule to find the value for the register in the
+/// previous frame."
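+///
+/// A minimal sketch of resolving the `Offset` rule, given an already-computed
+/// CFA (the other rules are omitted here):
+///
+/// ```
+/// # use gimli::{EndianSlice, LittleEndian, RegisterRule};
+/// # fn saved_value_address<'input>(
+/// #     rule: &RegisterRule<EndianSlice<'input, LittleEndian>>,
+/// #     cfa: u64,
+/// # ) -> Option<u64> {
+/// match rule {
+///     // The previous value was saved in memory at `CFA + N`; a real
+///     // unwinder would load from this address.
+///     RegisterRule::Offset(n) => Some(cfa.wrapping_add(*n as u64)),
+///     // Other rules yield a value directly, name another register, or
+///     // require evaluating a DWARF expression.
+///     _ => None,
+/// }
+/// # }
+/// ```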
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum RegisterRule<R: Reader> {
+ /// > A register that has this rule has no recoverable value in the previous
+ /// > frame. (By convention, it is not preserved by a callee.)
+ Undefined,
+
+ /// > This register has not been modified from the previous frame. (By
+ /// > convention, it is preserved by the callee, but the callee has not
+ /// > modified it.)
+ SameValue,
+
+ /// "The previous value of this register is saved at the address CFA+N where
+ /// CFA is the current CFA value and N is a signed offset."
+ Offset(i64),
+
+ /// "The previous value of this register is the value CFA+N where CFA is the
+ /// current CFA value and N is a signed offset."
+ ValOffset(i64),
+
+ /// "The previous value of this register is stored in another register
+ /// numbered R."
+ Register(Register),
+
+ /// "The previous value of this register is located at the address produced
+ /// by executing the DWARF expression."
+ Expression(Expression<R>),
+
+ /// "The previous value of this register is the value produced by executing
+ /// the DWARF expression."
+ ValExpression(Expression<R>),
+
+ /// "The rule is defined externally to this specification by the augmenter."
+ Architectural,
+}
+
+impl<R: Reader> RegisterRule<R> {
+ fn is_defined(&self) -> bool {
+ match *self {
+ RegisterRule::Undefined => false,
+ _ => true,
+ }
+ }
+}
+
+/// A parsed call frame instruction.
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum CallFrameInstruction<R: Reader> {
+ // 6.4.2.1 Row Creation Methods
+ /// > 1. DW_CFA_set_loc
+ /// >
+ /// > The DW_CFA_set_loc instruction takes a single operand that represents
+ /// > a target address. The required action is to create a new table row
+ /// > using the specified address as the location. All other values in the
+ /// > new row are initially identical to the current row. The new location
+ /// > value is always greater than the current one. If the segment_size
+ /// > field of this FDE's CIE is non-zero, the initial location is preceded
+ /// > by a segment selector of the given length.
+ SetLoc {
+ /// The target address.
+ address: u64,
+ },
+
+ /// The `AdvanceLoc` instruction is used for all of `DW_CFA_advance_loc` and
+ /// `DW_CFA_advance_loc{1,2,4}`.
+ ///
+ /// > 2. DW_CFA_advance_loc
+ /// >
+ /// > The DW_CFA_advance_loc instruction takes a single operand (encoded with
+ /// > the opcode) that represents a constant delta. The required action is
+ /// > to create a new table row with a location value that is computed by
+ /// > taking the current entry’s location value and adding the value of
+ /// > delta * code_alignment_factor. All other values in the new row are
+ /// > initially identical to the current row.
+ AdvanceLoc {
+ /// The delta to be added to the current address.
+ delta: u32,
+ },
+
+ // 6.4.2.2 CFA Definition Methods
+ /// > 1. DW_CFA_def_cfa
+ /// >
+ /// > The DW_CFA_def_cfa instruction takes two unsigned LEB128 operands
+ /// > representing a register number and a (non-factored) offset. The
+ /// > required action is to define the current CFA rule to use the provided
+ /// > register and offset.
+ DefCfa {
+ /// The target register's number.
+ register: Register,
+ /// The non-factored offset.
+ offset: u64,
+ },
+
+ /// > 2. DW_CFA_def_cfa_sf
+ /// >
+ /// > The DW_CFA_def_cfa_sf instruction takes two operands: an unsigned
+ /// > LEB128 value representing a register number and a signed LEB128
+ /// > factored offset. This instruction is identical to DW_CFA_def_cfa
+ /// > except that the second operand is signed and factored. The resulting
+ /// > offset is factored_offset * data_alignment_factor.
+ DefCfaSf {
+ /// The target register's number.
+ register: Register,
+ /// The factored offset.
+ factored_offset: i64,
+ },
+
+ /// > 3. DW_CFA_def_cfa_register
+ /// >
+ /// > The DW_CFA_def_cfa_register instruction takes a single unsigned LEB128
+ /// > operand representing a register number. The required action is to
+ /// > define the current CFA rule to use the provided register (but to keep
+ /// > the old offset). This operation is valid only if the current CFA rule
+ /// > is defined to use a register and offset.
+ DefCfaRegister {
+ /// The target register's number.
+ register: Register,
+ },
+
+ /// > 4. DW_CFA_def_cfa_offset
+ /// >
+ /// > The DW_CFA_def_cfa_offset instruction takes a single unsigned LEB128
+ /// > operand representing a (non-factored) offset. The required action is
+ /// > to define the current CFA rule to use the provided offset (but to keep
+ /// > the old register). This operation is valid only if the current CFA
+ /// > rule is defined to use a register and offset.
+ DefCfaOffset {
+ /// The non-factored offset.
+ offset: u64,
+ },
+
+ /// > 5. DW_CFA_def_cfa_offset_sf
+ /// >
+ /// > The DW_CFA_def_cfa_offset_sf instruction takes a signed LEB128 operand
+ /// > representing a factored offset. This instruction is identical to
+ /// > DW_CFA_def_cfa_offset except that the operand is signed and
+ /// > factored. The resulting offset is factored_offset *
+ /// > data_alignment_factor. This operation is valid only if the current CFA
+ /// > rule is defined to use a register and offset.
+ DefCfaOffsetSf {
+ /// The factored offset.
+ factored_offset: i64,
+ },
+
+ /// > 6. DW_CFA_def_cfa_expression
+ /// >
+ /// > The DW_CFA_def_cfa_expression instruction takes a single operand
+ /// > encoded as a DW_FORM_exprloc value representing a DWARF
+ /// > expression. The required action is to establish that expression as the
+ /// > means by which the current CFA is computed.
+ DefCfaExpression {
+ /// The DWARF expression.
+ expression: Expression<R>,
+ },
+
+ // 6.4.2.3 Register Rule Instructions
+ /// > 1. DW_CFA_undefined
+ /// >
+ /// > The DW_CFA_undefined instruction takes a single unsigned LEB128
+ /// > operand that represents a register number. The required action is to
+ /// > set the rule for the specified register to “undefined.”
+ Undefined {
+ /// The target register's number.
+ register: Register,
+ },
+
+ /// > 2. DW_CFA_same_value
+ /// >
+ /// > The DW_CFA_same_value instruction takes a single unsigned LEB128
+ /// > operand that represents a register number. The required action is to
+ /// > set the rule for the specified register to “same value.”
+ SameValue {
+ /// The target register's number.
+ register: Register,
+ },
+
+ /// The `Offset` instruction represents both `DW_CFA_offset` and
+ /// `DW_CFA_offset_extended`.
+ ///
+ /// > 3. DW_CFA_offset
+ /// >
+ /// > The DW_CFA_offset instruction takes two operands: a register number
+ /// > (encoded with the opcode) and an unsigned LEB128 constant representing
+ /// > a factored offset. The required action is to change the rule for the
+ /// > register indicated by the register number to be an offset(N) rule
+ /// > where the value of N is factored offset * data_alignment_factor.
+ Offset {
+ /// The target register's number.
+ register: Register,
+ /// The factored offset.
+ factored_offset: u64,
+ },
+
+ /// > 5. DW_CFA_offset_extended_sf
+ /// >
+ /// > The DW_CFA_offset_extended_sf instruction takes two operands: an
+ /// > unsigned LEB128 value representing a register number and a signed
+ /// > LEB128 factored offset. This instruction is identical to
+ /// > DW_CFA_offset_extended except that the second operand is signed and
+ /// > factored. The resulting offset is factored_offset *
+ /// > data_alignment_factor.
+ OffsetExtendedSf {
+ /// The target register's number.
+ register: Register,
+ /// The factored offset.
+ factored_offset: i64,
+ },
+
+ /// > 6. DW_CFA_val_offset
+ /// >
+ /// > The DW_CFA_val_offset instruction takes two unsigned LEB128 operands
+ /// > representing a register number and a factored offset. The required
+ /// > action is to change the rule for the register indicated by the
+ /// > register number to be a val_offset(N) rule where the value of N is
+ /// > factored_offset * data_alignment_factor.
+ ValOffset {
+ /// The target register's number.
+ register: Register,
+ /// The factored offset.
+ factored_offset: u64,
+ },
+
+ /// > 7. DW_CFA_val_offset_sf
+ /// >
+ /// > The DW_CFA_val_offset_sf instruction takes two operands: an unsigned
+ /// > LEB128 value representing a register number and a signed LEB128
+ /// > factored offset. This instruction is identical to DW_CFA_val_offset
+ /// > except that the second operand is signed and factored. The resulting
+ /// > offset is factored_offset * data_alignment_factor.
+ ValOffsetSf {
+ /// The target register's number.
+ register: Register,
+ /// The factored offset.
+ factored_offset: i64,
+ },
+
+ /// > 8. DW_CFA_register
+ /// >
+ /// > The DW_CFA_register instruction takes two unsigned LEB128 operands
+ /// > representing register numbers. The required action is to set the rule
+ /// > for the first register to be register(R) where R is the second
+ /// > register.
+ Register {
+ /// The number of the register whose rule is being changed.
+ dest_register: Register,
+ /// The number of the register where the other register's value can be
+ /// found.
+ src_register: Register,
+ },
+
+ /// > 9. DW_CFA_expression
+ /// >
+ /// > The DW_CFA_expression instruction takes two operands: an unsigned
+ /// > LEB128 value representing a register number, and a DW_FORM_block value
+ /// > representing a DWARF expression. The required action is to change the
+ /// > rule for the register indicated by the register number to be an
+ /// > expression(E) rule where E is the DWARF expression. That is, the DWARF
+ /// > expression computes the address. The value of the CFA is pushed on the
+ /// > DWARF evaluation stack prior to execution of the DWARF expression.
+ Expression {
+ /// The target register's number.
+ register: Register,
+ /// The DWARF expression.
+ expression: Expression<R>,
+ },
+
+ /// > 10. DW_CFA_val_expression
+ /// >
+ /// > The DW_CFA_val_expression instruction takes two operands: an unsigned
+ /// > LEB128 value representing a register number, and a DW_FORM_block value
+ /// > representing a DWARF expression. The required action is to change the
+ /// > rule for the register indicated by the register number to be a
+ /// > val_expression(E) rule where E is the DWARF expression. That is, the
+ /// > DWARF expression computes the value of the given register. The value
+ /// > of the CFA is pushed on the DWARF evaluation stack prior to execution
+ /// > of the DWARF expression.
+ ValExpression {
+ /// The target register's number.
+ register: Register,
+ /// The DWARF expression.
+ expression: Expression<R>,
+ },
+
+ /// The `Restore` instruction represents both `DW_CFA_restore` and
+ /// `DW_CFA_restore_extended`.
+ ///
+ /// > 11. DW_CFA_restore
+ /// >
+ /// > The DW_CFA_restore instruction takes a single operand (encoded with
+ /// > the opcode) that represents a register number. The required action is
+ /// > to change the rule for the indicated register to the rule assigned it
+ /// > by the initial_instructions in the CIE.
+ Restore {
+ /// The register to be reset.
+ register: Register,
+ },
+
+ // 6.4.2.4 Row State Instructions
+ /// > 1. DW_CFA_remember_state
+ /// >
+ /// > The DW_CFA_remember_state instruction takes no operands. The required
+ /// > action is to push the set of rules for every register onto an implicit
+ /// > stack.
+ RememberState,
+
+ /// > 2. DW_CFA_restore_state
+ /// >
+ /// > The DW_CFA_restore_state instruction takes no operands. The required
+ /// > action is to pop the set of rules off the implicit stack and place
+ /// > them in the current row.
+ RestoreState,
+
+ /// > DW_CFA_GNU_args_size
+ /// >
+ /// > GNU Extension
+ /// >
+ /// > The DW_CFA_GNU_args_size instruction takes an unsigned LEB128 operand
+ /// > representing an argument size. This instruction specifies the total of
+ /// > the size of the arguments which have been pushed onto the stack.
+ ArgsSize {
+ /// The size of the arguments which have been pushed onto the stack
+ size: u64,
+ },
+
+ // 6.4.2.5 Padding Instruction
+ /// > 1. DW_CFA_nop
+ /// >
+ /// > The DW_CFA_nop instruction has no operands and no required actions. It
+ /// > is used as padding to make a CIE or FDE an appropriate size.
+ Nop,
+}
+
+const CFI_INSTRUCTION_HIGH_BITS_MASK: u8 = 0b1100_0000;
+const CFI_INSTRUCTION_LOW_BITS_MASK: u8 = !CFI_INSTRUCTION_HIGH_BITS_MASK;
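+
+// For example (a sketch of the standard primary-opcode encoding, not specific
+// to any producer): `DW_CFA_advance_loc` with a delta of 4 is emitted as the
+// single byte `0x40 | 0x04 == 0x44`; `parse` below recovers the primary
+// opcode (`0x40`) with the high-bits mask and the embedded operand (`4`) with
+// the low-bits mask.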
+
+impl<R: Reader> CallFrameInstruction<R> {
+ fn parse(
+ input: &mut R,
+ address_encoding: Option<DwEhPe>,
+ parameters: &PointerEncodingParameters<R>,
+ ) -> Result<CallFrameInstruction<R>> {
+ let instruction = input.read_u8()?;
+ let high_bits = instruction & CFI_INSTRUCTION_HIGH_BITS_MASK;
+
+ if high_bits == constants::DW_CFA_advance_loc.0 {
+ let delta = instruction & CFI_INSTRUCTION_LOW_BITS_MASK;
+ return Ok(CallFrameInstruction::AdvanceLoc {
+ delta: u32::from(delta),
+ });
+ }
+
+ if high_bits == constants::DW_CFA_offset.0 {
+ let register = Register((instruction & CFI_INSTRUCTION_LOW_BITS_MASK).into());
+ let offset = input.read_uleb128()?;
+ return Ok(CallFrameInstruction::Offset {
+ register,
+ factored_offset: offset,
+ });
+ }
+
+ if high_bits == constants::DW_CFA_restore.0 {
+ let register = Register((instruction & CFI_INSTRUCTION_LOW_BITS_MASK).into());
+ return Ok(CallFrameInstruction::Restore { register });
+ }
+
+ debug_assert_eq!(high_bits, 0);
+ let instruction = constants::DwCfa(instruction);
+
+ match instruction {
+ constants::DW_CFA_nop => Ok(CallFrameInstruction::Nop),
+
+ constants::DW_CFA_set_loc => {
+ let address = if let Some(encoding) = address_encoding {
+ match parse_encoded_pointer(encoding, parameters, input)? {
+ Pointer::Direct(x) => x,
+ _ => return Err(Error::UnsupportedPointerEncoding),
+ }
+ } else {
+ input.read_address(parameters.address_size)?
+ };
+ Ok(CallFrameInstruction::SetLoc { address })
+ }
+
+ constants::DW_CFA_advance_loc1 => {
+ let delta = input.read_u8()?;
+ Ok(CallFrameInstruction::AdvanceLoc {
+ delta: u32::from(delta),
+ })
+ }
+
+ constants::DW_CFA_advance_loc2 => {
+ let delta = input.read_u16()?;
+ Ok(CallFrameInstruction::AdvanceLoc {
+ delta: u32::from(delta),
+ })
+ }
+
+ constants::DW_CFA_advance_loc4 => {
+ let delta = input.read_u32()?;
+ Ok(CallFrameInstruction::AdvanceLoc { delta })
+ }
+
+ constants::DW_CFA_offset_extended => {
+ let register = input.read_uleb128().and_then(Register::from_u64)?;
+ let offset = input.read_uleb128()?;
+ Ok(CallFrameInstruction::Offset {
+ register,
+ factored_offset: offset,
+ })
+ }
+
+ constants::DW_CFA_restore_extended => {
+ let register = input.read_uleb128().and_then(Register::from_u64)?;
+ Ok(CallFrameInstruction::Restore { register })
+ }
+
+ constants::DW_CFA_undefined => {
+ let register = input.read_uleb128().and_then(Register::from_u64)?;
+ Ok(CallFrameInstruction::Undefined { register })
+ }
+
+ constants::DW_CFA_same_value => {
+ let register = input.read_uleb128().and_then(Register::from_u64)?;
+ Ok(CallFrameInstruction::SameValue { register })
+ }
+
+ constants::DW_CFA_register => {
+ let dest = input.read_uleb128().and_then(Register::from_u64)?;
+ let src = input.read_uleb128().and_then(Register::from_u64)?;
+ Ok(CallFrameInstruction::Register {
+ dest_register: dest,
+ src_register: src,
+ })
+ }
+
+ constants::DW_CFA_remember_state => Ok(CallFrameInstruction::RememberState),
+
+ constants::DW_CFA_restore_state => Ok(CallFrameInstruction::RestoreState),
+
+ constants::DW_CFA_def_cfa => {
+ let register = input.read_uleb128().and_then(Register::from_u64)?;
+ let offset = input.read_uleb128()?;
+ Ok(CallFrameInstruction::DefCfa { register, offset })
+ }
+
+ constants::DW_CFA_def_cfa_register => {
+ let register = input.read_uleb128().and_then(Register::from_u64)?;
+ Ok(CallFrameInstruction::DefCfaRegister { register })
+ }
+
+ constants::DW_CFA_def_cfa_offset => {
+ let offset = input.read_uleb128()?;
+ Ok(CallFrameInstruction::DefCfaOffset { offset })
+ }
+
+ constants::DW_CFA_def_cfa_expression => {
+ let len = input.read_uleb128().and_then(R::Offset::from_u64)?;
+ let expression = input.split(len)?;
+ Ok(CallFrameInstruction::DefCfaExpression {
+ expression: Expression(expression),
+ })
+ }
+
+ constants::DW_CFA_expression => {
+ let register = input.read_uleb128().and_then(Register::from_u64)?;
+ let len = input.read_uleb128().and_then(R::Offset::from_u64)?;
+ let expression = input.split(len)?;
+ Ok(CallFrameInstruction::Expression {
+ register,
+ expression: Expression(expression),
+ })
+ }
+
+ constants::DW_CFA_offset_extended_sf => {
+ let register = input.read_uleb128().and_then(Register::from_u64)?;
+ let offset = input.read_sleb128()?;
+ Ok(CallFrameInstruction::OffsetExtendedSf {
+ register,
+ factored_offset: offset,
+ })
+ }
+
+ constants::DW_CFA_def_cfa_sf => {
+ let register = input.read_uleb128().and_then(Register::from_u64)?;
+ let offset = input.read_sleb128()?;
+ Ok(CallFrameInstruction::DefCfaSf {
+ register,
+ factored_offset: offset,
+ })
+ }
+
+ constants::DW_CFA_def_cfa_offset_sf => {
+ let offset = input.read_sleb128()?;
+ Ok(CallFrameInstruction::DefCfaOffsetSf {
+ factored_offset: offset,
+ })
+ }
+
+ constants::DW_CFA_val_offset => {
+ let register = input.read_uleb128().and_then(Register::from_u64)?;
+ let offset = input.read_uleb128()?;
+ Ok(CallFrameInstruction::ValOffset {
+ register,
+ factored_offset: offset,
+ })
+ }
+
+ constants::DW_CFA_val_offset_sf => {
+ let register = input.read_uleb128().and_then(Register::from_u64)?;
+ let offset = input.read_sleb128()?;
+ Ok(CallFrameInstruction::ValOffsetSf {
+ register,
+ factored_offset: offset,
+ })
+ }
+
+ constants::DW_CFA_val_expression => {
+ let register = input.read_uleb128().and_then(Register::from_u64)?;
+ let len = input.read_uleb128().and_then(R::Offset::from_u64)?;
+ let expression = input.split(len)?;
+ Ok(CallFrameInstruction::ValExpression {
+ register,
+ expression: Expression(expression),
+ })
+ }
+
+ constants::DW_CFA_GNU_args_size => {
+ let size = input.read_uleb128()?;
+ Ok(CallFrameInstruction::ArgsSize { size })
+ }
+
+ otherwise => Err(Error::UnknownCallFrameInstruction(otherwise)),
+ }
+ }
+}
+
+/// A lazy iterator parsing call frame instructions.
+///
+/// Can be [used with
+/// `FallibleIterator`](./index.html#using-with-fallibleiterator).
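+///
+/// A minimal sketch of draining the iterator (assuming it was already obtained
+/// from a CIE's or FDE's instructions):
+///
+/// ```
+/// # use gimli::{CallFrameInstructionIter, EndianSlice, LittleEndian};
+/// # fn foo<'a, 'input>(
+/// #     mut insns: CallFrameInstructionIter<'a, EndianSlice<'input, LittleEndian>>,
+/// # ) -> gimli::Result<()> {
+/// while let Some(instruction) = insns.next()? {
+///     // Inspect or apply `instruction` here.
+/// #   drop(instruction);
+/// }
+/// # Ok(())
+/// # }
+/// ```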
+#[derive(Clone, Debug)]
+pub struct CallFrameInstructionIter<'a, R: Reader> {
+ input: R,
+ address_encoding: Option<constants::DwEhPe>,
+ parameters: PointerEncodingParameters<'a, R>,
+}
+
+impl<'a, R: Reader> CallFrameInstructionIter<'a, R> {
+ /// Parse the next call frame instruction.
+ pub fn next(&mut self) -> Result<Option<CallFrameInstruction<R>>> {
+ if self.input.is_empty() {
+ return Ok(None);
+ }
+
+ match CallFrameInstruction::parse(&mut self.input, self.address_encoding, &self.parameters)
+ {
+ Ok(instruction) => Ok(Some(instruction)),
+ Err(e) => {
+ self.input.empty();
+ Err(e)
+ }
+ }
+ }
+}
+
+#[cfg(feature = "fallible-iterator")]
+impl<'a, R: Reader> fallible_iterator::FallibleIterator for CallFrameInstructionIter<'a, R> {
+ type Item = CallFrameInstruction<R>;
+ type Error = Error;
+
+ fn next(&mut self) -> ::core::result::Result<Option<Self::Item>, Self::Error> {
+ CallFrameInstructionIter::next(self)
+ }
+}
+
+/// Parse a `DW_EH_PE_*` pointer encoding.
+#[doc(hidden)]
+#[inline]
+fn parse_pointer_encoding<R: Reader>(input: &mut R) -> Result<constants::DwEhPe> {
+ let eh_pe = input.read_u8()?;
+ let eh_pe = constants::DwEhPe(eh_pe);
+
+ if eh_pe.is_valid_encoding() {
+ Ok(eh_pe)
+ } else {
+ Err(Error::UnknownPointerEncoding)
+ }
+}
+
+/// A decoded pointer.
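+///
+/// A minimal sketch of how a consumer might handle the two cases
+/// (`read_u64_at` is a hypothetical memory accessor, not part of this crate):
+///
+/// ```
+/// # use gimli::Pointer;
+/// # fn read_u64_at(_address: u64) -> u64 { 0 }
+/// fn resolve(pointer: Pointer) -> u64 {
+///     match pointer {
+///         // Already the final value.
+///         Pointer::Direct(value) => value,
+///         // The real value must be loaded from this address first.
+///         Pointer::Indirect(address) => read_u64_at(address),
+///     }
+/// }
+/// # let _ = resolve(Pointer::Direct(0));
+/// ```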
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum Pointer {
+ /// This value is the decoded pointer value.
+ Direct(u64),
+
+ /// This value is *not* the pointer value, but points to the address of
+ /// where the real pointer value lives. In other words, deref this pointer
+ /// to get the real pointer value.
+ ///
+ /// Chase this pointer at your own risk: do you trust the DWARF data it came
+ /// from?
+ Indirect(u64),
+}
+
+impl Default for Pointer {
+ #[inline]
+ fn default() -> Self {
+ Pointer::Direct(0)
+ }
+}
+
+impl From<Pointer> for u64 {
+ #[inline]
+ fn from(p: Pointer) -> u64 {
+ match p {
+ Pointer::Direct(p) | Pointer::Indirect(p) => p,
+ }
+ }
+}
+
+impl Pointer {
+ #[inline]
+ fn new(encoding: constants::DwEhPe, address: u64) -> Pointer {
+ if encoding.is_indirect() {
+ Pointer::Indirect(address)
+ } else {
+ Pointer::Direct(address)
+ }
+ }
+}
+
+#[derive(Clone, Debug)]
+struct PointerEncodingParameters<'a, R: Reader> {
+ bases: &'a SectionBaseAddresses,
+ func_base: Option<u64>,
+ address_size: u8,
+ section: &'a R,
+}
+
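+// Worked example (a sketch, not tied to any particular binary): for an
+// encoding of `DW_EH_PE_pcrel | DW_EH_PE_sdata4`, the base is the section's
+// base address plus the reader's current offset into the section, and the
+// offset is a sign-extended 4-byte value read from the input; the decoded
+// pointer is the wrapping sum of the two.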
+fn parse_encoded_pointer<R: Reader>(
+ encoding: constants::DwEhPe,
+ parameters: &PointerEncodingParameters<R>,
+ input: &mut R,
+) -> Result<Pointer> {
+ // TODO: check this once only in parse_pointer_encoding
+ if !encoding.is_valid_encoding() {
+ return Err(Error::UnknownPointerEncoding);
+ }
+
+ if encoding == constants::DW_EH_PE_omit {
+ return Err(Error::CannotParseOmitPointerEncoding);
+ }
+
+ let base = match encoding.application() {
+ constants::DW_EH_PE_absptr => 0,
+ constants::DW_EH_PE_pcrel => {
+ if let Some(section_base) = parameters.bases.section {
+ let offset_from_section = input.offset_from(parameters.section);
+ section_base.wrapping_add(offset_from_section.into_u64())
+ } else {
+ return Err(Error::PcRelativePointerButSectionBaseIsUndefined);
+ }
+ }
+ constants::DW_EH_PE_textrel => {
+ if let Some(text) = parameters.bases.text {
+ text
+ } else {
+ return Err(Error::TextRelativePointerButTextBaseIsUndefined);
+ }
+ }
+ constants::DW_EH_PE_datarel => {
+ if let Some(data) = parameters.bases.data {
+ data
+ } else {
+ return Err(Error::DataRelativePointerButDataBaseIsUndefined);
+ }
+ }
+ constants::DW_EH_PE_funcrel => {
+ if let Some(func) = parameters.func_base {
+ func
+ } else {
+ return Err(Error::FuncRelativePointerInBadContext);
+ }
+ }
+ constants::DW_EH_PE_aligned => return Err(Error::UnsupportedPointerEncoding),
+ _ => unreachable!(),
+ };
+
+ let offset = match encoding.format() {
+ // Unsigned variants.
+ constants::DW_EH_PE_absptr => input.read_address(parameters.address_size),
+ constants::DW_EH_PE_uleb128 => input.read_uleb128(),
+ constants::DW_EH_PE_udata2 => input.read_u16().map(u64::from),
+ constants::DW_EH_PE_udata4 => input.read_u32().map(u64::from),
+ constants::DW_EH_PE_udata8 => input.read_u64(),
+
+ // Signed variants. Here we sign extend the values (happens by
+ // default when casting a signed integer to a larger range integer
+ // in Rust), return them as u64, and rely on wrapping addition to do
+ // the right thing when adding these offsets to their bases.
+ constants::DW_EH_PE_sleb128 => input.read_sleb128().map(|a| a as u64),
+ constants::DW_EH_PE_sdata2 => input.read_i16().map(|a| a as u64),
+ constants::DW_EH_PE_sdata4 => input.read_i32().map(|a| a as u64),
+ constants::DW_EH_PE_sdata8 => input.read_i64().map(|a| a as u64),
+
+ // That was all of the valid encoding formats.
+ _ => unreachable!(),
+ }?;
+
+ Ok(Pointer::new(encoding, base.wrapping_add(offset)))
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use super::{parse_cfi_entry, AugmentationData, RegisterRuleMap, UnwindContext};
+ use crate::common::Format;
+ use crate::constants;
+ use crate::endianity::{BigEndian, Endianity, LittleEndian, NativeEndian};
+ use crate::read::{
+ EndianSlice, Error, Expression, Pointer, ReaderOffsetId, Result, Section as ReadSection,
+ };
+ use crate::test_util::GimliSectionMethods;
+ use alloc::boxed::Box;
+ use alloc::vec::Vec;
+ use core::marker::PhantomData;
+ use core::mem;
+ use core::u64;
+ use test_assembler::{Endian, Label, LabelMaker, LabelOrNum, Section, ToLabelOrNum};
+
+ // Ensure each test tries to read the same section kind that it wrote.
+ #[derive(Clone, Copy)]
+ struct SectionKind<Section>(PhantomData<Section>);
+
+ impl<T> SectionKind<T> {
+ fn endian<'input, E>(self) -> Endian
+ where
+ E: Endianity,
+ T: UnwindSection<EndianSlice<'input, E>>,
+ T::Offset: UnwindOffset<usize>,
+ {
+ if E::default().is_big_endian() {
+ Endian::Big
+ } else {
+ Endian::Little
+ }
+ }
+
+ fn section<'input, E>(self, contents: &'input [u8]) -> T
+ where
+ E: Endianity,
+ T: UnwindSection<EndianSlice<'input, E>> + ReadSection<EndianSlice<'input, E>>,
+ T::Offset: UnwindOffset<usize>,
+ {
+ EndianSlice::new(contents, E::default()).into()
+ }
+ }
+
+ fn debug_frame_le<'a>() -> SectionKind<DebugFrame<EndianSlice<'a, LittleEndian>>> {
+ SectionKind(PhantomData)
+ }
+
+ fn debug_frame_be<'a>() -> SectionKind<DebugFrame<EndianSlice<'a, BigEndian>>> {
+ SectionKind(PhantomData)
+ }
+
+ fn eh_frame_le<'a>() -> SectionKind<EhFrame<EndianSlice<'a, LittleEndian>>> {
+ SectionKind(PhantomData)
+ }
+
+ fn parse_fde<Section, O, F, R>(
+ section: Section,
+ input: &mut R,
+ get_cie: F,
+ ) -> Result<FrameDescriptionEntry<R>>
+ where
+ R: Reader,
+ Section: UnwindSection<R, Offset = O>,
+ O: UnwindOffset<R::Offset>,
+ F: FnMut(&Section, &BaseAddresses, O) -> Result<CommonInformationEntry<R>>,
+ {
+ let bases = Default::default();
+ match parse_cfi_entry(&bases, &section, input) {
+ Ok(Some(CieOrFde::Fde(partial))) => partial.parse(get_cie),
+ Ok(_) => Err(Error::NoEntryAtGivenOffset),
+ Err(e) => Err(e),
+ }
+ }
+
+ // Mixin methods for `Section` to help define binary test data.
+
+ trait CfiSectionMethods: GimliSectionMethods {
+ fn cie<'aug, 'input, E, T>(
+ self,
+ _kind: SectionKind<T>,
+ augmentation: Option<&'aug str>,
+ cie: &mut CommonInformationEntry<EndianSlice<'input, E>>,
+ ) -> Self
+ where
+ E: Endianity,
+ T: UnwindSection<EndianSlice<'input, E>>,
+ T::Offset: UnwindOffset;
+ fn fde<'a, 'input, E, T, L>(
+ self,
+ _kind: SectionKind<T>,
+ cie_offset: L,
+ fde: &mut FrameDescriptionEntry<EndianSlice<'input, E>>,
+ ) -> Self
+ where
+ E: Endianity,
+ T: UnwindSection<EndianSlice<'input, E>>,
+ T::Offset: UnwindOffset,
+ L: ToLabelOrNum<'a, u64>;
+ }
+
+ impl CfiSectionMethods for Section {
+ fn cie<'aug, 'input, E, T>(
+ self,
+ _kind: SectionKind<T>,
+ augmentation: Option<&'aug str>,
+ cie: &mut CommonInformationEntry<EndianSlice<'input, E>>,
+ ) -> Self
+ where
+ E: Endianity,
+ T: UnwindSection<EndianSlice<'input, E>>,
+ T::Offset: UnwindOffset,
+ {
+ cie.offset = self.size() as _;
+ let length = Label::new();
+ let start = Label::new();
+ let end = Label::new();
+
+ let section = match cie.format {
+ Format::Dwarf32 => self.D32(&length).mark(&start).D32(0xffff_ffff),
+ Format::Dwarf64 => {
+ let section = self.D32(0xffff_ffff);
+ section.D64(&length).mark(&start).D64(0xffff_ffff_ffff_ffff)
+ }
+ };
+
+ let mut section = section.D8(cie.version);
+
+ if let Some(augmentation) = augmentation {
+ section = section.append_bytes(augmentation.as_bytes());
+ }
+
+ // Null terminator for augmentation string.
+ let section = section.D8(0);
+
+ let section = if T::has_address_and_segment_sizes(cie.version) {
+ section.D8(cie.address_size).D8(cie.segment_size)
+ } else {
+ section
+ };
+
+ let section = section
+ .uleb(cie.code_alignment_factor)
+ .sleb(cie.data_alignment_factor)
+ .uleb(cie.return_address_register.0.into())
+ .append_bytes(cie.initial_instructions.into())
+ .mark(&end);
+
+ cie.length = (&end - &start) as usize;
+ length.set_const(cie.length as u64);
+
+ section
+ }
+
+ fn fde<'a, 'input, E, T, L>(
+ self,
+ _kind: SectionKind<T>,
+ cie_offset: L,
+ fde: &mut FrameDescriptionEntry<EndianSlice<'input, E>>,
+ ) -> Self
+ where
+ E: Endianity,
+ T: UnwindSection<EndianSlice<'input, E>>,
+ T::Offset: UnwindOffset,
+ L: ToLabelOrNum<'a, u64>,
+ {
+ fde.offset = self.size() as _;
+ let length = Label::new();
+ let start = Label::new();
+ let end = Label::new();
+
+ assert_eq!(fde.format, fde.cie.format);
+
+ let section = match T::cie_offset_encoding(fde.format) {
+ CieOffsetEncoding::U32 => {
+ let section = self.D32(&length).mark(&start);
+ match cie_offset.to_labelornum() {
+ LabelOrNum::Label(ref l) => section.D32(l),
+ LabelOrNum::Num(o) => section.D32(o as u32),
+ }
+ }
+ CieOffsetEncoding::U64 => {
+ let section = self.D32(0xffff_ffff);
+ section.D64(&length).mark(&start).D64(cie_offset)
+ }
+ };
+
+ let section = match fde.cie.segment_size {
+ 0 => section,
+ 4 => section.D32(fde.initial_segment as u32),
+ 8 => section.D64(fde.initial_segment),
+ x => panic!("Unsupported test segment size: {}", x),
+ };
+
+ let section = match fde.cie.address_size {
+ 4 => section
+ .D32(fde.initial_address() as u32)
+ .D32(fde.len() as u32),
+ 8 => section.D64(fde.initial_address()).D64(fde.len()),
+ x => panic!("Unsupported address size: {}", x),
+ };
+
+ let section = if let Some(ref augmentation) = fde.augmentation {
+ let cie_aug = fde
+ .cie
+ .augmentation
+ .expect("FDE has augmentation, but CIE doesn't");
+
+ if let Some(lsda) = augmentation.lsda {
+ // We only support writing `DW_EH_PE_absptr` here.
+ assert_eq!(
+ cie_aug
+ .lsda
+ .expect("FDE has lsda, but CIE doesn't")
+ .format(),
+ constants::DW_EH_PE_absptr
+ );
+
+ // Augmentation data length
+ let section = section.uleb(u64::from(fde.cie.address_size));
+ match fde.cie.address_size {
+ 4 => section.D32({
+ let x: u64 = lsda.into();
+ x as u32
+ }),
+ 8 => section.D64({
+ let x: u64 = lsda.into();
+ x
+ }),
+ x => panic!("Unsupported address size: {}", x),
+ }
+ } else {
+ // Even if we don't have any augmentation data, if there is
+ // an augmentation defined, we need to put the length in.
+ section.uleb(0)
+ }
+ } else {
+ section
+ };
+
+ let section = section.append_bytes(fde.instructions.into()).mark(&end);
+
+ fde.length = (&end - &start) as usize;
+ length.set_const(fde.length as u64);
+
+ section
+ }
+ }
+
+ trait ResultExt {
+ fn map_eof(self, input: &[u8]) -> Self;
+ }
+
+ impl<T> ResultExt for Result<T> {
+ fn map_eof(self, input: &[u8]) -> Self {
+ match self {
+ Err(Error::UnexpectedEof(id)) => {
+ let id = ReaderOffsetId(id.0 - input.as_ptr() as u64);
+ Err(Error::UnexpectedEof(id))
+ }
+ r => r,
+ }
+ }
+ }
+
+ #[allow(clippy::type_complexity)]
+ #[allow(clippy::needless_pass_by_value)]
+ fn assert_parse_cie<'input, E>(
+ kind: SectionKind<DebugFrame<EndianSlice<'input, E>>>,
+ section: Section,
+ address_size: u8,
+ expected: Result<(
+ EndianSlice<'input, E>,
+ CommonInformationEntry<EndianSlice<'input, E>>,
+ )>,
+ ) where
+ E: Endianity,
+ {
+ let section = section.get_contents().unwrap();
+ let mut debug_frame = kind.section(&section);
+ debug_frame.set_address_size(address_size);
+ let input = &mut EndianSlice::new(&section, E::default());
+ let bases = Default::default();
+ let result = CommonInformationEntry::parse(&bases, &debug_frame, input);
+ let result = result.map(|cie| (*input, cie)).map_eof(&section);
+ assert_eq!(result, expected);
+ }
+
+ #[test]
+ fn test_parse_cie_incomplete_length_32() {
+ let kind = debug_frame_le();
+ let section = Section::with_endian(kind.endian()).L16(5);
+ assert_parse_cie(
+ kind,
+ section,
+ 8,
+ Err(Error::UnexpectedEof(ReaderOffsetId(0))),
+ );
+ }
+
+ #[test]
+ fn test_parse_cie_incomplete_length_64() {
+ let kind = debug_frame_le();
+ let section = Section::with_endian(kind.endian())
+ .L32(0xffff_ffff)
+ .L32(12345);
+ assert_parse_cie(
+ kind,
+ section,
+ 8,
+ Err(Error::UnexpectedEof(ReaderOffsetId(4))),
+ );
+ }
+
+ #[test]
+ fn test_parse_cie_incomplete_id_32() {
+ let kind = debug_frame_be();
+ let section = Section::with_endian(kind.endian())
+ // The length is not large enough to contain the ID.
+ .B32(3)
+ .B32(0xffff_ffff);
+ assert_parse_cie(
+ kind,
+ section,
+ 8,
+ Err(Error::UnexpectedEof(ReaderOffsetId(4))),
+ );
+ }
+
+ #[test]
+ fn test_parse_cie_bad_id_32() {
+ let kind = debug_frame_be();
+ let section = Section::with_endian(kind.endian())
+ // Initial length
+ .B32(4)
+ // Not the CIE Id.
+ .B32(0xbad1_bad2);
+ assert_parse_cie(kind, section, 8, Err(Error::NotCieId));
+ }
+
+ #[test]
+ fn test_parse_cie_32_bad_version() {
+ let mut cie = CommonInformationEntry {
+ offset: 0,
+ length: 0,
+ format: Format::Dwarf32,
+ version: 99,
+ augmentation: None,
+ address_size: 4,
+ segment_size: 0,
+ code_alignment_factor: 1,
+ data_alignment_factor: 2,
+ return_address_register: Register(3),
+ initial_instructions: EndianSlice::new(&[], LittleEndian),
+ };
+
+ let kind = debug_frame_le();
+ let section = Section::with_endian(kind.endian()).cie(kind, None, &mut cie);
+ assert_parse_cie(kind, section, 4, Err(Error::UnknownVersion(99)));
+ }
+
+ #[test]
+ fn test_parse_cie_unknown_augmentation() {
+ let length = Label::new();
+ let start = Label::new();
+ let end = Label::new();
+
+ let augmentation = Some("replicant");
+ let expected_rest = [1, 2, 3];
+
+ let kind = debug_frame_le();
+ let section = Section::with_endian(kind.endian())
+ // Initial length
+ .L32(&length)
+ .mark(&start)
+ // CIE Id
+ .L32(0xffff_ffff)
+ // Version
+ .D8(4)
+ // Augmentation
+ .append_bytes(augmentation.unwrap().as_bytes())
+ // Null terminator
+ .D8(0)
+ // Extra augmentation data that we can't understand.
+ .L32(1)
+ .L32(2)
+ .L32(3)
+ .L32(4)
+ .L32(5)
+ .L32(6)
+ .mark(&end)
+ .append_bytes(&expected_rest);
+
+ let expected_length = (&end - &start) as u64;
+ length.set_const(expected_length);
+
+ assert_parse_cie(kind, section, 8, Err(Error::UnknownAugmentation));
+ }
+
+ fn test_parse_cie(format: Format, version: u8, address_size: u8) {
+ let expected_rest = [1, 2, 3, 4, 5, 6, 7, 8, 9];
+ let expected_instrs: Vec<_> = (0..4).map(|_| constants::DW_CFA_nop.0).collect();
+
+ let mut cie = CommonInformationEntry {
+ offset: 0,
+ length: 0,
+ format,
+ version,
+ augmentation: None,
+ address_size,
+ segment_size: 0,
+ code_alignment_factor: 16,
+ data_alignment_factor: 32,
+ return_address_register: Register(1),
+ initial_instructions: EndianSlice::new(&expected_instrs, LittleEndian),
+ };
+
+ let kind = debug_frame_le();
+ let section = Section::with_endian(kind.endian())
+ .cie(kind, None, &mut cie)
+ .append_bytes(&expected_rest);
+
+ assert_parse_cie(
+ kind,
+ section,
+ address_size,
+ Ok((EndianSlice::new(&expected_rest, LittleEndian), cie)),
+ );
+ }
+
+ #[test]
+ fn test_parse_cie_32_ok() {
+ test_parse_cie(Format::Dwarf32, 1, 4);
+ test_parse_cie(Format::Dwarf32, 1, 8);
+ test_parse_cie(Format::Dwarf32, 4, 4);
+ test_parse_cie(Format::Dwarf32, 4, 8);
+ }
+
+ #[test]
+ fn test_parse_cie_64_ok() {
+ test_parse_cie(Format::Dwarf64, 1, 4);
+ test_parse_cie(Format::Dwarf64, 1, 8);
+ test_parse_cie(Format::Dwarf64, 4, 4);
+ test_parse_cie(Format::Dwarf64, 4, 8);
+ }
+
+ #[test]
+ fn test_parse_cie_length_too_big() {
+ let expected_instrs: Vec<_> = (0..13).map(|_| constants::DW_CFA_nop.0).collect();
+
+ let mut cie = CommonInformationEntry {
+ offset: 0,
+ length: 0,
+ format: Format::Dwarf32,
+ version: 4,
+ augmentation: None,
+ address_size: 4,
+ segment_size: 0,
+ code_alignment_factor: 0,
+ data_alignment_factor: 0,
+ return_address_register: Register(3),
+ initial_instructions: EndianSlice::new(&expected_instrs, LittleEndian),
+ };
+
+ let kind = debug_frame_le();
+ let section = Section::with_endian(kind.endian()).cie(kind, None, &mut cie);
+
+ let mut contents = section.get_contents().unwrap();
+
+ // Overwrite the length to be too big.
+ contents[0] = 0;
+ contents[1] = 0;
+ contents[2] = 0;
+ contents[3] = 255;
+
+ let debug_frame = DebugFrame::new(&contents, LittleEndian);
+ let bases = Default::default();
+ assert_eq!(
+ CommonInformationEntry::parse(
+ &bases,
+ &debug_frame,
+ &mut EndianSlice::new(&contents, LittleEndian)
+ )
+ .map_eof(&contents),
+ Err(Error::UnexpectedEof(ReaderOffsetId(4)))
+ );
+ }
+
+ #[test]
+ fn test_parse_fde_incomplete_length_32() {
+ let kind = debug_frame_le();
+ let section = Section::with_endian(kind.endian()).L16(5);
+ let section = section.get_contents().unwrap();
+ let debug_frame = kind.section(&section);
+ let rest = &mut EndianSlice::new(&section, LittleEndian);
+ assert_eq!(
+ parse_fde(debug_frame, rest, UnwindSection::cie_from_offset).map_eof(&section),
+ Err(Error::UnexpectedEof(ReaderOffsetId(0)))
+ );
+ }
+
+ #[test]
+ fn test_parse_fde_incomplete_length_64() {
+ let kind = debug_frame_le();
+ let section = Section::with_endian(kind.endian())
+ .L32(0xffff_ffff)
+ .L32(12345);
+ let section = section.get_contents().unwrap();
+ let debug_frame = kind.section(&section);
+ let rest = &mut EndianSlice::new(&section, LittleEndian);
+ assert_eq!(
+ parse_fde(debug_frame, rest, UnwindSection::cie_from_offset).map_eof(&section),
+ Err(Error::UnexpectedEof(ReaderOffsetId(4)))
+ );
+ }
+
+ #[test]
+ fn test_parse_fde_incomplete_cie_pointer_32() {
+ let kind = debug_frame_be();
+ let section = Section::with_endian(kind.endian())
+ // The length is not large enough to contain the CIE pointer.
+ .B32(3)
+ .B32(1994);
+ let section = section.get_contents().unwrap();
+ let debug_frame = kind.section(&section);
+ let rest = &mut EndianSlice::new(&section, BigEndian);
+ assert_eq!(
+ parse_fde(debug_frame, rest, UnwindSection::cie_from_offset).map_eof(&section),
+ Err(Error::UnexpectedEof(ReaderOffsetId(4)))
+ );
+ }
+
+ #[test]
+ fn test_parse_fde_32_ok() {
+ let expected_rest = [1, 2, 3, 4, 5, 6, 7, 8, 9];
+ let cie_offset = 0xbad0_bad1;
+ let expected_instrs: Vec<_> = (0..7).map(|_| constants::DW_CFA_nop.0).collect();
+
+ let cie = CommonInformationEntry {
+ offset: 0,
+ length: 100,
+ format: Format::Dwarf32,
+ version: 4,
+ augmentation: None,
+ // DWARF32 with a 64 bit address size! Holy moly!
+ address_size: 8,
+ segment_size: 0,
+ code_alignment_factor: 3,
+ data_alignment_factor: 2,
+ return_address_register: Register(1),
+ initial_instructions: EndianSlice::new(&[], LittleEndian),
+ };
+
+ let mut fde = FrameDescriptionEntry {
+ offset: 0,
+ length: 0,
+ format: Format::Dwarf32,
+ cie: cie.clone(),
+ initial_segment: 0,
+ initial_address: 0xfeed_beef,
+ address_range: 39,
+ augmentation: None,
+ instructions: EndianSlice::new(&expected_instrs, LittleEndian),
+ };
+
+ let kind = debug_frame_le();
+ let section = Section::with_endian(kind.endian())
+ .fde(kind, cie_offset, &mut fde)
+ .append_bytes(&expected_rest);
+
+ let section = section.get_contents().unwrap();
+ let debug_frame = kind.section(&section);
+ let rest = &mut EndianSlice::new(&section, LittleEndian);
+
+ let get_cie = |_: &_, _: &_, offset| {
+ assert_eq!(offset, DebugFrameOffset(cie_offset as usize));
+ Ok(cie.clone())
+ };
+
+ assert_eq!(parse_fde(debug_frame, rest, get_cie), Ok(fde));
+ assert_eq!(*rest, EndianSlice::new(&expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_fde_32_with_segment_ok() {
+ let expected_rest = [1, 2, 3, 4, 5, 6, 7, 8, 9];
+ let cie_offset = 0xbad0_bad1;
+ let expected_instrs: Vec<_> = (0..92).map(|_| constants::DW_CFA_nop.0).collect();
+
+ let cie = CommonInformationEntry {
+ offset: 0,
+ length: 100,
+ format: Format::Dwarf32,
+ version: 4,
+ augmentation: None,
+ address_size: 4,
+ segment_size: 4,
+ code_alignment_factor: 3,
+ data_alignment_factor: 2,
+ return_address_register: Register(1),
+ initial_instructions: EndianSlice::new(&[], LittleEndian),
+ };
+
+ let mut fde = FrameDescriptionEntry {
+ offset: 0,
+ length: 0,
+ format: Format::Dwarf32,
+ cie: cie.clone(),
+ initial_segment: 0xbadb_ad11,
+ initial_address: 0xfeed_beef,
+ address_range: 999,
+ augmentation: None,
+ instructions: EndianSlice::new(&expected_instrs, LittleEndian),
+ };
+
+ let kind = debug_frame_le();
+ let section = Section::with_endian(kind.endian())
+ .fde(kind, cie_offset, &mut fde)
+ .append_bytes(&expected_rest);
+
+ let section = section.get_contents().unwrap();
+ let debug_frame = kind.section(&section);
+ let rest = &mut EndianSlice::new(&section, LittleEndian);
+
+ let get_cie = |_: &_, _: &_, offset| {
+ assert_eq!(offset, DebugFrameOffset(cie_offset as usize));
+ Ok(cie.clone())
+ };
+
+ assert_eq!(parse_fde(debug_frame, rest, get_cie), Ok(fde));
+ assert_eq!(*rest, EndianSlice::new(&expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_fde_64_ok() {
+ let expected_rest = [1, 2, 3, 4, 5, 6, 7, 8, 9];
+ let cie_offset = 0xbad0_bad1;
+ let expected_instrs: Vec<_> = (0..7).map(|_| constants::DW_CFA_nop.0).collect();
+
+ let cie = CommonInformationEntry {
+ offset: 0,
+ length: 100,
+ format: Format::Dwarf64,
+ version: 4,
+ augmentation: None,
+ address_size: 8,
+ segment_size: 0,
+ code_alignment_factor: 3,
+ data_alignment_factor: 2,
+ return_address_register: Register(1),
+ initial_instructions: EndianSlice::new(&[], LittleEndian),
+ };
+
+ let mut fde = FrameDescriptionEntry {
+ offset: 0,
+ length: 0,
+ format: Format::Dwarf64,
+ cie: cie.clone(),
+ initial_segment: 0,
+ initial_address: 0xfeed_beef,
+ address_range: 999,
+ augmentation: None,
+ instructions: EndianSlice::new(&expected_instrs, LittleEndian),
+ };
+
+ let kind = debug_frame_le();
+ let section = Section::with_endian(kind.endian())
+ .fde(kind, cie_offset, &mut fde)
+ .append_bytes(&expected_rest);
+
+ let section = section.get_contents().unwrap();
+ let debug_frame = kind.section(&section);
+ let rest = &mut EndianSlice::new(&section, LittleEndian);
+
+ let get_cie = |_: &_, _: &_, offset| {
+ assert_eq!(offset, DebugFrameOffset(cie_offset as usize));
+ Ok(cie.clone())
+ };
+
+ assert_eq!(parse_fde(debug_frame, rest, get_cie), Ok(fde));
+ assert_eq!(*rest, EndianSlice::new(&expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_cfi_entry_on_cie_32_ok() {
+ let expected_rest = [1, 2, 3, 4, 5, 6, 7, 8, 9];
+ let expected_instrs: Vec<_> = (0..4).map(|_| constants::DW_CFA_nop.0).collect();
+
+ let mut cie = CommonInformationEntry {
+ offset: 0,
+ length: 0,
+ format: Format::Dwarf32,
+ version: 4,
+ augmentation: None,
+ address_size: 4,
+ segment_size: 0,
+ code_alignment_factor: 16,
+ data_alignment_factor: 32,
+ return_address_register: Register(1),
+ initial_instructions: EndianSlice::new(&expected_instrs, BigEndian),
+ };
+
+ let kind = debug_frame_be();
+ let section = Section::with_endian(kind.endian())
+ .cie(kind, None, &mut cie)
+ .append_bytes(&expected_rest);
+ let section = section.get_contents().unwrap();
+ let debug_frame = kind.section(&section);
+ let rest = &mut EndianSlice::new(&section, BigEndian);
+
+ let bases = Default::default();
+ assert_eq!(
+ parse_cfi_entry(&bases, &debug_frame, rest),
+ Ok(Some(CieOrFde::Cie(cie)))
+ );
+ assert_eq!(*rest, EndianSlice::new(&expected_rest, BigEndian));
+ }
+
+ #[test]
+ fn test_parse_cfi_entry_on_fde_32_ok() {
+ let cie_offset = 0x1234_5678;
+ let expected_rest = [1, 2, 3, 4, 5, 6, 7, 8, 9];
+ let expected_instrs: Vec<_> = (0..4).map(|_| constants::DW_CFA_nop.0).collect();
+
+ let cie = CommonInformationEntry {
+ offset: 0,
+ length: 0,
+ format: Format::Dwarf32,
+ version: 4,
+ augmentation: None,
+ address_size: 4,
+ segment_size: 0,
+ code_alignment_factor: 16,
+ data_alignment_factor: 32,
+ return_address_register: Register(1),
+ initial_instructions: EndianSlice::new(&[], BigEndian),
+ };
+
+ let mut fde = FrameDescriptionEntry {
+ offset: 0,
+ length: 0,
+ format: Format::Dwarf32,
+ cie: cie.clone(),
+ initial_segment: 0,
+ initial_address: 0xfeed_beef,
+ address_range: 39,
+ augmentation: None,
+ instructions: EndianSlice::new(&expected_instrs, BigEndian),
+ };
+
+ let kind = debug_frame_be();
+ let section = Section::with_endian(kind.endian())
+ .fde(kind, cie_offset, &mut fde)
+ .append_bytes(&expected_rest);
+
+ let section = section.get_contents().unwrap();
+ let debug_frame = kind.section(&section);
+ let rest = &mut EndianSlice::new(&section, BigEndian);
+
+ let bases = Default::default();
+ match parse_cfi_entry(&bases, &debug_frame, rest) {
+ Ok(Some(CieOrFde::Fde(partial))) => {
+ assert_eq!(*rest, EndianSlice::new(&expected_rest, BigEndian));
+
+ assert_eq!(partial.length, fde.length);
+ assert_eq!(partial.format, fde.format);
+ assert_eq!(partial.cie_offset, DebugFrameOffset(cie_offset as usize));
+
+ let get_cie = |_: &_, _: &_, offset| {
+ assert_eq!(offset, DebugFrameOffset(cie_offset as usize));
+ Ok(cie.clone())
+ };
+
+ assert_eq!(partial.parse(get_cie), Ok(fde));
+ }
+ otherwise => panic!("Unexpected result: {:#?}", otherwise),
+ }
+ }
+
+ #[test]
+ fn test_cfi_entries_iter() {
+ let expected_instrs1: Vec<_> = (0..4).map(|_| constants::DW_CFA_nop.0).collect();
+
+ let expected_instrs2: Vec<_> = (0..8).map(|_| constants::DW_CFA_nop.0).collect();
+
+ let expected_instrs3: Vec<_> = (0..12).map(|_| constants::DW_CFA_nop.0).collect();
+
+ let expected_instrs4: Vec<_> = (0..16).map(|_| constants::DW_CFA_nop.0).collect();
+
+ let mut cie1 = CommonInformationEntry {
+ offset: 0,
+ length: 0,
+ format: Format::Dwarf32,
+ version: 4,
+ augmentation: None,
+ address_size: 4,
+ segment_size: 0,
+ code_alignment_factor: 1,
+ data_alignment_factor: 2,
+ return_address_register: Register(3),
+ initial_instructions: EndianSlice::new(&expected_instrs1, BigEndian),
+ };
+
+ let mut cie2 = CommonInformationEntry {
+ offset: 0,
+ length: 0,
+ format: Format::Dwarf32,
+ version: 4,
+ augmentation: None,
+ address_size: 4,
+ segment_size: 0,
+ code_alignment_factor: 3,
+ data_alignment_factor: 2,
+ return_address_register: Register(1),
+ initial_instructions: EndianSlice::new(&expected_instrs2, BigEndian),
+ };
+
+ let cie1_location = Label::new();
+ let cie2_location = Label::new();
+
+ // Write the CIEs first so that their length gets set before we clone
+ // them into the FDEs and our equality assertions down the line end up
+ // with all the CIEs always having the correct length.
+ let kind = debug_frame_be();
+ let section = Section::with_endian(kind.endian())
+ .mark(&cie1_location)
+ .cie(kind, None, &mut cie1)
+ .mark(&cie2_location)
+ .cie(kind, None, &mut cie2);
+
+ let mut fde1 = FrameDescriptionEntry {
+ offset: 0,
+ length: 0,
+ format: Format::Dwarf32,
+ cie: cie1.clone(),
+ initial_segment: 0,
+ initial_address: 0xfeed_beef,
+ address_range: 39,
+ augmentation: None,
+ instructions: EndianSlice::new(&expected_instrs3, BigEndian),
+ };
+
+ let mut fde2 = FrameDescriptionEntry {
+ offset: 0,
+ length: 0,
+ format: Format::Dwarf32,
+ cie: cie2.clone(),
+ initial_segment: 0,
+ initial_address: 0xfeed_face,
+ address_range: 9000,
+ augmentation: None,
+ instructions: EndianSlice::new(&expected_instrs4, BigEndian),
+ };
+
+ let section =
+ section
+ .fde(kind, &cie1_location, &mut fde1)
+ .fde(kind, &cie2_location, &mut fde2);
+
+ section.start().set_const(0);
+
+ let cie1_offset = cie1_location.value().unwrap() as usize;
+ let cie2_offset = cie2_location.value().unwrap() as usize;
+
+ let contents = section.get_contents().unwrap();
+ let debug_frame = kind.section(&contents);
+
+ let bases = Default::default();
+ let mut entries = debug_frame.entries(&bases);
+
+ assert_eq!(entries.next(), Ok(Some(CieOrFde::Cie(cie1.clone()))));
+ assert_eq!(entries.next(), Ok(Some(CieOrFde::Cie(cie2.clone()))));
+
+ match entries.next() {
+ Ok(Some(CieOrFde::Fde(partial))) => {
+ assert_eq!(partial.length, fde1.length);
+ assert_eq!(partial.format, fde1.format);
+ assert_eq!(partial.cie_offset, DebugFrameOffset(cie1_offset));
+
+ let get_cie = |_: &_, _: &_, offset| {
+ assert_eq!(offset, DebugFrameOffset(cie1_offset));
+ Ok(cie1.clone())
+ };
+ assert_eq!(partial.parse(get_cie), Ok(fde1));
+ }
+ otherwise => panic!("Unexpected result: {:#?}", otherwise),
+ }
+
+ match entries.next() {
+ Ok(Some(CieOrFde::Fde(partial))) => {
+ assert_eq!(partial.length, fde2.length);
+ assert_eq!(partial.format, fde2.format);
+ assert_eq!(partial.cie_offset, DebugFrameOffset(cie2_offset));
+
+ let get_cie = |_: &_, _: &_, offset| {
+ assert_eq!(offset, DebugFrameOffset(cie2_offset));
+ Ok(cie2.clone())
+ };
+ assert_eq!(partial.parse(get_cie), Ok(fde2));
+ }
+ otherwise => panic!("Unexpected result: {:#?}", otherwise),
+ }
+
+ assert_eq!(entries.next(), Ok(None));
+ }
+
+ #[test]
+ fn test_parse_cie_from_offset() {
+ let filler = [1, 2, 3, 4, 5, 6, 7, 8, 9];
+ let instrs: Vec<_> = (0..5).map(|_| constants::DW_CFA_nop.0).collect();
+
+ let mut cie = CommonInformationEntry {
+ offset: 0,
+ length: 0,
+ format: Format::Dwarf64,
+ version: 4,
+ augmentation: None,
+ address_size: 4,
+ segment_size: 0,
+ code_alignment_factor: 4,
+ data_alignment_factor: 8,
+ return_address_register: Register(12),
+ initial_instructions: EndianSlice::new(&instrs, LittleEndian),
+ };
+
+ let cie_location = Label::new();
+
+ let kind = debug_frame_le();
+ let section = Section::with_endian(kind.endian())
+ .append_bytes(&filler)
+ .mark(&cie_location)
+ .cie(kind, None, &mut cie)
+ .append_bytes(&filler);
+
+ section.start().set_const(0);
+
+ let cie_offset = DebugFrameOffset(cie_location.value().unwrap() as usize);
+
+ let contents = section.get_contents().unwrap();
+ let debug_frame = kind.section(&contents);
+ let bases = Default::default();
+
+ assert_eq!(debug_frame.cie_from_offset(&bases, cie_offset), Ok(cie));
+ }
+
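+ // Helper: parse a single CFI instruction with default base addresses and
+ // no augmentation-provided address encoding (the `None` argument below).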
+ fn parse_cfi_instruction<R: Reader + Default>(
+ input: &mut R,
+ address_size: u8,
+ ) -> Result<CallFrameInstruction<R>> {
+ let parameters = &PointerEncodingParameters {
+ bases: &SectionBaseAddresses::default(),
+ func_base: None,
+ address_size,
+ section: &R::default(),
+ };
+ CallFrameInstruction::parse(input, None, parameters)
+ }
+
+ #[test]
+ fn test_parse_cfi_instruction_advance_loc() {
+ let expected_rest = [1, 2, 3, 4];
+ let expected_delta = 42;
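+ // DW_CFA_advance_loc encodes its delta in the low six bits of the opcode
+ // byte itself, hence the bitwise OR below.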
+ let section = Section::with_endian(Endian::Little)
+ .D8(constants::DW_CFA_advance_loc.0 | expected_delta)
+ .append_bytes(&expected_rest);
+ let contents = section.get_contents().unwrap();
+ let input = &mut EndianSlice::new(&contents, LittleEndian);
+ assert_eq!(
+ parse_cfi_instruction(input, 8),
+ Ok(CallFrameInstruction::AdvanceLoc {
+ delta: u32::from(expected_delta),
+ })
+ );
+ assert_eq!(*input, EndianSlice::new(&expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_cfi_instruction_offset() {
+ let expected_rest = [1, 2, 3, 4];
+ let expected_reg = 3;
+ let expected_offset = 1997;
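+ // DW_CFA_offset packs the register number into the low six bits of the
+ // opcode byte; the factored offset follows as a ULEB128.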
+ let section = Section::with_endian(Endian::Little)
+ .D8(constants::DW_CFA_offset.0 | expected_reg)
+ .uleb(expected_offset)
+ .append_bytes(&expected_rest);
+ let contents = section.get_contents().unwrap();
+ let input = &mut EndianSlice::new(&contents, LittleEndian);
+ assert_eq!(
+ parse_cfi_instruction(input, 8),
+ Ok(CallFrameInstruction::Offset {
+ register: Register(expected_reg.into()),
+ factored_offset: expected_offset,
+ })
+ );
+ assert_eq!(*input, EndianSlice::new(&expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_cfi_instruction_restore() {
+ let expected_rest = [1, 2, 3, 4];
+ let expected_reg = 3;
+ let section = Section::with_endian(Endian::Little)
+ .D8(constants::DW_CFA_restore.0 | expected_reg)
+ .append_bytes(&expected_rest);
+ let contents = section.get_contents().unwrap();
+ let input = &mut EndianSlice::new(&contents, LittleEndian);
+ assert_eq!(
+ parse_cfi_instruction(input, 8),
+ Ok(CallFrameInstruction::Restore {
+ register: Register(expected_reg.into()),
+ })
+ );
+ assert_eq!(*input, EndianSlice::new(&expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_cfi_instruction_nop() {
+ let expected_rest = [1, 2, 3, 4];
+ let section = Section::with_endian(Endian::Little)
+ .D8(constants::DW_CFA_nop.0)
+ .append_bytes(&expected_rest);
+ let contents = section.get_contents().unwrap();
+ let input = &mut EndianSlice::new(&contents, LittleEndian);
+ assert_eq!(
+ parse_cfi_instruction(input, 8),
+ Ok(CallFrameInstruction::Nop)
+ );
+ assert_eq!(*input, EndianSlice::new(&expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_cfi_instruction_set_loc() {
+ let expected_rest = [1, 2, 3, 4];
+ let expected_addr = 0xdead_beef;
+ let section = Section::with_endian(Endian::Little)
+ .D8(constants::DW_CFA_set_loc.0)
+ .L64(expected_addr)
+ .append_bytes(&expected_rest);
+ let contents = section.get_contents().unwrap();
+ let input = &mut EndianSlice::new(&contents, LittleEndian);
+ assert_eq!(
+ parse_cfi_instruction(input, 8),
+ Ok(CallFrameInstruction::SetLoc {
+ address: expected_addr,
+ })
+ );
+ assert_eq!(*input, EndianSlice::new(&expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_cfi_instruction_set_loc_encoding() {
+ let text_base = 0xfeed_face;
+ let addr_offset = 0xbeef;
+ let expected_addr = text_base + addr_offset;
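+ // With a DW_EH_PE_textrel encoding the operand is relative to the text
+ // base, so the parsed address is text_base + addr_offset.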
+ let expected_rest = [1, 2, 3, 4];
+ let section = Section::with_endian(Endian::Little)
+ .D8(constants::DW_CFA_set_loc.0)
+ .L64(addr_offset)
+ .append_bytes(&expected_rest);
+ let contents = section.get_contents().unwrap();
+ let input = &mut EndianSlice::new(&contents, LittleEndian);
+ let parameters = &PointerEncodingParameters {
+ bases: &BaseAddresses::default().set_text(text_base).eh_frame,
+ func_base: None,
+ address_size: 8,
+ section: &EndianSlice::new(&[], LittleEndian),
+ };
+ assert_eq!(
+ CallFrameInstruction::parse(input, Some(constants::DW_EH_PE_textrel), parameters),
+ Ok(CallFrameInstruction::SetLoc {
+ address: expected_addr,
+ })
+ );
+ assert_eq!(*input, EndianSlice::new(&expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_cfi_instruction_advance_loc1() {
+ let expected_rest = [1, 2, 3, 4];
+ let expected_delta = 8;
+ let section = Section::with_endian(Endian::Little)
+ .D8(constants::DW_CFA_advance_loc1.0)
+ .D8(expected_delta)
+ .append_bytes(&expected_rest);
+ let contents = section.get_contents().unwrap();
+ let input = &mut EndianSlice::new(&contents, LittleEndian);
+ assert_eq!(
+ parse_cfi_instruction(input, 8),
+ Ok(CallFrameInstruction::AdvanceLoc {
+ delta: u32::from(expected_delta),
+ })
+ );
+ assert_eq!(*input, EndianSlice::new(&expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_cfi_instruction_advance_loc2() {
+ let expected_rest = [1, 2, 3, 4];
+ let expected_delta = 500;
+ let section = Section::with_endian(Endian::Little)
+ .D8(constants::DW_CFA_advance_loc2.0)
+ .L16(expected_delta)
+ .append_bytes(&expected_rest);
+ let contents = section.get_contents().unwrap();
+ let input = &mut EndianSlice::new(&contents, LittleEndian);
+ assert_eq!(
+ parse_cfi_instruction(input, 8),
+ Ok(CallFrameInstruction::AdvanceLoc {
+ delta: u32::from(expected_delta),
+ })
+ );
+ assert_eq!(*input, EndianSlice::new(&expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_cfi_instruction_advance_loc4() {
+ let expected_rest = [1, 2, 3, 4];
+ let expected_delta = 1 << 20;
+ let section = Section::with_endian(Endian::Little)
+ .D8(constants::DW_CFA_advance_loc4.0)
+ .L32(expected_delta)
+ .append_bytes(&expected_rest);
+ let contents = section.get_contents().unwrap();
+ let input = &mut EndianSlice::new(&contents, LittleEndian);
+ assert_eq!(
+ parse_cfi_instruction(input, 8),
+ Ok(CallFrameInstruction::AdvanceLoc {
+ delta: expected_delta,
+ })
+ );
+ assert_eq!(*input, EndianSlice::new(&expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_cfi_instruction_offset_extended() {
+ let expected_rest = [1, 2, 3, 4];
+ let expected_reg = 7;
+ let expected_offset = 33;
+ let section = Section::with_endian(Endian::Little)
+ .D8(constants::DW_CFA_offset_extended.0)
+ .uleb(expected_reg.into())
+ .uleb(expected_offset)
+ .append_bytes(&expected_rest);
+ let contents = section.get_contents().unwrap();
+ let input = &mut EndianSlice::new(&contents, LittleEndian);
+ assert_eq!(
+ parse_cfi_instruction(input, 8),
+ Ok(CallFrameInstruction::Offset {
+ register: Register(expected_reg),
+ factored_offset: expected_offset,
+ })
+ );
+ assert_eq!(*input, EndianSlice::new(&expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_cfi_instruction_restore_extended() {
+ let expected_rest = [1, 2, 3, 4];
+ let expected_reg = 7;
+ let section = Section::with_endian(Endian::Little)
+ .D8(constants::DW_CFA_restore_extended.0)
+ .uleb(expected_reg.into())
+ .append_bytes(&expected_rest);
+ let contents = section.get_contents().unwrap();
+ let input = &mut EndianSlice::new(&contents, LittleEndian);
+ assert_eq!(
+ parse_cfi_instruction(input, 8),
+ Ok(CallFrameInstruction::Restore {
+ register: Register(expected_reg),
+ })
+ );
+ assert_eq!(*input, EndianSlice::new(&expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_cfi_instruction_undefined() {
+ let expected_rest = [1, 2, 3, 4];
+ let expected_reg = 7;
+ let section = Section::with_endian(Endian::Little)
+ .D8(constants::DW_CFA_undefined.0)
+ .uleb(expected_reg.into())
+ .append_bytes(&expected_rest);
+ let contents = section.get_contents().unwrap();
+ let input = &mut EndianSlice::new(&contents, LittleEndian);
+ assert_eq!(
+ parse_cfi_instruction(input, 8),
+ Ok(CallFrameInstruction::Undefined {
+ register: Register(expected_reg),
+ })
+ );
+ assert_eq!(*input, EndianSlice::new(&expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_cfi_instruction_same_value() {
+ let expected_rest = [1, 2, 3, 4];
+ let expected_reg = 7;
+ let section = Section::with_endian(Endian::Little)
+ .D8(constants::DW_CFA_same_value.0)
+ .uleb(expected_reg.into())
+ .append_bytes(&expected_rest);
+ let contents = section.get_contents().unwrap();
+ let input = &mut EndianSlice::new(&contents, LittleEndian);
+ assert_eq!(
+ parse_cfi_instruction(input, 8),
+ Ok(CallFrameInstruction::SameValue {
+ register: Register(expected_reg),
+ })
+ );
+ assert_eq!(*input, EndianSlice::new(&expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_cfi_instruction_register() {
+ let expected_rest = [1, 2, 3, 4];
+ let expected_dest_reg = 7;
+ let expected_src_reg = 8;
+ let section = Section::with_endian(Endian::Little)
+ .D8(constants::DW_CFA_register.0)
+ .uleb(expected_dest_reg.into())
+ .uleb(expected_src_reg.into())
+ .append_bytes(&expected_rest);
+ let contents = section.get_contents().unwrap();
+ let input = &mut EndianSlice::new(&contents, LittleEndian);
+ assert_eq!(
+ parse_cfi_instruction(input, 8),
+ Ok(CallFrameInstruction::Register {
+ dest_register: Register(expected_dest_reg),
+ src_register: Register(expected_src_reg),
+ })
+ );
+ assert_eq!(*input, EndianSlice::new(&expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_cfi_instruction_remember_state() {
+ let expected_rest = [1, 2, 3, 4];
+ let section = Section::with_endian(Endian::Little)
+ .D8(constants::DW_CFA_remember_state.0)
+ .append_bytes(&expected_rest);
+ let contents = section.get_contents().unwrap();
+ let input = &mut EndianSlice::new(&contents, LittleEndian);
+ assert_eq!(
+ parse_cfi_instruction(input, 8),
+ Ok(CallFrameInstruction::RememberState)
+ );
+ assert_eq!(*input, EndianSlice::new(&expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_cfi_instruction_restore_state() {
+ let expected_rest = [1, 2, 3, 4];
+ let section = Section::with_endian(Endian::Little)
+ .D8(constants::DW_CFA_restore_state.0)
+ .append_bytes(&expected_rest);
+ let contents = section.get_contents().unwrap();
+ let input = &mut EndianSlice::new(&contents, LittleEndian);
+ assert_eq!(
+ parse_cfi_instruction(input, 8),
+ Ok(CallFrameInstruction::RestoreState)
+ );
+ assert_eq!(*input, EndianSlice::new(&expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_cfi_instruction_def_cfa() {
+ let expected_rest = [1, 2, 3, 4];
+ let expected_reg = 2;
+ let expected_offset = 0;
+ let section = Section::with_endian(Endian::Little)
+ .D8(constants::DW_CFA_def_cfa.0)
+ .uleb(expected_reg.into())
+ .uleb(expected_offset)
+ .append_bytes(&expected_rest);
+ let contents = section.get_contents().unwrap();
+ let input = &mut EndianSlice::new(&contents, LittleEndian);
+ assert_eq!(
+ parse_cfi_instruction(input, 8),
+ Ok(CallFrameInstruction::DefCfa {
+ register: Register(expected_reg),
+ offset: expected_offset,
+ })
+ );
+ assert_eq!(*input, EndianSlice::new(&expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_cfi_instruction_def_cfa_register() {
+ let expected_rest = [1, 2, 3, 4];
+ let expected_reg = 2;
+ let section = Section::with_endian(Endian::Little)
+ .D8(constants::DW_CFA_def_cfa_register.0)
+ .uleb(expected_reg.into())
+ .append_bytes(&expected_rest);
+ let contents = section.get_contents().unwrap();
+ let input = &mut EndianSlice::new(&contents, LittleEndian);
+ assert_eq!(
+ parse_cfi_instruction(input, 8),
+ Ok(CallFrameInstruction::DefCfaRegister {
+ register: Register(expected_reg),
+ })
+ );
+ assert_eq!(*input, EndianSlice::new(&expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_cfi_instruction_def_cfa_offset() {
+ let expected_rest = [1, 2, 3, 4];
+ let expected_offset = 23;
+ let section = Section::with_endian(Endian::Little)
+ .D8(constants::DW_CFA_def_cfa_offset.0)
+ .uleb(expected_offset)
+ .append_bytes(&expected_rest);
+ let contents = section.get_contents().unwrap();
+ let input = &mut EndianSlice::new(&contents, LittleEndian);
+ assert_eq!(
+ parse_cfi_instruction(input, 8),
+ Ok(CallFrameInstruction::DefCfaOffset {
+ offset: expected_offset,
+ })
+ );
+ assert_eq!(*input, EndianSlice::new(&expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_cfi_instruction_def_cfa_expression() {
+ let expected_rest = [1, 2, 3, 4];
+ let expected_expr = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1];
+
+ let length = Label::new();
+ let start = Label::new();
+ let end = Label::new();
+
+ let section = Section::with_endian(Endian::Little)
+ .D8(constants::DW_CFA_def_cfa_expression.0)
+ .D8(&length)
+ .mark(&start)
+ .append_bytes(&expected_expr)
+ .mark(&end)
+ .append_bytes(&expected_rest);
+
+ length.set_const((&end - &start) as u64);
+ let contents = section.get_contents().unwrap();
+ let input = &mut EndianSlice::new(&contents, LittleEndian);
+
+ assert_eq!(
+ parse_cfi_instruction(input, 8),
+ Ok(CallFrameInstruction::DefCfaExpression {
+ expression: Expression(EndianSlice::new(&expected_expr, LittleEndian)),
+ })
+ );
+ assert_eq!(*input, EndianSlice::new(&expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_cfi_instruction_expression() {
+ let expected_rest = [1, 2, 3, 4];
+ let expected_reg = 99;
+ let expected_expr = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1];
+
+ let length = Label::new();
+ let start = Label::new();
+ let end = Label::new();
+
+ let section = Section::with_endian(Endian::Little)
+ .D8(constants::DW_CFA_expression.0)
+ .uleb(expected_reg.into())
+ .D8(&length)
+ .mark(&start)
+ .append_bytes(&expected_expr)
+ .mark(&end)
+ .append_bytes(&expected_rest);
+
+ length.set_const((&end - &start) as u64);
+ let contents = section.get_contents().unwrap();
+ let input = &mut EndianSlice::new(&contents, LittleEndian);
+
+ assert_eq!(
+ parse_cfi_instruction(input, 8),
+ Ok(CallFrameInstruction::Expression {
+ register: Register(expected_reg),
+ expression: Expression(EndianSlice::new(&expected_expr, LittleEndian)),
+ })
+ );
+ assert_eq!(*input, EndianSlice::new(&expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_cfi_instruction_offset_extended_sf() {
+ let expected_rest = [1, 2, 3, 4];
+ let expected_reg = 7;
+ let expected_offset = -33;
+ let section = Section::with_endian(Endian::Little)
+ .D8(constants::DW_CFA_offset_extended_sf.0)
+ .uleb(expected_reg.into())
+ .sleb(expected_offset)
+ .append_bytes(&expected_rest);
+ let contents = section.get_contents().unwrap();
+ let input = &mut EndianSlice::new(&contents, LittleEndian);
+ assert_eq!(
+ parse_cfi_instruction(input, 8),
+ Ok(CallFrameInstruction::OffsetExtendedSf {
+ register: Register(expected_reg),
+ factored_offset: expected_offset,
+ })
+ );
+ assert_eq!(*input, EndianSlice::new(&expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_cfi_instruction_def_cfa_sf() {
+ let expected_rest = [1, 2, 3, 4];
+ let expected_reg = 2;
+ let expected_offset = -9999;
+ let section = Section::with_endian(Endian::Little)
+ .D8(constants::DW_CFA_def_cfa_sf.0)
+ .uleb(expected_reg.into())
+ .sleb(expected_offset)
+ .append_bytes(&expected_rest);
+ let contents = section.get_contents().unwrap();
+ let input = &mut EndianSlice::new(&contents, LittleEndian);
+ assert_eq!(
+ parse_cfi_instruction(input, 8),
+ Ok(CallFrameInstruction::DefCfaSf {
+ register: Register(expected_reg),
+ factored_offset: expected_offset,
+ })
+ );
+ assert_eq!(*input, EndianSlice::new(&expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_cfi_instruction_def_cfa_offset_sf() {
+ let expected_rest = [1, 2, 3, 4];
+ let expected_offset = -123;
+ let section = Section::with_endian(Endian::Little)
+ .D8(constants::DW_CFA_def_cfa_offset_sf.0)
+ .sleb(expected_offset)
+ .append_bytes(&expected_rest);
+ let contents = section.get_contents().unwrap();
+ let input = &mut EndianSlice::new(&contents, LittleEndian);
+ assert_eq!(
+ parse_cfi_instruction(input, 8),
+ Ok(CallFrameInstruction::DefCfaOffsetSf {
+ factored_offset: expected_offset,
+ })
+ );
+ assert_eq!(*input, EndianSlice::new(&expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_cfi_instruction_val_offset() {
+ let expected_rest = [1, 2, 3, 4];
+ let expected_reg = 50;
+ let expected_offset = 23;
+ let section = Section::with_endian(Endian::Little)
+ .D8(constants::DW_CFA_val_offset.0)
+ .uleb(expected_reg.into())
+ .uleb(expected_offset)
+ .append_bytes(&expected_rest);
+ let contents = section.get_contents().unwrap();
+ let input = &mut EndianSlice::new(&contents, LittleEndian);
+ assert_eq!(
+ parse_cfi_instruction(input, 8),
+ Ok(CallFrameInstruction::ValOffset {
+ register: Register(expected_reg),
+ factored_offset: expected_offset,
+ })
+ );
+ assert_eq!(*input, EndianSlice::new(&expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_cfi_instruction_val_offset_sf() {
+ let expected_rest = [1, 2, 3, 4];
+ let expected_reg = 50;
+ let expected_offset = -23;
+ let section = Section::with_endian(Endian::Little)
+ .D8(constants::DW_CFA_val_offset_sf.0)
+ .uleb(expected_reg.into())
+ .sleb(expected_offset)
+ .append_bytes(&expected_rest);
+ let contents = section.get_contents().unwrap();
+ let input = &mut EndianSlice::new(&contents, LittleEndian);
+ assert_eq!(
+ parse_cfi_instruction(input, 8),
+ Ok(CallFrameInstruction::ValOffsetSf {
+ register: Register(expected_reg),
+ factored_offset: expected_offset,
+ })
+ );
+ assert_eq!(*input, EndianSlice::new(&expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_cfi_instruction_val_expression() {
+ let expected_rest = [1, 2, 3, 4];
+ let expected_reg = 50;
+ let expected_expr = [2, 2, 1, 1, 5, 5];
+
+ let length = Label::new();
+ let start = Label::new();
+ let end = Label::new();
+
+ let section = Section::with_endian(Endian::Little)
+ .D8(constants::DW_CFA_val_expression.0)
+ .uleb(expected_reg.into())
+ .D8(&length)
+ .mark(&start)
+ .append_bytes(&expected_expr)
+ .mark(&end)
+ .append_bytes(&expected_rest);
+
+ length.set_const((&end - &start) as u64);
+ let contents = section.get_contents().unwrap();
+ let input = &mut EndianSlice::new(&contents, LittleEndian);
+
+ assert_eq!(
+ parse_cfi_instruction(input, 8),
+ Ok(CallFrameInstruction::ValExpression {
+ register: Register(expected_reg),
+ expression: Expression(EndianSlice::new(&expected_expr, LittleEndian)),
+ })
+ );
+ assert_eq!(*input, EndianSlice::new(&expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_cfi_instruction_unknown_instruction() {
+ let expected_rest = [1, 2, 3, 4];
+ let unknown_instr = constants::DwCfa(0b0011_1111);
+ let section = Section::with_endian(Endian::Little)
+ .D8(unknown_instr.0)
+ .append_bytes(&expected_rest);
+ let contents = section.get_contents().unwrap();
+ let input = &mut EndianSlice::new(&contents, LittleEndian);
+ assert_eq!(
+ parse_cfi_instruction(input, 8),
+ Err(Error::UnknownCallFrameInstruction(unknown_instr))
+ );
+ }
+
+ #[test]
+ fn test_call_frame_instruction_iter_ok() {
+ let expected_reg = 50;
+ let expected_expr = [2, 2, 1, 1, 5, 5];
+ let expected_delta = 230;
+
+ let length = Label::new();
+ let start = Label::new();
+ let end = Label::new();
+
+ let section = Section::with_endian(Endian::Big)
+ .D8(constants::DW_CFA_val_expression.0)
+ .uleb(expected_reg.into())
+ .D8(&length)
+ .mark(&start)
+ .append_bytes(&expected_expr)
+ .mark(&end)
+ .D8(constants::DW_CFA_advance_loc1.0)
+ .D8(expected_delta);
+
+ length.set_const((&end - &start) as u64);
+ let contents = section.get_contents().unwrap();
+ let input = EndianSlice::new(&contents, BigEndian);
+ let parameters = PointerEncodingParameters {
+ bases: &SectionBaseAddresses::default(),
+ func_base: None,
+ address_size: 8,
+ section: &EndianSlice::default(),
+ };
+ let mut iter = CallFrameInstructionIter {
+ input,
+ address_encoding: None,
+ parameters,
+ };
+
+ assert_eq!(
+ iter.next(),
+ Ok(Some(CallFrameInstruction::ValExpression {
+ register: Register(expected_reg),
+ expression: Expression(EndianSlice::new(&expected_expr, BigEndian)),
+ }))
+ );
+
+ assert_eq!(
+ iter.next(),
+ Ok(Some(CallFrameInstruction::AdvanceLoc {
+ delta: u32::from(expected_delta),
+ }))
+ );
+
+ assert_eq!(iter.next(), Ok(None));
+ }
+
+ #[test]
+ fn test_call_frame_instruction_iter_err() {
+ // DW_CFA_advance_loc1 without an operand.
+ let section = Section::with_endian(Endian::Big).D8(constants::DW_CFA_advance_loc1.0);
+
+ let contents = section.get_contents().unwrap();
+ let input = EndianSlice::new(&contents, BigEndian);
+ let parameters = PointerEncodingParameters {
+ bases: &SectionBaseAddresses::default(),
+ func_base: None,
+ address_size: 8,
+ section: &EndianSlice::default(),
+ };
+ let mut iter = CallFrameInstructionIter {
+ input,
+ address_encoding: None,
+ parameters,
+ };
+
+ assert_eq!(
+ iter.next().map_eof(&contents),
+ Err(Error::UnexpectedEof(ReaderOffsetId(1)))
+ );
+ assert_eq!(iter.next(), Ok(None));
+ }
+
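+ // Helper: build an unwind table from the CIE (or the FDE, when one is
+ // given), evaluate each instruction in turn while checking its expected
+ // per-instruction result, then compare the final context with `expected_ctx`.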
+ #[allow(clippy::needless_pass_by_value)]
+ fn assert_eval<'a, I>(
+ mut initial_ctx: UnwindContext<EndianSlice<'a, LittleEndian>>,
+ expected_ctx: UnwindContext<EndianSlice<'a, LittleEndian>>,
+ cie: CommonInformationEntry<EndianSlice<'a, LittleEndian>>,
+ fde: Option<FrameDescriptionEntry<EndianSlice<'a, LittleEndian>>>,
+ instructions: I,
+ ) where
+ I: AsRef<
+ [(
+ Result<bool>,
+ CallFrameInstruction<EndianSlice<'a, LittleEndian>>,
+ )],
+ >,
+ {
+ {
+ let section = &DebugFrame::from(EndianSlice::default());
+ let bases = &BaseAddresses::default();
+ let mut table = match fde {
+ Some(fde) => UnwindTable::new_for_fde(section, bases, &mut initial_ctx, &fde),
+ None => UnwindTable::new_for_cie(section, bases, &mut initial_ctx, &cie),
+ };
+ for &(ref expected_result, ref instruction) in instructions.as_ref() {
+ assert_eq!(*expected_result, table.evaluate(instruction.clone()));
+ }
+ }
+
+ assert_eq!(expected_ctx, initial_ctx);
+ }
+
+ fn make_test_cie<'a>() -> CommonInformationEntry<EndianSlice<'a, LittleEndian>> {
+ CommonInformationEntry {
+ offset: 0,
+ format: Format::Dwarf64,
+ length: 0,
+ return_address_register: Register(0),
+ version: 4,
+ address_size: mem::size_of::<usize>() as u8,
+ initial_instructions: EndianSlice::new(&[], LittleEndian),
+ augmentation: None,
+ segment_size: 0,
+ data_alignment_factor: 2,
+ code_alignment_factor: 3,
+ }
+ }
+
+ #[test]
+ fn test_eval_set_loc() {
+ let cie = make_test_cie();
+ let ctx = UnwindContext::new();
+ let mut expected = ctx.clone();
+ expected.row_mut().end_address = 42;
+ let instructions = [(Ok(true), CallFrameInstruction::SetLoc { address: 42 })];
+ assert_eval(ctx, expected, cie, None, instructions);
+ }
+
+ #[test]
+ fn test_eval_set_loc_backwards() {
+ let cie = make_test_cie();
+ let mut ctx = UnwindContext::new();
+ ctx.row_mut().start_address = 999;
+ let expected = ctx.clone();
+ let instructions = [(
+ Err(Error::InvalidAddressRange),
+ CallFrameInstruction::SetLoc { address: 42 },
+ )];
+ assert_eval(ctx, expected, cie, None, instructions);
+ }
+
+ #[test]
+ fn test_eval_advance_loc() {
+ let cie = make_test_cie();
+ let mut ctx = UnwindContext::new();
+ ctx.row_mut().start_address = 3;
+ let mut expected = ctx.clone();
+ expected.row_mut().end_address = 3 + 2 * cie.code_alignment_factor;
+ let instructions = [(Ok(true), CallFrameInstruction::AdvanceLoc { delta: 2 })];
+ assert_eval(ctx, expected, cie, None, instructions);
+ }
+
+ #[test]
+ fn test_eval_advance_loc_overflow() {
+ let cie = make_test_cie();
+ let mut ctx = UnwindContext::new();
+ ctx.row_mut().start_address = u64::MAX;
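+ // Advancing by 42 from u64::MAX wraps around, so the resulting end
+ // address is 42 * code_alignment_factor - 1.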
+ let mut expected = ctx.clone();
+ expected.row_mut().end_address = 42 * cie.code_alignment_factor - 1;
+ let instructions = [(Ok(true), CallFrameInstruction::AdvanceLoc { delta: 42 })];
+ assert_eval(ctx, expected, cie, None, instructions);
+ }
+
+ #[test]
+ fn test_eval_def_cfa() {
+ let cie = make_test_cie();
+ let ctx = UnwindContext::new();
+ let mut expected = ctx.clone();
+ expected.set_cfa(CfaRule::RegisterAndOffset {
+ register: Register(42),
+ offset: 36,
+ });
+ let instructions = [(
+ Ok(false),
+ CallFrameInstruction::DefCfa {
+ register: Register(42),
+ offset: 36,
+ },
+ )];
+ assert_eval(ctx, expected, cie, None, instructions);
+ }
+
+ #[test]
+ fn test_eval_def_cfa_sf() {
+ let cie = make_test_cie();
+ let ctx = UnwindContext::new();
+ let mut expected = ctx.clone();
+ expected.set_cfa(CfaRule::RegisterAndOffset {
+ register: Register(42),
+ offset: 36 * cie.data_alignment_factor as i64,
+ });
+ let instructions = [(
+ Ok(false),
+ CallFrameInstruction::DefCfaSf {
+ register: Register(42),
+ factored_offset: 36,
+ },
+ )];
+ assert_eval(ctx, expected, cie, None, instructions);
+ }
+
+ #[test]
+ fn test_eval_def_cfa_register() {
+ let cie = make_test_cie();
+ let mut ctx = UnwindContext::new();
+ ctx.set_cfa(CfaRule::RegisterAndOffset {
+ register: Register(3),
+ offset: 8,
+ });
+ let mut expected = ctx.clone();
+ expected.set_cfa(CfaRule::RegisterAndOffset {
+ register: Register(42),
+ offset: 8,
+ });
+ let instructions = [(
+ Ok(false),
+ CallFrameInstruction::DefCfaRegister {
+ register: Register(42),
+ },
+ )];
+ assert_eval(ctx, expected, cie, None, instructions);
+ }
+
+ #[test]
+ fn test_eval_def_cfa_register_invalid_context() {
+ let cie = make_test_cie();
+ let mut ctx = UnwindContext::new();
+ ctx.set_cfa(CfaRule::Expression(Expression(EndianSlice::new(
+ &[],
+ LittleEndian,
+ ))));
+ let expected = ctx.clone();
+ let instructions = [(
+ Err(Error::CfiInstructionInInvalidContext),
+ CallFrameInstruction::DefCfaRegister {
+ register: Register(42),
+ },
+ )];
+ assert_eval(ctx, expected, cie, None, instructions);
+ }
+
+ #[test]
+ fn test_eval_def_cfa_offset() {
+ let cie = make_test_cie();
+ let mut ctx = UnwindContext::new();
+ ctx.set_cfa(CfaRule::RegisterAndOffset {
+ register: Register(3),
+ offset: 8,
+ });
+ let mut expected = ctx.clone();
+ expected.set_cfa(CfaRule::RegisterAndOffset {
+ register: Register(3),
+ offset: 42,
+ });
+ let instructions = [(Ok(false), CallFrameInstruction::DefCfaOffset { offset: 42 })];
+ assert_eval(ctx, expected, cie, None, instructions);
+ }
+
+ #[test]
+ fn test_eval_def_cfa_offset_invalid_context() {
+ let cie = make_test_cie();
+ let mut ctx = UnwindContext::new();
+ ctx.set_cfa(CfaRule::Expression(Expression(EndianSlice::new(
+ &[],
+ LittleEndian,
+ ))));
+ let expected = ctx.clone();
+ let instructions = [(
+ Err(Error::CfiInstructionInInvalidContext),
+ CallFrameInstruction::DefCfaOffset { offset: 1993 },
+ )];
+ assert_eval(ctx, expected, cie, None, instructions);
+ }
+
+ #[test]
+ fn test_eval_def_cfa_expression() {
+ let expr = [1, 2, 3, 4];
+ let cie = make_test_cie();
+ let ctx = UnwindContext::new();
+ let mut expected = ctx.clone();
+ expected.set_cfa(CfaRule::Expression(Expression(EndianSlice::new(
+ &expr,
+ LittleEndian,
+ ))));
+ let instructions = [(
+ Ok(false),
+ CallFrameInstruction::DefCfaExpression {
+ expression: Expression(EndianSlice::new(&expr, LittleEndian)),
+ },
+ )];
+ assert_eval(ctx, expected, cie, None, instructions);
+ }
+
+ #[test]
+ fn test_eval_undefined() {
+ let cie = make_test_cie();
+ let ctx = UnwindContext::new();
+ let mut expected = ctx.clone();
+ expected
+ .set_register_rule(Register(5), RegisterRule::Undefined)
+ .unwrap();
+ let instructions = [(
+ Ok(false),
+ CallFrameInstruction::Undefined {
+ register: Register(5),
+ },
+ )];
+ assert_eval(ctx, expected, cie, None, instructions);
+ }
+
+ #[test]
+ fn test_eval_same_value() {
+ let cie = make_test_cie();
+ let ctx = UnwindContext::new();
+ let mut expected = ctx.clone();
+ expected
+ .set_register_rule(Register(0), RegisterRule::SameValue)
+ .unwrap();
+ let instructions = [(
+ Ok(false),
+ CallFrameInstruction::SameValue {
+ register: Register(0),
+ },
+ )];
+ assert_eval(ctx, expected, cie, None, instructions);
+ }
+
+ #[test]
+ fn test_eval_offset() {
+ let cie = make_test_cie();
+ let ctx = UnwindContext::new();
+ let mut expected = ctx.clone();
+ expected
+ .set_register_rule(
+ Register(2),
+ RegisterRule::Offset(3 * cie.data_alignment_factor),
+ )
+ .unwrap();
+ let instructions = [(
+ Ok(false),
+ CallFrameInstruction::Offset {
+ register: Register(2),
+ factored_offset: 3,
+ },
+ )];
+ assert_eval(ctx, expected, cie, None, instructions);
+ }
+
+ #[test]
+ fn test_eval_offset_extended_sf() {
+ let cie = make_test_cie();
+ let ctx = UnwindContext::new();
+ let mut expected = ctx.clone();
+ expected
+ .set_register_rule(
+ Register(4),
+ RegisterRule::Offset(-3 * cie.data_alignment_factor),
+ )
+ .unwrap();
+ let instructions = [(
+ Ok(false),
+ CallFrameInstruction::OffsetExtendedSf {
+ register: Register(4),
+ factored_offset: -3,
+ },
+ )];
+ assert_eval(ctx, expected, cie, None, instructions);
+ }
+
+ #[test]
+ fn test_eval_val_offset() {
+ let cie = make_test_cie();
+ let ctx = UnwindContext::new();
+ let mut expected = ctx.clone();
+ expected
+ .set_register_rule(
+ Register(5),
+ RegisterRule::ValOffset(7 * cie.data_alignment_factor),
+ )
+ .unwrap();
+ let instructions = [(
+ Ok(false),
+ CallFrameInstruction::ValOffset {
+ register: Register(5),
+ factored_offset: 7,
+ },
+ )];
+ assert_eval(ctx, expected, cie, None, instructions);
+ }
+
+ #[test]
+ fn test_eval_val_offset_sf() {
+ let cie = make_test_cie();
+ let ctx = UnwindContext::new();
+ let mut expected = ctx.clone();
+ expected
+ .set_register_rule(
+ Register(5),
+ RegisterRule::ValOffset(-7 * cie.data_alignment_factor),
+ )
+ .unwrap();
+ let instructions = [(
+ Ok(false),
+ CallFrameInstruction::ValOffsetSf {
+ register: Register(5),
+ factored_offset: -7,
+ },
+ )];
+ assert_eval(ctx, expected, cie, None, instructions);
+ }
+
+ #[test]
+ fn test_eval_expression() {
+ let expr = [1, 2, 3, 4];
+ let cie = make_test_cie();
+ let ctx = UnwindContext::new();
+ let mut expected = ctx.clone();
+ expected
+ .set_register_rule(
+ Register(9),
+ RegisterRule::Expression(Expression(EndianSlice::new(&expr, LittleEndian))),
+ )
+ .unwrap();
+ let instructions = [(
+ Ok(false),
+ CallFrameInstruction::Expression {
+ register: Register(9),
+ expression: Expression(EndianSlice::new(&expr, LittleEndian)),
+ },
+ )];
+ assert_eval(ctx, expected, cie, None, instructions);
+ }
+
+ #[test]
+ fn test_eval_val_expression() {
+ let expr = [1, 2, 3, 4];
+ let cie = make_test_cie();
+ let ctx = UnwindContext::new();
+ let mut expected = ctx.clone();
+ expected
+ .set_register_rule(
+ Register(9),
+ RegisterRule::ValExpression(Expression(EndianSlice::new(&expr, LittleEndian))),
+ )
+ .unwrap();
+ let instructions = [(
+ Ok(false),
+ CallFrameInstruction::ValExpression {
+ register: Register(9),
+ expression: Expression(EndianSlice::new(&expr, LittleEndian)),
+ },
+ )];
+ assert_eval(ctx, expected, cie, None, instructions);
+ }
+
+ #[test]
+ fn test_eval_restore() {
+ let cie = make_test_cie();
+ let fde = FrameDescriptionEntry {
+ offset: 0,
+ format: Format::Dwarf64,
+ length: 0,
+ address_range: 0,
+ augmentation: None,
+ initial_address: 0,
+ initial_segment: 0,
+ cie: cie.clone(),
+ instructions: EndianSlice::new(&[], LittleEndian),
+ };
+
+ let mut ctx = UnwindContext::new();
+ ctx.set_register_rule(Register(0), RegisterRule::Offset(1))
+ .unwrap();
+ ctx.save_initial_rules().unwrap();
+ let expected = ctx.clone();
+ ctx.set_register_rule(Register(0), RegisterRule::Offset(2))
+ .unwrap();
+
+ let instructions = [(
+ Ok(false),
+ CallFrameInstruction::Restore {
+ register: Register(0),
+ },
+ )];
+ assert_eval(ctx, expected, cie, Some(fde), instructions);
+ }
+
+ #[test]
+ fn test_eval_restore_havent_saved_initial_context() {
+ let cie = make_test_cie();
+ let ctx = UnwindContext::new();
+ let expected = ctx.clone();
+ let instructions = [(
+ Err(Error::CfiInstructionInInvalidContext),
+ CallFrameInstruction::Restore {
+ register: Register(0),
+ },
+ )];
+ assert_eval(ctx, expected, cie, None, instructions);
+ }
+
+ #[test]
+ fn test_eval_remember_state() {
+ let cie = make_test_cie();
+ let ctx = UnwindContext::new();
+ let mut expected = ctx.clone();
+ expected.push_row().unwrap();
+ let instructions = [(Ok(false), CallFrameInstruction::RememberState)];
+ assert_eval(ctx, expected, cie, None, instructions);
+ }
+
+ #[test]
+ fn test_eval_restore_state() {
+ let cie = make_test_cie();
+
+ let mut ctx = UnwindContext::new();
+ ctx.set_start_address(1);
+ ctx.set_register_rule(Register(0), RegisterRule::SameValue)
+ .unwrap();
+ let mut expected = ctx.clone();
+ ctx.push_row().unwrap();
+ ctx.set_start_address(2);
+ ctx.set_register_rule(Register(0), RegisterRule::Offset(16))
+ .unwrap();
+
+ // Restore state should preserve current location.
+ expected.set_start_address(2);
+
+ let instructions = [
+ // First one pops just fine.
+ (Ok(false), CallFrameInstruction::RestoreState),
+ // Second pop would try to pop out of bounds.
+ (
+ Err(Error::PopWithEmptyStack),
+ CallFrameInstruction::RestoreState,
+ ),
+ ];
+
+ assert_eval(ctx, expected, cie, None, instructions);
+ }
+
+ #[test]
+ fn test_eval_nop() {
+ let cie = make_test_cie();
+ let ctx = UnwindContext::new();
+ let expected = ctx.clone();
+ let instructions = [(Ok(false), CallFrameInstruction::Nop)];
+ assert_eval(ctx, expected, cie, None, instructions);
+ }
+
+ #[test]
+ fn test_unwind_table_cie_no_rule() {
+ #[allow(clippy::identity_op)]
+ let initial_instructions = Section::with_endian(Endian::Little)
+ // The CFA is -12 from register 4.
+ .D8(constants::DW_CFA_def_cfa_sf.0)
+ .uleb(4)
+ .sleb(-12)
+ .append_repeated(constants::DW_CFA_nop.0, 4);
+ let initial_instructions = initial_instructions.get_contents().unwrap();
+
+ let cie = CommonInformationEntry {
+ offset: 0,
+ length: 0,
+ format: Format::Dwarf32,
+ version: 4,
+ augmentation: None,
+ address_size: 8,
+ segment_size: 0,
+ code_alignment_factor: 1,
+ data_alignment_factor: 1,
+ return_address_register: Register(3),
+ initial_instructions: EndianSlice::new(&initial_instructions, LittleEndian),
+ };
+
+ let instructions = Section::with_endian(Endian::Little)
+ // A bunch of nop padding.
+ .append_repeated(constants::DW_CFA_nop.0, 8);
+ let instructions = instructions.get_contents().unwrap();
+
+ let fde = FrameDescriptionEntry {
+ offset: 0,
+ length: 0,
+ format: Format::Dwarf32,
+ cie: cie.clone(),
+ initial_segment: 0,
+ initial_address: 0,
+ address_range: 100,
+ augmentation: None,
+ instructions: EndianSlice::new(&instructions, LittleEndian),
+ };
+
+ let section = &DebugFrame::from(EndianSlice::default());
+ let bases = &BaseAddresses::default();
+ let mut ctx = Box::new(UnwindContext::new());
+
+ let mut table = fde
+ .rows(section, bases, &mut ctx)
+ .expect("Should run initial program OK");
+ assert!(table.ctx.is_initialized);
+ let expected_initial_rule = (Register(0), RegisterRule::Undefined);
+ assert_eq!(table.ctx.initial_rule, Some(expected_initial_rule));
+
+ {
+ let row = table.next_row().expect("Should evaluate first row OK");
+ let expected = UnwindTableRow {
+ start_address: 0,
+ end_address: 100,
+ saved_args_size: 0,
+ cfa: CfaRule::RegisterAndOffset {
+ register: Register(4),
+ offset: -12,
+ },
+ registers: [].iter().collect(),
+ };
+ assert_eq!(Some(&expected), row);
+ }
+
+ // All done!
+ assert_eq!(Ok(None), table.next_row());
+ assert_eq!(Ok(None), table.next_row());
+ }
+
+ #[test]
+ fn test_unwind_table_cie_single_rule() {
+ #[allow(clippy::identity_op)]
+ let initial_instructions = Section::with_endian(Endian::Little)
+ // The CFA is -12 from register 4.
+ .D8(constants::DW_CFA_def_cfa_sf.0)
+ .uleb(4)
+ .sleb(-12)
+ // Register 3 is 4 from the CFA.
+ .D8(constants::DW_CFA_offset.0 | 3)
+ .uleb(4)
+ .append_repeated(constants::DW_CFA_nop.0, 4);
+ let initial_instructions = initial_instructions.get_contents().unwrap();
+
+ let cie = CommonInformationEntry {
+ offset: 0,
+ length: 0,
+ format: Format::Dwarf32,
+ version: 4,
+ augmentation: None,
+ address_size: 8,
+ segment_size: 0,
+ code_alignment_factor: 1,
+ data_alignment_factor: 1,
+ return_address_register: Register(3),
+ initial_instructions: EndianSlice::new(&initial_instructions, LittleEndian),
+ };
+
+ let instructions = Section::with_endian(Endian::Little)
+ // A bunch of nop padding.
+ .append_repeated(constants::DW_CFA_nop.0, 8);
+ let instructions = instructions.get_contents().unwrap();
+
+ let fde = FrameDescriptionEntry {
+ offset: 0,
+ length: 0,
+ format: Format::Dwarf32,
+ cie: cie.clone(),
+ initial_segment: 0,
+ initial_address: 0,
+ address_range: 100,
+ augmentation: None,
+ instructions: EndianSlice::new(&instructions, LittleEndian),
+ };
+
+ let section = &DebugFrame::from(EndianSlice::default());
+ let bases = &BaseAddresses::default();
+ let mut ctx = Box::new(UnwindContext::new());
+
+ let mut table = fde
+ .rows(section, bases, &mut ctx)
+ .expect("Should run initial program OK");
+ assert!(table.ctx.is_initialized);
+ let expected_initial_rule = (Register(3), RegisterRule::Offset(4));
+ assert_eq!(table.ctx.initial_rule, Some(expected_initial_rule));
+
+ {
+ let row = table.next_row().expect("Should evaluate first row OK");
+ let expected = UnwindTableRow {
+ start_address: 0,
+ end_address: 100,
+ saved_args_size: 0,
+ cfa: CfaRule::RegisterAndOffset {
+ register: Register(4),
+ offset: -12,
+ },
+ registers: [(Register(3), RegisterRule::Offset(4))].iter().collect(),
+ };
+ assert_eq!(Some(&expected), row);
+ }
+
+ // All done!
+ assert_eq!(Ok(None), table.next_row());
+ assert_eq!(Ok(None), table.next_row());
+ }
+
+ #[test]
+ fn test_unwind_table_next_row() {
+ #[allow(clippy::identity_op)]
+ let initial_instructions = Section::with_endian(Endian::Little)
+ // The CFA is -12 from register 4.
+ .D8(constants::DW_CFA_def_cfa_sf.0)
+ .uleb(4)
+ .sleb(-12)
+ // Register 0 is 8 from the CFA.
+ .D8(constants::DW_CFA_offset.0 | 0)
+ .uleb(8)
+ // Register 3 is 4 from the CFA.
+ .D8(constants::DW_CFA_offset.0 | 3)
+ .uleb(4)
+ .append_repeated(constants::DW_CFA_nop.0, 4);
+ let initial_instructions = initial_instructions.get_contents().unwrap();
+
+ let cie = CommonInformationEntry {
+ offset: 0,
+ length: 0,
+ format: Format::Dwarf32,
+ version: 4,
+ augmentation: None,
+ address_size: 8,
+ segment_size: 0,
+ code_alignment_factor: 1,
+ data_alignment_factor: 1,
+ return_address_register: Register(3),
+ initial_instructions: EndianSlice::new(&initial_instructions, LittleEndian),
+ };
+
+ let instructions = Section::with_endian(Endian::Little)
+ // Initial instructions form a row, advance the address by 1.
+ .D8(constants::DW_CFA_advance_loc1.0)
+ .D8(1)
+ // Register 0 is -16 from the CFA.
+ .D8(constants::DW_CFA_offset_extended_sf.0)
+ .uleb(0)
+ .sleb(-16)
+ // Finish this row, advance the address by 32.
+ .D8(constants::DW_CFA_advance_loc1.0)
+ .D8(32)
+ // Register 3 is -4 from the CFA.
+ .D8(constants::DW_CFA_offset_extended_sf.0)
+ .uleb(3)
+ .sleb(-4)
+ // Finish this row, advance the address by 64.
+ .D8(constants::DW_CFA_advance_loc1.0)
+ .D8(64)
+ // Register 5 is 4 from the CFA.
+ .D8(constants::DW_CFA_offset.0 | 5)
+ .uleb(4)
+ // A bunch of nop padding.
+ .append_repeated(constants::DW_CFA_nop.0, 8);
+ let instructions = instructions.get_contents().unwrap();
+
+ let fde = FrameDescriptionEntry {
+ offset: 0,
+ length: 0,
+ format: Format::Dwarf32,
+ cie: cie.clone(),
+ initial_segment: 0,
+ initial_address: 0,
+ address_range: 100,
+ augmentation: None,
+ instructions: EndianSlice::new(&instructions, LittleEndian),
+ };
+
+ let section = &DebugFrame::from(EndianSlice::default());
+ let bases = &BaseAddresses::default();
+ let mut ctx = Box::new(UnwindContext::new());
+
+ let mut table = fde
+ .rows(section, bases, &mut ctx)
+ .expect("Should run initial program OK");
+ assert!(table.ctx.is_initialized);
+ assert!(table.ctx.initial_rule.is_none());
+ let expected_initial_rules: RegisterRuleMap<_> = [
+ (Register(0), RegisterRule::Offset(8)),
+ (Register(3), RegisterRule::Offset(4)),
+ ]
+ .iter()
+ .collect();
+ assert_eq!(table.ctx.stack[0].registers, expected_initial_rules);
+
+ {
+ let row = table.next_row().expect("Should evaluate first row OK");
+ let expected = UnwindTableRow {
+ start_address: 0,
+ end_address: 1,
+ saved_args_size: 0,
+ cfa: CfaRule::RegisterAndOffset {
+ register: Register(4),
+ offset: -12,
+ },
+ registers: [
+ (Register(0), RegisterRule::Offset(8)),
+ (Register(3), RegisterRule::Offset(4)),
+ ]
+ .iter()
+ .collect(),
+ };
+ assert_eq!(Some(&expected), row);
+ }
+
+ {
+ let row = table.next_row().expect("Should evaluate second row OK");
+ let expected = UnwindTableRow {
+ start_address: 1,
+ end_address: 33,
+ saved_args_size: 0,
+ cfa: CfaRule::RegisterAndOffset {
+ register: Register(4),
+ offset: -12,
+ },
+ registers: [
+ (Register(0), RegisterRule::Offset(-16)),
+ (Register(3), RegisterRule::Offset(4)),
+ ]
+ .iter()
+ .collect(),
+ };
+ assert_eq!(Some(&expected), row);
+ }
+
+ {
+ let row = table.next_row().expect("Should evaluate third row OK");
+ let expected = UnwindTableRow {
+ start_address: 33,
+ end_address: 97,
+ saved_args_size: 0,
+ cfa: CfaRule::RegisterAndOffset {
+ register: Register(4),
+ offset: -12,
+ },
+ registers: [
+ (Register(0), RegisterRule::Offset(-16)),
+ (Register(3), RegisterRule::Offset(-4)),
+ ]
+ .iter()
+ .collect(),
+ };
+ assert_eq!(Some(&expected), row);
+ }
+
+ {
+ let row = table.next_row().expect("Should evaluate fourth row OK");
+ let expected = UnwindTableRow {
+ start_address: 97,
+ end_address: 100,
+ saved_args_size: 0,
+ cfa: CfaRule::RegisterAndOffset {
+ register: Register(4),
+ offset: -12,
+ },
+ registers: [
+ (Register(0), RegisterRule::Offset(-16)),
+ (Register(3), RegisterRule::Offset(-4)),
+ (Register(5), RegisterRule::Offset(4)),
+ ]
+ .iter()
+ .collect(),
+ };
+ assert_eq!(Some(&expected), row);
+ }
+
+ // All done!
+ assert_eq!(Ok(None), table.next_row());
+ assert_eq!(Ok(None), table.next_row());
+ }
+
+ #[test]
+ fn test_unwind_info_for_address_ok() {
+ let instrs1 = Section::with_endian(Endian::Big)
+ // The CFA is -12 from register 4.
+ .D8(constants::DW_CFA_def_cfa_sf.0)
+ .uleb(4)
+ .sleb(-12);
+ let instrs1 = instrs1.get_contents().unwrap();
+
+ let instrs2: Vec<_> = (0..8).map(|_| constants::DW_CFA_nop.0).collect();
+
+ let instrs3 = Section::with_endian(Endian::Big)
+ // Initial instructions form a row, advance the address by 100.
+ .D8(constants::DW_CFA_advance_loc1.0)
+ .D8(100)
+ // Register 0 is -16 from the CFA.
+ .D8(constants::DW_CFA_offset_extended_sf.0)
+ .uleb(0)
+ .sleb(-16);
+ let instrs3 = instrs3.get_contents().unwrap();
+
+ let instrs4: Vec<_> = (0..16).map(|_| constants::DW_CFA_nop.0).collect();
+
+ let mut cie1 = CommonInformationEntry {
+ offset: 0,
+ length: 0,
+ format: Format::Dwarf32,
+ version: 4,
+ augmentation: None,
+ address_size: 8,
+ segment_size: 0,
+ code_alignment_factor: 1,
+ data_alignment_factor: 1,
+ return_address_register: Register(3),
+ initial_instructions: EndianSlice::new(&instrs1, BigEndian),
+ };
+
+ let mut cie2 = CommonInformationEntry {
+ offset: 0,
+ length: 0,
+ format: Format::Dwarf32,
+ version: 4,
+ augmentation: None,
+ address_size: 4,
+ segment_size: 0,
+ code_alignment_factor: 1,
+ data_alignment_factor: 1,
+ return_address_register: Register(1),
+ initial_instructions: EndianSlice::new(&instrs2, BigEndian),
+ };
+
+ let cie1_location = Label::new();
+ let cie2_location = Label::new();
+
+ // Write the CIEs first so that their lengths get set before we clone
+ // them into the FDEs, and our equality assertions down the line end up
+ // with all the CIEs having the correct length.
+ let kind = debug_frame_be();
+ let section = Section::with_endian(kind.endian())
+ .mark(&cie1_location)
+ .cie(kind, None, &mut cie1)
+ .mark(&cie2_location)
+ .cie(kind, None, &mut cie2);
+
+ let mut fde1 = FrameDescriptionEntry {
+ offset: 0,
+ length: 0,
+ format: Format::Dwarf32,
+ cie: cie1.clone(),
+ initial_segment: 0,
+ initial_address: 0xfeed_beef,
+ address_range: 200,
+ augmentation: None,
+ instructions: EndianSlice::new(&instrs3, BigEndian),
+ };
+
+ let mut fde2 = FrameDescriptionEntry {
+ offset: 0,
+ length: 0,
+ format: Format::Dwarf32,
+ cie: cie2.clone(),
+ initial_segment: 0,
+ initial_address: 0xfeed_face,
+ address_range: 9000,
+ augmentation: None,
+ instructions: EndianSlice::new(&instrs4, BigEndian),
+ };
+
+ let section =
+ section
+ .fde(kind, &cie1_location, &mut fde1)
+ .fde(kind, &cie2_location, &mut fde2);
+ section.start().set_const(0);
+
+ let contents = section.get_contents().unwrap();
+ let debug_frame = kind.section(&contents);
+
+ // Get the second row of the unwind table in `instrs3`.
+ let bases = Default::default();
+ let mut ctx = Box::new(UnwindContext::new());
+ let result = debug_frame.unwind_info_for_address(
+ &bases,
+ &mut ctx,
+ 0xfeed_beef + 150,
+ DebugFrame::cie_from_offset,
+ );
+ assert!(result.is_ok());
+ let unwind_info = result.unwrap();
+
+ assert_eq!(
+ *unwind_info,
+ UnwindTableRow {
+ start_address: fde1.initial_address() + 100,
+ end_address: fde1.initial_address() + fde1.len(),
+ saved_args_size: 0,
+ cfa: CfaRule::RegisterAndOffset {
+ register: Register(4),
+ offset: -12,
+ },
+ registers: [(Register(0), RegisterRule::Offset(-16))].iter().collect(),
+ }
+ );
+ }
+
+ #[test]
+ fn test_unwind_info_for_address_not_found() {
+ let debug_frame = DebugFrame::new(&[], NativeEndian);
+ let bases = Default::default();
+ let mut ctx = Box::new(UnwindContext::new());
+ let result = debug_frame.unwind_info_for_address(
+ &bases,
+ &mut ctx,
+ 0xbadb_ad99,
+ DebugFrame::cie_from_offset,
+ );
+ assert!(result.is_err());
+ assert_eq!(result.unwrap_err(), Error::NoUnwindInfoForAddress);
+ }
+
+ #[test]
+ fn test_eh_frame_hdr_unknown_version() {
+ let bases = BaseAddresses::default();
+ let buf = &[42];
+ let result = EhFrameHdr::new(buf, NativeEndian).parse(&bases, 8);
+ assert!(result.is_err());
+ assert_eq!(result.unwrap_err(), Error::UnknownVersion(42));
+ }
+
+ #[test]
+ fn test_eh_frame_hdr_omit_ehptr() {
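+ // Header bytes are: version, eh_frame_ptr encoding, fde_count encoding,
+ // table encoding. 0xff is DW_EH_PE_omit, which cannot be parsed as the
+ // eh_frame pointer encoding, so parsing must fail.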
+ let section = Section::with_endian(Endian::Little)
+ .L8(1)
+ .L8(0xff)
+ .L8(0x03)
+ .L8(0x0b)
+ .L32(2)
+ .L32(10)
+ .L32(1)
+ .L32(20)
+ .L32(2)
+ .L32(0);
+ let section = section.get_contents().unwrap();
+ let bases = BaseAddresses::default();
+ let result = EhFrameHdr::new(&section, LittleEndian).parse(&bases, 8);
+ assert!(result.is_err());
+ assert_eq!(result.unwrap_err(), Error::CannotParseOmitPointerEncoding);
+ }
+
+ #[test]
+ fn test_eh_frame_hdr_omit_count() {
+ let section = Section::with_endian(Endian::Little)
+ .L8(1)
+ .L8(0x0b)
+ .L8(0xff)
+ .L8(0x0b)
+ .L32(0x12345);
+ let section = section.get_contents().unwrap();
+ let bases = BaseAddresses::default();
+ let result = EhFrameHdr::new(&section, LittleEndian).parse(&bases, 8);
+ assert!(result.is_ok());
+ let result = result.unwrap();
+ assert_eq!(result.eh_frame_ptr(), Pointer::Direct(0x12345));
+ assert!(result.table().is_none());
+ }
+
+ #[test]
+ fn test_eh_frame_hdr_omit_table() {
+ let section = Section::with_endian(Endian::Little)
+ .L8(1)
+ .L8(0x0b)
+ .L8(0x03)
+ .L8(0xff)
+ .L32(0x12345)
+ .L32(2);
+ let section = section.get_contents().unwrap();
+ let bases = BaseAddresses::default();
+ let result = EhFrameHdr::new(&section, LittleEndian).parse(&bases, 8);
+ assert!(result.is_ok());
+ let result = result.unwrap();
+ assert_eq!(result.eh_frame_ptr(), Pointer::Direct(0x12345));
+ assert!(result.table().is_none());
+ }
+
+ #[test]
+ fn test_eh_frame_hdr_varlen_table() {
+ let section = Section::with_endian(Endian::Little)
+ .L8(1)
+ .L8(0x0b)
+ .L8(0x03)
+ .L8(0x01)
+ .L32(0x12345)
+ .L32(2);
+ let section = section.get_contents().unwrap();
+ let bases = BaseAddresses::default();
+ let result = EhFrameHdr::new(&section, LittleEndian).parse(&bases, 8);
+ assert!(result.is_ok());
+ let result = result.unwrap();
+ assert_eq!(result.eh_frame_ptr(), Pointer::Direct(0x12345));
+ let table = result.table();
+ assert!(table.is_some());
+ let table = table.unwrap();
+ assert_eq!(
+ table.lookup(0, &bases),
+ Err(Error::VariableLengthSearchTable)
+ );
+ }
+
+ #[test]
+ fn test_eh_frame_hdr_indirect_length() {
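+ // The 0x80 bit marks DW_EH_PE_indirect; an indirect encoding for the
+ // FDE count is unsupported, so parsing must fail.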
+ let section = Section::with_endian(Endian::Little)
+ .L8(1)
+ .L8(0x0b)
+ .L8(0x83)
+ .L8(0x0b)
+ .L32(0x12345)
+ .L32(2);
+ let section = section.get_contents().unwrap();
+ let bases = BaseAddresses::default();
+ let result = EhFrameHdr::new(&section, LittleEndian).parse(&bases, 8);
+ assert!(result.is_err());
+ assert_eq!(result.unwrap_err(), Error::UnsupportedPointerEncoding);
+ }
+
+ #[test]
+ fn test_eh_frame_hdr_indirect_ptrs() {
+ let section = Section::with_endian(Endian::Little)
+ .L8(1)
+ .L8(0x8b)
+ .L8(0x03)
+ .L8(0x8b)
+ .L32(0x12345)
+ .L32(2)
+ .L32(10)
+ .L32(1)
+ .L32(20)
+ .L32(2);
+ let section = section.get_contents().unwrap();
+ let bases = BaseAddresses::default();
+ let result = EhFrameHdr::new(&section, LittleEndian).parse(&bases, 8);
+ assert!(result.is_ok());
+ let result = result.unwrap();
+ assert_eq!(result.eh_frame_ptr(), Pointer::Indirect(0x12345));
+ let table = result.table();
+ assert!(table.is_some());
+ let table = table.unwrap();
+ assert_eq!(
+ table.lookup(0, &bases),
+ Err(Error::UnsupportedPointerEncoding)
+ );
+ }
+
+ #[test]
+ fn test_eh_frame_hdr_good() {
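+ // 0x0b is DW_EH_PE_sdata4 and 0x03 is DW_EH_PE_udata4, so the header and
+ // its two-entry binary search table parse cleanly and can be queried.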
+ let section = Section::with_endian(Endian::Little)
+ .L8(1)
+ .L8(0x0b)
+ .L8(0x03)
+ .L8(0x0b)
+ .L32(0x12345)
+ .L32(2)
+ .L32(10)
+ .L32(1)
+ .L32(20)
+ .L32(2);
+ let section = section.get_contents().unwrap();
+ let bases = BaseAddresses::default();
+ let result = EhFrameHdr::new(&section, LittleEndian).parse(&bases, 8);
+ assert!(result.is_ok());
+ let result = result.unwrap();
+ assert_eq!(result.eh_frame_ptr(), Pointer::Direct(0x12345));
+ let table = result.table();
+ assert!(table.is_some());
+ let table = table.unwrap();
+ assert_eq!(table.lookup(0, &bases), Ok(Pointer::Direct(1)));
+ assert_eq!(table.lookup(9, &bases), Ok(Pointer::Direct(1)));
+ assert_eq!(table.lookup(10, &bases), Ok(Pointer::Direct(1)));
+ assert_eq!(table.lookup(11, &bases), Ok(Pointer::Direct(1)));
+ assert_eq!(table.lookup(19, &bases), Ok(Pointer::Direct(1)));
+ assert_eq!(table.lookup(20, &bases), Ok(Pointer::Direct(2)));
+ assert_eq!(table.lookup(21, &bases), Ok(Pointer::Direct(2)));
+ assert_eq!(table.lookup(100_000, &bases), Ok(Pointer::Direct(2)));
+ }
+
+ #[test]
+ fn test_eh_frame_fde_for_address_good() {
+ // First, set up eh_frame.
+ // Write the CIE first so that its length gets set before we clone it
+ // into the FDE.
+ let mut cie = make_test_cie();
+ cie.format = Format::Dwarf32;
+ cie.version = 1;
+
+ let start_of_cie = Label::new();
+ let end_of_cie = Label::new();
+
+ let kind = eh_frame_le();
+ let section = Section::with_endian(kind.endian())
+ .append_repeated(0, 16)
+ .mark(&start_of_cie)
+ .cie(kind, None, &mut cie)
+ .mark(&end_of_cie);
+
+ let mut fde1 = FrameDescriptionEntry {
+ offset: 0,
+ length: 0,
+ format: Format::Dwarf32,
+ cie: cie.clone(),
+ initial_segment: 0,
+ initial_address: 9,
+ address_range: 4,
+ augmentation: None,
+ instructions: EndianSlice::new(&[], LittleEndian),
+ };
+ let mut fde2 = FrameDescriptionEntry {
+ offset: 0,
+ length: 0,
+ format: Format::Dwarf32,
+ cie: cie.clone(),
+ initial_segment: 0,
+ initial_address: 20,
+ address_range: 8,
+ augmentation: None,
+ instructions: EndianSlice::new(&[], LittleEndian),
+ };
+
+ let start_of_fde1 = Label::new();
+ let start_of_fde2 = Label::new();
+
+ let section = section
+ // +4 for the FDE length before the CIE offset.
+ .mark(&start_of_fde1)
+ .fde(kind, (&start_of_fde1 - &start_of_cie + 4) as u64, &mut fde1)
+ .mark(&start_of_fde2)
+ .fde(kind, (&start_of_fde2 - &start_of_cie + 4) as u64, &mut fde2);
+
+ section.start().set_const(0);
+ let section = section.get_contents().unwrap();
+ let section = EndianSlice::new(&section, LittleEndian);
+ let eh_frame = kind.section(&section);
+
+ // Set up eh_frame_hdr.
+ let section = Section::with_endian(kind.endian())
+ .L8(1)
+ .L8(0x0b)
+ .L8(0x03)
+ .L8(0x0b)
+ .L32(0x12345)
+ .L32(2)
+ .L32(10)
+ .L32(0x12345 + start_of_fde1.value().unwrap() as u32)
+ .L32(20)
+ .L32(0x12345 + start_of_fde2.value().unwrap() as u32);
+
+ let section = section.get_contents().unwrap();
+ let bases = BaseAddresses::default();
+ let eh_frame_hdr = EhFrameHdr::new(&section, LittleEndian).parse(&bases, 8);
+ assert!(eh_frame_hdr.is_ok());
+ let eh_frame_hdr = eh_frame_hdr.unwrap();
+
+ let table = eh_frame_hdr.table();
+ assert!(table.is_some());
+ let table = table.unwrap();
+
+ let bases = Default::default();
+ let mut iter = table.iter(&bases);
+ assert_eq!(
+ iter.next(),
+ Ok(Some((
+ Pointer::Direct(10),
+ Pointer::Direct(0x12345 + start_of_fde1.value().unwrap() as u64)
+ )))
+ );
+ assert_eq!(
+ iter.next(),
+ Ok(Some((
+ Pointer::Direct(20),
+ Pointer::Direct(0x12345 + start_of_fde2.value().unwrap() as u64)
+ )))
+ );
+ assert_eq!(iter.next(), Ok(None));
+
+ assert_eq!(
+ table.iter(&bases).nth(0),
+ Ok(Some((
+ Pointer::Direct(10),
+ Pointer::Direct(0x12345 + start_of_fde1.value().unwrap() as u64)
+ )))
+ );
+
+ assert_eq!(
+ table.iter(&bases).nth(1),
+ Ok(Some((
+ Pointer::Direct(20),
+ Pointer::Direct(0x12345 + start_of_fde2.value().unwrap() as u64)
+ )))
+ );
+ assert_eq!(table.iter(&bases).nth(2), Ok(None));
+
+ let f = |_: &_, _: &_, o: EhFrameOffset| {
+ assert_eq!(o, EhFrameOffset(start_of_cie.value().unwrap() as usize));
+ Ok(cie.clone())
+ };
+ assert_eq!(
+ table.fde_for_address(&eh_frame, &bases, 9, f),
+ Ok(fde1.clone())
+ );
+ assert_eq!(
+ table.fde_for_address(&eh_frame, &bases, 10, f),
+ Ok(fde1.clone())
+ );
+ assert_eq!(table.fde_for_address(&eh_frame, &bases, 11, f), Ok(fde1));
+ assert_eq!(
+ table.fde_for_address(&eh_frame, &bases, 19, f),
+ Err(Error::NoUnwindInfoForAddress)
+ );
+ assert_eq!(
+ table.fde_for_address(&eh_frame, &bases, 20, f),
+ Ok(fde2.clone())
+ );
+ assert_eq!(table.fde_for_address(&eh_frame, &bases, 21, f), Ok(fde2));
+ assert_eq!(
+ table.fde_for_address(&eh_frame, &bases, 100_000, f),
+ Err(Error::NoUnwindInfoForAddress)
+ );
+ }
+
+ #[test]
+ fn test_eh_frame_stops_at_zero_length() {
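+ // A zero length word terminates .eh_frame, so entry iteration yields
+ // Ok(None) and looking up a CIE at that offset reports NoEntryAtGivenOffset.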
+ let section = Section::with_endian(Endian::Little).L32(0);
+ let section = section.get_contents().unwrap();
+ let rest = &mut EndianSlice::new(&section, LittleEndian);
+ let bases = Default::default();
+
+ assert_eq!(
+ parse_cfi_entry(&bases, &EhFrame::new(&*section, LittleEndian), rest),
+ Ok(None)
+ );
+
+ assert_eq!(
+ EhFrame::new(&section, LittleEndian).cie_from_offset(&bases, EhFrameOffset(0)),
+ Err(Error::NoEntryAtGivenOffset)
+ );
+ }
+
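+ // Helper: write an FDE whose CIE pointer field holds `cie_offset` and
+ // return the section-relative CIE offset the parser resolves it to. In
+ // .eh_frame the CIE pointer is relative to the pointer field itself.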
+ fn resolve_cie_offset(buf: &[u8], cie_offset: usize) -> Result<usize> {
+ let mut fde = FrameDescriptionEntry {
+ offset: 0,
+ length: 0,
+ format: Format::Dwarf64,
+ cie: make_test_cie(),
+ initial_segment: 0,
+ initial_address: 0xfeed_beef,
+ address_range: 39,
+ augmentation: None,
+ instructions: EndianSlice::new(&[], LittleEndian),
+ };
+
+ let kind = eh_frame_le();
+ let section = Section::with_endian(kind.endian())
+ .append_bytes(&buf)
+ .fde(kind, cie_offset as u64, &mut fde)
+ .append_bytes(&buf);
+
+ let section = section.get_contents().unwrap();
+ let eh_frame = kind.section(&section);
+ let input = &mut EndianSlice::new(&section[buf.len()..], LittleEndian);
+
+ let bases = Default::default();
+ match parse_cfi_entry(&bases, &eh_frame, input) {
+ Ok(Some(CieOrFde::Fde(partial))) => Ok(partial.cie_offset.0),
+ Err(e) => Err(e),
+ otherwise => panic!("Unexpected result: {:#?}", otherwise),
+ }
+ }
+
+ #[test]
+ fn test_eh_frame_resolve_cie_offset_ok() {
+ let buf = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
+ let cie_offset = 2;
+ // + 4 for the size of the length field
+ assert_eq!(
+ resolve_cie_offset(&buf, buf.len() + 4 - cie_offset),
+ Ok(cie_offset)
+ );
+ }
+
+ #[test]
+ fn test_eh_frame_resolve_cie_offset_out_of_bounds() {
+ let buf = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
+ assert_eq!(
+ resolve_cie_offset(&buf, buf.len() + 4 + 2),
+ Err(Error::OffsetOutOfBounds)
+ );
+ }
+
+ #[test]
+ fn test_eh_frame_resolve_cie_offset_underflow() {
+ let buf = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
+ assert_eq!(
+ resolve_cie_offset(&buf, ::core::usize::MAX),
+ Err(Error::OffsetOutOfBounds)
+ );
+ }
+
+ #[test]
+ fn test_eh_frame_fde_ok() {
+ let mut cie = make_test_cie();
+ cie.format = Format::Dwarf32;
+ cie.version = 1;
+
+ let start_of_cie = Label::new();
+ let end_of_cie = Label::new();
+
+ // Write the CIE first so that its length gets set before we clone it
+ // into the FDE.
+ let kind = eh_frame_le();
+ let section = Section::with_endian(kind.endian())
+ .append_repeated(0, 16)
+ .mark(&start_of_cie)
+ .cie(kind, None, &mut cie)
+ .mark(&end_of_cie);
+
+ let mut fde = FrameDescriptionEntry {
+ offset: 0,
+ length: 0,
+ format: Format::Dwarf32,
+ cie: cie.clone(),
+ initial_segment: 0,
+ initial_address: 0xfeed_beef,
+ address_range: 999,
+ augmentation: None,
+ instructions: EndianSlice::new(&[], LittleEndian),
+ };
+
+ let section = section
+ // +4 for the FDE length before the CIE offset.
+ .fde(kind, (&end_of_cie - &start_of_cie + 4) as u64, &mut fde);
+
+ section.start().set_const(0);
+ let section = section.get_contents().unwrap();
+ let eh_frame = kind.section(&section);
+ let section = EndianSlice::new(&section, LittleEndian);
+
+ let mut offset = None;
+ match parse_fde(
+ eh_frame,
+ &mut section.range_from(end_of_cie.value().unwrap() as usize..),
+ |_, _, o| {
+ offset = Some(o);
+ assert_eq!(o, EhFrameOffset(start_of_cie.value().unwrap() as usize));
+ Ok(cie.clone())
+ },
+ ) {
+ Ok(actual) => assert_eq!(actual, fde),
+ otherwise => panic!("Unexpected result {:?}", otherwise),
+ }
+ assert!(offset.is_some());
+ }
+
+ #[test]
+ fn test_eh_frame_fde_out_of_bounds() {
+ let mut cie = make_test_cie();
+ cie.version = 1;
+
+ let end_of_cie = Label::new();
+
+ let mut fde = FrameDescriptionEntry {
+ offset: 0,
+ length: 0,
+ format: Format::Dwarf64,
+ cie: cie.clone(),
+ initial_segment: 0,
+ initial_address: 0xfeed_beef,
+ address_range: 999,
+ augmentation: None,
+ instructions: EndianSlice::new(&[], LittleEndian),
+ };
+
+ let kind = eh_frame_le();
+ let section = Section::with_endian(kind.endian())
+ .cie(kind, None, &mut cie)
+ .mark(&end_of_cie)
+ .fde(kind, 99_999_999_999_999, &mut fde);
+
+ section.start().set_const(0);
+ let section = section.get_contents().unwrap();
+ let eh_frame = kind.section(&section);
+ let section = EndianSlice::new(&section, LittleEndian);
+
+ let result = parse_fde(
+ eh_frame,
+ &mut section.range_from(end_of_cie.value().unwrap() as usize..),
+ UnwindSection::cie_from_offset,
+ );
+ assert_eq!(result, Err(Error::OffsetOutOfBounds));
+ }
+
+ #[test]
+ fn test_augmentation_parse_not_z_augmentation() {
+ let augmentation = &mut EndianSlice::new(b"wtf", NativeEndian);
+ let bases = Default::default();
+ let address_size = 8;
+ let section = EhFrame::new(&[], NativeEndian);
+ let input = &mut EndianSlice::new(&[], NativeEndian);
+ assert_eq!(
+ Augmentation::parse(augmentation, &bases, address_size, &section, input),
+ Err(Error::UnknownAugmentation)
+ );
+ }
+
+ #[test]
+ fn test_augmentation_parse_just_signal_trampoline() {
+ let aug_str = &mut EndianSlice::new(b"S", LittleEndian);
+ let bases = Default::default();
+ let address_size = 8;
+ let section = EhFrame::new(&[], LittleEndian);
+ let input = &mut EndianSlice::new(&[], LittleEndian);
+
+ let mut augmentation = Augmentation::default();
+ augmentation.is_signal_trampoline = true;
+
+ assert_eq!(
+ Augmentation::parse(aug_str, &bases, address_size, &section, input),
+ Ok(augmentation)
+ );
+ }
+
+ #[test]
+ fn test_augmentation_parse_unknown_part_of_z_augmentation() {
+ // The 'Z' character is not defined by the z-style augmentation.
+ let bases = Default::default();
+ let address_size = 8;
+ let section = Section::with_endian(Endian::Little)
+ .uleb(4)
+ .append_repeated(4, 4)
+ .get_contents()
+ .unwrap();
+ let section = EhFrame::new(&section, LittleEndian);
+ let input = &mut section.section().clone();
+ let augmentation = &mut EndianSlice::new(b"zZ", LittleEndian);
+ assert_eq!(
+ Augmentation::parse(augmentation, &bases, address_size, &section, input),
+ Err(Error::UnknownAugmentation)
+ );
+ }
+
+ #[test]
+ #[allow(non_snake_case)]
+ fn test_augmentation_parse_L() {
+ let bases = Default::default();
+ let address_size = 8;
+ let rest = [9, 8, 7, 6, 5, 4, 3, 2, 1];
+
+ let section = Section::with_endian(Endian::Little)
+ .uleb(1)
+ .D8(constants::DW_EH_PE_uleb128.0)
+ .append_bytes(&rest)
+ .get_contents()
+ .unwrap();
+ let section = EhFrame::new(&section, LittleEndian);
+ let input = &mut section.section().clone();
+ let aug_str = &mut EndianSlice::new(b"zL", LittleEndian);
+
+ let mut augmentation = Augmentation::default();
+ augmentation.lsda = Some(constants::DW_EH_PE_uleb128);
+
+ assert_eq!(
+ Augmentation::parse(aug_str, &bases, address_size, &section, input),
+ Ok(augmentation)
+ );
+ assert_eq!(*input, EndianSlice::new(&rest, LittleEndian));
+ }
+
+ #[test]
+ #[allow(non_snake_case)]
+ fn test_augmentation_parse_P() {
+ let bases = Default::default();
+ let address_size = 8;
+ let rest = [9, 8, 7, 6, 5, 4, 3, 2, 1];
+
+ let section = Section::with_endian(Endian::Little)
+ .uleb(9)
+ .D8(constants::DW_EH_PE_udata8.0)
+ .L64(0xf00d_f00d)
+ .append_bytes(&rest)
+ .get_contents()
+ .unwrap();
+ let section = EhFrame::new(&section, LittleEndian);
+ let input = &mut section.section().clone();
+ let aug_str = &mut EndianSlice::new(b"zP", LittleEndian);
+
+ let mut augmentation = Augmentation::default();
+ augmentation.personality = Some((constants::DW_EH_PE_udata8, Pointer::Direct(0xf00d_f00d)));
+
+ assert_eq!(
+ Augmentation::parse(aug_str, &bases, address_size, &section, input),
+ Ok(augmentation)
+ );
+ assert_eq!(*input, EndianSlice::new(&rest, LittleEndian));
+ }
+
+ #[test]
+ #[allow(non_snake_case)]
+ fn test_augmentation_parse_R() {
+ let bases = Default::default();
+ let address_size = 8;
+ let rest = [9, 8, 7, 6, 5, 4, 3, 2, 1];
+
+ let section = Section::with_endian(Endian::Little)
+ .uleb(1)
+ .D8(constants::DW_EH_PE_udata4.0)
+ .append_bytes(&rest)
+ .get_contents()
+ .unwrap();
+ let section = EhFrame::new(&section, LittleEndian);
+ let input = &mut section.section().clone();
+ let aug_str = &mut EndianSlice::new(b"zR", LittleEndian);
+
+ let mut augmentation = Augmentation::default();
+ augmentation.fde_address_encoding = Some(constants::DW_EH_PE_udata4);
+
+ assert_eq!(
+ Augmentation::parse(aug_str, &bases, address_size, &section, input),
+ Ok(augmentation)
+ );
+ assert_eq!(*input, EndianSlice::new(&rest, LittleEndian));
+ }
+
+ #[test]
+ #[allow(non_snake_case)]
+ fn test_augmentation_parse_S() {
+ let bases = Default::default();
+ let address_size = 8;
+ let rest = [9, 8, 7, 6, 5, 4, 3, 2, 1];
+
+ let section = Section::with_endian(Endian::Little)
+ .uleb(0)
+ .append_bytes(&rest)
+ .get_contents()
+ .unwrap();
+ let section = EhFrame::new(&section, LittleEndian);
+ let input = &mut section.section().clone();
+ let aug_str = &mut EndianSlice::new(b"zS", LittleEndian);
+
+ let mut augmentation = Augmentation::default();
+ augmentation.is_signal_trampoline = true;
+
+ assert_eq!(
+ Augmentation::parse(aug_str, &bases, address_size, &section, input),
+ Ok(augmentation)
+ );
+ assert_eq!(*input, EndianSlice::new(&rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_augmentation_parse_all() {
+ let bases = Default::default();
+ let address_size = 8;
+ let rest = [9, 8, 7, 6, 5, 4, 3, 2, 1];
+
+ let section = Section::with_endian(Endian::Little)
+ .uleb(1 + 9 + 1)
+ // L
+ .D8(constants::DW_EH_PE_uleb128.0)
+ // P
+ .D8(constants::DW_EH_PE_udata8.0)
+ .L64(0x1bad_f00d)
+ // R
+ .D8(constants::DW_EH_PE_uleb128.0)
+ .append_bytes(&rest)
+ .get_contents()
+ .unwrap();
+ let section = EhFrame::new(&section, LittleEndian);
+ let input = &mut section.section().clone();
+ let aug_str = &mut EndianSlice::new(b"zLPRS", LittleEndian);
+
+ let augmentation = Augmentation {
+ lsda: Some(constants::DW_EH_PE_uleb128),
+ personality: Some((constants::DW_EH_PE_udata8, Pointer::Direct(0x1bad_f00d))),
+ fde_address_encoding: Some(constants::DW_EH_PE_uleb128),
+ is_signal_trampoline: true,
+ };
+
+ assert_eq!(
+ Augmentation::parse(aug_str, &bases, address_size, &section, input),
+ Ok(augmentation)
+ );
+ assert_eq!(*input, EndianSlice::new(&rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_eh_frame_fde_no_augmentation() {
+ let instrs = [1, 2, 3, 4];
+ let cie_offset = 1;
+
+ let mut cie = make_test_cie();
+ cie.format = Format::Dwarf32;
+ cie.version = 1;
+
+ let mut fde = FrameDescriptionEntry {
+ offset: 0,
+ length: 0,
+ format: Format::Dwarf32,
+ cie: cie.clone(),
+ initial_segment: 0,
+ initial_address: 0xfeed_face,
+ address_range: 9000,
+ augmentation: None,
+ instructions: EndianSlice::new(&instrs, LittleEndian),
+ };
+
+ let rest = [1, 2, 3, 4];
+
+ let kind = eh_frame_le();
+ let section = Section::with_endian(kind.endian())
+ .fde(kind, cie_offset, &mut fde)
+ .append_bytes(&rest)
+ .get_contents()
+ .unwrap();
+ let section = kind.section(&section);
+ let input = &mut section.section().clone();
+
+ let result = parse_fde(section, input, |_, _, _| Ok(cie.clone()));
+ assert_eq!(result, Ok(fde));
+ assert_eq!(*input, EndianSlice::new(&rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_eh_frame_fde_empty_augmentation() {
+ let instrs = [1, 2, 3, 4];
+ let cie_offset = 1;
+
+ let mut cie = make_test_cie();
+ cie.format = Format::Dwarf32;
+ cie.version = 1;
+ cie.augmentation = Some(Augmentation::default());
+
+ let mut fde = FrameDescriptionEntry {
+ offset: 0,
+ length: 0,
+ format: Format::Dwarf32,
+ cie: cie.clone(),
+ initial_segment: 0,
+ initial_address: 0xfeed_face,
+ address_range: 9000,
+ augmentation: Some(AugmentationData::default()),
+ instructions: EndianSlice::new(&instrs, LittleEndian),
+ };
+
+ let rest = [1, 2, 3, 4];
+
+ let kind = eh_frame_le();
+ let section = Section::with_endian(kind.endian())
+ .fde(kind, cie_offset, &mut fde)
+ .append_bytes(&rest)
+ .get_contents()
+ .unwrap();
+ let section = kind.section(&section);
+ let input = &mut section.section().clone();
+
+ let result = parse_fde(section, input, |_, _, _| Ok(cie.clone()));
+ assert_eq!(result, Ok(fde));
+ assert_eq!(*input, EndianSlice::new(&rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_eh_frame_fde_lsda_augmentation() {
+ let instrs = [1, 2, 3, 4];
+ let cie_offset = 1;
+
+ let mut cie = make_test_cie();
+ cie.format = Format::Dwarf32;
+ cie.version = 1;
+ cie.augmentation = Some(Augmentation::default());
+ cie.augmentation.as_mut().unwrap().lsda = Some(constants::DW_EH_PE_absptr);
+
+ let mut fde = FrameDescriptionEntry {
+ offset: 0,
+ length: 0,
+ format: Format::Dwarf32,
+ cie: cie.clone(),
+ initial_segment: 0,
+ initial_address: 0xfeed_face,
+ address_range: 9000,
+ augmentation: Some(AugmentationData {
+ lsda: Some(Pointer::Direct(0x1122_3344)),
+ }),
+ instructions: EndianSlice::new(&instrs, LittleEndian),
+ };
+
+ let rest = [1, 2, 3, 4];
+
+ let kind = eh_frame_le();
+ let section = Section::with_endian(kind.endian())
+ .fde(kind, cie_offset, &mut fde)
+ .append_bytes(&rest)
+ .get_contents()
+ .unwrap();
+ let section = kind.section(&section);
+ let input = &mut section.section().clone();
+
+ let result = parse_fde(section, input, |_, _, _| Ok(cie.clone()));
+ assert_eq!(result, Ok(fde));
+ assert_eq!(*input, EndianSlice::new(&rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_eh_frame_fde_lsda_function_relative() {
+ let instrs = [1, 2, 3, 4];
+ let cie_offset = 1;
+
+ let mut cie = make_test_cie();
+ cie.format = Format::Dwarf32;
+ cie.version = 1;
+ cie.augmentation = Some(Augmentation::default());
+ cie.augmentation.as_mut().unwrap().lsda = Some(constants::DwEhPe(
+ constants::DW_EH_PE_funcrel.0 | constants::DW_EH_PE_absptr.0,
+ ));
+
+ let mut fde = FrameDescriptionEntry {
+ offset: 0,
+ length: 0,
+ format: Format::Dwarf32,
+ cie: cie.clone(),
+ initial_segment: 0,
+ initial_address: 0xfeed_face,
+ address_range: 9000,
+ augmentation: Some(AugmentationData {
+ lsda: Some(Pointer::Direct(0xbeef)),
+ }),
+ instructions: EndianSlice::new(&instrs, LittleEndian),
+ };
+
+ let rest = [1, 2, 3, 4];
+
+ let kind = eh_frame_le();
+ let section = Section::with_endian(kind.endian())
+ .append_repeated(10, 10)
+ .fde(kind, cie_offset, &mut fde)
+ .append_bytes(&rest)
+ .get_contents()
+ .unwrap();
+ let section = kind.section(&section);
+ let input = &mut section.section().range_from(10..);
+
+ // Adjust the expected FDE's LSDA to the resolved, function-relative address.
+ fde.augmentation.as_mut().unwrap().lsda = Some(Pointer::Direct(0xfeed_face + 0xbeef));
+
+ let result = parse_fde(section, input, |_, _, _| Ok(cie.clone()));
+ assert_eq!(result, Ok(fde));
+ assert_eq!(*input, EndianSlice::new(&rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_eh_frame_cie_personality_function_relative_bad_context() {
+ let instrs = [1, 2, 3, 4];
+
+ let length = Label::new();
+ let start = Label::new();
+ let end = Label::new();
+
+ let aug_len = Label::new();
+ let aug_start = Label::new();
+ let aug_end = Label::new();
+
+ let section = Section::with_endian(Endian::Little)
+ // Length
+ .L32(&length)
+ .mark(&start)
+ // CIE ID
+ .L32(0)
+ // Version
+ .D8(1)
+ // Augmentation
+ .append_bytes(b"zP\0")
+ // Code alignment factor
+ .uleb(1)
+ // Data alignment factor
+ .sleb(1)
+ // Return address register
+ .uleb(1)
+ // Augmentation data length. This is a uleb, but we rely on the value
+ // being less than 2^7 and therefore a valid uleb (we can't use Label
+ // with uleb).
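+ // For example, ULEB128(5) is the single byte 0x05, whereas 128 = 2^7
+ // would need two bytes (0x80 0x01) and could not be written with D8.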
+ .D8(&aug_len)
+ .mark(&aug_start)
+ // Augmentation data. Personality encoding and then encoded pointer.
+ .D8(constants::DW_EH_PE_funcrel.0 | constants::DW_EH_PE_uleb128.0)
+ .uleb(1)
+ .mark(&aug_end)
+ // Initial instructions
+ .append_bytes(&instrs)
+ .mark(&end);
+
+ length.set_const((&end - &start) as u64);
+ aug_len.set_const((&aug_end - &aug_start) as u64);
+
+ let section = section.get_contents().unwrap();
+ let section = EhFrame::new(&section, LittleEndian);
+
+ let bases = BaseAddresses::default();
+ let mut iter = section.entries(&bases);
+ assert_eq!(iter.next(), Err(Error::FuncRelativePointerInBadContext));
+ }
+
+ #[test]
+ fn register_rule_map_eq() {
+ // Different order, but still equal.
+ let map1: RegisterRuleMap<EndianSlice<LittleEndian>> = [
+ (Register(0), RegisterRule::SameValue),
+ (Register(3), RegisterRule::Offset(1)),
+ ]
+ .iter()
+ .collect();
+ let map2: RegisterRuleMap<EndianSlice<LittleEndian>> = [
+ (Register(3), RegisterRule::Offset(1)),
+ (Register(0), RegisterRule::SameValue),
+ ]
+ .iter()
+ .collect();
+ assert_eq!(map1, map2);
+ assert_eq!(map2, map1);
+
+ // Not equal.
+ let map3: RegisterRuleMap<EndianSlice<LittleEndian>> = [
+ (Register(0), RegisterRule::SameValue),
+ (Register(2), RegisterRule::Offset(1)),
+ ]
+ .iter()
+ .collect();
+ let map4: RegisterRuleMap<EndianSlice<LittleEndian>> = [
+ (Register(3), RegisterRule::Offset(1)),
+ (Register(0), RegisterRule::SameValue),
+ ]
+ .iter()
+ .collect();
+ assert!(map3 != map4);
+ assert!(map4 != map3);
+
+ // One has undefined explicitly set; the other implicitly has undefined.
+ let mut map5 = RegisterRuleMap::<EndianSlice<LittleEndian>>::default();
+ map5.set(Register(0), RegisterRule::SameValue).unwrap();
+ map5.set(Register(0), RegisterRule::Undefined).unwrap();
+ let map6 = RegisterRuleMap::<EndianSlice<LittleEndian>>::default();
+ assert_eq!(map5, map6);
+ assert_eq!(map6, map5);
+ }
+
+ #[test]
+ fn iter_register_rules() {
+ let mut row = UnwindTableRow::<EndianSlice<LittleEndian>>::default();
+ row.registers = [
+ (Register(0), RegisterRule::SameValue),
+ (Register(1), RegisterRule::Offset(1)),
+ (Register(2), RegisterRule::ValOffset(2)),
+ ]
+ .iter()
+ .collect();
+
+ let mut found0 = false;
+ let mut found1 = false;
+ let mut found2 = false;
+
+ for &(register, ref rule) in row.registers() {
+ match register.0 {
+ 0 => {
+ assert_eq!(found0, false);
+ found0 = true;
+ assert_eq!(*rule, RegisterRule::SameValue);
+ }
+ 1 => {
+ assert_eq!(found1, false);
+ found1 = true;
+ assert_eq!(*rule, RegisterRule::Offset(1));
+ }
+ 2 => {
+ assert_eq!(found2, false);
+ found2 = true;
+ assert_eq!(*rule, RegisterRule::ValOffset(2));
+ }
+ x => panic!("Unexpected register rule: ({}, {:?})", x, rule),
+ }
+ }
+
+ assert_eq!(found0, true);
+ assert_eq!(found1, true);
+ assert_eq!(found2, true);
+ }
+
+ #[test]
+ #[cfg(target_pointer_width = "64")]
+ fn size_of_unwind_ctx() {
+ use core::mem;
+ let size = mem::size_of::<UnwindContext<EndianSlice<NativeEndian>>>();
+ let max_size = 30968;
+ if size > max_size {
+ assert_eq!(size, max_size);
+ }
+ }
+
+ #[test]
+ #[cfg(target_pointer_width = "64")]
+ fn size_of_register_rule_map() {
+ use core::mem;
+ let size = mem::size_of::<RegisterRuleMap<EndianSlice<NativeEndian>>>();
+ let max_size = 6152;
+ if size > max_size {
+ assert_eq!(size, max_size);
+ }
+ }
+
+ #[test]
+ fn test_parse_pointer_encoding_ok() {
+ use crate::endianity::NativeEndian;
+ let expected =
+ constants::DwEhPe(constants::DW_EH_PE_uleb128.0 | constants::DW_EH_PE_pcrel.0);
+ let input = [expected.0, 1, 2, 3, 4];
+ let input = &mut EndianSlice::new(&input, NativeEndian);
+ assert_eq!(parse_pointer_encoding(input), Ok(expected));
+ assert_eq!(*input, EndianSlice::new(&[1, 2, 3, 4], NativeEndian));
+ }
+
+ #[test]
+ fn test_parse_pointer_encoding_bad_encoding() {
+ use crate::endianity::NativeEndian;
+ let expected =
+ constants::DwEhPe((constants::DW_EH_PE_sdata8.0 + 1) | constants::DW_EH_PE_pcrel.0);
+ let input = [expected.0, 1, 2, 3, 4];
+ let input = &mut EndianSlice::new(&input, NativeEndian);
+ assert_eq!(
+ Err(Error::UnknownPointerEncoding),
+ parse_pointer_encoding(input)
+ );
+ }
+
+ #[test]
+ fn test_parse_encoded_pointer_absptr() {
+ let encoding = constants::DW_EH_PE_absptr;
+ let expected_rest = [1, 2, 3, 4];
+
+ let input = Section::with_endian(Endian::Little)
+ .L32(0xf00d_f00d)
+ .append_bytes(&expected_rest);
+ let input = input.get_contents().unwrap();
+ let input = EndianSlice::new(&input, LittleEndian);
+ let mut rest = input;
+
+ let parameters = PointerEncodingParameters {
+ bases: &SectionBaseAddresses::default(),
+ func_base: None,
+ address_size: 4,
+ section: &input,
+ };
+ assert_eq!(
+ parse_encoded_pointer(encoding, &parameters, &mut rest),
+ Ok(Pointer::Direct(0xf00d_f00d))
+ );
+ assert_eq!(rest, EndianSlice::new(&expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_encoded_pointer_pcrel() {
+ let encoding = constants::DW_EH_PE_pcrel;
+ let expected_rest = [1, 2, 3, 4];
+
+ let input = Section::with_endian(Endian::Little)
+ .append_repeated(0, 0x10)
+ .L32(0x1)
+ .append_bytes(&expected_rest);
+ let input = input.get_contents().unwrap();
+ let input = EndianSlice::new(&input, LittleEndian);
+ let mut rest = input.range_from(0x10..);
+
+ let parameters = PointerEncodingParameters {
+ bases: &BaseAddresses::default().set_eh_frame(0x100).eh_frame,
+ func_base: None,
+ address_size: 4,
+ section: &input,
+ };
+ assert_eq!(
+ parse_encoded_pointer(encoding, &parameters, &mut rest),
+ Ok(Pointer::Direct(0x111))
+ );
+ assert_eq!(rest, EndianSlice::new(&expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_encoded_pointer_pcrel_undefined() {
+ let encoding = constants::DW_EH_PE_pcrel;
+
+ let input = Section::with_endian(Endian::Little).L32(0x1);
+ let input = input.get_contents().unwrap();
+ let input = EndianSlice::new(&input, LittleEndian);
+ let mut rest = input;
+
+ let parameters = PointerEncodingParameters {
+ bases: &SectionBaseAddresses::default(),
+ func_base: None,
+ address_size: 4,
+ section: &input,
+ };
+ assert_eq!(
+ parse_encoded_pointer(encoding, &parameters, &mut rest),
+ Err(Error::PcRelativePointerButSectionBaseIsUndefined)
+ );
+ }
+
+ #[test]
+ fn test_parse_encoded_pointer_textrel() {
+ let encoding = constants::DW_EH_PE_textrel;
+ let expected_rest = [1, 2, 3, 4];
+
+ let input = Section::with_endian(Endian::Little)
+ .L32(0x1)
+ .append_bytes(&expected_rest);
+ let input = input.get_contents().unwrap();
+ let input = EndianSlice::new(&input, LittleEndian);
+ let mut rest = input;
+
+ let parameters = PointerEncodingParameters {
+ bases: &BaseAddresses::default().set_text(0x10).eh_frame,
+ func_base: None,
+ address_size: 4,
+ section: &input,
+ };
+ assert_eq!(
+ parse_encoded_pointer(encoding, &parameters, &mut rest),
+ Ok(Pointer::Direct(0x11))
+ );
+ assert_eq!(rest, EndianSlice::new(&expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_encoded_pointer_textrel_undefined() {
+ let encoding = constants::DW_EH_PE_textrel;
+
+ let input = Section::with_endian(Endian::Little).L32(0x1);
+ let input = input.get_contents().unwrap();
+ let input = EndianSlice::new(&input, LittleEndian);
+ let mut rest = input;
+
+ let parameters = PointerEncodingParameters {
+ bases: &SectionBaseAddresses::default(),
+ func_base: None,
+ address_size: 4,
+ section: &input,
+ };
+ assert_eq!(
+ parse_encoded_pointer(encoding, &parameters, &mut rest),
+ Err(Error::TextRelativePointerButTextBaseIsUndefined)
+ );
+ }
+
+ #[test]
+ fn test_parse_encoded_pointer_datarel() {
+ let encoding = constants::DW_EH_PE_datarel;
+ let expected_rest = [1, 2, 3, 4];
+
+ let input = Section::with_endian(Endian::Little)
+ .L32(0x1)
+ .append_bytes(&expected_rest);
+ let input = input.get_contents().unwrap();
+ let input = EndianSlice::new(&input, LittleEndian);
+ let mut rest = input;
+
+ let parameters = PointerEncodingParameters {
+ bases: &BaseAddresses::default().set_got(0x10).eh_frame,
+ func_base: None,
+ address_size: 4,
+ section: &input,
+ };
+ assert_eq!(
+ parse_encoded_pointer(encoding, &parameters, &mut rest),
+ Ok(Pointer::Direct(0x11))
+ );
+ assert_eq!(rest, EndianSlice::new(&expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_encoded_pointer_datarel_undefined() {
+ let encoding = constants::DW_EH_PE_datarel;
+
+ let input = Section::with_endian(Endian::Little).L32(0x1);
+ let input = input.get_contents().unwrap();
+ let input = EndianSlice::new(&input, LittleEndian);
+ let mut rest = input;
+
+ let parameters = PointerEncodingParameters {
+ bases: &SectionBaseAddresses::default(),
+ func_base: None,
+ address_size: 4,
+ section: &input,
+ };
+ assert_eq!(
+ parse_encoded_pointer(encoding, &parameters, &mut rest),
+ Err(Error::DataRelativePointerButDataBaseIsUndefined)
+ );
+ }
+
+ #[test]
+ fn test_parse_encoded_pointer_funcrel() {
+ let encoding = constants::DW_EH_PE_funcrel;
+ let expected_rest = [1, 2, 3, 4];
+
+ let input = Section::with_endian(Endian::Little)
+ .L32(0x1)
+ .append_bytes(&expected_rest);
+ let input = input.get_contents().unwrap();
+ let input = EndianSlice::new(&input, LittleEndian);
+ let mut rest = input;
+
+ let parameters = PointerEncodingParameters {
+ bases: &SectionBaseAddresses::default(),
+ func_base: Some(0x10),
+ address_size: 4,
+ section: &input,
+ };
+ assert_eq!(
+ parse_encoded_pointer(encoding, &parameters, &mut rest),
+ Ok(Pointer::Direct(0x11))
+ );
+ assert_eq!(rest, EndianSlice::new(&expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_encoded_pointer_funcrel_undefined() {
+ let encoding = constants::DW_EH_PE_funcrel;
+
+ let input = Section::with_endian(Endian::Little).L32(0x1);
+ let input = input.get_contents().unwrap();
+ let input = EndianSlice::new(&input, LittleEndian);
+ let mut rest = input;
+
+ let parameters = PointerEncodingParameters {
+ bases: &SectionBaseAddresses::default(),
+ func_base: None,
+ address_size: 4,
+ section: &input,
+ };
+ assert_eq!(
+ parse_encoded_pointer(encoding, &parameters, &mut rest),
+ Err(Error::FuncRelativePointerInBadContext)
+ );
+ }
+
+ #[test]
+ fn test_parse_encoded_pointer_uleb128() {
+ let encoding =
+ constants::DwEhPe(constants::DW_EH_PE_absptr.0 | constants::DW_EH_PE_uleb128.0);
+ let expected_rest = [1, 2, 3, 4];
+
+ let input = Section::with_endian(Endian::Little)
+ .uleb(0x12_3456)
+ .append_bytes(&expected_rest);
+ let input = input.get_contents().unwrap();
+ let input = EndianSlice::new(&input, LittleEndian);
+ let mut rest = input;
+
+ let parameters = PointerEncodingParameters {
+ bases: &SectionBaseAddresses::default(),
+ func_base: None,
+ address_size: 4,
+ section: &input,
+ };
+ assert_eq!(
+ parse_encoded_pointer(encoding, &parameters, &mut rest),
+ Ok(Pointer::Direct(0x12_3456))
+ );
+ assert_eq!(rest, EndianSlice::new(&expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_encoded_pointer_udata2() {
+ let encoding =
+ constants::DwEhPe(constants::DW_EH_PE_absptr.0 | constants::DW_EH_PE_udata2.0);
+ let expected_rest = [1, 2, 3, 4];
+
+ let input = Section::with_endian(Endian::Little)
+ .L16(0x1234)
+ .append_bytes(&expected_rest);
+ let input = input.get_contents().unwrap();
+ let input = EndianSlice::new(&input, LittleEndian);
+ let mut rest = input;
+
+ let parameters = PointerEncodingParameters {
+ bases: &SectionBaseAddresses::default(),
+ func_base: None,
+ address_size: 4,
+ section: &input,
+ };
+ assert_eq!(
+ parse_encoded_pointer(encoding, &parameters, &mut rest),
+ Ok(Pointer::Direct(0x1234))
+ );
+ assert_eq!(rest, EndianSlice::new(&expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_encoded_pointer_udata4() {
+ let encoding =
+ constants::DwEhPe(constants::DW_EH_PE_absptr.0 | constants::DW_EH_PE_udata4.0);
+ let expected_rest = [1, 2, 3, 4];
+
+ let input = Section::with_endian(Endian::Little)
+ .L32(0x1234_5678)
+ .append_bytes(&expected_rest);
+ let input = input.get_contents().unwrap();
+ let input = EndianSlice::new(&input, LittleEndian);
+ let mut rest = input;
+
+ let parameters = PointerEncodingParameters {
+ bases: &SectionBaseAddresses::default(),
+ func_base: None,
+ address_size: 4,
+ section: &input,
+ };
+ assert_eq!(
+ parse_encoded_pointer(encoding, &parameters, &mut rest),
+ Ok(Pointer::Direct(0x1234_5678))
+ );
+ assert_eq!(rest, EndianSlice::new(&expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_encoded_pointer_udata8() {
+ let encoding =
+ constants::DwEhPe(constants::DW_EH_PE_absptr.0 | constants::DW_EH_PE_udata8.0);
+ let expected_rest = [1, 2, 3, 4];
+
+ let input = Section::with_endian(Endian::Little)
+ .L64(0x1234_5678_1234_5678)
+ .append_bytes(&expected_rest);
+ let input = input.get_contents().unwrap();
+ let input = EndianSlice::new(&input, LittleEndian);
+ let mut rest = input;
+
+ let parameters = PointerEncodingParameters {
+ bases: &SectionBaseAddresses::default(),
+ func_base: None,
+ address_size: 4,
+ section: &input,
+ };
+ assert_eq!(
+ parse_encoded_pointer(encoding, &parameters, &mut rest),
+ Ok(Pointer::Direct(0x1234_5678_1234_5678))
+ );
+ assert_eq!(rest, EndianSlice::new(&expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_encoded_pointer_sleb128() {
+ let encoding =
+ constants::DwEhPe(constants::DW_EH_PE_textrel.0 | constants::DW_EH_PE_sleb128.0);
+ let expected_rest = [1, 2, 3, 4];
+
+ let input = Section::with_endian(Endian::Little)
+ .sleb(-0x1111)
+ .append_bytes(&expected_rest);
+ let input = input.get_contents().unwrap();
+ let input = EndianSlice::new(&input, LittleEndian);
+ let mut rest = input;
+
+ let parameters = PointerEncodingParameters {
+ bases: &BaseAddresses::default().set_text(0x1111_1111).eh_frame,
+ func_base: None,
+ address_size: 4,
+ section: &input,
+ };
+ assert_eq!(
+ parse_encoded_pointer(encoding, &parameters, &mut rest),
+ Ok(Pointer::Direct(0x1111_0000))
+ );
+ assert_eq!(rest, EndianSlice::new(&expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_encoded_pointer_sdata2() {
+ let encoding =
+ constants::DwEhPe(constants::DW_EH_PE_absptr.0 | constants::DW_EH_PE_sdata2.0);
+ let expected_rest = [1, 2, 3, 4];
+ let expected = 0x111 as i16;
+
+ let input = Section::with_endian(Endian::Little)
+ .L16(expected as u16)
+ .append_bytes(&expected_rest);
+ let input = input.get_contents().unwrap();
+ let input = EndianSlice::new(&input, LittleEndian);
+ let mut rest = input;
+
+ let parameters = PointerEncodingParameters {
+ bases: &SectionBaseAddresses::default(),
+ func_base: None,
+ address_size: 4,
+ section: &input,
+ };
+ assert_eq!(
+ parse_encoded_pointer(encoding, &parameters, &mut rest),
+ Ok(Pointer::Direct(expected as u64))
+ );
+ assert_eq!(rest, EndianSlice::new(&expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_encoded_pointer_sdata4() {
+ let encoding =
+ constants::DwEhPe(constants::DW_EH_PE_absptr.0 | constants::DW_EH_PE_sdata4.0);
+ let expected_rest = [1, 2, 3, 4];
+ let expected = 0x111_1111 as i32;
+
+ let input = Section::with_endian(Endian::Little)
+ .L32(expected as u32)
+ .append_bytes(&expected_rest);
+ let input = input.get_contents().unwrap();
+ let input = EndianSlice::new(&input, LittleEndian);
+ let mut rest = input;
+
+ let parameters = PointerEncodingParameters {
+ bases: &SectionBaseAddresses::default(),
+ func_base: None,
+ address_size: 4,
+ section: &input,
+ };
+ assert_eq!(
+ parse_encoded_pointer(encoding, &parameters, &mut rest),
+ Ok(Pointer::Direct(expected as u64))
+ );
+ assert_eq!(rest, EndianSlice::new(&expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_encoded_pointer_sdata8() {
+ let encoding =
+ constants::DwEhPe(constants::DW_EH_PE_absptr.0 | constants::DW_EH_PE_sdata8.0);
+ let expected_rest = [1, 2, 3, 4];
+ let expected = -0x11_1111_1222_2222 as i64;
+
+ let input = Section::with_endian(Endian::Little)
+ .L64(expected as u64)
+ .append_bytes(&expected_rest);
+ let input = input.get_contents().unwrap();
+ let input = EndianSlice::new(&input, LittleEndian);
+ let mut rest = input;
+
+ let parameters = PointerEncodingParameters {
+ bases: &SectionBaseAddresses::default(),
+ func_base: None,
+ address_size: 4,
+ section: &input,
+ };
+ assert_eq!(
+ parse_encoded_pointer(encoding, &parameters, &mut rest),
+ Ok(Pointer::Direct(expected as u64))
+ );
+ assert_eq!(rest, EndianSlice::new(&expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_encoded_pointer_omit() {
+ let encoding = constants::DW_EH_PE_omit;
+
+ let input = Section::with_endian(Endian::Little).L32(0x1);
+ let input = input.get_contents().unwrap();
+ let input = EndianSlice::new(&input, LittleEndian);
+ let mut rest = input;
+
+ let parameters = PointerEncodingParameters {
+ bases: &SectionBaseAddresses::default(),
+ func_base: None,
+ address_size: 4,
+ section: &input,
+ };
+ assert_eq!(
+ parse_encoded_pointer(encoding, &parameters, &mut rest),
+ Err(Error::CannotParseOmitPointerEncoding)
+ );
+ assert_eq!(rest, input);
+ }
+
+ #[test]
+ fn test_parse_encoded_pointer_bad_encoding() {
+ let encoding = constants::DwEhPe(constants::DW_EH_PE_sdata8.0 + 1);
+
+ let input = Section::with_endian(Endian::Little).L32(0x1);
+ let input = input.get_contents().unwrap();
+ let input = EndianSlice::new(&input, LittleEndian);
+ let mut rest = input;
+
+ let parameters = PointerEncodingParameters {
+ bases: &SectionBaseAddresses::default(),
+ func_base: None,
+ address_size: 4,
+ section: &input,
+ };
+ assert_eq!(
+ parse_encoded_pointer(encoding, &parameters, &mut rest),
+ Err(Error::UnknownPointerEncoding)
+ );
+ }
+
+ #[test]
+ fn test_parse_encoded_pointer_aligned() {
+ // FIXME: support this encoding!
+
+ let encoding = constants::DW_EH_PE_aligned;
+
+ let input = Section::with_endian(Endian::Little).L32(0x1);
+ let input = input.get_contents().unwrap();
+ let input = EndianSlice::new(&input, LittleEndian);
+ let mut rest = input;
+
+ let parameters = PointerEncodingParameters {
+ bases: &SectionBaseAddresses::default(),
+ func_base: None,
+ address_size: 4,
+ section: &input,
+ };
+ assert_eq!(
+ parse_encoded_pointer(encoding, &parameters, &mut rest),
+ Err(Error::UnsupportedPointerEncoding)
+ );
+ }
+
+ #[test]
+ fn test_parse_encoded_pointer_indirect() {
+ let expected_rest = [1, 2, 3, 4];
+ let encoding = constants::DW_EH_PE_indirect;
+
+ let input = Section::with_endian(Endian::Little)
+ .L32(0x1234_5678)
+ .append_bytes(&expected_rest);
+ let input = input.get_contents().unwrap();
+ let input = EndianSlice::new(&input, LittleEndian);
+ let mut rest = input;
+
+ let parameters = PointerEncodingParameters {
+ bases: &SectionBaseAddresses::default(),
+ func_base: None,
+ address_size: 4,
+ section: &input,
+ };
+ assert_eq!(
+ parse_encoded_pointer(encoding, &parameters, &mut rest),
+ Ok(Pointer::Indirect(0x1234_5678))
+ );
+ assert_eq!(rest, EndianSlice::new(&expected_rest, LittleEndian));
+ }
+}
diff --git a/vendor/gimli-0.26.2/src/read/dwarf.rs b/vendor/gimli-0.26.2/src/read/dwarf.rs
new file mode 100644
index 000000000..b63526941
--- /dev/null
+++ b/vendor/gimli-0.26.2/src/read/dwarf.rs
@@ -0,0 +1,1143 @@
+use alloc::string::String;
+use alloc::sync::Arc;
+
+use crate::common::{
+ DebugAddrBase, DebugAddrIndex, DebugInfoOffset, DebugLineStrOffset, DebugLocListsBase,
+ DebugLocListsIndex, DebugRngListsBase, DebugRngListsIndex, DebugStrOffset, DebugStrOffsetsBase,
+ DebugStrOffsetsIndex, DebugTypeSignature, DebugTypesOffset, DwarfFileType, DwoId, Encoding,
+ LocationListsOffset, RangeListsOffset, RawRangeListsOffset, SectionId, UnitSectionOffset,
+};
+use crate::constants;
+use crate::read::{
+ Abbreviations, AttributeValue, DebugAbbrev, DebugAddr, DebugAranges, DebugCuIndex, DebugInfo,
+ DebugInfoUnitHeadersIter, DebugLine, DebugLineStr, DebugLoc, DebugLocLists, DebugRngLists,
+ DebugStr, DebugStrOffsets, DebugTuIndex, DebugTypes, DebugTypesUnitHeadersIter,
+ DebuggingInformationEntry, EntriesCursor, EntriesRaw, EntriesTree, Error,
+ IncompleteLineProgram, LocListIter, LocationLists, Range, RangeLists, RawLocListIter,
+ RawRngListIter, Reader, ReaderOffset, ReaderOffsetId, Result, RngListIter, Section, UnitHeader,
+ UnitIndex, UnitIndexSectionIterator, UnitOffset, UnitType,
+};
+
+/// All of the commonly used DWARF sections, and other common information.
+#[derive(Debug, Default)]
+pub struct Dwarf<R> {
+ /// The `.debug_abbrev` section.
+ pub debug_abbrev: DebugAbbrev<R>,
+
+ /// The `.debug_addr` section.
+ pub debug_addr: DebugAddr<R>,
+
+ /// The `.debug_aranges` section.
+ pub debug_aranges: DebugAranges<R>,
+
+ /// The `.debug_info` section.
+ pub debug_info: DebugInfo<R>,
+
+ /// The `.debug_line` section.
+ pub debug_line: DebugLine<R>,
+
+ /// The `.debug_line_str` section.
+ pub debug_line_str: DebugLineStr<R>,
+
+ /// The `.debug_str` section.
+ pub debug_str: DebugStr<R>,
+
+ /// The `.debug_str_offsets` section.
+ pub debug_str_offsets: DebugStrOffsets<R>,
+
+ /// The `.debug_types` section.
+ pub debug_types: DebugTypes<R>,
+
+ /// The location lists in the `.debug_loc` and `.debug_loclists` sections.
+ pub locations: LocationLists<R>,
+
+ /// The range lists in the `.debug_ranges` and `.debug_rnglists` sections.
+ pub ranges: RangeLists<R>,
+
+ /// The type of this file.
+ pub file_type: DwarfFileType,
+
+ /// The DWARF sections for a supplementary object file.
+ pub sup: Option<Arc<Dwarf<R>>>,
+}
+
+impl<T> Dwarf<T> {
+ /// Try to load the DWARF sections using the given loader function.
+ ///
+ /// `section` loads a DWARF section from the object file.
+ /// It should return an empty section if the section does not exist.
+ ///
+ /// `section` may either directly return a `Reader` instance (such as
+ /// `EndianSlice`), or it may return some other type and then convert
+ /// that type into a `Reader` using `Dwarf::borrow`.
+ ///
+ /// After loading, the user should set the `file_type` field and
+ /// call `load_sup` if required.
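+ ///
+ /// ## Example Usage
+ ///
+ /// A minimal sketch of a loader closure, assuming a hypothetical
+ /// `get_section_bytes` helper that looks a section up by name and
+ /// returns `None` when the object file does not contain it:
+ ///
+ /// ```rust,ignore
+ /// let dwarf: gimli::Dwarf<Vec<u8>> = gimli::Dwarf::load(|id: gimli::SectionId| {
+ ///     // Fall back to an empty section when the object file lacks it.
+ ///     Ok::<_, gimli::Error>(get_section_bytes(id.name()).unwrap_or_default())
+ /// })?;
+ /// ```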
+ pub fn load<F, E>(mut section: F) -> core::result::Result<Self, E>
+ where
+ F: FnMut(SectionId) -> core::result::Result<T, E>,
+ {
+ // Section types are inferred.
+ let debug_loc = Section::load(&mut section)?;
+ let debug_loclists = Section::load(&mut section)?;
+ let debug_ranges = Section::load(&mut section)?;
+ let debug_rnglists = Section::load(&mut section)?;
+ Ok(Dwarf {
+ debug_abbrev: Section::load(&mut section)?,
+ debug_addr: Section::load(&mut section)?,
+ debug_aranges: Section::load(&mut section)?,
+ debug_info: Section::load(&mut section)?,
+ debug_line: Section::load(&mut section)?,
+ debug_line_str: Section::load(&mut section)?,
+ debug_str: Section::load(&mut section)?,
+ debug_str_offsets: Section::load(&mut section)?,
+ debug_types: Section::load(&mut section)?,
+ locations: LocationLists::new(debug_loc, debug_loclists),
+ ranges: RangeLists::new(debug_ranges, debug_rnglists),
+ file_type: DwarfFileType::Main,
+ sup: None,
+ })
+ }
+
+ /// Load the DWARF sections from the supplementary object file.
+ ///
+ /// `section` operates the same as for `load`.
+ ///
+ /// Sets `self.sup`, replacing any previous value.
+ pub fn load_sup<F, E>(&mut self, section: F) -> core::result::Result<(), E>
+ where
+ F: FnMut(SectionId) -> core::result::Result<T, E>,
+ {
+ self.sup = Some(Arc::new(Self::load(section)?));
+ Ok(())
+ }
+
+ /// Create a `Dwarf` structure that references the data in `self`.
+ ///
+ /// This is useful when `R` implements `Reader` but `T` does not.
+ ///
+ /// ## Example Usage
+ ///
+ /// It can be useful to load DWARF sections into owned data structures,
+ /// such as `Vec`. However, we do not implement the `Reader` trait
+ /// for `Vec` because it would be very inefficient, even though this trait
+ /// is required for all of the methods that parse the DWARF data.
+ /// So we first load the DWARF sections into `Vec`s, and then use
+ /// `borrow` to create `Reader`s that reference the data.
+ ///
+ /// ```rust,no_run
+ /// # fn example() -> Result<(), gimli::Error> {
+ /// # let loader = |name| -> Result<_, gimli::Error> { unimplemented!() };
+ /// # let sup_loader = |name| -> Result<_, gimli::Error> { unimplemented!() };
+ /// // Read the DWARF sections into `Vec`s with whatever object loader you're using.
+ /// let mut owned_dwarf: gimli::Dwarf<Vec<u8>> = gimli::Dwarf::load(loader)?;
+ /// owned_dwarf.load_sup(sup_loader)?;
+ /// // Create references to the DWARF sections.
+ /// let dwarf = owned_dwarf.borrow(|section| {
+ /// gimli::EndianSlice::new(&section, gimli::LittleEndian)
+ /// });
+ /// # unreachable!()
+ /// # }
+ /// ```
+ pub fn borrow<'a, F, R>(&'a self, mut borrow: F) -> Dwarf<R>
+ where
+ F: FnMut(&'a T) -> R,
+ {
+ Dwarf {
+ debug_abbrev: self.debug_abbrev.borrow(&mut borrow),
+ debug_addr: self.debug_addr.borrow(&mut borrow),
+ debug_aranges: self.debug_aranges.borrow(&mut borrow),
+ debug_info: self.debug_info.borrow(&mut borrow),
+ debug_line: self.debug_line.borrow(&mut borrow),
+ debug_line_str: self.debug_line_str.borrow(&mut borrow),
+ debug_str: self.debug_str.borrow(&mut borrow),
+ debug_str_offsets: self.debug_str_offsets.borrow(&mut borrow),
+ debug_types: self.debug_types.borrow(&mut borrow),
+ locations: self.locations.borrow(&mut borrow),
+ ranges: self.ranges.borrow(&mut borrow),
+ file_type: self.file_type,
+ sup: self.sup().map(|sup| Arc::new(sup.borrow(borrow))),
+ }
+ }
+
+ /// Return a reference to the DWARF sections for the supplementary object file.
+ pub fn sup(&self) -> Option<&Dwarf<T>> {
+ self.sup.as_ref().map(Arc::as_ref)
+ }
+}
+
+impl<R: Reader> Dwarf<R> {
+ /// Iterate the unit headers in the `.debug_info` section.
+ ///
+ /// Can be [used with
+ /// `FallibleIterator`](./index.html#using-with-fallibleiterator).
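+ ///
+ /// A minimal sketch of walking every compilation unit, assuming `dwarf`
+ /// is a `Dwarf<R>` built as in `Dwarf::load`:
+ ///
+ /// ```rust,ignore
+ /// let mut headers = dwarf.units();
+ /// while let Some(header) = headers.next()? {
+ ///     let unit = dwarf.unit(header)?;
+ ///     // Walk `unit.header.entries(&unit.abbreviations)` from here.
+ /// }
+ /// ```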
+ #[inline]
+ pub fn units(&self) -> DebugInfoUnitHeadersIter<R> {
+ self.debug_info.units()
+ }
+
+ /// Construct a new `Unit` from the given unit header.
+ #[inline]
+ pub fn unit(&self, header: UnitHeader<R>) -> Result<Unit<R>> {
+ Unit::new(self, header)
+ }
+
+ /// Iterate the type-unit headers in the `.debug_types` section.
+ ///
+ /// Can be [used with
+ /// `FallibleIterator`](./index.html#using-with-fallibleiterator).
+ #[inline]
+ pub fn type_units(&self) -> DebugTypesUnitHeadersIter<R> {
+ self.debug_types.units()
+ }
+
+ /// Parse the abbreviations for a compilation unit.
+ // TODO: provide caching of abbreviations
+ #[inline]
+ pub fn abbreviations(&self, unit: &UnitHeader<R>) -> Result<Abbreviations> {
+ unit.abbreviations(&self.debug_abbrev)
+ }
+
+ /// Return the string offset at the given index.
+ #[inline]
+ pub fn string_offset(
+ &self,
+ unit: &Unit<R>,
+ index: DebugStrOffsetsIndex<R::Offset>,
+ ) -> Result<DebugStrOffset<R::Offset>> {
+ self.debug_str_offsets
+ .get_str_offset(unit.header.format(), unit.str_offsets_base, index)
+ }
+
+ /// Return the string at the given offset in `.debug_str`.
+ #[inline]
+ pub fn string(&self, offset: DebugStrOffset<R::Offset>) -> Result<R> {
+ self.debug_str.get_str(offset)
+ }
+
+ /// Return the string at the given offset in `.debug_line_str`.
+ #[inline]
+ pub fn line_string(&self, offset: DebugLineStrOffset<R::Offset>) -> Result<R> {
+ self.debug_line_str.get_str(offset)
+ }
+
+ /// Return an attribute value as a string slice.
+ ///
+ /// If the attribute value is one of:
+ ///
+ /// - an inline `DW_FORM_string` string
+ /// - a `DW_FORM_strp` reference to an offset into the `.debug_str` section
+ /// - a `DW_FORM_strp_sup` reference to an offset into a supplementary
+ /// object file
+ /// - a `DW_FORM_line_strp` reference to an offset into the `.debug_line_str`
+ /// section
+ /// - a `DW_FORM_strx` index into the `.debug_str_offsets` entries for the unit
+ ///
+ /// then return the attribute's string value. Returns an error if the attribute
+ /// value does not have a string form, or if a string form has an invalid value.
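+ ///
+ /// A minimal sketch, assuming `dwarf`, `unit`, and a DIE `entry` obtained
+ /// by iterating `unit.header.entries(&unit.abbreviations)`:
+ ///
+ /// ```rust,ignore
+ /// if let Some(attr) = entry.attr_value(gimli::DW_AT_name)? {
+ ///     let name = dwarf.attr_string(&unit, attr)?;
+ ///     println!("DW_AT_name = {}", name.to_string_lossy()?);
+ /// }
+ /// ```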
+ pub fn attr_string(&self, unit: &Unit<R>, attr: AttributeValue<R>) -> Result<R> {
+ match attr {
+ AttributeValue::String(string) => Ok(string),
+ AttributeValue::DebugStrRef(offset) => self.debug_str.get_str(offset),
+ AttributeValue::DebugStrRefSup(offset) => {
+ if let Some(sup) = self.sup() {
+ sup.debug_str.get_str(offset)
+ } else {
+ Err(Error::ExpectedStringAttributeValue)
+ }
+ }
+ AttributeValue::DebugLineStrRef(offset) => self.debug_line_str.get_str(offset),
+ AttributeValue::DebugStrOffsetsIndex(index) => {
+ let offset = self.debug_str_offsets.get_str_offset(
+ unit.header.format(),
+ unit.str_offsets_base,
+ index,
+ )?;
+ self.debug_str.get_str(offset)
+ }
+ _ => Err(Error::ExpectedStringAttributeValue),
+ }
+ }
+
+ /// Return the address at the given index.
+ pub fn address(&self, unit: &Unit<R>, index: DebugAddrIndex<R::Offset>) -> Result<u64> {
+ self.debug_addr
+ .get_address(unit.encoding().address_size, unit.addr_base, index)
+ }
+
+ /// Try to return an attribute value as an address.
+ ///
+ /// If the attribute value is one of:
+ ///
+ /// - a `DW_FORM_addr`
+ /// - a `DW_FORM_addrx` index into the `.debug_addr` entries for the unit
+ ///
+ /// then return the address.
+ /// Returns `None` for other forms.
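+ ///
+ /// A minimal sketch, assuming `attr` holds the value of a `DW_AT_low_pc`
+ /// attribute from `unit`:
+ ///
+ /// ```rust,ignore
+ /// if let Some(addr) = dwarf.attr_address(&unit, attr)? {
+ ///     println!("low_pc = 0x{:x}", addr);
+ /// }
+ /// ```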
+ pub fn attr_address(&self, unit: &Unit<R>, attr: AttributeValue<R>) -> Result<Option<u64>> {
+ match attr {
+ AttributeValue::Addr(addr) => Ok(Some(addr)),
+ AttributeValue::DebugAddrIndex(index) => self.address(unit, index).map(Some),
+ _ => Ok(None),
+ }
+ }
+
+ /// Return the range list offset for the given raw offset.
+ ///
+ /// This handles adding `DW_AT_GNU_ranges_base` if required.
+ pub fn ranges_offset_from_raw(
+ &self,
+ unit: &Unit<R>,
+ offset: RawRangeListsOffset<R::Offset>,
+ ) -> RangeListsOffset<R::Offset> {
+ if self.file_type == DwarfFileType::Dwo && unit.header.version() < 5 {
+ RangeListsOffset(offset.0.wrapping_add(unit.rnglists_base.0))
+ } else {
+ RangeListsOffset(offset.0)
+ }
+ }
+
+ /// Return the range list offset at the given index.
+ pub fn ranges_offset(
+ &self,
+ unit: &Unit<R>,
+ index: DebugRngListsIndex<R::Offset>,
+ ) -> Result<RangeListsOffset<R::Offset>> {
+ self.ranges
+ .get_offset(unit.encoding(), unit.rnglists_base, index)
+ }
+
+ /// Iterate over the `RangeListEntry`s starting at the given offset.
+ pub fn ranges(
+ &self,
+ unit: &Unit<R>,
+ offset: RangeListsOffset<R::Offset>,
+ ) -> Result<RngListIter<R>> {
+ self.ranges.ranges(
+ offset,
+ unit.encoding(),
+ unit.low_pc,
+ &self.debug_addr,
+ unit.addr_base,
+ )
+ }
+
+ /// Iterate over the `RawRngListEntry`s starting at the given offset.
+ pub fn raw_ranges(
+ &self,
+ unit: &Unit<R>,
+ offset: RangeListsOffset<R::Offset>,
+ ) -> Result<RawRngListIter<R>> {
+ self.ranges.raw_ranges(offset, unit.encoding())
+ }
+
+ /// Try to return an attribute value as a range list offset.
+ ///
+ /// If the attribute value is one of:
+ ///
+ /// - a `DW_FORM_sec_offset` reference to the `.debug_ranges` or `.debug_rnglists` sections
+ /// - a `DW_FORM_rnglistx` index into the `.debug_rnglists` entries for the unit
+ ///
+ /// then return the range list offset of the range list.
+ /// Returns `None` for other forms.
+ pub fn attr_ranges_offset(
+ &self,
+ unit: &Unit<R>,
+ attr: AttributeValue<R>,
+ ) -> Result<Option<RangeListsOffset<R::Offset>>> {
+ match attr {
+ AttributeValue::RangeListsRef(offset) => {
+ Ok(Some(self.ranges_offset_from_raw(unit, offset)))
+ }
+ AttributeValue::DebugRngListsIndex(index) => self.ranges_offset(unit, index).map(Some),
+ _ => Ok(None),
+ }
+ }
+
+ /// Try to return an attribute value as a range list entry iterator.
+ ///
+ /// If the attribute value is one of:
+ ///
+ /// - a `DW_FORM_sec_offset` reference to the `.debug_ranges` or `.debug_rnglists` sections
+ /// - a `DW_FORM_rnglistx` index into the `.debug_rnglists` entries for the unit
+ ///
+ /// then return an iterator over the entries in the range list.
+ /// Returns `None` for other forms.
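+ ///
+ /// A minimal sketch, assuming `attr` holds the value of a `DW_AT_ranges`
+ /// attribute from `unit`:
+ ///
+ /// ```rust,ignore
+ /// if let Some(mut ranges) = dwarf.attr_ranges(&unit, attr)? {
+ ///     while let Some(range) = ranges.next()? {
+ ///         println!("0x{:x}..0x{:x}", range.begin, range.end);
+ ///     }
+ /// }
+ /// ```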
+ pub fn attr_ranges(
+ &self,
+ unit: &Unit<R>,
+ attr: AttributeValue<R>,
+ ) -> Result<Option<RngListIter<R>>> {
+ match self.attr_ranges_offset(unit, attr)? {
+ Some(offset) => Ok(Some(self.ranges(unit, offset)?)),
+ None => Ok(None),
+ }
+ }
+
+ /// Return an iterator for the address ranges of a `DebuggingInformationEntry`.
+ ///
+ /// This uses `DW_AT_low_pc`, `DW_AT_high_pc` and `DW_AT_ranges`.
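+ ///
+ /// A minimal sketch, assuming `dwarf`, `unit`, and a DIE `entry` as above;
+ /// `RangeIter` is a fallible iterator, so it is drained with `next()?`:
+ ///
+ /// ```rust,ignore
+ /// let mut ranges = dwarf.die_ranges(&unit, entry)?;
+ /// while let Some(range) = ranges.next()? {
+ ///     println!("covers 0x{:x}..0x{:x}", range.begin, range.end);
+ /// }
+ /// ```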
+ pub fn die_ranges(
+ &self,
+ unit: &Unit<R>,
+ entry: &DebuggingInformationEntry<R>,
+ ) -> Result<RangeIter<R>> {
+ let mut low_pc = None;
+ let mut high_pc = None;
+ let mut size = None;
+ let mut attrs = entry.attrs();
+ while let Some(attr) = attrs.next()? {
+ match attr.name() {
+ constants::DW_AT_low_pc => {
+ low_pc = Some(
+ self.attr_address(unit, attr.value())?
+ .ok_or(Error::UnsupportedAttributeForm)?,
+ );
+ }
+ constants::DW_AT_high_pc => match attr.value() {
+ AttributeValue::Udata(val) => size = Some(val),
+ attr => {
+ high_pc = Some(
+ self.attr_address(unit, attr)?
+ .ok_or(Error::UnsupportedAttributeForm)?,
+ );
+ }
+ },
+ constants::DW_AT_ranges => {
+ if let Some(list) = self.attr_ranges(unit, attr.value())? {
+ return Ok(RangeIter(RangeIterInner::List(list)));
+ }
+ }
+ _ => {}
+ }
+ }
+ let range = low_pc.and_then(|begin| {
+ let end = size.map(|size| begin + size).or(high_pc);
+ // TODO: perhaps return an error if `end` is `None`
+ end.map(|end| Range { begin, end })
+ });
+ Ok(RangeIter(RangeIterInner::Single(range)))
+ }
+
+ /// Return an iterator for the address ranges of a `Unit`.
+ ///
+ /// This uses `DW_AT_low_pc`, `DW_AT_high_pc` and `DW_AT_ranges` of the
+ /// root `DebuggingInformationEntry`.
+ pub fn unit_ranges(&self, unit: &Unit<R>) -> Result<RangeIter<R>> {
+ let mut cursor = unit.header.entries(&unit.abbreviations);
+ cursor.next_dfs()?;
+ let root = cursor.current().ok_or(Error::MissingUnitDie)?;
+ self.die_ranges(unit, root)
+ }
+
+ /// Return the location list offset at the given index.
+ pub fn locations_offset(
+ &self,
+ unit: &Unit<R>,
+ index: DebugLocListsIndex<R::Offset>,
+ ) -> Result<LocationListsOffset<R::Offset>> {
+ self.locations
+ .get_offset(unit.encoding(), unit.loclists_base, index)
+ }
+
+ /// Iterate over the `LocationListEntry`s starting at the given offset.
+ pub fn locations(
+ &self,
+ unit: &Unit<R>,
+ offset: LocationListsOffset<R::Offset>,
+ ) -> Result<LocListIter<R>> {
+ match self.file_type {
+ DwarfFileType::Main => self.locations.locations(
+ offset,
+ unit.encoding(),
+ unit.low_pc,
+ &self.debug_addr,
+ unit.addr_base,
+ ),
+ DwarfFileType::Dwo => self.locations.locations_dwo(
+ offset,
+ unit.encoding(),
+ unit.low_pc,
+ &self.debug_addr,
+ unit.addr_base,
+ ),
+ }
+ }
+
+ /// Iterate over the raw `LocationListEntry`s starting at the given offset.
+ pub fn raw_locations(
+ &self,
+ unit: &Unit<R>,
+ offset: LocationListsOffset<R::Offset>,
+ ) -> Result<RawLocListIter<R>> {
+ match self.file_type {
+ DwarfFileType::Main => self.locations.raw_locations(offset, unit.encoding()),
+ DwarfFileType::Dwo => self.locations.raw_locations_dwo(offset, unit.encoding()),
+ }
+ }
+
+ /// Try to return an attribute value as a location list offset.
+ ///
+ /// If the attribute value is one of:
+ ///
+ /// - a `DW_FORM_sec_offset` reference to the `.debug_loc` or `.debug_loclists` sections
+ /// - a `DW_FORM_loclistx` index into the `.debug_loclists` entries for the unit
+ ///
+ /// then return the location list offset of the location list.
+ /// Returns `None` for other forms.
+ pub fn attr_locations_offset(
+ &self,
+ unit: &Unit<R>,
+ attr: AttributeValue<R>,
+ ) -> Result<Option<LocationListsOffset<R::Offset>>> {
+ match attr {
+ AttributeValue::LocationListsRef(offset) => Ok(Some(offset)),
+ AttributeValue::DebugLocListsIndex(index) => {
+ self.locations_offset(unit, index).map(Some)
+ }
+ _ => Ok(None),
+ }
+ }
+
+ /// Try to return an attribute value as a location list entry iterator.
+ ///
+ /// If the attribute value is one of:
+ ///
+ /// - a `DW_FORM_sec_offset` reference to the `.debug_loc` or `.debug_loclists` sections
+ /// - a `DW_FORM_loclistx` index into the `.debug_loclists` entries for the unit
+ ///
+ /// then return an iterator over the entries in the location list.
+ /// Returns `None` for other forms.
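+ ///
+ /// A minimal sketch, assuming `attr` holds the value of a `DW_AT_location`
+ /// attribute that uses a location list form:
+ ///
+ /// ```rust,ignore
+ /// if let Some(mut locations) = dwarf.attr_locations(&unit, attr)? {
+ ///     while let Some(entry) = locations.next()? {
+ ///         // `entry.range` is the covered address range;
+ ///         // `entry.data` is the DWARF expression valid over that range.
+ ///     }
+ /// }
+ /// ```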
+ pub fn attr_locations(
+ &self,
+ unit: &Unit<R>,
+ attr: AttributeValue<R>,
+ ) -> Result<Option<LocListIter<R>>> {
+ match self.attr_locations_offset(unit, attr)? {
+ Some(offset) => Ok(Some(self.locations(unit, offset)?)),
+ None => Ok(None),
+ }
+ }
+
+ /// Call `Reader::lookup_offset_id` for each section, and return the first match.
+ ///
+ /// The first element of the tuple is `true` for supplementary sections.
+ pub fn lookup_offset_id(&self, id: ReaderOffsetId) -> Option<(bool, SectionId, R::Offset)> {
+ None.or_else(|| self.debug_abbrev.lookup_offset_id(id))
+ .or_else(|| self.debug_addr.lookup_offset_id(id))
+ .or_else(|| self.debug_aranges.lookup_offset_id(id))
+ .or_else(|| self.debug_info.lookup_offset_id(id))
+ .or_else(|| self.debug_line.lookup_offset_id(id))
+ .or_else(|| self.debug_line_str.lookup_offset_id(id))
+ .or_else(|| self.debug_str.lookup_offset_id(id))
+ .or_else(|| self.debug_str_offsets.lookup_offset_id(id))
+ .or_else(|| self.debug_types.lookup_offset_id(id))
+ .or_else(|| self.locations.lookup_offset_id(id))
+ .or_else(|| self.ranges.lookup_offset_id(id))
+ .map(|(id, offset)| (false, id, offset))
+ .or_else(|| {
+ self.sup()
+ .and_then(|sup| sup.lookup_offset_id(id))
+ .map(|(_, id, offset)| (true, id, offset))
+ })
+ }
+
+ /// Returns a string representation of the given error.
+ ///
+ /// This uses information from the DWARF sections to provide more information in some cases.
+ pub fn format_error(&self, err: Error) -> String {
+ #[allow(clippy::single_match)]
+ match err {
+ Error::UnexpectedEof(id) => match self.lookup_offset_id(id) {
+ Some((sup, section, offset)) => {
+ return format!(
+ "{} at {}{}+0x{:x}",
+ err,
+ section.name(),
+ if sup { "(sup)" } else { "" },
+ offset.into_u64(),
+ );
+ }
+ None => {}
+ },
+ _ => {}
+ }
+ err.description().into()
+ }
+}
+
+/// The sections from a `.dwp` file.
+#[derive(Debug)]
+pub struct DwarfPackage<R: Reader> {
+ /// The compilation unit index in the `.debug_cu_index` section.
+ pub cu_index: UnitIndex<R>,
+
+ /// The type unit index in the `.debug_tu_index` section.
+ pub tu_index: UnitIndex<R>,
+
+ /// The `.debug_abbrev.dwo` section.
+ pub debug_abbrev: DebugAbbrev<R>,
+
+ /// The `.debug_info.dwo` section.
+ pub debug_info: DebugInfo<R>,
+
+ /// The `.debug_line.dwo` section.
+ pub debug_line: DebugLine<R>,
+
+ /// The `.debug_str.dwo` section.
+ pub debug_str: DebugStr<R>,
+
+ /// The `.debug_str_offsets.dwo` section.
+ pub debug_str_offsets: DebugStrOffsets<R>,
+
+ /// The `.debug_loc.dwo` section.
+ ///
+ /// Only present when using the GNU split-dwarf extension to DWARF 4.
+ pub debug_loc: DebugLoc<R>,
+
+ /// The `.debug_loclists.dwo` section.
+ pub debug_loclists: DebugLocLists<R>,
+
+ /// The `.debug_rnglists.dwo` section.
+ pub debug_rnglists: DebugRngLists<R>,
+
+ /// The `.debug_types.dwo` section.
+ ///
+ /// Only present when using the GNU split-dwarf extension to DWARF 4.
+ pub debug_types: DebugTypes<R>,
+
+ /// An empty section.
+ ///
+ /// Used when creating `Dwarf<R>`.
+ pub empty: R,
+}
+
+impl<R: Reader> DwarfPackage<R> {
+ /// Try to load the `.dwp` sections using the given loader function.
+ ///
+ /// `section` loads a DWARF section from the object file.
+ /// It should return an empty section if the section does not exist.
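+ ///
+ /// A minimal sketch, assuming a hypothetical `load_dwp_section` helper that
+ /// returns an `EndianSlice` over bytes owned elsewhere:
+ ///
+ /// ```rust,ignore
+ /// let empty = gimli::EndianSlice::new(&[], gimli::LittleEndian);
+ /// let dwp = gimli::DwarfPackage::load(
+ ///     |id| Ok::<_, gimli::Error>(load_dwp_section(id)),
+ ///     empty,
+ /// )?;
+ /// ```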
+ pub fn load<F, E>(mut section: F, empty: R) -> core::result::Result<Self, E>
+ where
+ F: FnMut(SectionId) -> core::result::Result<R, E>,
+ E: From<Error>,
+ {
+ Ok(DwarfPackage {
+ cu_index: DebugCuIndex::load(&mut section)?.index()?,
+ tu_index: DebugTuIndex::load(&mut section)?.index()?,
+ // Section types are inferred.
+ debug_abbrev: Section::load(&mut section)?,
+ debug_info: Section::load(&mut section)?,
+ debug_line: Section::load(&mut section)?,
+ debug_str: Section::load(&mut section)?,
+ debug_str_offsets: Section::load(&mut section)?,
+ debug_loc: Section::load(&mut section)?,
+ debug_loclists: Section::load(&mut section)?,
+ debug_rnglists: Section::load(&mut section)?,
+ debug_types: Section::load(&mut section)?,
+ empty,
+ })
+ }
+
+ /// Find the compilation unit with the given DWO identifier and return its section
+ /// contributions.
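+ ///
+ /// A minimal sketch, assuming `dwp: DwarfPackage<R>`, a skeleton `parent: Dwarf<R>`,
+ /// and a `dwo_id` taken from a skeleton unit's `Unit::dwo_id`:
+ ///
+ /// ```rust,ignore
+ /// if let Some(split_dwarf) = dwp.find_cu(dwo_id, &parent)? {
+ ///     // `split_dwarf` borrows the `.dwo` section contributions for that unit.
+ /// }
+ /// ```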
+ pub fn find_cu(&self, id: DwoId, parent: &Dwarf<R>) -> Result<Option<Dwarf<R>>> {
+ let row = match self.cu_index.find(id.0) {
+ Some(row) => row,
+ None => return Ok(None),
+ };
+ self.cu_sections(row, parent).map(Some)
+ }
+
+ /// Find the type unit with the given type signature and return its section
+ /// contributions.
+ pub fn find_tu(
+ &self,
+ signature: DebugTypeSignature,
+ parent: &Dwarf<R>,
+ ) -> Result<Option<Dwarf<R>>> {
+ let row = match self.tu_index.find(signature.0) {
+ Some(row) => row,
+ None => return Ok(None),
+ };
+ self.tu_sections(row, parent).map(Some)
+ }
+
+ /// Return the section contributions of the compilation unit at the given index.
+ ///
+ /// The index must be in the range `1..cu_index.unit_count`.
+ ///
+ /// This function should only be needed by low level parsers.
+ pub fn cu_sections(&self, index: u32, parent: &Dwarf<R>) -> Result<Dwarf<R>> {
+ self.sections(self.cu_index.sections(index)?, parent)
+ }
+
+ /// Return the section contributions of the type unit at the given index.
+ ///
+ /// The index must be in the range `1..tu_index.unit_count`.
+ ///
+ /// This function should only be needed by low level parsers.
+ pub fn tu_sections(&self, index: u32, parent: &Dwarf<R>) -> Result<Dwarf<R>> {
+ self.sections(self.tu_index.sections(index)?, parent)
+ }
+
+ /// Return the section contributions of a unit.
+ ///
+ /// This function should only be needed by low level parsers.
+ pub fn sections(
+ &self,
+ sections: UnitIndexSectionIterator<R>,
+ parent: &Dwarf<R>,
+ ) -> Result<Dwarf<R>> {
+ let mut abbrev_offset = 0;
+ let mut abbrev_size = 0;
+ let mut info_offset = 0;
+ let mut info_size = 0;
+ let mut line_offset = 0;
+ let mut line_size = 0;
+ let mut loc_offset = 0;
+ let mut loc_size = 0;
+ let mut loclists_offset = 0;
+ let mut loclists_size = 0;
+ let mut str_offsets_offset = 0;
+ let mut str_offsets_size = 0;
+ let mut rnglists_offset = 0;
+ let mut rnglists_size = 0;
+ let mut types_offset = 0;
+ let mut types_size = 0;
+ for section in sections {
+ match section.section {
+ SectionId::DebugAbbrev => {
+ abbrev_offset = section.offset;
+ abbrev_size = section.size;
+ }
+ SectionId::DebugInfo => {
+ info_offset = section.offset;
+ info_size = section.size;
+ }
+ SectionId::DebugLine => {
+ line_offset = section.offset;
+ line_size = section.size;
+ }
+ SectionId::DebugLoc => {
+ loc_offset = section.offset;
+ loc_size = section.size;
+ }
+ SectionId::DebugLocLists => {
+ loclists_offset = section.offset;
+ loclists_size = section.size;
+ }
+ SectionId::DebugStrOffsets => {
+ str_offsets_offset = section.offset;
+ str_offsets_size = section.size;
+ }
+ SectionId::DebugRngLists => {
+ rnglists_offset = section.offset;
+ rnglists_size = section.size;
+ }
+ SectionId::DebugTypes => {
+ types_offset = section.offset;
+ types_size = section.size;
+ }
+ SectionId::DebugMacro | SectionId::DebugMacinfo => {
+ // These are valid in an index, but we can't parse them yet.
+ }
+ _ => return Err(Error::UnknownIndexSection),
+ }
+ }
+
+ let debug_abbrev = self.debug_abbrev.dwp_range(abbrev_offset, abbrev_size)?;
+ let debug_info = self.debug_info.dwp_range(info_offset, info_size)?;
+ let debug_line = self.debug_line.dwp_range(line_offset, line_size)?;
+ let debug_loc = self.debug_loc.dwp_range(loc_offset, loc_size)?;
+ let debug_loclists = self
+ .debug_loclists
+ .dwp_range(loclists_offset, loclists_size)?;
+ let debug_str_offsets = self
+ .debug_str_offsets
+ .dwp_range(str_offsets_offset, str_offsets_size)?;
+ let debug_rnglists = self
+ .debug_rnglists
+ .dwp_range(rnglists_offset, rnglists_size)?;
+ let debug_types = self.debug_types.dwp_range(types_offset, types_size)?;
+
+ let debug_str = self.debug_str.clone();
+
+ let debug_addr = parent.debug_addr.clone();
+ let debug_ranges = parent.ranges.debug_ranges().clone();
+
+ let debug_aranges = self.empty.clone().into();
+ let debug_line_str = self.empty.clone().into();
+
+ Ok(Dwarf {
+ debug_abbrev,
+ debug_addr,
+ debug_aranges,
+ debug_info,
+ debug_line,
+ debug_line_str,
+ debug_str,
+ debug_str_offsets,
+ debug_types,
+ locations: LocationLists::new(debug_loc, debug_loclists),
+ ranges: RangeLists::new(debug_ranges, debug_rnglists),
+ file_type: DwarfFileType::Dwo,
+ sup: None,
+ })
+ }
+}
+
+/// All of the commonly used information for a unit in the `.debug_info` or `.debug_types`
+/// sections.
+#[derive(Debug)]
+pub struct Unit<R, Offset = <R as Reader>::Offset>
+where
+ R: Reader<Offset = Offset>,
+ Offset: ReaderOffset,
+{
+ /// The header of the unit.
+ pub header: UnitHeader<R, Offset>,
+
+ /// The parsed abbreviations for the unit.
+ pub abbreviations: Abbreviations,
+
+ /// The `DW_AT_name` attribute of the unit.
+ pub name: Option<R>,
+
+ /// The `DW_AT_comp_dir` attribute of the unit.
+ pub comp_dir: Option<R>,
+
+ /// The `DW_AT_low_pc` attribute of the unit. Defaults to 0.
+ pub low_pc: u64,
+
+ /// The `DW_AT_str_offsets_base` attribute of the unit. Defaults to 0.
+ pub str_offsets_base: DebugStrOffsetsBase<Offset>,
+
+ /// The `DW_AT_addr_base` attribute of the unit. Defaults to 0.
+ pub addr_base: DebugAddrBase<Offset>,
+
+ /// The `DW_AT_loclists_base` attribute of the unit. Defaults to 0.
+ pub loclists_base: DebugLocListsBase<Offset>,
+
+ /// The `DW_AT_rnglists_base` attribute of the unit. Defaults to 0.
+ pub rnglists_base: DebugRngListsBase<Offset>,
+
+ /// The line number program of the unit.
+ pub line_program: Option<IncompleteLineProgram<R, Offset>>,
+
+ /// The DWO ID of a skeleton unit or split compilation unit.
+ pub dwo_id: Option<DwoId>,
+}
+
+impl<R: Reader> Unit<R> {
+ /// Construct a new `Unit` from the given unit header.
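+ ///
+ /// A small sketch of the usual construction loop over `.debug_info`
+ /// (`dwarf` is an already-loaded `Dwarf` supplied by the caller):
+ ///
+ /// ```
+ /// # fn example<R: gimli::Reader>(dwarf: &gimli::Dwarf<R>) -> gimli::Result<()> {
+ /// let mut headers = dwarf.units();
+ /// while let Some(header) = headers.next()? {
+ ///     let unit = gimli::Unit::new(dwarf, header)?;
+ ///     // `unit.name`, `unit.comp_dir`, and `unit.line_program` are now
+ ///     // resolved, if the corresponding attributes were present.
+ ///     let _ = unit;
+ /// }
+ /// # Ok(())
+ /// # }
+ /// ```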
+ #[inline]
+ pub fn new(dwarf: &Dwarf<R>, header: UnitHeader<R>) -> Result<Self> {
+ let abbreviations = header.abbreviations(&dwarf.debug_abbrev)?;
+ let mut unit = Unit {
+ abbreviations,
+ name: None,
+ comp_dir: None,
+ low_pc: 0,
+ str_offsets_base: DebugStrOffsetsBase::default_for_encoding_and_file(
+ header.encoding(),
+ dwarf.file_type,
+ ),
+ // NB: `.debug_addr` is never split out into a `.dwo` file, so default the base
+ // to 0; it is overwritten below if `DW_AT_addr_base` (or `DW_AT_GNU_addr_base`)
+ // is present.
+ addr_base: DebugAddrBase(R::Offset::from_u8(0)),
+ loclists_base: DebugLocListsBase::default_for_encoding_and_file(
+ header.encoding(),
+ dwarf.file_type,
+ ),
+ rnglists_base: DebugRngListsBase::default_for_encoding_and_file(
+ header.encoding(),
+ dwarf.file_type,
+ ),
+ line_program: None,
+ dwo_id: match header.type_() {
+ UnitType::Skeleton(dwo_id) | UnitType::SplitCompilation(dwo_id) => Some(dwo_id),
+ _ => None,
+ },
+ header,
+ };
+ let mut name = None;
+ let mut comp_dir = None;
+ let mut line_program_offset = None;
+ let mut low_pc_attr = None;
+
+ {
+ let mut cursor = unit.header.entries(&unit.abbreviations);
+ cursor.next_dfs()?;
+ let root = cursor.current().ok_or(Error::MissingUnitDie)?;
+ let mut attrs = root.attrs();
+ while let Some(attr) = attrs.next()? {
+ match attr.name() {
+ constants::DW_AT_name => {
+ name = Some(attr.value());
+ }
+ constants::DW_AT_comp_dir => {
+ comp_dir = Some(attr.value());
+ }
+ constants::DW_AT_low_pc => {
+ low_pc_attr = Some(attr.value());
+ }
+ constants::DW_AT_stmt_list => {
+ if let AttributeValue::DebugLineRef(offset) = attr.value() {
+ line_program_offset = Some(offset);
+ }
+ }
+ constants::DW_AT_str_offsets_base => {
+ if let AttributeValue::DebugStrOffsetsBase(base) = attr.value() {
+ unit.str_offsets_base = base;
+ }
+ }
+ constants::DW_AT_addr_base | constants::DW_AT_GNU_addr_base => {
+ if let AttributeValue::DebugAddrBase(base) = attr.value() {
+ unit.addr_base = base;
+ }
+ }
+ constants::DW_AT_loclists_base => {
+ if let AttributeValue::DebugLocListsBase(base) = attr.value() {
+ unit.loclists_base = base;
+ }
+ }
+ constants::DW_AT_rnglists_base | constants::DW_AT_GNU_ranges_base => {
+ if let AttributeValue::DebugRngListsBase(base) = attr.value() {
+ unit.rnglists_base = base;
+ }
+ }
+ constants::DW_AT_GNU_dwo_id => {
+ if unit.dwo_id.is_none() {
+ if let AttributeValue::DwoId(dwo_id) = attr.value() {
+ unit.dwo_id = Some(dwo_id);
+ }
+ }
+ }
+ _ => {}
+ }
+ }
+ }
+
+ unit.name = match name {
+ Some(val) => dwarf.attr_string(&unit, val).ok(),
+ None => None,
+ };
+ unit.comp_dir = match comp_dir {
+ Some(val) => dwarf.attr_string(&unit, val).ok(),
+ None => None,
+ };
+ unit.line_program = match line_program_offset {
+ Some(offset) => Some(dwarf.debug_line.program(
+ offset,
+ unit.header.address_size(),
+ unit.comp_dir.clone(),
+ unit.name.clone(),
+ )?),
+ None => None,
+ };
+ if let Some(low_pc_attr) = low_pc_attr {
+ if let Some(addr) = dwarf.attr_address(&unit, low_pc_attr)? {
+ unit.low_pc = addr;
+ }
+ }
+ Ok(unit)
+ }
+
+ /// Return the encoding parameters for this unit.
+ #[inline]
+ pub fn encoding(&self) -> Encoding {
+ self.header.encoding()
+ }
+
+ /// Read the `DebuggingInformationEntry` at the given offset.
+ pub fn entry(&self, offset: UnitOffset<R::Offset>) -> Result<DebuggingInformationEntry<R>> {
+ self.header.entry(&self.abbreviations, offset)
+ }
+
+ /// Navigate this unit's `DebuggingInformationEntry`s.
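+ ///
+ /// A short traversal sketch (`unit` is assumed to be an already-parsed `Unit`):
+ ///
+ /// ```
+ /// # fn example<R: gimli::Reader>(unit: &gimli::Unit<R>) -> gimli::Result<()> {
+ /// let mut entries = unit.entries();
+ /// while let Some((_depth_delta, entry)) = entries.next_dfs()? {
+ ///     let _tag = entry.tag();
+ /// }
+ /// # Ok(())
+ /// # }
+ /// ```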
+ #[inline]
+ pub fn entries(&self) -> EntriesCursor<R> {
+ self.header.entries(&self.abbreviations)
+ }
+
+ /// Navigate this unit's `DebuggingInformationEntry`s
+ /// starting at the given offset.
+ #[inline]
+ pub fn entries_at_offset(&self, offset: UnitOffset<R::Offset>) -> Result<EntriesCursor<R>> {
+ self.header.entries_at_offset(&self.abbreviations, offset)
+ }
+
+ /// Navigate this unit's `DebuggingInformationEntry`s as a tree
+ /// starting at the given offset.
+ #[inline]
+ pub fn entries_tree(&self, offset: Option<UnitOffset<R::Offset>>) -> Result<EntriesTree<R>> {
+ self.header.entries_tree(&self.abbreviations, offset)
+ }
+
+ /// Read the raw data that defines the Debugging Information Entries.
+ #[inline]
+ pub fn entries_raw(&self, offset: Option<UnitOffset<R::Offset>>) -> Result<EntriesRaw<R>> {
+ self.header.entries_raw(&self.abbreviations, offset)
+ }
+
+ /// Copy attributes that are subject to relocation from another unit. This is intended
+ /// to be used to copy attributes from a skeleton compilation unit to the corresponding
+ /// split compilation unit.
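+ ///
+ /// Sketch of the expected call site (`skeleton_unit` parsed from the executable,
+ /// `split_unit` parsed from the matching `.dwo` or `.dwp`; both are placeholders):
+ ///
+ /// ```
+ /// # fn example<R: gimli::Reader>(
+ /// #     skeleton_unit: &gimli::Unit<R>,
+ /// #     split_unit: &mut gimli::Unit<R>,
+ /// # ) {
+ /// split_unit.copy_relocated_attributes(skeleton_unit);
+ /// // `split_unit.low_pc` and `split_unit.addr_base` now match the skeleton.
+ /// # }
+ /// ```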
+ pub fn copy_relocated_attributes(&mut self, other: &Unit<R>) {
+ self.low_pc = other.low_pc;
+ self.addr_base = other.addr_base;
+ if self.header.version() < 5 {
+ self.rnglists_base = other.rnglists_base;
+ }
+ }
+}
+
+impl<T: ReaderOffset> UnitSectionOffset<T> {
+ /// Convert an offset to be relative to the start of the given unit,
+ /// instead of relative to the start of the section.
+ /// Returns `None` if the offset is not within the unit entries.
+ pub fn to_unit_offset<R>(&self, unit: &Unit<R>) -> Option<UnitOffset<T>>
+ where
+ R: Reader<Offset = T>,
+ {
+ let (offset, unit_offset) = match (self, unit.header.offset()) {
+ (
+ UnitSectionOffset::DebugInfoOffset(offset),
+ UnitSectionOffset::DebugInfoOffset(unit_offset),
+ ) => (offset.0, unit_offset.0),
+ (
+ UnitSectionOffset::DebugTypesOffset(offset),
+ UnitSectionOffset::DebugTypesOffset(unit_offset),
+ ) => (offset.0, unit_offset.0),
+ _ => return None,
+ };
+ let offset = match offset.checked_sub(unit_offset) {
+ Some(offset) => UnitOffset(offset),
+ None => return None,
+ };
+ if !unit.header.is_valid_offset(offset) {
+ return None;
+ }
+ Some(offset)
+ }
+}
+
+impl<T: ReaderOffset> UnitOffset<T> {
+ /// Convert an offset to be relative to the start of the unit's section
+ /// (`.debug_info` or `.debug_types`), instead of relative to the start of
+ /// the given unit.
+ ///
+ /// Does not check that the offset is valid.
+ pub fn to_unit_section_offset<R>(&self, unit: &Unit<R>) -> UnitSectionOffset<T>
+ where
+ R: Reader<Offset = T>,
+ {
+ match unit.header.offset() {
+ UnitSectionOffset::DebugInfoOffset(unit_offset) => {
+ DebugInfoOffset(unit_offset.0 + self.0).into()
+ }
+ UnitSectionOffset::DebugTypesOffset(unit_offset) => {
+ DebugTypesOffset(unit_offset.0 + self.0).into()
+ }
+ }
+ }
+}
+
+/// An iterator for the address ranges of a `DebuggingInformationEntry`.
+///
+/// Returned by `Dwarf::die_ranges` and `Dwarf::unit_ranges`.
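+///
+/// A minimal iteration sketch (`dwarf`, `unit`, and `entry` are placeholders
+/// supplied by the caller):
+///
+/// ```
+/// # fn example<R: gimli::Reader>(
+/// #     dwarf: &gimli::Dwarf<R>,
+/// #     unit: &gimli::Unit<R>,
+/// #     entry: &gimli::DebuggingInformationEntry<R>,
+/// # ) -> gimli::Result<()> {
+/// let mut ranges = dwarf.die_ranges(unit, entry)?;
+/// while let Some(range) = ranges.next()? {
+///     let _ = (range.begin, range.end);
+/// }
+/// # Ok(())
+/// # }
+/// ```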
+#[derive(Debug)]
+pub struct RangeIter<R: Reader>(RangeIterInner<R>);
+
+#[derive(Debug)]
+enum RangeIterInner<R: Reader> {
+ Single(Option<Range>),
+ List(RngListIter<R>),
+}
+
+impl<R: Reader> Default for RangeIter<R> {
+ fn default() -> Self {
+ RangeIter(RangeIterInner::Single(None))
+ }
+}
+
+impl<R: Reader> RangeIter<R> {
+ /// Advance the iterator to the next range.
+ pub fn next(&mut self) -> Result<Option<Range>> {
+ match self.0 {
+ RangeIterInner::Single(ref mut range) => Ok(range.take()),
+ RangeIterInner::List(ref mut list) => list.next(),
+ }
+ }
+}
+
+#[cfg(feature = "fallible-iterator")]
+impl<R: Reader> fallible_iterator::FallibleIterator for RangeIter<R> {
+ type Item = Range;
+ type Error = Error;
+
+ #[inline]
+ fn next(&mut self) -> ::core::result::Result<Option<Self::Item>, Self::Error> {
+ RangeIter::next(self)
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::read::EndianSlice;
+ use crate::{Endianity, LittleEndian};
+
+ /// Ensure that `Dwarf<R>` is covariant wrt R.
+ #[test]
+ fn test_dwarf_variance() {
+ /// This only needs to compile.
+ fn _f<'a: 'b, 'b, E: Endianity>(x: Dwarf<EndianSlice<'a, E>>) -> Dwarf<EndianSlice<'b, E>> {
+ x
+ }
+ }
+
+ /// Ensure that `Unit<R>` is covariant wrt R.
+ #[test]
+ fn test_dwarf_unit_variance() {
+ /// This only needs to compile.
+ fn _f<'a: 'b, 'b, E: Endianity>(x: Unit<EndianSlice<'a, E>>) -> Unit<EndianSlice<'b, E>> {
+ x
+ }
+ }
+
+ #[test]
+ fn test_send() {
+ fn assert_is_send<T: Send>() {}
+ assert_is_send::<Dwarf<EndianSlice<LittleEndian>>>();
+ assert_is_send::<Unit<EndianSlice<LittleEndian>>>();
+ }
+
+ #[test]
+ fn test_format_error() {
+ let mut owned_dwarf = Dwarf::load(|_| -> Result<_> { Ok(vec![1, 2]) }).unwrap();
+ owned_dwarf
+ .load_sup(|_| -> Result<_> { Ok(vec![1, 2]) })
+ .unwrap();
+ let dwarf = owned_dwarf.borrow(|section| EndianSlice::new(&section, LittleEndian));
+
+ match dwarf.debug_str.get_str(DebugStrOffset(1)) {
+ Ok(r) => panic!("Unexpected str {:?}", r),
+ Err(e) => {
+ assert_eq!(
+ dwarf.format_error(e),
+ "Hit the end of input before it was expected at .debug_str+0x1"
+ );
+ }
+ }
+ match dwarf.sup().unwrap().debug_str.get_str(DebugStrOffset(1)) {
+ Ok(r) => panic!("Unexpected str {:?}", r),
+ Err(e) => {
+ assert_eq!(
+ dwarf.format_error(e),
+ "Hit the end of input before it was expected at .debug_str(sup)+0x1"
+ );
+ }
+ }
+ assert_eq!(dwarf.format_error(Error::Io), Error::Io.description());
+ }
+}
diff --git a/vendor/gimli-0.26.2/src/read/endian_reader.rs b/vendor/gimli-0.26.2/src/read/endian_reader.rs
new file mode 100644
index 000000000..8852b3804
--- /dev/null
+++ b/vendor/gimli-0.26.2/src/read/endian_reader.rs
@@ -0,0 +1,639 @@
+//! Defining custom `Reader`s quickly.
+
+use alloc::borrow::Cow;
+use alloc::rc::Rc;
+use alloc::string::String;
+use alloc::sync::Arc;
+use core::fmt::Debug;
+use core::ops::{Deref, Index, Range, RangeFrom, RangeTo};
+use core::slice;
+use core::str;
+use stable_deref_trait::CloneStableDeref;
+
+use crate::endianity::Endianity;
+use crate::read::{Error, Reader, ReaderOffsetId, Result};
+
+/// A reference counted, non-thread-safe slice of bytes and associated
+/// endianity.
+///
+/// ```
+/// # #[cfg(feature = "std")] {
+/// use std::rc::Rc;
+///
+/// let buf = Rc::from(&[1, 2, 3, 4][..]);
+/// let reader = gimli::EndianRcSlice::new(buf, gimli::NativeEndian);
+/// # let _ = reader;
+/// # }
+/// ```
+pub type EndianRcSlice<Endian> = EndianReader<Endian, Rc<[u8]>>;
+
+/// An atomically reference counted, thread-safe slice of bytes and associated
+/// endianity.
+///
+/// ```
+/// # #[cfg(feature = "std")] {
+/// use std::sync::Arc;
+///
+/// let buf = Arc::from(&[1, 2, 3, 4][..]);
+/// let reader = gimli::EndianArcSlice::new(buf, gimli::NativeEndian);
+/// # let _ = reader;
+/// # }
+/// ```
+pub type EndianArcSlice<Endian> = EndianReader<Endian, Arc<[u8]>>;
+
+/// An easy way to define a custom `Reader` implementation with a reference to a
+/// generic buffer of bytes and an associated endianity.
+///
+/// Note that the whole original buffer is kept alive in memory even if there is
+/// only one reader that references only a handful of bytes from that original
+/// buffer. That is, `EndianReader` will not do any copying, moving, or
+/// compacting in order to free up unused regions of the original buffer. If you
+/// require this kind of behavior, it is up to you to implement `Reader`
+/// directly by hand.
+///
+/// # Example
+///
+/// Say you have an `mmap`ed file that you want to serve as a `gimli::Reader`.
+/// You can wrap that `mmap`ed file up in a `MmapFile` type and use
+/// `EndianReader<Rc<MmapFile>>` or `EndianReader<Arc<MmapFile>>` as readers as
+/// long as `MmapFile` dereferences to the underlying `[u8]` data.
+///
+/// ```
+/// use std::io;
+/// use std::ops::Deref;
+/// use std::path::Path;
+/// use std::slice;
+/// use std::sync::Arc;
+///
+/// /// A type that represents an `mmap`ed file.
+/// #[derive(Debug)]
+/// pub struct MmapFile {
+/// ptr: *const u8,
+/// len: usize,
+/// }
+///
+/// impl MmapFile {
+/// pub fn new(path: &Path) -> io::Result<MmapFile> {
+/// // Call `mmap` and check for errors and all that...
+/// # unimplemented!()
+/// }
+/// }
+///
+/// impl Drop for MmapFile {
+/// fn drop(&mut self) {
+/// // Call `munmap` to clean up after ourselves...
+/// # unimplemented!()
+/// }
+/// }
+///
+/// // And `MmapFile` can deref to a slice of the `mmap`ed region of memory.
+/// impl Deref for MmapFile {
+/// type Target = [u8];
+/// fn deref(&self) -> &[u8] {
+/// unsafe {
+/// slice::from_raw_parts(self.ptr, self.len)
+/// }
+/// }
+/// }
+///
+/// /// A type that represents a shared `mmap`ed file.
+/// #[derive(Debug, Clone)]
+/// pub struct ArcMmapFile(Arc<MmapFile>);
+///
+/// // And `ArcMmapFile` can deref to a slice of the `mmap`ed region of memory.
+/// impl Deref for ArcMmapFile {
+/// type Target = [u8];
+/// fn deref(&self) -> &[u8] {
+/// &self.0
+/// }
+/// }
+///
+/// // These are both valid for any `Rc` or `Arc`.
+/// unsafe impl gimli::StableDeref for ArcMmapFile {}
+/// unsafe impl gimli::CloneStableDeref for ArcMmapFile {}
+///
+/// /// A `gimli::Reader` that is backed by an `mmap`ed file!
+/// pub type MmapFileReader<Endian> = gimli::EndianReader<Endian, ArcMmapFile>;
+/// # fn test(_: &MmapFileReader<gimli::NativeEndian>) { }
+/// ```
+#[derive(Debug, Clone, Copy, Hash)]
+pub struct EndianReader<Endian, T>
+where
+ Endian: Endianity,
+ T: CloneStableDeref<Target = [u8]> + Debug,
+{
+ range: SubRange<T>,
+ endian: Endian,
+}
+
+impl<Endian, T1, T2> PartialEq<EndianReader<Endian, T2>> for EndianReader<Endian, T1>
+where
+ Endian: Endianity,
+ T1: CloneStableDeref<Target = [u8]> + Debug,
+ T2: CloneStableDeref<Target = [u8]> + Debug,
+{
+ fn eq(&self, rhs: &EndianReader<Endian, T2>) -> bool {
+ self.bytes() == rhs.bytes()
+ }
+}
+
+impl<Endian, T> Eq for EndianReader<Endian, T>
+where
+ Endian: Endianity,
+ T: CloneStableDeref<Target = [u8]> + Debug,
+{
+}
+
+// This is separated out from `EndianReader` so that we can avoid running afoul
+// of borrowck. We need to `read_slice(&mut self, ...) -> &[u8]` and then call
+// `self.endian.read_whatever` on the result. The problem is that the returned
+// slice keeps the `&mut self` borrow active, so we wouldn't be able to access
+// `self.endian`. Splitting the sub-range out from the endian lets us work
+// around this, making it so that only the `self.range` borrow is held active,
+// not all of `self`.
+//
+// This also serves to encapsulate the unsafe code concerning `CloneStableDeref`.
+// The `bytes` member is held so that the bytes live long enough, and the
+// `CloneStableDeref` ensures these bytes never move. The `ptr` and `len`
+// members point inside `bytes`, and are updated during read operations.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+struct SubRange<T>
+where
+ T: CloneStableDeref<Target = [u8]> + Debug,
+{
+ bytes: T,
+ ptr: *const u8,
+ len: usize,
+}
+
+unsafe impl<T> Send for SubRange<T> where T: CloneStableDeref<Target = [u8]> + Debug + Send {}
+
+unsafe impl<T> Sync for SubRange<T> where T: CloneStableDeref<Target = [u8]> + Debug + Sync {}
+
+impl<T> SubRange<T>
+where
+ T: CloneStableDeref<Target = [u8]> + Debug,
+{
+ #[inline]
+ fn new(bytes: T) -> Self {
+ let ptr = bytes.as_ptr();
+ let len = bytes.len();
+ SubRange { bytes, ptr, len }
+ }
+
+ #[inline]
+ fn bytes(&self) -> &[u8] {
+ // Safe because `T` implements `CloneStableDeref`, `bytes` can't be modified,
+ // and all operations that modify `ptr` and `len` ensure they stay in range.
+ unsafe { slice::from_raw_parts(self.ptr, self.len) }
+ }
+
+ #[inline]
+ fn len(&self) -> usize {
+ self.len
+ }
+
+ #[inline]
+ fn truncate(&mut self, len: usize) {
+ assert!(len <= self.len);
+ self.len = len;
+ }
+
+ #[inline]
+ fn skip(&mut self, len: usize) {
+ assert!(len <= self.len);
+ self.ptr = unsafe { self.ptr.add(len) };
+ self.len -= len;
+ }
+
+ #[inline]
+ fn read_slice(&mut self, len: usize) -> Option<&[u8]> {
+ if self.len() < len {
+ None
+ } else {
+ // Same as for `bytes()`.
+ let bytes = unsafe { slice::from_raw_parts(self.ptr, len) };
+ self.skip(len);
+ Some(bytes)
+ }
+ }
+}
+
+impl<Endian, T> EndianReader<Endian, T>
+where
+ Endian: Endianity,
+ T: CloneStableDeref<Target = [u8]> + Debug,
+{
+ /// Construct a new `EndianReader` with the given bytes.
+ #[inline]
+ pub fn new(bytes: T, endian: Endian) -> EndianReader<Endian, T> {
+ EndianReader {
+ range: SubRange::new(bytes),
+ endian,
+ }
+ }
+
+ /// Return a reference to the raw bytes underlying this reader.
+ #[inline]
+ pub fn bytes(&self) -> &[u8] {
+ self.range.bytes()
+ }
+}
+
+/// # Range Methods
+///
+/// Unfortunately, `std::ops::Index` *must* return a reference, so we can't
+/// implement `Index<Range<usize>>` to return a new `EndianReader` the way we
+/// would like to. Instead, we abandon fancy indexing operators and have these
+/// plain old methods.
+impl<Endian, T> EndianReader<Endian, T>
+where
+ Endian: Endianity,
+ T: CloneStableDeref<Target = [u8]> + Debug,
+{
+ /// Take the given `start..end` range of the underlying buffer and return a
+ /// new `EndianReader`.
+ ///
+ /// ```
+ /// # #[cfg(feature = "std")] {
+ /// use gimli::{EndianReader, LittleEndian};
+ /// use std::sync::Arc;
+ ///
+ /// let buf = Arc::<[u8]>::from(&[0x01, 0x02, 0x03, 0x04][..]);
+ /// let reader = EndianReader::new(buf.clone(), LittleEndian);
+ /// assert_eq!(reader.range(1..3),
+ /// EndianReader::new(&buf[1..3], LittleEndian));
+ /// # }
+ /// ```
+ ///
+ /// # Panics
+ ///
+ /// Panics if the range is out of bounds.
+ pub fn range(&self, idx: Range<usize>) -> EndianReader<Endian, T> {
+ let mut r = self.clone();
+ r.range.skip(idx.start);
+ r.range.truncate(idx.len());
+ r
+ }
+
+ /// Take the given `start..` range of the underlying buffer and return a new
+ /// `EndianReader`.
+ ///
+ /// ```
+ /// # #[cfg(feature = "std")] {
+ /// use gimli::{EndianReader, LittleEndian};
+ /// use std::sync::Arc;
+ ///
+ /// let buf = Arc::<[u8]>::from(&[0x01, 0x02, 0x03, 0x04][..]);
+ /// let reader = EndianReader::new(buf.clone(), LittleEndian);
+ /// assert_eq!(reader.range_from(2..),
+ /// EndianReader::new(&buf[2..], LittleEndian));
+ /// # }
+ /// ```
+ ///
+ /// # Panics
+ ///
+ /// Panics if the range is out of bounds.
+ pub fn range_from(&self, idx: RangeFrom<usize>) -> EndianReader<Endian, T> {
+ let mut r = self.clone();
+ r.range.skip(idx.start);
+ r
+ }
+
+ /// Take the given `..end` range of the underlying buffer and return a new
+ /// `EndianReader`.
+ ///
+ /// ```
+ /// # #[cfg(feature = "std")] {
+ /// use gimli::{EndianReader, LittleEndian};
+ /// use std::sync::Arc;
+ ///
+ /// let buf = Arc::<[u8]>::from(&[0x01, 0x02, 0x03, 0x04][..]);
+ /// let reader = EndianReader::new(buf.clone(), LittleEndian);
+ /// assert_eq!(reader.range_to(..3),
+ /// EndianReader::new(&buf[..3], LittleEndian));
+ /// # }
+ /// ```
+ ///
+ /// # Panics
+ ///
+ /// Panics if the range is out of bounds.
+ pub fn range_to(&self, idx: RangeTo<usize>) -> EndianReader<Endian, T> {
+ let mut r = self.clone();
+ r.range.truncate(idx.end);
+ r
+ }
+}
+
+impl<Endian, T> Index<usize> for EndianReader<Endian, T>
+where
+ Endian: Endianity,
+ T: CloneStableDeref<Target = [u8]> + Debug,
+{
+ type Output = u8;
+ fn index(&self, idx: usize) -> &Self::Output {
+ &self.bytes()[idx]
+ }
+}
+
+impl<Endian, T> Index<RangeFrom<usize>> for EndianReader<Endian, T>
+where
+ Endian: Endianity,
+ T: CloneStableDeref<Target = [u8]> + Debug,
+{
+ type Output = [u8];
+ fn index(&self, idx: RangeFrom<usize>) -> &Self::Output {
+ &self.bytes()[idx]
+ }
+}
+
+impl<Endian, T> Deref for EndianReader<Endian, T>
+where
+ Endian: Endianity,
+ T: CloneStableDeref<Target = [u8]> + Debug,
+{
+ type Target = [u8];
+ fn deref(&self) -> &Self::Target {
+ self.bytes()
+ }
+}
+
+impl<Endian, T> Reader for EndianReader<Endian, T>
+where
+ Endian: Endianity,
+ T: CloneStableDeref<Target = [u8]> + Debug,
+{
+ type Endian = Endian;
+ type Offset = usize;
+
+ #[inline]
+ fn endian(&self) -> Endian {
+ self.endian
+ }
+
+ #[inline]
+ fn len(&self) -> usize {
+ self.range.len()
+ }
+
+ #[inline]
+ fn empty(&mut self) {
+ self.range.truncate(0);
+ }
+
+ #[inline]
+ fn truncate(&mut self, len: usize) -> Result<()> {
+ if self.len() < len {
+ Err(Error::UnexpectedEof(self.offset_id()))
+ } else {
+ self.range.truncate(len);
+ Ok(())
+ }
+ }
+
+ #[inline]
+ fn offset_from(&self, base: &EndianReader<Endian, T>) -> usize {
+ let base_ptr = base.bytes().as_ptr() as *const u8 as usize;
+ let ptr = self.bytes().as_ptr() as *const u8 as usize;
+ debug_assert!(base_ptr <= ptr);
+ debug_assert!(ptr + self.bytes().len() <= base_ptr + base.bytes().len());
+ ptr - base_ptr
+ }
+
+ #[inline]
+ fn offset_id(&self) -> ReaderOffsetId {
+ ReaderOffsetId(self.bytes().as_ptr() as u64)
+ }
+
+ #[inline]
+ fn lookup_offset_id(&self, id: ReaderOffsetId) -> Option<Self::Offset> {
+ let id = id.0;
+ let self_id = self.bytes().as_ptr() as u64;
+ let self_len = self.bytes().len() as u64;
+ if id >= self_id && id <= self_id + self_len {
+ Some((id - self_id) as usize)
+ } else {
+ None
+ }
+ }
+
+ #[inline]
+ fn find(&self, byte: u8) -> Result<usize> {
+ self.bytes()
+ .iter()
+ .position(|x| *x == byte)
+ .ok_or_else(|| Error::UnexpectedEof(self.offset_id()))
+ }
+
+ #[inline]
+ fn skip(&mut self, len: usize) -> Result<()> {
+ if self.len() < len {
+ Err(Error::UnexpectedEof(self.offset_id()))
+ } else {
+ self.range.skip(len);
+ Ok(())
+ }
+ }
+
+ #[inline]
+ fn split(&mut self, len: usize) -> Result<Self> {
+ if self.len() < len {
+ Err(Error::UnexpectedEof(self.offset_id()))
+ } else {
+ let mut r = self.clone();
+ r.range.truncate(len);
+ self.range.skip(len);
+ Ok(r)
+ }
+ }
+
+ #[inline]
+ fn to_slice(&self) -> Result<Cow<[u8]>> {
+ Ok(self.bytes().into())
+ }
+
+ #[inline]
+ fn to_string(&self) -> Result<Cow<str>> {
+ match str::from_utf8(self.bytes()) {
+ Ok(s) => Ok(s.into()),
+ _ => Err(Error::BadUtf8),
+ }
+ }
+
+ #[inline]
+ fn to_string_lossy(&self) -> Result<Cow<str>> {
+ Ok(String::from_utf8_lossy(self.bytes()))
+ }
+
+ #[inline]
+ fn read_slice(&mut self, buf: &mut [u8]) -> Result<()> {
+ match self.range.read_slice(buf.len()) {
+ Some(slice) => {
+ buf.copy_from_slice(slice);
+ Ok(())
+ }
+ None => Err(Error::UnexpectedEof(self.offset_id())),
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::endianity::NativeEndian;
+ use crate::read::Reader;
+
+ fn native_reader<T: CloneStableDeref<Target = [u8]> + Debug>(
+ bytes: T,
+ ) -> EndianReader<NativeEndian, T> {
+ EndianReader::new(bytes, NativeEndian)
+ }
+
+ const BUF: &[u8] = &[1, 2, 3, 4, 5, 6, 7, 8, 9, 0];
+
+ #[test]
+ fn test_reader_split() {
+ let mut reader = native_reader(BUF);
+ let left = reader.split(3).unwrap();
+ assert_eq!(left, native_reader(&BUF[..3]));
+ assert_eq!(reader, native_reader(&BUF[3..]));
+ }
+
+ #[test]
+ fn test_reader_split_out_of_bounds() {
+ let mut reader = native_reader(BUF);
+ assert!(reader.split(30).is_err());
+ }
+
+ #[test]
+ fn bytes_and_len_and_range_and_eq() {
+ let reader = native_reader(BUF);
+ assert_eq!(reader.len(), BUF.len());
+ assert_eq!(reader.bytes(), BUF);
+ assert_eq!(reader, native_reader(BUF));
+
+ let range = reader.range(2..8);
+ let buf_range = &BUF[2..8];
+ assert_eq!(range.len(), buf_range.len());
+ assert_eq!(range.bytes(), buf_range);
+ assert_ne!(range, native_reader(BUF));
+ assert_eq!(range, native_reader(buf_range));
+
+ let range_from = range.range_from(1..);
+ let buf_range_from = &buf_range[1..];
+ assert_eq!(range_from.len(), buf_range_from.len());
+ assert_eq!(range_from.bytes(), buf_range_from);
+ assert_ne!(range_from, native_reader(BUF));
+ assert_eq!(range_from, native_reader(buf_range_from));
+
+ let range_to = range_from.range_to(..4);
+ let buf_range_to = &buf_range_from[..4];
+ assert_eq!(range_to.len(), buf_range_to.len());
+ assert_eq!(range_to.bytes(), buf_range_to);
+ assert_ne!(range_to, native_reader(BUF));
+ assert_eq!(range_to, native_reader(buf_range_to));
+ }
+
+ #[test]
+ fn find() {
+ let mut reader = native_reader(BUF);
+ reader.skip(2).unwrap();
+ assert_eq!(
+ reader.find(5),
+ Ok(BUF[2..].iter().position(|x| *x == 5).unwrap())
+ );
+ }
+
+ #[test]
+ fn indexing() {
+ let mut reader = native_reader(BUF);
+ reader.skip(2).unwrap();
+ assert_eq!(reader[0], BUF[2]);
+ }
+
+ #[test]
+ #[should_panic]
+ fn indexing_out_of_bounds() {
+ let mut reader = native_reader(BUF);
+ reader.skip(2).unwrap();
+ let _ = reader[900];
+ }
+
+ #[test]
+ fn endian() {
+ let reader = native_reader(BUF);
+ assert_eq!(reader.endian(), NativeEndian);
+ }
+
+ #[test]
+ fn empty() {
+ let mut reader = native_reader(BUF);
+ assert!(!reader.is_empty());
+ reader.empty();
+ assert!(reader.is_empty());
+ assert!(reader.bytes().is_empty());
+ }
+
+ #[test]
+ fn truncate() {
+ let reader = native_reader(BUF);
+ let mut reader = reader.range(2..8);
+ reader.truncate(2).unwrap();
+ assert_eq!(reader.bytes(), &BUF[2..4]);
+ }
+
+ #[test]
+ fn offset_from() {
+ let reader = native_reader(BUF);
+ let sub = reader.range(2..8);
+ assert_eq!(sub.offset_from(&reader), 2);
+ }
+
+ #[test]
+ fn skip() {
+ let mut reader = native_reader(BUF);
+ reader.skip(2).unwrap();
+ assert_eq!(reader.bytes(), &BUF[2..]);
+ }
+
+ #[test]
+ fn to_slice() {
+ assert_eq!(
+ native_reader(BUF).range(2..5).to_slice(),
+ Ok(Cow::from(&BUF[2..5]))
+ );
+ }
+
+ #[test]
+ fn to_string_ok() {
+ let buf = b"hello, world!";
+ let reader = native_reader(&buf[..]);
+ let reader = reader.range_from(7..);
+ assert_eq!(reader.to_string(), Ok(Cow::from("world!")));
+ }
+
+ // The bytes of the rocket emoji (🚀 = [0xf0, 0x9f, 0x9a, 0x80]), rotated left
+ // by one so that they are no longer valid UTF-8.
+ const BAD_UTF8: &[u8] = &[0x9f, 0x9a, 0x80, 0xf0];
+
+ #[test]
+ fn to_string_err() {
+ let reader = native_reader(BAD_UTF8);
+ assert!(reader.to_string().is_err());
+ }
+
+ #[test]
+ fn to_string_lossy() {
+ let reader = native_reader(BAD_UTF8);
+ assert_eq!(reader.to_string_lossy(), Ok(Cow::from("����")));
+ }
+
+ #[test]
+ fn read_u8_array() {
+ let mut reader = native_reader(BAD_UTF8);
+ reader.skip(1).unwrap();
+ let arr: [u8; 2] = reader.read_u8_array().unwrap();
+ assert_eq!(arr, &BAD_UTF8[1..3]);
+ assert_eq!(reader.bytes(), &BAD_UTF8[3..]);
+ }
+}
diff --git a/vendor/gimli-0.26.2/src/read/endian_slice.rs b/vendor/gimli-0.26.2/src/read/endian_slice.rs
new file mode 100644
index 000000000..05262cdec
--- /dev/null
+++ b/vendor/gimli-0.26.2/src/read/endian_slice.rs
@@ -0,0 +1,350 @@
+//! Working with byte slices that have an associated endianity.
+
+#[cfg(feature = "read")]
+use alloc::borrow::Cow;
+#[cfg(feature = "read")]
+use alloc::string::String;
+use core::ops::{Deref, Index, Range, RangeFrom, RangeTo};
+use core::str;
+
+use crate::endianity::Endianity;
+use crate::read::{Error, Reader, ReaderOffsetId, Result};
+
+/// A `&[u8]` slice with endianity metadata.
+///
+/// This implements the `Reader` trait, which is used for all reading of DWARF sections.
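+///
+/// A small sketch of reading from a slice (the typed reads come from the `Reader` trait):
+///
+/// ```
+/// use gimli::{EndianSlice, LittleEndian, Reader};
+///
+/// let data = [0x01, 0x02, 0x03, 0x04];
+/// let mut reader = EndianSlice::new(&data, LittleEndian);
+/// assert_eq!(reader.read_u16().unwrap(), 0x0201);
+/// assert_eq!(reader.len(), 2);
+/// ```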
+#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct EndianSlice<'input, Endian>
+where
+ Endian: Endianity,
+{
+ slice: &'input [u8],
+ endian: Endian,
+}
+
+impl<'input, Endian> EndianSlice<'input, Endian>
+where
+ Endian: Endianity,
+{
+ /// Construct a new `EndianSlice` with the given slice and endianity.
+ #[inline]
+ pub fn new(slice: &'input [u8], endian: Endian) -> EndianSlice<'input, Endian> {
+ EndianSlice { slice, endian }
+ }
+
+ /// Return a reference to the raw slice.
+ #[inline]
+ #[doc(hidden)]
+ #[deprecated(note = "Method renamed to EndianSlice::slice; use that instead.")]
+ pub fn buf(&self) -> &'input [u8] {
+ self.slice
+ }
+
+ /// Return a reference to the raw slice.
+ #[inline]
+ pub fn slice(&self) -> &'input [u8] {
+ self.slice
+ }
+
+ /// Split the slice in two at the given index, resulting in the tuple where
+ /// the first item has range [0, idx), and the second has range [idx,
+ /// len). Panics if the index is out of bounds.
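+ ///
+ /// A short example, mirroring the unit test at the bottom of this module:
+ ///
+ /// ```
+ /// use gimli::{EndianSlice, LittleEndian};
+ ///
+ /// let slice = &[0x01, 0x02, 0x03, 0x04];
+ /// let endian_slice = EndianSlice::new(slice, LittleEndian);
+ /// let (left, right) = endian_slice.split_at(2);
+ /// assert_eq!(left, EndianSlice::new(&slice[..2], LittleEndian));
+ /// assert_eq!(right, EndianSlice::new(&slice[2..], LittleEndian));
+ /// ```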
+ #[inline]
+ pub fn split_at(
+ &self,
+ idx: usize,
+ ) -> (EndianSlice<'input, Endian>, EndianSlice<'input, Endian>) {
+ (self.range_to(..idx), self.range_from(idx..))
+ }
+
+ /// Find the first occurrence of a byte in the slice, and return its index.
+ #[inline]
+ pub fn find(&self, byte: u8) -> Option<usize> {
+ self.slice.iter().position(|ch| *ch == byte)
+ }
+
+ /// Return the offset of the start of the slice relative to the start
+ /// of the given slice.
+ #[inline]
+ pub fn offset_from(&self, base: EndianSlice<'input, Endian>) -> usize {
+ let base_ptr = base.slice.as_ptr() as *const u8 as usize;
+ let ptr = self.slice.as_ptr() as *const u8 as usize;
+ debug_assert!(base_ptr <= ptr);
+ debug_assert!(ptr + self.slice.len() <= base_ptr + base.slice.len());
+ ptr - base_ptr
+ }
+
+ /// Converts the slice to a string using `str::from_utf8`.
+ ///
+ /// Returns an error if the slice contains invalid characters.
+ #[inline]
+ pub fn to_string(&self) -> Result<&'input str> {
+ str::from_utf8(self.slice).map_err(|_| Error::BadUtf8)
+ }
+
+ /// Converts the slice to a string, including invalid characters,
+ /// using `String::from_utf8_lossy`.
+ #[cfg(feature = "read")]
+ #[inline]
+ pub fn to_string_lossy(&self) -> Cow<'input, str> {
+ String::from_utf8_lossy(self.slice)
+ }
+
+ #[inline]
+ fn read_slice(&mut self, len: usize) -> Result<&'input [u8]> {
+ if self.slice.len() < len {
+ Err(Error::UnexpectedEof(self.offset_id()))
+ } else {
+ let val = &self.slice[..len];
+ self.slice = &self.slice[len..];
+ Ok(val)
+ }
+ }
+}
+
+/// # Range Methods
+///
+/// Unfortunately, `std::ops::Index` *must* return a reference, so we can't
+/// implement `Index<Range<usize>>` to return a new `EndianSlice` the way we would
+/// like to. Instead, we abandon fancy indexing operators and have these plain
+/// old methods.
+impl<'input, Endian> EndianSlice<'input, Endian>
+where
+ Endian: Endianity,
+{
+ /// Take the given `start..end` range of the underlying slice and return a
+ /// new `EndianSlice`.
+ ///
+ /// ```
+ /// use gimli::{EndianSlice, LittleEndian};
+ ///
+ /// let slice = &[0x01, 0x02, 0x03, 0x04];
+ /// let endian_slice = EndianSlice::new(slice, LittleEndian);
+ /// assert_eq!(endian_slice.range(1..3),
+ /// EndianSlice::new(&slice[1..3], LittleEndian));
+ /// ```
+ pub fn range(&self, idx: Range<usize>) -> EndianSlice<'input, Endian> {
+ EndianSlice {
+ slice: &self.slice[idx],
+ endian: self.endian,
+ }
+ }
+
+ /// Take the given `start..` range of the underlying slice and return a new
+ /// `EndianSlice`.
+ ///
+ /// ```
+ /// use gimli::{EndianSlice, LittleEndian};
+ ///
+ /// let slice = &[0x01, 0x02, 0x03, 0x04];
+ /// let endian_slice = EndianSlice::new(slice, LittleEndian);
+ /// assert_eq!(endian_slice.range_from(2..),
+ /// EndianSlice::new(&slice[2..], LittleEndian));
+ /// ```
+ pub fn range_from(&self, idx: RangeFrom<usize>) -> EndianSlice<'input, Endian> {
+ EndianSlice {
+ slice: &self.slice[idx],
+ endian: self.endian,
+ }
+ }
+
+ /// Take the given `..end` range of the underlying slice and return a new
+ /// `EndianSlice`.
+ ///
+ /// ```
+ /// use gimli::{EndianSlice, LittleEndian};
+ ///
+ /// let slice = &[0x01, 0x02, 0x03, 0x04];
+ /// let endian_slice = EndianSlice::new(slice, LittleEndian);
+ /// assert_eq!(endian_slice.range_to(..3),
+ /// EndianSlice::new(&slice[..3], LittleEndian));
+ /// ```
+ pub fn range_to(&self, idx: RangeTo<usize>) -> EndianSlice<'input, Endian> {
+ EndianSlice {
+ slice: &self.slice[idx],
+ endian: self.endian,
+ }
+ }
+}
+
+impl<'input, Endian> Index<usize> for EndianSlice<'input, Endian>
+where
+ Endian: Endianity,
+{
+ type Output = u8;
+ fn index(&self, idx: usize) -> &Self::Output {
+ &self.slice[idx]
+ }
+}
+
+impl<'input, Endian> Index<RangeFrom<usize>> for EndianSlice<'input, Endian>
+where
+ Endian: Endianity,
+{
+ type Output = [u8];
+ fn index(&self, idx: RangeFrom<usize>) -> &Self::Output {
+ &self.slice[idx]
+ }
+}
+
+impl<'input, Endian> Deref for EndianSlice<'input, Endian>
+where
+ Endian: Endianity,
+{
+ type Target = [u8];
+ fn deref(&self) -> &Self::Target {
+ self.slice
+ }
+}
+
+impl<'input, Endian> Into<&'input [u8]> for EndianSlice<'input, Endian>
+where
+ Endian: Endianity,
+{
+ fn into(self) -> &'input [u8] {
+ self.slice
+ }
+}
+
+impl<'input, Endian> Reader for EndianSlice<'input, Endian>
+where
+ Endian: Endianity,
+{
+ type Endian = Endian;
+ type Offset = usize;
+
+ #[inline]
+ fn endian(&self) -> Endian {
+ self.endian
+ }
+
+ #[inline]
+ fn len(&self) -> usize {
+ self.slice.len()
+ }
+
+ #[inline]
+ fn is_empty(&self) -> bool {
+ self.slice.is_empty()
+ }
+
+ #[inline]
+ fn empty(&mut self) {
+ self.slice = &[];
+ }
+
+ #[inline]
+ fn truncate(&mut self, len: usize) -> Result<()> {
+ if self.slice.len() < len {
+ Err(Error::UnexpectedEof(self.offset_id()))
+ } else {
+ self.slice = &self.slice[..len];
+ Ok(())
+ }
+ }
+
+ #[inline]
+ fn offset_from(&self, base: &Self) -> usize {
+ self.offset_from(*base)
+ }
+
+ #[inline]
+ fn offset_id(&self) -> ReaderOffsetId {
+ ReaderOffsetId(self.slice.as_ptr() as u64)
+ }
+
+ #[inline]
+ fn lookup_offset_id(&self, id: ReaderOffsetId) -> Option<Self::Offset> {
+ let id = id.0;
+ let self_id = self.slice.as_ptr() as u64;
+ let self_len = self.slice.len() as u64;
+ if id >= self_id && id <= self_id + self_len {
+ Some((id - self_id) as usize)
+ } else {
+ None
+ }
+ }
+
+ #[inline]
+ fn find(&self, byte: u8) -> Result<usize> {
+ self.find(byte)
+ .ok_or_else(|| Error::UnexpectedEof(self.offset_id()))
+ }
+
+ #[inline]
+ fn skip(&mut self, len: usize) -> Result<()> {
+ if self.slice.len() < len {
+ Err(Error::UnexpectedEof(self.offset_id()))
+ } else {
+ self.slice = &self.slice[len..];
+ Ok(())
+ }
+ }
+
+ #[inline]
+ fn split(&mut self, len: usize) -> Result<Self> {
+ let slice = self.read_slice(len)?;
+ Ok(EndianSlice::new(slice, self.endian))
+ }
+
+ #[cfg(not(feature = "read"))]
+ fn cannot_implement() -> super::reader::seal_if_no_alloc::Sealed {
+ super::reader::seal_if_no_alloc::Sealed
+ }
+
+ #[cfg(feature = "read")]
+ #[inline]
+ fn to_slice(&self) -> Result<Cow<[u8]>> {
+ Ok(self.slice.into())
+ }
+
+ #[cfg(feature = "read")]
+ #[inline]
+ fn to_string(&self) -> Result<Cow<str>> {
+ match str::from_utf8(self.slice) {
+ Ok(s) => Ok(s.into()),
+ _ => Err(Error::BadUtf8),
+ }
+ }
+
+ #[cfg(feature = "read")]
+ #[inline]
+ fn to_string_lossy(&self) -> Result<Cow<str>> {
+ Ok(String::from_utf8_lossy(self.slice))
+ }
+
+ #[inline]
+ fn read_slice(&mut self, buf: &mut [u8]) -> Result<()> {
+ let slice = self.read_slice(buf.len())?;
+ buf.copy_from_slice(slice);
+ Ok(())
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::endianity::NativeEndian;
+
+ #[test]
+ fn test_endian_slice_split_at() {
+ let endian = NativeEndian;
+ let slice = &[1, 2, 3, 4, 5, 6, 7, 8, 9, 0];
+ let eb = EndianSlice::new(slice, endian);
+ assert_eq!(
+ eb.split_at(3),
+ (
+ EndianSlice::new(&slice[..3], endian),
+ EndianSlice::new(&slice[3..], endian)
+ )
+ );
+ }
+
+ #[test]
+ #[should_panic]
+ fn test_endian_slice_split_at_out_of_bounds() {
+ let slice = &[1, 2, 3, 4, 5, 6, 7, 8, 9, 0];
+ let eb = EndianSlice::new(slice, NativeEndian);
+ eb.split_at(30);
+ }
+}
diff --git a/vendor/gimli-0.26.2/src/read/index.rs b/vendor/gimli-0.26.2/src/read/index.rs
new file mode 100644
index 000000000..129eb2fb1
--- /dev/null
+++ b/vendor/gimli-0.26.2/src/read/index.rs
@@ -0,0 +1,535 @@
+use core::slice;
+
+use crate::common::SectionId;
+use crate::constants;
+use crate::endianity::Endianity;
+use crate::read::{EndianSlice, Error, Reader, ReaderOffset, Result, Section};
+
+/// The data in the `.debug_cu_index` section of a `.dwp` file.
+///
+/// This section contains the compilation unit index.
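+///
+/// A minimal parsing sketch (`buf` is a placeholder for the raw section bytes):
+///
+/// ```
+/// # fn example(buf: &[u8]) -> gimli::Result<()> {
+/// let cu_index = gimli::DebugCuIndex::new(buf, gimli::LittleEndian);
+/// let index = cu_index.index()?;
+/// let _ = (index.version(), index.unit_count(), index.slot_count());
+/// # Ok(())
+/// # }
+/// ```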
+#[derive(Debug, Default, Clone, Copy)]
+pub struct DebugCuIndex<R> {
+ section: R,
+}
+
+impl<'input, Endian> DebugCuIndex<EndianSlice<'input, Endian>>
+where
+ Endian: Endianity,
+{
+ /// Construct a new `DebugCuIndex` instance from the data in the `.debug_cu_index`
+ /// section.
+ pub fn new(section: &'input [u8], endian: Endian) -> Self {
+ Self::from(EndianSlice::new(section, endian))
+ }
+}
+
+impl<R> Section<R> for DebugCuIndex<R> {
+ fn id() -> SectionId {
+ SectionId::DebugCuIndex
+ }
+
+ fn reader(&self) -> &R {
+ &self.section
+ }
+}
+
+impl<R> From<R> for DebugCuIndex<R> {
+ fn from(section: R) -> Self {
+ DebugCuIndex { section }
+ }
+}
+
+impl<R: Reader> DebugCuIndex<R> {
+ /// Parse the index header.
+ pub fn index(self) -> Result<UnitIndex<R>> {
+ UnitIndex::parse(self.section)
+ }
+}
+
+/// The data in the `.debug_tu_index` section of a `.dwp` file.
+///
+/// This section contains the type unit index.
+#[derive(Debug, Default, Clone, Copy)]
+pub struct DebugTuIndex<R> {
+ section: R,
+}
+
+impl<'input, Endian> DebugTuIndex<EndianSlice<'input, Endian>>
+where
+ Endian: Endianity,
+{
+ /// Construct a new `DebugTuIndex` instance from the data in the `.debug_tu_index`
+ /// section.
+ pub fn new(section: &'input [u8], endian: Endian) -> Self {
+ Self::from(EndianSlice::new(section, endian))
+ }
+}
+
+impl<R> Section<R> for DebugTuIndex<R> {
+ fn id() -> SectionId {
+ SectionId::DebugTuIndex
+ }
+
+ fn reader(&self) -> &R {
+ &self.section
+ }
+}
+
+impl<R> From<R> for DebugTuIndex<R> {
+ fn from(section: R) -> Self {
+ DebugTuIndex { section }
+ }
+}
+
+impl<R: Reader> DebugTuIndex<R> {
+ /// Parse the index header.
+ pub fn index(self) -> Result<UnitIndex<R>> {
+ UnitIndex::parse(self.section)
+ }
+}
+
+const SECTION_COUNT_MAX: u8 = 8;
+
+/// The partially parsed index from a `DebugCuIndex` or `DebugTuIndex`.
+#[derive(Debug, Clone)]
+pub struct UnitIndex<R: Reader> {
+ version: u16,
+ section_count: u32,
+ unit_count: u32,
+ slot_count: u32,
+ hash_ids: R,
+ hash_rows: R,
+ // Only `section_count` values are valid.
+ sections: [SectionId; SECTION_COUNT_MAX as usize],
+ offsets: R,
+ sizes: R,
+}
+
+impl<R: Reader> UnitIndex<R> {
+ fn parse(mut input: R) -> Result<UnitIndex<R>> {
+ if input.is_empty() {
+ return Ok(UnitIndex {
+ version: 5,
+ section_count: 0,
+ unit_count: 0,
+ slot_count: 0,
+ hash_ids: input.clone(),
+ hash_rows: input.clone(),
+ sections: [SectionId::DebugAbbrev; SECTION_COUNT_MAX as usize],
+ offsets: input.clone(),
+ sizes: input.clone(),
+ });
+ }
+
+ // GNU split-dwarf extension to DWARF 4 uses a 32-bit version,
+ // but DWARF 5 uses a 16-bit version followed by 16-bit padding.
+ let mut original_input = input.clone();
+ let version;
+ if input.read_u32()? == 2 {
+ version = 2
+ } else {
+ version = original_input.read_u16()?;
+ if version != 5 {
+ return Err(Error::UnknownVersion(version.into()));
+ }
+ }
+
+ let section_count = input.read_u32()?;
+ let unit_count = input.read_u32()?;
+ let slot_count = input.read_u32()?;
+ if slot_count == 0 || slot_count & (slot_count - 1) != 0 || slot_count <= unit_count {
+ return Err(Error::InvalidIndexSlotCount);
+ }
+
+ let hash_ids = input.split(R::Offset::from_u64(u64::from(slot_count) * 8)?)?;
+ let hash_rows = input.split(R::Offset::from_u64(u64::from(slot_count) * 4)?)?;
+
+ let mut sections = [SectionId::DebugAbbrev; SECTION_COUNT_MAX as usize];
+ if section_count > SECTION_COUNT_MAX.into() {
+ return Err(Error::InvalidIndexSectionCount);
+ }
+ for i in 0..section_count {
+ let section = input.read_u32()?;
+ sections[i as usize] = if version == 2 {
+ match constants::DwSectV2(section) {
+ constants::DW_SECT_V2_INFO => SectionId::DebugInfo,
+ constants::DW_SECT_V2_TYPES => SectionId::DebugTypes,
+ constants::DW_SECT_V2_ABBREV => SectionId::DebugAbbrev,
+ constants::DW_SECT_V2_LINE => SectionId::DebugLine,
+ constants::DW_SECT_V2_LOC => SectionId::DebugLoc,
+ constants::DW_SECT_V2_STR_OFFSETS => SectionId::DebugStrOffsets,
+ constants::DW_SECT_V2_MACINFO => SectionId::DebugMacinfo,
+ constants::DW_SECT_V2_MACRO => SectionId::DebugMacro,
+ _ => return Err(Error::UnknownIndexSection),
+ }
+ } else {
+ match constants::DwSect(section) {
+ constants::DW_SECT_INFO => SectionId::DebugInfo,
+ constants::DW_SECT_ABBREV => SectionId::DebugAbbrev,
+ constants::DW_SECT_LINE => SectionId::DebugLine,
+ constants::DW_SECT_LOCLISTS => SectionId::DebugLocLists,
+ constants::DW_SECT_STR_OFFSETS => SectionId::DebugStrOffsets,
+ constants::DW_SECT_MACRO => SectionId::DebugMacro,
+ constants::DW_SECT_RNGLISTS => SectionId::DebugRngLists,
+ _ => return Err(Error::UnknownIndexSection),
+ }
+ };
+ }
+
+ let offsets = input.split(R::Offset::from_u64(
+ u64::from(unit_count) * u64::from(section_count) * 4,
+ )?)?;
+ let sizes = input.split(R::Offset::from_u64(
+ u64::from(unit_count) * u64::from(section_count) * 4,
+ )?)?;
+
+ Ok(UnitIndex {
+ version,
+ section_count,
+ unit_count,
+ slot_count,
+ hash_ids,
+ hash_rows,
+ sections,
+ offsets,
+ sizes,
+ })
+ }
+
+ /// Find `id` in the index hash table, and return the row index.
+ ///
+ /// `id` may be a compilation unit ID if this index is from `.debug_cu_index`,
+ /// or a type signature if this index is from `.debug_tu_index`.
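+ ///
+ /// Typical use pairs `find` with `UnitIndex::sections` (sketch; `index` and
+ /// `dwo_id` are placeholders supplied by the caller):
+ ///
+ /// ```
+ /// # fn example<R: gimli::Reader>(index: &gimli::UnitIndex<R>, dwo_id: gimli::DwoId) -> gimli::Result<()> {
+ /// if let Some(row) = index.find(dwo_id.0) {
+ ///     for contribution in index.sections(row)? {
+ ///         let _ = (contribution.section, contribution.offset, contribution.size);
+ ///     }
+ /// }
+ /// # Ok(())
+ /// # }
+ /// ```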
+ pub fn find(&self, id: u64) -> Option<u32> {
+ if self.slot_count == 0 {
+ return None;
+ }
+ let mask = u64::from(self.slot_count - 1);
+ let mut hash1 = id & mask;
+ let hash2 = ((id >> 32) & mask) | 1;
+ for _ in 0..self.slot_count {
+ // The length of these arrays was validated in `UnitIndex::parse`.
+ let mut hash_ids = self.hash_ids.clone();
+ hash_ids.skip(R::Offset::from_u64(hash1 * 8).ok()?).ok()?;
+ let hash_id = hash_ids.read_u64().ok()?;
+ if hash_id == id {
+ let mut hash_rows = self.hash_rows.clone();
+ hash_rows.skip(R::Offset::from_u64(hash1 * 4).ok()?).ok()?;
+ let hash_row = hash_rows.read_u32().ok()?;
+ return Some(hash_row);
+ }
+ if hash_id == 0 {
+ return None;
+ }
+ hash1 = (hash1 + hash2) & mask;
+ }
+ None
+ }
+
+ /// Return the section offsets and sizes for the given row index.
+ pub fn sections(&self, mut row: u32) -> Result<UnitIndexSectionIterator<R>> {
+ if row == 0 {
+ return Err(Error::InvalidIndexRow);
+ }
+ row -= 1;
+ if row >= self.unit_count {
+ return Err(Error::InvalidIndexRow);
+ }
+ let mut offsets = self.offsets.clone();
+ offsets.skip(R::Offset::from_u64(
+ u64::from(row) * u64::from(self.section_count) * 4,
+ )?)?;
+ let mut sizes = self.sizes.clone();
+ sizes.skip(R::Offset::from_u64(
+ u64::from(row) * u64::from(self.section_count) * 4,
+ )?)?;
+ Ok(UnitIndexSectionIterator {
+ sections: self.sections[..self.section_count as usize].iter(),
+ offsets,
+ sizes,
+ })
+ }
+
+ /// Return the version.
+ pub fn version(&self) -> u16 {
+ self.version
+ }
+
+ /// Return the number of sections.
+ pub fn section_count(&self) -> u32 {
+ self.section_count
+ }
+
+ /// Return the number of units.
+ pub fn unit_count(&self) -> u32 {
+ self.unit_count
+ }
+
+ /// Return the number of slots.
+ pub fn slot_count(&self) -> u32 {
+ self.slot_count
+ }
+}
+
+/// An iterator over the section offsets and sizes for a row in a `UnitIndex`.
+#[derive(Debug, Clone)]
+pub struct UnitIndexSectionIterator<'index, R: Reader> {
+ sections: slice::Iter<'index, SectionId>,
+ offsets: R,
+ sizes: R,
+}
+
+impl<'index, R: Reader> Iterator for UnitIndexSectionIterator<'index, R> {
+ type Item = UnitIndexSection;
+
+ fn next(&mut self) -> Option<UnitIndexSection> {
+ let section = *self.sections.next()?;
+ // The length of these arrays was validated in `UnitIndex::parse`.
+ let offset = self.offsets.read_u32().ok()?;
+ let size = self.sizes.read_u32().ok()?;
+ Some(UnitIndexSection {
+ section,
+ offset,
+ size,
+ })
+ }
+}
+
+/// Information about a unit's contribution to a section in a `.dwp` file.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub struct UnitIndexSection {
+ /// The section kind.
+ pub section: SectionId,
+ /// The base offset of the unit's contribution to the section.
+ pub offset: u32,
+ /// The size of the unit's contribution to the section.
+ pub size: u32,
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::endianity::BigEndian;
+ use test_assembler::{Endian, Section};
+
+ #[test]
+ fn test_empty() {
+ let buf = EndianSlice::new(&[], BigEndian);
+ let index = UnitIndex::parse(buf).unwrap();
+ assert!(index.find(0).is_none());
+ }
+
+ #[test]
+ fn test_version_2() {
+ #[rustfmt::skip]
+ let section = Section::with_endian(Endian::Big)
+ // Header.
+ .D32(2).D32(0).D32(0).D32(1)
+ // Slots.
+ .D64(0).D32(0);
+ let buf = section.get_contents().unwrap();
+ let buf = EndianSlice::new(&buf, BigEndian);
+ let index = UnitIndex::parse(buf).unwrap();
+ assert_eq!(index.version, 2);
+ }
+
+ #[test]
+ fn test_version_5() {
+ #[rustfmt::skip]
+ let section = Section::with_endian(Endian::Big)
+ // Header.
+ .D16(5).D16(0).D32(0).D32(0).D32(1)
+ // Slots.
+ .D64(0).D32(0);
+ let buf = section.get_contents().unwrap();
+ let buf = EndianSlice::new(&buf, BigEndian);
+ let index = UnitIndex::parse(buf).unwrap();
+ assert_eq!(index.version, 5);
+ }
+
+ #[test]
+ fn test_version_5_invalid() {
+ #[rustfmt::skip]
+ let section = Section::with_endian(Endian::Big)
+ // Header.
+ .D32(5).D32(0).D32(0).D32(1)
+ // Slots.
+ .D64(0).D32(0);
+ let buf = section.get_contents().unwrap();
+ let buf = EndianSlice::new(&buf, BigEndian);
+ assert!(UnitIndex::parse(buf).is_err());
+ }
+
+ #[test]
+ fn test_version_2_sections() {
+ #[rustfmt::skip]
+ let section = Section::with_endian(Endian::Big)
+ // Header.
+ .D32(2).D32(8).D32(1).D32(2)
+ // Slots.
+ .D64(0).D64(0).D32(0).D32(0)
+ // Sections.
+ .D32(constants::DW_SECT_V2_INFO.0)
+ .D32(constants::DW_SECT_V2_TYPES.0)
+ .D32(constants::DW_SECT_V2_ABBREV.0)
+ .D32(constants::DW_SECT_V2_LINE.0)
+ .D32(constants::DW_SECT_V2_LOC.0)
+ .D32(constants::DW_SECT_V2_STR_OFFSETS.0)
+ .D32(constants::DW_SECT_V2_MACINFO.0)
+ .D32(constants::DW_SECT_V2_MACRO.0)
+ // Offsets.
+ .D32(11).D32(12).D32(13).D32(14).D32(15).D32(16).D32(17).D32(18)
+ // Sizes.
+ .D32(21).D32(22).D32(23).D32(24).D32(25).D32(26).D32(27).D32(28);
+ let buf = section.get_contents().unwrap();
+ let buf = EndianSlice::new(&buf, BigEndian);
+ let index = UnitIndex::parse(buf).unwrap();
+ assert_eq!(index.section_count, 8);
+ assert_eq!(
+ index.sections,
+ [
+ SectionId::DebugInfo,
+ SectionId::DebugTypes,
+ SectionId::DebugAbbrev,
+ SectionId::DebugLine,
+ SectionId::DebugLoc,
+ SectionId::DebugStrOffsets,
+ SectionId::DebugMacinfo,
+ SectionId::DebugMacro,
+ ]
+ );
+ #[rustfmt::skip]
+ let expect = [
+ UnitIndexSection { section: SectionId::DebugInfo, offset: 11, size: 21 },
+ UnitIndexSection { section: SectionId::DebugTypes, offset: 12, size: 22 },
+ UnitIndexSection { section: SectionId::DebugAbbrev, offset: 13, size: 23 },
+ UnitIndexSection { section: SectionId::DebugLine, offset: 14, size: 24 },
+ UnitIndexSection { section: SectionId::DebugLoc, offset: 15, size: 25 },
+ UnitIndexSection { section: SectionId::DebugStrOffsets, offset: 16, size: 26 },
+ UnitIndexSection { section: SectionId::DebugMacinfo, offset: 17, size: 27 },
+ UnitIndexSection { section: SectionId::DebugMacro, offset: 18, size: 28 },
+ ];
+ let mut sections = index.sections(1).unwrap();
+ for section in &expect {
+ assert_eq!(*section, sections.next().unwrap());
+ }
+ assert!(sections.next().is_none());
+ }
+
+ #[test]
+ fn test_version_5_sections() {
+ #[rustfmt::skip]
+ let section = Section::with_endian(Endian::Big)
+ // Header.
+ .D16(5).D16(0).D32(7).D32(1).D32(2)
+ // Slots.
+ .D64(0).D64(0).D32(0).D32(0)
+ // Sections.
+ .D32(constants::DW_SECT_INFO.0)
+ .D32(constants::DW_SECT_ABBREV.0)
+ .D32(constants::DW_SECT_LINE.0)
+ .D32(constants::DW_SECT_LOCLISTS.0)
+ .D32(constants::DW_SECT_STR_OFFSETS.0)
+ .D32(constants::DW_SECT_MACRO.0)
+ .D32(constants::DW_SECT_RNGLISTS.0)
+ // Offsets.
+ .D32(11).D32(12).D32(13).D32(14).D32(15).D32(16).D32(17)
+ // Sizes.
+ .D32(21).D32(22).D32(23).D32(24).D32(25).D32(26).D32(27);
+ let buf = section.get_contents().unwrap();
+ let buf = EndianSlice::new(&buf, BigEndian);
+ let index = UnitIndex::parse(buf).unwrap();
+ assert_eq!(index.section_count, 7);
+ assert_eq!(
+ index.sections[..7],
+ [
+ SectionId::DebugInfo,
+ SectionId::DebugAbbrev,
+ SectionId::DebugLine,
+ SectionId::DebugLocLists,
+ SectionId::DebugStrOffsets,
+ SectionId::DebugMacro,
+ SectionId::DebugRngLists,
+ ]
+ );
+ #[rustfmt::skip]
+ let expect = [
+ UnitIndexSection { section: SectionId::DebugInfo, offset: 11, size: 21 },
+ UnitIndexSection { section: SectionId::DebugAbbrev, offset: 12, size: 22 },
+ UnitIndexSection { section: SectionId::DebugLine, offset: 13, size: 23 },
+ UnitIndexSection { section: SectionId::DebugLocLists, offset: 14, size: 24 },
+ UnitIndexSection { section: SectionId::DebugStrOffsets, offset: 15, size: 25 },
+ UnitIndexSection { section: SectionId::DebugMacro, offset: 16, size: 26 },
+ UnitIndexSection { section: SectionId::DebugRngLists, offset: 17, size: 27 },
+ ];
+ let mut sections = index.sections(1).unwrap();
+ for section in &expect {
+ assert_eq!(*section, sections.next().unwrap());
+ }
+ assert!(sections.next().is_none());
+
+ assert!(index.sections(0).is_err());
+ assert!(index.sections(2).is_err());
+ }
+
+ #[test]
+ fn test_hash() {
+ #[rustfmt::skip]
+ let section = Section::with_endian(Endian::Big)
+ // Header.
+ .D16(5).D16(0).D32(2).D32(3).D32(4)
+ // Slots.
+ .D64(0xffff_fff2_ffff_fff1)
+ .D64(0xffff_fff0_ffff_fff1)
+ .D64(0xffff_fff1_ffff_fff1)
+ .D64(0)
+ .D32(3).D32(1).D32(2).D32(0)
+ // Sections.
+ .D32(constants::DW_SECT_INFO.0)
+ .D32(constants::DW_SECT_ABBREV.0)
+ // Offsets.
+ .D32(0).D32(0).D32(0).D32(0).D32(0).D32(0)
+ // Sizes.
+ .D32(0).D32(0).D32(0).D32(0).D32(0).D32(0);
+ let buf = section.get_contents().unwrap();
+ let buf = EndianSlice::new(&buf, BigEndian);
+ let index = UnitIndex::parse(buf).unwrap();
+ assert_eq!(index.version(), 5);
+ assert_eq!(index.slot_count(), 4);
+ assert_eq!(index.unit_count(), 3);
+ assert_eq!(index.section_count(), 2);
+ assert_eq!(index.find(0xffff_fff0_ffff_fff1), Some(1));
+ assert_eq!(index.find(0xffff_fff1_ffff_fff1), Some(2));
+ assert_eq!(index.find(0xffff_fff2_ffff_fff1), Some(3));
+ assert_eq!(index.find(0xffff_fff3_ffff_fff1), None);
+ }
+
+ #[test]
+ fn test_cu_index() {
+ #[rustfmt::skip]
+ let section = Section::with_endian(Endian::Big)
+ // Header.
+ .D16(5).D16(0).D32(0).D32(0).D32(1)
+ // Slots.
+ .D64(0).D32(0);
+ let buf = section.get_contents().unwrap();
+ let cu_index = DebugCuIndex::new(&buf, BigEndian);
+ let index = cu_index.index().unwrap();
+ assert_eq!(index.version, 5);
+ }
+
+ #[test]
+ fn test_tu_index() {
+ #[rustfmt::skip]
+ let section = Section::with_endian(Endian::Big)
+ // Header.
+ .D16(5).D16(0).D32(0).D32(0).D32(1)
+ // Slots.
+ .D64(0).D32(0);
+ let buf = section.get_contents().unwrap();
+ let tu_index = DebugTuIndex::new(&buf, BigEndian);
+ let index = tu_index.index().unwrap();
+ assert_eq!(index.version, 5);
+ }
+}
diff --git a/vendor/gimli-0.26.2/src/read/line.rs b/vendor/gimli-0.26.2/src/read/line.rs
new file mode 100644
index 000000000..0e7380bb9
--- /dev/null
+++ b/vendor/gimli-0.26.2/src/read/line.rs
@@ -0,0 +1,3030 @@
+use alloc::vec::Vec;
+use core::fmt;
+use core::num::{NonZeroU64, Wrapping};
+use core::result;
+
+use crate::common::{
+ DebugLineOffset, DebugLineStrOffset, DebugStrOffset, DebugStrOffsetsIndex, Encoding, Format,
+ LineEncoding, SectionId,
+};
+use crate::constants;
+use crate::endianity::Endianity;
+use crate::read::{AttributeValue, EndianSlice, Error, Reader, ReaderOffset, Result, Section};
+
+/// The `DebugLine` struct contains the source location to instruction mapping
+/// found in the `.debug_line` section.
+#[derive(Debug, Default, Clone, Copy)]
+pub struct DebugLine<R> {
+ debug_line_section: R,
+}
+
+impl<'input, Endian> DebugLine<EndianSlice<'input, Endian>>
+where
+ Endian: Endianity,
+{
+ /// Construct a new `DebugLine` instance from the data in the `.debug_line`
+ /// section.
+ ///
+ /// It is the caller's responsibility to read the `.debug_line` section and
+ /// present it as a `&[u8]` slice. That means using some ELF loader on
+ /// Linux, a Mach-O loader on macOS, etc.
+ ///
+ /// ```
+ /// use gimli::{DebugLine, LittleEndian};
+ ///
+ /// # let buf = [0x00, 0x01, 0x02, 0x03];
+ /// # let read_debug_line_section_somehow = || &buf;
+ /// let debug_line = DebugLine::new(read_debug_line_section_somehow(), LittleEndian);
+ /// ```
+ pub fn new(debug_line_section: &'input [u8], endian: Endian) -> Self {
+ Self::from(EndianSlice::new(debug_line_section, endian))
+ }
+}
+
+impl<R: Reader> DebugLine<R> {
+ /// Parse the line number program whose header is at the given `offset` in the
+ /// `.debug_line` section.
+ ///
+ /// The `address_size` must match the compilation unit that the lines apply to.
+ /// The `comp_dir` should be from the `DW_AT_comp_dir` attribute of the compilation
+ /// unit. The `comp_name` should be from the `DW_AT_name` attribute of the
+ /// compilation unit.
+ ///
+ /// ```rust,no_run
+ /// use gimli::{DebugLine, DebugLineOffset, IncompleteLineProgram, EndianSlice, LittleEndian};
+ ///
+ /// # let buf = [];
+ /// # let read_debug_line_section_somehow = || &buf;
+ /// let debug_line = DebugLine::new(read_debug_line_section_somehow(), LittleEndian);
+ ///
+ /// // In a real example, we'd grab the offset via a compilation unit
+ /// // entry's `DW_AT_stmt_list` attribute, and the address size from that
+ /// // unit directly.
+ /// let offset = DebugLineOffset(0);
+ /// let address_size = 8;
+ ///
+ /// let program = debug_line.program(offset, address_size, None, None)
+ /// .expect("should have found a header at that offset, and parsed it OK");
+ /// ```
+ pub fn program(
+ &self,
+ offset: DebugLineOffset<R::Offset>,
+ address_size: u8,
+ comp_dir: Option<R>,
+ comp_name: Option<R>,
+ ) -> Result<IncompleteLineProgram<R>> {
+ let input = &mut self.debug_line_section.clone();
+ input.skip(offset.0)?;
+ let header = LineProgramHeader::parse(input, offset, address_size, comp_dir, comp_name)?;
+ let program = IncompleteLineProgram { header };
+ Ok(program)
+ }
+}
+
+impl<T> DebugLine<T> {
+ /// Create a `DebugLine` section that references the data in `self`.
+ ///
+ /// This is useful when `R` implements `Reader` but `T` does not.
+ ///
+ /// ## Example Usage
+ ///
+ /// ```rust,no_run
+ /// # let load_section = || unimplemented!();
+ /// // Read the DWARF section into a `Vec` with whatever object loader you're using.
+ /// let owned_section: gimli::DebugLine<Vec<u8>> = load_section();
+ /// // Create a reference to the DWARF section.
+ /// let section = owned_section.borrow(|section| {
+ /// gimli::EndianSlice::new(&section, gimli::LittleEndian)
+ /// });
+ /// ```
+ pub fn borrow<'a, F, R>(&'a self, mut borrow: F) -> DebugLine<R>
+ where
+ F: FnMut(&'a T) -> R,
+ {
+ borrow(&self.debug_line_section).into()
+ }
+}
+
+impl<R> Section<R> for DebugLine<R> {
+ fn id() -> SectionId {
+ SectionId::DebugLine
+ }
+
+ fn reader(&self) -> &R {
+ &self.debug_line_section
+ }
+}
+
+impl<R> From<R> for DebugLine<R> {
+ fn from(debug_line_section: R) -> Self {
+ DebugLine { debug_line_section }
+ }
+}
+
+/// Deprecated. `LineNumberProgram` has been renamed to `LineProgram`.
+#[deprecated(note = "LineNumberProgram has been renamed to LineProgram, use that instead.")]
+pub type LineNumberProgram<R, Offset> = dyn LineProgram<R, Offset>;
+
+/// A `LineProgram` provides access to a `LineProgramHeader` and
+ /// a way to add files to the file table if necessary. Gimli consumers should
+/// never need to use or see this trait.
+pub trait LineProgram<R, Offset = <R as Reader>::Offset>
+where
+ R: Reader<Offset = Offset>,
+ Offset: ReaderOffset,
+{
+ /// Get a reference to the held `LineProgramHeader`.
+ fn header(&self) -> &LineProgramHeader<R, Offset>;
+ /// Add a file to the file table if necessary.
+ fn add_file(&mut self, file: FileEntry<R, Offset>);
+}
+
+impl<R, Offset> LineProgram<R, Offset> for IncompleteLineProgram<R, Offset>
+where
+ R: Reader<Offset = Offset>,
+ Offset: ReaderOffset,
+{
+ fn header(&self) -> &LineProgramHeader<R, Offset> {
+ &self.header
+ }
+ fn add_file(&mut self, file: FileEntry<R, Offset>) {
+ self.header.file_names.push(file);
+ }
+}
+
+impl<'program, R, Offset> LineProgram<R, Offset> for &'program CompleteLineProgram<R, Offset>
+where
+ R: Reader<Offset = Offset>,
+ Offset: ReaderOffset,
+{
+ fn header(&self) -> &LineProgramHeader<R, Offset> {
+ &self.header
+ }
+ fn add_file(&mut self, _: FileEntry<R, Offset>) {
+ // Nop. Our file table is already complete.
+ }
+}
+
+/// Deprecated. `StateMachine` has been renamed to `LineRows`.
+#[deprecated(note = "StateMachine has been renamed to LineRows, use that instead.")]
+pub type StateMachine<R, Program, Offset> = LineRows<R, Program, Offset>;
+
+/// Executes a `LineProgram` to iterate over the rows in the matrix of line number information.
+///
+/// "The hypothetical machine used by a consumer of the line number information
+/// to expand the byte-coded instruction stream into a matrix of line number
+/// information." -- Section 6.2.1
+#[derive(Debug, Clone)]
+pub struct LineRows<R, Program, Offset = <R as Reader>::Offset>
+where
+ Program: LineProgram<R, Offset>,
+ R: Reader<Offset = Offset>,
+ Offset: ReaderOffset,
+{
+ program: Program,
+ row: LineRow,
+ instructions: LineInstructions<R>,
+}
+
+type OneShotLineRows<R, Offset = <R as Reader>::Offset> =
+ LineRows<R, IncompleteLineProgram<R, Offset>, Offset>;
+
+type ResumedLineRows<'program, R, Offset = <R as Reader>::Offset> =
+ LineRows<R, &'program CompleteLineProgram<R, Offset>, Offset>;
+
+impl<R, Program, Offset> LineRows<R, Program, Offset>
+where
+ Program: LineProgram<R, Offset>,
+ R: Reader<Offset = Offset>,
+ Offset: ReaderOffset,
+{
+ #[allow(clippy::new_ret_no_self)]
+ fn new(program: IncompleteLineProgram<R, Offset>) -> OneShotLineRows<R, Offset> {
+ let row = LineRow::new(program.header());
+ let instructions = LineInstructions {
+ input: program.header().program_buf.clone(),
+ };
+ LineRows {
+ program,
+ row,
+ instructions,
+ }
+ }
+
+ fn resume<'program>(
+ program: &'program CompleteLineProgram<R, Offset>,
+ sequence: &LineSequence<R>,
+ ) -> ResumedLineRows<'program, R, Offset> {
+ let row = LineRow::new(program.header());
+ let instructions = sequence.instructions.clone();
+ LineRows {
+ program,
+ row,
+ instructions,
+ }
+ }
+
+ /// Get a reference to the header for this state machine's line number
+ /// program.
+ #[inline]
+ pub fn header(&self) -> &LineProgramHeader<R, Offset> {
+ self.program.header()
+ }
+
+ /// Parse and execute the next instructions in the line number program until
+ /// another row in the line number matrix is computed.
+ ///
+ /// The freshly computed row is returned as `Ok(Some((header, row)))`.
+ /// If the matrix is complete, and there are no more new rows in the line
+ /// number matrix, then `Ok(None)` is returned. If there was an error parsing
+ /// an instruction, then `Err(e)` is returned.
+ ///
+ /// Unfortunately, the references mean that this cannot be a
+ /// `FallibleIterator`.
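+ ///
+ /// A minimal sketch of driving the state machine (the `example` function and
+ /// its `program` argument are illustrative, not part of the API):
+ ///
+ /// ```
+ /// # fn example<R: gimli::Reader>(program: gimli::IncompleteLineProgram<R>) -> gimli::Result<()> {
+ /// let mut rows = program.rows();
+ /// while let Some((header, row)) = rows.next_row()? {
+ /// // Each row pairs the program header with the current register values.
+ /// let _ = (header.version(), row.address(), row.line());
+ /// }
+ /// # Ok(())
+ /// # }
+ /// ```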
+ pub fn next_row(&mut self) -> Result<Option<(&LineProgramHeader<R, Offset>, &LineRow)>> {
+ // Perform any reset that was required after copying the previous row.
+ self.row.reset(self.program.header());
+
+ loop {
+ // Split the borrow here, rather than calling `self.header()`.
+ match self.instructions.next_instruction(self.program.header()) {
+ Err(err) => return Err(err),
+ Ok(None) => return Ok(None),
+ Ok(Some(instruction)) => {
+ if self.row.execute(instruction, &mut self.program) {
+ return Ok(Some((self.header(), &self.row)));
+ }
+ // Fall through, parse the next instruction, and see if that
+ // yields a row.
+ }
+ }
+ }
+ }
+}
+
+/// Deprecated. `Opcode` has been renamed to `LineInstruction`.
+#[deprecated(note = "Opcode has been renamed to LineInstruction, use that instead.")]
+pub type Opcode<R> = LineInstruction<R, <R as Reader>::Offset>;
+
+/// A parsed line number program instruction.
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+pub enum LineInstruction<R, Offset = <R as Reader>::Offset>
+where
+ R: Reader<Offset = Offset>,
+ Offset: ReaderOffset,
+{
+ /// > ### 6.2.5.1 Special Opcodes
+ /// >
+ /// > Each ubyte special opcode has the following effect on the state machine:
+ /// >
+ /// > 1. Add a signed integer to the line register.
+ /// >
+ /// > 2. Modify the operation pointer by incrementing the address and
+ /// > op_index registers as described below.
+ /// >
+ /// > 3. Append a row to the matrix using the current values of the state
+ /// > machine registers.
+ /// >
+ /// > 4. Set the basic_block register to “false.”
+ /// >
+ /// > 5. Set the prologue_end register to “false.”
+ /// >
+ /// > 6. Set the epilogue_begin register to “false.”
+ /// >
+ /// > 7. Set the discriminator register to 0.
+ /// >
+ /// > All of the special opcodes do those same seven things; they differ from
+ /// > one another only in what values they add to the line, address and
+ /// > op_index registers.
+ Special(u8),
+
+ /// "[`LineInstruction::Copy`] appends a row to the matrix using the current
+ /// values of the state machine registers. Then it sets the discriminator
+ /// register to 0, and sets the basic_block, prologue_end and epilogue_begin
+ /// registers to “false.”"
+ Copy,
+
+ /// "The DW_LNS_advance_pc opcode takes a single unsigned LEB128 operand as
+ /// the operation advance and modifies the address and op_index registers
+ /// [the same as `LineInstruction::Special`]"
+ AdvancePc(u64),
+
+ /// "The DW_LNS_advance_line opcode takes a single signed LEB128 operand and
+ /// adds that value to the line register of the state machine."
+ AdvanceLine(i64),
+
+ /// "The DW_LNS_set_file opcode takes a single unsigned LEB128 operand and
+ /// stores it in the file register of the state machine."
+ SetFile(u64),
+
+ /// "The DW_LNS_set_column opcode takes a single unsigned LEB128 operand and
+ /// stores it in the column register of the state machine."
+ SetColumn(u64),
+
+ /// "The DW_LNS_negate_stmt opcode takes no operands. It sets the is_stmt
+ /// register of the state machine to the logical negation of its current
+ /// value."
+ NegateStatement,
+
+ /// "The DW_LNS_set_basic_block opcode takes no operands. It sets the
+ /// basic_block register of the state machine to “true.”"
+ SetBasicBlock,
+
+ /// > The DW_LNS_const_add_pc opcode takes no operands. It advances the
+ /// > address and op_index registers by the increments corresponding to
+ /// > special opcode 255.
+ /// >
+ /// > When the line number program needs to advance the address by a small
+ /// > amount, it can use a single special opcode, which occupies a single
+ /// > byte. When it needs to advance the address by up to twice the range of
+ /// > the last special opcode, it can use DW_LNS_const_add_pc followed by a
+ /// > special opcode, for a total of two bytes. Only if it needs to advance
+ /// > the address by more than twice that range will it need to use both
+ /// > DW_LNS_advance_pc and a special opcode, requiring three or more bytes.
+ ConstAddPc,
+
+ /// > The DW_LNS_fixed_advance_pc opcode takes a single uhalf (unencoded)
+ /// > operand and adds it to the address register of the state machine and
+ /// > sets the op_index register to 0. This is the only standard opcode whose
+ /// > operand is not a variable length number. It also does not multiply the
+ /// > operand by the minimum_instruction_length field of the header.
+ FixedAddPc(u16),
+
+ /// "[`LineInstruction::SetPrologueEnd`] sets the prologue_end register to “true”."
+ SetPrologueEnd,
+
+ /// "[`LineInstruction::SetEpilogueBegin`] sets the epilogue_begin register to
+ /// “true”."
+ SetEpilogueBegin,
+
+ /// "The DW_LNS_set_isa opcode takes a single unsigned LEB128 operand and
+ /// stores that value in the isa register of the state machine."
+ SetIsa(u64),
+
+ /// An unknown standard opcode with zero operands.
+ UnknownStandard0(constants::DwLns),
+
+ /// An unknown standard opcode with one operand.
+ UnknownStandard1(constants::DwLns, u64),
+
+ /// An unknown standard opcode with multiple operands.
+ UnknownStandardN(constants::DwLns, R),
+
+ /// > [`LineInstruction::EndSequence`] sets the end_sequence register of the state
+ /// > machine to “true” and appends a row to the matrix using the current
+ /// > values of the state-machine registers. Then it resets the registers to
+ /// > the initial values specified above (see Section 6.2.2). Every line
+ /// > number program sequence must end with a DW_LNE_end_sequence instruction
+ /// > which creates a row whose address is that of the byte after the last
+ /// > target machine instruction of the sequence.
+ EndSequence,
+
+ /// > The DW_LNE_set_address opcode takes a single relocatable address as an
+ /// > operand. The size of the operand is the size of an address on the target
+ /// > machine. It sets the address register to the value given by the
+ /// > relocatable address and sets the op_index register to 0.
+ /// >
+ /// > All of the other line number program opcodes that affect the address
+ /// > register add a delta to it. This instruction stores a relocatable value
+ /// > into it instead.
+ SetAddress(u64),
+
+ /// Defines a new source file in the line number program and appends it to
+ /// the line number program header's list of source files.
+ DefineFile(FileEntry<R, Offset>),
+
+ /// "The DW_LNE_set_discriminator opcode takes a single parameter, an
+ /// unsigned LEB128 integer. It sets the discriminator register to the new
+ /// value."
+ SetDiscriminator(u64),
+
+ /// An unknown extended opcode and the slice of its unparsed operands.
+ UnknownExtended(constants::DwLne, R),
+}
+
+impl<R, Offset> LineInstruction<R, Offset>
+where
+ R: Reader<Offset = Offset>,
+ Offset: ReaderOffset,
+{
+ fn parse<'header>(
+ header: &'header LineProgramHeader<R>,
+ input: &mut R,
+ ) -> Result<LineInstruction<R>>
+ where
+ R: 'header,
+ {
+ let opcode = input.read_u8()?;
+ if opcode == 0 {
+ let length = input.read_uleb128().and_then(R::Offset::from_u64)?;
+ let mut instr_rest = input.split(length)?;
+ let opcode = instr_rest.read_u8()?;
+
+ match constants::DwLne(opcode) {
+ constants::DW_LNE_end_sequence => Ok(LineInstruction::EndSequence),
+
+ constants::DW_LNE_set_address => {
+ let address = instr_rest.read_address(header.address_size())?;
+ Ok(LineInstruction::SetAddress(address))
+ }
+
+ constants::DW_LNE_define_file => {
+ if header.version() <= 4 {
+ let path_name = instr_rest.read_null_terminated_slice()?;
+ let entry = FileEntry::parse(&mut instr_rest, path_name)?;
+ Ok(LineInstruction::DefineFile(entry))
+ } else {
+ Ok(LineInstruction::UnknownExtended(
+ constants::DW_LNE_define_file,
+ instr_rest,
+ ))
+ }
+ }
+
+ constants::DW_LNE_set_discriminator => {
+ let discriminator = instr_rest.read_uleb128()?;
+ Ok(LineInstruction::SetDiscriminator(discriminator))
+ }
+
+ otherwise => Ok(LineInstruction::UnknownExtended(otherwise, instr_rest)),
+ }
+ } else if opcode >= header.opcode_base {
+ Ok(LineInstruction::Special(opcode))
+ } else {
+ match constants::DwLns(opcode) {
+ constants::DW_LNS_copy => Ok(LineInstruction::Copy),
+
+ constants::DW_LNS_advance_pc => {
+ let advance = input.read_uleb128()?;
+ Ok(LineInstruction::AdvancePc(advance))
+ }
+
+ constants::DW_LNS_advance_line => {
+ let increment = input.read_sleb128()?;
+ Ok(LineInstruction::AdvanceLine(increment))
+ }
+
+ constants::DW_LNS_set_file => {
+ let file = input.read_uleb128()?;
+ Ok(LineInstruction::SetFile(file))
+ }
+
+ constants::DW_LNS_set_column => {
+ let column = input.read_uleb128()?;
+ Ok(LineInstruction::SetColumn(column))
+ }
+
+ constants::DW_LNS_negate_stmt => Ok(LineInstruction::NegateStatement),
+
+ constants::DW_LNS_set_basic_block => Ok(LineInstruction::SetBasicBlock),
+
+ constants::DW_LNS_const_add_pc => Ok(LineInstruction::ConstAddPc),
+
+ constants::DW_LNS_fixed_advance_pc => {
+ let advance = input.read_u16()?;
+ Ok(LineInstruction::FixedAddPc(advance))
+ }
+
+ constants::DW_LNS_set_prologue_end => Ok(LineInstruction::SetPrologueEnd),
+
+ constants::DW_LNS_set_epilogue_begin => Ok(LineInstruction::SetEpilogueBegin),
+
+ constants::DW_LNS_set_isa => {
+ let isa = input.read_uleb128()?;
+ Ok(LineInstruction::SetIsa(isa))
+ }
+
+ otherwise => {
+ let mut opcode_lengths = header.standard_opcode_lengths().clone();
+ opcode_lengths.skip(R::Offset::from_u8(opcode - 1))?;
+ let num_args = opcode_lengths.read_u8()? as usize;
+ match num_args {
+ 0 => Ok(LineInstruction::UnknownStandard0(otherwise)),
+ 1 => {
+ let arg = input.read_uleb128()?;
+ Ok(LineInstruction::UnknownStandard1(otherwise, arg))
+ }
+ _ => {
+ let mut args = input.clone();
+ for _ in 0..num_args {
+ input.read_uleb128()?;
+ }
+ let len = input.offset_from(&args);
+ args.truncate(len)?;
+ Ok(LineInstruction::UnknownStandardN(otherwise, args))
+ }
+ }
+ }
+ }
+ }
+ }
+}
+
+impl<R, Offset> fmt::Display for LineInstruction<R, Offset>
+where
+ R: Reader<Offset = Offset>,
+ Offset: ReaderOffset,
+{
+ fn fmt(&self, f: &mut fmt::Formatter) -> result::Result<(), fmt::Error> {
+ match *self {
+ LineInstruction::Special(opcode) => write!(f, "Special opcode {}", opcode),
+ LineInstruction::Copy => write!(f, "{}", constants::DW_LNS_copy),
+ LineInstruction::AdvancePc(advance) => {
+ write!(f, "{} by {}", constants::DW_LNS_advance_pc, advance)
+ }
+ LineInstruction::AdvanceLine(increment) => {
+ write!(f, "{} by {}", constants::DW_LNS_advance_line, increment)
+ }
+ LineInstruction::SetFile(file) => {
+ write!(f, "{} to {}", constants::DW_LNS_set_file, file)
+ }
+ LineInstruction::SetColumn(column) => {
+ write!(f, "{} to {}", constants::DW_LNS_set_column, column)
+ }
+ LineInstruction::NegateStatement => write!(f, "{}", constants::DW_LNS_negate_stmt),
+ LineInstruction::SetBasicBlock => write!(f, "{}", constants::DW_LNS_set_basic_block),
+ LineInstruction::ConstAddPc => write!(f, "{}", constants::DW_LNS_const_add_pc),
+ LineInstruction::FixedAddPc(advance) => {
+ write!(f, "{} by {}", constants::DW_LNS_fixed_advance_pc, advance)
+ }
+ LineInstruction::SetPrologueEnd => write!(f, "{}", constants::DW_LNS_set_prologue_end),
+ LineInstruction::SetEpilogueBegin => {
+ write!(f, "{}", constants::DW_LNS_set_epilogue_begin)
+ }
+ LineInstruction::SetIsa(isa) => write!(f, "{} to {}", constants::DW_LNS_set_isa, isa),
+ LineInstruction::UnknownStandard0(opcode) => write!(f, "Unknown {}", opcode),
+ LineInstruction::UnknownStandard1(opcode, arg) => {
+ write!(f, "Unknown {} with operand {}", opcode, arg)
+ }
+ LineInstruction::UnknownStandardN(opcode, ref args) => {
+ write!(f, "Unknown {} with operands {:?}", opcode, args)
+ }
+ LineInstruction::EndSequence => write!(f, "{}", constants::DW_LNE_end_sequence),
+ LineInstruction::SetAddress(address) => {
+ write!(f, "{} to {}", constants::DW_LNE_set_address, address)
+ }
+ LineInstruction::DefineFile(_) => write!(f, "{}", constants::DW_LNE_define_file),
+ LineInstruction::SetDiscriminator(discr) => {
+ write!(f, "{} to {}", constants::DW_LNE_set_discriminator, discr)
+ }
+ LineInstruction::UnknownExtended(opcode, _) => write!(f, "Unknown {}", opcode),
+ }
+ }
+}
+
+/// Deprecated. `OpcodesIter` has been renamed to `LineInstructions`.
+#[deprecated(note = "OpcodesIter has been renamed to LineInstructions, use that instead.")]
+pub type OpcodesIter<R> = LineInstructions<R>;
+
+/// An iterator yielding parsed instructions.
+///
+/// See
+/// [`LineProgramHeader::instructions`](./struct.LineProgramHeader.html#method.instructions)
+/// for more details.
+#[derive(Clone, Debug)]
+pub struct LineInstructions<R: Reader> {
+ input: R,
+}
+
+impl<R: Reader> LineInstructions<R> {
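+ /// Return a copy of `self` truncated to end where `other` begins; that is,
+ /// the instructions that precede `other` in the stream. This is used by
+ /// `IncompleteLineProgram::sequences` to slice out one sequence's instructions.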
+ fn remove_trailing(&self, other: &LineInstructions<R>) -> Result<LineInstructions<R>> {
+ let offset = other.input.offset_from(&self.input);
+ let mut input = self.input.clone();
+ input.truncate(offset)?;
+ Ok(LineInstructions { input })
+ }
+}
+
+impl<R: Reader> LineInstructions<R> {
+ /// Advance the iterator and return the next instruction.
+ ///
+ /// Returns the newly parsed instruction as `Ok(Some(instruction))`. Returns
+ /// `Ok(None)` when iteration is complete and all instructions have already been
+ /// parsed and yielded. If an error occurs while parsing the next instruction,
+ /// then this error is returned as `Err(e)`, and all subsequent calls return
+ /// `Ok(None)`.
+ ///
+ /// Unfortunately, the `header` parameter means that this cannot be a
+ /// `FallibleIterator`.
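+ ///
+ /// A hypothetical sketch of walking the raw instruction stream, where `header`
+ /// is whatever `LineProgramHeader` the instructions were obtained from:
+ ///
+ /// ```
+ /// # fn example<R: gimli::Reader>(header: &gimli::LineProgramHeader<R>) -> gimli::Result<()> {
+ /// let mut instructions = header.instructions();
+ /// while let Some(instruction) = instructions.next_instruction(header)? {
+ /// println!("{}", instruction);
+ /// }
+ /// # Ok(())
+ /// # }
+ /// ```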
+ #[allow(clippy::inline_always)]
+ #[inline(always)]
+ pub fn next_instruction(
+ &mut self,
+ header: &LineProgramHeader<R>,
+ ) -> Result<Option<LineInstruction<R>>> {
+ if self.input.is_empty() {
+ return Ok(None);
+ }
+
+ match LineInstruction::parse(header, &mut self.input) {
+ Ok(instruction) => Ok(Some(instruction)),
+ Err(e) => {
+ self.input.empty();
+ Err(e)
+ }
+ }
+ }
+}
+
+/// Deprecated. `LineNumberRow` has been renamed to `LineRow`.
+#[deprecated(note = "LineNumberRow has been renamed to LineRow, use that instead.")]
+pub type LineNumberRow = LineRow;
+
+/// A row in the line number program's resulting matrix.
+///
+/// Each row is a copy of the registers of the state machine, as defined in section 6.2.2.
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+pub struct LineRow {
+ address: Wrapping<u64>,
+ op_index: Wrapping<u64>,
+ file: u64,
+ line: Wrapping<u64>,
+ column: u64,
+ is_stmt: bool,
+ basic_block: bool,
+ end_sequence: bool,
+ prologue_end: bool,
+ epilogue_begin: bool,
+ isa: u64,
+ discriminator: u64,
+}
+
+impl LineRow {
+ /// Create a line number row in the initial state for the given program.
+ pub fn new<R: Reader>(header: &LineProgramHeader<R>) -> Self {
+ LineRow {
+ // "At the beginning of each sequence within a line number program, the
+ // state of the registers is:" -- Section 6.2.2
+ address: Wrapping(0),
+ op_index: Wrapping(0),
+ file: 1,
+ line: Wrapping(1),
+ column: 0,
+ // "determined by default_is_stmt in the line number program header"
+ is_stmt: header.line_encoding.default_is_stmt,
+ basic_block: false,
+ end_sequence: false,
+ prologue_end: false,
+ epilogue_begin: false,
+ // "The isa value 0 specifies that the instruction set is the
+ // architecturally determined default instruction set. This may be fixed
+ // by the ABI, or it may be specified by other means, for example, by
+ // the object file description."
+ isa: 0,
+ discriminator: 0,
+ }
+ }
+
+ /// "The program-counter value corresponding to a machine instruction
+ /// generated by the compiler."
+ #[inline]
+ pub fn address(&self) -> u64 {
+ self.address.0
+ }
+
+ /// > An unsigned integer representing the index of an operation within a VLIW
+ /// > instruction. The index of the first operation is 0. For non-VLIW
+ /// > architectures, this register will always be 0.
+ /// >
+ /// > The address and op_index registers, taken together, form an operation
+ /// > pointer that can reference any individual operation within the
+ /// > instruction stream.
+ #[inline]
+ pub fn op_index(&self) -> u64 {
+ self.op_index.0
+ }
+
+ /// "An unsigned integer indicating the identity of the source file
+ /// corresponding to a machine instruction."
+ #[inline]
+ pub fn file_index(&self) -> u64 {
+ self.file
+ }
+
+ /// The source file corresponding to the current machine instruction.
+ #[inline]
+ pub fn file<'header, R: Reader>(
+ &self,
+ header: &'header LineProgramHeader<R>,
+ ) -> Option<&'header FileEntry<R>> {
+ header.file(self.file)
+ }
+
+ /// "An unsigned integer indicating a source line number. Lines are numbered
+ /// beginning at 1. The compiler may emit the value 0 in cases where an
+ /// instruction cannot be attributed to any source line."
+ /// Line number values of 0 are represented as `None`.
+ #[inline]
+ pub fn line(&self) -> Option<NonZeroU64> {
+ NonZeroU64::new(self.line.0)
+ }
+
+ /// "An unsigned integer indicating a column number within a source
+ /// line. Columns are numbered beginning at 1. The value 0 is reserved to
+ /// indicate that a statement begins at the “left edge” of the line."
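+ ///
+ /// An illustrative way to consume the result:
+ ///
+ /// ```
+ /// # fn example(row: &gimli::LineRow) {
+ /// match row.column() {
+ /// gimli::ColumnType::LeftEdge => { /* column 0: start of the line */ }
+ /// gimli::ColumnType::Column(column) => { let _ = column.get(); }
+ /// }
+ /// # }
+ /// ```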
+ #[inline]
+ pub fn column(&self) -> ColumnType {
+ NonZeroU64::new(self.column)
+ .map(ColumnType::Column)
+ .unwrap_or(ColumnType::LeftEdge)
+ }
+
+ /// "A boolean indicating that the current instruction is a recommended
+ /// breakpoint location. A recommended breakpoint location is intended to
+ /// “represent” a line, a statement and/or a semantically distinct subpart
+ /// of a statement."
+ #[inline]
+ pub fn is_stmt(&self) -> bool {
+ self.is_stmt
+ }
+
+ /// "A boolean indicating that the current instruction is the beginning of a
+ /// basic block."
+ #[inline]
+ pub fn basic_block(&self) -> bool {
+ self.basic_block
+ }
+
+ /// "A boolean indicating that the current address is that of the first byte
+ /// after the end of a sequence of target machine instructions. end_sequence
+ /// terminates a sequence of lines; therefore other information in the same
+ /// row is not meaningful."
+ #[inline]
+ pub fn end_sequence(&self) -> bool {
+ self.end_sequence
+ }
+
+ /// "A boolean indicating that the current address is one (of possibly many)
+ /// where execution should be suspended for an entry breakpoint of a
+ /// function."
+ #[inline]
+ pub fn prologue_end(&self) -> bool {
+ self.prologue_end
+ }
+
+ /// "A boolean indicating that the current address is one (of possibly many)
+ /// where execution should be suspended for an exit breakpoint of a
+ /// function."
+ #[inline]
+ pub fn epilogue_begin(&self) -> bool {
+ self.epilogue_begin
+ }
+
+ /// Tag for the current instruction set architecture.
+ ///
+ /// > An unsigned integer whose value encodes the applicable instruction set
+ /// > architecture for the current instruction.
+ /// >
+ /// > The encoding of instruction sets should be shared by all users of a
+ /// > given architecture. It is recommended that this encoding be defined by
+ /// > the ABI authoring committee for each architecture.
+ #[inline]
+ pub fn isa(&self) -> u64 {
+ self.isa
+ }
+
+ /// "An unsigned integer identifying the block to which the current
+ /// instruction belongs. Discriminator values are assigned arbitrarily by
+ /// the DWARF producer and serve to distinguish among multiple blocks that
+ /// may all be associated with the same source file, line, and column. Where
+ /// only one block exists for a given source position, the discriminator
+ /// value should be zero."
+ #[inline]
+ pub fn discriminator(&self) -> u64 {
+ self.discriminator
+ }
+
+ /// Execute the given instruction, and return true if a new row in the
+ /// line number matrix needs to be generated.
+ ///
+ /// Unknown opcodes are treated as no-ops.
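+ ///
+ /// A minimal sketch, assuming you already have an `IncompleteLineProgram` and
+ /// a parsed `LineInstruction` in hand (both are illustrative placeholders here):
+ ///
+ /// ```
+ /// # fn example<R: gimli::Reader>(program: &mut gimli::IncompleteLineProgram<R>, instruction: gimli::LineInstruction<R>) {
+ /// let mut row = gimli::LineRow::new(program.header());
+ /// if row.execute(instruction, program) {
+ /// // A new row of the matrix is ready; copy out whatever is needed.
+ /// let _ = (row.address(), row.line(), row.is_stmt());
+ /// }
+ /// # }
+ /// ```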
+ #[inline]
+ pub fn execute<R, Program>(
+ &mut self,
+ instruction: LineInstruction<R>,
+ program: &mut Program,
+ ) -> bool
+ where
+ Program: LineProgram<R>,
+ R: Reader,
+ {
+ match instruction {
+ LineInstruction::Special(opcode) => {
+ self.exec_special_opcode(opcode, program.header());
+ true
+ }
+
+ LineInstruction::Copy => true,
+
+ LineInstruction::AdvancePc(operation_advance) => {
+ self.apply_operation_advance(operation_advance, program.header());
+ false
+ }
+
+ LineInstruction::AdvanceLine(line_increment) => {
+ self.apply_line_advance(line_increment);
+ false
+ }
+
+ LineInstruction::SetFile(file) => {
+ self.file = file;
+ false
+ }
+
+ LineInstruction::SetColumn(column) => {
+ self.column = column;
+ false
+ }
+
+ LineInstruction::NegateStatement => {
+ self.is_stmt = !self.is_stmt;
+ false
+ }
+
+ LineInstruction::SetBasicBlock => {
+ self.basic_block = true;
+ false
+ }
+
+ LineInstruction::ConstAddPc => {
+ let adjusted = self.adjust_opcode(255, program.header());
+ let operation_advance = adjusted / program.header().line_encoding.line_range;
+ self.apply_operation_advance(u64::from(operation_advance), program.header());
+ false
+ }
+
+ LineInstruction::FixedAddPc(operand) => {
+ self.address += Wrapping(u64::from(operand));
+ self.op_index.0 = 0;
+ false
+ }
+
+ LineInstruction::SetPrologueEnd => {
+ self.prologue_end = true;
+ false
+ }
+
+ LineInstruction::SetEpilogueBegin => {
+ self.epilogue_begin = true;
+ false
+ }
+
+ LineInstruction::SetIsa(isa) => {
+ self.isa = isa;
+ false
+ }
+
+ LineInstruction::EndSequence => {
+ self.end_sequence = true;
+ true
+ }
+
+ LineInstruction::SetAddress(address) => {
+ self.address.0 = address;
+ self.op_index.0 = 0;
+ false
+ }
+
+ LineInstruction::DefineFile(entry) => {
+ program.add_file(entry);
+ false
+ }
+
+ LineInstruction::SetDiscriminator(discriminator) => {
+ self.discriminator = discriminator;
+ false
+ }
+
+ // Compatibility with future opcodes.
+ LineInstruction::UnknownStandard0(_)
+ | LineInstruction::UnknownStandard1(_, _)
+ | LineInstruction::UnknownStandardN(_, _)
+ | LineInstruction::UnknownExtended(_, _) => false,
+ }
+ }
+
+ /// Perform any reset that was required after copying the previous row.
+ #[inline]
+ pub fn reset<R: Reader>(&mut self, header: &LineProgramHeader<R>) {
+ if self.end_sequence {
+ // Previous instruction was EndSequence, so reset everything
+ // as specified in Section 6.2.5.3.
+ *self = Self::new(header);
+ } else {
+ // Previous instruction was one of:
+ // - Special - specified in Section 6.2.5.1, steps 4-7
+ // - Copy - specified in Section 6.2.5.2
+ // The reset behaviour is the same in both cases.
+ self.discriminator = 0;
+ self.basic_block = false;
+ self.prologue_end = false;
+ self.epilogue_begin = false;
+ }
+ }
+
+ /// Step 1 of section 6.2.5.1
+ fn apply_line_advance(&mut self, line_increment: i64) {
+ if line_increment < 0 {
+ let decrement = -line_increment as u64;
+ if decrement <= self.line.0 {
+ self.line.0 -= decrement;
+ } else {
+ self.line.0 = 0;
+ }
+ } else {
+ self.line += Wrapping(line_increment as u64);
+ }
+ }
+
+ /// Step 2 of section 6.2.5.1
+ fn apply_operation_advance<R: Reader>(
+ &mut self,
+ operation_advance: u64,
+ header: &LineProgramHeader<R>,
+ ) {
+ let operation_advance = Wrapping(operation_advance);
+
+ let minimum_instruction_length = u64::from(header.line_encoding.minimum_instruction_length);
+ let minimum_instruction_length = Wrapping(minimum_instruction_length);
+
+ let maximum_operations_per_instruction =
+ u64::from(header.line_encoding.maximum_operations_per_instruction);
+ let maximum_operations_per_instruction = Wrapping(maximum_operations_per_instruction);
+
+ if maximum_operations_per_instruction.0 == 1 {
+ self.address += minimum_instruction_length * operation_advance;
+ self.op_index.0 = 0;
+ } else {
+ let op_index_with_advance = self.op_index + operation_advance;
+ self.address += minimum_instruction_length
+ * (op_index_with_advance / maximum_operations_per_instruction);
+ self.op_index = op_index_with_advance % maximum_operations_per_instruction;
+ }
+ }
+
+ #[inline]
+ fn adjust_opcode<R: Reader>(&self, opcode: u8, header: &LineProgramHeader<R>) -> u8 {
+ opcode - header.opcode_base
+ }
+
+ /// Section 6.2.5.1
+ fn exec_special_opcode<R: Reader>(&mut self, opcode: u8, header: &LineProgramHeader<R>) {
+ let adjusted_opcode = self.adjust_opcode(opcode, header);
+
+ let line_range = header.line_encoding.line_range;
+ let line_advance = adjusted_opcode % line_range;
+ let operation_advance = adjusted_opcode / line_range;
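+ // For example, with header values commonly emitted by GCC/Clang
+ // (opcode_base = 13, line_base = -5, line_range = 14), special opcode
+ // 0x4b (75) has adjusted_opcode = 62, so it advances the operation
+ // pointer by 62 / 14 = 4 operations and the line by -5 + (62 % 14) = 1.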
+
+ // Step 1
+ let line_base = i64::from(header.line_encoding.line_base);
+ self.apply_line_advance(line_base + i64::from(line_advance));
+
+ // Step 2
+ self.apply_operation_advance(u64::from(operation_advance), header);
+ }
+}
+
+/// The type of column that a row is referring to.
+#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
+pub enum ColumnType {
+ /// The `LeftEdge` means that the statement begins at the start of the new
+ /// line.
+ LeftEdge,
+ /// A column number, whose range begins at 1.
+ Column(NonZeroU64),
+}
+
+/// Deprecated. `LineNumberSequence` has been renamed to `LineSequence`.
+#[deprecated(note = "LineNumberSequence has been renamed to LineSequence, use that instead.")]
+pub type LineNumberSequence<R> = LineSequence<R>;
+
+/// A sequence within a line number program. A sequence, as defined in section
+/// 6.2.5 of the standard, is a linear subset of a line number program within
+/// which addresses are monotonically increasing.
+#[derive(Clone, Debug)]
+pub struct LineSequence<R: Reader> {
+ /// The first address that is covered by this sequence within the line number
+ /// program.
+ pub start: u64,
+ /// The first address that is *not* covered by this sequence within the line
+ /// number program.
+ pub end: u64,
+ instructions: LineInstructions<R>,
+}
+
+/// Deprecated. `LineNumberProgramHeader` has been renamed to `LineProgramHeader`.
+#[deprecated(
+ note = "LineNumberProgramHeader has been renamed to LineProgramHeader, use that instead."
+)]
+pub type LineNumberProgramHeader<R, Offset> = LineProgramHeader<R, Offset>;
+
+/// A header for a line number program in the `.debug_line` section, as defined
+/// in section 6.2.4 of the standard.
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub struct LineProgramHeader<R, Offset = <R as Reader>::Offset>
+where
+ R: Reader<Offset = Offset>,
+ Offset: ReaderOffset,
+{
+ encoding: Encoding,
+ offset: DebugLineOffset<Offset>,
+ unit_length: Offset,
+
+ header_length: Offset,
+
+ line_encoding: LineEncoding,
+
+ /// "The number assigned to the first special opcode."
+ opcode_base: u8,
+
+ /// "This array specifies the number of LEB128 operands for each of the
+ /// standard opcodes. The first element of the array corresponds to the
+ /// opcode whose value is 1, and the last element corresponds to the opcode
+ /// whose value is `opcode_base - 1`."
+ standard_opcode_lengths: R,
+
+ /// "A sequence of directory entry format descriptions."
+ directory_entry_format: Vec<FileEntryFormat>,
+
+ /// > Entries in this sequence describe each path that was searched for
+ /// > included source files in this compilation. (The paths include those
+ /// > directories specified explicitly by the user for the compiler to search
+ /// > and those the compiler searches without explicit direction.) Each path
+ /// > entry is either a full path name or is relative to the current directory
+ /// > of the compilation.
+ /// >
+ /// > The last entry is followed by a single null byte.
+ include_directories: Vec<AttributeValue<R, Offset>>,
+
+ /// "A sequence of file entry format descriptions."
+ file_name_entry_format: Vec<FileEntryFormat>,
+
+ /// "Entries in this sequence describe source files that contribute to the
+ /// line number information for this compilation unit or are used in other
+ /// contexts."
+ file_names: Vec<FileEntry<R, Offset>>,
+
+ /// The encoded line program instructions.
+ program_buf: R,
+
+ /// The current directory of the compilation.
+ comp_dir: Option<R>,
+
+ /// The primary source file.
+ comp_file: Option<FileEntry<R, Offset>>,
+}
+
+impl<R, Offset> LineProgramHeader<R, Offset>
+where
+ R: Reader<Offset = Offset>,
+ Offset: ReaderOffset,
+{
+ /// Return the offset of the line number program header in the `.debug_line` section.
+ pub fn offset(&self) -> DebugLineOffset<R::Offset> {
+ self.offset
+ }
+
+ /// Return the length of the line number program and header, not including
+ /// the length of the encoded length itself.
+ pub fn unit_length(&self) -> R::Offset {
+ self.unit_length
+ }
+
+ /// Return the encoding parameters for this header's line program.
+ pub fn encoding(&self) -> Encoding {
+ self.encoding
+ }
+
+ /// Get the version of this header's line program.
+ pub fn version(&self) -> u16 {
+ self.encoding.version
+ }
+
+ /// Get the length of the encoded line number program header, not including
+ /// the length of the encoded length itself.
+ pub fn header_length(&self) -> R::Offset {
+ self.header_length
+ }
+
+ /// Get the size in bytes of a target machine address.
+ pub fn address_size(&self) -> u8 {
+ self.encoding.address_size
+ }
+
+ /// Whether this line program is encoded in 64- or 32-bit DWARF.
+ pub fn format(&self) -> Format {
+ self.encoding.format
+ }
+
+ /// Get the line encoding parameters for this header's line program.
+ pub fn line_encoding(&self) -> LineEncoding {
+ self.line_encoding
+ }
+
+ /// Get the minimum instruction length any instruction in this header's line
+ /// program may have.
+ pub fn minimum_instruction_length(&self) -> u8 {
+ self.line_encoding.minimum_instruction_length
+ }
+
+ /// Get the maximum number of operations each instruction in this header's
+ /// line program may have.
+ pub fn maximum_operations_per_instruction(&self) -> u8 {
+ self.line_encoding.maximum_operations_per_instruction
+ }
+
+ /// Get the default value of the `is_stmt` register for this header's line
+ /// program.
+ pub fn default_is_stmt(&self) -> bool {
+ self.line_encoding.default_is_stmt
+ }
+
+ /// Get the line base for this header's line program.
+ pub fn line_base(&self) -> i8 {
+ self.line_encoding.line_base
+ }
+
+ /// Get the line range for this header's line program.
+ pub fn line_range(&self) -> u8 {
+ self.line_encoding.line_range
+ }
+
+ /// Get the opcode base for this header's line program.
+ pub fn opcode_base(&self) -> u8 {
+ self.opcode_base
+ }
+
+ /// An array of `u8` that specifies the number of LEB128 operands for
+ /// each of the standard opcodes.
+ pub fn standard_opcode_lengths(&self) -> &R {
+ &self.standard_opcode_lengths
+ }
+
+ /// Get the format of a directory entry.
+ pub fn directory_entry_format(&self) -> &[FileEntryFormat] {
+ &self.directory_entry_format[..]
+ }
+
+ /// Get the set of include directories for this header's line program.
+ ///
+ /// For DWARF version <= 4, the compilation's current directory is not included
+ /// in the return value, but is implicitly considered to be in the set per spec.
+ pub fn include_directories(&self) -> &[AttributeValue<R, Offset>] {
+ &self.include_directories[..]
+ }
+
+ /// The include directory with the given directory index.
+ ///
+ /// A directory index of 0 corresponds to the compilation unit directory.
+ pub fn directory(&self, directory: u64) -> Option<AttributeValue<R, Offset>> {
+ if self.encoding.version <= 4 {
+ if directory == 0 {
+ self.comp_dir.clone().map(AttributeValue::String)
+ } else {
+ let directory = directory as usize - 1;
+ self.include_directories.get(directory).cloned()
+ }
+ } else {
+ self.include_directories.get(directory as usize).cloned()
+ }
+ }
+
+ /// Get the format of a file name entry.
+ pub fn file_name_entry_format(&self) -> &[FileEntryFormat] {
+ &self.file_name_entry_format[..]
+ }
+
+ /// Return true if the file entries may have valid timestamps.
+ ///
+ /// Only returns false if we definitely know that all timestamp fields
+ /// are invalid.
+ pub fn file_has_timestamp(&self) -> bool {
+ self.encoding.version <= 4
+ || self
+ .file_name_entry_format
+ .iter()
+ .any(|x| x.content_type == constants::DW_LNCT_timestamp)
+ }
+
+ /// Return true if the file entries may have valid sizes.
+ ///
+ /// Only returns false if we definitely know that all size fields
+ /// are invalid.
+ pub fn file_has_size(&self) -> bool {
+ self.encoding.version <= 4
+ || self
+ .file_name_entry_format
+ .iter()
+ .any(|x| x.content_type == constants::DW_LNCT_size)
+ }
+
+ /// Return true if the file name entry format contains an MD5 field.
+ pub fn file_has_md5(&self) -> bool {
+ self.file_name_entry_format
+ .iter()
+ .any(|x| x.content_type == constants::DW_LNCT_MD5)
+ }
+
+ /// Get the list of source files that appear in this header's line program.
+ pub fn file_names(&self) -> &[FileEntry<R, Offset>] {
+ &self.file_names[..]
+ }
+
+ /// The source file with the given file index.
+ ///
+ /// A file index of 0 corresponds to the compilation unit file.
+ /// Note that a file index of 0 is invalid for DWARF version <= 4,
+ /// but we support it anyway.
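+ ///
+ /// An illustrative lookup, resolving a row's file index back to its entry:
+ ///
+ /// ```
+ /// # fn example<R: gimli::Reader>(header: &gimli::LineProgramHeader<R>, row: &gimli::LineRow) {
+ /// if let Some(file) = header.file(row.file_index()) {
+ /// let _ = (file.path_name(), file.directory(header));
+ /// }
+ /// # }
+ /// ```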
+ pub fn file(&self, file: u64) -> Option<&FileEntry<R, Offset>> {
+ if self.encoding.version <= 4 {
+ if file == 0 {
+ self.comp_file.as_ref()
+ } else {
+ let file = file as usize - 1;
+ self.file_names.get(file)
+ }
+ } else {
+ self.file_names.get(file as usize)
+ }
+ }
+
+ /// Get the raw, un-parsed `EndianSlice` containing this header's line number
+ /// program.
+ ///
+ /// ```
+ /// # fn foo() {
+ /// use gimli::{LineProgramHeader, EndianSlice, NativeEndian};
+ ///
+ /// fn get_line_number_program_header<'a>() -> LineProgramHeader<EndianSlice<'a, NativeEndian>> {
+ /// // Get a line number program header from some offset in a
+ /// // `.debug_line` section...
+ /// # unimplemented!()
+ /// }
+ ///
+ /// let header = get_line_number_program_header();
+ /// let raw_program = header.raw_program_buf();
+ /// println!("The length of the raw program in bytes is {}", raw_program.len());
+ /// # }
+ /// ```
+ pub fn raw_program_buf(&self) -> R {
+ self.program_buf.clone()
+ }
+
+ /// Iterate over the instructions in this header's line number program, parsing
+ /// them as we go.
+ pub fn instructions(&self) -> LineInstructions<R> {
+ LineInstructions {
+ input: self.program_buf.clone(),
+ }
+ }
+
+ fn parse(
+ input: &mut R,
+ offset: DebugLineOffset<Offset>,
+ mut address_size: u8,
+ mut comp_dir: Option<R>,
+ comp_name: Option<R>,
+ ) -> Result<LineProgramHeader<R, Offset>> {
+ let (unit_length, format) = input.read_initial_length()?;
+ let rest = &mut input.split(unit_length)?;
+
+ let version = rest.read_u16()?;
+ if version < 2 || version > 5 {
+ return Err(Error::UnknownVersion(u64::from(version)));
+ }
+
+ if version >= 5 {
+ address_size = rest.read_u8()?;
+ let segment_selector_size = rest.read_u8()?;
+ if segment_selector_size != 0 {
+ return Err(Error::UnsupportedSegmentSize);
+ }
+ }
+
+ let encoding = Encoding {
+ format,
+ version,
+ address_size,
+ };
+
+ let header_length = rest.read_length(format)?;
+
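+ // Split the remaining input: `program_buf` will cover the encoded line
+ // program that follows the header fields, while `rest` is narrowed to
+ // just the header fields themselves.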
+ let mut program_buf = rest.clone();
+ program_buf.skip(header_length)?;
+ rest.truncate(header_length)?;
+
+ let minimum_instruction_length = rest.read_u8()?;
+ if minimum_instruction_length == 0 {
+ return Err(Error::MinimumInstructionLengthZero);
+ }
+
+ // This field did not exist before DWARF 4, but is specified to be 1 for
+ // non-VLIW architectures, which makes it a no-op.
+ let maximum_operations_per_instruction = if version >= 4 { rest.read_u8()? } else { 1 };
+ if maximum_operations_per_instruction == 0 {
+ return Err(Error::MaximumOperationsPerInstructionZero);
+ }
+
+ let default_is_stmt = rest.read_u8()? != 0;
+ let line_base = rest.read_i8()?;
+ let line_range = rest.read_u8()?;
+ if line_range == 0 {
+ return Err(Error::LineRangeZero);
+ }
+ let line_encoding = LineEncoding {
+ minimum_instruction_length,
+ maximum_operations_per_instruction,
+ default_is_stmt,
+ line_base,
+ line_range,
+ };
+
+ let opcode_base = rest.read_u8()?;
+ if opcode_base == 0 {
+ return Err(Error::OpcodeBaseZero);
+ }
+
+ let standard_opcode_count = R::Offset::from_u8(opcode_base - 1);
+ let standard_opcode_lengths = rest.split(standard_opcode_count)?;
+
+ let directory_entry_format;
+ let mut include_directories = Vec::new();
+ if version <= 4 {
+ directory_entry_format = Vec::new();
+ loop {
+ let directory = rest.read_null_terminated_slice()?;
+ if directory.is_empty() {
+ break;
+ }
+ include_directories.push(AttributeValue::String(directory));
+ }
+ } else {
+ comp_dir = None;
+ directory_entry_format = FileEntryFormat::parse(rest)?;
+ let count = rest.read_uleb128()?;
+ for _ in 0..count {
+ include_directories.push(parse_directory_v5(
+ rest,
+ encoding,
+ &directory_entry_format,
+ )?);
+ }
+ }
+
+ let comp_file;
+ let file_name_entry_format;
+ let mut file_names = Vec::new();
+ if version <= 4 {
+ comp_file = comp_name.map(|name| FileEntry {
+ path_name: AttributeValue::String(name),
+ directory_index: 0,
+ timestamp: 0,
+ size: 0,
+ md5: [0; 16],
+ });
+
+ file_name_entry_format = Vec::new();
+ loop {
+ let path_name = rest.read_null_terminated_slice()?;
+ if path_name.is_empty() {
+ break;
+ }
+ file_names.push(FileEntry::parse(rest, path_name)?);
+ }
+ } else {
+ comp_file = None;
+ file_name_entry_format = FileEntryFormat::parse(rest)?;
+ let count = rest.read_uleb128()?;
+ for _ in 0..count {
+ file_names.push(parse_file_v5(rest, encoding, &file_name_entry_format)?);
+ }
+ }
+
+ let header = LineProgramHeader {
+ encoding,
+ offset,
+ unit_length,
+ header_length,
+ line_encoding,
+ opcode_base,
+ standard_opcode_lengths,
+ directory_entry_format,
+ include_directories,
+ file_name_entry_format,
+ file_names,
+ program_buf,
+ comp_dir,
+ comp_file,
+ };
+ Ok(header)
+ }
+}
+
+/// Deprecated. `IncompleteLineNumberProgram` has been renamed to `IncompleteLineProgram`.
+#[deprecated(
+ note = "IncompleteLineNumberProgram has been renamed to IncompleteLineProgram, use that instead."
+)]
+pub type IncompleteLineNumberProgram<R, Offset> = IncompleteLineProgram<R, Offset>;
+
+/// A line number program that has not been run to completion.
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub struct IncompleteLineProgram<R, Offset = <R as Reader>::Offset>
+where
+ R: Reader<Offset = Offset>,
+ Offset: ReaderOffset,
+{
+ header: LineProgramHeader<R, Offset>,
+}
+
+impl<R, Offset> IncompleteLineProgram<R, Offset>
+where
+ R: Reader<Offset = Offset>,
+ Offset: ReaderOffset,
+{
+ /// Retrieve the `LineProgramHeader` for this program.
+ pub fn header(&self) -> &LineProgramHeader<R, Offset> {
+ &self.header
+ }
+
+ /// Construct a new `LineRows` for executing this program to iterate
+ /// over rows in the line information matrix.
+ pub fn rows(self) -> OneShotLineRows<R, Offset> {
+ OneShotLineRows::new(self)
+ }
+
+ /// Execute the line number program, completing the `IncompleteLineProgram`
+ /// into a `CompleteLineProgram` and producing an array of sequences within
+ /// the line number program that can later be used with
+ /// `CompleteLineProgram::resume_from`.
+ ///
+ /// ```
+ /// # fn foo() {
+ /// use gimli::{IncompleteLineProgram, EndianSlice, NativeEndian};
+ ///
+ /// fn get_line_number_program<'a>() -> IncompleteLineProgram<EndianSlice<'a, NativeEndian>> {
+ /// // Get a line number program from some offset in a
+ /// // `.debug_line` section...
+ /// # unimplemented!()
+ /// }
+ ///
+ /// let program = get_line_number_program();
+ /// let (program, sequences) = program.sequences().unwrap();
+ /// println!("There are {} sequences in this line number program", sequences.len());
+ /// # }
+ /// ```
+ #[allow(clippy::type_complexity)]
+ pub fn sequences(self) -> Result<(CompleteLineProgram<R, Offset>, Vec<LineSequence<R>>)> {
+ let mut sequences = Vec::new();
+ let mut rows = self.rows();
+ let mut instructions = rows.instructions.clone();
+ let mut sequence_start_addr = None;
+ loop {
+ let sequence_end_addr;
+ if rows.next_row()?.is_none() {
+ break;
+ }
+
+ let row = &rows.row;
+ if row.end_sequence() {
+ sequence_end_addr = row.address();
+ } else if sequence_start_addr.is_none() {
+ sequence_start_addr = Some(row.address());
+ continue;
+ } else {
+ continue;
+ }
+
+ // We just finished a sequence.
+ sequences.push(LineSequence {
+ // In theory one could have multiple DW_LNE_end_sequence instructions
+ // in a row.
+ start: sequence_start_addr.unwrap_or(0),
+ end: sequence_end_addr,
+ instructions: instructions.remove_trailing(&rows.instructions)?,
+ });
+ sequence_start_addr = None;
+ instructions = rows.instructions.clone();
+ }
+
+ let program = CompleteLineProgram {
+ header: rows.program.header,
+ };
+ Ok((program, sequences))
+ }
+}
+
+/// Deprecated. `CompleteLineNumberProgram` has been renamed to `CompleteLineProgram`.
+#[deprecated(
+ note = "CompleteLineNumberProgram has been renamed to CompleteLineProgram, use that instead."
+)]
+pub type CompleteLineNumberProgram<R, Offset> = CompleteLineProgram<R, Offset>;
+
+/// A line number program that has previously been run to completion.
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub struct CompleteLineProgram<R, Offset = <R as Reader>::Offset>
+where
+ R: Reader<Offset = Offset>,
+ Offset: ReaderOffset,
+{
+ header: LineProgramHeader<R, Offset>,
+}
+
+impl<R, Offset> CompleteLineProgram<R, Offset>
+where
+ R: Reader<Offset = Offset>,
+ Offset: ReaderOffset,
+{
+ /// Retrieve the `LineProgramHeader` for this program.
+ pub fn header(&self) -> &LineProgramHeader<R, Offset> {
+ &self.header
+ }
+
+ /// Construct a new `LineRows` for executing the subset of the line
+ /// number program identified by `sequence` and generating the line information
+ /// matrix.
+ ///
+ /// ```
+ /// # fn foo() {
+ /// use gimli::{IncompleteLineProgram, EndianSlice, NativeEndian};
+ ///
+ /// fn get_line_number_program<'a>() -> IncompleteLineProgram<EndianSlice<'a, NativeEndian>> {
+ /// // Get a line number program from some offset in a
+ /// // `.debug_line` section...
+ /// # unimplemented!()
+ /// }
+ ///
+ /// let program = get_line_number_program();
+ /// let (program, sequences) = program.sequences().unwrap();
+ /// for sequence in &sequences {
+ /// let mut sm = program.resume_from(sequence);
+ /// }
+ /// # }
+ /// ```
+ pub fn resume_from<'program>(
+ &'program self,
+ sequence: &LineSequence<R>,
+ ) -> ResumedLineRows<'program, R, Offset> {
+ ResumedLineRows::resume(self, sequence)
+ }
+}
+
+/// An entry in the `LineProgramHeader`'s `file_names` set.
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub struct FileEntry<R, Offset = <R as Reader>::Offset>
+where
+ R: Reader<Offset = Offset>,
+ Offset: ReaderOffset,
+{
+ path_name: AttributeValue<R, Offset>,
+ directory_index: u64,
+ timestamp: u64,
+ size: u64,
+ md5: [u8; 16],
+}
+
+impl<R, Offset> FileEntry<R, Offset>
+where
+ R: Reader<Offset = Offset>,
+ Offset: ReaderOffset,
+{
+ // version 2-4
+ fn parse(input: &mut R, path_name: R) -> Result<FileEntry<R, Offset>> {
+ let directory_index = input.read_uleb128()?;
+ let timestamp = input.read_uleb128()?;
+ let size = input.read_uleb128()?;
+
+ let entry = FileEntry {
+ path_name: AttributeValue::String(path_name),
+ directory_index,
+ timestamp,
+ size,
+ md5: [0; 16],
+ };
+
+ Ok(entry)
+ }
+
+ /// > A slice containing the full or relative path name of
+ /// > a source file. If the entry contains a file name or a relative path
+ /// > name, the file is located relative to either the compilation directory
+ /// > (as specified by the DW_AT_comp_dir attribute given in the compilation
+ /// > unit) or one of the directories in the include_directories section.
+ pub fn path_name(&self) -> AttributeValue<R, Offset> {
+ self.path_name.clone()
+ }
+
+ /// > An unsigned LEB128 number representing the directory index of the
+ /// > directory in which the file was found.
+ /// >
+ /// > ...
+ /// >
+ /// > The directory index represents an entry in the include_directories
+ /// > section of the line number program header. The index is 0 if the file
+ /// > was found in the current directory of the compilation, 1 if it was found
+ /// > in the first directory in the include_directories section, and so
+ /// > on. The directory index is ignored for file names that represent full
+ /// > path names.
+ pub fn directory_index(&self) -> u64 {
+ self.directory_index
+ }
+
+ /// Get this file's directory.
+ ///
+ /// A directory index of 0 corresponds to the compilation unit directory.
+ pub fn directory(&self, header: &LineProgramHeader<R>) -> Option<AttributeValue<R, Offset>> {
+ header.directory(self.directory_index)
+ }
+
+ /// The implementation-defined time of last modification of the file,
+ /// or 0 if not available.
+ pub fn timestamp(&self) -> u64 {
+ self.timestamp
+ }
+
+ /// "An unsigned LEB128 number representing the time of last modification of
+ /// the file, or 0 if not available."
+ // Terminology changed in DWARF version 5.
+ #[doc(hidden)]
+ pub fn last_modification(&self) -> u64 {
+ self.timestamp
+ }
+
+ /// The size of the file in bytes, or 0 if not available.
+ pub fn size(&self) -> u64 {
+ self.size
+ }
+
+ /// "An unsigned LEB128 number representing the length in bytes of the file,
+ /// or 0 if not available."
+ // Terminology changed in DWARF version 5.
+ #[doc(hidden)]
+ pub fn length(&self) -> u64 {
+ self.size
+ }
+
+ /// A 16-byte MD5 digest of the file contents.
+ ///
+ /// Only valid if `LineProgramHeader::file_has_md5` returns `true`.
+ pub fn md5(&self) -> &[u8; 16] {
+ &self.md5
+ }
+}
+
+/// The format of a component of an include directory or file name entry.
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub struct FileEntryFormat {
+ /// The type of information that is represented by the component.
+ pub content_type: constants::DwLnct,
+
+ /// The encoding form of the component value.
+ pub form: constants::DwForm,
+}
+
+impl FileEntryFormat {
+ fn parse<R: Reader>(input: &mut R) -> Result<Vec<FileEntryFormat>> {
+ let format_count = input.read_u8()? as usize;
+ let mut format = Vec::with_capacity(format_count);
+ let mut path_count = 0;
+ for _ in 0..format_count {
+ let content_type = input.read_uleb128()?;
+ let content_type = if content_type > u64::from(u16::max_value()) {
+ constants::DwLnct(u16::max_value())
+ } else {
+ constants::DwLnct(content_type as u16)
+ };
+ if content_type == constants::DW_LNCT_path {
+ path_count += 1;
+ }
+
+ let form = constants::DwForm(input.read_uleb128_u16()?);
+
+ format.push(FileEntryFormat { content_type, form });
+ }
+ if path_count != 1 {
+ return Err(Error::MissingFileEntryFormatPath);
+ }
+ Ok(format)
+ }
+}
+
+fn parse_directory_v5<R: Reader>(
+ input: &mut R,
+ encoding: Encoding,
+ formats: &[FileEntryFormat],
+) -> Result<AttributeValue<R>> {
+ let mut path_name = None;
+
+ for format in formats {
+ let value = parse_attribute(input, encoding, format.form)?;
+ if format.content_type == constants::DW_LNCT_path {
+ path_name = Some(value);
+ }
+ }
+
+ Ok(path_name.unwrap())
+}
+
+fn parse_file_v5<R: Reader>(
+ input: &mut R,
+ encoding: Encoding,
+ formats: &[FileEntryFormat],
+) -> Result<FileEntry<R>> {
+ let mut path_name = None;
+ let mut directory_index = 0;
+ let mut timestamp = 0;
+ let mut size = 0;
+ let mut md5 = [0; 16];
+
+ for format in formats {
+ let value = parse_attribute(input, encoding, format.form)?;
+ match format.content_type {
+ constants::DW_LNCT_path => path_name = Some(value),
+ constants::DW_LNCT_directory_index => {
+ if let Some(value) = value.udata_value() {
+ directory_index = value;
+ }
+ }
+ constants::DW_LNCT_timestamp => {
+ if let Some(value) = value.udata_value() {
+ timestamp = value;
+ }
+ }
+ constants::DW_LNCT_size => {
+ if let Some(value) = value.udata_value() {
+ size = value;
+ }
+ }
+ constants::DW_LNCT_MD5 => {
+ if let AttributeValue::Block(mut value) = value {
+ if value.len().into_u64() == 16 {
+ md5 = value.read_u8_array()?;
+ }
+ }
+ }
+ // Ignore unknown content types.
+ _ => {}
+ }
+ }
+
+ Ok(FileEntry {
+ path_name: path_name.unwrap(),
+ directory_index,
+ timestamp,
+ size,
+ md5,
+ })
+}
+
+// TODO: this should be shared with unit::parse_attribute(), but that is hard to do.
+fn parse_attribute<R: Reader>(
+ input: &mut R,
+ encoding: Encoding,
+ form: constants::DwForm,
+) -> Result<AttributeValue<R>> {
+ Ok(match form {
+ constants::DW_FORM_block1 => {
+ let len = input.read_u8().map(R::Offset::from_u8)?;
+ let block = input.split(len)?;
+ AttributeValue::Block(block)
+ }
+ constants::DW_FORM_block2 => {
+ let len = input.read_u16().map(R::Offset::from_u16)?;
+ let block = input.split(len)?;
+ AttributeValue::Block(block)
+ }
+ constants::DW_FORM_block4 => {
+ let len = input.read_u32().map(R::Offset::from_u32)?;
+ let block = input.split(len)?;
+ AttributeValue::Block(block)
+ }
+ constants::DW_FORM_block => {
+ let len = input.read_uleb128().and_then(R::Offset::from_u64)?;
+ let block = input.split(len)?;
+ AttributeValue::Block(block)
+ }
+ constants::DW_FORM_data1 => {
+ let data = input.read_u8()?;
+ AttributeValue::Data1(data)
+ }
+ constants::DW_FORM_data2 => {
+ let data = input.read_u16()?;
+ AttributeValue::Data2(data)
+ }
+ constants::DW_FORM_data4 => {
+ let data = input.read_u32()?;
+ AttributeValue::Data4(data)
+ }
+ constants::DW_FORM_data8 => {
+ let data = input.read_u64()?;
+ AttributeValue::Data8(data)
+ }
+ constants::DW_FORM_data16 => {
+ let block = input.split(R::Offset::from_u8(16))?;
+ AttributeValue::Block(block)
+ }
+ constants::DW_FORM_udata => {
+ let data = input.read_uleb128()?;
+ AttributeValue::Udata(data)
+ }
+ constants::DW_FORM_sdata => {
+ let data = input.read_sleb128()?;
+ AttributeValue::Sdata(data)
+ }
+ constants::DW_FORM_flag => {
+ let present = input.read_u8()?;
+ AttributeValue::Flag(present != 0)
+ }
+ constants::DW_FORM_sec_offset => {
+ let offset = input.read_offset(encoding.format)?;
+ AttributeValue::SecOffset(offset)
+ }
+ constants::DW_FORM_string => {
+ let string = input.read_null_terminated_slice()?;
+ AttributeValue::String(string)
+ }
+ constants::DW_FORM_strp => {
+ let offset = input.read_offset(encoding.format)?;
+ AttributeValue::DebugStrRef(DebugStrOffset(offset))
+ }
+ constants::DW_FORM_strp_sup | constants::DW_FORM_GNU_strp_alt => {
+ let offset = input.read_offset(encoding.format)?;
+ AttributeValue::DebugStrRefSup(DebugStrOffset(offset))
+ }
+ constants::DW_FORM_line_strp => {
+ let offset = input.read_offset(encoding.format)?;
+ AttributeValue::DebugLineStrRef(DebugLineStrOffset(offset))
+ }
+ constants::DW_FORM_strx | constants::DW_FORM_GNU_str_index => {
+ let index = input.read_uleb128().and_then(R::Offset::from_u64)?;
+ AttributeValue::DebugStrOffsetsIndex(DebugStrOffsetsIndex(index))
+ }
+ constants::DW_FORM_strx1 => {
+ let index = input.read_u8().map(R::Offset::from_u8)?;
+ AttributeValue::DebugStrOffsetsIndex(DebugStrOffsetsIndex(index))
+ }
+ constants::DW_FORM_strx2 => {
+ let index = input.read_u16().map(R::Offset::from_u16)?;
+ AttributeValue::DebugStrOffsetsIndex(DebugStrOffsetsIndex(index))
+ }
+ constants::DW_FORM_strx3 => {
+ let index = input.read_uint(3).and_then(R::Offset::from_u64)?;
+ AttributeValue::DebugStrOffsetsIndex(DebugStrOffsetsIndex(index))
+ }
+ constants::DW_FORM_strx4 => {
+ let index = input.read_u32().map(R::Offset::from_u32)?;
+ AttributeValue::DebugStrOffsetsIndex(DebugStrOffsetsIndex(index))
+ }
+ _ => {
+ return Err(Error::UnknownForm);
+ }
+ })
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::constants;
+ use crate::endianity::LittleEndian;
+ use crate::read::{EndianSlice, Error};
+ use crate::test_util::GimliSectionMethods;
+ use core::u64;
+ use core::u8;
+ use test_assembler::{Endian, Label, LabelMaker, Section};
+
+ #[test]
+ fn test_parse_debug_line_32_ok() {
+ #[rustfmt::skip]
+ let buf = [
+ // 32-bit length = 62.
+ 0x3e, 0x00, 0x00, 0x00,
+ // Version.
+ 0x04, 0x00,
+ // Header length = 40.
+ 0x28, 0x00, 0x00, 0x00,
+ // Minimum instruction length.
+ 0x01,
+ // Maximum operations per instruction.
+ 0x01,
+ // Default is_stmt.
+ 0x01,
+ // Line base.
+ 0x00,
+ // Line range.
+ 0x01,
+ // Opcode base.
+ 0x03,
+ // Standard opcode lengths for opcodes 1 .. opcode base - 1.
+ 0x01, 0x02,
+ // Include directories = '/', 'i', 'n', 'c', '\0', '/', 'i', 'n', 'c', '2', '\0', '\0'
+ 0x2f, 0x69, 0x6e, 0x63, 0x00, 0x2f, 0x69, 0x6e, 0x63, 0x32, 0x00, 0x00,
+ // File names
+ // foo.rs
+ 0x66, 0x6f, 0x6f, 0x2e, 0x72, 0x73, 0x00,
+ 0x00,
+ 0x00,
+ 0x00,
+ // bar.h
+ 0x62, 0x61, 0x72, 0x2e, 0x68, 0x00,
+ 0x01,
+ 0x00,
+ 0x00,
+ // End file names.
+ 0x00,
+
+ // Dummy line program data.
+ 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+
+ // Dummy next line program.
+ 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+ ];
+
+ let rest = &mut EndianSlice::new(&buf, LittleEndian);
+ let comp_dir = EndianSlice::new(b"/comp_dir", LittleEndian);
+ let comp_name = EndianSlice::new(b"/comp_name", LittleEndian);
+
+ let header =
+ LineProgramHeader::parse(rest, DebugLineOffset(0), 4, Some(comp_dir), Some(comp_name))
+ .expect("should parse header ok");
+
+ assert_eq!(
+ *rest,
+ EndianSlice::new(&buf[buf.len() - 16..], LittleEndian)
+ );
+
+ assert_eq!(header.offset, DebugLineOffset(0));
+ assert_eq!(header.version(), 4);
+ assert_eq!(header.minimum_instruction_length(), 1);
+ assert_eq!(header.maximum_operations_per_instruction(), 1);
+ assert_eq!(header.default_is_stmt(), true);
+ assert_eq!(header.line_base(), 0);
+ assert_eq!(header.line_range(), 1);
+ assert_eq!(header.opcode_base(), 3);
+ assert_eq!(header.directory(0), Some(AttributeValue::String(comp_dir)));
+ assert_eq!(
+ header.file(0).unwrap().path_name,
+ AttributeValue::String(comp_name)
+ );
+
+ let expected_lengths = [1, 2];
+ assert_eq!(header.standard_opcode_lengths().slice(), &expected_lengths);
+
+ let expected_include_directories = [
+ AttributeValue::String(EndianSlice::new(b"/inc", LittleEndian)),
+ AttributeValue::String(EndianSlice::new(b"/inc2", LittleEndian)),
+ ];
+ assert_eq!(header.include_directories(), &expected_include_directories);
+
+ let expected_file_names = [
+ FileEntry {
+ path_name: AttributeValue::String(EndianSlice::new(b"foo.rs", LittleEndian)),
+ directory_index: 0,
+ timestamp: 0,
+ size: 0,
+ md5: [0; 16],
+ },
+ FileEntry {
+ path_name: AttributeValue::String(EndianSlice::new(b"bar.h", LittleEndian)),
+ directory_index: 1,
+ timestamp: 0,
+ size: 0,
+ md5: [0; 16],
+ },
+ ];
+ assert_eq!(&*header.file_names(), &expected_file_names);
+ }
+
+ #[test]
+ fn test_parse_debug_line_header_length_too_short() {
+ #[rustfmt::skip]
+ let buf = [
+ // 32-bit length = 62.
+ 0x3e, 0x00, 0x00, 0x00,
+ // Version.
+ 0x04, 0x00,
+ // Header length = 21. TOO SHORT!!!
+ 0x15, 0x00, 0x00, 0x00,
+ // Minimum instruction length.
+ 0x01,
+ // Maximum operations per instruction.
+ 0x01,
+ // Default is_stmt.
+ 0x01,
+ // Line base.
+ 0x00,
+ // Line range.
+ 0x01,
+ // Opcode base.
+ 0x03,
+ // Standard opcode lengths for opcodes 1 .. opcode base - 1.
+ 0x01, 0x02,
+ // Include directories = '/', 'i', 'n', 'c', '\0', '/', 'i', 'n', 'c', '2', '\0', '\0'
+ 0x2f, 0x69, 0x6e, 0x63, 0x00, 0x2f, 0x69, 0x6e, 0x63, 0x32, 0x00, 0x00,
+ // File names
+ // foo.rs
+ 0x66, 0x6f, 0x6f, 0x2e, 0x72, 0x73, 0x00,
+ 0x00,
+ 0x00,
+ 0x00,
+ // bar.h
+ 0x62, 0x61, 0x72, 0x2e, 0x68, 0x00,
+ 0x01,
+ 0x00,
+ 0x00,
+ // End file names.
+ 0x00,
+
+ // Dummy line program data.
+ 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+
+ // Dummy next line program.
+ 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+ ];
+
+ let input = &mut EndianSlice::new(&buf, LittleEndian);
+
+ match LineProgramHeader::parse(input, DebugLineOffset(0), 4, None, None) {
+ Err(Error::UnexpectedEof(_)) => return,
+ otherwise => panic!("Unexpected result: {:?}", otherwise),
+ }
+ }
+
+ #[test]
+ fn test_parse_debug_line_unit_length_too_short() {
+ #[rustfmt::skip]
+ let buf = [
+ // 32-bit length = 40. TOO SHORT!!!
+ 0x28, 0x00, 0x00, 0x00,
+ // Version.
+ 0x04, 0x00,
+ // Header length = 40.
+ 0x28, 0x00, 0x00, 0x00,
+ // Minimum instruction length.
+ 0x01,
+ // Maximum operations per instruction.
+ 0x01,
+ // Default is_stmt.
+ 0x01,
+ // Line base.
+ 0x00,
+ // Line range.
+ 0x01,
+ // Opcode base.
+ 0x03,
+ // Standard opcode lengths for opcodes 1 .. opcode base - 1.
+ 0x01, 0x02,
+ // Include directories = '/', 'i', 'n', 'c', '\0', '/', 'i', 'n', 'c', '2', '\0', '\0'
+ 0x2f, 0x69, 0x6e, 0x63, 0x00, 0x2f, 0x69, 0x6e, 0x63, 0x32, 0x00, 0x00,
+ // File names
+ // foo.rs
+ 0x66, 0x6f, 0x6f, 0x2e, 0x72, 0x73, 0x00,
+ 0x00,
+ 0x00,
+ 0x00,
+ // bar.h
+ 0x62, 0x61, 0x72, 0x2e, 0x68, 0x00,
+ 0x01,
+ 0x00,
+ 0x00,
+ // End file names.
+ 0x00,
+
+ // Dummy line program data.
+ 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+
+ // Dummy next line program.
+ 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+ ];
+
+ let input = &mut EndianSlice::new(&buf, LittleEndian);
+
+ match LineProgramHeader::parse(input, DebugLineOffset(0), 4, None, None) {
+ Err(Error::UnexpectedEof(_)) => return,
+ otherwise => panic!("Unexpected result: {:?}", otherwise),
+ }
+ }
+
+ const OPCODE_BASE: u8 = 13;
+ const STANDARD_OPCODE_LENGTHS: &[u8] = &[0, 1, 1, 1, 1, 0, 0, 0, 1, 0, 0, 1];
+
+ fn make_test_header(
+ buf: EndianSlice<LittleEndian>,
+ ) -> LineProgramHeader<EndianSlice<LittleEndian>> {
+ let encoding = Encoding {
+ format: Format::Dwarf32,
+ version: 4,
+ address_size: 8,
+ };
+ let line_encoding = LineEncoding {
+ line_base: -3,
+ line_range: 12,
+ ..Default::default()
+ };
+ LineProgramHeader {
+ encoding,
+ offset: DebugLineOffset(0),
+ unit_length: 1,
+ header_length: 1,
+ line_encoding,
+ opcode_base: OPCODE_BASE,
+ standard_opcode_lengths: EndianSlice::new(STANDARD_OPCODE_LENGTHS, LittleEndian),
+ file_names: vec![
+ FileEntry {
+ path_name: AttributeValue::String(EndianSlice::new(b"foo.c", LittleEndian)),
+ directory_index: 0,
+ timestamp: 0,
+ size: 0,
+ md5: [0; 16],
+ },
+ FileEntry {
+ path_name: AttributeValue::String(EndianSlice::new(b"bar.rs", LittleEndian)),
+ directory_index: 0,
+ timestamp: 0,
+ size: 0,
+ md5: [0; 16],
+ },
+ ],
+ include_directories: vec![],
+ directory_entry_format: vec![],
+ file_name_entry_format: vec![],
+ program_buf: buf,
+ comp_dir: None,
+ comp_file: None,
+ }
+ }
+
+ fn make_test_program(
+ buf: EndianSlice<LittleEndian>,
+ ) -> IncompleteLineProgram<EndianSlice<LittleEndian>> {
+ IncompleteLineProgram {
+ header: make_test_header(buf),
+ }
+ }
+
+ #[test]
+ fn test_parse_special_opcodes() {
+ for i in OPCODE_BASE..u8::MAX {
+ let input = [i, 0, 0, 0];
+ let input = EndianSlice::new(&input, LittleEndian);
+ let header = make_test_header(input);
+
+ let mut rest = input;
+ let opcode =
+ LineInstruction::parse(&header, &mut rest).expect("Should parse the opcode OK");
+
+ assert_eq!(*rest, *input.range_from(1..));
+ assert_eq!(opcode, LineInstruction::Special(i));
+ }
+ }
+
+ #[test]
+ fn test_parse_standard_opcodes() {
+ fn test<Operands>(
+ raw: constants::DwLns,
+ operands: Operands,
+ expected: LineInstruction<EndianSlice<LittleEndian>>,
+ ) where
+ Operands: AsRef<[u8]>,
+ {
+ let mut input = Vec::new();
+ input.push(raw.0);
+ input.extend_from_slice(operands.as_ref());
+
+ let expected_rest = [0, 1, 2, 3, 4];
+ input.extend_from_slice(&expected_rest);
+
+ let input = EndianSlice::new(&*input, LittleEndian);
+ let header = make_test_header(input);
+
+ let mut rest = input;
+ let opcode =
+ LineInstruction::parse(&header, &mut rest).expect("Should parse the opcode OK");
+
+ assert_eq!(opcode, expected);
+ assert_eq!(*rest, expected_rest);
+ }
+
+ test(constants::DW_LNS_copy, [], LineInstruction::Copy);
+ test(
+ constants::DW_LNS_advance_pc,
+ [42],
+ LineInstruction::AdvancePc(42),
+ );
+ test(
+ constants::DW_LNS_advance_line,
+ [9],
+ LineInstruction::AdvanceLine(9),
+ );
+ test(constants::DW_LNS_set_file, [7], LineInstruction::SetFile(7));
+ test(
+ constants::DW_LNS_set_column,
+ [1],
+ LineInstruction::SetColumn(1),
+ );
+ test(
+ constants::DW_LNS_negate_stmt,
+ [],
+ LineInstruction::NegateStatement,
+ );
+ test(
+ constants::DW_LNS_set_basic_block,
+ [],
+ LineInstruction::SetBasicBlock,
+ );
+ test(
+ constants::DW_LNS_const_add_pc,
+ [],
+ LineInstruction::ConstAddPc,
+ );
+ test(
+ constants::DW_LNS_fixed_advance_pc,
+ [42, 0],
+ LineInstruction::FixedAddPc(42),
+ );
+ test(
+ constants::DW_LNS_set_prologue_end,
+ [],
+ LineInstruction::SetPrologueEnd,
+ );
+ test(
+ constants::DW_LNS_set_isa,
+ [57 + 0x80, 100],
+ LineInstruction::SetIsa(12857),
+ );
+ }
+
+ #[test]
+ fn test_parse_unknown_standard_opcode_no_args() {
+ let input = [OPCODE_BASE, 1, 2, 3];
+ let input = EndianSlice::new(&input, LittleEndian);
+ let mut standard_opcode_lengths = Vec::new();
+ let mut header = make_test_header(input);
+ standard_opcode_lengths.extend(header.standard_opcode_lengths.slice());
+ standard_opcode_lengths.push(0);
+ header.opcode_base += 1;
+ header.standard_opcode_lengths = EndianSlice::new(&standard_opcode_lengths, LittleEndian);
+
+ let mut rest = input;
+ let opcode =
+ LineInstruction::parse(&header, &mut rest).expect("Should parse the opcode OK");
+
+ assert_eq!(
+ opcode,
+ LineInstruction::UnknownStandard0(constants::DwLns(OPCODE_BASE))
+ );
+ assert_eq!(*rest, *input.range_from(1..));
+ }
+
+ #[test]
+ fn test_parse_unknown_standard_opcode_one_arg() {
+ let input = [OPCODE_BASE, 1, 2, 3];
+ let input = EndianSlice::new(&input, LittleEndian);
+ let mut standard_opcode_lengths = Vec::new();
+ let mut header = make_test_header(input);
+ standard_opcode_lengths.extend(header.standard_opcode_lengths.slice());
+ standard_opcode_lengths.push(1);
+ header.opcode_base += 1;
+ header.standard_opcode_lengths = EndianSlice::new(&standard_opcode_lengths, LittleEndian);
+
+ let mut rest = input;
+ let opcode =
+ LineInstruction::parse(&header, &mut rest).expect("Should parse the opcode OK");
+
+ assert_eq!(
+ opcode,
+ LineInstruction::UnknownStandard1(constants::DwLns(OPCODE_BASE), 1)
+ );
+ assert_eq!(*rest, *input.range_from(2..));
+ }
+
+ #[test]
+ fn test_parse_unknown_standard_opcode_many_args() {
+ let input = [OPCODE_BASE, 1, 2, 3];
+ let input = EndianSlice::new(&input, LittleEndian);
+ let args = EndianSlice::new(&input[1..], LittleEndian);
+ let mut standard_opcode_lengths = Vec::new();
+ let mut header = make_test_header(input);
+ standard_opcode_lengths.extend(header.standard_opcode_lengths.slice());
+ standard_opcode_lengths.push(3);
+ header.opcode_base += 1;
+ header.standard_opcode_lengths = EndianSlice::new(&standard_opcode_lengths, LittleEndian);
+
+ let mut rest = input;
+ let opcode =
+ LineInstruction::parse(&header, &mut rest).expect("Should parse the opcode OK");
+
+ assert_eq!(
+ opcode,
+ LineInstruction::UnknownStandardN(constants::DwLns(OPCODE_BASE), args)
+ );
+ assert_eq!(*rest, []);
+ }
+
+ #[test]
+ fn test_parse_extended_opcodes() {
+ fn test<Operands>(
+ raw: constants::DwLne,
+ operands: Operands,
+ expected: LineInstruction<EndianSlice<LittleEndian>>,
+ ) where
+ Operands: AsRef<[u8]>,
+ {
+ let mut input = Vec::new();
+ input.push(0);
+
+ let operands = operands.as_ref();
+ input.push(1 + operands.len() as u8);
+
+ input.push(raw.0);
+ input.extend_from_slice(operands);
+
+ let expected_rest = [0, 1, 2, 3, 4];
+ input.extend_from_slice(&expected_rest);
+
+ let input = EndianSlice::new(&input, LittleEndian);
+ let header = make_test_header(input);
+
+ let mut rest = input;
+ let opcode =
+ LineInstruction::parse(&header, &mut rest).expect("Should parse the opcode OK");
+
+ assert_eq!(opcode, expected);
+ assert_eq!(*rest, expected_rest);
+ }
+
+ test(
+ constants::DW_LNE_end_sequence,
+ [],
+ LineInstruction::EndSequence,
+ );
+ test(
+ constants::DW_LNE_set_address,
+ [1, 2, 3, 4, 5, 6, 7, 8],
+ LineInstruction::SetAddress(578_437_695_752_307_201),
+ );
+ test(
+ constants::DW_LNE_set_discriminator,
+ [42],
+ LineInstruction::SetDiscriminator(42),
+ );
+
+ let mut file = Vec::new();
+ // "foo.c"
+ let path_name = [b'f', b'o', b'o', b'.', b'c', 0];
+ file.extend_from_slice(&path_name);
+ // Directory index.
+ file.push(0);
+ // Last modification of file.
+ file.push(1);
+ // Size of file.
+ file.push(2);
+
+ test(
+ constants::DW_LNE_define_file,
+ file,
+ LineInstruction::DefineFile(FileEntry {
+ path_name: AttributeValue::String(EndianSlice::new(b"foo.c", LittleEndian)),
+ directory_index: 0,
+ timestamp: 1,
+ size: 2,
+ md5: [0; 16],
+ }),
+ );
+
+ // Unknown extended opcode.
+ let operands = [1, 2, 3, 4, 5, 6];
+ let opcode = constants::DwLne(99);
+ test(
+ opcode,
+ operands,
+ LineInstruction::UnknownExtended(opcode, EndianSlice::new(&operands, LittleEndian)),
+ );
+ }
+
+ #[test]
+ fn test_file_entry_directory() {
+ let path_name = [b'f', b'o', b'o', b'.', b'r', b's', 0];
+
+ let mut file = FileEntry {
+ path_name: AttributeValue::String(EndianSlice::new(&path_name, LittleEndian)),
+ directory_index: 1,
+ timestamp: 0,
+ size: 0,
+ md5: [0; 16],
+ };
+
+ let mut header = make_test_header(EndianSlice::new(&[], LittleEndian));
+
+ let dir = AttributeValue::String(EndianSlice::new(b"dir", LittleEndian));
+ header.include_directories.push(dir);
+
+ assert_eq!(file.directory(&header), Some(dir));
+
+ // Now test the compilation's current directory.
+ file.directory_index = 0;
+ assert_eq!(file.directory(&header), None);
+ }
+
+ fn assert_exec_opcode<'input>(
+ header: LineProgramHeader<EndianSlice<'input, LittleEndian>>,
+ mut registers: LineRow,
+ opcode: LineInstruction<EndianSlice<'input, LittleEndian>>,
+ expected_registers: LineRow,
+ expect_new_row: bool,
+ ) {
+ let mut program = IncompleteLineProgram { header };
+ let is_new_row = registers.execute(opcode, &mut program);
+
+ assert_eq!(is_new_row, expect_new_row);
+ assert_eq!(registers, expected_registers);
+ }
+
+ #[test]
+ fn test_exec_special_noop() {
+ let header = make_test_header(EndianSlice::new(&[], LittleEndian));
+
+ let initial_registers = LineRow::new(&header);
+ let opcode = LineInstruction::Special(16);
+ let expected_registers = initial_registers;
+
+ assert_exec_opcode(header, initial_registers, opcode, expected_registers, true);
+ }
+
+ #[test]
+ fn test_exec_special_negative_line_advance() {
+ let header = make_test_header(EndianSlice::new(&[], LittleEndian));
+
+ let mut initial_registers = LineRow::new(&header);
+ initial_registers.line.0 = 10;
+
+ let opcode = LineInstruction::Special(13);
+
+ let mut expected_registers = initial_registers;
+ expected_registers.line.0 -= 3;
+
+ assert_exec_opcode(header, initial_registers, opcode, expected_registers, true);
+ }
+
+ #[test]
+ fn test_exec_special_positive_line_advance() {
+ let header = make_test_header(EndianSlice::new(&[], LittleEndian));
+
+ let initial_registers = LineRow::new(&header);
+
+ let opcode = LineInstruction::Special(19);
+
+ let mut expected_registers = initial_registers;
+ expected_registers.line.0 += 3;
+
+ assert_exec_opcode(header, initial_registers, opcode, expected_registers, true);
+ }
+
+ #[test]
+ fn test_exec_special_positive_address_advance() {
+ let header = make_test_header(EndianSlice::new(&[], LittleEndian));
+
+ let initial_registers = LineRow::new(&header);
+
+ let opcode = LineInstruction::Special(52);
+
+ let mut expected_registers = initial_registers;
+ expected_registers.address.0 += 3;
+
+ assert_exec_opcode(header, initial_registers, opcode, expected_registers, true);
+ }
+
+ #[test]
+ fn test_exec_special_positive_address_and_line_advance() {
+ let header = make_test_header(EndianSlice::new(&[], LittleEndian));
+
+ let initial_registers = LineRow::new(&header);
+
+ let opcode = LineInstruction::Special(55);
+
+ let mut expected_registers = initial_registers;
+ expected_registers.address.0 += 3;
+ expected_registers.line.0 += 3;
+
+ assert_exec_opcode(header, initial_registers, opcode, expected_registers, true);
+ }
+
+ #[test]
+ fn test_exec_special_positive_address_and_negative_line_advance() {
+ let header = make_test_header(EndianSlice::new(&[], LittleEndian));
+
+ let mut initial_registers = LineRow::new(&header);
+ initial_registers.line.0 = 10;
+
+ let opcode = LineInstruction::Special(49);
+
+ let mut expected_registers = initial_registers;
+ expected_registers.address.0 += 3;
+ expected_registers.line.0 -= 3;
+
+ assert_exec_opcode(header, initial_registers, opcode, expected_registers, true);
+ }
+
+ #[test]
+ fn test_exec_special_line_underflow() {
+ let header = make_test_header(EndianSlice::new(&[], LittleEndian));
+
+ let mut initial_registers = LineRow::new(&header);
+ initial_registers.line.0 = 2;
+
+ // -3 line advance.
+ let opcode = LineInstruction::Special(13);
+
+ let mut expected_registers = initial_registers;
+ // Clamp at 0. The DWARF spec does not define how to handle line
+ // underflow, so clamping is this implementation's choice.
+ expected_registers.line.0 = 0;
+
+ assert_exec_opcode(header, initial_registers, opcode, expected_registers, true);
+ }
+
+ #[test]
+ fn test_exec_copy() {
+ let header = make_test_header(EndianSlice::new(&[], LittleEndian));
+
+ let mut initial_registers = LineRow::new(&header);
+ initial_registers.address.0 = 1337;
+ initial_registers.line.0 = 42;
+
+ let opcode = LineInstruction::Copy;
+
+ let expected_registers = initial_registers;
+
+ assert_exec_opcode(header, initial_registers, opcode, expected_registers, true);
+ }
+
+ #[test]
+ fn test_exec_advance_pc() {
+ let header = make_test_header(EndianSlice::new(&[], LittleEndian));
+ let initial_registers = LineRow::new(&header);
+ let opcode = LineInstruction::AdvancePc(42);
+
+ let mut expected_registers = initial_registers;
+ expected_registers.address.0 += 42;
+
+ assert_exec_opcode(header, initial_registers, opcode, expected_registers, false);
+ }
+
+ #[test]
+ fn test_exec_advance_pc_overflow() {
+ let header = make_test_header(EndianSlice::new(&[], LittleEndian));
+ let opcode = LineInstruction::AdvancePc(42);
+
+ let mut initial_registers = LineRow::new(&header);
+ initial_registers.address.0 = u64::MAX;
+
+ let mut expected_registers = initial_registers;
+ expected_registers.address.0 = 41;
+
+ assert_exec_opcode(header, initial_registers, opcode, expected_registers, false);
+ }
+
+ #[test]
+ fn test_exec_advance_line() {
+ let header = make_test_header(EndianSlice::new(&[], LittleEndian));
+ let initial_registers = LineRow::new(&header);
+ let opcode = LineInstruction::AdvanceLine(42);
+
+ let mut expected_registers = initial_registers;
+ expected_registers.line.0 += 42;
+
+ assert_exec_opcode(header, initial_registers, opcode, expected_registers, false);
+ }
+
+ #[test]
+ fn test_exec_advance_line_overflow() {
+ let header = make_test_header(EndianSlice::new(&[], LittleEndian));
+ let opcode = LineInstruction::AdvanceLine(42);
+
+ let mut initial_registers = LineRow::new(&header);
+ initial_registers.line.0 = u64::MAX;
+
+ let mut expected_registers = initial_registers;
+ expected_registers.line.0 = 41;
+
+ assert_exec_opcode(header, initial_registers, opcode, expected_registers, false);
+ }
+
+ #[test]
+ fn test_exec_set_file_in_bounds() {
+ for file_idx in 1..3 {
+ let header = make_test_header(EndianSlice::new(&[], LittleEndian));
+ let initial_registers = LineRow::new(&header);
+ let opcode = LineInstruction::SetFile(file_idx);
+
+ let mut expected_registers = initial_registers;
+ expected_registers.file = file_idx;
+
+ assert_exec_opcode(header, initial_registers, opcode, expected_registers, false);
+ }
+ }
+
+ #[test]
+ fn test_exec_set_file_out_of_bounds() {
+ let header = make_test_header(EndianSlice::new(&[], LittleEndian));
+ let initial_registers = LineRow::new(&header);
+ let opcode = LineInstruction::SetFile(100);
+
+ // The spec doesn't say anything about rejecting input programs
+ // that set the file register out of bounds of the actual number
+ // of files that have been defined. Instead, we cross our
+ // fingers and hope that one gets defined before
+ // `LineRow::file` gets called and handle the error at
+ // that time if need be.
+ let mut expected_registers = initial_registers;
+ expected_registers.file = 100;
+
+ assert_exec_opcode(header, initial_registers, opcode, expected_registers, false);
+ }
+
+ #[test]
+ fn test_file_entry_file_index_out_of_bounds() {
+ // These indices are 1-based, so 0 is invalid. 100 is way more than the
+ // number of files defined in the header.
+ let out_of_bounds_indices = [0, 100];
+
+ for file_idx in &out_of_bounds_indices[..] {
+ let header = make_test_header(EndianSlice::new(&[], LittleEndian));
+ let mut row = LineRow::new(&header);
+
+ row.file = *file_idx;
+
+ assert_eq!(row.file(&header), None);
+ }
+ }
+
+ #[test]
+ fn test_file_entry_file_index_in_bounds() {
+ let header = make_test_header(EndianSlice::new(&[], LittleEndian));
+ let mut row = LineRow::new(&header);
+
+ row.file = 2;
+
+ assert_eq!(row.file(&header), Some(&header.file_names()[1]));
+ }
+
+ #[test]
+ fn test_exec_set_column() {
+ let header = make_test_header(EndianSlice::new(&[], LittleEndian));
+ let initial_registers = LineRow::new(&header);
+ let opcode = LineInstruction::SetColumn(42);
+
+ let mut expected_registers = initial_registers;
+ expected_registers.column = 42;
+
+ assert_exec_opcode(header, initial_registers, opcode, expected_registers, false);
+ }
+
+ #[test]
+ fn test_exec_negate_statement() {
+ let header = make_test_header(EndianSlice::new(&[], LittleEndian));
+ let initial_registers = LineRow::new(&header);
+ let opcode = LineInstruction::NegateStatement;
+
+ let mut expected_registers = initial_registers;
+ expected_registers.is_stmt = !initial_registers.is_stmt;
+
+ assert_exec_opcode(header, initial_registers, opcode, expected_registers, false);
+ }
+
+ #[test]
+ fn test_exec_set_basic_block() {
+ let header = make_test_header(EndianSlice::new(&[], LittleEndian));
+
+ let mut initial_registers = LineRow::new(&header);
+ initial_registers.basic_block = false;
+
+ let opcode = LineInstruction::SetBasicBlock;
+
+ let mut expected_registers = initial_registers;
+ expected_registers.basic_block = true;
+
+ assert_exec_opcode(header, initial_registers, opcode, expected_registers, false);
+ }
+
+ #[test]
+ fn test_exec_const_add_pc() {
+ let header = make_test_header(EndianSlice::new(&[], LittleEndian));
+ let initial_registers = LineRow::new(&header);
+ let opcode = LineInstruction::ConstAddPc;
+
+ let mut expected_registers = initial_registers;
+ expected_registers.address.0 += 20;
+
+ assert_exec_opcode(header, initial_registers, opcode, expected_registers, false);
+ }
+
+ #[test]
+ fn test_exec_fixed_add_pc() {
+ let header = make_test_header(EndianSlice::new(&[], LittleEndian));
+
+ let mut initial_registers = LineRow::new(&header);
+ initial_registers.op_index.0 = 1;
+
+ let opcode = LineInstruction::FixedAddPc(10);
+
+ let mut expected_registers = initial_registers;
+ expected_registers.address.0 += 10;
+ expected_registers.op_index.0 = 0;
+
+ assert_exec_opcode(header, initial_registers, opcode, expected_registers, false);
+ }
+
+ #[test]
+ fn test_exec_set_prologue_end() {
+ let header = make_test_header(EndianSlice::new(&[], LittleEndian));
+
+ let mut initial_registers = LineRow::new(&header);
+ initial_registers.prologue_end = false;
+
+ let opcode = LineInstruction::SetPrologueEnd;
+
+ let mut expected_registers = initial_registers;
+ expected_registers.prologue_end = true;
+
+ assert_exec_opcode(header, initial_registers, opcode, expected_registers, false);
+ }
+
+ #[test]
+ fn test_exec_set_isa() {
+ let header = make_test_header(EndianSlice::new(&[], LittleEndian));
+ let initial_registers = LineRow::new(&header);
+ let opcode = LineInstruction::SetIsa(1993);
+
+ let mut expected_registers = initial_registers;
+ expected_registers.isa = 1993;
+
+ assert_exec_opcode(header, initial_registers, opcode, expected_registers, false);
+ }
+
+ #[test]
+ fn test_exec_unknown_standard_0() {
+ let header = make_test_header(EndianSlice::new(&[], LittleEndian));
+ let initial_registers = LineRow::new(&header);
+ let opcode = LineInstruction::UnknownStandard0(constants::DwLns(111));
+ let expected_registers = initial_registers;
+ assert_exec_opcode(header, initial_registers, opcode, expected_registers, false);
+ }
+
+ #[test]
+ fn test_exec_unknown_standard_1() {
+ let header = make_test_header(EndianSlice::new(&[], LittleEndian));
+ let initial_registers = LineRow::new(&header);
+ let opcode = LineInstruction::UnknownStandard1(constants::DwLns(111), 2);
+ let expected_registers = initial_registers;
+ assert_exec_opcode(header, initial_registers, opcode, expected_registers, false);
+ }
+
+ #[test]
+ fn test_exec_unknown_standard_n() {
+ let header = make_test_header(EndianSlice::new(&[], LittleEndian));
+ let initial_registers = LineRow::new(&header);
+ let opcode = LineInstruction::UnknownStandardN(
+ constants::DwLns(111),
+ EndianSlice::new(&[2, 2, 2], LittleEndian),
+ );
+ let expected_registers = initial_registers;
+ assert_exec_opcode(header, initial_registers, opcode, expected_registers, false);
+ }
+
+ #[test]
+ fn test_exec_end_sequence() {
+ let header = make_test_header(EndianSlice::new(&[], LittleEndian));
+ let initial_registers = LineRow::new(&header);
+ let opcode = LineInstruction::EndSequence;
+
+ let mut expected_registers = initial_registers;
+ expected_registers.end_sequence = true;
+
+ assert_exec_opcode(header, initial_registers, opcode, expected_registers, true);
+ }
+
+ #[test]
+ fn test_exec_set_address() {
+ let header = make_test_header(EndianSlice::new(&[], LittleEndian));
+ let initial_registers = LineRow::new(&header);
+ let opcode = LineInstruction::SetAddress(3030);
+
+ let mut expected_registers = initial_registers;
+ expected_registers.address.0 = 3030;
+
+ assert_exec_opcode(header, initial_registers, opcode, expected_registers, false);
+ }
+
+ #[test]
+ fn test_exec_define_file() {
+ let mut program = make_test_program(EndianSlice::new(&[], LittleEndian));
+ let mut row = LineRow::new(program.header());
+
+ let file = FileEntry {
+ path_name: AttributeValue::String(EndianSlice::new(b"test.cpp", LittleEndian)),
+ directory_index: 0,
+ timestamp: 0,
+ size: 0,
+ md5: [0; 16],
+ };
+
+ let opcode = LineInstruction::DefineFile(file);
+ let is_new_row = row.execute(opcode, &mut program);
+
+ assert_eq!(is_new_row, false);
+ assert_eq!(Some(&file), program.header().file_names.last());
+ }
+
+ #[test]
+ fn test_exec_set_discriminator() {
+ let header = make_test_header(EndianSlice::new(&[], LittleEndian));
+ let initial_registers = LineRow::new(&header);
+ let opcode = LineInstruction::SetDiscriminator(9);
+
+ let mut expected_registers = initial_registers;
+ expected_registers.discriminator = 9;
+
+ assert_exec_opcode(header, initial_registers, opcode, expected_registers, false);
+ }
+
+ #[test]
+ fn test_exec_unknown_extended() {
+ let header = make_test_header(EndianSlice::new(&[], LittleEndian));
+ let initial_registers = LineRow::new(&header);
+ let opcode = LineInstruction::UnknownExtended(
+ constants::DwLne(74),
+ EndianSlice::new(&[], LittleEndian),
+ );
+ let expected_registers = initial_registers;
+ assert_exec_opcode(header, initial_registers, opcode, expected_registers, false);
+ }
+
+ /// Ensure that `LineRows<R,P>` is covariant wrt R.
+ /// This only needs to compile.
+ #[allow(dead_code, unreachable_code, unused_variables)]
+ fn test_line_rows_variance<'a, 'b>(_: &'a [u8], _: &'b [u8])
+ where
+ 'a: 'b,
+ {
+ let a: &OneShotLineRows<EndianSlice<'a, LittleEndian>> = unimplemented!();
+ let _: &OneShotLineRows<EndianSlice<'b, LittleEndian>> = a;
+ }
+
+ #[test]
+ fn test_parse_debug_line_v5_ok() {
+ let expected_lengths = &[1, 2];
+ let expected_program = &[0, 1, 2, 3, 4];
+ let expected_rest = &[5, 6, 7, 8, 9];
+ let expected_include_directories = [
+ AttributeValue::String(EndianSlice::new(b"dir1", LittleEndian)),
+ AttributeValue::String(EndianSlice::new(b"dir2", LittleEndian)),
+ ];
+ let expected_file_names = [
+ FileEntry {
+ path_name: AttributeValue::String(EndianSlice::new(b"file1", LittleEndian)),
+ directory_index: 0,
+ timestamp: 0,
+ size: 0,
+ md5: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16],
+ },
+ FileEntry {
+ path_name: AttributeValue::String(EndianSlice::new(b"file2", LittleEndian)),
+ directory_index: 1,
+ timestamp: 0,
+ size: 0,
+ md5: [
+ 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26,
+ ],
+ },
+ ];
+
+ for format in vec![Format::Dwarf32, Format::Dwarf64] {
+ let length = Label::new();
+ let header_length = Label::new();
+ let start = Label::new();
+ let header_start = Label::new();
+ let end = Label::new();
+ let header_end = Label::new();
+ let section = Section::with_endian(Endian::Little)
+ .initial_length(format, &length, &start)
+ .D16(5)
+ // Address size.
+ .D8(4)
+ // Segment selector size.
+ .D8(0)
+ .word_label(format.word_size(), &header_length)
+ .mark(&header_start)
+ // Minimum instruction length.
+ .D8(1)
+ // Maximum operations per instruction.
+ .D8(1)
+ // Default is_stmt.
+ .D8(1)
+ // Line base.
+ .D8(0)
+ // Line range.
+ .D8(1)
+ // Opcode base.
+ .D8(expected_lengths.len() as u8 + 1)
+ // Standard opcode lengths for opcodes 1 .. opcode base - 1.
+ .append_bytes(expected_lengths)
+ // Directory entry format count.
+ .D8(1)
+ .uleb(constants::DW_LNCT_path.0 as u64)
+ .uleb(constants::DW_FORM_string.0 as u64)
+ // Directory count.
+ .D8(2)
+ .append_bytes(b"dir1\0")
+ .append_bytes(b"dir2\0")
+ // File entry format count.
+ .D8(3)
+ .uleb(constants::DW_LNCT_path.0 as u64)
+ .uleb(constants::DW_FORM_string.0 as u64)
+ .uleb(constants::DW_LNCT_directory_index.0 as u64)
+ .uleb(constants::DW_FORM_data1.0 as u64)
+ .uleb(constants::DW_LNCT_MD5.0 as u64)
+ .uleb(constants::DW_FORM_data16.0 as u64)
+ // File count.
+ .D8(2)
+ .append_bytes(b"file1\0")
+ .D8(0)
+ .append_bytes(&expected_file_names[0].md5)
+ .append_bytes(b"file2\0")
+ .D8(1)
+ .append_bytes(&expected_file_names[1].md5)
+ .mark(&header_end)
+ // Dummy line program data.
+ .append_bytes(expected_program)
+ .mark(&end)
+ // Dummy trailing data.
+ .append_bytes(expected_rest);
+ length.set_const((&end - &start) as u64);
+ header_length.set_const((&header_end - &header_start) as u64);
+ let section = section.get_contents().unwrap();
+
+ let input = &mut EndianSlice::new(&section, LittleEndian);
+
+ let header = LineProgramHeader::parse(input, DebugLineOffset(0), 0, None, None)
+ .expect("should parse header ok");
+
+ assert_eq!(header.raw_program_buf().slice(), expected_program);
+ assert_eq!(input.slice(), expected_rest);
+
+ assert_eq!(header.offset, DebugLineOffset(0));
+ assert_eq!(header.version(), 5);
+ assert_eq!(header.address_size(), 4);
+ assert_eq!(header.minimum_instruction_length(), 1);
+ assert_eq!(header.maximum_operations_per_instruction(), 1);
+ assert_eq!(header.default_is_stmt(), true);
+ assert_eq!(header.line_base(), 0);
+ assert_eq!(header.line_range(), 1);
+ assert_eq!(header.opcode_base(), expected_lengths.len() as u8 + 1);
+ assert_eq!(header.standard_opcode_lengths().slice(), expected_lengths);
+ assert_eq!(
+ header.directory_entry_format(),
+ &[FileEntryFormat {
+ content_type: constants::DW_LNCT_path,
+ form: constants::DW_FORM_string,
+ }]
+ );
+ assert_eq!(header.include_directories(), expected_include_directories);
+ assert_eq!(header.directory(0), Some(expected_include_directories[0]));
+ assert_eq!(
+ header.file_name_entry_format(),
+ &[
+ FileEntryFormat {
+ content_type: constants::DW_LNCT_path,
+ form: constants::DW_FORM_string,
+ },
+ FileEntryFormat {
+ content_type: constants::DW_LNCT_directory_index,
+ form: constants::DW_FORM_data1,
+ },
+ FileEntryFormat {
+ content_type: constants::DW_LNCT_MD5,
+ form: constants::DW_FORM_data16,
+ }
+ ]
+ );
+ assert_eq!(header.file_names(), expected_file_names);
+ assert_eq!(header.file(0), Some(&expected_file_names[0]));
+ }
+ }
+}
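For orientation, the tests above drive internal types directly; the usual entry point for consumers of the vendored crate is `DebugLine`. A minimal sketch of iterating the rows of one line program follows (the `buf` slice holding `.debug_line`, the unit offset of 0, and the 8-byte address size are assumptions for illustration; in real code they come from the object file and the unit's DW_AT_stmt_list attribute):

    use gimli::{DebugLine, DebugLineOffset, LittleEndian};

    fn dump_rows(buf: &[u8]) -> Result<(), gimli::Error> {
        let debug_line = DebugLine::new(buf, LittleEndian);
        // Offset and address size are placeholders for this sketch.
        let program = debug_line.program(DebugLineOffset(0), 8, None, None)?;
        let mut rows = program.rows();
        while let Some((header, row)) = rows.next_row()? {
            // `row.line()` is None for rows that do not map to a source line.
            println!(
                "{:#x} line={:?} file={:?}",
                row.address(),
                row.line(),
                row.file(header).map(|f| f.path_name())
            );
        }
        Ok(())
    }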
diff --git a/vendor/gimli-0.26.2/src/read/lists.rs b/vendor/gimli-0.26.2/src/read/lists.rs
new file mode 100644
index 000000000..898a757d3
--- /dev/null
+++ b/vendor/gimli-0.26.2/src/read/lists.rs
@@ -0,0 +1,68 @@
+use crate::common::{Encoding, Format};
+use crate::read::{Error, Reader, Result};
+
+#[derive(Debug, Clone, Copy)]
+pub(crate) struct ListsHeader {
+ encoding: Encoding,
+ #[allow(dead_code)]
+ offset_entry_count: u32,
+}
+
+impl Default for ListsHeader {
+ fn default() -> Self {
+ ListsHeader {
+ encoding: Encoding {
+ format: Format::Dwarf32,
+ version: 5,
+ address_size: 0,
+ },
+ offset_entry_count: 0,
+ }
+ }
+}
+
+impl ListsHeader {
+ /// Return the serialized size of the table header.
+ #[allow(dead_code)]
+ #[inline]
+ fn size(self) -> u8 {
+ // initial_length + version + address_size + segment_selector_size + offset_entry_count
+ ListsHeader::size_for_encoding(self.encoding)
+ }
+
+ /// Return the serialized size of the table header.
+ #[inline]
+ pub(crate) fn size_for_encoding(encoding: Encoding) -> u8 {
+ // initial_length + version + address_size + segment_selector_size + offset_entry_count
+ encoding.format.initial_length_size() + 2 + 1 + 1 + 4
+ }
+}
+
+// TODO: add an iterator over headers in the appropriate sections.
+#[allow(dead_code)]
+fn parse_header<R: Reader>(input: &mut R) -> Result<ListsHeader> {
+ let (length, format) = input.read_initial_length()?;
+ input.truncate(length)?;
+
+ let version = input.read_u16()?;
+ if version != 5 {
+ return Err(Error::UnknownVersion(u64::from(version)));
+ }
+
+ let address_size = input.read_u8()?;
+ let segment_selector_size = input.read_u8()?;
+ if segment_selector_size != 0 {
+ return Err(Error::UnsupportedSegmentSize);
+ }
+ let offset_entry_count = input.read_u32()?;
+
+ let encoding = Encoding {
+ format,
+ version,
+ address_size,
+ };
+ Ok(ListsHeader {
+ encoding,
+ offset_entry_count,
+ })
+}
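As a sanity check on the arithmetic in `size_for_encoding`: the initial length field is 4 bytes in DWARF32 and 12 bytes in DWARF64 (the 0xffff_ffff escape plus an 8-byte length), so the header sizes work out to 12 and 20 bytes respectively. A throwaway test sketch of that sum (not part of the vendored code):

    #[test]
    fn lists_header_size_arithmetic() {
        // initial_length + version + address_size + segment_selector_size + offset_entry_count
        assert_eq!(4 + 2 + 1 + 1 + 4, 12); // DWARF32
        assert_eq!(12 + 2 + 1 + 1 + 4, 20); // DWARF64
    }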
diff --git a/vendor/gimli-0.26.2/src/read/loclists.rs b/vendor/gimli-0.26.2/src/read/loclists.rs
new file mode 100644
index 000000000..3902c181b
--- /dev/null
+++ b/vendor/gimli-0.26.2/src/read/loclists.rs
@@ -0,0 +1,1514 @@
+use crate::common::{
+ DebugAddrBase, DebugAddrIndex, DebugLocListsBase, DebugLocListsIndex, DwarfFileType, Encoding,
+ LocationListsOffset, SectionId,
+};
+use crate::constants;
+use crate::endianity::Endianity;
+use crate::read::{
+ lists::ListsHeader, DebugAddr, EndianSlice, Error, Expression, Range, RawRange, Reader,
+ ReaderOffset, ReaderOffsetId, Result, Section,
+};
+
+/// The raw contents of the `.debug_loc` section.
+#[derive(Debug, Default, Clone, Copy)]
+pub struct DebugLoc<R> {
+ pub(crate) section: R,
+}
+
+impl<'input, Endian> DebugLoc<EndianSlice<'input, Endian>>
+where
+ Endian: Endianity,
+{
+ /// Construct a new `DebugLoc` instance from the data in the `.debug_loc`
+ /// section.
+ ///
+ /// It is the caller's responsibility to read the `.debug_loc` section and
+ /// present it as a `&[u8]` slice. That means using some ELF loader on
+ /// Linux, a Mach-O loader on macOS, etc.
+ ///
+ /// ```
+ /// use gimli::{DebugLoc, LittleEndian};
+ ///
+ /// # let buf = [0x00, 0x01, 0x02, 0x03];
+ /// # let read_debug_loc_section_somehow = || &buf;
+ /// let debug_loc = DebugLoc::new(read_debug_loc_section_somehow(), LittleEndian);
+ /// ```
+ pub fn new(section: &'input [u8], endian: Endian) -> Self {
+ Self::from(EndianSlice::new(section, endian))
+ }
+}
+
+impl<R> Section<R> for DebugLoc<R> {
+ fn id() -> SectionId {
+ SectionId::DebugLoc
+ }
+
+ fn reader(&self) -> &R {
+ &self.section
+ }
+}
+
+impl<R> From<R> for DebugLoc<R> {
+ fn from(section: R) -> Self {
+ DebugLoc { section }
+ }
+}
+
+/// The `DebugLocLists` struct represents the DWARF data
+/// found in the `.debug_loclists` section.
+#[derive(Debug, Default, Clone, Copy)]
+pub struct DebugLocLists<R> {
+ section: R,
+}
+
+impl<'input, Endian> DebugLocLists<EndianSlice<'input, Endian>>
+where
+ Endian: Endianity,
+{
+ /// Construct a new `DebugLocLists` instance from the data in the `.debug_loclists`
+ /// section.
+ ///
+ /// It is the caller's responsibility to read the `.debug_loclists` section and
+ /// present it as a `&[u8]` slice. That means using some ELF loader on
+ /// Linux, a Mach-O loader on macOS, etc.
+ ///
+ /// ```
+ /// use gimli::{DebugLocLists, LittleEndian};
+ ///
+ /// # let buf = [0x00, 0x01, 0x02, 0x03];
+ /// # let read_debug_loclists_section_somehow = || &buf;
+ /// let debug_loclists = DebugLocLists::new(read_debug_loclists_section_somehow(), LittleEndian);
+ /// ```
+ pub fn new(section: &'input [u8], endian: Endian) -> Self {
+ Self::from(EndianSlice::new(section, endian))
+ }
+}
+
+impl<R> Section<R> for DebugLocLists<R> {
+ fn id() -> SectionId {
+ SectionId::DebugLocLists
+ }
+
+ fn reader(&self) -> &R {
+ &self.section
+ }
+}
+
+impl<R> From<R> for DebugLocLists<R> {
+ fn from(section: R) -> Self {
+ DebugLocLists { section }
+ }
+}
+
+pub(crate) type LocListsHeader = ListsHeader;
+
+impl<Offset> DebugLocListsBase<Offset>
+where
+ Offset: ReaderOffset,
+{
+ /// Returns a `DebugLocListsBase` with the default value of DW_AT_loclists_base
+ /// for the given `Encoding` and `DwarfFileType`.
+ pub fn default_for_encoding_and_file(
+ encoding: Encoding,
+ file_type: DwarfFileType,
+ ) -> DebugLocListsBase<Offset> {
+ if encoding.version >= 5 && file_type == DwarfFileType::Dwo {
+ // In .dwo files, the compiler omits the DW_AT_loclists_base attribute (because there is
+ // only a single unit in the file) but we must skip past the header, which the attribute
+ // would normally do for us.
+ DebugLocListsBase(Offset::from_u8(LocListsHeader::size_for_encoding(encoding)))
+ } else {
+ DebugLocListsBase(Offset::from_u8(0))
+ }
+ }
+}
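A concrete illustration of the .dwo special case (a sketch with assumed encoding values, not part of the vendored code): for a DWARF32, version 5 unit the `.debug_loclists` header is 12 bytes, so the default base skips exactly that much, while outside .dwo files the default base stays at zero.

    use gimli::{DebugLocListsBase, DwarfFileType, Encoding, Format};

    fn main() {
        let encoding = Encoding { format: Format::Dwarf32, version: 5, address_size: 8 };

        // 4 (initial length) + 2 + 1 + 1 + 4 = 12 bytes of header to skip.
        let base: DebugLocListsBase<usize> =
            DebugLocListsBase::default_for_encoding_and_file(encoding, DwarfFileType::Dwo);
        assert_eq!(base, DebugLocListsBase(12));

        // Outside of .dwo files the DW_AT_loclists_base attribute is expected
        // to be present, so the default is zero.
        let base = DebugLocListsBase::<usize>::default_for_encoding_and_file(
            encoding, DwarfFileType::Main);
        assert_eq!(base, DebugLocListsBase(0));
    }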
+
+/// The DWARF data found in `.debug_loc` and `.debug_loclists` sections.
+#[derive(Debug, Default, Clone, Copy)]
+pub struct LocationLists<R> {
+ debug_loc: DebugLoc<R>,
+ debug_loclists: DebugLocLists<R>,
+}
+
+impl<R> LocationLists<R> {
+ /// Construct a new `LocationLists` instance from the data in the `.debug_loc` and
+ /// `.debug_loclists` sections.
+ pub fn new(debug_loc: DebugLoc<R>, debug_loclists: DebugLocLists<R>) -> LocationLists<R> {
+ LocationLists {
+ debug_loc,
+ debug_loclists,
+ }
+ }
+}
+
+impl<T> LocationLists<T> {
+ /// Create a `LocationLists` that references the data in `self`.
+ ///
+ /// This is useful when `R` implements `Reader` but `T` does not.
+ ///
+ /// ## Example Usage
+ ///
+ /// ```rust,no_run
+ /// # let load_section = || unimplemented!();
+ /// // Read the DWARF section into a `Vec` with whatever object loader you're using.
+ /// let owned_section: gimli::LocationLists<Vec<u8>> = load_section();
+ /// // Create a reference to the DWARF section.
+ /// let section = owned_section.borrow(|section| {
+ /// gimli::EndianSlice::new(&section, gimli::LittleEndian)
+ /// });
+ /// ```
+ pub fn borrow<'a, F, R>(&'a self, mut borrow: F) -> LocationLists<R>
+ where
+ F: FnMut(&'a T) -> R,
+ {
+ LocationLists {
+ debug_loc: borrow(&self.debug_loc.section).into(),
+ debug_loclists: borrow(&self.debug_loclists.section).into(),
+ }
+ }
+}
+
+impl<R: Reader> LocationLists<R> {
+ /// Iterate over the `LocationListEntry`s starting at the given offset.
+ ///
+ /// The `unit_encoding` must match the compilation unit that the
+ /// offset was contained in.
+ ///
+ /// The `base_address` should be obtained from the `DW_AT_low_pc` attribute in the
+ /// `DW_TAG_compile_unit` entry for the compilation unit that contains this location
+ /// list.
+ ///
+ /// Can be [used with
+ /// `FallibleIterator`](./index.html#using-with-fallibleiterator).
+ pub fn locations(
+ &self,
+ offset: LocationListsOffset<R::Offset>,
+ unit_encoding: Encoding,
+ base_address: u64,
+ debug_addr: &DebugAddr<R>,
+ debug_addr_base: DebugAddrBase<R::Offset>,
+ ) -> Result<LocListIter<R>> {
+ Ok(LocListIter::new(
+ self.raw_locations(offset, unit_encoding)?,
+ base_address,
+ debug_addr.clone(),
+ debug_addr_base,
+ ))
+ }
+
+ /// Similar to `locations`, but with special handling for .dwo files.
+ /// This should only be used when this `LocationLists` was loaded from a
+ /// .dwo file.
+ pub fn locations_dwo(
+ &self,
+ offset: LocationListsOffset<R::Offset>,
+ unit_encoding: Encoding,
+ base_address: u64,
+ debug_addr: &DebugAddr<R>,
+ debug_addr_base: DebugAddrBase<R::Offset>,
+ ) -> Result<LocListIter<R>> {
+ Ok(LocListIter::new(
+ self.raw_locations_dwo(offset, unit_encoding)?,
+ base_address,
+ debug_addr.clone(),
+ debug_addr_base,
+ ))
+ }
+
+ /// Iterate over the raw `LocationListEntry`s starting at the given offset.
+ ///
+ /// The `unit_encoding` must match the compilation unit that the
+ /// offset was contained in.
+ ///
+ /// This iterator does not perform any processing of the location entries,
+ /// such as handling base addresses.
+ ///
+ /// Can be [used with
+ /// `FallibleIterator`](./index.html#using-with-fallibleiterator).
+ pub fn raw_locations(
+ &self,
+ offset: LocationListsOffset<R::Offset>,
+ unit_encoding: Encoding,
+ ) -> Result<RawLocListIter<R>> {
+ let (mut input, format) = if unit_encoding.version <= 4 {
+ (self.debug_loc.section.clone(), LocListsFormat::Bare)
+ } else {
+ (self.debug_loclists.section.clone(), LocListsFormat::LLE)
+ };
+ input.skip(offset.0)?;
+ Ok(RawLocListIter::new(input, unit_encoding, format))
+ }
+
+ /// Similar to `raw_locations`, but with special handling for .dwo files.
+ /// This should only be used when this `LocationLists` was loaded from a
+ /// .dwo file.
+ pub fn raw_locations_dwo(
+ &self,
+ offset: LocationListsOffset<R::Offset>,
+ unit_encoding: Encoding,
+ ) -> Result<RawLocListIter<R>> {
+ let mut input = if unit_encoding.version <= 4 {
+ // In the GNU split dwarf extension the locations are present in the
+ // .debug_loc section but are encoded with the DW_LLE values used
+ // for the DWARF 5 .debug_loclists section.
+ self.debug_loc.section.clone()
+ } else {
+ self.debug_loclists.section.clone()
+ };
+ input.skip(offset.0)?;
+ Ok(RawLocListIter::new(
+ input,
+ unit_encoding,
+ LocListsFormat::LLE,
+ ))
+ }
+
+ /// Returns the `.debug_loclists` offset at the given `base` and `index`.
+ ///
+ /// The `base` must be the `DW_AT_loclists_base` value from the compilation unit DIE.
+ /// This is an offset that points to the first entry following the header.
+ ///
+ /// The `index` is the value of a `DW_FORM_loclistx` attribute.
+ pub fn get_offset(
+ &self,
+ unit_encoding: Encoding,
+ base: DebugLocListsBase<R::Offset>,
+ index: DebugLocListsIndex<R::Offset>,
+ ) -> Result<LocationListsOffset<R::Offset>> {
+ let format = unit_encoding.format;
+ let input = &mut self.debug_loclists.section.clone();
+ input.skip(base.0)?;
+ input.skip(R::Offset::from_u64(
+ index.0.into_u64() * u64::from(format.word_size()),
+ )?)?;
+ input
+ .read_offset(format)
+ .map(|x| LocationListsOffset(base.0 + x))
+ }
+
+ /// Call `Reader::lookup_offset_id` for each section, and return the first match.
+ pub fn lookup_offset_id(&self, id: ReaderOffsetId) -> Option<(SectionId, R::Offset)> {
+ self.debug_loc
+ .lookup_offset_id(id)
+ .or_else(|| self.debug_loclists.lookup_offset_id(id))
+ }
+}
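Putting the pieces together, a hedged usage sketch of the iteration API (the section slices, list offset, base address, and encoding are all placeholders; in real code they come from the object file and the unit's attributes):

    use gimli::{
        DebugAddr, DebugAddrBase, DebugLoc, DebugLocLists, Encoding, EndianSlice, Format,
        LittleEndian, LocationLists, LocationListsOffset,
    };

    fn dump_locations(loc: &[u8], loclists: &[u8], addr: &[u8]) -> Result<(), gimli::Error> {
        let lists = LocationLists::new(
            DebugLoc::new(loc, LittleEndian),
            DebugLocLists::new(loclists, LittleEndian),
        );
        let debug_addr = DebugAddr::from(EndianSlice::new(addr, LittleEndian));
        let encoding = Encoding { format: Format::Dwarf32, version: 5, address_size: 8 };
        let mut iter = lists.locations(
            LocationListsOffset(0), // placeholder: normally a DW_AT_location value
            encoding,
            0,                      // placeholder: normally the unit's DW_AT_low_pc
            &debug_addr,
            DebugAddrBase(0),
        )?;
        while let Some(entry) = iter.next()? {
            println!(
                "{:#x}..{:#x}: {} byte expression",
                entry.range.begin,
                entry.range.end,
                entry.data.0.len()
            );
        }
        Ok(())
    }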
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+enum LocListsFormat {
+ /// The bare location list format used before DWARF 5.
+ Bare,
+ /// The DW_LLE encoded location list format used in DWARF 5 and the non-standard GNU
+ /// split dwarf extension.
+ LLE,
+}
+
+/// A raw iterator over a location list.
+///
+/// This iterator does not perform any processing of the location entries,
+/// such as handling base addresses.
+#[derive(Debug)]
+pub struct RawLocListIter<R: Reader> {
+ input: R,
+ encoding: Encoding,
+ format: LocListsFormat,
+}
+
+/// A raw entry in .debug_loclists.
+#[derive(Clone, Debug)]
+pub enum RawLocListEntry<R: Reader> {
+ /// A location from DWARF version <= 4.
+ AddressOrOffsetPair {
+ /// Start of range. May be an address or an offset.
+ begin: u64,
+ /// End of range. May be an address or an offset.
+ end: u64,
+ /// expression
+ data: Expression<R>,
+ },
+ /// DW_LLE_base_address
+ BaseAddress {
+ /// base address
+ addr: u64,
+ },
+ /// DW_LLE_base_addressx
+ BaseAddressx {
+ /// base address
+ addr: DebugAddrIndex<R::Offset>,
+ },
+ /// DW_LLE_startx_endx
+ StartxEndx {
+ /// start of range
+ begin: DebugAddrIndex<R::Offset>,
+ /// end of range
+ end: DebugAddrIndex<R::Offset>,
+ /// expression
+ data: Expression<R>,
+ },
+ /// DW_LLE_startx_length
+ StartxLength {
+ /// start of range
+ begin: DebugAddrIndex<R::Offset>,
+ /// length of range
+ length: u64,
+ /// expression
+ data: Expression<R>,
+ },
+ /// DW_LLE_offset_pair
+ OffsetPair {
+ /// start of range
+ begin: u64,
+ /// end of range
+ end: u64,
+ /// expression
+ data: Expression<R>,
+ },
+ /// DW_LLE_default_location
+ DefaultLocation {
+ /// expression
+ data: Expression<R>,
+ },
+ /// DW_LLE_start_end
+ StartEnd {
+ /// start of range
+ begin: u64,
+ /// end of range
+ end: u64,
+ /// expression
+ data: Expression<R>,
+ },
+ /// DW_LLE_start_length
+ StartLength {
+ /// start of range
+ begin: u64,
+ /// length of range
+ length: u64,
+ /// expression
+ data: Expression<R>,
+ },
+}
+
+fn parse_data<R: Reader>(input: &mut R, encoding: Encoding) -> Result<Expression<R>> {
+ if encoding.version >= 5 {
+ let len = R::Offset::from_u64(input.read_uleb128()?)?;
+ Ok(Expression(input.split(len)?))
+ } else {
+ // In the GNU split-dwarf extension this is a fixed 2 byte value.
+ let len = R::Offset::from_u16(input.read_u16()?);
+ Ok(Expression(input.split(len)?))
+ }
+}
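As a worked example of the two length encodings: a four-byte expression in DWARF 5 is preceded by the single ULEB128 byte 0x04, while the pre-5 GNU split-dwarf form always spends two bytes on the length (0x04 0x00 little-endian). The tests further down encode the DWARF 5 form with `.uleb(4)` before each 4-byte expression.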
+
+impl<R: Reader> RawLocListEntry<R> {
+ /// Parse a location list entry from `.debug_loclists`
+ fn parse(input: &mut R, encoding: Encoding, format: LocListsFormat) -> Result<Option<Self>> {
+ match format {
+ LocListsFormat::Bare => {
+ let range = RawRange::parse(input, encoding.address_size)?;
+ return Ok(if range.is_end() {
+ None
+ } else if range.is_base_address(encoding.address_size) {
+ Some(RawLocListEntry::BaseAddress { addr: range.end })
+ } else {
+ let len = R::Offset::from_u16(input.read_u16()?);
+ let data = Expression(input.split(len)?);
+ Some(RawLocListEntry::AddressOrOffsetPair {
+ begin: range.begin,
+ end: range.end,
+ data,
+ })
+ });
+ }
+ LocListsFormat::LLE => Ok(match constants::DwLle(input.read_u8()?) {
+ constants::DW_LLE_end_of_list => None,
+ constants::DW_LLE_base_addressx => Some(RawLocListEntry::BaseAddressx {
+ addr: DebugAddrIndex(input.read_uleb128().and_then(R::Offset::from_u64)?),
+ }),
+ constants::DW_LLE_startx_endx => Some(RawLocListEntry::StartxEndx {
+ begin: DebugAddrIndex(input.read_uleb128().and_then(R::Offset::from_u64)?),
+ end: DebugAddrIndex(input.read_uleb128().and_then(R::Offset::from_u64)?),
+ data: parse_data(input, encoding)?,
+ }),
+ constants::DW_LLE_startx_length => Some(RawLocListEntry::StartxLength {
+ begin: DebugAddrIndex(input.read_uleb128().and_then(R::Offset::from_u64)?),
+ length: if encoding.version >= 5 {
+ input.read_uleb128()?
+ } else {
+ // In the GNU split-dwarf extension this is a fixed 4 byte value.
+ input.read_u32()? as u64
+ },
+ data: parse_data(input, encoding)?,
+ }),
+ constants::DW_LLE_offset_pair => Some(RawLocListEntry::OffsetPair {
+ begin: input.read_uleb128()?,
+ end: input.read_uleb128()?,
+ data: parse_data(input, encoding)?,
+ }),
+ constants::DW_LLE_default_location => Some(RawLocListEntry::DefaultLocation {
+ data: parse_data(input, encoding)?,
+ }),
+ constants::DW_LLE_base_address => Some(RawLocListEntry::BaseAddress {
+ addr: input.read_address(encoding.address_size)?,
+ }),
+ constants::DW_LLE_start_end => Some(RawLocListEntry::StartEnd {
+ begin: input.read_address(encoding.address_size)?,
+ end: input.read_address(encoding.address_size)?,
+ data: parse_data(input, encoding)?,
+ }),
+ constants::DW_LLE_start_length => Some(RawLocListEntry::StartLength {
+ begin: input.read_address(encoding.address_size)?,
+ length: input.read_uleb128()?,
+ data: parse_data(input, encoding)?,
+ }),
+ _ => {
+ return Err(Error::InvalidAddressRange);
+ }
+ }),
+ }
+ }
+}
+
+impl<R: Reader> RawLocListIter<R> {
+ /// Construct a `RawLocListIter`.
+ fn new(input: R, encoding: Encoding, format: LocListsFormat) -> RawLocListIter<R> {
+ RawLocListIter {
+ input,
+ encoding,
+ format,
+ }
+ }
+
+ /// Advance the iterator to the next location.
+ pub fn next(&mut self) -> Result<Option<RawLocListEntry<R>>> {
+ if self.input.is_empty() {
+ return Ok(None);
+ }
+
+ match RawLocListEntry::parse(&mut self.input, self.encoding, self.format) {
+ Ok(entry) => {
+ if entry.is_none() {
+ self.input.empty();
+ }
+ Ok(entry)
+ }
+ Err(e) => {
+ self.input.empty();
+ Err(e)
+ }
+ }
+ }
+}
+
+#[cfg(feature = "fallible-iterator")]
+impl<R: Reader> fallible_iterator::FallibleIterator for RawLocListIter<R> {
+ type Item = RawLocListEntry<R>;
+ type Error = Error;
+
+ fn next(&mut self) -> ::core::result::Result<Option<Self::Item>, Self::Error> {
+ RawLocListIter::next(self)
+ }
+}
+
+/// An iterator over a location list.
+///
+/// This iterator internally handles processing of base address selection entries
+/// and list end entries. Thus, it only returns location entries that are valid
+/// and already adjusted for the base address.
+#[derive(Debug)]
+pub struct LocListIter<R: Reader> {
+ raw: RawLocListIter<R>,
+ base_address: u64,
+ debug_addr: DebugAddr<R>,
+ debug_addr_base: DebugAddrBase<R::Offset>,
+}
+
+impl<R: Reader> LocListIter<R> {
+ /// Construct a `LocListIter`.
+ fn new(
+ raw: RawLocListIter<R>,
+ base_address: u64,
+ debug_addr: DebugAddr<R>,
+ debug_addr_base: DebugAddrBase<R::Offset>,
+ ) -> LocListIter<R> {
+ LocListIter {
+ raw,
+ base_address,
+ debug_addr,
+ debug_addr_base,
+ }
+ }
+
+ #[inline]
+ fn get_address(&self, index: DebugAddrIndex<R::Offset>) -> Result<u64> {
+ self.debug_addr
+ .get_address(self.raw.encoding.address_size, self.debug_addr_base, index)
+ }
+
+ /// Advance the iterator to the next location.
+ pub fn next(&mut self) -> Result<Option<LocationListEntry<R>>> {
+ loop {
+ let raw_loc = match self.raw.next()? {
+ Some(loc) => loc,
+ None => return Ok(None),
+ };
+
+ let (range, data) = match raw_loc {
+ RawLocListEntry::BaseAddress { addr } => {
+ self.base_address = addr;
+ continue;
+ }
+ RawLocListEntry::BaseAddressx { addr } => {
+ self.base_address = self.get_address(addr)?;
+ continue;
+ }
+ RawLocListEntry::StartxEndx { begin, end, data } => {
+ let begin = self.get_address(begin)?;
+ let end = self.get_address(end)?;
+ (Range { begin, end }, data)
+ }
+ RawLocListEntry::StartxLength {
+ begin,
+ length,
+ data,
+ } => {
+ let begin = self.get_address(begin)?;
+ let end = begin + length;
+ (Range { begin, end }, data)
+ }
+ RawLocListEntry::DefaultLocation { data } => (
+ Range {
+ begin: 0,
+ end: u64::max_value(),
+ },
+ data,
+ ),
+ RawLocListEntry::AddressOrOffsetPair { begin, end, data }
+ | RawLocListEntry::OffsetPair { begin, end, data } => {
+ let mut range = Range { begin, end };
+ range.add_base_address(self.base_address, self.raw.encoding.address_size);
+ (range, data)
+ }
+ RawLocListEntry::StartEnd { begin, end, data } => (Range { begin, end }, data),
+ RawLocListEntry::StartLength {
+ begin,
+ length,
+ data,
+ } => (
+ Range {
+ begin,
+ end: begin + length,
+ },
+ data,
+ ),
+ };
+
+ if range.begin > range.end {
+ self.raw.input.empty();
+ return Err(Error::InvalidLocationAddressRange);
+ }
+
+ return Ok(Some(LocationListEntry { range, data }));
+ }
+ }
+}
+
+#[cfg(feature = "fallible-iterator")]
+impl<R: Reader> fallible_iterator::FallibleIterator for LocListIter<R> {
+ type Item = LocationListEntry<R>;
+ type Error = Error;
+
+ fn next(&mut self) -> ::core::result::Result<Option<Self::Item>, Self::Error> {
+ LocListIter::next(self)
+ }
+}
+
+/// A location list entry from the `.debug_loc` or `.debug_loclists` sections.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct LocationListEntry<R: Reader> {
+ /// The address range that this location is valid for.
+ pub range: Range,
+
+ /// The data containing a single location description.
+ pub data: Expression<R>,
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::common::Format;
+ use crate::endianity::LittleEndian;
+ use crate::read::{EndianSlice, Range};
+ use crate::test_util::GimliSectionMethods;
+ use test_assembler::{Endian, Label, LabelMaker, Section};
+
+ #[test]
+ fn test_loclists_32() {
+ let encoding = Encoding {
+ format: Format::Dwarf32,
+ version: 5,
+ address_size: 4,
+ };
+
+ let section = Section::with_endian(Endian::Little)
+ .L32(0x0300_0000)
+ .L32(0x0301_0300)
+ .L32(0x0301_0400)
+ .L32(0x0301_0500);
+ let buf = section.get_contents().unwrap();
+ let debug_addr = &DebugAddr::from(EndianSlice::new(&buf, LittleEndian));
+ let debug_addr_base = DebugAddrBase(0);
+
+ let start = Label::new();
+ let first = Label::new();
+ let size = Label::new();
+ #[rustfmt::skip]
+ let section = Section::with_endian(Endian::Little)
+ // Header
+ .mark(&start)
+ .L32(&size)
+ .L16(encoding.version)
+ .L8(encoding.address_size)
+ .L8(0)
+ .L32(0)
+ .mark(&first)
+ // OffsetPair
+ .L8(4).uleb(0x10200).uleb(0x10300).uleb(4).L32(2)
+ // A base address selection followed by an OffsetPair.
+ .L8(6).L32(0x0200_0000)
+ .L8(4).uleb(0x10400).uleb(0x10500).uleb(4).L32(3)
+ // An empty OffsetPair followed by a normal OffsetPair.
+ .L8(4).uleb(0x10600).uleb(0x10600).uleb(4).L32(4)
+ .L8(4).uleb(0x10800).uleb(0x10900).uleb(4).L32(5)
+ // A StartEnd
+ .L8(7).L32(0x201_0a00).L32(0x201_0b00).uleb(4).L32(6)
+ // A StartLength
+ .L8(8).L32(0x201_0c00).uleb(0x100).uleb(4).L32(7)
+ // An OffsetPair that starts at 0.
+ .L8(4).uleb(0).uleb(1).uleb(4).L32(8)
+ // An OffsetPair that ends at -1.
+ .L8(6).L32(0)
+ .L8(4).uleb(0).uleb(0xffff_ffff).uleb(4).L32(9)
+ // A DefaultLocation
+ .L8(5).uleb(4).L32(10)
+ // A BaseAddressx + OffsetPair
+ .L8(1).uleb(0)
+ .L8(4).uleb(0x10100).uleb(0x10200).uleb(4).L32(11)
+ // A StartxEndx
+ .L8(2).uleb(1).uleb(2).uleb(4).L32(12)
+ // A StartxLength
+ .L8(3).uleb(3).uleb(0x100).uleb(4).L32(13)
+ // A range end.
+ .L8(0)
+ // Some extra data.
+ .L32(0xffff_ffff);
+ size.set_const((&section.here() - &start - 4) as u64);
+
+ let buf = section.get_contents().unwrap();
+ let debug_loc = DebugLoc::new(&[], LittleEndian);
+ let debug_loclists = DebugLocLists::new(&buf, LittleEndian);
+ let loclists = LocationLists::new(debug_loc, debug_loclists);
+ let offset = LocationListsOffset((&first - &start) as usize);
+ let mut locations = loclists
+ .locations(offset, encoding, 0x0100_0000, debug_addr, debug_addr_base)
+ .unwrap();
+
+ // A normal location.
+ assert_eq!(
+ locations.next(),
+ Ok(Some(LocationListEntry {
+ range: Range {
+ begin: 0x0101_0200,
+ end: 0x0101_0300,
+ },
+ data: Expression(EndianSlice::new(&[2, 0, 0, 0], LittleEndian)),
+ }))
+ );
+
+ // A base address selection followed by a normal location.
+ assert_eq!(
+ locations.next(),
+ Ok(Some(LocationListEntry {
+ range: Range {
+ begin: 0x0201_0400,
+ end: 0x0201_0500,
+ },
+ data: Expression(EndianSlice::new(&[3, 0, 0, 0], LittleEndian)),
+ }))
+ );
+
+ // An empty location range followed by a normal location.
+ assert_eq!(
+ locations.next(),
+ Ok(Some(LocationListEntry {
+ range: Range {
+ begin: 0x0201_0600,
+ end: 0x0201_0600,
+ },
+ data: Expression(EndianSlice::new(&[4, 0, 0, 0], LittleEndian)),
+ }))
+ );
+ assert_eq!(
+ locations.next(),
+ Ok(Some(LocationListEntry {
+ range: Range {
+ begin: 0x0201_0800,
+ end: 0x0201_0900,
+ },
+ data: Expression(EndianSlice::new(&[5, 0, 0, 0], LittleEndian)),
+ }))
+ );
+
+ // A normal location.
+ assert_eq!(
+ locations.next(),
+ Ok(Some(LocationListEntry {
+ range: Range {
+ begin: 0x0201_0a00,
+ end: 0x0201_0b00,
+ },
+ data: Expression(EndianSlice::new(&[6, 0, 0, 0], LittleEndian)),
+ }))
+ );
+
+ // A normal location.
+ assert_eq!(
+ locations.next(),
+ Ok(Some(LocationListEntry {
+ range: Range {
+ begin: 0x0201_0c00,
+ end: 0x0201_0d00,
+ },
+ data: Expression(EndianSlice::new(&[7, 0, 0, 0], LittleEndian)),
+ }))
+ );
+
+ // A location range that starts at 0.
+ assert_eq!(
+ locations.next(),
+ Ok(Some(LocationListEntry {
+ range: Range {
+ begin: 0x0200_0000,
+ end: 0x0200_0001,
+ },
+ data: Expression(EndianSlice::new(&[8, 0, 0, 0], LittleEndian)),
+ }))
+ );
+
+ // A location range that ends at -1.
+ assert_eq!(
+ locations.next(),
+ Ok(Some(LocationListEntry {
+ range: Range {
+ begin: 0x0000_0000,
+ end: 0xffff_ffff,
+ },
+ data: Expression(EndianSlice::new(&[9, 0, 0, 0], LittleEndian)),
+ }))
+ );
+
+ // A DefaultLocation.
+ assert_eq!(
+ locations.next(),
+ Ok(Some(LocationListEntry {
+ range: Range {
+ begin: 0,
+ end: u64::max_value(),
+ },
+ data: Expression(EndianSlice::new(&[10, 0, 0, 0], LittleEndian)),
+ }))
+ );
+
+ // A BaseAddressx + OffsetPair
+ assert_eq!(
+ locations.next(),
+ Ok(Some(LocationListEntry {
+ range: Range {
+ begin: 0x0301_0100,
+ end: 0x0301_0200,
+ },
+ data: Expression(EndianSlice::new(&[11, 0, 0, 0], LittleEndian)),
+ }))
+ );
+
+ // A StartxEndx
+ assert_eq!(
+ locations.next(),
+ Ok(Some(LocationListEntry {
+ range: Range {
+ begin: 0x0301_0300,
+ end: 0x0301_0400,
+ },
+ data: Expression(EndianSlice::new(&[12, 0, 0, 0], LittleEndian)),
+ }))
+ );
+
+ // A StartxLength
+ assert_eq!(
+ locations.next(),
+ Ok(Some(LocationListEntry {
+ range: Range {
+ begin: 0x0301_0500,
+ end: 0x0301_0600,
+ },
+ data: Expression(EndianSlice::new(&[13, 0, 0, 0], LittleEndian)),
+ }))
+ );
+
+ // A location list end.
+ assert_eq!(locations.next(), Ok(None));
+
+ // An offset at the end of buf.
+ let mut locations = loclists
+ .locations(
+ LocationListsOffset(buf.len()),
+ encoding,
+ 0x0100_0000,
+ debug_addr,
+ debug_addr_base,
+ )
+ .unwrap();
+ assert_eq!(locations.next(), Ok(None));
+ }
+
+ #[test]
+ fn test_loclists_64() {
+ let encoding = Encoding {
+ format: Format::Dwarf64,
+ version: 5,
+ address_size: 8,
+ };
+
+ let section = Section::with_endian(Endian::Little)
+ .L64(0x0300_0000)
+ .L64(0x0301_0300)
+ .L64(0x0301_0400)
+ .L64(0x0301_0500);
+ let buf = section.get_contents().unwrap();
+ let debug_addr = &DebugAddr::from(EndianSlice::new(&buf, LittleEndian));
+ let debug_addr_base = DebugAddrBase(0);
+
+ let start = Label::new();
+ let first = Label::new();
+ let size = Label::new();
+ #[rustfmt::skip]
+ let section = Section::with_endian(Endian::Little)
+ // Header
+ .mark(&start)
+ .L32(0xffff_ffff)
+ .L64(&size)
+ .L16(encoding.version)
+ .L8(encoding.address_size)
+ .L8(0)
+ .L32(0)
+ .mark(&first)
+ // OffsetPair
+ .L8(4).uleb(0x10200).uleb(0x10300).uleb(4).L32(2)
+ // A base address selection followed by an OffsetPair.
+ .L8(6).L64(0x0200_0000)
+ .L8(4).uleb(0x10400).uleb(0x10500).uleb(4).L32(3)
+ // An empty OffsetPair followed by a normal OffsetPair.
+ .L8(4).uleb(0x10600).uleb(0x10600).uleb(4).L32(4)
+ .L8(4).uleb(0x10800).uleb(0x10900).uleb(4).L32(5)
+ // A StartEnd
+ .L8(7).L64(0x201_0a00).L64(0x201_0b00).uleb(4).L32(6)
+ // A StartLength
+ .L8(8).L64(0x201_0c00).uleb(0x100).uleb(4).L32(7)
+ // An OffsetPair that starts at 0.
+ .L8(4).uleb(0).uleb(1).uleb(4).L32(8)
+ // An OffsetPair that ends at -1.
+ .L8(6).L64(0)
+ .L8(4).uleb(0).uleb(0xffff_ffff).uleb(4).L32(9)
+ // A DefaultLocation
+ .L8(5).uleb(4).L32(10)
+ // A BaseAddressx + OffsetPair
+ .L8(1).uleb(0)
+ .L8(4).uleb(0x10100).uleb(0x10200).uleb(4).L32(11)
+ // A StartxEndx
+ .L8(2).uleb(1).uleb(2).uleb(4).L32(12)
+ // A StartxLength
+ .L8(3).uleb(3).uleb(0x100).uleb(4).L32(13)
+ // A range end.
+ .L8(0)
+ // Some extra data.
+ .L32(0xffff_ffff);
+ size.set_const((&section.here() - &start - 12) as u64);
+
+ let buf = section.get_contents().unwrap();
+ let debug_loc = DebugLoc::new(&[], LittleEndian);
+ let debug_loclists = DebugLocLists::new(&buf, LittleEndian);
+ let loclists = LocationLists::new(debug_loc, debug_loclists);
+ let offset = LocationListsOffset((&first - &start) as usize);
+ let mut locations = loclists
+ .locations(offset, encoding, 0x0100_0000, debug_addr, debug_addr_base)
+ .unwrap();
+
+ // A normal location.
+ assert_eq!(
+ locations.next(),
+ Ok(Some(LocationListEntry {
+ range: Range {
+ begin: 0x0101_0200,
+ end: 0x0101_0300,
+ },
+ data: Expression(EndianSlice::new(&[2, 0, 0, 0], LittleEndian)),
+ }))
+ );
+
+ // A base address selection followed by a normal location.
+ assert_eq!(
+ locations.next(),
+ Ok(Some(LocationListEntry {
+ range: Range {
+ begin: 0x0201_0400,
+ end: 0x0201_0500,
+ },
+ data: Expression(EndianSlice::new(&[3, 0, 0, 0], LittleEndian)),
+ }))
+ );
+
+ // An empty location range followed by a normal location.
+ assert_eq!(
+ locations.next(),
+ Ok(Some(LocationListEntry {
+ range: Range {
+ begin: 0x0201_0600,
+ end: 0x0201_0600,
+ },
+ data: Expression(EndianSlice::new(&[4, 0, 0, 0], LittleEndian)),
+ }))
+ );
+ assert_eq!(
+ locations.next(),
+ Ok(Some(LocationListEntry {
+ range: Range {
+ begin: 0x0201_0800,
+ end: 0x0201_0900,
+ },
+ data: Expression(EndianSlice::new(&[5, 0, 0, 0], LittleEndian)),
+ }))
+ );
+
+ // A normal location.
+ assert_eq!(
+ locations.next(),
+ Ok(Some(LocationListEntry {
+ range: Range {
+ begin: 0x0201_0a00,
+ end: 0x0201_0b00,
+ },
+ data: Expression(EndianSlice::new(&[6, 0, 0, 0], LittleEndian)),
+ }))
+ );
+
+ // A normal location.
+ assert_eq!(
+ locations.next(),
+ Ok(Some(LocationListEntry {
+ range: Range {
+ begin: 0x0201_0c00,
+ end: 0x0201_0d00,
+ },
+ data: Expression(EndianSlice::new(&[7, 0, 0, 0], LittleEndian)),
+ }))
+ );
+
+ // A location range that starts at 0.
+ assert_eq!(
+ locations.next(),
+ Ok(Some(LocationListEntry {
+ range: Range {
+ begin: 0x0200_0000,
+ end: 0x0200_0001,
+ },
+ data: Expression(EndianSlice::new(&[8, 0, 0, 0], LittleEndian)),
+ }))
+ );
+
+ // A location range that ends at -1.
+ assert_eq!(
+ locations.next(),
+ Ok(Some(LocationListEntry {
+ range: Range {
+ begin: 0x0000_0000,
+ end: 0xffff_ffff,
+ },
+ data: Expression(EndianSlice::new(&[9, 0, 0, 0], LittleEndian)),
+ }))
+ );
+
+ // A DefaultLocation.
+ assert_eq!(
+ locations.next(),
+ Ok(Some(LocationListEntry {
+ range: Range {
+ begin: 0,
+ end: u64::max_value(),
+ },
+ data: Expression(EndianSlice::new(&[10, 0, 0, 0], LittleEndian)),
+ }))
+ );
+
+ // A BaseAddressx + OffsetPair
+ assert_eq!(
+ locations.next(),
+ Ok(Some(LocationListEntry {
+ range: Range {
+ begin: 0x0301_0100,
+ end: 0x0301_0200,
+ },
+ data: Expression(EndianSlice::new(&[11, 0, 0, 0], LittleEndian)),
+ }))
+ );
+
+ // A StartxEndx
+ assert_eq!(
+ locations.next(),
+ Ok(Some(LocationListEntry {
+ range: Range {
+ begin: 0x0301_0300,
+ end: 0x0301_0400,
+ },
+ data: Expression(EndianSlice::new(&[12, 0, 0, 0], LittleEndian)),
+ }))
+ );
+
+ // A StartxLength
+ assert_eq!(
+ locations.next(),
+ Ok(Some(LocationListEntry {
+ range: Range {
+ begin: 0x0301_0500,
+ end: 0x0301_0600,
+ },
+ data: Expression(EndianSlice::new(&[13, 0, 0, 0], LittleEndian)),
+ }))
+ );
+
+ // A location list end.
+ assert_eq!(locations.next(), Ok(None));
+
+ // An offset at the end of buf.
+ let mut locations = loclists
+ .locations(
+ LocationListsOffset(buf.len()),
+ encoding,
+ 0x0100_0000,
+ debug_addr,
+ debug_addr_base,
+ )
+ .unwrap();
+ assert_eq!(locations.next(), Ok(None));
+ }
+
+ #[test]
+ fn test_location_list_32() {
+ let start = Label::new();
+ let first = Label::new();
+ #[rustfmt::skip]
+ let section = Section::with_endian(Endian::Little)
+ // A location before the offset.
+ .mark(&start)
+ .L32(0x10000).L32(0x10100).L16(4).L32(1)
+ .mark(&first)
+ // A normal location.
+ .L32(0x10200).L32(0x10300).L16(4).L32(2)
+ // A base address selection followed by a normal location.
+ .L32(0xffff_ffff).L32(0x0200_0000)
+ .L32(0x10400).L32(0x10500).L16(4).L32(3)
+ // An empty location range followed by a normal location.
+ .L32(0x10600).L32(0x10600).L16(4).L32(4)
+ .L32(0x10800).L32(0x10900).L16(4).L32(5)
+ // A location range that starts at 0.
+ .L32(0).L32(1).L16(4).L32(6)
+ // A location range that ends at -1.
+ .L32(0xffff_ffff).L32(0x0000_0000)
+ .L32(0).L32(0xffff_ffff).L16(4).L32(7)
+ // A location list end.
+ .L32(0).L32(0)
+ // Some extra data.
+ .L32(0);
+
+ let buf = section.get_contents().unwrap();
+ let debug_loc = DebugLoc::new(&buf, LittleEndian);
+ let debug_loclists = DebugLocLists::new(&[], LittleEndian);
+ let loclists = LocationLists::new(debug_loc, debug_loclists);
+ let offset = LocationListsOffset((&first - &start) as usize);
+ let debug_addr = &DebugAddr::from(EndianSlice::new(&[], LittleEndian));
+ let debug_addr_base = DebugAddrBase(0);
+ let encoding = Encoding {
+ format: Format::Dwarf32,
+ version: 4,
+ address_size: 4,
+ };
+ let mut locations = loclists
+ .locations(offset, encoding, 0x0100_0000, debug_addr, debug_addr_base)
+ .unwrap();
+
+ // A normal location.
+ assert_eq!(
+ locations.next(),
+ Ok(Some(LocationListEntry {
+ range: Range {
+ begin: 0x0101_0200,
+ end: 0x0101_0300,
+ },
+ data: Expression(EndianSlice::new(&[2, 0, 0, 0], LittleEndian)),
+ }))
+ );
+
+ // A base address selection followed by a normal location.
+ assert_eq!(
+ locations.next(),
+ Ok(Some(LocationListEntry {
+ range: Range {
+ begin: 0x0201_0400,
+ end: 0x0201_0500,
+ },
+ data: Expression(EndianSlice::new(&[3, 0, 0, 0], LittleEndian)),
+ }))
+ );
+
+ // An empty location range followed by a normal location.
+ assert_eq!(
+ locations.next(),
+ Ok(Some(LocationListEntry {
+ range: Range {
+ begin: 0x0201_0600,
+ end: 0x0201_0600,
+ },
+ data: Expression(EndianSlice::new(&[4, 0, 0, 0], LittleEndian)),
+ }))
+ );
+ assert_eq!(
+ locations.next(),
+ Ok(Some(LocationListEntry {
+ range: Range {
+ begin: 0x0201_0800,
+ end: 0x0201_0900,
+ },
+ data: Expression(EndianSlice::new(&[5, 0, 0, 0], LittleEndian)),
+ }))
+ );
+
+ // A location range that starts at 0.
+ assert_eq!(
+ locations.next(),
+ Ok(Some(LocationListEntry {
+ range: Range {
+ begin: 0x0200_0000,
+ end: 0x0200_0001,
+ },
+ data: Expression(EndianSlice::new(&[6, 0, 0, 0], LittleEndian)),
+ }))
+ );
+
+ // A location range that ends at -1.
+ assert_eq!(
+ locations.next(),
+ Ok(Some(LocationListEntry {
+ range: Range {
+ begin: 0x0000_0000,
+ end: 0xffff_ffff,
+ },
+ data: Expression(EndianSlice::new(&[7, 0, 0, 0], LittleEndian)),
+ }))
+ );
+
+ // A location list end.
+ assert_eq!(locations.next(), Ok(None));
+
+ // An offset at the end of buf.
+ let mut locations = loclists
+ .locations(
+ LocationListsOffset(buf.len()),
+ encoding,
+ 0x0100_0000,
+ debug_addr,
+ debug_addr_base,
+ )
+ .unwrap();
+ assert_eq!(locations.next(), Ok(None));
+ }
+
+ #[test]
+ fn test_location_list_64() {
+ let start = Label::new();
+ let first = Label::new();
+ #[rustfmt::skip]
+ let section = Section::with_endian(Endian::Little)
+ // A location before the offset.
+ .mark(&start)
+ .L64(0x10000).L64(0x10100).L16(4).L32(1)
+ .mark(&first)
+ // A normal location.
+ .L64(0x10200).L64(0x10300).L16(4).L32(2)
+ // A base address selection followed by a normal location.
+ .L64(0xffff_ffff_ffff_ffff).L64(0x0200_0000)
+ .L64(0x10400).L64(0x10500).L16(4).L32(3)
+ // An empty location range followed by a normal location.
+ .L64(0x10600).L64(0x10600).L16(4).L32(4)
+ .L64(0x10800).L64(0x10900).L16(4).L32(5)
+ // A location range that starts at 0.
+ .L64(0).L64(1).L16(4).L32(6)
+ // A location range that ends at -1.
+ .L64(0xffff_ffff_ffff_ffff).L64(0x0000_0000)
+ .L64(0).L64(0xffff_ffff_ffff_ffff).L16(4).L32(7)
+ // A location list end.
+ .L64(0).L64(0)
+ // Some extra data.
+ .L64(0);
+
+ let buf = section.get_contents().unwrap();
+ let debug_loc = DebugLoc::new(&buf, LittleEndian);
+ let debug_loclists = DebugLocLists::new(&[], LittleEndian);
+ let loclists = LocationLists::new(debug_loc, debug_loclists);
+ let offset = LocationListsOffset((&first - &start) as usize);
+ let debug_addr = &DebugAddr::from(EndianSlice::new(&[], LittleEndian));
+ let debug_addr_base = DebugAddrBase(0);
+ let encoding = Encoding {
+ format: Format::Dwarf64,
+ version: 4,
+ address_size: 8,
+ };
+ let mut locations = loclists
+ .locations(offset, encoding, 0x0100_0000, debug_addr, debug_addr_base)
+ .unwrap();
+
+ // A normal location.
+ assert_eq!(
+ locations.next(),
+ Ok(Some(LocationListEntry {
+ range: Range {
+ begin: 0x0101_0200,
+ end: 0x0101_0300,
+ },
+ data: Expression(EndianSlice::new(&[2, 0, 0, 0], LittleEndian)),
+ }))
+ );
+
+ // A base address selection followed by a normal location.
+ assert_eq!(
+ locations.next(),
+ Ok(Some(LocationListEntry {
+ range: Range {
+ begin: 0x0201_0400,
+ end: 0x0201_0500,
+ },
+ data: Expression(EndianSlice::new(&[3, 0, 0, 0], LittleEndian)),
+ }))
+ );
+
+ // An empty location range followed by a normal location.
+ assert_eq!(
+ locations.next(),
+ Ok(Some(LocationListEntry {
+ range: Range {
+ begin: 0x0201_0600,
+ end: 0x0201_0600,
+ },
+ data: Expression(EndianSlice::new(&[4, 0, 0, 0], LittleEndian)),
+ }))
+ );
+ assert_eq!(
+ locations.next(),
+ Ok(Some(LocationListEntry {
+ range: Range {
+ begin: 0x0201_0800,
+ end: 0x0201_0900,
+ },
+ data: Expression(EndianSlice::new(&[5, 0, 0, 0], LittleEndian)),
+ }))
+ );
+
+ // A location range that starts at 0.
+ assert_eq!(
+ locations.next(),
+ Ok(Some(LocationListEntry {
+ range: Range {
+ begin: 0x0200_0000,
+ end: 0x0200_0001,
+ },
+ data: Expression(EndianSlice::new(&[6, 0, 0, 0], LittleEndian)),
+ }))
+ );
+
+ // A location range that ends at -1.
+ assert_eq!(
+ locations.next(),
+ Ok(Some(LocationListEntry {
+ range: Range {
+ begin: 0x0,
+ end: 0xffff_ffff_ffff_ffff,
+ },
+ data: Expression(EndianSlice::new(&[7, 0, 0, 0], LittleEndian)),
+ }))
+ );
+
+ // A location list end.
+ assert_eq!(locations.next(), Ok(None));
+
+ // An offset at the end of buf.
+ let mut locations = loclists
+ .locations(
+ LocationListsOffset(buf.len()),
+ encoding,
+ 0x0100_0000,
+ debug_addr,
+ debug_addr_base,
+ )
+ .unwrap();
+ assert_eq!(locations.next(), Ok(None));
+ }
+
+ #[test]
+ fn test_locations_invalid() {
+ #[rustfmt::skip]
+ let section = Section::with_endian(Endian::Little)
+ // An invalid location range.
+ .L32(0x20000).L32(0x10000).L16(4).L32(1)
+ // An invalid range after wrapping.
+ .L32(0x20000).L32(0xff01_0000).L16(4).L32(2);
+
+ let buf = section.get_contents().unwrap();
+ let debug_loc = DebugLoc::new(&buf, LittleEndian);
+ let debug_loclists = DebugLocLists::new(&[], LittleEndian);
+ let loclists = LocationLists::new(debug_loc, debug_loclists);
+ let debug_addr = &DebugAddr::from(EndianSlice::new(&[], LittleEndian));
+ let debug_addr_base = DebugAddrBase(0);
+ let encoding = Encoding {
+ format: Format::Dwarf32,
+ version: 4,
+ address_size: 4,
+ };
+
+ // An invalid location range.
+ let mut locations = loclists
+ .locations(
+ LocationListsOffset(0x0),
+ encoding,
+ 0x0100_0000,
+ debug_addr,
+ debug_addr_base,
+ )
+ .unwrap();
+ assert_eq!(locations.next(), Err(Error::InvalidLocationAddressRange));
+
+ // An invalid location range after wrapping.
+ let mut locations = loclists
+ .locations(
+ LocationListsOffset(14),
+ encoding,
+ 0x0100_0000,
+ debug_addr,
+ debug_addr_base,
+ )
+ .unwrap();
+ assert_eq!(locations.next(), Err(Error::InvalidLocationAddressRange));
+
+ // An invalid offset.
+ match loclists.locations(
+ LocationListsOffset(buf.len() + 1),
+ encoding,
+ 0x0100_0000,
+ debug_addr,
+ debug_addr_base,
+ ) {
+ Err(Error::UnexpectedEof(_)) => {}
+ otherwise => panic!("Unexpected result: {:?}", otherwise),
+ }
+ }
+
+ #[test]
+ fn test_get_offset() {
+ for format in vec![Format::Dwarf32, Format::Dwarf64] {
+ let encoding = Encoding {
+ format,
+ version: 5,
+ address_size: 4,
+ };
+
+ let zero = Label::new();
+ let length = Label::new();
+ let start = Label::new();
+ let first = Label::new();
+ let end = Label::new();
+ let mut section = Section::with_endian(Endian::Little)
+ .mark(&zero)
+ .initial_length(format, &length, &start)
+ .D16(encoding.version)
+ .D8(encoding.address_size)
+ .D8(0)
+ .D32(20)
+ .mark(&first);
+ for i in 0..20 {
+ section = section.word(format.word_size(), 1000 + i);
+ }
+ section = section.mark(&end);
+ length.set_const((&end - &start) as u64);
+ let section = section.get_contents().unwrap();
+
+ let debug_loc = DebugLoc::from(EndianSlice::new(&[], LittleEndian));
+ let debug_loclists = DebugLocLists::from(EndianSlice::new(&section, LittleEndian));
+ let locations = LocationLists::new(debug_loc, debug_loclists);
+
+ let base = DebugLocListsBase((&first - &zero) as usize);
+ assert_eq!(
+ locations.get_offset(encoding, base, DebugLocListsIndex(0)),
+ Ok(LocationListsOffset(base.0 + 1000))
+ );
+ assert_eq!(
+ locations.get_offset(encoding, base, DebugLocListsIndex(19)),
+ Ok(LocationListsOffset(base.0 + 1019))
+ );
+ }
+ }
+
+ #[test]
+ fn test_loclists_gnu_v4_split_dwarf() {
+ #[rustfmt::skip]
+ let buf = [
+ 0x03, // DW_LLE_startx_length
+ 0x00, // ULEB encoded address index 0
+ 0x08, 0x00, 0x00, 0x00, // Fixed 4 byte length of 8
+ 0x03, 0x00, // Fixed two byte length of the location
+ 0x11, 0x00, // DW_OP_consts 0
+ 0x9f, // DW_OP_stack_value
+ // Padding data
+ //0x99, 0x99, 0x99, 0x99
+ ];
+ let data_buf = [0x11, 0x00, 0x9f];
+ let expected_data = EndianSlice::new(&data_buf, LittleEndian);
+ let debug_loc = DebugLoc::new(&buf, LittleEndian);
+ let debug_loclists = DebugLocLists::new(&[], LittleEndian);
+ let loclists = LocationLists::new(debug_loc, debug_loclists);
+ let debug_addr =
+ &DebugAddr::from(EndianSlice::new(&[0x01, 0x02, 0x03, 0x04], LittleEndian));
+ let debug_addr_base = DebugAddrBase(0);
+ let encoding = Encoding {
+ format: Format::Dwarf32,
+ version: 4,
+ address_size: 4,
+ };
+
+ // Parse the DW_LLE_startx_length entry from the split-dwarf location list.
+ let mut locations = loclists
+ .locations_dwo(
+ LocationListsOffset(0x0),
+ encoding,
+ 0,
+ debug_addr,
+ debug_addr_base,
+ )
+ .unwrap();
+ assert_eq!(
+ locations.next(),
+ Ok(Some(LocationListEntry {
+ range: Range {
+ begin: 0x0403_0201,
+ end: 0x0403_0209
+ },
+ data: Expression(expected_data),
+ }))
+ );
+ }
+}
diff --git a/vendor/gimli-0.26.2/src/read/lookup.rs b/vendor/gimli-0.26.2/src/read/lookup.rs
new file mode 100644
index 000000000..1d082f24f
--- /dev/null
+++ b/vendor/gimli-0.26.2/src/read/lookup.rs
@@ -0,0 +1,202 @@
+use core::marker::PhantomData;
+
+use crate::common::{DebugInfoOffset, Format};
+use crate::read::{parse_debug_info_offset, Error, Reader, ReaderOffset, Result, UnitOffset};
+
+// The various "Accelerated Access" sections (DWARF standard v4 Section 6.1) all have
+// similar structures. They consist of a header with metadata and an offset into the
+// .debug_info section for the entire compilation unit, and a series
+// of following entries that list addresses (for .debug_aranges) or names
+// (for .debug_pubnames and .debug_pubtypes) that are covered.
+//
+// Because these three tables all have similar structures, we abstract out some of
+// the parsing mechanics.
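+//
+// As a rough usage sketch (illustrative only; it assumes the public
+// `DebugPubNames` wrapper and its `PubNamesEntry` accessors, which are built
+// on top of this abstraction), a consumer drains one of these tables with:
+//
+//     let pubnames = gimli::DebugPubNames::new(&buf, gimli::LittleEndian);
+//     let mut items = pubnames.items();
+//     while let Some(entry) = items.next()? {
+//         println!("{:?} at {:?}", entry.name(), entry.die_offset());
+//     }
+//
+// Each table only has to supply a `LookupParser` implementation; the header
+// and entry iteration below is shared.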
+
+pub trait LookupParser<R: Reader> {
+ /// The type of the produced header.
+ type Header;
+ /// The type of the produced entry.
+ type Entry;
+
+ /// Parse a header from `input`. Returns a tuple of `input` sliced to contain just the entries
+ /// corresponding to this header (without the header itself), and the parsed representation of
+ /// the header itself.
+ fn parse_header(input: &mut R) -> Result<(R, Self::Header)>;
+
+ /// Parse a single entry from `input`. Returns either a parsed representation of the entry
+ /// or None if `input` is exhausted.
+ fn parse_entry(input: &mut R, header: &Self::Header) -> Result<Option<Self::Entry>>;
+}
+
+#[derive(Clone, Debug)]
+pub struct DebugLookup<R, Parser>
+where
+ R: Reader,
+ Parser: LookupParser<R>,
+{
+ input_buffer: R,
+ phantom: PhantomData<Parser>,
+}
+
+impl<R, Parser> From<R> for DebugLookup<R, Parser>
+where
+ R: Reader,
+ Parser: LookupParser<R>,
+{
+ fn from(input_buffer: R) -> Self {
+ DebugLookup {
+ input_buffer,
+ phantom: PhantomData,
+ }
+ }
+}
+
+impl<R, Parser> DebugLookup<R, Parser>
+where
+ R: Reader,
+ Parser: LookupParser<R>,
+{
+ pub fn items(&self) -> LookupEntryIter<R, Parser> {
+ LookupEntryIter {
+ current_set: None,
+ remaining_input: self.input_buffer.clone(),
+ }
+ }
+
+ pub fn reader(&self) -> &R {
+ &self.input_buffer
+ }
+}
+
+#[derive(Clone, Debug)]
+pub struct LookupEntryIter<R, Parser>
+where
+ R: Reader,
+ Parser: LookupParser<R>,
+{
+ current_set: Option<(R, Parser::Header)>, // `None` only at the very beginning and end.
+ remaining_input: R,
+}
+
+impl<R, Parser> LookupEntryIter<R, Parser>
+where
+ R: Reader,
+ Parser: LookupParser<R>,
+{
+ /// Advance the iterator and return the next entry.
+ ///
+ /// Returns the newly parsed entry as `Ok(Some(Parser::Entry))`. Returns
+ /// `Ok(None)` when iteration is complete and all entries have already been
+ /// parsed and yielded. If an error occurs while parsing the next entry,
+ /// then this error is returned as `Err(e)`, and all subsequent calls return
+ /// `Ok(None)`.
+ ///
+ /// Can be [used with `FallibleIterator`](./index.html#using-with-fallibleiterator).
+ pub fn next(&mut self) -> Result<Option<Parser::Entry>> {
+ loop {
+ if let Some((ref mut input, ref header)) = self.current_set {
+ if !input.is_empty() {
+ match Parser::parse_entry(input, header) {
+ Ok(Some(entry)) => return Ok(Some(entry)),
+ Ok(None) => {}
+ Err(e) => {
+ input.empty();
+ self.remaining_input.empty();
+ return Err(e);
+ }
+ }
+ }
+ }
+ if self.remaining_input.is_empty() {
+ self.current_set = None;
+ return Ok(None);
+ }
+ match Parser::parse_header(&mut self.remaining_input) {
+ Ok(set) => {
+ self.current_set = Some(set);
+ }
+ Err(e) => {
+ self.current_set = None;
+ self.remaining_input.empty();
+ return Err(e);
+ }
+ }
+ }
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct PubStuffHeader<T = usize> {
+ format: Format,
+ length: T,
+ version: u16,
+ unit_offset: DebugInfoOffset<T>,
+ unit_length: T,
+}
+
+pub trait PubStuffEntry<R: Reader> {
+ fn new(
+ die_offset: UnitOffset<R::Offset>,
+ name: R,
+ unit_header_offset: DebugInfoOffset<R::Offset>,
+ ) -> Self;
+}
+
+#[derive(Clone, Debug)]
+pub struct PubStuffParser<R, Entry>
+where
+ R: Reader,
+ Entry: PubStuffEntry<R>,
+{
+ // This struct is never instantiated.
+ phantom: PhantomData<(R, Entry)>,
+}
+
+impl<R, Entry> LookupParser<R> for PubStuffParser<R, Entry>
+where
+ R: Reader,
+ Entry: PubStuffEntry<R>,
+{
+ type Header = PubStuffHeader<R::Offset>;
+ type Entry = Entry;
+
+ /// Parse a pubthings set header. Returns a tuple of the
+ /// pubthings to be parsed for this set, and the newly created `PubStuffHeader` struct.
+ fn parse_header(input: &mut R) -> Result<(R, Self::Header)> {
+ let (length, format) = input.read_initial_length()?;
+ let mut rest = input.split(length)?;
+
+ let version = rest.read_u16()?;
+ if version != 2 {
+ return Err(Error::UnknownVersion(u64::from(version)));
+ }
+
+ let unit_offset = parse_debug_info_offset(&mut rest, format)?;
+ let unit_length = rest.read_length(format)?;
+
+ let header = PubStuffHeader {
+ format,
+ length,
+ version,
+ unit_offset,
+ unit_length,
+ };
+ Ok((rest, header))
+ }
+
+ /// Parse a single pubthing. Return `None` for the null pubthing, `Some` for an actual pubthing.
+ fn parse_entry(input: &mut R, header: &Self::Header) -> Result<Option<Self::Entry>> {
+ let offset = input.read_offset(header.format)?;
+ if offset.into_u64() == 0 {
+ input.empty();
+ Ok(None)
+ } else {
+ let name = input.read_null_terminated_slice()?;
+ Ok(Some(Self::Entry::new(
+ UnitOffset(offset),
+ name,
+ header.unit_offset,
+ )))
+ }
+ }
+}
diff --git a/vendor/gimli-0.26.2/src/read/mod.rs b/vendor/gimli-0.26.2/src/read/mod.rs
new file mode 100644
index 000000000..3110957c2
--- /dev/null
+++ b/vendor/gimli-0.26.2/src/read/mod.rs
@@ -0,0 +1,821 @@
+//! Read DWARF debugging information.
+//!
+//! * [Example Usage](#example-usage)
+//! * [API Structure](#api-structure)
+//! * [Using with `FallibleIterator`](#using-with-fallibleiterator)
+//!
+//! ## Example Usage
+//!
+//! Print out all of the functions in the debuggee program:
+//!
+//! ```rust,no_run
+//! # fn example() -> Result<(), gimli::Error> {
+//! # type R = gimli::EndianSlice<'static, gimli::LittleEndian>;
+//! # let get_file_section_reader = |name| -> Result<R, gimli::Error> { unimplemented!() };
+//! # let get_sup_file_section_reader = |name| -> Result<R, gimli::Error> { unimplemented!() };
+//! // Read the DWARF sections with whatever object loader you're using.
+//! // These closures should return a `Reader` instance (e.g. `EndianSlice`).
+//! let loader = |section: gimli::SectionId| { get_file_section_reader(section.name()) };
+//! let sup_loader = |section: gimli::SectionId| { get_sup_file_section_reader(section.name()) };
+//! let mut dwarf = gimli::Dwarf::load(loader)?;
+//! dwarf.load_sup(sup_loader)?;
+//!
+//! // Iterate over all compilation units.
+//! let mut iter = dwarf.units();
+//! while let Some(header) = iter.next()? {
+//! // Parse the abbreviations and other information for this compilation unit.
+//! let unit = dwarf.unit(header)?;
+//!
+//! // Iterate over all of this compilation unit's entries.
+//! let mut entries = unit.entries();
+//! while let Some((_, entry)) = entries.next_dfs()? {
+//! // If we find an entry for a function, print it.
+//! if entry.tag() == gimli::DW_TAG_subprogram {
+//! println!("Found a function: {:?}", entry);
+//! }
+//! }
+//! }
+//! # unreachable!()
+//! # }
+//! ```
+//!
+//! Full example programs:
+//!
+//! * [A simple parser](https://github.com/gimli-rs/gimli/blob/master/examples/simple.rs)
+//!
+//! * [A `dwarfdump`
+//! clone](https://github.com/gimli-rs/gimli/blob/master/examples/dwarfdump.rs)
+//!
+//! * [An `addr2line` clone](https://github.com/gimli-rs/addr2line)
+//!
+//! * [`ddbug`](https://github.com/gimli-rs/ddbug), a utility giving insight into
+//! code generation by making debugging information readable
+//!
+//! * [`dwprod`](https://github.com/fitzgen/dwprod), a tiny utility to list the
+//! compilers used to create each compilation unit within a shared library or
+//! executable (via `DW_AT_producer`)
+//!
+//! * [`dwarf-validate`](https://github.com/gimli-rs/gimli/blob/master/examples/dwarf-validate.rs),
+//! a program to validate the integrity of some DWARF and its references
+//! between sections and compilation units.
+//!
+//! ## API Structure
+//!
+//! * Basic familiarity with DWARF is assumed.
+//!
+//! * The [`Dwarf`](./struct.Dwarf.html) type contains the commonly used DWARF
+//! sections. It has methods that simplify access to debugging data that spans
+//! multiple sections. Use of this type is optional, but recommended.
+//!
+//! * Each section gets its own type. Consider these types the entry points to
+//! the library:
+//!
+//! * [`DebugAbbrev`](./struct.DebugAbbrev.html): The `.debug_abbrev` section.
+//!
+//! * [`DebugAddr`](./struct.DebugAddr.html): The `.debug_addr` section.
+//!
+//! * [`DebugAranges`](./struct.DebugAranges.html): The `.debug_aranges`
+//! section.
+//!
+//! * [`DebugFrame`](./struct.DebugFrame.html): The `.debug_frame` section.
+//!
+//! * [`DebugInfo`](./struct.DebugInfo.html): The `.debug_info` section.
+//!
+//! * [`DebugLine`](./struct.DebugLine.html): The `.debug_line` section.
+//!
+//! * [`DebugLineStr`](./struct.DebugLineStr.html): The `.debug_line_str` section.
+//!
+//! * [`DebugLoc`](./struct.DebugLoc.html): The `.debug_loc` section.
+//!
+//! * [`DebugLocLists`](./struct.DebugLocLists.html): The `.debug_loclists` section.
+//!
+//! * [`DebugPubNames`](./struct.DebugPubNames.html): The `.debug_pubnames`
+//! section.
+//!
+//! * [`DebugPubTypes`](./struct.DebugPubTypes.html): The `.debug_pubtypes`
+//! section.
+//!
+//! * [`DebugRanges`](./struct.DebugRanges.html): The `.debug_ranges` section.
+//!
+//! * [`DebugRngLists`](./struct.DebugRngLists.html): The `.debug_rnglists` section.
+//!
+//! * [`DebugStr`](./struct.DebugStr.html): The `.debug_str` section.
+//!
+//! * [`DebugStrOffsets`](./struct.DebugStrOffsets.html): The `.debug_str_offsets` section.
+//!
+//! * [`DebugTypes`](./struct.DebugTypes.html): The `.debug_types` section.
+//!
+//! * [`DebugCuIndex`](./struct.DebugCuIndex.html): The `.debug_cu_index` section.
+//!
+//! * [`DebugTuIndex`](./struct.DebugTuIndex.html): The `.debug_tu_index` section.
+//!
+//! * [`EhFrame`](./struct.EhFrame.html): The `.eh_frame` section.
+//!
+//! * [`EhFrameHdr`](./struct.EhFrameHdr.html): The `.eh_frame_hdr` section.
+//!
+//! * Each section type exposes methods for accessing the debugging data encoded
+//! in that section. For example, the [`DebugInfo`](./struct.DebugInfo.html)
+//! struct has the [`units`](./struct.DebugInfo.html#method.units) method for
+//! iterating over the compilation units defined within it.
+//!
+//! * Offsets into a section are strongly typed: an offset into `.debug_info` is
+//! the [`DebugInfoOffset`](./struct.DebugInfoOffset.html) type. It cannot be
+//! used to index into the [`DebugLine`](./struct.DebugLine.html) type because
+//! `DebugLine` represents the `.debug_line` section. There are similar types
+//! for offsets relative to a compilation unit rather than a section.
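+//!
+//! A minimal sketch of what this buys (illustrative only; these offset
+//! types are plain newtype wrappers around an integer):
+//!
+//! ```rust
+//! let info_offset = gimli::DebugInfoOffset(0x20usize);
+//! let line_offset = gimli::DebugLineOffset(0x20usize);
+//! // Both wrap the same number, but they are distinct types, so an API
+//! // that expects a `.debug_info` offset will not accept `line_offset`.
+//! assert_eq!(info_offset.0, line_offset.0);
+//! ```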
+//!
+//! ## Using with `FallibleIterator`
+//!
+//! The standard library's `Iterator` trait and related APIs do not play well
+//! with iterators where the `next` operation is fallible. One can make the
+//! `Iterator`'s associated `Item` type be a `Result<T, E>`, however the
+//! provided methods cannot gracefully handle the case when an `Err` is
+//! returned.
+//!
+//! This situation led to the
+//! [`fallible-iterator`](https://crates.io/crates/fallible-iterator) crate's
+//! existence. You can read more of the rationale for its existence in its
+//! docs. The crate provides the helpers you have come to expect (e.g. `map`,
+//! `filter`, etc.) for iterators that can fail.
+//!
+//! `gimli`'s many lazy parsing iterators are a perfect match for the
+//! `fallible-iterator` crate's `FallibleIterator` trait because parsing is not
+//! done eagerly. Parse errors later in the input might only be discovered after
+//! having iterated through many items.
+//!
+//! To use `gimli` iterators with `FallibleIterator`, import the crate and trait
+//! into your code:
+//!
+//! ```
+//! # #[cfg(feature = "fallible-iterator")]
+//! # fn foo() {
+//! // Use the `FallibleIterator` trait so its methods are in scope!
+//! use fallible_iterator::FallibleIterator;
+//! use gimli::{DebugAranges, EndianSlice, LittleEndian};
+//!
+//! fn find_sum_of_address_range_lengths(aranges: DebugAranges<EndianSlice<LittleEndian>>)
+//! -> gimli::Result<u64>
+//! {
+//! // `DebugAranges::headers` returns a `FallibleIterator`!
+//! aranges.headers()
+//! // `flat_map` is provided by `FallibleIterator`!
+//! .flat_map(|header| Ok(header.entries()))
+//! // `map` is provided by `FallibleIterator`!
+//! .map(|arange| Ok(arange.length()))
+//! // `fold` is provided by `FallibleIterator`!
+//! .fold(0, |sum, len| Ok(sum + len))
+//! }
+//! # }
+//! # fn main() {}
+//! ```
+
+use core::fmt::{self, Debug};
+use core::result;
+#[cfg(feature = "std")]
+use std::{error, io};
+
+use crate::common::{Register, SectionId};
+use crate::constants;
+
+mod util;
+pub use util::*;
+
+mod addr;
+pub use self::addr::*;
+
+mod cfi;
+pub use self::cfi::*;
+
+#[cfg(feature = "read")]
+mod dwarf;
+#[cfg(feature = "read")]
+pub use self::dwarf::*;
+
+mod endian_slice;
+pub use self::endian_slice::*;
+
+#[cfg(feature = "endian-reader")]
+mod endian_reader;
+#[cfg(feature = "endian-reader")]
+pub use self::endian_reader::*;
+
+mod reader;
+pub use self::reader::*;
+
+#[cfg(feature = "read")]
+mod abbrev;
+#[cfg(feature = "read")]
+pub use self::abbrev::*;
+
+mod aranges;
+pub use self::aranges::*;
+
+mod index;
+pub use self::index::*;
+
+#[cfg(feature = "read")]
+mod line;
+#[cfg(feature = "read")]
+pub use self::line::*;
+
+mod lists;
+
+mod loclists;
+pub use self::loclists::*;
+
+#[cfg(feature = "read")]
+mod lookup;
+
+mod op;
+pub use self::op::*;
+
+#[cfg(feature = "read")]
+mod pubnames;
+#[cfg(feature = "read")]
+pub use self::pubnames::*;
+
+#[cfg(feature = "read")]
+mod pubtypes;
+#[cfg(feature = "read")]
+pub use self::pubtypes::*;
+
+mod rnglists;
+pub use self::rnglists::*;
+
+mod str;
+pub use self::str::*;
+
+/// An offset into the current compilation or type unit.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Hash)]
+pub struct UnitOffset<T = usize>(pub T);
+
+#[cfg(feature = "read")]
+mod unit;
+#[cfg(feature = "read")]
+pub use self::unit::*;
+
+mod value;
+pub use self::value::*;
+
+/// Indicates that storage should be allocated on the heap.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub struct StoreOnHeap;
+
+/// `EndianBuf` has been renamed to `EndianSlice`. For ease of upgrading across
+/// `gimli` versions, we export this type alias.
+#[deprecated(note = "EndianBuf has been renamed to EndianSlice, use that instead.")]
+pub type EndianBuf<'input, Endian> = EndianSlice<'input, Endian>;
+
+/// An error that occurred when parsing.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum Error {
+ /// An I/O error occurred while reading.
+ Io,
+ /// Found a PC relative pointer, but the section base is undefined.
+ PcRelativePointerButSectionBaseIsUndefined,
+ /// Found a `.text` relative pointer, but the `.text` base is undefined.
+ TextRelativePointerButTextBaseIsUndefined,
+ /// Found a data relative pointer, but the data base is undefined.
+ DataRelativePointerButDataBaseIsUndefined,
+ /// Found a function relative pointer in a context that does not have a
+ /// function base.
+ FuncRelativePointerInBadContext,
+ /// Cannot parse a pointer with a `DW_EH_PE_omit` encoding.
+ CannotParseOmitPointerEncoding,
+ /// An error parsing an unsigned LEB128 value.
+ BadUnsignedLeb128,
+ /// An error parsing a signed LEB128 value.
+ BadSignedLeb128,
+ /// An abbreviation declared that its tag is zero, but zero is reserved for
+ /// null records.
+ AbbreviationTagZero,
+ /// An attribute specification declared that its form is zero, but zero is
+ /// reserved for null records.
+ AttributeFormZero,
+ /// The abbreviation's has-children byte was not one of
+ /// `DW_CHILDREN_{yes,no}`.
+ BadHasChildren,
+ /// The specified length is impossible.
+ BadLength,
+ /// Found an unknown `DW_FORM_*` type.
+ UnknownForm,
+ /// Expected a zero, found something else.
+ ExpectedZero,
+ /// Found an abbreviation code that has already been used.
+ DuplicateAbbreviationCode,
+ /// Found a duplicate arange.
+ DuplicateArange,
+ /// Found an unknown reserved length value.
+ UnknownReservedLength,
+ /// Found an unknown DWARF version.
+ UnknownVersion(u64),
+ /// Found a record with an unknown abbreviation code.
+ UnknownAbbreviation,
+ /// Hit the end of input before it was expected.
+ UnexpectedEof(ReaderOffsetId),
+ /// Read a null entry before it was expected.
+ UnexpectedNull,
+ /// Found an unknown standard opcode.
+ UnknownStandardOpcode(constants::DwLns),
+ /// Found an unknown extended opcode.
+ UnknownExtendedOpcode(constants::DwLne),
+ /// The specified address size is not supported.
+ UnsupportedAddressSize(u8),
+ /// The specified offset size is not supported.
+ UnsupportedOffsetSize(u8),
+ /// The specified field size is not supported.
+ UnsupportedFieldSize(u8),
+ /// The minimum instruction length must not be zero.
+ MinimumInstructionLengthZero,
+ /// The maximum operations per instruction must not be zero.
+ MaximumOperationsPerInstructionZero,
+ /// The line range must not be zero.
+ LineRangeZero,
+ /// The opcode base must not be zero.
+ OpcodeBaseZero,
+ /// Found an invalid UTF-8 string.
+ BadUtf8,
+ /// Expected to find the CIE ID, but found something else.
+ NotCieId,
+ /// Expected to find a pointer to a CIE, but found the CIE ID instead.
+ NotCiePointer,
+ /// Expected to find a pointer to an FDE, but found a CIE instead.
+ NotFdePointer,
+ /// Invalid branch target for a DW_OP_bra or DW_OP_skip.
+ BadBranchTarget(u64),
+ /// DW_OP_push_object_address used but no address passed in.
+ InvalidPushObjectAddress,
+ /// Not enough items on the stack when evaluating an expression.
+ NotEnoughStackItems,
+ /// Too many iterations to compute the expression.
+ TooManyIterations,
+ /// An unrecognized operation was found while parsing a DWARF
+ /// expression.
+ InvalidExpression(constants::DwOp),
+ /// An unsupported operation was found while evaluating a DWARF expression.
+ UnsupportedEvaluation,
+ /// The expression had a piece followed by an expression
+ /// terminator without a piece.
+ InvalidPiece,
+ /// An expression-terminating operation was followed by something
+ /// other than the end of the expression or a piece operation.
+ InvalidExpressionTerminator(u64),
+ /// Division or modulus by zero when evaluating an expression.
+ DivisionByZero,
+ /// An expression operation used mismatching types.
+ TypeMismatch,
+ /// An expression operation required an integral type but saw a
+ /// floating point type.
+ IntegralTypeRequired,
+ /// An expression operation used types that are not supported.
+ UnsupportedTypeOperation,
+ /// The shift value in an expression must be a non-negative integer.
+ InvalidShiftExpression,
+ /// An unknown DW_CFA_* instruction.
+ UnknownCallFrameInstruction(constants::DwCfa),
+ /// The end of an address range was before the beginning.
+ InvalidAddressRange,
+ /// The end offset of a loc list entry was before the beginning.
+ InvalidLocationAddressRange,
+ /// Encountered a call frame instruction in a context in which it is not
+ /// valid.
+ CfiInstructionInInvalidContext,
+ /// When evaluating call frame instructions, found a `DW_CFA_restore_state`
+ /// stack pop instruction, but the stack was empty, and had nothing to pop.
+ PopWithEmptyStack,
+ /// Do not have unwind info for the given address.
+ NoUnwindInfoForAddress,
+ /// An offset value was larger than the maximum supported value.
+ UnsupportedOffset,
+ /// The given pointer encoding is either unknown or invalid.
+ UnknownPointerEncoding,
+ /// Did not find an entry at the given offset.
+ NoEntryAtGivenOffset,
+ /// The given offset is out of bounds.
+ OffsetOutOfBounds,
+ /// Found an unknown CFI augmentation.
+ UnknownAugmentation,
+ /// We do not support the given pointer encoding yet.
+ UnsupportedPointerEncoding,
+ /// Registers larger than `u16` are not supported.
+ UnsupportedRegister(u64),
+ /// The CFI program defined more register rules than we have storage for.
+ TooManyRegisterRules,
+ /// Attempted to push onto the CFI or evaluation stack, but it was already
+ /// at full capacity.
+ StackFull,
+ /// The `.eh_frame_hdr` binary search table claims to be variable-length encoded,
+ /// which makes binary search impossible.
+ VariableLengthSearchTable,
+ /// The `DW_UT_*` value for this unit is not supported yet.
+ UnsupportedUnitType,
+ /// Ranges using AddressIndex are not supported yet.
+ UnsupportedAddressIndex,
+ /// Nonzero segment selector sizes aren't supported yet.
+ UnsupportedSegmentSize,
+ /// A compilation unit or type unit is missing its top level DIE.
+ MissingUnitDie,
+ /// A DIE attribute used an unsupported form.
+ UnsupportedAttributeForm,
+ /// Missing DW_LNCT_path in file entry format.
+ MissingFileEntryFormatPath,
+ /// Expected an attribute value to be a string form.
+ ExpectedStringAttributeValue,
+ /// `DW_FORM_implicit_const` used in an invalid context.
+ InvalidImplicitConst,
+ /// Invalid section count in `.dwp` index.
+ InvalidIndexSectionCount,
+ /// Invalid slot count in `.dwp` index.
+ InvalidIndexSlotCount,
+ /// Invalid hash row in `.dwp` index.
+ InvalidIndexRow,
+ /// Unknown section type in `.dwp` index.
+ UnknownIndexSection,
+}
+
+impl fmt::Display for Error {
+ #[inline]
+ fn fmt(&self, f: &mut fmt::Formatter) -> ::core::result::Result<(), fmt::Error> {
+ write!(f, "{}", self.description())
+ }
+}
+
+impl Error {
+ /// A short description of the error.
+ pub fn description(&self) -> &str {
+ match *self {
+ Error::Io => "An I/O error occurred while reading.",
+ Error::PcRelativePointerButSectionBaseIsUndefined => {
+ "Found a PC relative pointer, but the section base is undefined."
+ }
+ Error::TextRelativePointerButTextBaseIsUndefined => {
+ "Found a `.text` relative pointer, but the `.text` base is undefined."
+ }
+ Error::DataRelativePointerButDataBaseIsUndefined => {
+ "Found a data relative pointer, but the data base is undefined."
+ }
+ Error::FuncRelativePointerInBadContext => {
+ "Found a function relative pointer in a context that does not have a function base."
+ }
+ Error::CannotParseOmitPointerEncoding => {
+ "Cannot parse a pointer with a `DW_EH_PE_omit` encoding."
+ }
+ Error::BadUnsignedLeb128 => "An error parsing an unsigned LEB128 value",
+ Error::BadSignedLeb128 => "An error parsing a signed LEB128 value",
+ Error::AbbreviationTagZero => {
+ "An abbreviation declared that its tag is zero,
+ but zero is reserved for null records"
+ }
+ Error::AttributeFormZero => {
+ "An attribute specification declared that its form is zero,
+ but zero is reserved for null records"
+ }
+ Error::BadHasChildren => {
+ "The abbreviation's has-children byte was not one of
+ `DW_CHILDREN_{yes,no}`"
+ }
+ Error::BadLength => "The specified length is impossible",
+ Error::UnknownForm => "Found an unknown `DW_FORM_*` type",
+ Error::ExpectedZero => "Expected a zero, found something else",
+ Error::DuplicateAbbreviationCode => {
+ "Found an abbreviation code that has already been used"
+ }
+ Error::DuplicateArange => "Found a duplicate arange",
+ Error::UnknownReservedLength => "Found an unknown reserved length value",
+ Error::UnknownVersion(_) => "Found an unknown DWARF version",
+ Error::UnknownAbbreviation => "Found a record with an unknown abbreviation code",
+ Error::UnexpectedEof(_) => "Hit the end of input before it was expected",
+ Error::UnexpectedNull => "Read a null entry before it was expected.",
+ Error::UnknownStandardOpcode(_) => "Found an unknown standard opcode",
+ Error::UnknownExtendedOpcode(_) => "Found an unknown extended opcode",
+ Error::UnsupportedAddressSize(_) => "The specified address size is not supported",
+ Error::UnsupportedOffsetSize(_) => "The specified offset size is not supported",
+ Error::UnsupportedFieldSize(_) => "The specified field size is not supported",
+ Error::MinimumInstructionLengthZero => {
+ "The minimum instruction length must not be zero."
+ }
+ Error::MaximumOperationsPerInstructionZero => {
+ "The maximum operations per instruction must not be zero."
+ }
+ Error::LineRangeZero => "The line range must not be zero.",
+ Error::OpcodeBaseZero => "The opcode base must not be zero.",
+ Error::BadUtf8 => "Found an invalid UTF-8 string.",
+ Error::NotCieId => "Expected to find the CIE ID, but found something else.",
+ Error::NotCiePointer => "Expected to find a CIE pointer, but found the CIE ID instead.",
+ Error::NotFdePointer => {
+ "Expected to find an FDE pointer, but found a CIE pointer instead."
+ }
+ Error::BadBranchTarget(_) => "Invalid branch target in DWARF expression",
+ Error::InvalidPushObjectAddress => {
+ "DW_OP_push_object_address used but no object address given"
+ }
+ Error::NotEnoughStackItems => "Not enough items on stack when evaluating expression",
+ Error::TooManyIterations => "Too many iterations to evaluate DWARF expression",
+ Error::InvalidExpression(_) => "Invalid opcode in DWARF expression",
+ Error::UnsupportedEvaluation => "Unsupported operation when evaluating expression",
+ Error::InvalidPiece => {
+ "DWARF expression has piece followed by non-piece expression at end"
+ }
+ Error::InvalidExpressionTerminator(_) => "Expected DW_OP_piece or DW_OP_bit_piece",
+ Error::DivisionByZero => "Division or modulus by zero when evaluating expression",
+ Error::TypeMismatch => "Type mismatch when evaluating expression",
+ Error::IntegralTypeRequired => "Integral type expected when evaluating expression",
+ Error::UnsupportedTypeOperation => {
+ "An expression operation used types that are not supported"
+ }
+ Error::InvalidShiftExpression => {
+ "The shift value in an expression must be a non-negative integer."
+ }
+ Error::UnknownCallFrameInstruction(_) => "An unknown DW_CFA_* instruction",
+ Error::InvalidAddressRange => {
+ "The end of an address range must not be before the beginning."
+ }
+ Error::InvalidLocationAddressRange => {
+ "The end offset of a location list entry must not be before the beginning."
+ }
+ Error::CfiInstructionInInvalidContext => {
+ "Encountered a call frame instruction in a context in which it is not valid."
+ }
+ Error::PopWithEmptyStack => {
+ "When evaluating call frame instructions, found a `DW_CFA_restore_state` stack pop \
+ instruction, but the stack was empty, and had nothing to pop."
+ }
+ Error::NoUnwindInfoForAddress => "Do not have unwind info for the given address.",
+ Error::UnsupportedOffset => {
+ "An offset value was larger than the maximum supported value."
+ }
+ Error::UnknownPointerEncoding => {
+ "The given pointer encoding is either unknown or invalid."
+ }
+ Error::NoEntryAtGivenOffset => "Did not find an entry at the given offset.",
+ Error::OffsetOutOfBounds => "The given offset is out of bounds.",
+ Error::UnknownAugmentation => "Found an unknown CFI augmentation.",
+ Error::UnsupportedPointerEncoding => {
+ "We do not support the given pointer encoding yet."
+ }
+ Error::UnsupportedRegister(_) => "Registers larger than `u16` are not supported.",
+ Error::TooManyRegisterRules => {
+ "The CFI program defined more register rules than we have storage for."
+ }
+ Error::StackFull => {
+ "Attempted to push onto the CFI stack, but it was already at full capacity."
+ }
+ Error::VariableLengthSearchTable => {
+ "The `.eh_frame_hdr` binary search table claims to be variable-length encoded, \
+ which makes binary search impossible."
+ }
+ Error::UnsupportedUnitType => "The `DW_UT_*` value for this unit is not supported yet",
+ Error::UnsupportedAddressIndex => "Ranges involving AddressIndex are not supported yet",
+ Error::UnsupportedSegmentSize => "Nonzero segment size not supported yet",
+ Error::MissingUnitDie => {
+ "A compilation unit or type unit is missing its top level DIE."
+ }
+ Error::UnsupportedAttributeForm => "A DIE attribute used an unsupported form.",
+ Error::MissingFileEntryFormatPath => "Missing DW_LNCT_path in file entry format.",
+ Error::ExpectedStringAttributeValue => {
+ "Expected an attribute value to be a string form."
+ }
+ Error::InvalidImplicitConst => "DW_FORM_implicit_const used in an invalid context.",
+ Error::InvalidIndexSectionCount => "Invalid section count in `.dwp` index.",
+ Error::InvalidIndexSlotCount => "Invalid slot count in `.dwp` index.",
+ Error::InvalidIndexRow => "Invalid hash row in `.dwp` index.",
+ Error::UnknownIndexSection => "Unknown section type in `.dwp` index.",
+ }
+ }
+}
+
+#[cfg(feature = "std")]
+impl error::Error for Error {}
+
+#[cfg(feature = "std")]
+impl From<io::Error> for Error {
+ fn from(_: io::Error) -> Self {
+ Error::Io
+ }
+}
+
+/// The result of a parse.
+pub type Result<T> = result::Result<T, Error>;
+
+/// A convenience trait for loading DWARF sections from object files. To be
+/// used like:
+///
+/// ```
+/// use gimli::{DebugInfo, EndianSlice, LittleEndian, Reader, Section};
+///
+/// let buf = [0x00, 0x01, 0x02, 0x03];
+/// let reader = EndianSlice::new(&buf, LittleEndian);
+/// let loader = |name| -> Result<_, ()> { Ok(reader) };
+///
+/// let debug_info: DebugInfo<_> = Section::load(loader).unwrap();
+/// ```
+pub trait Section<R>: From<R> {
+ /// Returns the section id for this type.
+ fn id() -> SectionId;
+
+ /// Returns the ELF section name for this type.
+ fn section_name() -> &'static str {
+ Self::id().name()
+ }
+
+ /// Returns the ELF section name (if any) for this type when used in a dwo
+ /// file.
+ fn dwo_section_name() -> Option<&'static str> {
+ Self::id().dwo_name()
+ }
+
+ /// Try to load the section using the given loader function.
+ fn load<F, E>(f: F) -> core::result::Result<Self, E>
+ where
+ F: FnOnce(SectionId) -> core::result::Result<R, E>,
+ {
+ f(Self::id()).map(From::from)
+ }
+
+ /// Returns the `Reader` for this section.
+ fn reader(&self) -> &R
+ where
+ R: Reader;
+
+ /// Returns the subrange of the section that is the contribution of
+ /// a unit in a `.dwp` file.
+ fn dwp_range(&self, offset: u32, size: u32) -> Result<Self>
+ where
+ R: Reader,
+ {
+ let mut data = self.reader().clone();
+ data.skip(R::Offset::from_u32(offset))?;
+ data.truncate(R::Offset::from_u32(size))?;
+ Ok(data.into())
+ }
+
+ /// Returns the section id and offset for the given `ReaderOffsetId`,
+ /// if the offset id refers to this section.
+ fn lookup_offset_id(&self, id: ReaderOffsetId) -> Option<(SectionId, R::Offset)>
+ where
+ R: Reader,
+ {
+ self.reader()
+ .lookup_offset_id(id)
+ .map(|offset| (Self::id(), offset))
+ }
+}
+
+impl Register {
+ pub(crate) fn from_u64(x: u64) -> Result<Register> {
+ let y = x as u16;
+ if u64::from(y) == x {
+ Ok(Register(y))
+ } else {
+ Err(Error::UnsupportedRegister(x))
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::common::Format;
+ use crate::endianity::LittleEndian;
+ use test_assembler::{Endian, Section};
+
+ #[test]
+ fn test_parse_initial_length_32_ok() {
+ let section = Section::with_endian(Endian::Little).L32(0x7856_3412);
+ let buf = section.get_contents().unwrap();
+
+ let input = &mut EndianSlice::new(&buf, LittleEndian);
+ match input.read_initial_length() {
+ Ok((length, format)) => {
+ assert_eq!(input.len(), 0);
+ assert_eq!(format, Format::Dwarf32);
+ assert_eq!(0x7856_3412, length);
+ }
+ otherwise => panic!("Unexpected result: {:?}", otherwise),
+ }
+ }
+
+ #[test]
+ fn test_parse_initial_length_64_ok() {
+ let section = Section::with_endian(Endian::Little)
+ // Dwarf_64_INITIAL_UNIT_LENGTH
+ .L32(0xffff_ffff)
+ // Actual length
+ .L64(0xffde_bc9a_7856_3412);
+ let buf = section.get_contents().unwrap();
+ let input = &mut EndianSlice::new(&buf, LittleEndian);
+
+ #[cfg(target_pointer_width = "64")]
+ match input.read_initial_length() {
+ Ok((length, format)) => {
+ assert_eq!(input.len(), 0);
+ assert_eq!(format, Format::Dwarf64);
+ assert_eq!(0xffde_bc9a_7856_3412, length);
+ }
+ otherwise => panic!("Unexpected result: {:?}", otherwise),
+ }
+
+ #[cfg(target_pointer_width = "32")]
+ match input.read_initial_length() {
+ Err(Error::UnsupportedOffset) => {}
+ otherwise => panic!("Unexpected result: {:?}", otherwise),
+ };
+ }
+
+ #[test]
+ fn test_parse_initial_length_unknown_reserved_value() {
+ let section = Section::with_endian(Endian::Little).L32(0xffff_fffe);
+ let buf = section.get_contents().unwrap();
+
+ let input = &mut EndianSlice::new(&buf, LittleEndian);
+ match input.read_initial_length() {
+ Err(Error::UnknownReservedLength) => assert!(true),
+ otherwise => panic!("Unexpected result: {:?}", otherwise),
+ };
+ }
+
+ #[test]
+ fn test_parse_initial_length_incomplete() {
+ let buf = [0xff, 0xff, 0xff]; // Need at least 4 bytes.
+
+ let input = &mut EndianSlice::new(&buf, LittleEndian);
+ match input.read_initial_length() {
+ Err(Error::UnexpectedEof(_)) => assert!(true),
+ otherwise => panic!("Unexpected result: {:?}", otherwise),
+ };
+ }
+
+ #[test]
+ fn test_parse_initial_length_64_incomplete() {
+ let section = Section::with_endian(Endian::Little)
+ // Dwarf_64_INITIAL_UNIT_LENGTH
+ .L32(0xffff_ffff)
+ // Actual length is not long enough.
+ .L32(0x7856_3412);
+ let buf = section.get_contents().unwrap();
+
+ let input = &mut EndianSlice::new(&buf, LittleEndian);
+ match input.read_initial_length() {
+ Err(Error::UnexpectedEof(_)) => assert!(true),
+ otherwise => panic!("Unexpected result: {:?}", otherwise),
+ };
+ }
+
+ #[test]
+ fn test_parse_offset_32() {
+ let section = Section::with_endian(Endian::Little).L32(0x0123_4567);
+ let buf = section.get_contents().unwrap();
+
+ let input = &mut EndianSlice::new(&buf, LittleEndian);
+ match input.read_offset(Format::Dwarf32) {
+ Ok(val) => {
+ assert_eq!(input.len(), 0);
+ assert_eq!(val, 0x0123_4567);
+ }
+ otherwise => panic!("Unexpected result: {:?}", otherwise),
+ };
+ }
+
+ #[test]
+ fn test_parse_offset_64_small() {
+ let section = Section::with_endian(Endian::Little).L64(0x0123_4567);
+ let buf = section.get_contents().unwrap();
+
+ let input = &mut EndianSlice::new(&buf, LittleEndian);
+ match input.read_offset(Format::Dwarf64) {
+ Ok(val) => {
+ assert_eq!(input.len(), 0);
+ assert_eq!(val, 0x0123_4567);
+ }
+ otherwise => panic!("Unexpected result: {:?}", otherwise),
+ };
+ }
+
+ #[test]
+ #[cfg(target_pointer_width = "64")]
+ fn test_parse_offset_64_large() {
+ let section = Section::with_endian(Endian::Little).L64(0x0123_4567_89ab_cdef);
+ let buf = section.get_contents().unwrap();
+
+ let input = &mut EndianSlice::new(&buf, LittleEndian);
+ match input.read_offset(Format::Dwarf64) {
+ Ok(val) => {
+ assert_eq!(input.len(), 0);
+ assert_eq!(val, 0x0123_4567_89ab_cdef);
+ }
+ otherwise => panic!("Unexpected result: {:?}", otherwise),
+ };
+ }
+
+ #[test]
+ #[cfg(target_pointer_width = "32")]
+ fn test_parse_offset_64_large() {
+ let section = Section::with_endian(Endian::Little).L64(0x0123_4567_89ab_cdef);
+ let buf = section.get_contents().unwrap();
+
+ let input = &mut EndianSlice::new(&buf, LittleEndian);
+ match input.read_offset(Format::Dwarf64) {
+ Err(Error::UnsupportedOffset) => assert!(true),
+ otherwise => panic!("Unexpected result: {:?}", otherwise),
+ };
+ }
+}
diff --git a/vendor/gimli-0.26.2/src/read/op.rs b/vendor/gimli-0.26.2/src/read/op.rs
new file mode 100644
index 000000000..88ea20297
--- /dev/null
+++ b/vendor/gimli-0.26.2/src/read/op.rs
@@ -0,0 +1,4114 @@
+//! Functions for parsing and evaluating DWARF expressions.
+
+#[cfg(feature = "read")]
+use alloc::vec::Vec;
+use core::mem;
+
+use super::util::{ArrayLike, ArrayVec};
+use crate::common::{DebugAddrIndex, DebugInfoOffset, Encoding, Register};
+use crate::constants;
+use crate::read::{Error, Reader, ReaderOffset, Result, StoreOnHeap, UnitOffset, Value, ValueType};
+
+/// A reference to a DIE, either relative to the current CU or
+/// relative to the section.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum DieReference<T = usize> {
+ /// A CU-relative reference.
+ UnitRef(UnitOffset<T>),
+ /// A section-relative reference.
+ DebugInfoRef(DebugInfoOffset<T>),
+}
+
+/// A single decoded DWARF expression operation.
+///
+/// DWARF expression evaluation is done in two parts: first the raw
+/// bytes of the next part of the expression are decoded; and then the
+/// decoded operation is evaluated. This approach lets other
+/// consumers inspect the DWARF expression without reimplementing the
+/// decoding operation.
+///
+/// Multiple DWARF opcodes may decode into a single `Operation`. For
+/// example, both `DW_OP_deref` and `DW_OP_xderef` are represented
+/// using `Operation::Deref`.
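+///
+/// A minimal decoding sketch (illustrative only; it assumes the public
+/// `Operation::parse(&mut reader, encoding)` entry point, so adapt it to the
+/// API of the version you are using):
+///
+/// ```rust,no_run
+/// # fn example() -> Result<(), gimli::Error> {
+/// use gimli::{Encoding, EndianSlice, Format, LittleEndian, Operation, Reader};
+///
+/// let encoding = Encoding { format: Format::Dwarf32, version: 5, address_size: 8 };
+/// // DW_OP_breg0 0: "the value of register 0 plus 0".
+/// let mut bytes = EndianSlice::new(&[0x70, 0x00], LittleEndian);
+/// while !bytes.is_empty() {
+///     let op = Operation::parse(&mut bytes, encoding)?;
+///     println!("{:?}", op);
+/// }
+/// # Ok(())
+/// # }
+/// ```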
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum Operation<R, Offset = <R as Reader>::Offset>
+where
+ R: Reader<Offset = Offset>,
+ Offset: ReaderOffset,
+{
+ /// Dereference the topmost value of the stack.
+ Deref {
+ /// The DIE of the base type, or 0 to indicate the generic type.
+ base_type: UnitOffset<Offset>,
+ /// The size of the data to dereference.
+ size: u8,
+ /// True if the dereference operation takes an address space
+ /// argument from the stack; false otherwise.
+ space: bool,
+ },
+ /// Drop an item from the stack.
+ Drop,
+ /// Pick an item from the stack and push it on top of the stack.
+ /// This operation handles `DW_OP_pick`, `DW_OP_dup`, and
+ /// `DW_OP_over`.
+ Pick {
+ /// The index, from the top of the stack, of the item to copy.
+ index: u8,
+ },
+ /// Swap the top two stack items.
+ Swap,
+ /// Rotate the top three stack items.
+ Rot,
+ /// Take the absolute value of the top of the stack.
+ Abs,
+ /// Bitwise `and` of the top two values on the stack.
+ And,
+ /// Divide the top two values on the stack.
+ Div,
+ /// Subtract the top two values on the stack.
+ Minus,
+ /// Modulus of the top two values on the stack.
+ Mod,
+ /// Multiply the top two values on the stack.
+ Mul,
+ /// Negate the top of the stack.
+ Neg,
+ /// Bitwise `not` of the top of the stack.
+ Not,
+ /// Bitwise `or` of the top two values on the stack.
+ Or,
+ /// Add the top two values on the stack.
+ Plus,
+ /// Add a constant to the topmost value on the stack.
+ PlusConstant {
+ /// The value to add.
+ value: u64,
+ },
+ /// Logical left shift of the 2nd value on the stack by the number
+ /// of bits given by the topmost value on the stack.
+ Shl,
+ /// Right shift of the 2nd value on the stack by the number of
+ /// bits given by the topmost value on the stack.
+ Shr,
+ /// Arithmetic right shift of the 2nd value on the stack by the
+ /// number of bits given by the topmost value on the stack.
+ Shra,
+ /// Bitwise `xor` of the top two values on the stack.
+ Xor,
+ /// Branch to the target location if the top of stack is nonzero.
+ Bra {
+ /// The relative offset to the target bytecode.
+ target: i16,
+ },
+ /// Compare the top two stack values for equality.
+ Eq,
+ /// Compare the top two stack values using `>=`.
+ Ge,
+ /// Compare the top two stack values using `>`.
+ Gt,
+ /// Compare the top two stack values using `<=`.
+ Le,
+ /// Compare the top two stack values using `<`.
+ Lt,
+ /// Compare the top two stack values using `!=`.
+ Ne,
+ /// Unconditional branch to the target location.
+ Skip {
+ /// The relative offset to the target bytecode.
+ target: i16,
+ },
+ /// Push an unsigned constant value on the stack. This handles multiple
+ /// DWARF opcodes.
+ UnsignedConstant {
+ /// The value to push.
+ value: u64,
+ },
+ /// Push a signed constant value on the stack. This handles multiple
+ /// DWARF opcodes.
+ SignedConstant {
+ /// The value to push.
+ value: i64,
+ },
+ /// Indicate that this piece's location is in the given register.
+ ///
+ /// Completes the piece or expression.
+ Register {
+ /// The register number.
+ register: Register,
+ },
+ /// Find the value of the given register, add the offset, and then
+ /// push the resulting sum on the stack.
+ RegisterOffset {
+ /// The register number.
+ register: Register,
+ /// The offset to add.
+ offset: i64,
+ /// The DIE of the base type, or 0 to indicate the generic type.
+ base_type: UnitOffset<Offset>,
+ },
+ /// Compute the frame base (using `DW_AT_frame_base`), add the
+ /// given offset, and then push the resulting sum on the stack.
+ FrameOffset {
+ /// The offset to add.
+ offset: i64,
+ },
+ /// No operation.
+ Nop,
+ /// Push the object address on the stack.
+ PushObjectAddress,
+ /// Evaluate a DWARF expression as a subroutine. The expression
+ /// comes from the `DW_AT_location` attribute of the indicated
+ /// DIE.
+ Call {
+ /// The DIE to use.
+ offset: DieReference<Offset>,
+ },
+ /// Compute the address of a thread-local variable and push it on
+ /// the stack.
+ TLS,
+ /// Compute the call frame CFA and push it on the stack.
+ CallFrameCFA,
+ /// Terminate a piece.
+ Piece {
+ /// The size of this piece in bits.
+ size_in_bits: u64,
+ /// The bit offset of this piece. If `None`, then this piece
+ /// was specified using `DW_OP_piece` and should start at the
+ /// next byte boundary.
+ bit_offset: Option<u64>,
+ },
+ /// The object has no location, but has a known constant value.
+ ///
+ /// Represents `DW_OP_implicit_value`.
+ /// Completes the piece or expression.
+ ImplicitValue {
+ /// The implicit value to use.
+ data: R,
+ },
+ /// The object has no location, but its value is at the top of the stack.
+ ///
+ /// Represents `DW_OP_stack_value`.
+ /// Completes the piece or expression.
+ StackValue,
+ /// The object is a pointer to a value which has no actual location,
+ /// such as an implicit value or a stack value.
+ ///
+ /// Represents `DW_OP_implicit_pointer`.
+ /// Completes the piece or expression.
+ ImplicitPointer {
+ /// The `.debug_info` offset of the value that this is an implicit pointer into.
+ value: DebugInfoOffset<Offset>,
+ /// The byte offset into the value that the implicit pointer points to.
+ byte_offset: i64,
+ },
+ /// Evaluate an expression at the entry to the current subprogram, and push it on the stack.
+ ///
+ /// Represents `DW_OP_entry_value`.
+ EntryValue {
+ /// The expression to be evaluated.
+ expression: R,
+ },
+ /// This represents a parameter that was optimized out.
+ ///
+ /// The offset points to the definition of the parameter, and is
+ /// matched to the `DW_TAG_GNU_call_site_parameter` in the caller that also
+ /// points to the same definition of the parameter.
+ ///
+ /// Represents `DW_OP_GNU_parameter_ref`.
+ ParameterRef {
+ /// The DIE to use.
+ offset: UnitOffset<Offset>,
+ },
+ /// Relocate the address if needed, and push it on the stack.
+ ///
+ /// Represents `DW_OP_addr`.
+ Address {
+ /// The address to push.
+ address: u64,
+ },
+ /// Read the address at the given index in `.debug_addr`, relocate the address if needed,
+ /// and push it on the stack.
+ ///
+ /// Represents `DW_OP_addrx`.
+ AddressIndex {
+ /// The index of the address in `.debug_addr`.
+ index: DebugAddrIndex<Offset>,
+ },
+ /// Read the address at the given index in `.debug_addr`, and push it on the stack.
+ /// Do not relocate the address.
+ ///
+ /// Represents `DW_OP_constx`.
+ ConstantIndex {
+ /// The index of the address in `.debug_addr`.
+ index: DebugAddrIndex<Offset>,
+ },
+ /// Interpret the value bytes as a constant of a given type, and push it on the stack.
+ ///
+ /// Represents `DW_OP_const_type`.
+ TypedLiteral {
+ /// The DIE of the base type.
+ base_type: UnitOffset<Offset>,
+ /// The value bytes.
+ value: R,
+ },
+ /// Pop the top stack entry, convert it to a different type, and push it on the stack.
+ ///
+ /// Represents `DW_OP_convert`.
+ Convert {
+ /// The DIE of the base type.
+ base_type: UnitOffset<Offset>,
+ },
+ /// Pop the top stack entry, reinterpret the bits in its value as a different type,
+ /// and push it on the stack.
+ ///
+ /// Represents `DW_OP_reinterpret`.
+ Reinterpret {
+ /// The DIE of the base type.
+ base_type: UnitOffset<Offset>,
+ },
+ /// The index of a local in the currently executing function.
+ ///
+ /// Represents `DW_OP_WASM_location 0x00`.
+ /// Completes the piece or expression.
+ WasmLocal {
+ /// The index of the local.
+ index: u32,
+ },
+ /// The index of a global.
+ ///
+ /// Represents `DW_OP_WASM_location 0x01` or `DW_OP_WASM_location 0x03`.
+ /// Completes the piece or expression.
+ WasmGlobal {
+ /// The index of the global.
+ index: u32,
+ },
+ /// The index of an item on the operand stack.
+ ///
+ /// Represents `DW_OP_WASM_location 0x02`.
+ /// Completes the piece or expression.
+ WasmStack {
+ /// The index of the stack item. 0 is the bottom of the operand stack.
+ index: u32,
+ },
+}
+
+#[derive(Debug)]
+enum OperationEvaluationResult<R: Reader> {
+ Piece,
+ Incomplete,
+ Complete { location: Location<R> },
+ Waiting(EvaluationWaiting<R>, EvaluationResult<R>),
+}
+
+/// A single location of a piece of the result of a DWARF expression.
+#[derive(Debug, Clone, Copy, PartialEq)]
+pub enum Location<R, Offset = <R as Reader>::Offset>
+where
+ R: Reader<Offset = Offset>,
+ Offset: ReaderOffset,
+{
+ /// The piece is empty. Ordinarily this means the piece has been
+ /// optimized away.
+ Empty,
+ /// The piece is found in a register.
+ Register {
+ /// The register number.
+ register: Register,
+ },
+ /// The piece is found in memory.
+ Address {
+ /// The address.
+ address: u64,
+ },
+ /// The piece has no location but its value is known.
+ Value {
+ /// The value.
+ value: Value,
+ },
+ /// The piece is represented by some constant bytes.
+ Bytes {
+ /// The value.
+ value: R,
+ },
+ /// The piece is a pointer to a value which has no actual location.
+ ImplicitPointer {
+ /// The `.debug_info` offset of the value that this is an implicit pointer into.
+ value: DebugInfoOffset<Offset>,
+ /// The byte offset into the value that the implicit pointer points to.
+ byte_offset: i64,
+ },
+}
+
+impl<R, Offset> Location<R, Offset>
+where
+ R: Reader<Offset = Offset>,
+ Offset: ReaderOffset,
+{
+ /// Return true if the piece is empty.
+ pub fn is_empty(&self) -> bool {
+ match *self {
+ Location::Empty => true,
+ _ => false,
+ }
+ }
+}
+
+/// The description of a single piece of the result of a DWARF
+/// expression.
+#[derive(Debug, Clone, Copy, PartialEq)]
+pub struct Piece<R, Offset = <R as Reader>::Offset>
+where
+ R: Reader<Offset = Offset>,
+ Offset: ReaderOffset,
+{
+ /// If given, the size of the piece in bits. If `None`, there
+ /// must be only one piece, and its size is that of the entire object.
+ pub size_in_bits: Option<u64>,
+ /// If given, the bit offset of the piece within the location.
+ /// If the location is a `Location::Register` or `Location::Value`,
+ /// then this offset is from the least significant bit end of
+ /// the register or value.
+ /// If the location is a `Location::Address` then the offset uses
+ /// the bit numbering and direction conventions of the language
+ /// and target system.
+ ///
+ /// If `None`, the piece starts at the location. If the
+ /// location is a register whose size is larger than the piece,
+ /// then placement within the register is defined by the ABI.
+ pub bit_offset: Option<u64>,
+ /// Where this piece is to be found.
+ pub location: Location<R, Offset>,
+}
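+
+// A minimal sketch of how `Piece` and `Location` compose (the reader type and
+// the register/address values below are illustrative assumptions): a 64-bit
+// object split into two 32-bit pieces, the low half in register 0 and the
+// high half in memory.
+//
+//     let low = Piece::<EndianSlice<'static, LittleEndian>> {
+//         size_in_bits: Some(32),
+//         bit_offset: None,
+//         location: Location::Register { register: Register(0) },
+//     };
+//     let high = Piece::<EndianSlice<'static, LittleEndian>> {
+//         size_in_bits: Some(32),
+//         bit_offset: None,
+//         location: Location::Address { address: 0x1000 },
+//     };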
+
+// A helper function to handle branch offsets.
+fn compute_pc<R: Reader>(pc: &R, bytecode: &R, offset: i16) -> Result<R> {
+ let pc_offset = pc.offset_from(bytecode);
+ let new_pc_offset = pc_offset.wrapping_add(R::Offset::from_i16(offset));
+ if new_pc_offset > bytecode.len() {
+ Err(Error::BadBranchTarget(new_pc_offset.into_u64()))
+ } else {
+ let mut new_pc = bytecode.clone();
+ new_pc.skip(new_pc_offset)?;
+ Ok(new_pc)
+ }
+}
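+
+// A small worked sketch of the arithmetic above (the lengths and offsets are
+// illustrative): if `bytecode` is 10 bytes long and `pc` sits at offset 6,
+// just past a branch opcode and its 2-byte operand, then:
+//
+//     compute_pc(&pc, &bytecode, -4)  // Ok: offset 2 within `bytecode`
+//     compute_pc(&pc, &bytecode, 5)   // Err(BadBranchTarget(11)): past the end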
+
+fn generic_type<O: ReaderOffset>() -> UnitOffset<O> {
+ UnitOffset(O::from_u64(0).unwrap())
+}
+
+impl<R, Offset> Operation<R, Offset>
+where
+ R: Reader<Offset = Offset>,
+ Offset: ReaderOffset,
+{
+ /// Parse a single DWARF expression operation.
+ ///
+ /// This is useful when examining a DWARF expression for reasons other
+ /// than direct evaluation.
+ ///
+ /// `bytes` points to the operation to decode. It should point into
+ /// the same array as `bytecode`, which should be the entire
+ /// expression.
+ pub fn parse(bytes: &mut R, encoding: Encoding) -> Result<Operation<R, Offset>> {
+ let opcode = bytes.read_u8()?;
+ let name = constants::DwOp(opcode);
+ match name {
+ constants::DW_OP_addr => {
+ let address = bytes.read_address(encoding.address_size)?;
+ Ok(Operation::Address { address })
+ }
+ constants::DW_OP_deref => Ok(Operation::Deref {
+ base_type: generic_type(),
+ size: encoding.address_size,
+ space: false,
+ }),
+ constants::DW_OP_const1u => {
+ let value = bytes.read_u8()?;
+ Ok(Operation::UnsignedConstant {
+ value: u64::from(value),
+ })
+ }
+ constants::DW_OP_const1s => {
+ let value = bytes.read_i8()?;
+ Ok(Operation::SignedConstant {
+ value: i64::from(value),
+ })
+ }
+ constants::DW_OP_const2u => {
+ let value = bytes.read_u16()?;
+ Ok(Operation::UnsignedConstant {
+ value: u64::from(value),
+ })
+ }
+ constants::DW_OP_const2s => {
+ let value = bytes.read_i16()?;
+ Ok(Operation::SignedConstant {
+ value: i64::from(value),
+ })
+ }
+ constants::DW_OP_const4u => {
+ let value = bytes.read_u32()?;
+ Ok(Operation::UnsignedConstant {
+ value: u64::from(value),
+ })
+ }
+ constants::DW_OP_const4s => {
+ let value = bytes.read_i32()?;
+ Ok(Operation::SignedConstant {
+ value: i64::from(value),
+ })
+ }
+ constants::DW_OP_const8u => {
+ let value = bytes.read_u64()?;
+ Ok(Operation::UnsignedConstant { value })
+ }
+ constants::DW_OP_const8s => {
+ let value = bytes.read_i64()?;
+ Ok(Operation::SignedConstant { value })
+ }
+ constants::DW_OP_constu => {
+ let value = bytes.read_uleb128()?;
+ Ok(Operation::UnsignedConstant { value })
+ }
+ constants::DW_OP_consts => {
+ let value = bytes.read_sleb128()?;
+ Ok(Operation::SignedConstant { value })
+ }
+ constants::DW_OP_dup => Ok(Operation::Pick { index: 0 }),
+ constants::DW_OP_drop => Ok(Operation::Drop),
+ constants::DW_OP_over => Ok(Operation::Pick { index: 1 }),
+ constants::DW_OP_pick => {
+ let value = bytes.read_u8()?;
+ Ok(Operation::Pick { index: value })
+ }
+ constants::DW_OP_swap => Ok(Operation::Swap),
+ constants::DW_OP_rot => Ok(Operation::Rot),
+ constants::DW_OP_xderef => Ok(Operation::Deref {
+ base_type: generic_type(),
+ size: encoding.address_size,
+ space: true,
+ }),
+ constants::DW_OP_abs => Ok(Operation::Abs),
+ constants::DW_OP_and => Ok(Operation::And),
+ constants::DW_OP_div => Ok(Operation::Div),
+ constants::DW_OP_minus => Ok(Operation::Minus),
+ constants::DW_OP_mod => Ok(Operation::Mod),
+ constants::DW_OP_mul => Ok(Operation::Mul),
+ constants::DW_OP_neg => Ok(Operation::Neg),
+ constants::DW_OP_not => Ok(Operation::Not),
+ constants::DW_OP_or => Ok(Operation::Or),
+ constants::DW_OP_plus => Ok(Operation::Plus),
+ constants::DW_OP_plus_uconst => {
+ let value = bytes.read_uleb128()?;
+ Ok(Operation::PlusConstant { value })
+ }
+ constants::DW_OP_shl => Ok(Operation::Shl),
+ constants::DW_OP_shr => Ok(Operation::Shr),
+ constants::DW_OP_shra => Ok(Operation::Shra),
+ constants::DW_OP_xor => Ok(Operation::Xor),
+ constants::DW_OP_bra => {
+ let target = bytes.read_i16()?;
+ Ok(Operation::Bra { target })
+ }
+ constants::DW_OP_eq => Ok(Operation::Eq),
+ constants::DW_OP_ge => Ok(Operation::Ge),
+ constants::DW_OP_gt => Ok(Operation::Gt),
+ constants::DW_OP_le => Ok(Operation::Le),
+ constants::DW_OP_lt => Ok(Operation::Lt),
+ constants::DW_OP_ne => Ok(Operation::Ne),
+ constants::DW_OP_skip => {
+ let target = bytes.read_i16()?;
+ Ok(Operation::Skip { target })
+ }
+ constants::DW_OP_lit0
+ | constants::DW_OP_lit1
+ | constants::DW_OP_lit2
+ | constants::DW_OP_lit3
+ | constants::DW_OP_lit4
+ | constants::DW_OP_lit5
+ | constants::DW_OP_lit6
+ | constants::DW_OP_lit7
+ | constants::DW_OP_lit8
+ | constants::DW_OP_lit9
+ | constants::DW_OP_lit10
+ | constants::DW_OP_lit11
+ | constants::DW_OP_lit12
+ | constants::DW_OP_lit13
+ | constants::DW_OP_lit14
+ | constants::DW_OP_lit15
+ | constants::DW_OP_lit16
+ | constants::DW_OP_lit17
+ | constants::DW_OP_lit18
+ | constants::DW_OP_lit19
+ | constants::DW_OP_lit20
+ | constants::DW_OP_lit21
+ | constants::DW_OP_lit22
+ | constants::DW_OP_lit23
+ | constants::DW_OP_lit24
+ | constants::DW_OP_lit25
+ | constants::DW_OP_lit26
+ | constants::DW_OP_lit27
+ | constants::DW_OP_lit28
+ | constants::DW_OP_lit29
+ | constants::DW_OP_lit30
+ | constants::DW_OP_lit31 => Ok(Operation::UnsignedConstant {
+ value: (opcode - constants::DW_OP_lit0.0).into(),
+ }),
+ constants::DW_OP_reg0
+ | constants::DW_OP_reg1
+ | constants::DW_OP_reg2
+ | constants::DW_OP_reg3
+ | constants::DW_OP_reg4
+ | constants::DW_OP_reg5
+ | constants::DW_OP_reg6
+ | constants::DW_OP_reg7
+ | constants::DW_OP_reg8
+ | constants::DW_OP_reg9
+ | constants::DW_OP_reg10
+ | constants::DW_OP_reg11
+ | constants::DW_OP_reg12
+ | constants::DW_OP_reg13
+ | constants::DW_OP_reg14
+ | constants::DW_OP_reg15
+ | constants::DW_OP_reg16
+ | constants::DW_OP_reg17
+ | constants::DW_OP_reg18
+ | constants::DW_OP_reg19
+ | constants::DW_OP_reg20
+ | constants::DW_OP_reg21
+ | constants::DW_OP_reg22
+ | constants::DW_OP_reg23
+ | constants::DW_OP_reg24
+ | constants::DW_OP_reg25
+ | constants::DW_OP_reg26
+ | constants::DW_OP_reg27
+ | constants::DW_OP_reg28
+ | constants::DW_OP_reg29
+ | constants::DW_OP_reg30
+ | constants::DW_OP_reg31 => Ok(Operation::Register {
+ register: Register((opcode - constants::DW_OP_reg0.0).into()),
+ }),
+ constants::DW_OP_breg0
+ | constants::DW_OP_breg1
+ | constants::DW_OP_breg2
+ | constants::DW_OP_breg3
+ | constants::DW_OP_breg4
+ | constants::DW_OP_breg5
+ | constants::DW_OP_breg6
+ | constants::DW_OP_breg7
+ | constants::DW_OP_breg8
+ | constants::DW_OP_breg9
+ | constants::DW_OP_breg10
+ | constants::DW_OP_breg11
+ | constants::DW_OP_breg12
+ | constants::DW_OP_breg13
+ | constants::DW_OP_breg14
+ | constants::DW_OP_breg15
+ | constants::DW_OP_breg16
+ | constants::DW_OP_breg17
+ | constants::DW_OP_breg18
+ | constants::DW_OP_breg19
+ | constants::DW_OP_breg20
+ | constants::DW_OP_breg21
+ | constants::DW_OP_breg22
+ | constants::DW_OP_breg23
+ | constants::DW_OP_breg24
+ | constants::DW_OP_breg25
+ | constants::DW_OP_breg26
+ | constants::DW_OP_breg27
+ | constants::DW_OP_breg28
+ | constants::DW_OP_breg29
+ | constants::DW_OP_breg30
+ | constants::DW_OP_breg31 => {
+ let value = bytes.read_sleb128()?;
+ Ok(Operation::RegisterOffset {
+ register: Register((opcode - constants::DW_OP_breg0.0).into()),
+ offset: value,
+ base_type: generic_type(),
+ })
+ }
+ constants::DW_OP_regx => {
+ let register = bytes.read_uleb128().and_then(Register::from_u64)?;
+ Ok(Operation::Register { register })
+ }
+ constants::DW_OP_fbreg => {
+ let value = bytes.read_sleb128()?;
+ Ok(Operation::FrameOffset { offset: value })
+ }
+ constants::DW_OP_bregx => {
+ let register = bytes.read_uleb128().and_then(Register::from_u64)?;
+ let offset = bytes.read_sleb128()?;
+ Ok(Operation::RegisterOffset {
+ register,
+ offset,
+ base_type: generic_type(),
+ })
+ }
+ constants::DW_OP_piece => {
+ let size = bytes.read_uleb128()?;
+ Ok(Operation::Piece {
+ size_in_bits: 8 * size,
+ bit_offset: None,
+ })
+ }
+ constants::DW_OP_deref_size => {
+ let size = bytes.read_u8()?;
+ Ok(Operation::Deref {
+ base_type: generic_type(),
+ size,
+ space: false,
+ })
+ }
+ constants::DW_OP_xderef_size => {
+ let size = bytes.read_u8()?;
+ Ok(Operation::Deref {
+ base_type: generic_type(),
+ size,
+ space: true,
+ })
+ }
+ constants::DW_OP_nop => Ok(Operation::Nop),
+ constants::DW_OP_push_object_address => Ok(Operation::PushObjectAddress),
+ constants::DW_OP_call2 => {
+ let value = bytes.read_u16().map(R::Offset::from_u16)?;
+ Ok(Operation::Call {
+ offset: DieReference::UnitRef(UnitOffset(value)),
+ })
+ }
+ constants::DW_OP_call4 => {
+ let value = bytes.read_u32().map(R::Offset::from_u32)?;
+ Ok(Operation::Call {
+ offset: DieReference::UnitRef(UnitOffset(value)),
+ })
+ }
+ constants::DW_OP_call_ref => {
+ let value = bytes.read_offset(encoding.format)?;
+ Ok(Operation::Call {
+ offset: DieReference::DebugInfoRef(DebugInfoOffset(value)),
+ })
+ }
+ constants::DW_OP_form_tls_address | constants::DW_OP_GNU_push_tls_address => {
+ Ok(Operation::TLS)
+ }
+ constants::DW_OP_call_frame_cfa => Ok(Operation::CallFrameCFA),
+ constants::DW_OP_bit_piece => {
+ let size = bytes.read_uleb128()?;
+ let offset = bytes.read_uleb128()?;
+ Ok(Operation::Piece {
+ size_in_bits: size,
+ bit_offset: Some(offset),
+ })
+ }
+ constants::DW_OP_implicit_value => {
+ let len = bytes.read_uleb128().and_then(R::Offset::from_u64)?;
+ let data = bytes.split(len)?;
+ Ok(Operation::ImplicitValue { data })
+ }
+ constants::DW_OP_stack_value => Ok(Operation::StackValue),
+ constants::DW_OP_implicit_pointer | constants::DW_OP_GNU_implicit_pointer => {
+ let value = if encoding.version == 2 {
+ bytes
+ .read_address(encoding.address_size)
+ .and_then(Offset::from_u64)?
+ } else {
+ bytes.read_offset(encoding.format)?
+ };
+ let byte_offset = bytes.read_sleb128()?;
+ Ok(Operation::ImplicitPointer {
+ value: DebugInfoOffset(value),
+ byte_offset,
+ })
+ }
+ constants::DW_OP_addrx | constants::DW_OP_GNU_addr_index => {
+ let index = bytes.read_uleb128().and_then(R::Offset::from_u64)?;
+ Ok(Operation::AddressIndex {
+ index: DebugAddrIndex(index),
+ })
+ }
+ constants::DW_OP_constx | constants::DW_OP_GNU_const_index => {
+ let index = bytes.read_uleb128().and_then(R::Offset::from_u64)?;
+ Ok(Operation::ConstantIndex {
+ index: DebugAddrIndex(index),
+ })
+ }
+ constants::DW_OP_entry_value | constants::DW_OP_GNU_entry_value => {
+ let len = bytes.read_uleb128().and_then(R::Offset::from_u64)?;
+ let expression = bytes.split(len)?;
+ Ok(Operation::EntryValue { expression })
+ }
+ constants::DW_OP_GNU_parameter_ref => {
+ let value = bytes.read_u32().map(R::Offset::from_u32)?;
+ Ok(Operation::ParameterRef {
+ offset: UnitOffset(value),
+ })
+ }
+ constants::DW_OP_const_type | constants::DW_OP_GNU_const_type => {
+ let base_type = bytes.read_uleb128().and_then(R::Offset::from_u64)?;
+ let len = bytes.read_u8()?;
+ let value = bytes.split(R::Offset::from_u8(len))?;
+ Ok(Operation::TypedLiteral {
+ base_type: UnitOffset(base_type),
+ value,
+ })
+ }
+ constants::DW_OP_regval_type | constants::DW_OP_GNU_regval_type => {
+ let register = bytes.read_uleb128().and_then(Register::from_u64)?;
+ let base_type = bytes.read_uleb128().and_then(R::Offset::from_u64)?;
+ Ok(Operation::RegisterOffset {
+ register,
+ offset: 0,
+ base_type: UnitOffset(base_type),
+ })
+ }
+ constants::DW_OP_deref_type | constants::DW_OP_GNU_deref_type => {
+ let size = bytes.read_u8()?;
+ let base_type = bytes.read_uleb128().and_then(R::Offset::from_u64)?;
+ Ok(Operation::Deref {
+ base_type: UnitOffset(base_type),
+ size,
+ space: false,
+ })
+ }
+ constants::DW_OP_xderef_type => {
+ let size = bytes.read_u8()?;
+ let base_type = bytes.read_uleb128().and_then(R::Offset::from_u64)?;
+ Ok(Operation::Deref {
+ base_type: UnitOffset(base_type),
+ size,
+ space: true,
+ })
+ }
+ constants::DW_OP_convert | constants::DW_OP_GNU_convert => {
+ let base_type = bytes.read_uleb128().and_then(R::Offset::from_u64)?;
+ Ok(Operation::Convert {
+ base_type: UnitOffset(base_type),
+ })
+ }
+ constants::DW_OP_reinterpret | constants::DW_OP_GNU_reinterpret => {
+ let base_type = bytes.read_uleb128().and_then(R::Offset::from_u64)?;
+ Ok(Operation::Reinterpret {
+ base_type: UnitOffset(base_type),
+ })
+ }
+ constants::DW_OP_WASM_location => match bytes.read_u8()? {
+ 0x0 => {
+ let index = bytes.read_uleb128_u32()?;
+ Ok(Operation::WasmLocal { index })
+ }
+ 0x1 => {
+ let index = bytes.read_uleb128_u32()?;
+ Ok(Operation::WasmGlobal { index })
+ }
+ 0x2 => {
+ let index = bytes.read_uleb128_u32()?;
+ Ok(Operation::WasmStack { index })
+ }
+ 0x3 => {
+ let index = bytes.read_u32()?;
+ Ok(Operation::WasmGlobal { index })
+ }
+ _ => Err(Error::InvalidExpression(name)),
+ },
+ _ => Err(Error::InvalidExpression(name)),
+ }
+ }
+}
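+
+// A minimal sketch of using `Operation::parse` directly on raw expression
+// bytes; the `encoding` value is assumed to come from the enclosing unit:
+//
+//     let buf = [0x23, 0x10]; // DW_OP_plus_uconst 16
+//     let mut r = EndianSlice::new(&buf, LittleEndian);
+//     let op = Operation::parse(&mut r, encoding)?;
+//     assert_eq!(op, Operation::PlusConstant { value: 16 });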
+
+#[derive(Debug)]
+enum EvaluationState<R: Reader> {
+ Start(Option<u64>),
+ Ready,
+ Error(Error),
+ Complete,
+ Waiting(EvaluationWaiting<R>),
+}
+
+#[derive(Debug)]
+enum EvaluationWaiting<R: Reader> {
+ Memory,
+ Register { offset: i64 },
+ FrameBase { offset: i64 },
+ Tls,
+ Cfa,
+ AtLocation,
+ EntryValue,
+ ParameterRef,
+ RelocatedAddress,
+ IndexedAddress,
+ TypedLiteral { value: R },
+ Convert,
+ Reinterpret,
+}
+
+/// The state of an `Evaluation` after evaluating a DWARF expression.
+/// The evaluation is either `Complete`, or it requires more data
+/// to continue, as described by the variant.
+#[derive(Debug, PartialEq)]
+pub enum EvaluationResult<R: Reader> {
+ /// The `Evaluation` is complete, and `Evaluation::result()` can be called.
+ Complete,
+ /// The `Evaluation` needs a value from memory to proceed further. Once the
+ /// caller determines what value to provide it should resume the `Evaluation`
+ /// by calling `Evaluation::resume_with_memory`.
+ RequiresMemory {
+ /// The address of the value required.
+ address: u64,
+ /// The size of the value required. This is guaranteed to be at most the
+ /// word size of the target architecture.
+ size: u8,
+ /// If not `None`, a target-specific address space value.
+ space: Option<u64>,
+ /// The DIE of the base type, or 0 to indicate the generic type.
+ base_type: UnitOffset<R::Offset>,
+ },
+ /// The `Evaluation` needs a value from a register to proceed further. Once
+ /// the caller determines what value to provide it should resume the
+ /// `Evaluation` by calling `Evaluation::resume_with_register`.
+ RequiresRegister {
+ /// The register number.
+ register: Register,
+ /// The DIE of the base type, or 0 to indicate the generic type.
+ base_type: UnitOffset<R::Offset>,
+ },
+ /// The `Evaluation` needs the frame base address to proceed further. Once
+ /// the caller determines what value to provide it should resume the
+ /// `Evaluation` by calling `Evaluation::resume_with_frame_base`. The frame
+ /// base address is the address produced by the location description in the
+ /// `DW_AT_frame_base` attribute of the current function.
+ RequiresFrameBase,
+ /// The `Evaluation` needs a value from TLS to proceed further. Once the
+ /// caller determines what value to provide it should resume the
+ /// `Evaluation` by calling `Evaluation::resume_with_tls`.
+ RequiresTls(u64),
+ /// The `Evaluation` needs the CFA to proceed further. Once the caller
+ /// determines what value to provide it should resume the `Evaluation` by
+ /// calling `Evaluation::resume_with_call_frame_cfa`.
+ RequiresCallFrameCfa,
+ /// The `Evaluation` needs the DWARF expression at the given location to
+ /// proceed further. Once the caller determines what value to provide it
+ /// should resume the `Evaluation` by calling
+ /// `Evaluation::resume_with_at_location`.
+ RequiresAtLocation(DieReference<R::Offset>),
+ /// The `Evaluation` needs the value produced by evaluating a DWARF
+ /// expression at the entry point of the current subprogram. Once the
+ /// caller determines what value to provide it should resume the
+ /// `Evaluation` by calling `Evaluation::resume_with_entry_value`.
+ RequiresEntryValue(Expression<R>),
+ /// The `Evaluation` needs the value of the parameter at the given location
+ /// in the current function's caller. Once the caller determines what value
+ /// to provide it should resume the `Evaluation` by calling
+ /// `Evaluation::resume_with_parameter_ref`.
+ RequiresParameterRef(UnitOffset<R::Offset>),
+ /// The `Evaluation` needs an address to be relocated to proceed further.
+ /// Once the caller determines what value to provide it should resume the
+ /// `Evaluation` by calling `Evaluation::resume_with_relocated_address`.
+ RequiresRelocatedAddress(u64),
+ /// The `Evaluation` needs an address from the `.debug_addr` section.
+ /// This address may also need to be relocated.
+ /// Once the caller determines what value to provide it should resume the
+ /// `Evaluation` by calling `Evaluation::resume_with_indexed_address`.
+ RequiresIndexedAddress {
+ /// The index of the address in the `.debug_addr` section,
+ /// relative to the `DW_AT_addr_base` of the compilation unit.
+ index: DebugAddrIndex<R::Offset>,
+ /// Whether the address also needs to be relocated.
+ relocate: bool,
+ },
+ /// The `Evaluation` needs the `ValueType` for the base type DIE at
+ /// the given unit offset. Once the caller determines what value to provide it
+ /// should resume the `Evaluation` by calling
+ /// `Evaluation::resume_with_base_type`.
+ RequiresBaseType(UnitOffset<R::Offset>),
+}
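+
+// A minimal sketch of handling a few of the variants above beyond the examples
+// elsewhere in this file; `read_memory` and `relocate` are assumed helpers
+// provided by the consumer, not part of this crate:
+//
+//     match result {
+//         EvaluationResult::RequiresMemory { address, size, .. } => {
+//             let value = read_memory(address, size);
+//             result = eval.resume_with_memory(value)?;
+//         }
+//         EvaluationResult::RequiresRelocatedAddress(addr) => {
+//             result = eval.resume_with_relocated_address(relocate(addr))?;
+//         }
+//         _ => unimplemented!(),
+//     }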
+
+/// The bytecode for a DWARF expression or location description.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Expression<R: Reader>(pub R);
+
+impl<R: Reader> Expression<R> {
+ /// Create an evaluation for this expression.
+ ///
+ /// The `encoding` is determined by the
+ /// [`CompilationUnitHeader`](struct.CompilationUnitHeader.html) or
+ /// [`TypeUnitHeader`](struct.TypeUnitHeader.html) that this expression
+ /// relates to.
+ ///
+ /// # Examples
+ /// ```rust,no_run
+ /// use gimli::Expression;
+ /// # let endian = gimli::LittleEndian;
+ /// # let debug_info = gimli::DebugInfo::from(gimli::EndianSlice::new(&[], endian));
+ /// # let unit = debug_info.units().next().unwrap().unwrap();
+ /// # let bytecode = gimli::EndianSlice::new(&[], endian);
+ /// let expression = gimli::Expression(bytecode);
+ /// let mut eval = expression.evaluation(unit.encoding());
+ /// let mut result = eval.evaluate().unwrap();
+ /// ```
+ #[cfg(feature = "read")]
+ #[inline]
+ pub fn evaluation(self, encoding: Encoding) -> Evaluation<R> {
+ Evaluation::new(self.0, encoding)
+ }
+
+ /// Return an iterator for the operations in the expression.
+ pub fn operations(self, encoding: Encoding) -> OperationIter<R> {
+ OperationIter {
+ input: self.0,
+ encoding,
+ }
+ }
+}
+
+/// An iterator for the operations in an expression.
+#[derive(Debug, Clone, Copy)]
+pub struct OperationIter<R: Reader> {
+ input: R,
+ encoding: Encoding,
+}
+
+impl<R: Reader> OperationIter<R> {
+ /// Read the next operation in an expression.
+ pub fn next(&mut self) -> Result<Option<Operation<R>>> {
+ if self.input.is_empty() {
+ return Ok(None);
+ }
+ match Operation::parse(&mut self.input, self.encoding) {
+ Ok(op) => Ok(Some(op)),
+ Err(e) => {
+ self.input.empty();
+ Err(e)
+ }
+ }
+ }
+
+ /// Return the current byte offset of the iterator.
+ pub fn offset_from(&self, expression: &Expression<R>) -> R::Offset {
+ self.input.offset_from(&expression.0)
+ }
+}
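+
+// A minimal sketch of walking an expression without evaluating it, assuming
+// `bytecode` and `encoding` come from the surrounding context:
+//
+//     let mut ops = Expression(bytecode).operations(encoding);
+//     while let Some(op) = ops.next()? {
+//         println!("{:?}", op);
+//     }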
+
+/// Specification of what storage should be used for [`Evaluation`].
+///
+#[cfg_attr(
+ feature = "read",
+ doc = "
+Normally you would only need to use [`StoreOnHeap`], which places the stacks and the results
+on the heap using [`Vec`]. This is the default storage type parameter for [`Evaluation`].
+"
+)]
+///
+/// If you need to prevent [`Evaluation`] from allocating memory, e.g. for signal safety,
+/// you can provide your own storage specification:
+/// ```rust,no_run
+/// # use gimli::*;
+/// # let bytecode = EndianSlice::new(&[], LittleEndian);
+/// # let encoding = unimplemented!();
+/// # let get_register_value = |_, _| Value::Generic(42);
+/// # let get_frame_base = || 0xdeadbeef;
+/// #
+/// struct StoreOnStack;
+///
+/// impl<R: Reader> EvaluationStorage<R> for StoreOnStack {
+/// type Stack = [Value; 64];
+/// type ExpressionStack = [(R, R); 4];
+/// type Result = [Piece<R>; 1];
+/// }
+///
+/// let mut eval = Evaluation::<_, StoreOnStack>::new_in(bytecode, encoding);
+/// let mut result = eval.evaluate().unwrap();
+/// while result != EvaluationResult::Complete {
+/// match result {
+/// EvaluationResult::RequiresRegister { register, base_type } => {
+/// let value = get_register_value(register, base_type);
+/// result = eval.resume_with_register(value).unwrap();
+/// },
+/// EvaluationResult::RequiresFrameBase => {
+/// let frame_base = get_frame_base();
+/// result = eval.resume_with_frame_base(frame_base).unwrap();
+/// },
+/// _ => unimplemented!(),
+/// };
+/// }
+///
+/// let result = eval.as_result();
+/// println!("{:?}", result);
+/// ```
+pub trait EvaluationStorage<R: Reader> {
+ /// The storage used for the evaluation stack.
+ type Stack: ArrayLike<Item = Value>;
+ /// The storage used for the expression stack.
+ type ExpressionStack: ArrayLike<Item = (R, R)>;
+ /// The storage used for the results.
+ type Result: ArrayLike<Item = Piece<R>>;
+}
+
+#[cfg(feature = "read")]
+impl<R: Reader> EvaluationStorage<R> for StoreOnHeap {
+ type Stack = Vec<Value>;
+ type ExpressionStack = Vec<(R, R)>;
+ type Result = Vec<Piece<R>>;
+}
+
+/// A DWARF expression evaluator.
+///
+/// # Usage
+/// A DWARF expression may require additional data to produce a final result,
+/// such as the value of a register or a memory location. Once initial setup
+/// is complete (e.g. `set_initial_value()`, `set_object_address()`), the
+/// consumer calls the `evaluate()` method. That returns an `EvaluationResult`,
+/// which is either `EvaluationResult::Complete` or a value indicating what
+/// data is needed to resume the `Evaluation`. The consumer is responsible for
+/// producing that data and resuming the computation with the correct method,
+/// as documented for `EvaluationResult`. Only once an `EvaluationResult::Complete`
+/// is returned can the consumer call `result()`.
+///
+/// This design allows the consumer of `Evaluation` to decide how and when to
+/// produce the required data and resume the computation. The `Evaluation` can
+/// be driven synchronously (as shown below) or by some asynchronous mechanism
+/// such as futures.
+///
+/// # Examples
+/// ```rust,no_run
+/// use gimli::{EndianSlice, Evaluation, EvaluationResult, Format, LittleEndian, Value};
+/// # let bytecode = EndianSlice::new(&[], LittleEndian);
+/// # let encoding = unimplemented!();
+/// # let get_register_value = |_, _| Value::Generic(42);
+/// # let get_frame_base = || 0xdeadbeef;
+///
+/// let mut eval = Evaluation::new(bytecode, encoding);
+/// let mut result = eval.evaluate().unwrap();
+/// while result != EvaluationResult::Complete {
+/// match result {
+/// EvaluationResult::RequiresRegister { register, base_type } => {
+/// let value = get_register_value(register, base_type);
+/// result = eval.resume_with_register(value).unwrap();
+/// },
+/// EvaluationResult::RequiresFrameBase => {
+/// let frame_base = get_frame_base();
+/// result = eval.resume_with_frame_base(frame_base).unwrap();
+/// },
+/// _ => unimplemented!(),
+/// };
+/// }
+///
+/// let result = eval.result();
+/// println!("{:?}", result);
+/// ```
+#[derive(Debug)]
+pub struct Evaluation<R: Reader, S: EvaluationStorage<R> = StoreOnHeap> {
+ bytecode: R,
+ encoding: Encoding,
+ object_address: Option<u64>,
+ max_iterations: Option<u32>,
+ iteration: u32,
+ state: EvaluationState<R>,
+
+ // Stack operations are done on word-sized values. We do all
+ // operations on 64-bit values, and then mask the results
+ // appropriately when popping.
+ addr_mask: u64,
+
+ // The stack.
+ stack: ArrayVec<S::Stack>,
+
+ // The next operation to decode and evaluate.
+ pc: R,
+
+ // If we see a DW_OP_call* operation, the previous PC and bytecode
+ // are stored here while evaluating the subroutine.
+ expression_stack: ArrayVec<S::ExpressionStack>,
+
+ result: ArrayVec<S::Result>,
+}
+
+#[cfg(feature = "read")]
+impl<R: Reader> Evaluation<R> {
+ /// Create a new DWARF expression evaluator.
+ ///
+ /// The new evaluator is created without an initial value, without
+ /// an object address, and without a maximum number of iterations.
+ pub fn new(bytecode: R, encoding: Encoding) -> Self {
+ Self::new_in(bytecode, encoding)
+ }
+
+ /// Get the result of this `Evaluation`.
+ ///
+ /// # Panics
+ /// Panics if this `Evaluation` has not been driven to completion.
+ pub fn result(self) -> Vec<Piece<R>> {
+ match self.state {
+ EvaluationState::Complete => self.result.into_vec(),
+ _ => {
+ panic!("Called `Evaluation::result` on an `Evaluation` that has not been completed")
+ }
+ }
+ }
+}
+
+impl<R: Reader, S: EvaluationStorage<R>> Evaluation<R, S> {
+ /// Create a new DWARF expression evaluator.
+ ///
+ /// The new evaluator is created without an initial value, without
+ /// an object address, and without a maximum number of iterations.
+ pub fn new_in(bytecode: R, encoding: Encoding) -> Self {
+ let pc = bytecode.clone();
+ Evaluation {
+ bytecode,
+ encoding,
+ object_address: None,
+ max_iterations: None,
+ iteration: 0,
+ state: EvaluationState::Start(None),
+ addr_mask: if encoding.address_size == 8 {
+ !0u64
+ } else {
+ (1 << (8 * u64::from(encoding.address_size))) - 1
+ },
+ stack: Default::default(),
+ expression_stack: Default::default(),
+ pc,
+ result: Default::default(),
+ }
+ }
+
+ /// Set an initial value to be pushed on the DWARF expression
+ /// evaluator's stack. This can be used in cases like
+ /// `DW_AT_vtable_elem_location`, which require a value on the
+ /// stack before evaluation commences. If no initial value is
+ /// set, and the expression uses an opcode requiring the initial
+ /// value, then evaluation will fail with an error.
+ ///
+ /// # Panics
+ /// Panics if `set_initial_value()` has already been called, or if
+ /// `evaluate()` has already been called.
+ pub fn set_initial_value(&mut self, value: u64) {
+ match self.state {
+ EvaluationState::Start(None) => {
+ self.state = EvaluationState::Start(Some(value));
+ }
+ _ => panic!(
+ "`Evaluation::set_initial_value` was called twice, or after evaluation began."
+ ),
+ };
+ }
+
+ /// Set the enclosing object's address, as used by
+ /// `DW_OP_push_object_address`. If no object address is set, and
+ /// the expression uses an opcode requiring the object address,
+ /// then evaluation will fail with an error.
+ pub fn set_object_address(&mut self, value: u64) {
+ self.object_address = Some(value);
+ }
+
+ /// Set the maximum number of iterations to be allowed by the
+ /// expression evaluator.
+ ///
+ /// An iteration corresponds approximately to the evaluation of a
+ /// single operation in an expression ("approximately" because the
+ /// implementation may allow two such operations in some cases).
+ /// The default is not to have a maximum; once set, it's not
+ /// possible to go back to this default state. This value can be
+ /// set to avoid denial of service attacks by bad DWARF bytecode.
+ pub fn set_max_iterations(&mut self, value: u32) {
+ self.max_iterations = Some(value);
+ }
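+
+ // A minimal sketch of typical setup before driving the evaluation; `bytecode`
+ // and `encoding` are assumed to be available, and the values are illustrative:
+ //
+ //     let mut eval = Evaluation::new(bytecode, encoding);
+ //     eval.set_object_address(0x2000);
+ //     eval.set_max_iterations(1_000);
+ //     let mut result = eval.evaluate()?;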
+
+ fn pop(&mut self) -> Result<Value> {
+ match self.stack.pop() {
+ Some(value) => Ok(value),
+ None => Err(Error::NotEnoughStackItems),
+ }
+ }
+
+ fn push(&mut self, value: Value) -> Result<()> {
+ self.stack.try_push(value).map_err(|_| Error::StackFull)
+ }
+
+ #[allow(clippy::cyclomatic_complexity)]
+ fn evaluate_one_operation(&mut self) -> Result<OperationEvaluationResult<R>> {
+ let operation = Operation::parse(&mut self.pc, self.encoding)?;
+
+ match operation {
+ Operation::Deref {
+ base_type,
+ size,
+ space,
+ } => {
+ let entry = self.pop()?;
+ let addr = entry.to_u64(self.addr_mask)?;
+ let addr_space = if space {
+ let entry = self.pop()?;
+ let value = entry.to_u64(self.addr_mask)?;
+ Some(value)
+ } else {
+ None
+ };
+ return Ok(OperationEvaluationResult::Waiting(
+ EvaluationWaiting::Memory,
+ EvaluationResult::RequiresMemory {
+ address: addr,
+ size,
+ space: addr_space,
+ base_type,
+ },
+ ));
+ }
+
+ Operation::Drop => {
+ self.pop()?;
+ }
+ Operation::Pick { index } => {
+ let len = self.stack.len();
+ let index = index as usize;
+ if index >= len {
+ return Err(Error::NotEnoughStackItems);
+ }
+ let value = self.stack[len - index - 1];
+ self.push(value)?;
+ }
+ Operation::Swap => {
+ let top = self.pop()?;
+ let next = self.pop()?;
+ self.push(top)?;
+ self.push(next)?;
+ }
+ Operation::Rot => {
+ let one = self.pop()?;
+ let two = self.pop()?;
+ let three = self.pop()?;
+ self.push(one)?;
+ self.push(three)?;
+ self.push(two)?;
+ }
+
+ Operation::Abs => {
+ let value = self.pop()?;
+ let result = value.abs(self.addr_mask)?;
+ self.push(result)?;
+ }
+ Operation::And => {
+ let rhs = self.pop()?;
+ let lhs = self.pop()?;
+ let result = lhs.and(rhs, self.addr_mask)?;
+ self.push(result)?;
+ }
+ Operation::Div => {
+ let rhs = self.pop()?;
+ let lhs = self.pop()?;
+ let result = lhs.div(rhs, self.addr_mask)?;
+ self.push(result)?;
+ }
+ Operation::Minus => {
+ let rhs = self.pop()?;
+ let lhs = self.pop()?;
+ let result = lhs.sub(rhs, self.addr_mask)?;
+ self.push(result)?;
+ }
+ Operation::Mod => {
+ let rhs = self.pop()?;
+ let lhs = self.pop()?;
+ let result = lhs.rem(rhs, self.addr_mask)?;
+ self.push(result)?;
+ }
+ Operation::Mul => {
+ let rhs = self.pop()?;
+ let lhs = self.pop()?;
+ let result = lhs.mul(rhs, self.addr_mask)?;
+ self.push(result)?;
+ }
+ Operation::Neg => {
+ let v = self.pop()?;
+ let result = v.neg(self.addr_mask)?;
+ self.push(result)?;
+ }
+ Operation::Not => {
+ let value = self.pop()?;
+ let result = value.not(self.addr_mask)?;
+ self.push(result)?;
+ }
+ Operation::Or => {
+ let rhs = self.pop()?;
+ let lhs = self.pop()?;
+ let result = lhs.or(rhs, self.addr_mask)?;
+ self.push(result)?;
+ }
+ Operation::Plus => {
+ let rhs = self.pop()?;
+ let lhs = self.pop()?;
+ let result = lhs.add(rhs, self.addr_mask)?;
+ self.push(result)?;
+ }
+ Operation::PlusConstant { value } => {
+ let lhs = self.pop()?;
+ let rhs = Value::from_u64(lhs.value_type(), value)?;
+ let result = lhs.add(rhs, self.addr_mask)?;
+ self.push(result)?;
+ }
+ Operation::Shl => {
+ let rhs = self.pop()?;
+ let lhs = self.pop()?;
+ let result = lhs.shl(rhs, self.addr_mask)?;
+ self.push(result)?;
+ }
+ Operation::Shr => {
+ let rhs = self.pop()?;
+ let lhs = self.pop()?;
+ let result = lhs.shr(rhs, self.addr_mask)?;
+ self.push(result)?;
+ }
+ Operation::Shra => {
+ let rhs = self.pop()?;
+ let lhs = self.pop()?;
+ let result = lhs.shra(rhs, self.addr_mask)?;
+ self.push(result)?;
+ }
+ Operation::Xor => {
+ let rhs = self.pop()?;
+ let lhs = self.pop()?;
+ let result = lhs.xor(rhs, self.addr_mask)?;
+ self.push(result)?;
+ }
+
+ Operation::Bra { target } => {
+ let entry = self.pop()?;
+ let v = entry.to_u64(self.addr_mask)?;
+ if v != 0 {
+ self.pc = compute_pc(&self.pc, &self.bytecode, target)?;
+ }
+ }
+
+ Operation::Eq => {
+ let rhs = self.pop()?;
+ let lhs = self.pop()?;
+ let result = lhs.eq(rhs, self.addr_mask)?;
+ self.push(result)?;
+ }
+ Operation::Ge => {
+ let rhs = self.pop()?;
+ let lhs = self.pop()?;
+ let result = lhs.ge(rhs, self.addr_mask)?;
+ self.push(result)?;
+ }
+ Operation::Gt => {
+ let rhs = self.pop()?;
+ let lhs = self.pop()?;
+ let result = lhs.gt(rhs, self.addr_mask)?;
+ self.push(result)?;
+ }
+ Operation::Le => {
+ let rhs = self.pop()?;
+ let lhs = self.pop()?;
+ let result = lhs.le(rhs, self.addr_mask)?;
+ self.push(result)?;
+ }
+ Operation::Lt => {
+ let rhs = self.pop()?;
+ let lhs = self.pop()?;
+ let result = lhs.lt(rhs, self.addr_mask)?;
+ self.push(result)?;
+ }
+ Operation::Ne => {
+ let rhs = self.pop()?;
+ let lhs = self.pop()?;
+ let result = lhs.ne(rhs, self.addr_mask)?;
+ self.push(result)?;
+ }
+
+ Operation::Skip { target } => {
+ self.pc = compute_pc(&self.pc, &self.bytecode, target)?;
+ }
+
+ Operation::UnsignedConstant { value } => {
+ self.push(Value::Generic(value))?;
+ }
+
+ Operation::SignedConstant { value } => {
+ self.push(Value::Generic(value as u64))?;
+ }
+
+ Operation::RegisterOffset {
+ register,
+ offset,
+ base_type,
+ } => {
+ return Ok(OperationEvaluationResult::Waiting(
+ EvaluationWaiting::Register { offset },
+ EvaluationResult::RequiresRegister {
+ register,
+ base_type,
+ },
+ ));
+ }
+
+ Operation::FrameOffset { offset } => {
+ return Ok(OperationEvaluationResult::Waiting(
+ EvaluationWaiting::FrameBase { offset },
+ EvaluationResult::RequiresFrameBase,
+ ));
+ }
+
+ Operation::Nop => {}
+
+ Operation::PushObjectAddress => {
+ if let Some(value) = self.object_address {
+ self.push(Value::Generic(value))?;
+ } else {
+ return Err(Error::InvalidPushObjectAddress);
+ }
+ }
+
+ Operation::Call { offset } => {
+ return Ok(OperationEvaluationResult::Waiting(
+ EvaluationWaiting::AtLocation,
+ EvaluationResult::RequiresAtLocation(offset),
+ ));
+ }
+
+ Operation::TLS => {
+ let entry = self.pop()?;
+ let index = entry.to_u64(self.addr_mask)?;
+ return Ok(OperationEvaluationResult::Waiting(
+ EvaluationWaiting::Tls,
+ EvaluationResult::RequiresTls(index),
+ ));
+ }
+
+ Operation::CallFrameCFA => {
+ return Ok(OperationEvaluationResult::Waiting(
+ EvaluationWaiting::Cfa,
+ EvaluationResult::RequiresCallFrameCfa,
+ ));
+ }
+
+ Operation::Register { register } => {
+ let location = Location::Register { register };
+ return Ok(OperationEvaluationResult::Complete { location });
+ }
+
+ Operation::ImplicitValue { ref data } => {
+ let location = Location::Bytes {
+ value: data.clone(),
+ };
+ return Ok(OperationEvaluationResult::Complete { location });
+ }
+
+ Operation::StackValue => {
+ let value = self.pop()?;
+ let location = Location::Value { value };
+ return Ok(OperationEvaluationResult::Complete { location });
+ }
+
+ Operation::ImplicitPointer { value, byte_offset } => {
+ let location = Location::ImplicitPointer { value, byte_offset };
+ return Ok(OperationEvaluationResult::Complete { location });
+ }
+
+ Operation::EntryValue { ref expression } => {
+ return Ok(OperationEvaluationResult::Waiting(
+ EvaluationWaiting::EntryValue,
+ EvaluationResult::RequiresEntryValue(Expression(expression.clone())),
+ ));
+ }
+
+ Operation::ParameterRef { offset } => {
+ return Ok(OperationEvaluationResult::Waiting(
+ EvaluationWaiting::ParameterRef,
+ EvaluationResult::RequiresParameterRef(offset),
+ ));
+ }
+
+ Operation::Address { address } => {
+ return Ok(OperationEvaluationResult::Waiting(
+ EvaluationWaiting::RelocatedAddress,
+ EvaluationResult::RequiresRelocatedAddress(address),
+ ));
+ }
+
+ Operation::AddressIndex { index } => {
+ return Ok(OperationEvaluationResult::Waiting(
+ EvaluationWaiting::IndexedAddress,
+ EvaluationResult::RequiresIndexedAddress {
+ index,
+ relocate: true,
+ },
+ ));
+ }
+
+ Operation::ConstantIndex { index } => {
+ return Ok(OperationEvaluationResult::Waiting(
+ EvaluationWaiting::IndexedAddress,
+ EvaluationResult::RequiresIndexedAddress {
+ index,
+ relocate: false,
+ },
+ ));
+ }
+
+ Operation::Piece {
+ size_in_bits,
+ bit_offset,
+ } => {
+ let location = if self.stack.is_empty() {
+ Location::Empty
+ } else {
+ let entry = self.pop()?;
+ let address = entry.to_u64(self.addr_mask)?;
+ Location::Address { address }
+ };
+ self.result
+ .try_push(Piece {
+ size_in_bits: Some(size_in_bits),
+ bit_offset,
+ location,
+ })
+ .map_err(|_| Error::StackFull)?;
+ return Ok(OperationEvaluationResult::Piece);
+ }
+
+ Operation::TypedLiteral { base_type, value } => {
+ return Ok(OperationEvaluationResult::Waiting(
+ EvaluationWaiting::TypedLiteral { value },
+ EvaluationResult::RequiresBaseType(base_type),
+ ));
+ }
+ Operation::Convert { base_type } => {
+ return Ok(OperationEvaluationResult::Waiting(
+ EvaluationWaiting::Convert,
+ EvaluationResult::RequiresBaseType(base_type),
+ ));
+ }
+ Operation::Reinterpret { base_type } => {
+ return Ok(OperationEvaluationResult::Waiting(
+ EvaluationWaiting::Reinterpret,
+ EvaluationResult::RequiresBaseType(base_type),
+ ));
+ }
+ Operation::WasmLocal { .. }
+ | Operation::WasmGlobal { .. }
+ | Operation::WasmStack { .. } => {
+ return Err(Error::UnsupportedEvaluation);
+ }
+ }
+
+ Ok(OperationEvaluationResult::Incomplete)
+ }
+
+ /// Get the result of this `Evaluation`.
+ ///
+ /// # Panics
+ /// Panics if this `Evaluation` has not been driven to completion.
+ pub fn as_result(&self) -> &[Piece<R>] {
+ match self.state {
+ EvaluationState::Complete => &self.result,
+ _ => {
+ panic!("Called `Evaluation::result` on an `Evaluation` that has not been completed")
+ }
+ }
+ }
+
+ /// Evaluate a DWARF expression. This method should only ever be called
+ /// once. If the returned `EvaluationResult` is not
+ /// `EvaluationResult::Complete`, the caller should provide the required
+ /// value and resume the evaluation by calling the appropriate resume_with
+ /// method on `Evaluation`.
+ pub fn evaluate(&mut self) -> Result<EvaluationResult<R>> {
+ match self.state {
+ EvaluationState::Start(initial_value) => {
+ if let Some(value) = initial_value {
+ self.push(Value::Generic(value))?;
+ }
+ self.state = EvaluationState::Ready;
+ }
+ EvaluationState::Ready => {}
+ EvaluationState::Error(err) => return Err(err),
+ EvaluationState::Complete => return Ok(EvaluationResult::Complete),
+ EvaluationState::Waiting(_) => panic!(),
+ };
+
+ match self.evaluate_internal() {
+ Ok(r) => Ok(r),
+ Err(e) => {
+ self.state = EvaluationState::Error(e);
+ Err(e)
+ }
+ }
+ }
+
+ /// Resume the `Evaluation` with the provided memory `value`. This will apply
+ /// the provided memory value to the evaluation and continue evaluating
+ /// opcodes until the evaluation is completed, reaches an error, or needs
+ /// more information again.
+ ///
+ /// # Panics
+ /// Panics if this `Evaluation` did not previously stop with `EvaluationResult::RequiresMemory`.
+ pub fn resume_with_memory(&mut self, value: Value) -> Result<EvaluationResult<R>> {
+ match self.state {
+ EvaluationState::Error(err) => return Err(err),
+ EvaluationState::Waiting(EvaluationWaiting::Memory) => {
+ self.push(value)?;
+ }
+ _ => panic!(
+ "Called `Evaluation::resume_with_memory` without a preceding `EvaluationResult::RequiresMemory`"
+ ),
+ };
+
+ self.evaluate_internal()
+ }
+
+ /// Resume the `Evaluation` with the provided `register` value. This will apply
+ /// the provided register value to the evaluation and continue evaluating
+ /// opcodes until the evaluation is completed, reaches an error, or needs
+ /// more information again.
+ ///
+ /// # Panics
+ /// Panics if this `Evaluation` did not previously stop with `EvaluationResult::RequiresRegister`.
+ pub fn resume_with_register(&mut self, value: Value) -> Result<EvaluationResult<R>> {
+ match self.state {
+ EvaluationState::Error(err) => return Err(err),
+ EvaluationState::Waiting(EvaluationWaiting::Register { offset }) => {
+ let offset = Value::from_u64(value.value_type(), offset as u64)?;
+ let value = value.add(offset, self.addr_mask)?;
+ self.push(value)?;
+ }
+ _ => panic!(
+ "Called `Evaluation::resume_with_register` without a preceding `EvaluationResult::RequiresRegister`"
+ ),
+ };
+
+ self.evaluate_internal()
+ }
+
+ /// Resume the `Evaluation` with the provided `frame_base`. This will
+ /// apply the provided frame base value to the evaluation and continue
+ /// evaluating opcodes until the evaluation is completed, reaches an error,
+ /// or needs more information again.
+ ///
+ /// # Panics
+ /// Panics if this `Evaluation` did not previously stop with `EvaluationResult::RequiresFrameBase`.
+ pub fn resume_with_frame_base(&mut self, frame_base: u64) -> Result<EvaluationResult<R>> {
+ match self.state {
+ EvaluationState::Error(err) => return Err(err),
+ EvaluationState::Waiting(EvaluationWaiting::FrameBase { offset }) => {
+ self.push(Value::Generic(frame_base.wrapping_add(offset as u64)))?;
+ }
+ _ => panic!(
+ "Called `Evaluation::resume_with_frame_base` without a preceding `EvaluationResult::RequiresFrameBase`"
+ ),
+ };
+
+ self.evaluate_internal()
+ }
+
+ /// Resume the `Evaluation` with the provided `value`. This will apply
+ /// the provided TLS value to the evaluation and continue evaluating
+ /// opcodes until the evaluation is completed, reaches an error, or needs
+ /// more information again.
+ ///
+ /// # Panics
+ /// Panics if this `Evaluation` did not previously stop with `EvaluationResult::RequiresTls`.
+ pub fn resume_with_tls(&mut self, value: u64) -> Result<EvaluationResult<R>> {
+ match self.state {
+ EvaluationState::Error(err) => return Err(err),
+ EvaluationState::Waiting(EvaluationWaiting::Tls) => {
+ self.push(Value::Generic(value))?;
+ }
+ _ => panic!(
+ "Called `Evaluation::resume_with_tls` without a preceding `EvaluationResult::RequiresTls`"
+ ),
+ };
+
+ self.evaluate_internal()
+ }
+
+ /// Resume the `Evaluation` with the provided `cfa`. This will
+ /// apply the provided CFA value to the evaluation and continue evaluating
+ /// opcodes until the evaluation is completed, reaches an error, or needs
+ /// more information again.
+ ///
+ /// # Panics
+ /// Panics if this `Evaluation` did not previously stop with `EvaluationResult::RequiresCallFrameCfa`.
+ pub fn resume_with_call_frame_cfa(&mut self, cfa: u64) -> Result<EvaluationResult<R>> {
+ match self.state {
+ EvaluationState::Error(err) => return Err(err),
+ EvaluationState::Waiting(EvaluationWaiting::Cfa) => {
+ self.push(Value::Generic(cfa))?;
+ }
+ _ => panic!(
+ "Called `Evaluation::resume_with_call_frame_cfa` without a preceding `EvaluationResult::RequiresCallFrameCfa`"
+ ),
+ };
+
+ self.evaluate_internal()
+ }
+
+ /// Resume the `Evaluation` with the provided `bytes`. This will
+ /// continue processing the evaluation with the new expression provided
+ /// until the evaluation is completed, reaches an error, or needs more
+ /// information again.
+ ///
+ /// # Panics
+ /// Panics if this `Evaluation` did not previously stop with `EvaluationResult::RequiresAtLocation`.
+ pub fn resume_with_at_location(&mut self, mut bytes: R) -> Result<EvaluationResult<R>> {
+ match self.state {
+ EvaluationState::Error(err) => return Err(err),
+ EvaluationState::Waiting(EvaluationWaiting::AtLocation) => {
+ if !bytes.is_empty() {
+ let mut pc = bytes.clone();
+ mem::swap(&mut pc, &mut self.pc);
+ mem::swap(&mut bytes, &mut self.bytecode);
+ self.expression_stack.try_push((pc, bytes)).map_err(|_| Error::StackFull)?;
+ }
+ }
+ _ => panic!(
+ "Called `Evaluation::resume_with_at_location` without a precedeing `EvaluationResult::RequiresAtLocation`"
+ ),
+ };
+
+ self.evaluate_internal()
+ }
+
+ /// Resume the `Evaluation` with the provided `entry_value`. This will
+ /// apply the provided entry value to the evaluation and continue evaluating
+ /// opcodes until the evaluation is completed, reaches an error, or needs
+ /// more information again.
+ ///
+ /// # Panics
+ /// Panics if this `Evaluation` did not previously stop with `EvaluationResult::RequiresEntryValue`.
+ pub fn resume_with_entry_value(&mut self, entry_value: Value) -> Result<EvaluationResult<R>> {
+ match self.state {
+ EvaluationState::Error(err) => return Err(err),
+ EvaluationState::Waiting(EvaluationWaiting::EntryValue) => {
+ self.push(entry_value)?;
+ }
+ _ => panic!(
+ "Called `Evaluation::resume_with_entry_value` without a preceding `EvaluationResult::RequiresEntryValue`"
+ ),
+ };
+
+ self.evaluate_internal()
+ }
+
+ /// Resume the `Evaluation` with the provided `parameter_value`. This will
+ /// apply the provided parameter value to the evaluation and continue evaluating
+ /// opcodes until the evaluation is completed, reaches an error, or needs
+ /// more information again.
+ ///
+ /// # Panics
+ /// Panics if this `Evaluation` did not previously stop with `EvaluationResult::RequiresParameterRef`.
+ pub fn resume_with_parameter_ref(
+ &mut self,
+ parameter_value: u64,
+ ) -> Result<EvaluationResult<R>> {
+ match self.state {
+ EvaluationState::Error(err) => return Err(err),
+ EvaluationState::Waiting(EvaluationWaiting::ParameterRef) => {
+ self.push(Value::Generic(parameter_value))?;
+ }
+ _ => panic!(
+ "Called `Evaluation::resume_with_parameter_ref` without a preceding `EvaluationResult::RequiresParameterRef`"
+ ),
+ };
+
+ self.evaluate_internal()
+ }
+
+ /// Resume the `Evaluation` with the provided relocated `address`. This will use the
+ /// provided relocated address for the operation that required it, and continue evaluating
+ /// opcodes until the evaluation is completed, reaches an error, or needs
+ /// more information again.
+ ///
+ /// # Panics
+ /// Panics if this `Evaluation` did not previously stop with
+ /// `EvaluationResult::RequiresRelocatedAddress`.
+ pub fn resume_with_relocated_address(&mut self, address: u64) -> Result<EvaluationResult<R>> {
+ match self.state {
+ EvaluationState::Error(err) => return Err(err),
+ EvaluationState::Waiting(EvaluationWaiting::RelocatedAddress) => {
+ self.push(Value::Generic(address))?;
+ }
+ _ => panic!(
+ "Called `Evaluation::resume_with_relocated_address` without a preceding `EvaluationResult::RequiresRelocatedAddress`"
+ ),
+ };
+
+ self.evaluate_internal()
+ }
+
+ /// Resume the `Evaluation` with the provided indexed `address`. This will use the
+ /// provided indexed address for the operation that required it, and continue evaluating
+ /// opcodes until the evaluation is completed, reaches an error, or needs
+ /// more information again.
+ ///
+ /// # Panics
+ /// Panics if this `Evaluation` did not previously stop with
+ /// `EvaluationResult::RequiresIndexedAddress`.
+ pub fn resume_with_indexed_address(&mut self, address: u64) -> Result<EvaluationResult<R>> {
+ match self.state {
+ EvaluationState::Error(err) => return Err(err),
+ EvaluationState::Waiting(EvaluationWaiting::IndexedAddress) => {
+ self.push(Value::Generic(address))?;
+ }
+ _ => panic!(
+ "Called `Evaluation::resume_with_indexed_address` without a preceding `EvaluationResult::RequiresIndexedAddress`"
+ ),
+ };
+
+ self.evaluate_internal()
+ }
+
+ /// Resume the `Evaluation` with the provided `base_type`. This will use the
+ /// provided base type for the operation that required it, and continue evaluating
+ /// opcodes until the evaluation is completed, reaches an error, or needs
+ /// more information again.
+ ///
+ /// # Panics
+ /// Panics if this `Evaluation` did not previously stop with `EvaluationResult::RequiresBaseType`.
+ pub fn resume_with_base_type(&mut self, base_type: ValueType) -> Result<EvaluationResult<R>> {
+ let value = match self.state {
+ EvaluationState::Error(err) => return Err(err),
+ EvaluationState::Waiting(EvaluationWaiting::TypedLiteral { ref value }) => {
+ Value::parse(base_type, value.clone())?
+ }
+ EvaluationState::Waiting(EvaluationWaiting::Convert) => {
+ let entry = self.pop()?;
+ entry.convert(base_type, self.addr_mask)?
+ }
+ EvaluationState::Waiting(EvaluationWaiting::Reinterpret) => {
+ let entry = self.pop()?;
+ entry.reinterpret(base_type, self.addr_mask)?
+ }
+ _ => panic!(
+ "Called `Evaluation::resume_with_base_type` without a preceding `EvaluationResult::RequiresBaseType`"
+ ),
+ };
+ self.push(value)?;
+ self.evaluate_internal()
+ }
+
+ fn end_of_expression(&mut self) -> bool {
+ while self.pc.is_empty() {
+ match self.expression_stack.pop() {
+ Some((newpc, newbytes)) => {
+ self.pc = newpc;
+ self.bytecode = newbytes;
+ }
+ None => return true,
+ }
+ }
+ false
+ }
+
+ fn evaluate_internal(&mut self) -> Result<EvaluationResult<R>> {
+ while !self.end_of_expression() {
+ self.iteration += 1;
+ if let Some(max_iterations) = self.max_iterations {
+ if self.iteration > max_iterations {
+ return Err(Error::TooManyIterations);
+ }
+ }
+
+ let op_result = self.evaluate_one_operation()?;
+ match op_result {
+ OperationEvaluationResult::Piece => {}
+ OperationEvaluationResult::Incomplete => {
+ if self.end_of_expression() && !self.result.is_empty() {
+ // We saw a piece earlier and then some
+ // unterminated piece. It's not clear this is
+ // well-defined.
+ return Err(Error::InvalidPiece);
+ }
+ }
+ OperationEvaluationResult::Complete { location } => {
+ if self.end_of_expression() {
+ if !self.result.is_empty() {
+ // We saw a piece earlier and then some
+ // unterminated piece. It's not clear this is
+ // well-defined.
+ return Err(Error::InvalidPiece);
+ }
+ self.result
+ .try_push(Piece {
+ size_in_bits: None,
+ bit_offset: None,
+ location,
+ })
+ .map_err(|_| Error::StackFull)?;
+ } else {
+ // If there are more operations, then the next operation must
+ // be a Piece.
+ match Operation::parse(&mut self.pc, self.encoding)? {
+ Operation::Piece {
+ size_in_bits,
+ bit_offset,
+ } => {
+ self.result
+ .try_push(Piece {
+ size_in_bits: Some(size_in_bits),
+ bit_offset,
+ location,
+ })
+ .map_err(|_| Error::StackFull)?;
+ }
+ _ => {
+ let value =
+ self.bytecode.len().into_u64() - self.pc.len().into_u64() - 1;
+ return Err(Error::InvalidExpressionTerminator(value));
+ }
+ }
+ }
+ }
+ OperationEvaluationResult::Waiting(waiting, result) => {
+ self.state = EvaluationState::Waiting(waiting);
+ return Ok(result);
+ }
+ };
+ }
+
+ // If no pieces have been seen, use the stack top as the
+ // result.
+ if self.result.is_empty() {
+ let entry = self.pop()?;
+ let addr = entry.to_u64(self.addr_mask)?;
+ self.result
+ .try_push(Piece {
+ size_in_bits: None,
+ bit_offset: None,
+ location: Location::Address { address: addr },
+ })
+ .map_err(|_| Error::StackFull)?;
+ }
+
+ self.state = EvaluationState::Complete;
+ Ok(EvaluationResult::Complete)
+ }
+}
+
+#[cfg(test)]
+// Tests require leb128::write.
+#[cfg(feature = "write")]
+mod tests {
+ use super::*;
+ use crate::common::Format;
+ use crate::constants;
+ use crate::endianity::LittleEndian;
+ use crate::leb128;
+ use crate::read::{EndianSlice, Error, Result, UnitOffset};
+ use crate::test_util::GimliSectionMethods;
+ use core::usize;
+ use test_assembler::{Endian, Section};
+
+ fn encoding4() -> Encoding {
+ Encoding {
+ format: Format::Dwarf32,
+ version: 4,
+ address_size: 4,
+ }
+ }
+
+ fn encoding8() -> Encoding {
+ Encoding {
+ format: Format::Dwarf64,
+ version: 4,
+ address_size: 8,
+ }
+ }
+
+ #[test]
+ fn test_compute_pc() {
+ // Contents don't matter for this test, just length.
+ let bytes = [0, 1, 2, 3, 4];
+ let bytecode = &bytes[..];
+ let ebuf = &EndianSlice::new(bytecode, LittleEndian);
+
+ assert_eq!(compute_pc(ebuf, ebuf, 0), Ok(*ebuf));
+ assert_eq!(
+ compute_pc(ebuf, ebuf, -1),
+ Err(Error::BadBranchTarget(usize::MAX as u64))
+ );
+ assert_eq!(compute_pc(ebuf, ebuf, 5), Ok(ebuf.range_from(5..)));
+ assert_eq!(
+ compute_pc(&ebuf.range_from(3..), ebuf, -2),
+ Ok(ebuf.range_from(1..))
+ );
+ assert_eq!(
+ compute_pc(&ebuf.range_from(2..), ebuf, 2),
+ Ok(ebuf.range_from(4..))
+ );
+ }
+
+ fn check_op_parse_simple<'input>(
+ input: &'input [u8],
+ expect: &Operation<EndianSlice<'input, LittleEndian>>,
+ encoding: Encoding,
+ ) {
+ let buf = EndianSlice::new(input, LittleEndian);
+ let mut pc = buf;
+ let value = Operation::parse(&mut pc, encoding);
+ match value {
+ Ok(val) => {
+ assert_eq!(val, *expect);
+ assert_eq!(pc.len(), 0);
+ }
+ _ => panic!("Unexpected result"),
+ }
+ }
+
+ fn check_op_parse_eof(input: &[u8], encoding: Encoding) {
+ let buf = EndianSlice::new(input, LittleEndian);
+ let mut pc = buf;
+ match Operation::parse(&mut pc, encoding) {
+ Err(Error::UnexpectedEof(id)) => {
+ assert!(buf.lookup_offset_id(id).is_some());
+ }
+
+ _ => panic!("Unexpected result"),
+ }
+ }
+
+ fn check_op_parse<F>(
+ input: F,
+ expect: &Operation<EndianSlice<LittleEndian>>,
+ encoding: Encoding,
+ ) where
+ F: Fn(Section) -> Section,
+ {
+ let input = input(Section::with_endian(Endian::Little))
+ .get_contents()
+ .unwrap();
+ for i in 1..input.len() {
+ check_op_parse_eof(&input[..i], encoding);
+ }
+ check_op_parse_simple(&input, expect, encoding);
+ }
+
+ #[test]
+ fn test_op_parse_onebyte() {
+ // Doesn't matter for this test.
+ let encoding = encoding4();
+
+ // Test all single-byte opcodes.
+ #[rustfmt::skip]
+ let inputs = [
+ (
+ constants::DW_OP_deref,
+ Operation::Deref {
+ base_type: generic_type(),
+ size: encoding.address_size,
+ space: false,
+ },
+ ),
+ (constants::DW_OP_dup, Operation::Pick { index: 0 }),
+ (constants::DW_OP_drop, Operation::Drop),
+ (constants::DW_OP_over, Operation::Pick { index: 1 }),
+ (constants::DW_OP_swap, Operation::Swap),
+ (constants::DW_OP_rot, Operation::Rot),
+ (
+ constants::DW_OP_xderef,
+ Operation::Deref {
+ base_type: generic_type(),
+ size: encoding.address_size,
+ space: true,
+ },
+ ),
+ (constants::DW_OP_abs, Operation::Abs),
+ (constants::DW_OP_and, Operation::And),
+ (constants::DW_OP_div, Operation::Div),
+ (constants::DW_OP_minus, Operation::Minus),
+ (constants::DW_OP_mod, Operation::Mod),
+ (constants::DW_OP_mul, Operation::Mul),
+ (constants::DW_OP_neg, Operation::Neg),
+ (constants::DW_OP_not, Operation::Not),
+ (constants::DW_OP_or, Operation::Or),
+ (constants::DW_OP_plus, Operation::Plus),
+ (constants::DW_OP_shl, Operation::Shl),
+ (constants::DW_OP_shr, Operation::Shr),
+ (constants::DW_OP_shra, Operation::Shra),
+ (constants::DW_OP_xor, Operation::Xor),
+ (constants::DW_OP_eq, Operation::Eq),
+ (constants::DW_OP_ge, Operation::Ge),
+ (constants::DW_OP_gt, Operation::Gt),
+ (constants::DW_OP_le, Operation::Le),
+ (constants::DW_OP_lt, Operation::Lt),
+ (constants::DW_OP_ne, Operation::Ne),
+ (constants::DW_OP_lit0, Operation::UnsignedConstant { value: 0 }),
+ (constants::DW_OP_lit1, Operation::UnsignedConstant { value: 1 }),
+ (constants::DW_OP_lit2, Operation::UnsignedConstant { value: 2 }),
+ (constants::DW_OP_lit3, Operation::UnsignedConstant { value: 3 }),
+ (constants::DW_OP_lit4, Operation::UnsignedConstant { value: 4 }),
+ (constants::DW_OP_lit5, Operation::UnsignedConstant { value: 5 }),
+ (constants::DW_OP_lit6, Operation::UnsignedConstant { value: 6 }),
+ (constants::DW_OP_lit7, Operation::UnsignedConstant { value: 7 }),
+ (constants::DW_OP_lit8, Operation::UnsignedConstant { value: 8 }),
+ (constants::DW_OP_lit9, Operation::UnsignedConstant { value: 9 }),
+ (constants::DW_OP_lit10, Operation::UnsignedConstant { value: 10 }),
+ (constants::DW_OP_lit11, Operation::UnsignedConstant { value: 11 }),
+ (constants::DW_OP_lit12, Operation::UnsignedConstant { value: 12 }),
+ (constants::DW_OP_lit13, Operation::UnsignedConstant { value: 13 }),
+ (constants::DW_OP_lit14, Operation::UnsignedConstant { value: 14 }),
+ (constants::DW_OP_lit15, Operation::UnsignedConstant { value: 15 }),
+ (constants::DW_OP_lit16, Operation::UnsignedConstant { value: 16 }),
+ (constants::DW_OP_lit17, Operation::UnsignedConstant { value: 17 }),
+ (constants::DW_OP_lit18, Operation::UnsignedConstant { value: 18 }),
+ (constants::DW_OP_lit19, Operation::UnsignedConstant { value: 19 }),
+ (constants::DW_OP_lit20, Operation::UnsignedConstant { value: 20 }),
+ (constants::DW_OP_lit21, Operation::UnsignedConstant { value: 21 }),
+ (constants::DW_OP_lit22, Operation::UnsignedConstant { value: 22 }),
+ (constants::DW_OP_lit23, Operation::UnsignedConstant { value: 23 }),
+ (constants::DW_OP_lit24, Operation::UnsignedConstant { value: 24 }),
+ (constants::DW_OP_lit25, Operation::UnsignedConstant { value: 25 }),
+ (constants::DW_OP_lit26, Operation::UnsignedConstant { value: 26 }),
+ (constants::DW_OP_lit27, Operation::UnsignedConstant { value: 27 }),
+ (constants::DW_OP_lit28, Operation::UnsignedConstant { value: 28 }),
+ (constants::DW_OP_lit29, Operation::UnsignedConstant { value: 29 }),
+ (constants::DW_OP_lit30, Operation::UnsignedConstant { value: 30 }),
+ (constants::DW_OP_lit31, Operation::UnsignedConstant { value: 31 }),
+ (constants::DW_OP_reg0, Operation::Register { register: Register(0) }),
+ (constants::DW_OP_reg1, Operation::Register { register: Register(1) }),
+ (constants::DW_OP_reg2, Operation::Register { register: Register(2) }),
+ (constants::DW_OP_reg3, Operation::Register { register: Register(3) }),
+ (constants::DW_OP_reg4, Operation::Register { register: Register(4) }),
+ (constants::DW_OP_reg5, Operation::Register { register: Register(5) }),
+ (constants::DW_OP_reg6, Operation::Register { register: Register(6) }),
+ (constants::DW_OP_reg7, Operation::Register { register: Register(7) }),
+ (constants::DW_OP_reg8, Operation::Register { register: Register(8) }),
+ (constants::DW_OP_reg9, Operation::Register { register: Register(9) }),
+ (constants::DW_OP_reg10, Operation::Register { register: Register(10) }),
+ (constants::DW_OP_reg11, Operation::Register { register: Register(11) }),
+ (constants::DW_OP_reg12, Operation::Register { register: Register(12) }),
+ (constants::DW_OP_reg13, Operation::Register { register: Register(13) }),
+ (constants::DW_OP_reg14, Operation::Register { register: Register(14) }),
+ (constants::DW_OP_reg15, Operation::Register { register: Register(15) }),
+ (constants::DW_OP_reg16, Operation::Register { register: Register(16) }),
+ (constants::DW_OP_reg17, Operation::Register { register: Register(17) }),
+ (constants::DW_OP_reg18, Operation::Register { register: Register(18) }),
+ (constants::DW_OP_reg19, Operation::Register { register: Register(19) }),
+ (constants::DW_OP_reg20, Operation::Register { register: Register(20) }),
+ (constants::DW_OP_reg21, Operation::Register { register: Register(21) }),
+ (constants::DW_OP_reg22, Operation::Register { register: Register(22) }),
+ (constants::DW_OP_reg23, Operation::Register { register: Register(23) }),
+ (constants::DW_OP_reg24, Operation::Register { register: Register(24) }),
+ (constants::DW_OP_reg25, Operation::Register { register: Register(25) }),
+ (constants::DW_OP_reg26, Operation::Register { register: Register(26) }),
+ (constants::DW_OP_reg27, Operation::Register { register: Register(27) }),
+ (constants::DW_OP_reg28, Operation::Register { register: Register(28) }),
+ (constants::DW_OP_reg29, Operation::Register { register: Register(29) }),
+ (constants::DW_OP_reg30, Operation::Register { register: Register(30) }),
+ (constants::DW_OP_reg31, Operation::Register { register: Register(31) }),
+ (constants::DW_OP_nop, Operation::Nop),
+ (constants::DW_OP_push_object_address, Operation::PushObjectAddress),
+ (constants::DW_OP_form_tls_address, Operation::TLS),
+ (constants::DW_OP_GNU_push_tls_address, Operation::TLS),
+ (constants::DW_OP_call_frame_cfa, Operation::CallFrameCFA),
+ (constants::DW_OP_stack_value, Operation::StackValue),
+ ];
+
+ let input = [];
+ check_op_parse_eof(&input[..], encoding);
+
+ for item in inputs.iter() {
+ let (opcode, ref result) = *item;
+ check_op_parse(|s| s.D8(opcode.0), result, encoding);
+ }
+ }
+
+ #[test]
+ fn test_op_parse_twobyte() {
+ // Doesn't matter for this test.
+ let encoding = encoding4();
+
+ let inputs = [
+ (
+ constants::DW_OP_const1u,
+ 23,
+ Operation::UnsignedConstant { value: 23 },
+ ),
+ (
+ constants::DW_OP_const1s,
+ (-23i8) as u8,
+ Operation::SignedConstant { value: -23 },
+ ),
+ (constants::DW_OP_pick, 7, Operation::Pick { index: 7 }),
+ (
+ constants::DW_OP_deref_size,
+ 19,
+ Operation::Deref {
+ base_type: generic_type(),
+ size: 19,
+ space: false,
+ },
+ ),
+ (
+ constants::DW_OP_xderef_size,
+ 19,
+ Operation::Deref {
+ base_type: generic_type(),
+ size: 19,
+ space: true,
+ },
+ ),
+ ];
+
+ for item in inputs.iter() {
+ let (opcode, arg, ref result) = *item;
+ check_op_parse(|s| s.D8(opcode.0).D8(arg), result, encoding);
+ }
+ }
+
+ #[test]
+ fn test_op_parse_threebyte() {
+ // Doesn't matter for this test.
+ let encoding = encoding4();
+
+ // DW_OP_bra and DW_OP_skip are also 3-byte opcodes; their parsing is
+ // covered here, while their branching behavior is exercised separately
+ // in the evaluation tests below.
+ let inputs = [
+ (
+ constants::DW_OP_const2u,
+ 23,
+ Operation::UnsignedConstant { value: 23 },
+ ),
+ (
+ constants::DW_OP_const2s,
+ (-23i16) as u16,
+ Operation::SignedConstant { value: -23 },
+ ),
+ (
+ constants::DW_OP_call2,
+ 1138,
+ Operation::Call {
+ offset: DieReference::UnitRef(UnitOffset(1138)),
+ },
+ ),
+ (
+ constants::DW_OP_bra,
+ (-23i16) as u16,
+ Operation::Bra { target: -23 },
+ ),
+ (
+ constants::DW_OP_skip,
+ (-23i16) as u16,
+ Operation::Skip { target: -23 },
+ ),
+ ];
+
+ for item in inputs.iter() {
+ let (opcode, arg, ref result) = *item;
+ check_op_parse(|s| s.D8(opcode.0).L16(arg), result, encoding);
+ }
+ }
+
+ #[test]
+ fn test_op_parse_fivebyte() {
+ // There are some tests here that depend on address size.
+ let encoding = encoding4();
+
+ let inputs = [
+ (
+ constants::DW_OP_addr,
+ 0x1234_5678,
+ Operation::Address {
+ address: 0x1234_5678,
+ },
+ ),
+ (
+ constants::DW_OP_const4u,
+ 0x1234_5678,
+ Operation::UnsignedConstant { value: 0x1234_5678 },
+ ),
+ (
+ constants::DW_OP_const4s,
+ (-23i32) as u32,
+ Operation::SignedConstant { value: -23 },
+ ),
+ (
+ constants::DW_OP_call4,
+ 0x1234_5678,
+ Operation::Call {
+ offset: DieReference::UnitRef(UnitOffset(0x1234_5678)),
+ },
+ ),
+ (
+ constants::DW_OP_call_ref,
+ 0x1234_5678,
+ Operation::Call {
+ offset: DieReference::DebugInfoRef(DebugInfoOffset(0x1234_5678)),
+ },
+ ),
+ ];
+
+ for item in inputs.iter() {
+ let (op, arg, ref expect) = *item;
+ check_op_parse(|s| s.D8(op.0).L32(arg), expect, encoding);
+ }
+ }
+
+ #[test]
+ #[cfg(target_pointer_width = "64")]
+ fn test_op_parse_ninebyte() {
+ // There are some tests here that depend on address size.
+ let encoding = encoding8();
+
+ let inputs = [
+ (
+ constants::DW_OP_addr,
+ 0x1234_5678_1234_5678,
+ Operation::Address {
+ address: 0x1234_5678_1234_5678,
+ },
+ ),
+ (
+ constants::DW_OP_const8u,
+ 0x1234_5678_1234_5678,
+ Operation::UnsignedConstant {
+ value: 0x1234_5678_1234_5678,
+ },
+ ),
+ (
+ constants::DW_OP_const8s,
+ (-23i64) as u64,
+ Operation::SignedConstant { value: -23 },
+ ),
+ (
+ constants::DW_OP_call_ref,
+ 0x1234_5678_1234_5678,
+ Operation::Call {
+ offset: DieReference::DebugInfoRef(DebugInfoOffset(0x1234_5678_1234_5678)),
+ },
+ ),
+ ];
+
+ for item in inputs.iter() {
+ let (op, arg, ref expect) = *item;
+ check_op_parse(|s| s.D8(op.0).L64(arg), expect, encoding);
+ }
+ }
+
+ #[test]
+ fn test_op_parse_sleb() {
+ // Doesn't matter for this test.
+ let encoding = encoding4();
+
+ let values = [
+ -1i64,
+ 0,
+ 1,
+ 0x100,
+ 0x1eee_eeee,
+ 0x7fff_ffff_ffff_ffff,
+ -0x100,
+ -0x1eee_eeee,
+ -0x7fff_ffff_ffff_ffff,
+ ];
+ for value in values.iter() {
+ let mut inputs = vec![
+ (
+ constants::DW_OP_consts.0,
+ Operation::SignedConstant { value: *value },
+ ),
+ (
+ constants::DW_OP_fbreg.0,
+ Operation::FrameOffset { offset: *value },
+ ),
+ ];
+
+ for i in 0..32 {
+ inputs.push((
+ constants::DW_OP_breg0.0 + i,
+ Operation::RegisterOffset {
+ register: Register(i.into()),
+ offset: *value,
+ base_type: UnitOffset(0),
+ },
+ ));
+ }
+
+ for item in inputs.iter() {
+ let (op, ref expect) = *item;
+ check_op_parse(|s| s.D8(op).sleb(*value), expect, encoding);
+ }
+ }
+ }
+
+ #[test]
+ fn test_op_parse_uleb() {
+ // Doesn't matter for this test.
+ let encoding = encoding4();
+
+ let values = [
+ 0,
+ 1,
+ 0x100,
+ (!0u16).into(),
+ 0x1eee_eeee,
+ 0x7fff_ffff_ffff_ffff,
+ !0u64,
+ ];
+ for value in values.iter() {
+ let mut inputs = vec![
+ (
+ constants::DW_OP_constu,
+ Operation::UnsignedConstant { value: *value },
+ ),
+ (
+ constants::DW_OP_plus_uconst,
+ Operation::PlusConstant { value: *value },
+ ),
+ ];
+
+ if *value <= (!0u16).into() {
+ inputs.push((
+ constants::DW_OP_regx,
+ Operation::Register {
+ register: Register::from_u64(*value).unwrap(),
+ },
+ ));
+ }
+
+ if *value <= (!0u32).into() {
+ inputs.extend(&[
+ (
+ constants::DW_OP_addrx,
+ Operation::AddressIndex {
+ index: DebugAddrIndex(*value as usize),
+ },
+ ),
+ (
+ constants::DW_OP_constx,
+ Operation::ConstantIndex {
+ index: DebugAddrIndex(*value as usize),
+ },
+ ),
+ ]);
+ }
+
+ // FIXME
+ if *value < !0u64 / 8 {
+ inputs.push((
+ constants::DW_OP_piece,
+ Operation::Piece {
+ size_in_bits: 8 * value,
+ bit_offset: None,
+ },
+ ));
+ }
+
+ for item in inputs.iter() {
+ let (op, ref expect) = *item;
+ let input = Section::with_endian(Endian::Little)
+ .D8(op.0)
+ .uleb(*value)
+ .get_contents()
+ .unwrap();
+ check_op_parse_simple(&input, expect, encoding);
+ }
+ }
+ }
+
+ #[test]
+ fn test_op_parse_bregx() {
+ // Doesn't matter for this test.
+ let encoding = encoding4();
+
+ let uvalues = [0, 1, 0x100, !0u16];
+ let svalues = [
+ -1i64,
+ 0,
+ 1,
+ 0x100,
+ 0x1eee_eeee,
+ 0x7fff_ffff_ffff_ffff,
+ -0x100,
+ -0x1eee_eeee,
+ -0x7fff_ffff_ffff_ffff,
+ ];
+
+ for v1 in uvalues.iter() {
+ for v2 in svalues.iter() {
+ check_op_parse(
+ |s| s.D8(constants::DW_OP_bregx.0).uleb((*v1).into()).sleb(*v2),
+ &Operation::RegisterOffset {
+ register: Register(*v1),
+ offset: *v2,
+ base_type: UnitOffset(0),
+ },
+ encoding,
+ );
+ }
+ }
+ }
+
+ #[test]
+ fn test_op_parse_bit_piece() {
+ // Doesn't matter for this test.
+ let encoding = encoding4();
+
+ let values = [0, 1, 0x100, 0x1eee_eeee, 0x7fff_ffff_ffff_ffff, !0u64];
+
+ for v1 in values.iter() {
+ for v2 in values.iter() {
+ let input = Section::with_endian(Endian::Little)
+ .D8(constants::DW_OP_bit_piece.0)
+ .uleb(*v1)
+ .uleb(*v2)
+ .get_contents()
+ .unwrap();
+ check_op_parse_simple(
+ &input,
+ &Operation::Piece {
+ size_in_bits: *v1,
+ bit_offset: Some(*v2),
+ },
+ encoding,
+ );
+ }
+ }
+ }
+
+ #[test]
+ fn test_op_parse_implicit_value() {
+ // Doesn't matter for this test.
+ let encoding = encoding4();
+
+ let data = b"hello";
+
+ check_op_parse(
+ |s| {
+ s.D8(constants::DW_OP_implicit_value.0)
+ .uleb(data.len() as u64)
+ .append_bytes(&data[..])
+ },
+ &Operation::ImplicitValue {
+ data: EndianSlice::new(&data[..], LittleEndian),
+ },
+ encoding,
+ );
+ }
+
+ #[test]
+ fn test_op_parse_const_type() {
+ // Doesn't matter for this test.
+ let encoding = encoding4();
+
+ let data = b"hello";
+
+ check_op_parse(
+ |s| {
+ s.D8(constants::DW_OP_const_type.0)
+ .uleb(100)
+ .D8(data.len() as u8)
+ .append_bytes(&data[..])
+ },
+ &Operation::TypedLiteral {
+ base_type: UnitOffset(100),
+ value: EndianSlice::new(&data[..], LittleEndian),
+ },
+ encoding,
+ );
+ check_op_parse(
+ |s| {
+ s.D8(constants::DW_OP_GNU_const_type.0)
+ .uleb(100)
+ .D8(data.len() as u8)
+ .append_bytes(&data[..])
+ },
+ &Operation::TypedLiteral {
+ base_type: UnitOffset(100),
+ value: EndianSlice::new(&data[..], LittleEndian),
+ },
+ encoding,
+ );
+ }
+
+ #[test]
+ fn test_op_parse_regval_type() {
+ // Doesn't matter for this test.
+ let encoding = encoding4();
+
+ check_op_parse(
+ |s| s.D8(constants::DW_OP_regval_type.0).uleb(1).uleb(100),
+ &Operation::RegisterOffset {
+ register: Register(1),
+ offset: 0,
+ base_type: UnitOffset(100),
+ },
+ encoding,
+ );
+ check_op_parse(
+ |s| s.D8(constants::DW_OP_GNU_regval_type.0).uleb(1).uleb(100),
+ &Operation::RegisterOffset {
+ register: Register(1),
+ offset: 0,
+ base_type: UnitOffset(100),
+ },
+ encoding,
+ );
+ }
+
+ #[test]
+ fn test_op_parse_deref_type() {
+ // Doesn't matter for this test.
+ let encoding = encoding4();
+
+ check_op_parse(
+ |s| s.D8(constants::DW_OP_deref_type.0).D8(8).uleb(100),
+ &Operation::Deref {
+ base_type: UnitOffset(100),
+ size: 8,
+ space: false,
+ },
+ encoding,
+ );
+ check_op_parse(
+ |s| s.D8(constants::DW_OP_GNU_deref_type.0).D8(8).uleb(100),
+ &Operation::Deref {
+ base_type: UnitOffset(100),
+ size: 8,
+ space: false,
+ },
+ encoding,
+ );
+ check_op_parse(
+ |s| s.D8(constants::DW_OP_xderef_type.0).D8(8).uleb(100),
+ &Operation::Deref {
+ base_type: UnitOffset(100),
+ size: 8,
+ space: true,
+ },
+ encoding,
+ );
+ }
+
+ #[test]
+ fn test_op_convert() {
+ // Doesn't matter for this test.
+ let encoding = encoding4();
+
+ check_op_parse(
+ |s| s.D8(constants::DW_OP_convert.0).uleb(100),
+ &Operation::Convert {
+ base_type: UnitOffset(100),
+ },
+ encoding,
+ );
+ check_op_parse(
+ |s| s.D8(constants::DW_OP_GNU_convert.0).uleb(100),
+ &Operation::Convert {
+ base_type: UnitOffset(100),
+ },
+ encoding,
+ );
+ }
+
+ #[test]
+ fn test_op_reinterpret() {
+ // Doesn't matter for this test.
+ let encoding = encoding4();
+
+ check_op_parse(
+ |s| s.D8(constants::DW_OP_reinterpret.0).uleb(100),
+ &Operation::Reinterpret {
+ base_type: UnitOffset(100),
+ },
+ encoding,
+ );
+ check_op_parse(
+ |s| s.D8(constants::DW_OP_GNU_reinterpret.0).uleb(100),
+ &Operation::Reinterpret {
+ base_type: UnitOffset(100),
+ },
+ encoding,
+ );
+ }
+
+ #[test]
+ fn test_op_parse_implicit_pointer() {
+ for op in &[
+ constants::DW_OP_implicit_pointer,
+ constants::DW_OP_GNU_implicit_pointer,
+ ] {
+ check_op_parse(
+ |s| s.D8(op.0).D32(0x1234_5678).sleb(0x123),
+ &Operation::ImplicitPointer {
+ value: DebugInfoOffset(0x1234_5678),
+ byte_offset: 0x123,
+ },
+ encoding4(),
+ );
+
+ check_op_parse(
+ |s| s.D8(op.0).D64(0x1234_5678).sleb(0x123),
+ &Operation::ImplicitPointer {
+ value: DebugInfoOffset(0x1234_5678),
+ byte_offset: 0x123,
+ },
+ encoding8(),
+ );
+
+ check_op_parse(
+ |s| s.D8(op.0).D64(0x1234_5678).sleb(0x123),
+ &Operation::ImplicitPointer {
+ value: DebugInfoOffset(0x1234_5678),
+ byte_offset: 0x123,
+ },
+ Encoding {
+ format: Format::Dwarf32,
+ version: 2,
+ address_size: 8,
+ },
+ )
+ }
+ }
+
+ #[test]
+ fn test_op_parse_entry_value() {
+ for op in &[
+ constants::DW_OP_entry_value,
+ constants::DW_OP_GNU_entry_value,
+ ] {
+ let data = b"hello";
+ check_op_parse(
+ |s| s.D8(op.0).uleb(data.len() as u64).append_bytes(&data[..]),
+ &Operation::EntryValue {
+ expression: EndianSlice::new(&data[..], LittleEndian),
+ },
+ encoding4(),
+ );
+ }
+ }
+
+ #[test]
+ fn test_op_parse_gnu_parameter_ref() {
+ check_op_parse(
+ |s| s.D8(constants::DW_OP_GNU_parameter_ref.0).D32(0x1234_5678),
+ &Operation::ParameterRef {
+ offset: UnitOffset(0x1234_5678),
+ },
+ encoding4(),
+ )
+ }
+
+ #[test]
+ fn test_op_wasm() {
+ // Doesn't matter for this test.
+ let encoding = encoding4();
+
+ check_op_parse(
+ |s| s.D8(constants::DW_OP_WASM_location.0).D8(0).uleb(1000),
+ &Operation::WasmLocal { index: 1000 },
+ encoding,
+ );
+ check_op_parse(
+ |s| s.D8(constants::DW_OP_WASM_location.0).D8(1).uleb(1000),
+ &Operation::WasmGlobal { index: 1000 },
+ encoding,
+ );
+ check_op_parse(
+ |s| s.D8(constants::DW_OP_WASM_location.0).D8(2).uleb(1000),
+ &Operation::WasmStack { index: 1000 },
+ encoding,
+ );
+ check_op_parse(
+ |s| s.D8(constants::DW_OP_WASM_location.0).D8(3).D32(1000),
+ &Operation::WasmGlobal { index: 1000 },
+ encoding,
+ );
+ }
+
+ enum AssemblerEntry {
+ Op(constants::DwOp),
+ Mark(u8),
+ Branch(u8),
+ U8(u8),
+ U16(u16),
+ U32(u32),
+ U64(u64),
+ Uleb(u64),
+ Sleb(u64),
+ }
+
+ fn assemble(entries: &[AssemblerEntry]) -> Vec<u8> {
+ let mut result = Vec::new();
+
+ struct Marker(Option<usize>, Vec<usize>);
+
+ let mut markers = Vec::new();
+ for _ in 0..256 {
+ markers.push(Marker(None, Vec::new()));
+ }
+
+ fn write(stack: &mut Vec<u8>, index: usize, mut num: u64, nbytes: u8) {
+ for i in 0..nbytes as usize {
+ stack[index + i] = (num & 0xff) as u8;
+ num >>= 8;
+ }
+ }
+
+ fn push(stack: &mut Vec<u8>, num: u64, nbytes: u8) {
+ let index = stack.len();
+ for _ in 0..nbytes {
+ stack.push(0);
+ }
+ write(stack, index, num, nbytes);
+ }
+
+ for item in entries {
+ match *item {
+ AssemblerEntry::Op(op) => result.push(op.0),
+ AssemblerEntry::Mark(num) => {
+ assert!(markers[num as usize].0.is_none());
+ markers[num as usize].0 = Some(result.len());
+ }
+ AssemblerEntry::Branch(num) => {
+ markers[num as usize].1.push(result.len());
+ push(&mut result, 0, 2);
+ }
+ AssemblerEntry::U8(num) => result.push(num),
+ AssemblerEntry::U16(num) => push(&mut result, u64::from(num), 2),
+ AssemblerEntry::U32(num) => push(&mut result, u64::from(num), 4),
+ AssemblerEntry::U64(num) => push(&mut result, num, 8),
+ AssemblerEntry::Uleb(num) => {
+ leb128::write::unsigned(&mut result, num).unwrap();
+ }
+ AssemblerEntry::Sleb(num) => {
+ leb128::write::signed(&mut result, num as i64).unwrap();
+ }
+ }
+ }
+
+ // Update all the branches.
+ for marker in markers {
+ if let Some(offset) = marker.0 {
+ for branch_offset in marker.1 {
+ let delta = offset.wrapping_sub(branch_offset + 2) as u64;
+ write(&mut result, branch_offset, delta, 2);
+ }
+ }
+ }
+
+ result
+ }
+
+ #[allow(clippy::too_many_arguments)]
+ fn check_eval_with_args<F>(
+ program: &[AssemblerEntry],
+ expect: Result<&[Piece<EndianSlice<LittleEndian>>]>,
+ encoding: Encoding,
+ object_address: Option<u64>,
+ initial_value: Option<u64>,
+ max_iterations: Option<u32>,
+ f: F,
+ ) where
+ for<'a> F: Fn(
+ &mut Evaluation<EndianSlice<'a, LittleEndian>>,
+ EvaluationResult<EndianSlice<'a, LittleEndian>>,
+ ) -> Result<EvaluationResult<EndianSlice<'a, LittleEndian>>>,
+ {
+ let bytes = assemble(program);
+ let bytes = EndianSlice::new(&bytes, LittleEndian);
+
+ let mut eval = Evaluation::new(bytes, encoding);
+
+ if let Some(val) = object_address {
+ eval.set_object_address(val);
+ }
+ if let Some(val) = initial_value {
+ eval.set_initial_value(val);
+ }
+ if let Some(val) = max_iterations {
+ eval.set_max_iterations(val);
+ }
+
+ let result = match eval.evaluate() {
+ Err(e) => Err(e),
+ Ok(r) => f(&mut eval, r),
+ };
+
+ match (result, expect) {
+ (Ok(EvaluationResult::Complete), Ok(pieces)) => {
+ let vec = eval.result();
+ assert_eq!(vec.len(), pieces.len());
+ for i in 0..pieces.len() {
+ assert_eq!(vec[i], pieces[i]);
+ }
+ }
+ (Err(f1), Err(f2)) => {
+ assert_eq!(f1, f2);
+ }
+ otherwise => panic!("Unexpected result: {:?}", otherwise),
+ }
+ }
+
+ fn check_eval(
+ program: &[AssemblerEntry],
+ expect: Result<&[Piece<EndianSlice<LittleEndian>>]>,
+ encoding: Encoding,
+ ) {
+ check_eval_with_args(program, expect, encoding, None, None, None, |_, result| {
+ Ok(result)
+ });
+ }
+
+ #[test]
+ fn test_eval_arith() {
+ // It's nice if an operation and its arguments can fit on a single
+ // line in the test program.
+ use self::AssemblerEntry::*;
+ use crate::constants::*;
+
+ // Indices of marks in the assembly.
+ let done = 0;
+ let fail = 1;
+
+ #[rustfmt::skip]
+ let program = [
+ Op(DW_OP_const1u), U8(23),
+ Op(DW_OP_const1s), U8((-23i8) as u8),
+ Op(DW_OP_plus),
+ Op(DW_OP_bra), Branch(fail),
+
+ Op(DW_OP_const2u), U16(23),
+ Op(DW_OP_const2s), U16((-23i16) as u16),
+ Op(DW_OP_plus),
+ Op(DW_OP_bra), Branch(fail),
+
+ Op(DW_OP_const4u), U32(0x1111_2222),
+ Op(DW_OP_const4s), U32((-0x1111_2222i32) as u32),
+ Op(DW_OP_plus),
+ Op(DW_OP_bra), Branch(fail),
+
+ // Plus should overflow.
+ Op(DW_OP_const1s), U8(0xff),
+ Op(DW_OP_const1u), U8(1),
+ Op(DW_OP_plus),
+ Op(DW_OP_bra), Branch(fail),
+
+ Op(DW_OP_const1s), U8(0xff),
+ Op(DW_OP_plus_uconst), Uleb(1),
+ Op(DW_OP_bra), Branch(fail),
+
+ // Minus should underflow.
+ Op(DW_OP_const1s), U8(0),
+ Op(DW_OP_const1u), U8(1),
+ Op(DW_OP_minus),
+ Op(DW_OP_const1s), U8(0xff),
+ Op(DW_OP_ne),
+ Op(DW_OP_bra), Branch(fail),
+
+ Op(DW_OP_const1s), U8(0xff),
+ Op(DW_OP_abs),
+ Op(DW_OP_const1u), U8(1),
+ Op(DW_OP_minus),
+ Op(DW_OP_bra), Branch(fail),
+
+ Op(DW_OP_const4u), U32(0xf078_fffe),
+ Op(DW_OP_const4u), U32(0x0f87_0001),
+ Op(DW_OP_and),
+ Op(DW_OP_bra), Branch(fail),
+
+ Op(DW_OP_const4u), U32(0xf078_fffe),
+ Op(DW_OP_const4u), U32(0xf000_00fe),
+ Op(DW_OP_and),
+ Op(DW_OP_const4u), U32(0xf000_00fe),
+ Op(DW_OP_ne),
+ Op(DW_OP_bra), Branch(fail),
+
+ // Division is signed.
+ Op(DW_OP_const1s), U8(0xfe),
+ Op(DW_OP_const1s), U8(2),
+ Op(DW_OP_div),
+ Op(DW_OP_plus_uconst), Uleb(1),
+ Op(DW_OP_bra), Branch(fail),
+
+ // Mod is unsigned.
+ Op(DW_OP_const1s), U8(0xfd),
+ Op(DW_OP_const1s), U8(2),
+ Op(DW_OP_mod),
+ Op(DW_OP_neg),
+ Op(DW_OP_plus_uconst), Uleb(1),
+ Op(DW_OP_bra), Branch(fail),
+
+ // Overflow is defined for multiplication.
+ Op(DW_OP_const4u), U32(0x8000_0001),
+ Op(DW_OP_lit2),
+ Op(DW_OP_mul),
+ Op(DW_OP_lit2),
+ Op(DW_OP_ne),
+ Op(DW_OP_bra), Branch(fail),
+
+ Op(DW_OP_const4u), U32(0xf0f0_f0f0),
+ Op(DW_OP_const4u), U32(0xf0f0_f0f0),
+ Op(DW_OP_xor),
+ Op(DW_OP_bra), Branch(fail),
+
+ Op(DW_OP_const4u), U32(0xf0f0_f0f0),
+ Op(DW_OP_const4u), U32(0x0f0f_0f0f),
+ Op(DW_OP_or),
+ Op(DW_OP_not),
+ Op(DW_OP_bra), Branch(fail),
+
+ // In 32 bit mode, values are truncated.
+ Op(DW_OP_const8u), U64(0xffff_ffff_0000_0000),
+ Op(DW_OP_lit2),
+ Op(DW_OP_div),
+ Op(DW_OP_bra), Branch(fail),
+
+ Op(DW_OP_const1u), U8(0xff),
+ Op(DW_OP_lit1),
+ Op(DW_OP_shl),
+ Op(DW_OP_const2u), U16(0x1fe),
+ Op(DW_OP_ne),
+ Op(DW_OP_bra), Branch(fail),
+
+ Op(DW_OP_const1u), U8(0xff),
+ Op(DW_OP_const1u), U8(50),
+ Op(DW_OP_shl),
+ Op(DW_OP_bra), Branch(fail),
+
+ // Absurd shift.
+ Op(DW_OP_const1u), U8(0xff),
+ Op(DW_OP_const1s), U8(0xff),
+ Op(DW_OP_shl),
+ Op(DW_OP_bra), Branch(fail),
+
+ Op(DW_OP_const1s), U8(0xff),
+ Op(DW_OP_lit1),
+ Op(DW_OP_shr),
+ Op(DW_OP_const4u), U32(0x7fff_ffff),
+ Op(DW_OP_ne),
+ Op(DW_OP_bra), Branch(fail),
+
+ Op(DW_OP_const1s), U8(0xff),
+ Op(DW_OP_const1u), U8(0xff),
+ Op(DW_OP_shr),
+ Op(DW_OP_bra), Branch(fail),
+
+ Op(DW_OP_const1s), U8(0xff),
+ Op(DW_OP_lit1),
+ Op(DW_OP_shra),
+ Op(DW_OP_const1s), U8(0xff),
+ Op(DW_OP_ne),
+ Op(DW_OP_bra), Branch(fail),
+
+ Op(DW_OP_const1s), U8(0xff),
+ Op(DW_OP_const1u), U8(0xff),
+ Op(DW_OP_shra),
+ Op(DW_OP_const1s), U8(0xff),
+ Op(DW_OP_ne),
+ Op(DW_OP_bra), Branch(fail),
+
+ // Success.
+ Op(DW_OP_lit0),
+ Op(DW_OP_nop),
+ Op(DW_OP_skip), Branch(done),
+
+ Mark(fail),
+ Op(DW_OP_lit1),
+
+ Mark(done),
+ Op(DW_OP_stack_value),
+ ];
+
+ let result = [Piece {
+ size_in_bits: None,
+ bit_offset: None,
+ location: Location::Value {
+ value: Value::Generic(0),
+ },
+ }];
+
+ check_eval(&program, Ok(&result), encoding4());
+ }
+
+ #[test]
+ fn test_eval_arith64() {
+ // It's nice if an operation and its arguments can fit on a single
+ // line in the test program.
+ use self::AssemblerEntry::*;
+ use crate::constants::*;
+
+ // Indices of marks in the assembly.
+ let done = 0;
+ let fail = 1;
+
+ #[rustfmt::skip]
+ let program = [
+ Op(DW_OP_const8u), U64(0x1111_2222_3333_4444),
+ Op(DW_OP_const8s), U64((-0x1111_2222_3333_4444i64) as u64),
+ Op(DW_OP_plus),
+ Op(DW_OP_bra), Branch(fail),
+
+ Op(DW_OP_constu), Uleb(0x1111_2222_3333_4444),
+ Op(DW_OP_consts), Sleb((-0x1111_2222_3333_4444i64) as u64),
+ Op(DW_OP_plus),
+ Op(DW_OP_bra), Branch(fail),
+
+ Op(DW_OP_lit1),
+ Op(DW_OP_plus_uconst), Uleb(!0u64),
+ Op(DW_OP_bra), Branch(fail),
+
+ Op(DW_OP_lit1),
+ Op(DW_OP_neg),
+ Op(DW_OP_not),
+ Op(DW_OP_bra), Branch(fail),
+
+ Op(DW_OP_const8u), U64(0x8000_0000_0000_0000),
+ Op(DW_OP_const1u), U8(63),
+ Op(DW_OP_shr),
+ Op(DW_OP_lit1),
+ Op(DW_OP_ne),
+ Op(DW_OP_bra), Branch(fail),
+
+ Op(DW_OP_const8u), U64(0x8000_0000_0000_0000),
+ Op(DW_OP_const1u), U8(62),
+ Op(DW_OP_shra),
+ Op(DW_OP_plus_uconst), Uleb(2),
+ Op(DW_OP_bra), Branch(fail),
+
+ Op(DW_OP_lit1),
+ Op(DW_OP_const1u), U8(63),
+ Op(DW_OP_shl),
+ Op(DW_OP_const8u), U64(0x8000_0000_0000_0000),
+ Op(DW_OP_ne),
+ Op(DW_OP_bra), Branch(fail),
+
+ // Success.
+ Op(DW_OP_lit0),
+ Op(DW_OP_nop),
+ Op(DW_OP_skip), Branch(done),
+
+ Mark(fail),
+ Op(DW_OP_lit1),
+
+ Mark(done),
+ Op(DW_OP_stack_value),
+ ];
+
+ let result = [Piece {
+ size_in_bits: None,
+ bit_offset: None,
+ location: Location::Value {
+ value: Value::Generic(0),
+ },
+ }];
+
+ check_eval(&program, Ok(&result), encoding8());
+ }
+
+ #[test]
+ fn test_eval_compare() {
+ // It's nice if an operation and its arguments can fit on a single
+ // line in the test program.
+ use self::AssemblerEntry::*;
+ use crate::constants::*;
+
+ // Indices of marks in the assembly.
+ let done = 0;
+ let fail = 1;
+
+ #[rustfmt::skip]
+ let program = [
+ // Comparisons are signed.
+ Op(DW_OP_const1s), U8(1),
+ Op(DW_OP_const1s), U8(0xff),
+ Op(DW_OP_lt),
+ Op(DW_OP_bra), Branch(fail),
+
+ Op(DW_OP_const1s), U8(0xff),
+ Op(DW_OP_const1s), U8(1),
+ Op(DW_OP_gt),
+ Op(DW_OP_bra), Branch(fail),
+
+ Op(DW_OP_const1s), U8(1),
+ Op(DW_OP_const1s), U8(0xff),
+ Op(DW_OP_le),
+ Op(DW_OP_bra), Branch(fail),
+
+ Op(DW_OP_const1s), U8(0xff),
+ Op(DW_OP_const1s), U8(1),
+ Op(DW_OP_ge),
+ Op(DW_OP_bra), Branch(fail),
+
+ Op(DW_OP_const1s), U8(0xff),
+ Op(DW_OP_const1s), U8(1),
+ Op(DW_OP_eq),
+ Op(DW_OP_bra), Branch(fail),
+
+ Op(DW_OP_const4s), U32(1),
+ Op(DW_OP_const1s), U8(1),
+ Op(DW_OP_ne),
+ Op(DW_OP_bra), Branch(fail),
+
+ // Success.
+ Op(DW_OP_lit0),
+ Op(DW_OP_nop),
+ Op(DW_OP_skip), Branch(done),
+
+ Mark(fail),
+ Op(DW_OP_lit1),
+
+ Mark(done),
+ Op(DW_OP_stack_value),
+ ];
+
+ let result = [Piece {
+ size_in_bits: None,
+ bit_offset: None,
+ location: Location::Value {
+ value: Value::Generic(0),
+ },
+ }];
+
+ check_eval(&program, Ok(&result), encoding4());
+ }
+
+ #[test]
+ fn test_eval_stack() {
+ // It's nice if an operation and its arguments can fit on a single
+ // line in the test program.
+ use self::AssemblerEntry::*;
+ use crate::constants::*;
+
+ #[rustfmt::skip]
+ let program = [
+ Op(DW_OP_lit17), // -- 17
+ Op(DW_OP_dup), // -- 17 17
+ Op(DW_OP_over), // -- 17 17 17
+ Op(DW_OP_minus), // -- 17 0
+ Op(DW_OP_swap), // -- 0 17
+ Op(DW_OP_dup), // -- 0 17 17
+ Op(DW_OP_plus_uconst), Uleb(1), // -- 0 17 18
+ Op(DW_OP_rot), // -- 18 0 17
+ Op(DW_OP_pick), U8(2), // -- 18 0 17 18
+ Op(DW_OP_pick), U8(3), // -- 18 0 17 18 18
+ Op(DW_OP_minus), // -- 18 0 17 0
+ Op(DW_OP_drop), // -- 18 0 17
+ Op(DW_OP_swap), // -- 18 17 0
+ Op(DW_OP_drop), // -- 18 17
+ Op(DW_OP_minus), // -- 1
+ Op(DW_OP_stack_value),
+ ];
+
+ let result = [Piece {
+ size_in_bits: None,
+ bit_offset: None,
+ location: Location::Value {
+ value: Value::Generic(1),
+ },
+ }];
+
+ check_eval(&program, Ok(&result), encoding4());
+ }
+
+ #[test]
+ fn test_eval_lit_and_reg() {
+ // It's nice if an operation and its arguments can fit on a single
+ // line in the test program.
+ use self::AssemblerEntry::*;
+ use crate::constants::*;
+
+ let mut program = Vec::new();
+ program.push(Op(DW_OP_lit0));
+ for i in 0..32 {
+ program.push(Op(DwOp(DW_OP_lit0.0 + i)));
+ program.push(Op(DwOp(DW_OP_breg0.0 + i)));
+ program.push(Sleb(u64::from(i)));
+ program.push(Op(DW_OP_plus));
+ program.push(Op(DW_OP_plus));
+ }
+
+ program.push(Op(DW_OP_bregx));
+ program.push(Uleb(0x1234));
+ program.push(Sleb(0x1234));
+ program.push(Op(DW_OP_plus));
+
+ program.push(Op(DW_OP_stack_value));
+
+ let result = [Piece {
+ size_in_bits: None,
+ bit_offset: None,
+ location: Location::Value {
+ value: Value::Generic(496),
+ },
+ }];
+
+ check_eval_with_args(
+ &program,
+ Ok(&result),
+ encoding4(),
+ None,
+ None,
+ None,
+ |eval, mut result| {
+ while result != EvaluationResult::Complete {
+ result = eval.resume_with_register(match result {
+ EvaluationResult::RequiresRegister {
+ register,
+ base_type,
+ } => {
+ assert_eq!(base_type, UnitOffset(0));
+ Value::Generic(u64::from(register.0).wrapping_neg())
+ }
+ _ => panic!(),
+ })?;
+ }
+ Ok(result)
+ },
+ );
+ }
+
+ #[test]
+ fn test_eval_memory() {
+ // It's nice if an operation and its arguments can fit on a single
+ // line in the test program.
+ use self::AssemblerEntry::*;
+ use crate::constants::*;
+
+ // Indices of marks in the assembly.
+ let done = 0;
+ let fail = 1;
+
+ #[rustfmt::skip]
+ let program = [
+ Op(DW_OP_addr), U32(0x7fff_ffff),
+ Op(DW_OP_deref),
+ Op(DW_OP_const4u), U32(0xffff_fffc),
+ Op(DW_OP_ne),
+ Op(DW_OP_bra), Branch(fail),
+
+ Op(DW_OP_addr), U32(0x7fff_ffff),
+ Op(DW_OP_deref_size), U8(2),
+ Op(DW_OP_const4u), U32(0xfffc),
+ Op(DW_OP_ne),
+ Op(DW_OP_bra), Branch(fail),
+
+ Op(DW_OP_lit1),
+ Op(DW_OP_addr), U32(0x7fff_ffff),
+ Op(DW_OP_xderef),
+ Op(DW_OP_const4u), U32(0xffff_fffd),
+ Op(DW_OP_ne),
+ Op(DW_OP_bra), Branch(fail),
+
+ Op(DW_OP_lit1),
+ Op(DW_OP_addr), U32(0x7fff_ffff),
+ Op(DW_OP_xderef_size), U8(2),
+ Op(DW_OP_const4u), U32(0xfffd),
+ Op(DW_OP_ne),
+ Op(DW_OP_bra), Branch(fail),
+
+ Op(DW_OP_lit17),
+ Op(DW_OP_form_tls_address),
+ Op(DW_OP_constu), Uleb(!17),
+ Op(DW_OP_ne),
+ Op(DW_OP_bra), Branch(fail),
+
+ Op(DW_OP_lit17),
+ Op(DW_OP_GNU_push_tls_address),
+ Op(DW_OP_constu), Uleb(!17),
+ Op(DW_OP_ne),
+ Op(DW_OP_bra), Branch(fail),
+
+ Op(DW_OP_addrx), Uleb(0x10),
+ Op(DW_OP_deref),
+ Op(DW_OP_const4u), U32(0x4040),
+ Op(DW_OP_ne),
+ Op(DW_OP_bra), Branch(fail),
+
+ Op(DW_OP_constx), Uleb(17),
+ Op(DW_OP_form_tls_address),
+ Op(DW_OP_constu), Uleb(!27),
+ Op(DW_OP_ne),
+ Op(DW_OP_bra), Branch(fail),
+
+ // Success.
+ Op(DW_OP_lit0),
+ Op(DW_OP_nop),
+ Op(DW_OP_skip), Branch(done),
+
+ Mark(fail),
+ Op(DW_OP_lit1),
+
+ Mark(done),
+ Op(DW_OP_stack_value),
+ ];
+
+ let result = [Piece {
+ size_in_bits: None,
+ bit_offset: None,
+ location: Location::Value {
+ value: Value::Generic(0),
+ },
+ }];
+
+ check_eval_with_args(
+ &program,
+ Ok(&result),
+ encoding4(),
+ None,
+ None,
+ None,
+ |eval, mut result| {
+ while result != EvaluationResult::Complete {
+ result = match result {
+ EvaluationResult::RequiresMemory {
+ address,
+ size,
+ space,
+ base_type,
+ } => {
+ assert_eq!(base_type, UnitOffset(0));
+ let mut v = address << 2;
+ if let Some(value) = space {
+ v += value;
+ }
+ v &= (1u64 << (8 * size)) - 1;
+ eval.resume_with_memory(Value::Generic(v))?
+ }
+ EvaluationResult::RequiresTls(slot) => eval.resume_with_tls(!slot)?,
+ EvaluationResult::RequiresRelocatedAddress(address) => {
+ eval.resume_with_relocated_address(address)?
+ }
+ EvaluationResult::RequiresIndexedAddress { index, relocate } => {
+ if relocate {
+ eval.resume_with_indexed_address(0x1000 + index.0 as u64)?
+ } else {
+ eval.resume_with_indexed_address(10 + index.0 as u64)?
+ }
+ }
+ _ => panic!(),
+ };
+ }
+
+ Ok(result)
+ },
+ );
+ }
+
+ #[test]
+ fn test_eval_register() {
+ // It's nice if an operation and its arguments can fit on a single
+ // line in the test program.
+ use self::AssemblerEntry::*;
+ use crate::constants::*;
+
+ for i in 0..32 {
+ #[rustfmt::skip]
+ let program = [
+ Op(DwOp(DW_OP_reg0.0 + i)),
+ // Included only in the "bad" run.
+ Op(DW_OP_lit23),
+ ];
+ let ok_result = [Piece {
+ size_in_bits: None,
+ bit_offset: None,
+ location: Location::Register {
+ register: Register(i.into()),
+ },
+ }];
+
+ check_eval(&program[..1], Ok(&ok_result), encoding4());
+
+ check_eval(
+ &program,
+ Err(Error::InvalidExpressionTerminator(1)),
+ encoding4(),
+ );
+ }
+
+ #[rustfmt::skip]
+ let program = [
+ Op(DW_OP_regx), Uleb(0x1234)
+ ];
+
+ let result = [Piece {
+ size_in_bits: None,
+ bit_offset: None,
+ location: Location::Register {
+ register: Register(0x1234),
+ },
+ }];
+
+ check_eval(&program, Ok(&result), encoding4());
+ }
+
+ #[test]
+ fn test_eval_context() {
+ // It's nice if an operation and its arguments can fit on a single
+ // line in the test program.
+ use self::AssemblerEntry::*;
+ use crate::constants::*;
+
+ // Test `frame_base` and `call_frame_cfa` callbacks.
+ #[rustfmt::skip]
+ let program = [
+ Op(DW_OP_fbreg), Sleb((-8i8) as u64),
+ Op(DW_OP_call_frame_cfa),
+ Op(DW_OP_plus),
+ Op(DW_OP_neg),
+ Op(DW_OP_stack_value)
+ ];
+
+ let result = [Piece {
+ size_in_bits: None,
+ bit_offset: None,
+ location: Location::Value {
+ value: Value::Generic(9),
+ },
+ }];
+
+ check_eval_with_args(
+ &program,
+ Ok(&result),
+ encoding8(),
+ None,
+ None,
+ None,
+ |eval, result| {
+ match result {
+ EvaluationResult::RequiresFrameBase => {}
+ _ => panic!(),
+ };
+ match eval.resume_with_frame_base(0x0123_4567_89ab_cdef)? {
+ EvaluationResult::RequiresCallFrameCfa => {}
+ _ => panic!(),
+ };
+ eval.resume_with_call_frame_cfa(0xfedc_ba98_7654_3210)
+ },
+ );
+
+ // Test `evaluate_entry_value` callback.
+ #[rustfmt::skip]
+ let program = [
+ Op(DW_OP_entry_value), Uleb(8), U64(0x1234_5678),
+ Op(DW_OP_stack_value)
+ ];
+
+ let result = [Piece {
+ size_in_bits: None,
+ bit_offset: None,
+ location: Location::Value {
+ value: Value::Generic(0x1234_5678),
+ },
+ }];
+
+ check_eval_with_args(
+ &program,
+ Ok(&result),
+ encoding8(),
+ None,
+ None,
+ None,
+ |eval, result| {
+ let entry_value = match result {
+ EvaluationResult::RequiresEntryValue(mut expression) => {
+ expression.0.read_u64()?
+ }
+ _ => panic!(),
+ };
+ eval.resume_with_entry_value(Value::Generic(entry_value))
+ },
+ );
+
+ // Test missing `object_address` field.
+ #[rustfmt::skip]
+ let program = [
+ Op(DW_OP_push_object_address),
+ ];
+
+ check_eval_with_args(
+ &program,
+ Err(Error::InvalidPushObjectAddress),
+ encoding4(),
+ None,
+ None,
+ None,
+ |_, _| panic!(),
+ );
+
+ // Test `object_address` field.
+ #[rustfmt::skip]
+ let program = [
+ Op(DW_OP_push_object_address),
+ Op(DW_OP_stack_value),
+ ];
+
+ let result = [Piece {
+ size_in_bits: None,
+ bit_offset: None,
+ location: Location::Value {
+ value: Value::Generic(0xff),
+ },
+ }];
+
+ check_eval_with_args(
+ &program,
+ Ok(&result),
+ encoding8(),
+ Some(0xff),
+ None,
+ None,
+ |_, result| Ok(result),
+ );
+
+ // Test `initial_value` field.
+ #[rustfmt::skip]
+ let program = [
+ ];
+
+ let result = [Piece {
+ size_in_bits: None,
+ bit_offset: None,
+ location: Location::Address {
+ address: 0x1234_5678,
+ },
+ }];
+
+ check_eval_with_args(
+ &program,
+ Ok(&result),
+ encoding8(),
+ None,
+ Some(0x1234_5678),
+ None,
+ |_, result| Ok(result),
+ );
+ }
+
+ #[test]
+ fn test_eval_empty_stack() {
+ // It's nice if an operation and its arguments can fit on a single
+ // line in the test program.
+ use self::AssemblerEntry::*;
+ use crate::constants::*;
+
+ #[rustfmt::skip]
+ let program = [
+ Op(DW_OP_stack_value)
+ ];
+
+ check_eval(&program, Err(Error::NotEnoughStackItems), encoding4());
+ }
+
+ #[test]
+ fn test_eval_call() {
+ // It's nice if an operation and its arguments can fit on a single
+ // line in the test program.
+ use self::AssemblerEntry::*;
+ use crate::constants::*;
+
+ #[rustfmt::skip]
+ let program = [
+ Op(DW_OP_lit23),
+ Op(DW_OP_call2), U16(0x7755),
+ Op(DW_OP_call4), U32(0x7755_aaee),
+ Op(DW_OP_call_ref), U32(0x7755_aaee),
+ Op(DW_OP_stack_value)
+ ];
+
+ let result = [Piece {
+ size_in_bits: None,
+ bit_offset: None,
+ location: Location::Value {
+ value: Value::Generic(23),
+ },
+ }];
+
+ check_eval_with_args(
+ &program,
+ Ok(&result),
+ encoding4(),
+ None,
+ None,
+ None,
+ |eval, result| {
+ let buf = EndianSlice::new(&[], LittleEndian);
+ match result {
+ EvaluationResult::RequiresAtLocation(_) => {}
+ _ => panic!(),
+ };
+
+ eval.resume_with_at_location(buf)?;
+
+ match result {
+ EvaluationResult::RequiresAtLocation(_) => {}
+ _ => panic!(),
+ };
+
+ eval.resume_with_at_location(buf)?;
+
+ match result {
+ EvaluationResult::RequiresAtLocation(_) => {}
+ _ => panic!(),
+ };
+
+ eval.resume_with_at_location(buf)
+ },
+ );
+
+ // DW_OP_lit2 DW_OP_mul
+ const SUBR: &[u8] = &[0x32, 0x1e];
+
+ let result = [Piece {
+ size_in_bits: None,
+ bit_offset: None,
+ location: Location::Value {
+ value: Value::Generic(184),
+ },
+ }];
+
+ check_eval_with_args(
+ &program,
+ Ok(&result),
+ encoding4(),
+ None,
+ None,
+ None,
+ |eval, result| {
+ let buf = EndianSlice::new(SUBR, LittleEndian);
+ match result {
+ EvaluationResult::RequiresAtLocation(_) => {}
+ _ => panic!(),
+ };
+
+ eval.resume_with_at_location(buf)?;
+
+ match result {
+ EvaluationResult::RequiresAtLocation(_) => {}
+ _ => panic!(),
+ };
+
+ eval.resume_with_at_location(buf)?;
+
+ match result {
+ EvaluationResult::RequiresAtLocation(_) => {}
+ _ => panic!(),
+ };
+
+ eval.resume_with_at_location(buf)
+ },
+ );
+ }
+
+ #[test]
+ fn test_eval_pieces() {
+ // It's nice if an operation and its arguments can fit on a single
+ // line in the test program.
+ use self::AssemblerEntry::*;
+ use crate::constants::*;
+
+ // Example from DWARF 2.6.1.3.
+ #[rustfmt::skip]
+ let program = [
+ Op(DW_OP_reg3),
+ Op(DW_OP_piece), Uleb(4),
+ Op(DW_OP_reg4),
+ Op(DW_OP_piece), Uleb(2),
+ ];
+
+ let result = [
+ Piece {
+ size_in_bits: Some(32),
+ bit_offset: None,
+ location: Location::Register {
+ register: Register(3),
+ },
+ },
+ Piece {
+ size_in_bits: Some(16),
+ bit_offset: None,
+ location: Location::Register {
+ register: Register(4),
+ },
+ },
+ ];
+
+ check_eval(&program, Ok(&result), encoding4());
+
+ // Example from DWARF 2.6.1.3 (but hacked since dealing with fbreg
+ // in the tests is a pain).
+ #[rustfmt::skip]
+ let program = [
+ Op(DW_OP_reg0),
+ Op(DW_OP_piece), Uleb(4),
+ Op(DW_OP_piece), Uleb(4),
+ Op(DW_OP_addr), U32(0x7fff_ffff),
+ Op(DW_OP_piece), Uleb(4),
+ ];
+
+ let result = [
+ Piece {
+ size_in_bits: Some(32),
+ bit_offset: None,
+ location: Location::Register {
+ register: Register(0),
+ },
+ },
+ Piece {
+ size_in_bits: Some(32),
+ bit_offset: None,
+ location: Location::Empty,
+ },
+ Piece {
+ size_in_bits: Some(32),
+ bit_offset: None,
+ location: Location::Address {
+ address: 0x7fff_ffff,
+ },
+ },
+ ];
+
+ check_eval_with_args(
+ &program,
+ Ok(&result),
+ encoding4(),
+ None,
+ None,
+ None,
+ |eval, mut result| {
+ while result != EvaluationResult::Complete {
+ result = match result {
+ EvaluationResult::RequiresRelocatedAddress(address) => {
+ eval.resume_with_relocated_address(address)?
+ }
+ _ => panic!(),
+ };
+ }
+
+ Ok(result)
+ },
+ );
+
+ #[rustfmt::skip]
+ let program = [
+ Op(DW_OP_implicit_value), Uleb(5),
+ U8(23), U8(24), U8(25), U8(26), U8(0),
+ ];
+
+ const BYTES: &[u8] = &[23, 24, 25, 26, 0];
+
+ let result = [Piece {
+ size_in_bits: None,
+ bit_offset: None,
+ location: Location::Bytes {
+ value: EndianSlice::new(BYTES, LittleEndian),
+ },
+ }];
+
+ check_eval(&program, Ok(&result), encoding4());
+
+ #[rustfmt::skip]
+ let program = [
+ Op(DW_OP_lit7),
+ Op(DW_OP_stack_value),
+ Op(DW_OP_bit_piece), Uleb(5), Uleb(0),
+ Op(DW_OP_bit_piece), Uleb(3), Uleb(0),
+ ];
+
+ let result = [
+ Piece {
+ size_in_bits: Some(5),
+ bit_offset: Some(0),
+ location: Location::Value {
+ value: Value::Generic(7),
+ },
+ },
+ Piece {
+ size_in_bits: Some(3),
+ bit_offset: Some(0),
+ location: Location::Empty,
+ },
+ ];
+
+ check_eval(&program, Ok(&result), encoding4());
+
+ #[rustfmt::skip]
+ let program = [
+ Op(DW_OP_lit7),
+ ];
+
+ let result = [Piece {
+ size_in_bits: None,
+ bit_offset: None,
+ location: Location::Address { address: 7 },
+ }];
+
+ check_eval(&program, Ok(&result), encoding4());
+
+ #[rustfmt::skip]
+ let program = [
+ Op(DW_OP_implicit_pointer), U32(0x1234_5678), Sleb(0x123),
+ ];
+
+ let result = [Piece {
+ size_in_bits: None,
+ bit_offset: None,
+ location: Location::ImplicitPointer {
+ value: DebugInfoOffset(0x1234_5678),
+ byte_offset: 0x123,
+ },
+ }];
+
+ check_eval(&program, Ok(&result), encoding4());
+
+ #[rustfmt::skip]
+ let program = [
+ Op(DW_OP_reg3),
+ Op(DW_OP_piece), Uleb(4),
+ Op(DW_OP_reg4),
+ ];
+
+ check_eval(&program, Err(Error::InvalidPiece), encoding4());
+
+ #[rustfmt::skip]
+ let program = [
+ Op(DW_OP_reg3),
+ Op(DW_OP_piece), Uleb(4),
+ Op(DW_OP_lit0),
+ ];
+
+ check_eval(&program, Err(Error::InvalidPiece), encoding4());
+ }
+
+ #[test]
+ fn test_eval_max_iterations() {
+ // It's nice if an operation and its arguments can fit on a single
+ // line in the test program.
+ use self::AssemblerEntry::*;
+ use crate::constants::*;
+
+ #[rustfmt::skip]
+ let program = [
+ Mark(1),
+ Op(DW_OP_skip), Branch(1),
+ ];
+
+ check_eval_with_args(
+ &program,
+ Err(Error::TooManyIterations),
+ encoding4(),
+ None,
+ None,
+ Some(150),
+ |_, _| panic!(),
+ );
+ }
+
+ #[test]
+ fn test_eval_typed_stack() {
+ use self::AssemblerEntry::*;
+ use crate::constants::*;
+
+ let base_types = [
+ ValueType::Generic,
+ ValueType::U16,
+ ValueType::U32,
+ ValueType::F32,
+ ];
+
+ // TODO: convert, reinterpret
+ #[rustfmt::skip]
+ let tests = [
+ (
+ &[
+ Op(DW_OP_const_type), Uleb(1), U8(2), U16(0x1234),
+ Op(DW_OP_stack_value),
+ ][..],
+ Value::U16(0x1234),
+ ),
+ (
+ &[
+ Op(DW_OP_regval_type), Uleb(0x1234), Uleb(1),
+ Op(DW_OP_stack_value),
+ ][..],
+ Value::U16(0x2340),
+ ),
+ (
+ &[
+ Op(DW_OP_addr), U32(0x7fff_ffff),
+ Op(DW_OP_deref_type), U8(2), Uleb(1),
+ Op(DW_OP_stack_value),
+ ][..],
+ Value::U16(0xfff0),
+ ),
+ (
+ &[
+ Op(DW_OP_lit1),
+ Op(DW_OP_addr), U32(0x7fff_ffff),
+ Op(DW_OP_xderef_type), U8(2), Uleb(1),
+ Op(DW_OP_stack_value),
+ ][..],
+ Value::U16(0xfff1),
+ ),
+ (
+ &[
+ Op(DW_OP_const_type), Uleb(1), U8(2), U16(0x1234),
+ Op(DW_OP_convert), Uleb(2),
+ Op(DW_OP_stack_value),
+ ][..],
+ Value::U32(0x1234),
+ ),
+ (
+ &[
+ Op(DW_OP_const_type), Uleb(2), U8(4), U32(0x3f80_0000),
+ Op(DW_OP_reinterpret), Uleb(3),
+ Op(DW_OP_stack_value),
+ ][..],
+ Value::F32(1.0),
+ ),
+ ];
+ for &(program, value) in &tests {
+ let result = [Piece {
+ size_in_bits: None,
+ bit_offset: None,
+ location: Location::Value { value },
+ }];
+
+ check_eval_with_args(
+ program,
+ Ok(&result),
+ encoding4(),
+ None,
+ None,
+ None,
+ |eval, mut result| {
+ while result != EvaluationResult::Complete {
+ result = match result {
+ EvaluationResult::RequiresMemory {
+ address,
+ size,
+ space,
+ base_type,
+ } => {
+ let mut v = address << 4;
+ if let Some(value) = space {
+ v += value;
+ }
+ v &= (1u64 << (8 * size)) - 1;
+ let v = Value::from_u64(base_types[base_type.0], v)?;
+ eval.resume_with_memory(v)?
+ }
+ EvaluationResult::RequiresRegister {
+ register,
+ base_type,
+ } => {
+ let v = Value::from_u64(
+ base_types[base_type.0],
+ u64::from(register.0) << 4,
+ )?;
+ eval.resume_with_register(v)?
+ }
+ EvaluationResult::RequiresBaseType(offset) => {
+ eval.resume_with_base_type(base_types[offset.0])?
+ }
+ EvaluationResult::RequiresRelocatedAddress(address) => {
+ eval.resume_with_relocated_address(address)?
+ }
+ _ => panic!("Unexpected result {:?}", result),
+ }
+ }
+ Ok(result)
+ },
+ );
+ }
+ }
+}
diff --git a/vendor/gimli-0.26.2/src/read/pubnames.rs b/vendor/gimli-0.26.2/src/read/pubnames.rs
new file mode 100644
index 000000000..e8b7e5528
--- /dev/null
+++ b/vendor/gimli-0.26.2/src/read/pubnames.rs
@@ -0,0 +1,141 @@
+use crate::common::{DebugInfoOffset, SectionId};
+use crate::endianity::Endianity;
+use crate::read::lookup::{DebugLookup, LookupEntryIter, PubStuffEntry, PubStuffParser};
+use crate::read::{EndianSlice, Reader, Result, Section, UnitOffset};
+
+/// A single parsed pubname.
+#[derive(Debug, Clone)]
+pub struct PubNamesEntry<R: Reader> {
+ unit_header_offset: DebugInfoOffset<R::Offset>,
+ die_offset: UnitOffset<R::Offset>,
+ name: R,
+}
+
+impl<R: Reader> PubNamesEntry<R> {
+ /// Returns the name this entry refers to.
+ pub fn name(&self) -> &R {
+ &self.name
+ }
+
+ /// Returns the offset into the .debug_info section for the header of the compilation unit
+ /// which contains this name.
+ pub fn unit_header_offset(&self) -> DebugInfoOffset<R::Offset> {
+ self.unit_header_offset
+ }
+
+ /// Returns the offset into the compilation unit for the debugging information entry which
+ /// has this name.
+ pub fn die_offset(&self) -> UnitOffset<R::Offset> {
+ self.die_offset
+ }
+}
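+
+// Illustrative sketch (assumes a `dwarf: gimli::Dwarf<R>` loaded elsewhere;
+// the exact lookup calls are the caller's choice): the two offsets together
+// locate the debugging information entry that declares the name.
+//
+//     let header = dwarf.debug_info.header_from_offset(entry.unit_header_offset())?;
+//     let unit = dwarf.unit(header)?;
+//     let die = unit.entry(entry.die_offset())?;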
+
+impl<R: Reader> PubStuffEntry<R> for PubNamesEntry<R> {
+ fn new(
+ die_offset: UnitOffset<R::Offset>,
+ name: R,
+ unit_header_offset: DebugInfoOffset<R::Offset>,
+ ) -> Self {
+ PubNamesEntry {
+ unit_header_offset,
+ die_offset,
+ name,
+ }
+ }
+}
+
+/// The `DebugPubNames` struct represents the DWARF public names information
+/// found in the `.debug_pubnames` section.
+#[derive(Debug, Clone)]
+pub struct DebugPubNames<R: Reader>(DebugLookup<R, PubStuffParser<R, PubNamesEntry<R>>>);
+
+impl<'input, Endian> DebugPubNames<EndianSlice<'input, Endian>>
+where
+ Endian: Endianity,
+{
+ /// Construct a new `DebugPubNames` instance from the data in the `.debug_pubnames`
+ /// section.
+ ///
+ /// It is the caller's responsibility to read the `.debug_pubnames` section and
+ /// present it as a `&[u8]` slice. That means using some ELF loader on
+ /// Linux, a Mach-O loader on macOS, etc.
+ ///
+ /// ```
+ /// use gimli::{DebugPubNames, LittleEndian};
+ ///
+ /// # let buf = [];
+ /// # let read_debug_pubnames_section_somehow = || &buf;
+ /// let debug_pubnames =
+ /// DebugPubNames::new(read_debug_pubnames_section_somehow(), LittleEndian);
+ /// ```
+ pub fn new(debug_pubnames_section: &'input [u8], endian: Endian) -> Self {
+ Self::from(EndianSlice::new(debug_pubnames_section, endian))
+ }
+}
+
+impl<R: Reader> DebugPubNames<R> {
+ /// Iterate the pubnames in the `.debug_pubnames` section.
+ ///
+ /// ```
+ /// use gimli::{DebugPubNames, EndianSlice, LittleEndian};
+ ///
+ /// # let buf = [];
+ /// # let read_debug_pubnames_section_somehow = || &buf;
+ /// let debug_pubnames =
+ /// DebugPubNames::new(read_debug_pubnames_section_somehow(), LittleEndian);
+ ///
+ /// let mut iter = debug_pubnames.items();
+ /// while let Some(pubname) = iter.next().unwrap() {
+ /// println!("pubname {} found!", pubname.name().to_string_lossy());
+ /// }
+ /// ```
+ pub fn items(&self) -> PubNamesEntryIter<R> {
+ PubNamesEntryIter(self.0.items())
+ }
+}
+
+impl<R: Reader> Section<R> for DebugPubNames<R> {
+ fn id() -> SectionId {
+ SectionId::DebugPubNames
+ }
+
+ fn reader(&self) -> &R {
+ self.0.reader()
+ }
+}
+
+impl<R: Reader> From<R> for DebugPubNames<R> {
+ fn from(debug_pubnames_section: R) -> Self {
+ DebugPubNames(DebugLookup::from(debug_pubnames_section))
+ }
+}
+
+/// An iterator over the pubnames from a `.debug_pubnames` section.
+///
+/// Can be [used with
+/// `FallibleIterator`](./index.html#using-with-fallibleiterator).
+#[derive(Debug, Clone)]
+pub struct PubNamesEntryIter<R: Reader>(LookupEntryIter<R, PubStuffParser<R, PubNamesEntry<R>>>);
+
+impl<R: Reader> PubNamesEntryIter<R> {
+ /// Advance the iterator and return the next pubname.
+ ///
+ /// Returns the newly parsed pubname as `Ok(Some(pubname))`. Returns
+ /// `Ok(None)` when iteration is complete and all pubnames have already been
+ /// parsed and yielded. If an error occurs while parsing the next pubname,
+ /// then this error is returned as `Err(e)`, and all subsequent calls return
+ /// `Ok(None)`.
+ pub fn next(&mut self) -> Result<Option<PubNamesEntry<R>>> {
+ self.0.next()
+ }
+}
+
+#[cfg(feature = "fallible-iterator")]
+impl<R: Reader> fallible_iterator::FallibleIterator for PubNamesEntryIter<R> {
+ type Item = PubNamesEntry<R>;
+ type Error = crate::read::Error;
+
+ fn next(&mut self) -> ::core::result::Result<Option<Self::Item>, Self::Error> {
+ self.0.next()
+ }
+}
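+
+// Illustrative sketch (requires the optional `fallible-iterator` feature):
+// the iterator also composes with `FallibleIterator` adaptors, for example
+// collecting the raw name readers into a `Vec`.
+//
+//     use fallible_iterator::FallibleIterator;
+//     let names: Vec<_> = debug_pubnames
+//         .items()
+//         .map(|entry| Ok(entry.name().clone()))
+//         .collect()?;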
diff --git a/vendor/gimli-0.26.2/src/read/pubtypes.rs b/vendor/gimli-0.26.2/src/read/pubtypes.rs
new file mode 100644
index 000000000..6723b4222
--- /dev/null
+++ b/vendor/gimli-0.26.2/src/read/pubtypes.rs
@@ -0,0 +1,141 @@
+use crate::common::{DebugInfoOffset, SectionId};
+use crate::endianity::Endianity;
+use crate::read::lookup::{DebugLookup, LookupEntryIter, PubStuffEntry, PubStuffParser};
+use crate::read::{EndianSlice, Reader, Result, Section, UnitOffset};
+
+/// A single parsed pubtype.
+#[derive(Debug, Clone)]
+pub struct PubTypesEntry<R: Reader> {
+ unit_header_offset: DebugInfoOffset<R::Offset>,
+ die_offset: UnitOffset<R::Offset>,
+ name: R,
+}
+
+impl<R: Reader> PubTypesEntry<R> {
+ /// Returns the name of the type this entry refers to.
+ pub fn name(&self) -> &R {
+ &self.name
+ }
+
+ /// Returns the offset into the .debug_info section for the header of the compilation unit
+ /// which contains the type with this name.
+ pub fn unit_header_offset(&self) -> DebugInfoOffset<R::Offset> {
+ self.unit_header_offset
+ }
+
+ /// Returns the offset into the compilation unit for the debugging information entry which
+ /// has the type with this name.
+ pub fn die_offset(&self) -> UnitOffset<R::Offset> {
+ self.die_offset
+ }
+}
+
+impl<R: Reader> PubStuffEntry<R> for PubTypesEntry<R> {
+ fn new(
+ die_offset: UnitOffset<R::Offset>,
+ name: R,
+ unit_header_offset: DebugInfoOffset<R::Offset>,
+ ) -> Self {
+ PubTypesEntry {
+ unit_header_offset,
+ die_offset,
+ name,
+ }
+ }
+}
+
+/// The `DebugPubTypes` struct represents the DWARF public types information
+/// found in the `.debug_pubtypes` section.
+#[derive(Debug, Clone)]
+pub struct DebugPubTypes<R: Reader>(DebugLookup<R, PubStuffParser<R, PubTypesEntry<R>>>);
+
+impl<'input, Endian> DebugPubTypes<EndianSlice<'input, Endian>>
+where
+ Endian: Endianity,
+{
+ /// Construct a new `DebugPubTypes` instance from the data in the `.debug_pubtypes`
+ /// section.
+ ///
+ /// It is the caller's responsibility to read the `.debug_pubtypes` section and
+ /// present it as a `&[u8]` slice. That means using some ELF loader on
+ /// Linux, a Mach-O loader on macOS, etc.
+ ///
+ /// ```
+ /// use gimli::{DebugPubTypes, LittleEndian};
+ ///
+ /// # let buf = [];
+ /// # let read_debug_pubtypes_somehow = || &buf;
+ /// let debug_pubtypes =
+ /// DebugPubTypes::new(read_debug_pubtypes_somehow(), LittleEndian);
+ /// ```
+ pub fn new(debug_pubtypes_section: &'input [u8], endian: Endian) -> Self {
+ Self::from(EndianSlice::new(debug_pubtypes_section, endian))
+ }
+}
+
+impl<R: Reader> DebugPubTypes<R> {
+ /// Iterate the pubtypes in the `.debug_pubtypes` section.
+ ///
+ /// ```
+ /// use gimli::{DebugPubTypes, EndianSlice, LittleEndian};
+ ///
+ /// # let buf = [];
+ /// # let read_debug_pubtypes_section_somehow = || &buf;
+ /// let debug_pubtypes =
+ /// DebugPubTypes::new(read_debug_pubtypes_section_somehow(), LittleEndian);
+ ///
+ /// let mut iter = debug_pubtypes.items();
+ /// while let Some(pubtype) = iter.next().unwrap() {
+ /// println!("pubtype {} found!", pubtype.name().to_string_lossy());
+ /// }
+ /// ```
+ pub fn items(&self) -> PubTypesEntryIter<R> {
+ PubTypesEntryIter(self.0.items())
+ }
+}
+
+impl<R: Reader> Section<R> for DebugPubTypes<R> {
+ fn id() -> SectionId {
+ SectionId::DebugPubTypes
+ }
+
+ fn reader(&self) -> &R {
+ self.0.reader()
+ }
+}
+
+impl<R: Reader> From<R> for DebugPubTypes<R> {
+ fn from(debug_pubtypes_section: R) -> Self {
+ DebugPubTypes(DebugLookup::from(debug_pubtypes_section))
+ }
+}
+
+/// An iterator over the pubtypes from a `.debug_pubtypes` section.
+///
+/// Can be [used with
+/// `FallibleIterator`](./index.html#using-with-fallibleiterator).
+#[derive(Debug, Clone)]
+pub struct PubTypesEntryIter<R: Reader>(LookupEntryIter<R, PubStuffParser<R, PubTypesEntry<R>>>);
+
+impl<R: Reader> PubTypesEntryIter<R> {
+ /// Advance the iterator and return the next pubtype.
+ ///
+ /// Returns the newly parsed pubtype as `Ok(Some(pubtype))`. Returns
+ /// `Ok(None)` when iteration is complete and all pubtypes have already been
+ /// parsed and yielded. If an error occurs while parsing the next pubtype,
+ /// then this error is returned as `Err(e)`, and all subsequent calls return
+ /// `Ok(None)`.
+ pub fn next(&mut self) -> Result<Option<PubTypesEntry<R>>> {
+ self.0.next()
+ }
+}
+
+#[cfg(feature = "fallible-iterator")]
+impl<R: Reader> fallible_iterator::FallibleIterator for PubTypesEntryIter<R> {
+ type Item = PubTypesEntry<R>;
+ type Error = crate::read::Error;
+
+ fn next(&mut self) -> ::core::result::Result<Option<Self::Item>, Self::Error> {
+ self.0.next()
+ }
+}
diff --git a/vendor/gimli-0.26.2/src/read/reader.rs b/vendor/gimli-0.26.2/src/read/reader.rs
new file mode 100644
index 000000000..1bb748bb8
--- /dev/null
+++ b/vendor/gimli-0.26.2/src/read/reader.rs
@@ -0,0 +1,502 @@
+#[cfg(feature = "read")]
+use alloc::borrow::Cow;
+use core::convert::TryInto;
+use core::fmt::Debug;
+use core::hash::Hash;
+use core::ops::{Add, AddAssign, Sub};
+
+use crate::common::Format;
+use crate::endianity::Endianity;
+use crate::leb128;
+use crate::read::{Error, Result};
+
+/// An identifier for an offset within a section reader.
+///
+/// This is used for error reporting. The meaning of this value is specific to
+/// each reader implementation. The values should be chosen to be unique amongst
+/// all readers. If values are not unique then errors may point to the wrong reader.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub struct ReaderOffsetId(pub u64);
+
+/// A trait for offsets within a DWARF section.
+///
+/// This allows consumers to choose a size that is appropriate for their address space.
+pub trait ReaderOffset:
+ Debug + Copy + Eq + Ord + Hash + Add<Output = Self> + AddAssign + Sub<Output = Self>
+{
+ /// Convert a u8 to an offset.
+ fn from_u8(offset: u8) -> Self;
+
+ /// Convert a u16 to an offset.
+ fn from_u16(offset: u16) -> Self;
+
+ /// Convert an i16 to an offset.
+ fn from_i16(offset: i16) -> Self;
+
+ /// Convert a u32 to an offset.
+ fn from_u32(offset: u32) -> Self;
+
+ /// Convert a u64 to an offset.
+ ///
+ /// Returns `Error::UnsupportedOffset` if the value is too large.
+ fn from_u64(offset: u64) -> Result<Self>;
+
+ /// Convert an offset to a u64.
+ fn into_u64(self) -> u64;
+
+ /// Wrapping (modular) addition. Computes `self + other`.
+ fn wrapping_add(self, other: Self) -> Self;
+
+ /// Checked subtraction. Computes `self - other`.
+ fn checked_sub(self, other: Self) -> Option<Self>;
+}
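// Illustrative sketch (not part of the vendored file): writing code that is
// generic over the offset size, so the same helper works whether the reader
// stores offsets as u32, u64 or usize. The function name is ours; only
// `ReaderOffset` and its methods come from this module.
fn offset_from_raw<O: gimli::ReaderOffset>(raw: u64) -> gimli::Result<O> {
    // Fails with `Error::UnsupportedOffset` if `raw` does not fit in `O`,
    // e.g. a 5 GiB offset when `O` is `u32`.
    O::from_u64(raw)
}
// For example, `offset_from_raw::<u32>(0x1_0000_0000)` is an error, while
// `offset_from_raw::<u64>(0x1_0000_0000)` succeeds.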
+
+impl ReaderOffset for u64 {
+ #[inline]
+ fn from_u8(offset: u8) -> Self {
+ u64::from(offset)
+ }
+
+ #[inline]
+ fn from_u16(offset: u16) -> Self {
+ u64::from(offset)
+ }
+
+ #[inline]
+ fn from_i16(offset: i16) -> Self {
+ offset as u64
+ }
+
+ #[inline]
+ fn from_u32(offset: u32) -> Self {
+ u64::from(offset)
+ }
+
+ #[inline]
+ fn from_u64(offset: u64) -> Result<Self> {
+ Ok(offset)
+ }
+
+ #[inline]
+ fn into_u64(self) -> u64 {
+ self
+ }
+
+ #[inline]
+ fn wrapping_add(self, other: Self) -> Self {
+ self.wrapping_add(other)
+ }
+
+ #[inline]
+ fn checked_sub(self, other: Self) -> Option<Self> {
+ self.checked_sub(other)
+ }
+}
+
+impl ReaderOffset for u32 {
+ #[inline]
+ fn from_u8(offset: u8) -> Self {
+ u32::from(offset)
+ }
+
+ #[inline]
+ fn from_u16(offset: u16) -> Self {
+ u32::from(offset)
+ }
+
+ #[inline]
+ fn from_i16(offset: i16) -> Self {
+ offset as u32
+ }
+
+ #[inline]
+ fn from_u32(offset: u32) -> Self {
+ offset
+ }
+
+ #[inline]
+ fn from_u64(offset64: u64) -> Result<Self> {
+ let offset = offset64 as u32;
+ if u64::from(offset) == offset64 {
+ Ok(offset)
+ } else {
+ Err(Error::UnsupportedOffset)
+ }
+ }
+
+ #[inline]
+ fn into_u64(self) -> u64 {
+ u64::from(self)
+ }
+
+ #[inline]
+ fn wrapping_add(self, other: Self) -> Self {
+ self.wrapping_add(other)
+ }
+
+ #[inline]
+ fn checked_sub(self, other: Self) -> Option<Self> {
+ self.checked_sub(other)
+ }
+}
+
+impl ReaderOffset for usize {
+ #[inline]
+ fn from_u8(offset: u8) -> Self {
+ offset as usize
+ }
+
+ #[inline]
+ fn from_u16(offset: u16) -> Self {
+ offset as usize
+ }
+
+ #[inline]
+ fn from_i16(offset: i16) -> Self {
+ offset as usize
+ }
+
+ #[inline]
+ fn from_u32(offset: u32) -> Self {
+ offset as usize
+ }
+
+ #[inline]
+ fn from_u64(offset64: u64) -> Result<Self> {
+ let offset = offset64 as usize;
+ if offset as u64 == offset64 {
+ Ok(offset)
+ } else {
+ Err(Error::UnsupportedOffset)
+ }
+ }
+
+ #[inline]
+ fn into_u64(self) -> u64 {
+ self as u64
+ }
+
+ #[inline]
+ fn wrapping_add(self, other: Self) -> Self {
+ self.wrapping_add(other)
+ }
+
+ #[inline]
+ fn checked_sub(self, other: Self) -> Option<Self> {
+ self.checked_sub(other)
+ }
+}
+
+#[cfg(not(feature = "read"))]
+pub(crate) mod seal_if_no_alloc {
+ #[derive(Debug)]
+ pub struct Sealed;
+}
+
+/// A trait for reading the data from a DWARF section.
+///
+/// All read operations advance the section offset of the reader
+/// unless specified otherwise.
+///
+/// ## Choosing a `Reader` Implementation
+///
+/// `gimli` comes with a few different `Reader` implementations and lets you
+/// choose the one that is right for your use case. A `Reader` is essentially a
+/// view into the raw bytes that make up some DWARF, but this view might borrow
+/// the underlying data or use reference counting ownership, and it might be
+/// thread safe or not.
+///
+/// | Implementation | Ownership | Thread Safe | Notes |
+/// |:------------------|:------------------|:------------|:------|
+/// | [`EndianSlice`](./struct.EndianSlice.html) | Borrowed | Yes | Fastest, but requires that all of your code work with borrows. |
+/// | [`EndianRcSlice`](./struct.EndianRcSlice.html) | Reference counted | No | Shared ownership via reference counting, which alleviates the borrow restrictions of `EndianSlice` but imposes reference counting increments and decrements. Cannot be sent across threads, because the reference count is not atomic. |
+/// | [`EndianArcSlice`](./struct.EndianArcSlice.html) | Reference counted | Yes | The same as `EndianRcSlice`, but uses atomic reference counting, and therefore reference counting operations are slower but `EndianArcSlice`s may be sent across threads. |
+/// | [`EndianReader<T>`](./struct.EndianReader.html) | Same as `T` | Same as `T` | Escape hatch for easily defining your own type of `Reader`. |
+pub trait Reader: Debug + Clone {
+ /// The endianity of bytes that are read.
+ type Endian: Endianity;
+
+ /// The type used for offsets and lengths.
+ type Offset: ReaderOffset;
+
+ /// Return the endianity of bytes that are read.
+ fn endian(&self) -> Self::Endian;
+
+ /// Return the number of bytes remaining.
+ fn len(&self) -> Self::Offset;
+
+ /// Set the number of bytes remaining to zero.
+ fn empty(&mut self);
+
+ /// Set the number of bytes remaining to the specified length.
+ fn truncate(&mut self, len: Self::Offset) -> Result<()>;
+
+ /// Return the offset of this reader's data relative to the start of
+ /// the given base reader's data.
+ ///
+ /// May panic if this reader's data is not contained within the given
+ /// base reader's data.
+ fn offset_from(&self, base: &Self) -> Self::Offset;
+
+ /// Return an identifier for the current reader offset.
+ fn offset_id(&self) -> ReaderOffsetId;
+
+ /// Return the offset corresponding to the given `id` if
+ /// it is associated with this reader.
+ fn lookup_offset_id(&self, id: ReaderOffsetId) -> Option<Self::Offset>;
+
+    /// Find the index of the first occurrence of the given byte.
+ /// The offset of the reader is not changed.
+ fn find(&self, byte: u8) -> Result<Self::Offset>;
+
+ /// Discard the specified number of bytes.
+ fn skip(&mut self, len: Self::Offset) -> Result<()>;
+
+ /// Split a reader in two.
+ ///
+ /// A new reader is returned that can be used to read the next
+ /// `len` bytes, and `self` is advanced so that it reads the remainder.
+ fn split(&mut self, len: Self::Offset) -> Result<Self>;
+
+    /// This trait cannot be implemented if the "read" feature is not enabled.
+    ///
+    /// The `Reader` trait has a few methods that depend on the `alloc` crate.
+    /// Disallowing external `Reader` implementations prevents a crate that only
+    /// depends on "read-core" from being broken if another crate that depends on
+    /// `gimli` enables the "read" feature.
+ #[cfg(not(feature = "read"))]
+ fn cannot_implement() -> seal_if_no_alloc::Sealed;
+
+ /// Return all remaining data as a clone-on-write slice.
+ ///
+ /// The slice will be borrowed where possible, but some readers may
+ /// always return an owned vector.
+ ///
+ /// Does not advance the reader.
+ #[cfg(feature = "read")]
+ fn to_slice(&self) -> Result<Cow<[u8]>>;
+
+ /// Convert all remaining data to a clone-on-write string.
+ ///
+ /// The string will be borrowed where possible, but some readers may
+ /// always return an owned string.
+ ///
+ /// Does not advance the reader.
+ ///
+ /// Returns an error if the data contains invalid characters.
+ #[cfg(feature = "read")]
+ fn to_string(&self) -> Result<Cow<str>>;
+
+ /// Convert all remaining data to a clone-on-write string, including invalid characters.
+ ///
+ /// The string will be borrowed where possible, but some readers may
+ /// always return an owned string.
+ ///
+ /// Does not advance the reader.
+ #[cfg(feature = "read")]
+ fn to_string_lossy(&self) -> Result<Cow<str>>;
+
+ /// Read exactly `buf.len()` bytes into `buf`.
+ fn read_slice(&mut self, buf: &mut [u8]) -> Result<()>;
+
+ /// Read a u8 array.
+ #[inline]
+ fn read_u8_array<A>(&mut self) -> Result<A>
+ where
+ A: Sized + Default + AsMut<[u8]>,
+ {
+ let mut val = Default::default();
+ self.read_slice(<A as AsMut<[u8]>>::as_mut(&mut val))?;
+ Ok(val)
+ }
+
+ /// Return true if the number of bytes remaining is zero.
+ #[inline]
+ fn is_empty(&self) -> bool {
+ self.len() == Self::Offset::from_u8(0)
+ }
+
+ /// Read a u8.
+ #[inline]
+ fn read_u8(&mut self) -> Result<u8> {
+ let a: [u8; 1] = self.read_u8_array()?;
+ Ok(a[0])
+ }
+
+ /// Read an i8.
+ #[inline]
+ fn read_i8(&mut self) -> Result<i8> {
+ let a: [u8; 1] = self.read_u8_array()?;
+ Ok(a[0] as i8)
+ }
+
+ /// Read a u16.
+ #[inline]
+ fn read_u16(&mut self) -> Result<u16> {
+ let a: [u8; 2] = self.read_u8_array()?;
+ Ok(self.endian().read_u16(&a))
+ }
+
+ /// Read an i16.
+ #[inline]
+ fn read_i16(&mut self) -> Result<i16> {
+ let a: [u8; 2] = self.read_u8_array()?;
+ Ok(self.endian().read_i16(&a))
+ }
+
+ /// Read a u32.
+ #[inline]
+ fn read_u32(&mut self) -> Result<u32> {
+ let a: [u8; 4] = self.read_u8_array()?;
+ Ok(self.endian().read_u32(&a))
+ }
+
+ /// Read an i32.
+ #[inline]
+ fn read_i32(&mut self) -> Result<i32> {
+ let a: [u8; 4] = self.read_u8_array()?;
+ Ok(self.endian().read_i32(&a))
+ }
+
+ /// Read a u64.
+ #[inline]
+ fn read_u64(&mut self) -> Result<u64> {
+ let a: [u8; 8] = self.read_u8_array()?;
+ Ok(self.endian().read_u64(&a))
+ }
+
+ /// Read an i64.
+ #[inline]
+ fn read_i64(&mut self) -> Result<i64> {
+ let a: [u8; 8] = self.read_u8_array()?;
+ Ok(self.endian().read_i64(&a))
+ }
+
+ /// Read a f32.
+ #[inline]
+ fn read_f32(&mut self) -> Result<f32> {
+ let a: [u8; 4] = self.read_u8_array()?;
+ Ok(self.endian().read_f32(&a))
+ }
+
+ /// Read a f64.
+ #[inline]
+ fn read_f64(&mut self) -> Result<f64> {
+ let a: [u8; 8] = self.read_u8_array()?;
+ Ok(self.endian().read_f64(&a))
+ }
+
+    /// Read an unsigned n-byte integer and return it as a `u64`.
+    ///
+    /// # Panics
+    ///
+    /// Panics when `n < 1` or `n > 8`.
+ #[inline]
+ fn read_uint(&mut self, n: usize) -> Result<u64> {
+ let mut buf = [0; 8];
+ self.read_slice(&mut buf[..n])?;
+ Ok(self.endian().read_uint(&buf[..n]))
+ }
+
+ /// Read a null-terminated slice, and return it (excluding the null).
+ fn read_null_terminated_slice(&mut self) -> Result<Self> {
+ let idx = self.find(0)?;
+ let val = self.split(idx)?;
+ self.skip(Self::Offset::from_u8(1))?;
+ Ok(val)
+ }
+
+ /// Skip a LEB128 encoded integer.
+ fn skip_leb128(&mut self) -> Result<()> {
+ leb128::read::skip(self)
+ }
+
+ /// Read an unsigned LEB128 encoded integer.
+ fn read_uleb128(&mut self) -> Result<u64> {
+ leb128::read::unsigned(self)
+ }
+
+ /// Read an unsigned LEB128 encoded u32.
+ fn read_uleb128_u32(&mut self) -> Result<u32> {
+ leb128::read::unsigned(self)?
+ .try_into()
+ .map_err(|_| Error::BadUnsignedLeb128)
+ }
+
+ /// Read an unsigned LEB128 encoded u16.
+ fn read_uleb128_u16(&mut self) -> Result<u16> {
+ leb128::read::u16(self)
+ }
+
+ /// Read a signed LEB128 encoded integer.
+ fn read_sleb128(&mut self) -> Result<i64> {
+ leb128::read::signed(self)
+ }
+
+ /// Read an initial length field.
+ ///
+ /// This field is encoded as either a 32-bit length or
+ /// a 64-bit length, and the returned `Format` indicates which.
+ fn read_initial_length(&mut self) -> Result<(Self::Offset, Format)> {
+ const MAX_DWARF_32_UNIT_LENGTH: u32 = 0xffff_fff0;
+ const DWARF_64_INITIAL_UNIT_LENGTH: u32 = 0xffff_ffff;
+
+ let val = self.read_u32()?;
+ if val < MAX_DWARF_32_UNIT_LENGTH {
+ Ok((Self::Offset::from_u32(val), Format::Dwarf32))
+ } else if val == DWARF_64_INITIAL_UNIT_LENGTH {
+ let val = self.read_u64().and_then(Self::Offset::from_u64)?;
+ Ok((val, Format::Dwarf64))
+ } else {
+ Err(Error::UnknownReservedLength)
+ }
+ }
+
+ /// Read an address-sized integer, and return it as a `u64`.
+ fn read_address(&mut self, address_size: u8) -> Result<u64> {
+ match address_size {
+ 1 => self.read_u8().map(u64::from),
+ 2 => self.read_u16().map(u64::from),
+ 4 => self.read_u32().map(u64::from),
+ 8 => self.read_u64(),
+ otherwise => Err(Error::UnsupportedAddressSize(otherwise)),
+ }
+ }
+
+ /// Parse a word-sized integer according to the DWARF format.
+ ///
+ /// These are always used to encode section offsets or lengths,
+ /// and so have a type of `Self::Offset`.
+ fn read_word(&mut self, format: Format) -> Result<Self::Offset> {
+ match format {
+ Format::Dwarf32 => self.read_u32().map(Self::Offset::from_u32),
+ Format::Dwarf64 => self.read_u64().and_then(Self::Offset::from_u64),
+ }
+ }
+
+ /// Parse a word-sized section length according to the DWARF format.
+ #[inline]
+ fn read_length(&mut self, format: Format) -> Result<Self::Offset> {
+ self.read_word(format)
+ }
+
+ /// Parse a word-sized section offset according to the DWARF format.
+ #[inline]
+ fn read_offset(&mut self, format: Format) -> Result<Self::Offset> {
+ self.read_word(format)
+ }
+
+ /// Parse a section offset of the given size.
+ ///
+ /// This is used for `DW_FORM_ref_addr` values in DWARF version 2.
+ fn read_sized_offset(&mut self, size: u8) -> Result<Self::Offset> {
+ match size {
+ 1 => self.read_u8().map(u64::from),
+ 2 => self.read_u16().map(u64::from),
+ 4 => self.read_u32().map(u64::from),
+ 8 => self.read_u64(),
+ otherwise => Err(Error::UnsupportedOffsetSize(otherwise)),
+ }
+ .and_then(Self::Offset::from_u64)
+ }
+}
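// Illustrative sketch (not part of the vendored file): exercising the read
// methods above through the `EndianSlice` implementation. The helper name and
// the byte buffer are made up purely for illustration: a DWARF32 initial
// length of 0x20, a 2-byte version of 4, and the ULEB128 encoding of 300.
fn reader_demo() -> gimli::Result<()> {
    use gimli::{EndianSlice, LittleEndian, Reader};

    let buf = [0x20, 0x00, 0x00, 0x00, 0x04, 0x00, 0xac, 0x02];
    let mut reader = EndianSlice::new(&buf, LittleEndian);

    // 0x20 is below 0xffff_fff0, so this is a DWARF32 unit length.
    let (length, format) = reader.read_initial_length()?;
    assert_eq!(format, gimli::Format::Dwarf32);
    assert_eq!(length, 0x20);

    assert_eq!(reader.read_u16()?, 4);

    // 0xac 0x02 decodes to 300.
    assert_eq!(reader.read_uleb128()?, 300);
    assert!(reader.is_empty());
    Ok(())
}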
diff --git a/vendor/gimli-0.26.2/src/read/rnglists.rs b/vendor/gimli-0.26.2/src/read/rnglists.rs
new file mode 100644
index 000000000..d8d49042f
--- /dev/null
+++ b/vendor/gimli-0.26.2/src/read/rnglists.rs
@@ -0,0 +1,1354 @@
+use crate::common::{
+ DebugAddrBase, DebugAddrIndex, DebugRngListsBase, DebugRngListsIndex, DwarfFileType, Encoding,
+ RangeListsOffset, SectionId,
+};
+use crate::constants;
+use crate::endianity::Endianity;
+use crate::read::{
+ lists::ListsHeader, DebugAddr, EndianSlice, Error, Reader, ReaderOffset, ReaderOffsetId,
+ Result, Section,
+};
+
+/// The raw contents of the `.debug_ranges` section.
+#[derive(Debug, Default, Clone, Copy)]
+pub struct DebugRanges<R> {
+ pub(crate) section: R,
+}
+
+impl<'input, Endian> DebugRanges<EndianSlice<'input, Endian>>
+where
+ Endian: Endianity,
+{
+ /// Construct a new `DebugRanges` instance from the data in the `.debug_ranges`
+ /// section.
+ ///
+ /// It is the caller's responsibility to read the `.debug_ranges` section and
+ /// present it as a `&[u8]` slice. That means using some ELF loader on
+ /// Linux, a Mach-O loader on macOS, etc.
+ ///
+ /// ```
+ /// use gimli::{DebugRanges, LittleEndian};
+ ///
+ /// # let buf = [0x00, 0x01, 0x02, 0x03];
+ /// # let read_debug_ranges_section_somehow = || &buf;
+ /// let debug_ranges = DebugRanges::new(read_debug_ranges_section_somehow(), LittleEndian);
+ /// ```
+ pub fn new(section: &'input [u8], endian: Endian) -> Self {
+ Self::from(EndianSlice::new(section, endian))
+ }
+}
+
+impl<R> Section<R> for DebugRanges<R> {
+ fn id() -> SectionId {
+ SectionId::DebugRanges
+ }
+
+ fn reader(&self) -> &R {
+ &self.section
+ }
+}
+
+impl<R> From<R> for DebugRanges<R> {
+ fn from(section: R) -> Self {
+ DebugRanges { section }
+ }
+}
+
+/// The `DebugRngLists` struct represents the contents of the
+/// `.debug_rnglists` section.
+#[derive(Debug, Default, Clone, Copy)]
+pub struct DebugRngLists<R> {
+ section: R,
+}
+
+impl<'input, Endian> DebugRngLists<EndianSlice<'input, Endian>>
+where
+ Endian: Endianity,
+{
+ /// Construct a new `DebugRngLists` instance from the data in the
+ /// `.debug_rnglists` section.
+ ///
+ /// It is the caller's responsibility to read the `.debug_rnglists`
+ /// section and present it as a `&[u8]` slice. That means using some ELF
+ /// loader on Linux, a Mach-O loader on macOS, etc.
+ ///
+ /// ```
+ /// use gimli::{DebugRngLists, LittleEndian};
+ ///
+ /// # let buf = [0x00, 0x01, 0x02, 0x03];
+ /// # let read_debug_rnglists_section_somehow = || &buf;
+ /// let debug_rnglists =
+ /// DebugRngLists::new(read_debug_rnglists_section_somehow(), LittleEndian);
+ /// ```
+ pub fn new(section: &'input [u8], endian: Endian) -> Self {
+ Self::from(EndianSlice::new(section, endian))
+ }
+}
+
+impl<R> Section<R> for DebugRngLists<R> {
+ fn id() -> SectionId {
+ SectionId::DebugRngLists
+ }
+
+ fn reader(&self) -> &R {
+ &self.section
+ }
+}
+
+impl<R> From<R> for DebugRngLists<R> {
+ fn from(section: R) -> Self {
+ DebugRngLists { section }
+ }
+}
+
+#[allow(unused)]
+pub(crate) type RngListsHeader = ListsHeader;
+
+impl<Offset> DebugRngListsBase<Offset>
+where
+ Offset: ReaderOffset,
+{
+ /// Returns a `DebugRngListsBase` with the default value of DW_AT_rnglists_base
+ /// for the given `Encoding` and `DwarfFileType`.
+ pub fn default_for_encoding_and_file(
+ encoding: Encoding,
+ file_type: DwarfFileType,
+ ) -> DebugRngListsBase<Offset> {
+ if encoding.version >= 5 && file_type == DwarfFileType::Dwo {
+ // In .dwo files, the compiler omits the DW_AT_rnglists_base attribute (because there is
+ // only a single unit in the file) but we must skip past the header, which the attribute
+ // would normally do for us.
+ DebugRngListsBase(Offset::from_u8(RngListsHeader::size_for_encoding(encoding)))
+ } else {
+ DebugRngListsBase(Offset::from_u8(0))
+ }
+ }
+}
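// Illustrative sketch (not part of the vendored file): the default base
// computed above for a DWARF32, version 5 split unit. Under the DWARF 5
// header layout, the `.debug_rnglists` header is 4 (unit length) + 2 (version)
// + 1 (address size) + 1 (segment selector size) + 4 (offset entry count)
// = 12 bytes, so the base points just past it. The helper name is ours.
fn rnglists_base_demo() {
    let encoding = gimli::Encoding {
        format: gimli::Format::Dwarf32,
        version: 5,
        address_size: 8,
    };
    let base: gimli::DebugRngListsBase<usize> =
        gimli::DebugRngListsBase::default_for_encoding_and_file(
            encoding,
            gimli::DwarfFileType::Dwo,
        );
    assert_eq!(base.0, 12);
}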
+
+/// The DWARF data found in `.debug_ranges` and `.debug_rnglists` sections.
+#[derive(Debug, Default, Clone, Copy)]
+pub struct RangeLists<R> {
+ debug_ranges: DebugRanges<R>,
+ debug_rnglists: DebugRngLists<R>,
+}
+
+impl<R> RangeLists<R> {
+ /// Construct a new `RangeLists` instance from the data in the `.debug_ranges` and
+ /// `.debug_rnglists` sections.
+ pub fn new(debug_ranges: DebugRanges<R>, debug_rnglists: DebugRngLists<R>) -> RangeLists<R> {
+ RangeLists {
+ debug_ranges,
+ debug_rnglists,
+ }
+ }
+
+ /// Return the `.debug_ranges` section.
+ pub fn debug_ranges(&self) -> &DebugRanges<R> {
+ &self.debug_ranges
+ }
+
+ /// Replace the `.debug_ranges` section.
+ ///
+ /// This is useful for `.dwo` files when using the GNU split-dwarf extension to DWARF 4.
+ pub fn set_debug_ranges(&mut self, debug_ranges: DebugRanges<R>) {
+ self.debug_ranges = debug_ranges;
+ }
+
+ /// Return the `.debug_rnglists` section.
+ pub fn debug_rnglists(&self) -> &DebugRngLists<R> {
+ &self.debug_rnglists
+ }
+}
+
+impl<T> RangeLists<T> {
+ /// Create a `RangeLists` that references the data in `self`.
+ ///
+ /// This is useful when `R` implements `Reader` but `T` does not.
+ ///
+ /// ## Example Usage
+ ///
+ /// ```rust,no_run
+ /// # let load_section = || unimplemented!();
+ /// // Read the DWARF section into a `Vec` with whatever object loader you're using.
+ /// let owned_section: gimli::RangeLists<Vec<u8>> = load_section();
+ /// // Create a reference to the DWARF section.
+ /// let section = owned_section.borrow(|section| {
+ /// gimli::EndianSlice::new(&section, gimli::LittleEndian)
+ /// });
+ /// ```
+ pub fn borrow<'a, F, R>(&'a self, mut borrow: F) -> RangeLists<R>
+ where
+ F: FnMut(&'a T) -> R,
+ {
+ RangeLists {
+ debug_ranges: borrow(&self.debug_ranges.section).into(),
+ debug_rnglists: borrow(&self.debug_rnglists.section).into(),
+ }
+ }
+}
+
+impl<R: Reader> RangeLists<R> {
+ /// Iterate over the `Range` list entries starting at the given offset.
+ ///
+ /// The `unit_version` and `address_size` must match the compilation unit that the
+ /// offset was contained in.
+ ///
+ /// The `base_address` should be obtained from the `DW_AT_low_pc` attribute in the
+ /// `DW_TAG_compile_unit` entry for the compilation unit that contains this range list.
+ ///
+ /// Can be [used with
+ /// `FallibleIterator`](./index.html#using-with-fallibleiterator).
+ pub fn ranges(
+ &self,
+ offset: RangeListsOffset<R::Offset>,
+ unit_encoding: Encoding,
+ base_address: u64,
+ debug_addr: &DebugAddr<R>,
+ debug_addr_base: DebugAddrBase<R::Offset>,
+ ) -> Result<RngListIter<R>> {
+ Ok(RngListIter::new(
+ self.raw_ranges(offset, unit_encoding)?,
+ base_address,
+ debug_addr.clone(),
+ debug_addr_base,
+ ))
+ }
+
+    /// Iterate over the `RawRngListEntry` values starting at the given offset.
+ ///
+ /// The `unit_encoding` must match the compilation unit that the
+ /// offset was contained in.
+ ///
+ /// This iterator does not perform any processing of the range entries,
+ /// such as handling base addresses.
+ ///
+ /// Can be [used with
+ /// `FallibleIterator`](./index.html#using-with-fallibleiterator).
+ pub fn raw_ranges(
+ &self,
+ offset: RangeListsOffset<R::Offset>,
+ unit_encoding: Encoding,
+ ) -> Result<RawRngListIter<R>> {
+ let (mut input, format) = if unit_encoding.version <= 4 {
+ (self.debug_ranges.section.clone(), RangeListsFormat::Bare)
+ } else {
+ (self.debug_rnglists.section.clone(), RangeListsFormat::RLE)
+ };
+ input.skip(offset.0)?;
+ Ok(RawRngListIter::new(input, unit_encoding, format))
+ }
+
+ /// Returns the `.debug_rnglists` offset at the given `base` and `index`.
+ ///
+ /// The `base` must be the `DW_AT_rnglists_base` value from the compilation unit DIE.
+ /// This is an offset that points to the first entry following the header.
+ ///
+ /// The `index` is the value of a `DW_FORM_rnglistx` attribute.
+ ///
+ /// The `unit_encoding` must match the compilation unit that the
+ /// index was contained in.
+ pub fn get_offset(
+ &self,
+ unit_encoding: Encoding,
+ base: DebugRngListsBase<R::Offset>,
+ index: DebugRngListsIndex<R::Offset>,
+ ) -> Result<RangeListsOffset<R::Offset>> {
+ let format = unit_encoding.format;
+ let input = &mut self.debug_rnglists.section.clone();
+ input.skip(base.0)?;
+ input.skip(R::Offset::from_u64(
+ index.0.into_u64() * u64::from(format.word_size()),
+ )?)?;
+ input
+ .read_offset(format)
+ .map(|x| RangeListsOffset(base.0 + x))
+ }
+
+ /// Call `Reader::lookup_offset_id` for each section, and return the first match.
+ pub fn lookup_offset_id(&self, id: ReaderOffsetId) -> Option<(SectionId, R::Offset)> {
+ self.debug_ranges
+ .lookup_offset_id(id)
+ .or_else(|| self.debug_rnglists.lookup_offset_id(id))
+ }
+}
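// Illustrative sketch (not part of the vendored file): walking the processed
// ranges for one unit. Everything bound here (`rnglists`, `offset`,
// `encoding`, `base_address`, `debug_addr`, `debug_addr_base`) is assumed to
// have been obtained while parsing the unit; only the `ranges`/`next` calls
// are the API shown above.
fn dump_ranges<R: gimli::Reader>(
    rnglists: &gimli::RangeLists<R>,
    offset: gimli::RangeListsOffset<R::Offset>,
    encoding: gimli::Encoding,
    base_address: u64,
    debug_addr: &gimli::DebugAddr<R>,
    debug_addr_base: gimli::DebugAddrBase<R::Offset>,
) -> gimli::Result<()> {
    let mut iter = rnglists.ranges(offset, encoding, base_address, debug_addr, debug_addr_base)?;
    while let Some(range) = iter.next()? {
        println!("{:#x}..{:#x}", range.begin, range.end);
    }
    Ok(())
}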
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+enum RangeListsFormat {
+ /// The bare range list format used before DWARF 5.
+ Bare,
+ /// The DW_RLE encoded range list format used in DWARF 5.
+ RLE,
+}
+
+/// A raw iterator over an address range list.
+///
+/// This iterator does not perform any processing of the range entries,
+/// such as handling base addresses.
+#[derive(Debug)]
+pub struct RawRngListIter<R: Reader> {
+ input: R,
+ encoding: Encoding,
+ format: RangeListsFormat,
+}
+
+/// A raw entry in .debug_rnglists
+#[derive(Clone, Debug)]
+pub enum RawRngListEntry<T> {
+ /// A range from DWARF version <= 4.
+ AddressOrOffsetPair {
+ /// Start of range. May be an address or an offset.
+ begin: u64,
+ /// End of range. May be an address or an offset.
+ end: u64,
+ },
+ /// DW_RLE_base_address
+ BaseAddress {
+ /// base address
+ addr: u64,
+ },
+ /// DW_RLE_base_addressx
+ BaseAddressx {
+ /// base address
+ addr: DebugAddrIndex<T>,
+ },
+ /// DW_RLE_startx_endx
+ StartxEndx {
+ /// start of range
+ begin: DebugAddrIndex<T>,
+ /// end of range
+ end: DebugAddrIndex<T>,
+ },
+ /// DW_RLE_startx_length
+ StartxLength {
+ /// start of range
+ begin: DebugAddrIndex<T>,
+ /// length of range
+ length: u64,
+ },
+ /// DW_RLE_offset_pair
+ OffsetPair {
+ /// start of range
+ begin: u64,
+ /// end of range
+ end: u64,
+ },
+ /// DW_RLE_start_end
+ StartEnd {
+ /// start of range
+ begin: u64,
+ /// end of range
+ end: u64,
+ },
+ /// DW_RLE_start_length
+ StartLength {
+ /// start of range
+ begin: u64,
+ /// length of range
+ length: u64,
+ },
+}
+
+impl<T: ReaderOffset> RawRngListEntry<T> {
+ /// Parse a range entry from `.debug_rnglists`
+ fn parse<R: Reader<Offset = T>>(
+ input: &mut R,
+ encoding: Encoding,
+ format: RangeListsFormat,
+ ) -> Result<Option<Self>> {
+ match format {
+ RangeListsFormat::Bare => {
+ let range = RawRange::parse(input, encoding.address_size)?;
+ return Ok(if range.is_end() {
+ None
+ } else if range.is_base_address(encoding.address_size) {
+ Some(RawRngListEntry::BaseAddress { addr: range.end })
+ } else {
+ Some(RawRngListEntry::AddressOrOffsetPair {
+ begin: range.begin,
+ end: range.end,
+ })
+ });
+ }
+ RangeListsFormat::RLE => Ok(match constants::DwRle(input.read_u8()?) {
+ constants::DW_RLE_end_of_list => None,
+ constants::DW_RLE_base_addressx => Some(RawRngListEntry::BaseAddressx {
+ addr: DebugAddrIndex(input.read_uleb128().and_then(R::Offset::from_u64)?),
+ }),
+ constants::DW_RLE_startx_endx => Some(RawRngListEntry::StartxEndx {
+ begin: DebugAddrIndex(input.read_uleb128().and_then(R::Offset::from_u64)?),
+ end: DebugAddrIndex(input.read_uleb128().and_then(R::Offset::from_u64)?),
+ }),
+ constants::DW_RLE_startx_length => Some(RawRngListEntry::StartxLength {
+ begin: DebugAddrIndex(input.read_uleb128().and_then(R::Offset::from_u64)?),
+ length: input.read_uleb128()?,
+ }),
+ constants::DW_RLE_offset_pair => Some(RawRngListEntry::OffsetPair {
+ begin: input.read_uleb128()?,
+ end: input.read_uleb128()?,
+ }),
+ constants::DW_RLE_base_address => Some(RawRngListEntry::BaseAddress {
+ addr: input.read_address(encoding.address_size)?,
+ }),
+ constants::DW_RLE_start_end => Some(RawRngListEntry::StartEnd {
+ begin: input.read_address(encoding.address_size)?,
+ end: input.read_address(encoding.address_size)?,
+ }),
+ constants::DW_RLE_start_length => Some(RawRngListEntry::StartLength {
+ begin: input.read_address(encoding.address_size)?,
+ length: input.read_uleb128()?,
+ }),
+ _ => {
+ return Err(Error::InvalidAddressRange);
+ }
+ }),
+ }
+ }
+}
+
+impl<R: Reader> RawRngListIter<R> {
+ /// Construct a `RawRngListIter`.
+ fn new(input: R, encoding: Encoding, format: RangeListsFormat) -> RawRngListIter<R> {
+ RawRngListIter {
+ input,
+ encoding,
+ format,
+ }
+ }
+
+ /// Advance the iterator to the next range.
+ pub fn next(&mut self) -> Result<Option<RawRngListEntry<R::Offset>>> {
+ if self.input.is_empty() {
+ return Ok(None);
+ }
+
+ match RawRngListEntry::parse(&mut self.input, self.encoding, self.format) {
+ Ok(range) => {
+ if range.is_none() {
+ self.input.empty();
+ }
+ Ok(range)
+ }
+ Err(e) => {
+ self.input.empty();
+ Err(e)
+ }
+ }
+ }
+}
+
+#[cfg(feature = "fallible-iterator")]
+impl<R: Reader> fallible_iterator::FallibleIterator for RawRngListIter<R> {
+ type Item = RawRngListEntry<R::Offset>;
+ type Error = Error;
+
+ fn next(&mut self) -> ::core::result::Result<Option<Self::Item>, Self::Error> {
+ RawRngListIter::next(self)
+ }
+}
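// Illustrative sketch (not part of the vendored file): the unprocessed view.
// The raw entries expose base-address selection explicitly instead of folding
// it into the returned ranges. `rnglists`, `offset` and `encoding` are assumed
// as in the processed-ranges sketch above.
fn dump_raw_ranges<R: gimli::Reader>(
    rnglists: &gimli::RangeLists<R>,
    offset: gimli::RangeListsOffset<R::Offset>,
    encoding: gimli::Encoding,
) -> gimli::Result<()> {
    let mut raw = rnglists.raw_ranges(offset, encoding)?;
    while let Some(entry) = raw.next()? {
        match entry {
            gimli::RawRngListEntry::BaseAddress { addr } => {
                println!("base address selection: {:#x}", addr);
            }
            gimli::RawRngListEntry::OffsetPair { begin, end } => {
                println!("offset pair: {:#x}..{:#x}", begin, end);
            }
            // The remaining DW_RLE_* encodings and the pre-DWARF 5 pairs.
            other => println!("{:?}", other),
        }
    }
    Ok(())
}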
+
+/// An iterator over an address range list.
+///
+/// This iterator internally handles processing of base addresses and different
+/// entry types. Thus, it only returns range entries that are valid
+/// and already adjusted for the base address.
+#[derive(Debug)]
+pub struct RngListIter<R: Reader> {
+ raw: RawRngListIter<R>,
+ base_address: u64,
+ debug_addr: DebugAddr<R>,
+ debug_addr_base: DebugAddrBase<R::Offset>,
+}
+
+impl<R: Reader> RngListIter<R> {
+ /// Construct a `RngListIter`.
+ fn new(
+ raw: RawRngListIter<R>,
+ base_address: u64,
+ debug_addr: DebugAddr<R>,
+ debug_addr_base: DebugAddrBase<R::Offset>,
+ ) -> RngListIter<R> {
+ RngListIter {
+ raw,
+ base_address,
+ debug_addr,
+ debug_addr_base,
+ }
+ }
+
+ #[inline]
+ fn get_address(&self, index: DebugAddrIndex<R::Offset>) -> Result<u64> {
+ self.debug_addr
+ .get_address(self.raw.encoding.address_size, self.debug_addr_base, index)
+ }
+
+ /// Advance the iterator to the next range.
+ pub fn next(&mut self) -> Result<Option<Range>> {
+ loop {
+ let raw_range = match self.raw.next()? {
+ Some(range) => range,
+ None => return Ok(None),
+ };
+
+ let range = match raw_range {
+ RawRngListEntry::BaseAddress { addr } => {
+ self.base_address = addr;
+ continue;
+ }
+ RawRngListEntry::BaseAddressx { addr } => {
+ self.base_address = self.get_address(addr)?;
+ continue;
+ }
+ RawRngListEntry::StartxEndx { begin, end } => {
+ let begin = self.get_address(begin)?;
+ let end = self.get_address(end)?;
+ Range { begin, end }
+ }
+ RawRngListEntry::StartxLength { begin, length } => {
+ let begin = self.get_address(begin)?;
+ let end = begin + length;
+ Range { begin, end }
+ }
+ RawRngListEntry::AddressOrOffsetPair { begin, end }
+ | RawRngListEntry::OffsetPair { begin, end } => {
+ let mut range = Range { begin, end };
+ range.add_base_address(self.base_address, self.raw.encoding.address_size);
+ range
+ }
+ RawRngListEntry::StartEnd { begin, end } => Range { begin, end },
+ RawRngListEntry::StartLength { begin, length } => Range {
+ begin,
+ end: begin + length,
+ },
+ };
+
+ if range.begin > range.end {
+ self.raw.input.empty();
+ return Err(Error::InvalidAddressRange);
+ }
+
+ return Ok(Some(range));
+ }
+ }
+}
+
+#[cfg(feature = "fallible-iterator")]
+impl<R: Reader> fallible_iterator::FallibleIterator for RngListIter<R> {
+ type Item = Range;
+ type Error = Error;
+
+ fn next(&mut self) -> ::core::result::Result<Option<Self::Item>, Self::Error> {
+ RngListIter::next(self)
+ }
+}
+
+/// A raw address range from the `.debug_ranges` section.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub(crate) struct RawRange {
+ /// The beginning address of the range.
+ pub begin: u64,
+
+ /// The first address past the end of the range.
+ pub end: u64,
+}
+
+impl RawRange {
+ /// Check if this is a range end entry.
+ ///
+ /// This will only occur for raw ranges.
+ #[inline]
+ pub fn is_end(&self) -> bool {
+ self.begin == 0 && self.end == 0
+ }
+
+ /// Check if this is a base address selection entry.
+ ///
+ /// A base address selection entry changes the base address that subsequent
+ /// range entries are relative to. This will only occur for raw ranges.
+ #[inline]
+ pub fn is_base_address(&self, address_size: u8) -> bool {
+ self.begin == !0 >> (64 - address_size * 8)
+ }
+
+ /// Parse an address range entry from `.debug_ranges` or `.debug_loc`.
+ #[doc(hidden)]
+ #[inline]
+ pub fn parse<R: Reader>(input: &mut R, address_size: u8) -> Result<RawRange> {
+ let begin = input.read_address(address_size)?;
+ let end = input.read_address(address_size)?;
+ let range = RawRange { begin, end };
+ Ok(range)
+ }
+}
+
+/// An address range from the `.debug_ranges`, `.debug_rnglists`, or `.debug_aranges` sections.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Range {
+ /// The beginning address of the range.
+ pub begin: u64,
+
+ /// The first address past the end of the range.
+ pub end: u64,
+}
+
+impl Range {
+ /// Add a base address to this range.
+ #[inline]
+ pub(crate) fn add_base_address(&mut self, base_address: u64, address_size: u8) {
+ let mask = !0 >> (64 - address_size * 8);
+ self.begin = base_address.wrapping_add(self.begin) & mask;
+ self.end = base_address.wrapping_add(self.end) & mask;
+ }
+}
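// Illustrative sketch (not part of the vendored file): the masking above,
// written out for a 4-byte address size. Adding the base can wrap past the
// address-size boundary, which is how a raw offset pair can end up with
// `begin > end` and trip `Error::InvalidAddressRange` (see the
// "invalid range after wrapping" test below).
fn wrapping_demo() {
    let address_size: u8 = 4;
    let mask = !0u64 >> (64 - u64::from(address_size) * 8); // 0xffff_ffff
    let base: u64 = 0x0100_0000;
    assert_eq!(base.wrapping_add(0x0002_0000) & mask, 0x0102_0000); // begin
    assert_eq!(base.wrapping_add(0xff01_0000) & mask, 0x0001_0000); // end wraps below begin
}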
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::common::Format;
+ use crate::endianity::LittleEndian;
+ use crate::test_util::GimliSectionMethods;
+ use test_assembler::{Endian, Label, LabelMaker, Section};
+
+ #[test]
+ fn test_rnglists_32() {
+ let encoding = Encoding {
+ format: Format::Dwarf32,
+ version: 5,
+ address_size: 4,
+ };
+ let section = Section::with_endian(Endian::Little)
+ .L32(0x0300_0000)
+ .L32(0x0301_0300)
+ .L32(0x0301_0400)
+ .L32(0x0301_0500);
+ let buf = section.get_contents().unwrap();
+ let debug_addr = &DebugAddr::from(EndianSlice::new(&buf, LittleEndian));
+ let debug_addr_base = DebugAddrBase(0);
+
+ let start = Label::new();
+ let first = Label::new();
+ let size = Label::new();
+ #[rustfmt::skip]
+ let section = Section::with_endian(Endian::Little)
+ // Header
+ .mark(&start)
+ .L32(&size)
+ .L16(encoding.version)
+ .L8(encoding.address_size)
+ .L8(0)
+ .L32(0)
+ .mark(&first)
+ // OffsetPair
+ .L8(4).uleb(0x10200).uleb(0x10300)
+ // A base address selection followed by an OffsetPair.
+ .L8(5).L32(0x0200_0000)
+ .L8(4).uleb(0x10400).uleb(0x10500)
+ // An empty OffsetPair followed by a normal OffsetPair.
+ .L8(4).uleb(0x10600).uleb(0x10600)
+ .L8(4).uleb(0x10800).uleb(0x10900)
+ // A StartEnd
+ .L8(6).L32(0x201_0a00).L32(0x201_0b00)
+ // A StartLength
+ .L8(7).L32(0x201_0c00).uleb(0x100)
+ // An OffsetPair that starts at 0.
+ .L8(4).uleb(0).uleb(1)
+ // An OffsetPair that starts and ends at 0.
+ .L8(4).uleb(0).uleb(0)
+ // An OffsetPair that ends at -1.
+ .L8(5).L32(0)
+ .L8(4).uleb(0).uleb(0xffff_ffff)
+ // A BaseAddressx + OffsetPair
+ .L8(1).uleb(0)
+ .L8(4).uleb(0x10100).uleb(0x10200)
+ // A StartxEndx
+ .L8(2).uleb(1).uleb(2)
+ // A StartxLength
+ .L8(3).uleb(3).uleb(0x100)
+ // A range end.
+ .L8(0)
+ // Some extra data.
+ .L32(0xffff_ffff);
+ size.set_const((&section.here() - &start - 4) as u64);
+
+ let buf = section.get_contents().unwrap();
+ let debug_ranges = DebugRanges::new(&[], LittleEndian);
+ let debug_rnglists = DebugRngLists::new(&buf, LittleEndian);
+ let rnglists = RangeLists::new(debug_ranges, debug_rnglists);
+ let offset = RangeListsOffset((&first - &start) as usize);
+ let mut ranges = rnglists
+ .ranges(offset, encoding, 0x0100_0000, debug_addr, debug_addr_base)
+ .unwrap();
+
+ // A normal range.
+ assert_eq!(
+ ranges.next(),
+ Ok(Some(Range {
+ begin: 0x0101_0200,
+ end: 0x0101_0300,
+ }))
+ );
+
+ // A base address selection followed by a normal range.
+ assert_eq!(
+ ranges.next(),
+ Ok(Some(Range {
+ begin: 0x0201_0400,
+ end: 0x0201_0500,
+ }))
+ );
+
+ // An empty range followed by a normal range.
+ assert_eq!(
+ ranges.next(),
+ Ok(Some(Range {
+ begin: 0x0201_0600,
+ end: 0x0201_0600,
+ }))
+ );
+ assert_eq!(
+ ranges.next(),
+ Ok(Some(Range {
+ begin: 0x0201_0800,
+ end: 0x0201_0900,
+ }))
+ );
+
+ // A normal range.
+ assert_eq!(
+ ranges.next(),
+ Ok(Some(Range {
+ begin: 0x0201_0a00,
+ end: 0x0201_0b00,
+ }))
+ );
+
+ // A normal range.
+ assert_eq!(
+ ranges.next(),
+ Ok(Some(Range {
+ begin: 0x0201_0c00,
+ end: 0x0201_0d00,
+ }))
+ );
+
+ // A range that starts at 0.
+ assert_eq!(
+ ranges.next(),
+ Ok(Some(Range {
+ begin: 0x0200_0000,
+ end: 0x0200_0001,
+ }))
+ );
+
+ // A range that starts and ends at 0.
+ assert_eq!(
+ ranges.next(),
+ Ok(Some(Range {
+ begin: 0x0200_0000,
+ end: 0x0200_0000,
+ }))
+ );
+
+ // A range that ends at -1.
+ assert_eq!(
+ ranges.next(),
+ Ok(Some(Range {
+ begin: 0x0000_0000,
+ end: 0xffff_ffff,
+ }))
+ );
+
+ // A BaseAddressx + OffsetPair
+ assert_eq!(
+ ranges.next(),
+ Ok(Some(Range {
+ begin: 0x0301_0100,
+ end: 0x0301_0200,
+ }))
+ );
+
+ // A StartxEndx
+ assert_eq!(
+ ranges.next(),
+ Ok(Some(Range {
+ begin: 0x0301_0300,
+ end: 0x0301_0400,
+ }))
+ );
+
+ // A StartxLength
+ assert_eq!(
+ ranges.next(),
+ Ok(Some(Range {
+ begin: 0x0301_0500,
+ end: 0x0301_0600,
+ }))
+ );
+
+ // A range end.
+ assert_eq!(ranges.next(), Ok(None));
+
+ // An offset at the end of buf.
+ let mut ranges = rnglists
+ .ranges(
+ RangeListsOffset(buf.len()),
+ encoding,
+ 0x0100_0000,
+ debug_addr,
+ debug_addr_base,
+ )
+ .unwrap();
+ assert_eq!(ranges.next(), Ok(None));
+ }
+
+ #[test]
+ fn test_rnglists_64() {
+ let encoding = Encoding {
+ format: Format::Dwarf64,
+ version: 5,
+ address_size: 8,
+ };
+ let section = Section::with_endian(Endian::Little)
+ .L64(0x0300_0000)
+ .L64(0x0301_0300)
+ .L64(0x0301_0400)
+ .L64(0x0301_0500);
+ let buf = section.get_contents().unwrap();
+ let debug_addr = &DebugAddr::from(EndianSlice::new(&buf, LittleEndian));
+ let debug_addr_base = DebugAddrBase(0);
+
+ let start = Label::new();
+ let first = Label::new();
+ let size = Label::new();
+ #[rustfmt::skip]
+ let section = Section::with_endian(Endian::Little)
+ // Header
+ .mark(&start)
+ .L32(0xffff_ffff)
+ .L64(&size)
+ .L16(encoding.version)
+ .L8(encoding.address_size)
+ .L8(0)
+ .L32(0)
+ .mark(&first)
+ // OffsetPair
+ .L8(4).uleb(0x10200).uleb(0x10300)
+ // A base address selection followed by an OffsetPair.
+ .L8(5).L64(0x0200_0000)
+ .L8(4).uleb(0x10400).uleb(0x10500)
+ // An empty OffsetPair followed by a normal OffsetPair.
+ .L8(4).uleb(0x10600).uleb(0x10600)
+ .L8(4).uleb(0x10800).uleb(0x10900)
+ // A StartEnd
+ .L8(6).L64(0x201_0a00).L64(0x201_0b00)
+ // A StartLength
+ .L8(7).L64(0x201_0c00).uleb(0x100)
+ // An OffsetPair that starts at 0.
+ .L8(4).uleb(0).uleb(1)
+ // An OffsetPair that starts and ends at 0.
+ .L8(4).uleb(0).uleb(0)
+ // An OffsetPair that ends at -1.
+ .L8(5).L64(0)
+ .L8(4).uleb(0).uleb(0xffff_ffff)
+ // A BaseAddressx + OffsetPair
+ .L8(1).uleb(0)
+ .L8(4).uleb(0x10100).uleb(0x10200)
+ // A StartxEndx
+ .L8(2).uleb(1).uleb(2)
+ // A StartxLength
+ .L8(3).uleb(3).uleb(0x100)
+ // A range end.
+ .L8(0)
+ // Some extra data.
+ .L32(0xffff_ffff);
+ size.set_const((&section.here() - &start - 12) as u64);
+
+ let buf = section.get_contents().unwrap();
+ let debug_ranges = DebugRanges::new(&[], LittleEndian);
+ let debug_rnglists = DebugRngLists::new(&buf, LittleEndian);
+ let rnglists = RangeLists::new(debug_ranges, debug_rnglists);
+ let offset = RangeListsOffset((&first - &start) as usize);
+ let mut ranges = rnglists
+ .ranges(offset, encoding, 0x0100_0000, debug_addr, debug_addr_base)
+ .unwrap();
+
+ // A normal range.
+ assert_eq!(
+ ranges.next(),
+ Ok(Some(Range {
+ begin: 0x0101_0200,
+ end: 0x0101_0300,
+ }))
+ );
+
+ // A base address selection followed by a normal range.
+ assert_eq!(
+ ranges.next(),
+ Ok(Some(Range {
+ begin: 0x0201_0400,
+ end: 0x0201_0500,
+ }))
+ );
+
+ // An empty range followed by a normal range.
+ assert_eq!(
+ ranges.next(),
+ Ok(Some(Range {
+ begin: 0x0201_0600,
+ end: 0x0201_0600,
+ }))
+ );
+ assert_eq!(
+ ranges.next(),
+ Ok(Some(Range {
+ begin: 0x0201_0800,
+ end: 0x0201_0900,
+ }))
+ );
+
+ // A normal range.
+ assert_eq!(
+ ranges.next(),
+ Ok(Some(Range {
+ begin: 0x0201_0a00,
+ end: 0x0201_0b00,
+ }))
+ );
+
+ // A normal range.
+ assert_eq!(
+ ranges.next(),
+ Ok(Some(Range {
+ begin: 0x0201_0c00,
+ end: 0x0201_0d00,
+ }))
+ );
+
+ // A range that starts at 0.
+ assert_eq!(
+ ranges.next(),
+ Ok(Some(Range {
+ begin: 0x0200_0000,
+ end: 0x0200_0001,
+ }))
+ );
+
+ // A range that starts and ends at 0.
+ assert_eq!(
+ ranges.next(),
+ Ok(Some(Range {
+ begin: 0x0200_0000,
+ end: 0x0200_0000,
+ }))
+ );
+
+ // A range that ends at -1.
+ assert_eq!(
+ ranges.next(),
+ Ok(Some(Range {
+ begin: 0x0000_0000,
+ end: 0xffff_ffff,
+ }))
+ );
+
+ // A BaseAddressx + OffsetPair
+ assert_eq!(
+ ranges.next(),
+ Ok(Some(Range {
+ begin: 0x0301_0100,
+ end: 0x0301_0200,
+ }))
+ );
+
+ // A StartxEndx
+ assert_eq!(
+ ranges.next(),
+ Ok(Some(Range {
+ begin: 0x0301_0300,
+ end: 0x0301_0400,
+ }))
+ );
+
+ // A StartxLength
+ assert_eq!(
+ ranges.next(),
+ Ok(Some(Range {
+ begin: 0x0301_0500,
+ end: 0x0301_0600,
+ }))
+ );
+
+ // A range end.
+ assert_eq!(ranges.next(), Ok(None));
+
+ // An offset at the end of buf.
+ let mut ranges = rnglists
+ .ranges(
+ RangeListsOffset(buf.len()),
+ encoding,
+ 0x0100_0000,
+ debug_addr,
+ debug_addr_base,
+ )
+ .unwrap();
+ assert_eq!(ranges.next(), Ok(None));
+ }
+
+ #[test]
+ fn test_raw_range() {
+ let range = RawRange {
+ begin: 0,
+ end: 0xffff_ffff,
+ };
+ assert!(!range.is_end());
+ assert!(!range.is_base_address(4));
+ assert!(!range.is_base_address(8));
+
+ let range = RawRange { begin: 0, end: 0 };
+ assert!(range.is_end());
+ assert!(!range.is_base_address(4));
+ assert!(!range.is_base_address(8));
+
+ let range = RawRange {
+ begin: 0xffff_ffff,
+ end: 0,
+ };
+ assert!(!range.is_end());
+ assert!(range.is_base_address(4));
+ assert!(!range.is_base_address(8));
+
+ let range = RawRange {
+ begin: 0xffff_ffff_ffff_ffff,
+ end: 0,
+ };
+ assert!(!range.is_end());
+ assert!(!range.is_base_address(4));
+ assert!(range.is_base_address(8));
+ }
+
+ #[test]
+ fn test_ranges_32() {
+ let start = Label::new();
+ let first = Label::new();
+ #[rustfmt::skip]
+ let section = Section::with_endian(Endian::Little)
+ // A range before the offset.
+ .mark(&start)
+ .L32(0x10000).L32(0x10100)
+ .mark(&first)
+ // A normal range.
+ .L32(0x10200).L32(0x10300)
+ // A base address selection followed by a normal range.
+ .L32(0xffff_ffff).L32(0x0200_0000)
+ .L32(0x10400).L32(0x10500)
+ // An empty range followed by a normal range.
+ .L32(0x10600).L32(0x10600)
+ .L32(0x10800).L32(0x10900)
+ // A range that starts at 0.
+ .L32(0).L32(1)
+ // A range that ends at -1.
+ .L32(0xffff_ffff).L32(0x0000_0000)
+ .L32(0).L32(0xffff_ffff)
+ // A range end.
+ .L32(0).L32(0)
+ // Some extra data.
+ .L32(0);
+
+ let buf = section.get_contents().unwrap();
+ let debug_ranges = DebugRanges::new(&buf, LittleEndian);
+ let debug_rnglists = DebugRngLists::new(&[], LittleEndian);
+ let rnglists = RangeLists::new(debug_ranges, debug_rnglists);
+ let offset = RangeListsOffset((&first - &start) as usize);
+ let debug_addr = &DebugAddr::from(EndianSlice::new(&[], LittleEndian));
+ let debug_addr_base = DebugAddrBase(0);
+ let encoding = Encoding {
+ format: Format::Dwarf32,
+ version: 4,
+ address_size: 4,
+ };
+ let mut ranges = rnglists
+ .ranges(offset, encoding, 0x0100_0000, debug_addr, debug_addr_base)
+ .unwrap();
+
+ // A normal range.
+ assert_eq!(
+ ranges.next(),
+ Ok(Some(Range {
+ begin: 0x0101_0200,
+ end: 0x0101_0300,
+ }))
+ );
+
+ // A base address selection followed by a normal range.
+ assert_eq!(
+ ranges.next(),
+ Ok(Some(Range {
+ begin: 0x0201_0400,
+ end: 0x0201_0500,
+ }))
+ );
+
+ // An empty range followed by a normal range.
+ assert_eq!(
+ ranges.next(),
+ Ok(Some(Range {
+ begin: 0x0201_0600,
+ end: 0x0201_0600,
+ }))
+ );
+ assert_eq!(
+ ranges.next(),
+ Ok(Some(Range {
+ begin: 0x0201_0800,
+ end: 0x0201_0900,
+ }))
+ );
+
+ // A range that starts at 0.
+ assert_eq!(
+ ranges.next(),
+ Ok(Some(Range {
+ begin: 0x0200_0000,
+ end: 0x0200_0001,
+ }))
+ );
+
+ // A range that ends at -1.
+ assert_eq!(
+ ranges.next(),
+ Ok(Some(Range {
+ begin: 0x0000_0000,
+ end: 0xffff_ffff,
+ }))
+ );
+
+ // A range end.
+ assert_eq!(ranges.next(), Ok(None));
+
+ // An offset at the end of buf.
+ let mut ranges = rnglists
+ .ranges(
+ RangeListsOffset(buf.len()),
+ encoding,
+ 0x0100_0000,
+ debug_addr,
+ debug_addr_base,
+ )
+ .unwrap();
+ assert_eq!(ranges.next(), Ok(None));
+ }
+
+ #[test]
+ fn test_ranges_64() {
+ let start = Label::new();
+ let first = Label::new();
+ #[rustfmt::skip]
+ let section = Section::with_endian(Endian::Little)
+ // A range before the offset.
+ .mark(&start)
+ .L64(0x10000).L64(0x10100)
+ .mark(&first)
+ // A normal range.
+ .L64(0x10200).L64(0x10300)
+ // A base address selection followed by a normal range.
+ .L64(0xffff_ffff_ffff_ffff).L64(0x0200_0000)
+ .L64(0x10400).L64(0x10500)
+ // An empty range followed by a normal range.
+ .L64(0x10600).L64(0x10600)
+ .L64(0x10800).L64(0x10900)
+ // A range that starts at 0.
+ .L64(0).L64(1)
+ // A range that ends at -1.
+ .L64(0xffff_ffff_ffff_ffff).L64(0x0000_0000)
+ .L64(0).L64(0xffff_ffff_ffff_ffff)
+ // A range end.
+ .L64(0).L64(0)
+ // Some extra data.
+ .L64(0);
+
+ let buf = section.get_contents().unwrap();
+ let debug_ranges = DebugRanges::new(&buf, LittleEndian);
+ let debug_rnglists = DebugRngLists::new(&[], LittleEndian);
+ let rnglists = RangeLists::new(debug_ranges, debug_rnglists);
+ let offset = RangeListsOffset((&first - &start) as usize);
+ let debug_addr = &DebugAddr::from(EndianSlice::new(&[], LittleEndian));
+ let debug_addr_base = DebugAddrBase(0);
+ let encoding = Encoding {
+ format: Format::Dwarf64,
+ version: 4,
+ address_size: 8,
+ };
+ let mut ranges = rnglists
+ .ranges(offset, encoding, 0x0100_0000, debug_addr, debug_addr_base)
+ .unwrap();
+
+ // A normal range.
+ assert_eq!(
+ ranges.next(),
+ Ok(Some(Range {
+ begin: 0x0101_0200,
+ end: 0x0101_0300,
+ }))
+ );
+
+ // A base address selection followed by a normal range.
+ assert_eq!(
+ ranges.next(),
+ Ok(Some(Range {
+ begin: 0x0201_0400,
+ end: 0x0201_0500,
+ }))
+ );
+
+ // An empty range followed by a normal range.
+ assert_eq!(
+ ranges.next(),
+ Ok(Some(Range {
+ begin: 0x0201_0600,
+ end: 0x0201_0600,
+ }))
+ );
+ assert_eq!(
+ ranges.next(),
+ Ok(Some(Range {
+ begin: 0x0201_0800,
+ end: 0x0201_0900,
+ }))
+ );
+
+ // A range that starts at 0.
+ assert_eq!(
+ ranges.next(),
+ Ok(Some(Range {
+ begin: 0x0200_0000,
+ end: 0x0200_0001,
+ }))
+ );
+
+ // A range that ends at -1.
+ assert_eq!(
+ ranges.next(),
+ Ok(Some(Range {
+ begin: 0x0,
+ end: 0xffff_ffff_ffff_ffff,
+ }))
+ );
+
+ // A range end.
+ assert_eq!(ranges.next(), Ok(None));
+
+ // An offset at the end of buf.
+ let mut ranges = rnglists
+ .ranges(
+ RangeListsOffset(buf.len()),
+ encoding,
+ 0x0100_0000,
+ debug_addr,
+ debug_addr_base,
+ )
+ .unwrap();
+ assert_eq!(ranges.next(), Ok(None));
+ }
+
+ #[test]
+ fn test_ranges_invalid() {
+ #[rustfmt::skip]
+ let section = Section::with_endian(Endian::Little)
+ // An invalid range.
+ .L32(0x20000).L32(0x10000)
+ // An invalid range after wrapping.
+ .L32(0x20000).L32(0xff01_0000);
+
+ let buf = section.get_contents().unwrap();
+ let debug_ranges = DebugRanges::new(&buf, LittleEndian);
+ let debug_rnglists = DebugRngLists::new(&[], LittleEndian);
+ let rnglists = RangeLists::new(debug_ranges, debug_rnglists);
+ let debug_addr = &DebugAddr::from(EndianSlice::new(&[], LittleEndian));
+ let debug_addr_base = DebugAddrBase(0);
+ let encoding = Encoding {
+ format: Format::Dwarf32,
+ version: 4,
+ address_size: 4,
+ };
+
+ // An invalid range.
+ let mut ranges = rnglists
+ .ranges(
+ RangeListsOffset(0x0),
+ encoding,
+ 0x0100_0000,
+ debug_addr,
+ debug_addr_base,
+ )
+ .unwrap();
+ assert_eq!(ranges.next(), Err(Error::InvalidAddressRange));
+
+ // An invalid range after wrapping.
+ let mut ranges = rnglists
+ .ranges(
+ RangeListsOffset(0x8),
+ encoding,
+ 0x0100_0000,
+ debug_addr,
+ debug_addr_base,
+ )
+ .unwrap();
+ assert_eq!(ranges.next(), Err(Error::InvalidAddressRange));
+
+ // An invalid offset.
+ match rnglists.ranges(
+ RangeListsOffset(buf.len() + 1),
+ encoding,
+ 0x0100_0000,
+ debug_addr,
+ debug_addr_base,
+ ) {
+ Err(Error::UnexpectedEof(_)) => {}
+ otherwise => panic!("Unexpected result: {:?}", otherwise),
+ }
+ }
+
+ #[test]
+ fn test_get_offset() {
+ for format in vec![Format::Dwarf32, Format::Dwarf64] {
+ let encoding = Encoding {
+ format,
+ version: 5,
+ address_size: 4,
+ };
+
+ let zero = Label::new();
+ let length = Label::new();
+ let start = Label::new();
+ let first = Label::new();
+ let end = Label::new();
+ let mut section = Section::with_endian(Endian::Little)
+ .mark(&zero)
+ .initial_length(format, &length, &start)
+ .D16(encoding.version)
+ .D8(encoding.address_size)
+ .D8(0)
+ .D32(20)
+ .mark(&first);
+ for i in 0..20 {
+ section = section.word(format.word_size(), 1000 + i);
+ }
+ section = section.mark(&end);
+ length.set_const((&end - &start) as u64);
+ let section = section.get_contents().unwrap();
+
+ let debug_ranges = DebugRanges::from(EndianSlice::new(&[], LittleEndian));
+ let debug_rnglists = DebugRngLists::from(EndianSlice::new(&section, LittleEndian));
+ let ranges = RangeLists::new(debug_ranges, debug_rnglists);
+
+ let base = DebugRngListsBase((&first - &zero) as usize);
+ assert_eq!(
+ ranges.get_offset(encoding, base, DebugRngListsIndex(0)),
+ Ok(RangeListsOffset(base.0 + 1000))
+ );
+ assert_eq!(
+ ranges.get_offset(encoding, base, DebugRngListsIndex(19)),
+ Ok(RangeListsOffset(base.0 + 1019))
+ );
+ }
+ }
+}
diff --git a/vendor/gimli-0.26.2/src/read/str.rs b/vendor/gimli-0.26.2/src/read/str.rs
new file mode 100644
index 000000000..c6b87d8f9
--- /dev/null
+++ b/vendor/gimli-0.26.2/src/read/str.rs
@@ -0,0 +1,321 @@
+use crate::common::{
+ DebugLineStrOffset, DebugStrOffset, DebugStrOffsetsBase, DebugStrOffsetsIndex, DwarfFileType,
+ Encoding, SectionId,
+};
+use crate::endianity::Endianity;
+use crate::read::{EndianSlice, Reader, ReaderOffset, Result, Section};
+use crate::Format;
+
+/// The `DebugStr` struct represents the DWARF strings
+/// found in the `.debug_str` section.
+#[derive(Debug, Default, Clone, Copy)]
+pub struct DebugStr<R> {
+ debug_str_section: R,
+}
+
+impl<'input, Endian> DebugStr<EndianSlice<'input, Endian>>
+where
+ Endian: Endianity,
+{
+ /// Construct a new `DebugStr` instance from the data in the `.debug_str`
+ /// section.
+ ///
+ /// It is the caller's responsibility to read the `.debug_str` section and
+ /// present it as a `&[u8]` slice. That means using some ELF loader on
+ /// Linux, a Mach-O loader on macOS, etc.
+ ///
+ /// ```
+ /// use gimli::{DebugStr, LittleEndian};
+ ///
+ /// # let buf = [0x00, 0x01, 0x02, 0x03];
+ /// # let read_debug_str_section_somehow = || &buf;
+ /// let debug_str = DebugStr::new(read_debug_str_section_somehow(), LittleEndian);
+ /// ```
+ pub fn new(debug_str_section: &'input [u8], endian: Endian) -> Self {
+ Self::from(EndianSlice::new(debug_str_section, endian))
+ }
+}
+
+impl<R: Reader> DebugStr<R> {
+    /// Look up a string from the `.debug_str` section by its `DebugStrOffset`.
+ ///
+ /// ```
+ /// use gimli::{DebugStr, DebugStrOffset, LittleEndian};
+ ///
+ /// # let buf = [0x01, 0x02, 0x00];
+ /// # let offset = DebugStrOffset(0);
+ /// # let read_debug_str_section_somehow = || &buf;
+ /// # let debug_str_offset_somehow = || offset;
+ /// let debug_str = DebugStr::new(read_debug_str_section_somehow(), LittleEndian);
+ /// println!("Found string {:?}", debug_str.get_str(debug_str_offset_somehow()));
+ /// ```
+ pub fn get_str(&self, offset: DebugStrOffset<R::Offset>) -> Result<R> {
+ let input = &mut self.debug_str_section.clone();
+ input.skip(offset.0)?;
+ input.read_null_terminated_slice()
+ }
+}
+
+impl<T> DebugStr<T> {
+ /// Create a `DebugStr` section that references the data in `self`.
+ ///
+ /// This is useful when `R` implements `Reader` but `T` does not.
+ ///
+ /// ## Example Usage
+ ///
+ /// ```rust,no_run
+ /// # let load_section = || unimplemented!();
+ /// // Read the DWARF section into a `Vec` with whatever object loader you're using.
+ /// let owned_section: gimli::DebugStr<Vec<u8>> = load_section();
+ /// // Create a reference to the DWARF section.
+ /// let section = owned_section.borrow(|section| {
+ /// gimli::EndianSlice::new(&section, gimli::LittleEndian)
+ /// });
+ /// ```
+ pub fn borrow<'a, F, R>(&'a self, mut borrow: F) -> DebugStr<R>
+ where
+ F: FnMut(&'a T) -> R,
+ {
+ borrow(&self.debug_str_section).into()
+ }
+}
+
+impl<R> Section<R> for DebugStr<R> {
+ fn id() -> SectionId {
+ SectionId::DebugStr
+ }
+
+ fn reader(&self) -> &R {
+ &self.debug_str_section
+ }
+}
+
+impl<R> From<R> for DebugStr<R> {
+ fn from(debug_str_section: R) -> Self {
+ DebugStr { debug_str_section }
+ }
+}
+
+/// The raw contents of the `.debug_str_offsets` section.
+#[derive(Debug, Default, Clone, Copy)]
+pub struct DebugStrOffsets<R> {
+ section: R,
+}
+
+impl<R: Reader> DebugStrOffsets<R> {
+ // TODO: add an iterator over the sets of entries in the section.
+ // This is not needed for common usage of the section though.
+
+ /// Returns the `.debug_str` offset at the given `base` and `index`.
+ ///
+ /// A set of entries in the `.debug_str_offsets` section consists of a header
+ /// followed by a series of string table offsets.
+ ///
+ /// The `base` must be the `DW_AT_str_offsets_base` value from the compilation unit DIE.
+ /// This is an offset that points to the first entry following the header.
+ ///
+ /// The `index` is the value of a `DW_FORM_strx` attribute.
+ ///
+ /// The `format` must be the DWARF format of the compilation unit. This format must
+ /// match the header. However, note that we do not parse the header to validate this,
+ /// since locating the header is unreliable, and the GNU extensions do not emit it.
+ pub fn get_str_offset(
+ &self,
+ format: Format,
+ base: DebugStrOffsetsBase<R::Offset>,
+ index: DebugStrOffsetsIndex<R::Offset>,
+ ) -> Result<DebugStrOffset<R::Offset>> {
+ let input = &mut self.section.clone();
+ input.skip(base.0)?;
+ input.skip(R::Offset::from_u64(
+ index.0.into_u64() * u64::from(format.word_size()),
+ )?)?;
+ input.read_offset(format).map(DebugStrOffset)
+ }
+}
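// Illustrative sketch (not part of the vendored file): resolving a
// DW_FORM_strx attribute, index -> `.debug_str` offset -> string.
// `debug_str_offsets`, `debug_str`, `base` and `index` are assumed to have
// been loaded or parsed elsewhere; only the two lookups are this module's API.
fn resolve_strx<R: gimli::Reader>(
    debug_str_offsets: &gimli::DebugStrOffsets<R>,
    debug_str: &gimli::DebugStr<R>,
    format: gimli::Format,
    base: gimli::DebugStrOffsetsBase<R::Offset>,
    index: gimli::DebugStrOffsetsIndex<R::Offset>,
) -> gimli::Result<R> {
    let offset = debug_str_offsets.get_str_offset(format, base, index)?;
    debug_str.get_str(offset)
}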
+
+impl<T> DebugStrOffsets<T> {
+ /// Create a `DebugStrOffsets` section that references the data in `self`.
+ ///
+ /// This is useful when `R` implements `Reader` but `T` does not.
+ ///
+ /// ## Example Usage
+ ///
+ /// ```rust,no_run
+ /// # let load_section = || unimplemented!();
+ /// // Read the DWARF section into a `Vec` with whatever object loader you're using.
+ /// let owned_section: gimli::DebugStrOffsets<Vec<u8>> = load_section();
+ /// // Create a reference to the DWARF section.
+ /// let section = owned_section.borrow(|section| {
+ /// gimli::EndianSlice::new(&section, gimli::LittleEndian)
+ /// });
+ /// ```
+ pub fn borrow<'a, F, R>(&'a self, mut borrow: F) -> DebugStrOffsets<R>
+ where
+ F: FnMut(&'a T) -> R,
+ {
+ borrow(&self.section).into()
+ }
+}
+
+impl<R> Section<R> for DebugStrOffsets<R> {
+ fn id() -> SectionId {
+ SectionId::DebugStrOffsets
+ }
+
+ fn reader(&self) -> &R {
+ &self.section
+ }
+}
+
+impl<R> From<R> for DebugStrOffsets<R> {
+ fn from(section: R) -> Self {
+ DebugStrOffsets { section }
+ }
+}
+
+impl<Offset> DebugStrOffsetsBase<Offset>
+where
+ Offset: ReaderOffset,
+{
+ /// Returns a `DebugStrOffsetsBase` with the default value of DW_AT_str_offsets_base
+ /// for the given `Encoding` and `DwarfFileType`.
+ pub fn default_for_encoding_and_file(
+ encoding: Encoding,
+ file_type: DwarfFileType,
+ ) -> DebugStrOffsetsBase<Offset> {
+ if encoding.version >= 5 && file_type == DwarfFileType::Dwo {
+ // In .dwo files, the compiler omits the DW_AT_str_offsets_base attribute (because there is
+ // only a single unit in the file) but we must skip past the header, which the attribute
+ // would normally do for us.
+ // initial_length_size + version + 2 bytes of padding.
+ DebugStrOffsetsBase(Offset::from_u8(
+ encoding.format.initial_length_size() + 2 + 2,
+ ))
+ } else {
+ DebugStrOffsetsBase(Offset::from_u8(0))
+ }
+ }
+}
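// Illustrative sketch (not part of the vendored file): the arithmetic above
// for a DWARF32, version 5 split unit. A 4-byte initial length, a 2-byte
// version, and 2 bytes of padding put the first offset entry 8 bytes into the
// section. The helper name is ours.
fn str_offsets_base_demo() {
    let encoding = gimli::Encoding {
        format: gimli::Format::Dwarf32,
        version: 5,
        address_size: 8,
    };
    let base: gimli::DebugStrOffsetsBase<usize> =
        gimli::DebugStrOffsetsBase::default_for_encoding_and_file(
            encoding,
            gimli::DwarfFileType::Dwo,
        );
    assert_eq!(base.0, 8);
}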
+
+/// The `DebugLineStr` struct represents the DWARF strings
+/// found in the `.debug_line_str` section.
+#[derive(Debug, Default, Clone, Copy)]
+pub struct DebugLineStr<R> {
+ section: R,
+}
+
+impl<'input, Endian> DebugLineStr<EndianSlice<'input, Endian>>
+where
+ Endian: Endianity,
+{
+ /// Construct a new `DebugLineStr` instance from the data in the `.debug_line_str`
+ /// section.
+ ///
+ /// It is the caller's responsibility to read the `.debug_line_str` section and
+ /// present it as a `&[u8]` slice. That means using some ELF loader on
+ /// Linux, a Mach-O loader on macOS, etc.
+ ///
+ /// ```
+ /// use gimli::{DebugLineStr, LittleEndian};
+ ///
+ /// # let buf = [0x00, 0x01, 0x02, 0x03];
+ /// # let read_debug_line_str_section_somehow = || &buf;
+ /// let debug_str = DebugLineStr::new(read_debug_line_str_section_somehow(), LittleEndian);
+ /// ```
+ pub fn new(debug_line_str_section: &'input [u8], endian: Endian) -> Self {
+ Self::from(EndianSlice::new(debug_line_str_section, endian))
+ }
+}
+
+impl<R: Reader> DebugLineStr<R> {
+    /// Look up a string from the `.debug_line_str` section by its `DebugLineStrOffset`.
+ pub fn get_str(&self, offset: DebugLineStrOffset<R::Offset>) -> Result<R> {
+ let input = &mut self.section.clone();
+ input.skip(offset.0)?;
+ input.read_null_terminated_slice()
+ }
+}
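// Illustrative sketch (not part of the vendored file), mirroring the
// `DebugStr` doc example: the offset would normally come from a
// DW_FORM_line_strp attribute. The helper name and buffer are made up for
// illustration.
fn line_str_demo() -> gimli::Result<()> {
    use gimli::{DebugLineStr, DebugLineStrOffset, LittleEndian};

    let buf = [b'm', b'a', b'i', b'n', b'.', b'c', 0x00];
    let debug_line_str = DebugLineStr::new(&buf, LittleEndian);
    let s = debug_line_str.get_str(DebugLineStrOffset(0))?;
    assert_eq!(s.to_string_lossy(), "main.c");
    Ok(())
}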
+
+impl<T> DebugLineStr<T> {
+ /// Create a `DebugLineStr` section that references the data in `self`.
+ ///
+ /// This is useful when `R` implements `Reader` but `T` does not.
+ ///
+ /// ## Example Usage
+ ///
+ /// ```rust,no_run
+ /// # let load_section = || unimplemented!();
+ /// // Read the DWARF section into a `Vec` with whatever object loader you're using.
+ /// let owned_section: gimli::DebugLineStr<Vec<u8>> = load_section();
+ /// // Create a reference to the DWARF section.
+ /// let section = owned_section.borrow(|section| {
+ /// gimli::EndianSlice::new(&section, gimli::LittleEndian)
+ /// });
+ /// ```
+ pub fn borrow<'a, F, R>(&'a self, mut borrow: F) -> DebugLineStr<R>
+ where
+ F: FnMut(&'a T) -> R,
+ {
+ borrow(&self.section).into()
+ }
+}
+
+impl<R> Section<R> for DebugLineStr<R> {
+ fn id() -> SectionId {
+ SectionId::DebugLineStr
+ }
+
+ fn reader(&self) -> &R {
+ &self.section
+ }
+}
+
+impl<R> From<R> for DebugLineStr<R> {
+ fn from(section: R) -> Self {
+ DebugLineStr { section }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::test_util::GimliSectionMethods;
+ use crate::LittleEndian;
+ use test_assembler::{Endian, Label, LabelMaker, Section};
+
+ #[test]
+ fn test_get_str_offset() {
+ for format in vec![Format::Dwarf32, Format::Dwarf64] {
+ let zero = Label::new();
+ let length = Label::new();
+ let start = Label::new();
+ let first = Label::new();
+ let end = Label::new();
+ let mut section = Section::with_endian(Endian::Little)
+ .mark(&zero)
+ .initial_length(format, &length, &start)
+ .D16(5)
+ .D16(0)
+ .mark(&first);
+ for i in 0..20 {
+ section = section.word(format.word_size(), 1000 + i);
+ }
+ section = section.mark(&end);
+ length.set_const((&end - &start) as u64);
+
+ let section = section.get_contents().unwrap();
+ let debug_str_offsets = DebugStrOffsets::from(EndianSlice::new(&section, LittleEndian));
+ let base = DebugStrOffsetsBase((&first - &zero) as usize);
+
+ assert_eq!(
+ debug_str_offsets.get_str_offset(format, base, DebugStrOffsetsIndex(0)),
+ Ok(DebugStrOffset(1000))
+ );
+ assert_eq!(
+ debug_str_offsets.get_str_offset(format, base, DebugStrOffsetsIndex(19)),
+ Ok(DebugStrOffset(1019))
+ );
+ }
+ }
+}
diff --git a/vendor/gimli-0.26.2/src/read/unit.rs b/vendor/gimli-0.26.2/src/read/unit.rs
new file mode 100644
index 000000000..670e55efd
--- /dev/null
+++ b/vendor/gimli-0.26.2/src/read/unit.rs
@@ -0,0 +1,6146 @@
+//! Functions for parsing DWARF `.debug_info` and `.debug_types` sections.
+
+use core::cell::Cell;
+use core::ops::{Range, RangeFrom, RangeTo};
+use core::{u16, u8};
+
+use crate::common::{
+ DebugAbbrevOffset, DebugAddrBase, DebugAddrIndex, DebugInfoOffset, DebugLineOffset,
+ DebugLineStrOffset, DebugLocListsBase, DebugLocListsIndex, DebugMacinfoOffset,
+ DebugMacroOffset, DebugRngListsBase, DebugRngListsIndex, DebugStrOffset, DebugStrOffsetsBase,
+ DebugStrOffsetsIndex, DebugTypeSignature, DebugTypesOffset, DwoId, Encoding, Format,
+ LocationListsOffset, RawRangeListsOffset, SectionId, UnitSectionOffset,
+};
+use crate::constants;
+use crate::endianity::Endianity;
+use crate::read::abbrev::get_attribute_size;
+use crate::read::{
+ Abbreviation, Abbreviations, AttributeSpecification, DebugAbbrev, DebugStr, EndianSlice, Error,
+ Expression, Reader, ReaderOffset, Result, Section, UnitOffset,
+};
+
+impl<T: ReaderOffset> DebugTypesOffset<T> {
+ /// Convert an offset to be relative to the start of the given unit,
+ /// instead of relative to the start of the .debug_types section.
+    /// Returns `None` if the offset is not within the unit's entries.
+ pub fn to_unit_offset<R>(&self, unit: &UnitHeader<R>) -> Option<UnitOffset<T>>
+ where
+ R: Reader<Offset = T>,
+ {
+ let unit_offset = unit.offset().as_debug_types_offset()?;
+ let offset = UnitOffset(self.0.checked_sub(unit_offset.0)?);
+ if !unit.is_valid_offset(offset) {
+ return None;
+ }
+ Some(offset)
+ }
+}
+
+impl<T: ReaderOffset> DebugInfoOffset<T> {
+ /// Convert an offset to be relative to the start of the given unit,
+ /// instead of relative to the start of the .debug_info section.
+    /// Returns `None` if the offset is not within this unit's entries.
+ pub fn to_unit_offset<R>(&self, unit: &UnitHeader<R>) -> Option<UnitOffset<T>>
+ where
+ R: Reader<Offset = T>,
+ {
+ let unit_offset = unit.offset().as_debug_info_offset()?;
+ let offset = UnitOffset(self.0.checked_sub(unit_offset.0)?);
+ if !unit.is_valid_offset(offset) {
+ return None;
+ }
+ Some(offset)
+ }
+}
+
+impl<T: ReaderOffset> UnitOffset<T> {
+ /// Convert an offset to be relative to the start of the .debug_info section,
+ /// instead of relative to the start of the given unit. Returns None if the
+ /// provided unit lives in the .debug_types section.
+ pub fn to_debug_info_offset<R>(&self, unit: &UnitHeader<R>) -> Option<DebugInfoOffset<T>>
+ where
+ R: Reader<Offset = T>,
+ {
+ let unit_offset = unit.offset().as_debug_info_offset()?;
+ Some(DebugInfoOffset(unit_offset.0 + self.0))
+ }
+
+ /// Convert an offset to be relative to the start of the .debug_types section,
+ /// instead of relative to the start of the given unit. Returns None if the
+ /// provided unit lives in the .debug_info section.
+ pub fn to_debug_types_offset<R>(&self, unit: &UnitHeader<R>) -> Option<DebugTypesOffset<T>>
+ where
+ R: Reader<Offset = T>,
+ {
+ let unit_offset = unit.offset().as_debug_types_offset()?;
+ Some(DebugTypesOffset(unit_offset.0 + self.0))
+ }
+}
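+
+// A hedged sketch of the round trip between unit-relative and section-relative
+// offsets (assuming `unit_offset` refers to a DIE inside `unit`, a unit header
+// parsed from `.debug_info`):
+//
+//     let section_offset = unit_offset.to_debug_info_offset(&unit).unwrap();
+//     assert_eq!(section_offset.to_unit_offset(&unit), Some(unit_offset));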
+
+/// The `DebugInfo` struct represents the DWARF debugging information found in
+/// the `.debug_info` section.
+#[derive(Debug, Default, Clone, Copy)]
+pub struct DebugInfo<R> {
+ debug_info_section: R,
+}
+
+impl<'input, Endian> DebugInfo<EndianSlice<'input, Endian>>
+where
+ Endian: Endianity,
+{
+ /// Construct a new `DebugInfo` instance from the data in the `.debug_info`
+ /// section.
+ ///
+ /// It is the caller's responsibility to read the `.debug_info` section and
+ /// present it as a `&[u8]` slice. That means using some ELF loader on
+ /// Linux, a Mach-O loader on macOS, etc.
+ ///
+ /// ```
+ /// use gimli::{DebugInfo, LittleEndian};
+ ///
+ /// # let buf = [0x00, 0x01, 0x02, 0x03];
+ /// # let read_debug_info_section_somehow = || &buf;
+ /// let debug_info = DebugInfo::new(read_debug_info_section_somehow(), LittleEndian);
+ /// ```
+ pub fn new(debug_info_section: &'input [u8], endian: Endian) -> Self {
+ Self::from(EndianSlice::new(debug_info_section, endian))
+ }
+}
+
+impl<R: Reader> DebugInfo<R> {
+ /// Iterate the units in this `.debug_info` section.
+ ///
+ /// ```
+ /// use gimli::{DebugInfo, LittleEndian};
+ ///
+ /// # let buf = [];
+ /// # let read_debug_info_section_somehow = || &buf;
+ /// let debug_info = DebugInfo::new(read_debug_info_section_somehow(), LittleEndian);
+ ///
+ /// let mut iter = debug_info.units();
+ /// while let Some(unit) = iter.next().unwrap() {
+ /// println!("unit's length is {}", unit.unit_length());
+ /// }
+ /// ```
+ ///
+ /// Can be [used with
+ /// `FallibleIterator`](./index.html#using-with-fallibleiterator).
+ pub fn units(&self) -> DebugInfoUnitHeadersIter<R> {
+ DebugInfoUnitHeadersIter {
+ input: self.debug_info_section.clone(),
+ offset: DebugInfoOffset(R::Offset::from_u8(0)),
+ }
+ }
+
+    /// Get the `UnitHeader` located at `offset` into this `.debug_info` section.
+ pub fn header_from_offset(&self, offset: DebugInfoOffset<R::Offset>) -> Result<UnitHeader<R>> {
+ let input = &mut self.debug_info_section.clone();
+ input.skip(offset.0)?;
+ parse_unit_header(input, offset.into())
+ }
+}
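+
+// A hedged sketch of `header_from_offset` (assuming `offset` is a
+// `DebugInfoOffset` pointing at the start of a unit header, e.g. taken from a
+// `.debug_aranges` header):
+//
+//     let header = debug_info.header_from_offset(offset)?;
+//     println!("unit at {:?} is {} bytes long", header.offset(), header.unit_length());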
+
+impl<T> DebugInfo<T> {
+ /// Create a `DebugInfo` section that references the data in `self`.
+ ///
+ /// This is useful when `R` implements `Reader` but `T` does not.
+ ///
+ /// ## Example Usage
+ ///
+ /// ```rust,no_run
+ /// # let load_section = || unimplemented!();
+ /// // Read the DWARF section into a `Vec` with whatever object loader you're using.
+ /// let owned_section: gimli::DebugInfo<Vec<u8>> = load_section();
+ /// // Create a reference to the DWARF section.
+ /// let section = owned_section.borrow(|section| {
+ /// gimli::EndianSlice::new(&section, gimli::LittleEndian)
+ /// });
+ /// ```
+ pub fn borrow<'a, F, R>(&'a self, mut borrow: F) -> DebugInfo<R>
+ where
+ F: FnMut(&'a T) -> R,
+ {
+ borrow(&self.debug_info_section).into()
+ }
+}
+
+impl<R> Section<R> for DebugInfo<R> {
+ fn id() -> SectionId {
+ SectionId::DebugInfo
+ }
+
+ fn reader(&self) -> &R {
+ &self.debug_info_section
+ }
+}
+
+impl<R> From<R> for DebugInfo<R> {
+ fn from(debug_info_section: R) -> Self {
+ DebugInfo { debug_info_section }
+ }
+}
+
+/// An iterator over the units of a .debug_info section.
+///
+/// See the [documentation on
+/// `DebugInfo::units`](./struct.DebugInfo.html#method.units) for more detail.
+#[derive(Clone, Debug)]
+pub struct DebugInfoUnitHeadersIter<R: Reader> {
+ input: R,
+ offset: DebugInfoOffset<R::Offset>,
+}
+
+impl<R: Reader> DebugInfoUnitHeadersIter<R> {
+ /// Advance the iterator to the next unit header.
+ pub fn next(&mut self) -> Result<Option<UnitHeader<R>>> {
+ if self.input.is_empty() {
+ Ok(None)
+ } else {
+ let len = self.input.len();
+ match parse_unit_header(&mut self.input, self.offset.into()) {
+ Ok(header) => {
+ self.offset.0 += len - self.input.len();
+ Ok(Some(header))
+ }
+ Err(e) => {
+ self.input.empty();
+ Err(e)
+ }
+ }
+ }
+ }
+}
+
+#[cfg(feature = "fallible-iterator")]
+impl<R: Reader> fallible_iterator::FallibleIterator for DebugInfoUnitHeadersIter<R> {
+ type Item = UnitHeader<R>;
+ type Error = Error;
+
+ fn next(&mut self) -> ::core::result::Result<Option<Self::Item>, Self::Error> {
+ DebugInfoUnitHeadersIter::next(self)
+ }
+}
+
+/// Parse the unit type from the unit header.
+fn parse_unit_type<R: Reader>(input: &mut R) -> Result<constants::DwUt> {
+ let val = input.read_u8()?;
+ Ok(constants::DwUt(val))
+}
+
+/// Parse the `debug_abbrev_offset` in the compilation unit header.
+fn parse_debug_abbrev_offset<R: Reader>(
+ input: &mut R,
+ format: Format,
+) -> Result<DebugAbbrevOffset<R::Offset>> {
+ input.read_offset(format).map(DebugAbbrevOffset)
+}
+
+/// Parse the `debug_info_offset` in the arange header.
+pub(crate) fn parse_debug_info_offset<R: Reader>(
+ input: &mut R,
+ format: Format,
+) -> Result<DebugInfoOffset<R::Offset>> {
+ input.read_offset(format).map(DebugInfoOffset)
+}
+
+/// This enum specifies the type of the unit and any type
+/// specific data carried in the header (e.g. the type
+/// signature/type offset of a type unit).
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum UnitType<Offset>
+where
+ Offset: ReaderOffset,
+{
+ /// In DWARF5, a unit with type `DW_UT_compile`. In previous DWARF versions,
+ /// any unit appearing in the .debug_info section.
+ Compilation,
+ /// In DWARF5, a unit with type `DW_UT_type`. In DWARF4, any unit appearing
+ /// in the .debug_types section.
+ Type {
+ /// The unique type signature for this type unit.
+ type_signature: DebugTypeSignature,
+ /// The offset within this type unit where the type is defined.
+ type_offset: UnitOffset<Offset>,
+ },
+ /// A unit with type `DW_UT_partial`. The root DIE of this unit should be a
+ /// `DW_TAG_partial_unit`.
+ Partial,
+ /// A unit with type `DW_UT_skeleton`. The enclosed dwo_id can be used to
+ /// link this with the corresponding `SplitCompilation` unit in a dwo file.
+ /// NB: The non-standard GNU split DWARF extension to DWARF 4 will instead
+ /// be a `Compilation` unit with the dwo_id present as an attribute on the
+ /// root DIE.
+ Skeleton(DwoId),
+ /// A unit with type `DW_UT_split_compile`. The enclosed dwo_id can be used to
+ /// link this with the corresponding `Skeleton` unit in the original binary.
+ /// NB: The non-standard GNU split DWARF extension to DWARF 4 will instead
+ /// be a `Compilation` unit with the dwo_id present as an attribute on the
+ /// root DIE.
+ SplitCompilation(DwoId),
+ /// A unit with type `DW_UT_split_type`. A split type unit is identical to a
+ /// conventional type unit except for the section in which it appears.
+ SplitType {
+ /// The unique type signature for this type unit.
+ type_signature: DebugTypeSignature,
+ /// The offset within this type unit where the type is defined.
+ type_offset: UnitOffset<Offset>,
+ },
+}
+
+impl<Offset> UnitType<Offset>
+where
+ Offset: ReaderOffset,
+{
+ // TODO: This will be used by the DWARF writing code once it
+ // supports unit types other than simple compilation units.
+ #[allow(unused)]
+ pub(crate) fn dw_ut(&self) -> constants::DwUt {
+ match self {
+ UnitType::Compilation => constants::DW_UT_compile,
+ UnitType::Type { .. } => constants::DW_UT_type,
+ UnitType::Partial => constants::DW_UT_partial,
+ UnitType::Skeleton(_) => constants::DW_UT_skeleton,
+ UnitType::SplitCompilation(_) => constants::DW_UT_split_compile,
+ UnitType::SplitType { .. } => constants::DW_UT_split_type,
+ }
+ }
+}
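+
+// Callers usually match on `UnitHeader::type_()` to pick up the type-specific
+// payload. A hedged sketch (assuming `header` is a parsed `UnitHeader`):
+//
+//     match header.type_() {
+//         UnitType::Compilation | UnitType::Partial => { /* plain DIE tree */ }
+//         UnitType::Type { type_signature, .. } => println!("type unit {:?}", type_signature),
+//         UnitType::Skeleton(dwo_id) | UnitType::SplitCompilation(dwo_id) => {
+//             println!("split DWARF, dwo_id = {:?}", dwo_id)
+//         }
+//         UnitType::SplitType { .. } => { /* a type unit that lives in a .dwo */ }
+//     }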
+
+/// The common fields for the headers of compilation units and
+/// type units.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub struct UnitHeader<R, Offset = <R as Reader>::Offset>
+where
+ R: Reader<Offset = Offset>,
+ Offset: ReaderOffset,
+{
+ encoding: Encoding,
+ unit_length: Offset,
+ unit_type: UnitType<Offset>,
+ debug_abbrev_offset: DebugAbbrevOffset<Offset>,
+ unit_offset: UnitSectionOffset<Offset>,
+ entries_buf: R,
+}
+
+/// Static methods.
+impl<R, Offset> UnitHeader<R, Offset>
+where
+ R: Reader<Offset = Offset>,
+ Offset: ReaderOffset,
+{
+ /// Construct a new `UnitHeader`.
+ pub fn new(
+ encoding: Encoding,
+ unit_length: Offset,
+ unit_type: UnitType<Offset>,
+ debug_abbrev_offset: DebugAbbrevOffset<Offset>,
+ unit_offset: UnitSectionOffset<Offset>,
+ entries_buf: R,
+ ) -> Self {
+ UnitHeader {
+ encoding,
+ unit_length,
+ unit_type,
+ debug_abbrev_offset,
+ unit_offset,
+ entries_buf,
+ }
+ }
+}
+
+/// Instance methods.
+impl<R, Offset> UnitHeader<R, Offset>
+where
+ R: Reader<Offset = Offset>,
+ Offset: ReaderOffset,
+{
+ /// Get the offset of this unit within its section.
+ pub fn offset(&self) -> UnitSectionOffset<Offset> {
+ self.unit_offset
+ }
+
+    /// Return the serialized size of the common unit header for this unit's
+    /// DWARF format and version.
+ pub fn size_of_header(&self) -> usize {
+ let unit_length_size = self.encoding.format.initial_length_size() as usize;
+ let version_size = 2;
+ let debug_abbrev_offset_size = self.encoding.format.word_size() as usize;
+ let address_size_size = 1;
+ let unit_type_size = if self.encoding.version == 5 { 1 } else { 0 };
+ let type_specific_size = match self.unit_type {
+ UnitType::Compilation | UnitType::Partial => 0,
+ UnitType::Type { .. } | UnitType::SplitType { .. } => {
+ let type_signature_size = 8;
+ let type_offset_size = self.encoding.format.word_size() as usize;
+ type_signature_size + type_offset_size
+ }
+ UnitType::Skeleton(_) | UnitType::SplitCompilation(_) => 8,
+ };
+
+ unit_length_size
+ + version_size
+ + debug_abbrev_offset_size
+ + address_size_size
+ + unit_type_size
+ + type_specific_size
+ }
+
+ /// Get the length of the debugging info for this compilation unit, not
+ /// including the byte length of the encoded length itself.
+ pub fn unit_length(&self) -> Offset {
+ self.unit_length
+ }
+
+ /// Get the length of the debugging info for this compilation unit,
+ /// including the byte length of the encoded length itself.
+ pub fn length_including_self(&self) -> Offset {
+ Offset::from_u8(self.format().initial_length_size()) + self.unit_length
+ }
+
+ /// Return the encoding parameters for this unit.
+ pub fn encoding(&self) -> Encoding {
+ self.encoding
+ }
+
+ /// Get the DWARF version of the debugging info for this compilation unit.
+ pub fn version(&self) -> u16 {
+ self.encoding.version
+ }
+
+ /// Get the UnitType of this unit.
+ pub fn type_(&self) -> UnitType<Offset> {
+ self.unit_type
+ }
+
+ /// The offset into the `.debug_abbrev` section for this compilation unit's
+ /// debugging information entries' abbreviations.
+ pub fn debug_abbrev_offset(&self) -> DebugAbbrevOffset<Offset> {
+ self.debug_abbrev_offset
+ }
+
+ /// The size of addresses (in bytes) in this compilation unit.
+ pub fn address_size(&self) -> u8 {
+ self.encoding.address_size
+ }
+
+ /// Whether this compilation unit is encoded in 64- or 32-bit DWARF.
+ pub fn format(&self) -> Format {
+ self.encoding.format
+ }
+
+ /// The serialized size of the header for this compilation unit.
+ pub fn header_size(&self) -> Offset {
+ self.length_including_self() - self.entries_buf.len()
+ }
+
+ pub(crate) fn is_valid_offset(&self, offset: UnitOffset<Offset>) -> bool {
+ let size_of_header = self.header_size();
+ if offset.0 < size_of_header {
+ return false;
+ }
+
+ let relative_to_entries_buf = offset.0 - size_of_header;
+ relative_to_entries_buf < self.entries_buf.len()
+ }
+
+ /// Get the underlying bytes for the supplied range.
+ pub fn range(&self, idx: Range<UnitOffset<Offset>>) -> Result<R> {
+ if !self.is_valid_offset(idx.start) {
+ return Err(Error::OffsetOutOfBounds);
+ }
+ if !self.is_valid_offset(idx.end) {
+ return Err(Error::OffsetOutOfBounds);
+ }
+ assert!(idx.start <= idx.end);
+ let size_of_header = self.header_size();
+ let start = idx.start.0 - size_of_header;
+ let end = idx.end.0 - size_of_header;
+ let mut input = self.entries_buf.clone();
+ input.skip(start)?;
+ input.truncate(end - start)?;
+ Ok(input)
+ }
+
+ /// Get the underlying bytes for the supplied range.
+ pub fn range_from(&self, idx: RangeFrom<UnitOffset<Offset>>) -> Result<R> {
+ if !self.is_valid_offset(idx.start) {
+ return Err(Error::OffsetOutOfBounds);
+ }
+ let start = idx.start.0 - self.header_size();
+ let mut input = self.entries_buf.clone();
+ input.skip(start)?;
+ Ok(input)
+ }
+
+ /// Get the underlying bytes for the supplied range.
+ pub fn range_to(&self, idx: RangeTo<UnitOffset<Offset>>) -> Result<R> {
+ if !self.is_valid_offset(idx.end) {
+ return Err(Error::OffsetOutOfBounds);
+ }
+ let end = idx.end.0 - self.header_size();
+ let mut input = self.entries_buf.clone();
+ input.truncate(end)?;
+ Ok(input)
+ }
+
+ /// Read the `DebuggingInformationEntry` at the given offset.
+ pub fn entry<'me, 'abbrev>(
+ &'me self,
+ abbreviations: &'abbrev Abbreviations,
+ offset: UnitOffset<Offset>,
+ ) -> Result<DebuggingInformationEntry<'abbrev, 'me, R>> {
+ let mut input = self.range_from(offset..)?;
+ let entry = DebuggingInformationEntry::parse(&mut input, self, abbreviations)?;
+ entry.ok_or(Error::NoEntryAtGivenOffset)
+ }
+
+ /// Navigate this unit's `DebuggingInformationEntry`s.
+ pub fn entries<'me, 'abbrev>(
+ &'me self,
+ abbreviations: &'abbrev Abbreviations,
+ ) -> EntriesCursor<'abbrev, 'me, R> {
+ EntriesCursor {
+ unit: self,
+ input: self.entries_buf.clone(),
+ abbreviations,
+ cached_current: None,
+ delta_depth: 0,
+ }
+ }
+
+ /// Navigate this compilation unit's `DebuggingInformationEntry`s
+ /// starting at the given offset.
+ pub fn entries_at_offset<'me, 'abbrev>(
+ &'me self,
+ abbreviations: &'abbrev Abbreviations,
+ offset: UnitOffset<Offset>,
+ ) -> Result<EntriesCursor<'abbrev, 'me, R>> {
+ let input = self.range_from(offset..)?;
+ Ok(EntriesCursor {
+ unit: self,
+ input,
+ abbreviations,
+ cached_current: None,
+ delta_depth: 0,
+ })
+ }
+
+ /// Navigate this unit's `DebuggingInformationEntry`s as a tree
+ /// starting at the given offset.
+ pub fn entries_tree<'me, 'abbrev>(
+ &'me self,
+ abbreviations: &'abbrev Abbreviations,
+ offset: Option<UnitOffset<Offset>>,
+ ) -> Result<EntriesTree<'abbrev, 'me, R>> {
+ let input = match offset {
+ Some(offset) => self.range_from(offset..)?,
+ None => self.entries_buf.clone(),
+ };
+ Ok(EntriesTree::new(input, self, abbreviations))
+ }
+
+ /// Read the raw data that defines the Debugging Information Entries.
+ pub fn entries_raw<'me, 'abbrev>(
+ &'me self,
+ abbreviations: &'abbrev Abbreviations,
+ offset: Option<UnitOffset<Offset>>,
+ ) -> Result<EntriesRaw<'abbrev, 'me, R>> {
+ let input = match offset {
+ Some(offset) => self.range_from(offset..)?,
+ None => self.entries_buf.clone(),
+ };
+ Ok(EntriesRaw {
+ input,
+ unit: self,
+ abbreviations,
+ depth: 0,
+ })
+ }
+
+ /// Parse this unit's abbreviations.
+ pub fn abbreviations(&self, debug_abbrev: &DebugAbbrev<R>) -> Result<Abbreviations> {
+ debug_abbrev.abbreviations(self.debug_abbrev_offset())
+ }
+}
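+
+// `size_of_header` is plain arithmetic over the fields above; two worked examples
+// (illustrative only, not asserted by the tests in this file):
+//
+//     DWARF 4, 32-bit compilation unit:
+//         4 (unit_length) + 2 (version) + 4 (debug_abbrev_offset) + 1 (address_size) = 11
+//     DWARF 5, 64-bit skeleton unit:
+//         12 (unit_length) + 2 (version) + 8 (debug_abbrev_offset) + 1 (address_size)
+//         + 1 (unit_type) + 8 (dwo_id) = 32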
+
+/// Parse a unit header.
+fn parse_unit_header<R, Offset>(
+ input: &mut R,
+ unit_offset: UnitSectionOffset<Offset>,
+) -> Result<UnitHeader<R>>
+where
+ R: Reader<Offset = Offset>,
+ Offset: ReaderOffset,
+{
+ let (unit_length, format) = input.read_initial_length()?;
+ let mut rest = input.split(unit_length)?;
+
+ let version = rest.read_u16()?;
+ let abbrev_offset;
+ let address_size;
+ let unit_type;
+    // DWARF 1 was very different and is obsolete, so it isn't supported by this
+    // reader.
+ if 2 <= version && version <= 4 {
+ abbrev_offset = parse_debug_abbrev_offset(&mut rest, format)?;
+ address_size = rest.read_u8()?;
+ // Before DWARF5, all units in the .debug_info section are compilation
+ // units, and all units in the .debug_types section are type units.
+ unit_type = match unit_offset {
+ UnitSectionOffset::DebugInfoOffset(_) => constants::DW_UT_compile,
+ UnitSectionOffset::DebugTypesOffset(_) => constants::DW_UT_type,
+ };
+ } else if version == 5 {
+ unit_type = parse_unit_type(&mut rest)?;
+ address_size = rest.read_u8()?;
+ abbrev_offset = parse_debug_abbrev_offset(&mut rest, format)?;
+ } else {
+ return Err(Error::UnknownVersion(u64::from(version)));
+ }
+ let encoding = Encoding {
+ format,
+ version,
+ address_size,
+ };
+
+ // Parse any data specific to this type of unit.
+ let unit_type = match unit_type {
+ constants::DW_UT_compile => UnitType::Compilation,
+ constants::DW_UT_type => {
+ let type_signature = parse_type_signature(&mut rest)?;
+ let type_offset = parse_type_offset(&mut rest, format)?;
+ UnitType::Type {
+ type_signature,
+ type_offset,
+ }
+ }
+ constants::DW_UT_partial => UnitType::Partial,
+ constants::DW_UT_skeleton => {
+ let dwo_id = parse_dwo_id(&mut rest)?;
+ UnitType::Skeleton(dwo_id)
+ }
+ constants::DW_UT_split_compile => {
+ let dwo_id = parse_dwo_id(&mut rest)?;
+ UnitType::SplitCompilation(dwo_id)
+ }
+ constants::DW_UT_split_type => {
+ let type_signature = parse_type_signature(&mut rest)?;
+ let type_offset = parse_type_offset(&mut rest, format)?;
+ UnitType::SplitType {
+ type_signature,
+ type_offset,
+ }
+ }
+ _ => return Err(Error::UnsupportedUnitType),
+ };
+
+ Ok(UnitHeader::new(
+ encoding,
+ unit_length,
+ unit_type,
+ abbrev_offset,
+ unit_offset,
+ rest,
+ ))
+}
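+
+// The header field order differs between DWARF versions, which is why the parser
+// above branches on `version` before reading the remaining fields:
+//
+//     DWARF 2-4: unit_length, version, debug_abbrev_offset, address_size
+//     DWARF 5:   unit_length, version, unit_type, address_size, debug_abbrev_offset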
+
+/// Parse a dwo_id from a header
+fn parse_dwo_id<R: Reader>(input: &mut R) -> Result<DwoId> {
+ Ok(DwoId(input.read_u64()?))
+}
+
+/// A Debugging Information Entry (DIE).
+///
+/// DIEs have a set of attributes and optionally have children DIEs as well.
+#[derive(Clone, Debug)]
+pub struct DebuggingInformationEntry<'abbrev, 'unit, R, Offset = <R as Reader>::Offset>
+where
+ R: Reader<Offset = Offset>,
+ Offset: ReaderOffset,
+{
+ offset: UnitOffset<Offset>,
+ attrs_slice: R,
+ attrs_len: Cell<Option<Offset>>,
+ abbrev: &'abbrev Abbreviation,
+ unit: &'unit UnitHeader<R, Offset>,
+}
+
+impl<'abbrev, 'unit, R, Offset> DebuggingInformationEntry<'abbrev, 'unit, R, Offset>
+where
+ R: Reader<Offset = Offset>,
+ Offset: ReaderOffset,
+{
+ /// Construct a new `DebuggingInformationEntry`.
+ pub fn new(
+ offset: UnitOffset<Offset>,
+ attrs_slice: R,
+ abbrev: &'abbrev Abbreviation,
+ unit: &'unit UnitHeader<R, Offset>,
+ ) -> Self {
+ DebuggingInformationEntry {
+ offset,
+ attrs_slice,
+ attrs_len: Cell::new(None),
+ abbrev,
+ unit,
+ }
+ }
+
+ /// Get this entry's code.
+ pub fn code(&self) -> u64 {
+ self.abbrev.code()
+ }
+
+ /// Get this entry's offset.
+ pub fn offset(&self) -> UnitOffset<Offset> {
+ self.offset
+ }
+
+ /// Get this entry's `DW_TAG_whatever` tag.
+ ///
+ /// ```
+ /// # use gimli::{DebugAbbrev, DebugInfo, LittleEndian};
+ /// # let info_buf = [
+    /// # // Compilation unit header
+ /// #
+ /// # // 32-bit unit length = 12
+ /// # 0x0c, 0x00, 0x00, 0x00,
+ /// # // Version 4
+ /// # 0x04, 0x00,
+ /// # // debug_abbrev_offset
+ /// # 0x00, 0x00, 0x00, 0x00,
+ /// # // Address size
+ /// # 0x04,
+ /// #
+ /// # // DIEs
+ /// #
+ /// # // Abbreviation code
+ /// # 0x01,
+ /// # // Attribute of form DW_FORM_string = "foo\0"
+ /// # 0x66, 0x6f, 0x6f, 0x00,
+ /// # ];
+ /// # let debug_info = DebugInfo::new(&info_buf, LittleEndian);
+ /// # let abbrev_buf = [
+ /// # // Code
+ /// # 0x01,
+ /// # // DW_TAG_subprogram
+ /// # 0x2e,
+ /// # // DW_CHILDREN_no
+ /// # 0x00,
+ /// # // Begin attributes
+ /// # // Attribute name = DW_AT_name
+ /// # 0x03,
+ /// # // Attribute form = DW_FORM_string
+ /// # 0x08,
+ /// # // End attributes
+ /// # 0x00,
+ /// # 0x00,
+ /// # // Null terminator
+ /// # 0x00
+ /// # ];
+ /// # let debug_abbrev = DebugAbbrev::new(&abbrev_buf, LittleEndian);
+ /// # let unit = debug_info.units().next().unwrap().unwrap();
+ /// # let abbrevs = unit.abbreviations(&debug_abbrev).unwrap();
+ /// # let mut cursor = unit.entries(&abbrevs);
+ /// # let (_, entry) = cursor.next_dfs().unwrap().unwrap();
+ /// # let mut get_some_entry = || entry;
+ /// let entry = get_some_entry();
+ ///
+ /// match entry.tag() {
+ /// gimli::DW_TAG_subprogram =>
+ /// println!("this entry contains debug info about a function"),
+ /// gimli::DW_TAG_inlined_subroutine =>
+ /// println!("this entry contains debug info about a particular instance of inlining"),
+ /// gimli::DW_TAG_variable =>
+ /// println!("this entry contains debug info about a local variable"),
+ /// gimli::DW_TAG_formal_parameter =>
+ /// println!("this entry contains debug info about a function parameter"),
+ /// otherwise =>
+ /// println!("this entry is some other kind of data: {:?}", otherwise),
+ /// };
+ /// ```
+ pub fn tag(&self) -> constants::DwTag {
+ self.abbrev.tag()
+ }
+
+ /// Return true if this entry's type can have children, false otherwise.
+ pub fn has_children(&self) -> bool {
+ self.abbrev.has_children()
+ }
+
+ /// Iterate over this entry's set of attributes.
+ ///
+ /// ```
+ /// use gimli::{DebugAbbrev, DebugInfo, LittleEndian};
+ ///
+ /// // Read the `.debug_info` section.
+ ///
+ /// # let info_buf = [
+    /// # // Compilation unit header
+ /// #
+ /// # // 32-bit unit length = 12
+ /// # 0x0c, 0x00, 0x00, 0x00,
+ /// # // Version 4
+ /// # 0x04, 0x00,
+ /// # // debug_abbrev_offset
+ /// # 0x00, 0x00, 0x00, 0x00,
+ /// # // Address size
+ /// # 0x04,
+ /// #
+ /// # // DIEs
+ /// #
+ /// # // Abbreviation code
+ /// # 0x01,
+ /// # // Attribute of form DW_FORM_string = "foo\0"
+ /// # 0x66, 0x6f, 0x6f, 0x00,
+ /// # ];
+ /// # let read_debug_info_section_somehow = || &info_buf;
+ /// let debug_info = DebugInfo::new(read_debug_info_section_somehow(), LittleEndian);
+ ///
+ /// // Get the data about the first compilation unit out of the `.debug_info`.
+ ///
+ /// let unit = debug_info.units().next()
+ /// .expect("Should have at least one compilation unit")
+ /// .expect("and it should parse ok");
+ ///
+ /// // Read the `.debug_abbrev` section and parse the
+ /// // abbreviations for our compilation unit.
+ ///
+ /// # let abbrev_buf = [
+ /// # // Code
+ /// # 0x01,
+ /// # // DW_TAG_subprogram
+ /// # 0x2e,
+ /// # // DW_CHILDREN_no
+ /// # 0x00,
+ /// # // Begin attributes
+ /// # // Attribute name = DW_AT_name
+ /// # 0x03,
+ /// # // Attribute form = DW_FORM_string
+ /// # 0x08,
+ /// # // End attributes
+ /// # 0x00,
+ /// # 0x00,
+ /// # // Null terminator
+ /// # 0x00
+ /// # ];
+ /// # let read_debug_abbrev_section_somehow = || &abbrev_buf;
+ /// let debug_abbrev = DebugAbbrev::new(read_debug_abbrev_section_somehow(), LittleEndian);
+ /// let abbrevs = unit.abbreviations(&debug_abbrev).unwrap();
+ ///
+ /// // Get the first entry from that compilation unit.
+ ///
+ /// let mut cursor = unit.entries(&abbrevs);
+ /// let (_, entry) = cursor.next_dfs()
+ /// .expect("Should parse next entry")
+ /// .expect("Should have at least one entry");
+ ///
+ /// // Finally, print the first entry's attributes.
+ ///
+ /// let mut attrs = entry.attrs();
+ /// while let Some(attr) = attrs.next().unwrap() {
+ /// println!("Attribute name = {:?}", attr.name());
+ /// println!("Attribute value = {:?}", attr.value());
+ /// }
+ /// ```
+ ///
+ /// Can be [used with
+ /// `FallibleIterator`](./index.html#using-with-fallibleiterator).
+ pub fn attrs<'me>(&'me self) -> AttrsIter<'abbrev, 'me, 'unit, R> {
+ AttrsIter {
+ input: self.attrs_slice.clone(),
+ attributes: self.abbrev.attributes(),
+ entry: self,
+ }
+ }
+
+ /// Find the first attribute in this entry which has the given name,
+ /// and return it. Returns `Ok(None)` if no attribute is found.
+ pub fn attr(&self, name: constants::DwAt) -> Result<Option<Attribute<R>>> {
+ let mut attrs = self.attrs();
+ while let Some(attr) = attrs.next()? {
+ if attr.name() == name {
+ return Ok(Some(attr));
+ }
+ }
+ Ok(None)
+ }
+
+ /// Find the first attribute in this entry which has the given name,
+ /// and return its raw value. Returns `Ok(None)` if no attribute is found.
+ pub fn attr_value_raw(&self, name: constants::DwAt) -> Result<Option<AttributeValue<R>>> {
+ self.attr(name)
+ .map(|attr| attr.map(|attr| attr.raw_value()))
+ }
+
+ /// Find the first attribute in this entry which has the given name,
+ /// and return its normalized value. Returns `Ok(None)` if no
+ /// attribute is found.
+ pub fn attr_value(&self, name: constants::DwAt) -> Result<Option<AttributeValue<R>>> {
+ self.attr(name).map(|attr| attr.map(|attr| attr.value()))
+ }
+
+ /// Return the input buffer after the last attribute.
+ #[allow(clippy::inline_always)]
+ #[inline(always)]
+ fn after_attrs(&self) -> Result<R> {
+ if let Some(attrs_len) = self.attrs_len.get() {
+ let mut input = self.attrs_slice.clone();
+ input.skip(attrs_len)?;
+ Ok(input)
+ } else {
+ let mut attrs = self.attrs();
+ while let Some(_) = attrs.next()? {}
+ Ok(attrs.input)
+ }
+ }
+
+ /// Use the `DW_AT_sibling` attribute to find the input buffer for the
+ /// next sibling. Returns `None` if the attribute is missing or invalid.
+ fn sibling(&self) -> Option<R> {
+ let attr = self.attr_value(constants::DW_AT_sibling);
+ if let Ok(Some(AttributeValue::UnitRef(offset))) = attr {
+ if offset.0 > self.offset.0 {
+ if let Ok(input) = self.unit.range_from(offset..) {
+ return Some(input);
+ }
+ }
+ }
+ None
+ }
+
+ /// Parse an entry. Returns `Ok(None)` for null entries.
+ #[allow(clippy::inline_always)]
+ #[inline(always)]
+ fn parse(
+ input: &mut R,
+ unit: &'unit UnitHeader<R>,
+ abbreviations: &'abbrev Abbreviations,
+ ) -> Result<Option<Self>> {
+ let offset = unit.header_size() + input.offset_from(&unit.entries_buf);
+ let code = input.read_uleb128()?;
+ if code == 0 {
+ return Ok(None);
+ };
+ let abbrev = abbreviations.get(code).ok_or(Error::UnknownAbbreviation)?;
+ Ok(Some(DebuggingInformationEntry {
+ offset: UnitOffset(offset),
+ attrs_slice: input.clone(),
+ attrs_len: Cell::new(None),
+ abbrev,
+ unit,
+ }))
+ }
+}
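+
+// `attr`/`attr_value` are the usual way to pull out a single attribute without
+// walking the whole `AttrsIter`. A hedged sketch (assuming `entry` is a parsed
+// DIE whose `DW_AT_name` uses `DW_FORM_strp`, and `debug_str` is a `DebugStr`):
+//
+//     if let Some(AttributeValue::DebugStrRef(offset)) = entry.attr_value(gimli::DW_AT_name)? {
+//         let name = debug_str.get_str(offset)?;
+//         println!("name = {}", name.to_string_lossy()?);
+//     }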
+
+/// The value of an attribute in a `DebuggingInformationEntry`.
+//
+// Set the discriminant size so that all variants use the same alignment
+// for their data. This gives better code generation in `parse_attribute`.
+#[repr(u64)]
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+pub enum AttributeValue<R, Offset = <R as Reader>::Offset>
+where
+ R: Reader<Offset = Offset>,
+ Offset: ReaderOffset,
+{
+ /// "Refers to some location in the address space of the described program."
+ Addr(u64),
+
+ /// A slice of an arbitrary number of bytes.
+ Block(R),
+
+ /// A one byte constant data value. How to interpret the byte depends on context.
+ ///
+ /// From section 7 of the standard: "Depending on context, it may be a
+ /// signed integer, an unsigned integer, a floating-point constant, or
+ /// anything else."
+ Data1(u8),
+
+ /// A two byte constant data value. How to interpret the bytes depends on context.
+ ///
+    /// These bytes have been converted from `R::Endian` into a native-endian
+    /// integer; if that conversion was not wanted, it may need to be reversed.
+ ///
+ /// From section 7 of the standard: "Depending on context, it may be a
+ /// signed integer, an unsigned integer, a floating-point constant, or
+ /// anything else."
+ Data2(u16),
+
+ /// A four byte constant data value. How to interpret the bytes depends on context.
+ ///
+    /// These bytes have been converted from `R::Endian` into a native-endian
+    /// integer; if that conversion was not wanted, it may need to be reversed.
+ ///
+ /// From section 7 of the standard: "Depending on context, it may be a
+ /// signed integer, an unsigned integer, a floating-point constant, or
+ /// anything else."
+ Data4(u32),
+
+ /// An eight byte constant data value. How to interpret the bytes depends on context.
+ ///
+    /// These bytes have been converted from `R::Endian` into a native-endian
+    /// integer; if that conversion was not wanted, it may need to be reversed.
+ ///
+ /// From section 7 of the standard: "Depending on context, it may be a
+ /// signed integer, an unsigned integer, a floating-point constant, or
+ /// anything else."
+ Data8(u64),
+
+ /// A signed integer constant.
+ Sdata(i64),
+
+ /// An unsigned integer constant.
+ Udata(u64),
+
+ /// "The information bytes contain a DWARF expression (see Section 2.5) or
+ /// location description (see Section 2.6)."
+ Exprloc(Expression<R>),
+
+ /// A boolean that indicates presence or absence of the attribute.
+ Flag(bool),
+
+ /// An offset into another section. Which section this is an offset into
+ /// depends on context.
+ SecOffset(Offset),
+
+ /// An offset to a set of addresses in the `.debug_addr` section.
+ DebugAddrBase(DebugAddrBase<Offset>),
+
+ /// An index into a set of addresses in the `.debug_addr` section.
+ DebugAddrIndex(DebugAddrIndex<Offset>),
+
+ /// An offset into the current compilation unit.
+ UnitRef(UnitOffset<Offset>),
+
+ /// An offset into the current `.debug_info` section, but possibly a
+ /// different compilation unit from the current one.
+ DebugInfoRef(DebugInfoOffset<Offset>),
+
+ /// An offset into the `.debug_info` section of the supplementary object file.
+ DebugInfoRefSup(DebugInfoOffset<Offset>),
+
+ /// An offset into the `.debug_line` section.
+ DebugLineRef(DebugLineOffset<Offset>),
+
+ /// An offset into either the `.debug_loc` section or the `.debug_loclists` section.
+ LocationListsRef(LocationListsOffset<Offset>),
+
+ /// An offset to a set of offsets in the `.debug_loclists` section.
+ DebugLocListsBase(DebugLocListsBase<Offset>),
+
+ /// An index into a set of offsets in the `.debug_loclists` section.
+ DebugLocListsIndex(DebugLocListsIndex<Offset>),
+
+ /// An offset into the `.debug_macinfo` section.
+ DebugMacinfoRef(DebugMacinfoOffset<Offset>),
+
+ /// An offset into the `.debug_macro` section.
+ DebugMacroRef(DebugMacroOffset<Offset>),
+
+ /// An offset into the `.debug_ranges` section.
+ RangeListsRef(RawRangeListsOffset<Offset>),
+
+ /// An offset to a set of offsets in the `.debug_rnglists` section.
+ DebugRngListsBase(DebugRngListsBase<Offset>),
+
+ /// An index into a set of offsets in the `.debug_rnglists` section.
+ DebugRngListsIndex(DebugRngListsIndex<Offset>),
+
+ /// A type signature.
+ DebugTypesRef(DebugTypeSignature),
+
+ /// An offset into the `.debug_str` section.
+ DebugStrRef(DebugStrOffset<Offset>),
+
+ /// An offset into the `.debug_str` section of the supplementary object file.
+ DebugStrRefSup(DebugStrOffset<Offset>),
+
+ /// An offset to a set of entries in the `.debug_str_offsets` section.
+ DebugStrOffsetsBase(DebugStrOffsetsBase<Offset>),
+
+ /// An index into a set of entries in the `.debug_str_offsets` section.
+ DebugStrOffsetsIndex(DebugStrOffsetsIndex<Offset>),
+
+ /// An offset into the `.debug_line_str` section.
+ DebugLineStrRef(DebugLineStrOffset<Offset>),
+
+ /// A slice of bytes representing a string. Does not include a final null byte.
+ /// Not guaranteed to be UTF-8 or anything like that.
+ String(R),
+
+ /// The value of a `DW_AT_encoding` attribute.
+ Encoding(constants::DwAte),
+
+ /// The value of a `DW_AT_decimal_sign` attribute.
+ DecimalSign(constants::DwDs),
+
+ /// The value of a `DW_AT_endianity` attribute.
+ Endianity(constants::DwEnd),
+
+ /// The value of a `DW_AT_accessibility` attribute.
+ Accessibility(constants::DwAccess),
+
+ /// The value of a `DW_AT_visibility` attribute.
+ Visibility(constants::DwVis),
+
+ /// The value of a `DW_AT_virtuality` attribute.
+ Virtuality(constants::DwVirtuality),
+
+ /// The value of a `DW_AT_language` attribute.
+ Language(constants::DwLang),
+
+ /// The value of a `DW_AT_address_class` attribute.
+ AddressClass(constants::DwAddr),
+
+ /// The value of a `DW_AT_identifier_case` attribute.
+ IdentifierCase(constants::DwId),
+
+ /// The value of a `DW_AT_calling_convention` attribute.
+ CallingConvention(constants::DwCc),
+
+ /// The value of a `DW_AT_inline` attribute.
+ Inline(constants::DwInl),
+
+ /// The value of a `DW_AT_ordering` attribute.
+ Ordering(constants::DwOrd),
+
+ /// An index into the filename entries from the line number information
+ /// table for the compilation unit containing this value.
+ FileIndex(u64),
+
+ /// An implementation-defined identifier uniquely identifying a compilation
+ /// unit.
+ DwoId(DwoId),
+}
+
+/// An attribute in a `DebuggingInformationEntry`, consisting of a name and
+/// associated value.
+#[derive(Copy, Clone, Debug, Eq, PartialEq)]
+pub struct Attribute<R: Reader> {
+ name: constants::DwAt,
+ value: AttributeValue<R>,
+}
+
+impl<R: Reader> Attribute<R> {
+ /// Get this attribute's name.
+ pub fn name(&self) -> constants::DwAt {
+ self.name
+ }
+
+ /// Get this attribute's raw value.
+ pub fn raw_value(&self) -> AttributeValue<R> {
+ self.value.clone()
+ }
+
+ /// Get this attribute's normalized value.
+ ///
+ /// Attribute values can potentially be encoded in multiple equivalent forms,
+ /// and may have special meaning depending on the attribute name. This method
+ /// converts the attribute value to a normalized form based on the attribute
+ /// name.
+ ///
+ /// See "Table 7.5: Attribute encodings" and "Table 7.6: Attribute form encodings".
+ #[allow(clippy::cyclomatic_complexity)]
+ #[allow(clippy::match_same_arms)]
+ pub fn value(&self) -> AttributeValue<R> {
+ // Table 7.5 shows the possible attribute classes for each name.
+ // Table 7.6 shows the possible attribute classes for each form.
+ // For each attribute name, we need to match on the form, and
+ // convert it to one of the classes that is allowed for both
+ // the name and the form.
+ //
+ // The individual class conversions rarely vary for each name,
+ // so for each class conversion we define a macro that matches
+ // on the allowed forms for that class.
+ //
+ // For some classes, we don't need to do any conversion, so their
+ // macro is empty. In the future we may want to fill them in to
+ // provide strict checking of the forms for each class. For now,
+ // they simply provide a way to document the allowed classes for
+ // each name.
+
+ // DW_FORM_addr
+ // DW_FORM_addrx
+ // DW_FORM_addrx1
+ // DW_FORM_addrx2
+ // DW_FORM_addrx3
+ // DW_FORM_addrx4
+ macro_rules! address {
+ () => {};
+ }
+ // DW_FORM_sec_offset
+ macro_rules! addrptr {
+ () => {
+ if let Some(offset) = self.offset_value() {
+ return AttributeValue::DebugAddrBase(DebugAddrBase(offset));
+ }
+ };
+ }
+ // DW_FORM_block
+ // DW_FORM_block1
+ // DW_FORM_block2
+ // DW_FORM_block4
+ macro_rules! block {
+ () => {};
+ }
+ // DW_FORM_sdata
+ // DW_FORM_udata
+ // DW_FORM_data1
+ // DW_FORM_data2
+ // DW_FORM_data4
+ // DW_FORM_data8
+ // DW_FORM_data16
+ // DW_FORM_implicit_const
+ macro_rules! constant {
+ ($value:ident, $variant:ident) => {
+ if let Some(value) = self.$value() {
+ return AttributeValue::$variant(value);
+ }
+ };
+ ($value:ident, $variant:ident, $constant:ident) => {
+ if let Some(value) = self.$value() {
+ return AttributeValue::$variant(constants::$constant(value));
+ }
+ };
+ }
+ // DW_FORM_exprloc
+ macro_rules! exprloc {
+ () => {
+ if let Some(value) = self.exprloc_value() {
+ return AttributeValue::Exprloc(value);
+ }
+ };
+ }
+ // DW_FORM_flag
+ // DW_FORM_flag_present
+ macro_rules! flag {
+ () => {};
+ }
+ // DW_FORM_sec_offset
+ macro_rules! lineptr {
+ () => {
+ if let Some(offset) = self.offset_value() {
+ return AttributeValue::DebugLineRef(DebugLineOffset(offset));
+ }
+ };
+ }
+ // This also covers `loclist` in DWARF version 5.
+ // DW_FORM_sec_offset
+ // DW_FORM_loclistx
+ macro_rules! loclistptr {
+ () => {
+ // DebugLocListsIndex is also an allowed form in DWARF version 5.
+ if let Some(offset) = self.offset_value() {
+ return AttributeValue::LocationListsRef(LocationListsOffset(offset));
+ }
+ };
+ }
+ // DW_FORM_sec_offset
+ macro_rules! loclistsptr {
+ () => {
+ if let Some(offset) = self.offset_value() {
+ return AttributeValue::DebugLocListsBase(DebugLocListsBase(offset));
+ }
+ };
+ }
+ // DWARF version <= 4.
+ // DW_FORM_sec_offset
+ macro_rules! macinfoptr {
+ () => {
+ if let Some(offset) = self.offset_value() {
+ return AttributeValue::DebugMacinfoRef(DebugMacinfoOffset(offset));
+ }
+ };
+ }
+ // DWARF version >= 5.
+ // DW_FORM_sec_offset
+ macro_rules! macroptr {
+ () => {
+ if let Some(offset) = self.offset_value() {
+ return AttributeValue::DebugMacroRef(DebugMacroOffset(offset));
+ }
+ };
+ }
+ // DW_FORM_ref_addr
+ // DW_FORM_ref1
+ // DW_FORM_ref2
+ // DW_FORM_ref4
+ // DW_FORM_ref8
+ // DW_FORM_ref_udata
+ // DW_FORM_ref_sig8
+ // DW_FORM_ref_sup4
+ // DW_FORM_ref_sup8
+ macro_rules! reference {
+ () => {};
+ }
+ // This also covers `rnglist` in DWARF version 5.
+ // DW_FORM_sec_offset
+ // DW_FORM_rnglistx
+ macro_rules! rangelistptr {
+ () => {
+ // DebugRngListsIndex is also an allowed form in DWARF version 5.
+ if let Some(offset) = self.offset_value() {
+ return AttributeValue::RangeListsRef(RawRangeListsOffset(offset));
+ }
+ };
+ }
+ // DW_FORM_sec_offset
+ macro_rules! rnglistsptr {
+ () => {
+ if let Some(offset) = self.offset_value() {
+ return AttributeValue::DebugRngListsBase(DebugRngListsBase(offset));
+ }
+ };
+ }
+ // DW_FORM_string
+ // DW_FORM_strp
+ // DW_FORM_strx
+ // DW_FORM_strx1
+ // DW_FORM_strx2
+ // DW_FORM_strx3
+ // DW_FORM_strx4
+ // DW_FORM_strp_sup
+ // DW_FORM_line_strp
+ macro_rules! string {
+ () => {};
+ }
+ // DW_FORM_sec_offset
+ macro_rules! stroffsetsptr {
+ () => {
+ if let Some(offset) = self.offset_value() {
+ return AttributeValue::DebugStrOffsetsBase(DebugStrOffsetsBase(offset));
+ }
+ };
+ }
+ // This isn't a separate form but it's useful to distinguish it from a generic udata.
+ macro_rules! dwoid {
+ () => {
+ if let Some(value) = self.udata_value() {
+ return AttributeValue::DwoId(DwoId(value));
+ }
+ };
+ }
+
+ // Perform the allowed class conversions for each attribute name.
+ match self.name {
+ constants::DW_AT_sibling => {
+ reference!();
+ }
+ constants::DW_AT_location => {
+ exprloc!();
+ loclistptr!();
+ }
+ constants::DW_AT_name => {
+ string!();
+ }
+ constants::DW_AT_ordering => {
+ constant!(u8_value, Ordering, DwOrd);
+ }
+ constants::DW_AT_byte_size
+ | constants::DW_AT_bit_offset
+ | constants::DW_AT_bit_size => {
+ constant!(udata_value, Udata);
+ exprloc!();
+ reference!();
+ }
+ constants::DW_AT_stmt_list => {
+ lineptr!();
+ }
+ constants::DW_AT_low_pc => {
+ address!();
+ }
+ constants::DW_AT_high_pc => {
+ address!();
+ constant!(udata_value, Udata);
+ }
+ constants::DW_AT_language => {
+ constant!(u16_value, Language, DwLang);
+ }
+ constants::DW_AT_discr => {
+ reference!();
+ }
+ constants::DW_AT_discr_value => {
+ // constant: depends on type of DW_TAG_variant_part,
+ // so caller must normalize.
+ }
+ constants::DW_AT_visibility => {
+ constant!(u8_value, Visibility, DwVis);
+ }
+ constants::DW_AT_import => {
+ reference!();
+ }
+ constants::DW_AT_string_length => {
+ exprloc!();
+ loclistptr!();
+ reference!();
+ }
+ constants::DW_AT_common_reference => {
+ reference!();
+ }
+ constants::DW_AT_comp_dir => {
+ string!();
+ }
+ constants::DW_AT_const_value => {
+ // TODO: constant: sign depends on DW_AT_type.
+ block!();
+ string!();
+ }
+ constants::DW_AT_containing_type => {
+ reference!();
+ }
+ constants::DW_AT_default_value => {
+ // TODO: constant: sign depends on DW_AT_type.
+ reference!();
+ flag!();
+ }
+ constants::DW_AT_inline => {
+ constant!(u8_value, Inline, DwInl);
+ }
+ constants::DW_AT_is_optional => {
+ flag!();
+ }
+ constants::DW_AT_lower_bound => {
+ // TODO: constant: sign depends on DW_AT_type.
+ exprloc!();
+ reference!();
+ }
+ constants::DW_AT_producer => {
+ string!();
+ }
+ constants::DW_AT_prototyped => {
+ flag!();
+ }
+ constants::DW_AT_return_addr => {
+ exprloc!();
+ loclistptr!();
+ }
+ constants::DW_AT_start_scope => {
+ // TODO: constant
+ rangelistptr!();
+ }
+ constants::DW_AT_bit_stride => {
+ constant!(udata_value, Udata);
+ exprloc!();
+ reference!();
+ }
+ constants::DW_AT_upper_bound => {
+ // TODO: constant: sign depends on DW_AT_type.
+ exprloc!();
+ reference!();
+ }
+ constants::DW_AT_abstract_origin => {
+ reference!();
+ }
+ constants::DW_AT_accessibility => {
+ constant!(u8_value, Accessibility, DwAccess);
+ }
+ constants::DW_AT_address_class => {
+ constant!(udata_value, AddressClass, DwAddr);
+ }
+ constants::DW_AT_artificial => {
+ flag!();
+ }
+ constants::DW_AT_base_types => {
+ reference!();
+ }
+ constants::DW_AT_calling_convention => {
+ constant!(u8_value, CallingConvention, DwCc);
+ }
+ constants::DW_AT_count => {
+ // TODO: constant
+ exprloc!();
+ reference!();
+ }
+ constants::DW_AT_data_member_location => {
+ // Constants must be handled before loclistptr so that DW_FORM_data4/8
+ // are correctly interpreted for DWARF version 4+.
+ constant!(udata_value, Udata);
+ exprloc!();
+ loclistptr!();
+ }
+ constants::DW_AT_decl_column => {
+ constant!(udata_value, Udata);
+ }
+ constants::DW_AT_decl_file => {
+ constant!(udata_value, FileIndex);
+ }
+ constants::DW_AT_decl_line => {
+ constant!(udata_value, Udata);
+ }
+ constants::DW_AT_declaration => {
+ flag!();
+ }
+ constants::DW_AT_discr_list => {
+ block!();
+ }
+ constants::DW_AT_encoding => {
+ constant!(u8_value, Encoding, DwAte);
+ }
+ constants::DW_AT_external => {
+ flag!();
+ }
+ constants::DW_AT_frame_base => {
+ exprloc!();
+ loclistptr!();
+ }
+ constants::DW_AT_friend => {
+ reference!();
+ }
+ constants::DW_AT_identifier_case => {
+ constant!(u8_value, IdentifierCase, DwId);
+ }
+ constants::DW_AT_macro_info => {
+ macinfoptr!();
+ }
+ constants::DW_AT_namelist_item => {
+ reference!();
+ }
+ constants::DW_AT_priority => {
+ reference!();
+ }
+ constants::DW_AT_segment => {
+ exprloc!();
+ loclistptr!();
+ }
+ constants::DW_AT_specification => {
+ reference!();
+ }
+ constants::DW_AT_static_link => {
+ exprloc!();
+ loclistptr!();
+ }
+ constants::DW_AT_type => {
+ reference!();
+ }
+ constants::DW_AT_use_location => {
+ exprloc!();
+ loclistptr!();
+ }
+ constants::DW_AT_variable_parameter => {
+ flag!();
+ }
+ constants::DW_AT_virtuality => {
+ constant!(u8_value, Virtuality, DwVirtuality);
+ }
+ constants::DW_AT_vtable_elem_location => {
+ exprloc!();
+ loclistptr!();
+ }
+ constants::DW_AT_allocated => {
+ // TODO: constant
+ exprloc!();
+ reference!();
+ }
+ constants::DW_AT_associated => {
+ // TODO: constant
+ exprloc!();
+ reference!();
+ }
+ constants::DW_AT_data_location => {
+ exprloc!();
+ }
+ constants::DW_AT_byte_stride => {
+ constant!(udata_value, Udata);
+ exprloc!();
+ reference!();
+ }
+ constants::DW_AT_entry_pc => {
+ // TODO: constant
+ address!();
+ }
+ constants::DW_AT_use_UTF8 => {
+ flag!();
+ }
+ constants::DW_AT_extension => {
+ reference!();
+ }
+ constants::DW_AT_ranges => {
+ rangelistptr!();
+ }
+ constants::DW_AT_trampoline => {
+ address!();
+ flag!();
+ reference!();
+ string!();
+ }
+ constants::DW_AT_call_column => {
+ constant!(udata_value, Udata);
+ }
+ constants::DW_AT_call_file => {
+ constant!(udata_value, FileIndex);
+ }
+ constants::DW_AT_call_line => {
+ constant!(udata_value, Udata);
+ }
+ constants::DW_AT_description => {
+ string!();
+ }
+ constants::DW_AT_binary_scale => {
+ // TODO: constant
+ }
+ constants::DW_AT_decimal_scale => {
+ // TODO: constant
+ }
+ constants::DW_AT_small => {
+ reference!();
+ }
+ constants::DW_AT_decimal_sign => {
+ constant!(u8_value, DecimalSign, DwDs);
+ }
+ constants::DW_AT_digit_count => {
+ // TODO: constant
+ }
+ constants::DW_AT_picture_string => {
+ string!();
+ }
+ constants::DW_AT_mutable => {
+ flag!();
+ }
+ constants::DW_AT_threads_scaled => {
+ flag!();
+ }
+ constants::DW_AT_explicit => {
+ flag!();
+ }
+ constants::DW_AT_object_pointer => {
+ reference!();
+ }
+ constants::DW_AT_endianity => {
+ constant!(u8_value, Endianity, DwEnd);
+ }
+ constants::DW_AT_elemental => {
+ flag!();
+ }
+ constants::DW_AT_pure => {
+ flag!();
+ }
+ constants::DW_AT_recursive => {
+ flag!();
+ }
+ constants::DW_AT_signature => {
+ reference!();
+ }
+ constants::DW_AT_main_subprogram => {
+ flag!();
+ }
+ constants::DW_AT_data_bit_offset => {
+ // TODO: constant
+ }
+ constants::DW_AT_const_expr => {
+ flag!();
+ }
+ constants::DW_AT_enum_class => {
+ flag!();
+ }
+ constants::DW_AT_linkage_name => {
+ string!();
+ }
+ constants::DW_AT_string_length_bit_size => {
+ // TODO: constant
+ }
+ constants::DW_AT_string_length_byte_size => {
+ // TODO: constant
+ }
+ constants::DW_AT_rank => {
+ // TODO: constant
+ exprloc!();
+ }
+ constants::DW_AT_str_offsets_base => {
+ stroffsetsptr!();
+ }
+ constants::DW_AT_addr_base | constants::DW_AT_GNU_addr_base => {
+ addrptr!();
+ }
+ constants::DW_AT_rnglists_base | constants::DW_AT_GNU_ranges_base => {
+ rnglistsptr!();
+ }
+ constants::DW_AT_dwo_name => {
+ string!();
+ }
+ constants::DW_AT_reference => {
+ flag!();
+ }
+ constants::DW_AT_rvalue_reference => {
+ flag!();
+ }
+ constants::DW_AT_macros => {
+ macroptr!();
+ }
+ constants::DW_AT_call_all_calls => {
+ flag!();
+ }
+ constants::DW_AT_call_all_source_calls => {
+ flag!();
+ }
+ constants::DW_AT_call_all_tail_calls => {
+ flag!();
+ }
+ constants::DW_AT_call_return_pc => {
+ address!();
+ }
+ constants::DW_AT_call_value => {
+ exprloc!();
+ }
+ constants::DW_AT_call_origin => {
+ exprloc!();
+ }
+ constants::DW_AT_call_parameter => {
+ reference!();
+ }
+ constants::DW_AT_call_pc => {
+ address!();
+ }
+ constants::DW_AT_call_tail_call => {
+ flag!();
+ }
+ constants::DW_AT_call_target => {
+ exprloc!();
+ }
+ constants::DW_AT_call_target_clobbered => {
+ exprloc!();
+ }
+ constants::DW_AT_call_data_location => {
+ exprloc!();
+ }
+ constants::DW_AT_call_data_value => {
+ exprloc!();
+ }
+ constants::DW_AT_noreturn => {
+ flag!();
+ }
+ constants::DW_AT_alignment => {
+ // TODO: constant
+ }
+ constants::DW_AT_export_symbols => {
+ flag!();
+ }
+ constants::DW_AT_deleted => {
+ flag!();
+ }
+ constants::DW_AT_defaulted => {
+ // TODO: constant
+ }
+ constants::DW_AT_loclists_base => {
+ loclistsptr!();
+ }
+ constants::DW_AT_GNU_dwo_id => {
+ dwoid!();
+ }
+ _ => {}
+ }
+ self.value.clone()
+ }
+
+ /// Try to convert this attribute's value to a u8.
+ #[inline]
+ pub fn u8_value(&self) -> Option<u8> {
+ self.value.u8_value()
+ }
+
+ /// Try to convert this attribute's value to a u16.
+ #[inline]
+ pub fn u16_value(&self) -> Option<u16> {
+ self.value.u16_value()
+ }
+
+ /// Try to convert this attribute's value to an unsigned integer.
+ #[inline]
+ pub fn udata_value(&self) -> Option<u64> {
+ self.value.udata_value()
+ }
+
+ /// Try to convert this attribute's value to a signed integer.
+ #[inline]
+ pub fn sdata_value(&self) -> Option<i64> {
+ self.value.sdata_value()
+ }
+
+ /// Try to convert this attribute's value to an offset.
+ #[inline]
+ pub fn offset_value(&self) -> Option<R::Offset> {
+ self.value.offset_value()
+ }
+
+ /// Try to convert this attribute's value to an expression or location buffer.
+ ///
+ /// Expressions and locations may be `DW_FORM_block*` or `DW_FORM_exprloc`.
+ /// The standard doesn't mention `DW_FORM_block*` as a possible form, but
+ /// it is encountered in practice.
+ #[inline]
+ pub fn exprloc_value(&self) -> Option<Expression<R>> {
+ self.value.exprloc_value()
+ }
+
+ /// Try to return this attribute's value as a string slice.
+ ///
+ /// If this attribute's value is either an inline `DW_FORM_string` string,
+ /// or a `DW_FORM_strp` reference to an offset into the `.debug_str`
+ /// section, return the attribute's string value as `Some`. Other attribute
+ /// value forms are returned as `None`.
+ ///
+ /// Warning: this function does not handle all possible string forms.
+ /// Use `Dwarf::attr_string` instead.
+ #[inline]
+ pub fn string_value(&self, debug_str: &DebugStr<R>) -> Option<R> {
+ self.value.string_value(debug_str)
+ }
+
+ /// Try to return this attribute's value as a string slice.
+ ///
+ /// If this attribute's value is either an inline `DW_FORM_string` string,
+ /// or a `DW_FORM_strp` reference to an offset into the `.debug_str`
+ /// section, or a `DW_FORM_strp_sup` reference to an offset into a supplementary
+ /// object file, return the attribute's string value as `Some`. Other attribute
+ /// value forms are returned as `None`.
+ ///
+ /// Warning: this function does not handle all possible string forms.
+ /// Use `Dwarf::attr_string` instead.
+ #[inline]
+ pub fn string_value_sup(
+ &self,
+ debug_str: &DebugStr<R>,
+ debug_str_sup: Option<&DebugStr<R>>,
+ ) -> Option<R> {
+ self.value.string_value_sup(debug_str, debug_str_sup)
+ }
+}
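+
+// The difference between `raw_value` and `value` matters mostly for section
+// offsets. For a DWARF 4 `DW_AT_stmt_list` attribute (offsets are illustrative):
+//
+//     attr.raw_value()  =>  AttributeValue::SecOffset(0x40)
+//     attr.value()      =>  AttributeValue::DebugLineRef(DebugLineOffset(0x40))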
+
+impl<R, Offset> AttributeValue<R, Offset>
+where
+ R: Reader<Offset = Offset>,
+ Offset: ReaderOffset,
+{
+ /// Try to convert this attribute's value to a u8.
+ pub fn u8_value(&self) -> Option<u8> {
+ if let Some(value) = self.udata_value() {
+ if value <= u64::from(u8::MAX) {
+ return Some(value as u8);
+ }
+ }
+ None
+ }
+
+ /// Try to convert this attribute's value to a u16.
+ pub fn u16_value(&self) -> Option<u16> {
+ if let Some(value) = self.udata_value() {
+ if value <= u64::from(u16::MAX) {
+ return Some(value as u16);
+ }
+ }
+ None
+ }
+
+ /// Try to convert this attribute's value to an unsigned integer.
+ pub fn udata_value(&self) -> Option<u64> {
+ Some(match *self {
+ AttributeValue::Data1(data) => u64::from(data),
+ AttributeValue::Data2(data) => u64::from(data),
+ AttributeValue::Data4(data) => u64::from(data),
+ AttributeValue::Data8(data) => data,
+ AttributeValue::Udata(data) => data,
+ AttributeValue::Sdata(data) => {
+ if data < 0 {
+ // Maybe we should emit a warning here
+ return None;
+ }
+ data as u64
+ }
+ _ => return None,
+ })
+ }
+
+ /// Try to convert this attribute's value to a signed integer.
+ pub fn sdata_value(&self) -> Option<i64> {
+ Some(match *self {
+ AttributeValue::Data1(data) => i64::from(data as i8),
+ AttributeValue::Data2(data) => i64::from(data as i16),
+ AttributeValue::Data4(data) => i64::from(data as i32),
+ AttributeValue::Data8(data) => data as i64,
+ AttributeValue::Sdata(data) => data,
+ AttributeValue::Udata(data) => {
+ if data > i64::max_value() as u64 {
+ // Maybe we should emit a warning here
+ return None;
+ }
+ data as i64
+ }
+ _ => return None,
+ })
+ }
+
+ /// Try to convert this attribute's value to an offset.
+ pub fn offset_value(&self) -> Option<R::Offset> {
+ // While offsets will be DW_FORM_data4/8 in DWARF version 2/3,
+        // these have already been converted to `SecOffset`.
+ if let AttributeValue::SecOffset(offset) = *self {
+ Some(offset)
+ } else {
+ None
+ }
+ }
+
+ /// Try to convert this attribute's value to an expression or location buffer.
+ ///
+ /// Expressions and locations may be `DW_FORM_block*` or `DW_FORM_exprloc`.
+ /// The standard doesn't mention `DW_FORM_block*` as a possible form, but
+ /// it is encountered in practice.
+ pub fn exprloc_value(&self) -> Option<Expression<R>> {
+ Some(match *self {
+ AttributeValue::Block(ref data) => Expression(data.clone()),
+ AttributeValue::Exprloc(ref data) => data.clone(),
+ _ => return None,
+ })
+ }
+
+ /// Try to return this attribute's value as a string slice.
+ ///
+ /// If this attribute's value is either an inline `DW_FORM_string` string,
+ /// or a `DW_FORM_strp` reference to an offset into the `.debug_str`
+ /// section, return the attribute's string value as `Some`. Other attribute
+ /// value forms are returned as `None`.
+ ///
+ /// Warning: this function does not handle all possible string forms.
+ /// Use `Dwarf::attr_string` instead.
+ pub fn string_value(&self, debug_str: &DebugStr<R>) -> Option<R> {
+ match *self {
+ AttributeValue::String(ref string) => Some(string.clone()),
+ AttributeValue::DebugStrRef(offset) => debug_str.get_str(offset).ok(),
+ _ => None,
+ }
+ }
+
+ /// Try to return this attribute's value as a string slice.
+ ///
+ /// If this attribute's value is either an inline `DW_FORM_string` string,
+ /// or a `DW_FORM_strp` reference to an offset into the `.debug_str`
+ /// section, or a `DW_FORM_strp_sup` reference to an offset into a supplementary
+ /// object file, return the attribute's string value as `Some`. Other attribute
+ /// value forms are returned as `None`.
+ ///
+ /// Warning: this function does not handle all possible string forms.
+ /// Use `Dwarf::attr_string` instead.
+ pub fn string_value_sup(
+ &self,
+ debug_str: &DebugStr<R>,
+ debug_str_sup: Option<&DebugStr<R>>,
+ ) -> Option<R> {
+ match *self {
+ AttributeValue::String(ref string) => Some(string.clone()),
+ AttributeValue::DebugStrRef(offset) => debug_str.get_str(offset).ok(),
+ AttributeValue::DebugStrRefSup(offset) => {
+ debug_str_sup.and_then(|s| s.get_str(offset).ok())
+ }
+ _ => None,
+ }
+ }
+}
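+
+// The conversion helpers are deliberately lossy. A hedged sketch of the edge
+// cases (`Value` is a local alias used here for illustration only):
+//
+//     type Value<'a> = AttributeValue<EndianSlice<'a, LittleEndian>>;
+//     assert_eq!(Value::Data1(200).udata_value(), Some(200));
+//     assert_eq!(Value::Sdata(-1).udata_value(), None);       // negative values don't convert
+//     assert_eq!(Value::Data1(200).sdata_value(), Some(-56)); // reinterpreted as i8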
+
+fn length_u8_value<R: Reader>(input: &mut R) -> Result<R> {
+ let len = input.read_u8().map(R::Offset::from_u8)?;
+ input.split(len)
+}
+
+fn length_u16_value<R: Reader>(input: &mut R) -> Result<R> {
+ let len = input.read_u16().map(R::Offset::from_u16)?;
+ input.split(len)
+}
+
+fn length_u32_value<R: Reader>(input: &mut R) -> Result<R> {
+ let len = input.read_u32().map(R::Offset::from_u32)?;
+ input.split(len)
+}
+
+fn length_uleb128_value<R: Reader>(input: &mut R) -> Result<R> {
+ let len = input.read_uleb128().and_then(R::Offset::from_u64)?;
+ input.split(len)
+}
+
+// Return true if the given `name` can be a section offset in DWARF version 2/3.
+// This is required to correctly handle relocations.
+fn allow_section_offset(name: constants::DwAt, version: u16) -> bool {
+ match name {
+ constants::DW_AT_location
+ | constants::DW_AT_stmt_list
+ | constants::DW_AT_string_length
+ | constants::DW_AT_return_addr
+ | constants::DW_AT_start_scope
+ | constants::DW_AT_frame_base
+ | constants::DW_AT_macro_info
+ | constants::DW_AT_macros
+ | constants::DW_AT_segment
+ | constants::DW_AT_static_link
+ | constants::DW_AT_use_location
+ | constants::DW_AT_vtable_elem_location
+ | constants::DW_AT_ranges => true,
+ constants::DW_AT_data_member_location => version == 2 || version == 3,
+ _ => false,
+ }
+}
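+
+// This predicate is what lets `parse_attribute` below treat, for example, a DWARF 3
+// `DW_AT_location` encoded as `DW_FORM_data4` as a relocatable section offset
+// (`AttributeValue::SecOffset`) instead of a plain `Data4` constant.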
+
+pub(crate) fn parse_attribute<'unit, R: Reader>(
+ input: &mut R,
+ encoding: Encoding,
+ spec: AttributeSpecification,
+) -> Result<Attribute<R>> {
+ let mut form = spec.form();
+ loop {
+ let value = match form {
+ constants::DW_FORM_indirect => {
+ let dynamic_form = input.read_uleb128_u16()?;
+ form = constants::DwForm(dynamic_form);
+ continue;
+ }
+ constants::DW_FORM_addr => {
+ let addr = input.read_address(encoding.address_size)?;
+ AttributeValue::Addr(addr)
+ }
+ constants::DW_FORM_block1 => {
+ let block = length_u8_value(input)?;
+ AttributeValue::Block(block)
+ }
+ constants::DW_FORM_block2 => {
+ let block = length_u16_value(input)?;
+ AttributeValue::Block(block)
+ }
+ constants::DW_FORM_block4 => {
+ let block = length_u32_value(input)?;
+ AttributeValue::Block(block)
+ }
+ constants::DW_FORM_block => {
+ let block = length_uleb128_value(input)?;
+ AttributeValue::Block(block)
+ }
+ constants::DW_FORM_data1 => {
+ let data = input.read_u8()?;
+ AttributeValue::Data1(data)
+ }
+ constants::DW_FORM_data2 => {
+ let data = input.read_u16()?;
+ AttributeValue::Data2(data)
+ }
+ constants::DW_FORM_data4 => {
+ // DWARF version 2/3 may use DW_FORM_data4/8 for section offsets.
+ // Ensure we handle relocations here.
+ if encoding.format == Format::Dwarf32
+ && allow_section_offset(spec.name(), encoding.version)
+ {
+ let offset = input.read_offset(Format::Dwarf32)?;
+ AttributeValue::SecOffset(offset)
+ } else {
+ let data = input.read_u32()?;
+ AttributeValue::Data4(data)
+ }
+ }
+ constants::DW_FORM_data8 => {
+ // DWARF version 2/3 may use DW_FORM_data4/8 for section offsets.
+ // Ensure we handle relocations here.
+ if encoding.format == Format::Dwarf64
+ && allow_section_offset(spec.name(), encoding.version)
+ {
+ let offset = input.read_offset(Format::Dwarf64)?;
+ AttributeValue::SecOffset(offset)
+ } else {
+ let data = input.read_u64()?;
+ AttributeValue::Data8(data)
+ }
+ }
+ constants::DW_FORM_data16 => {
+ let block = input.split(R::Offset::from_u8(16))?;
+ AttributeValue::Block(block)
+ }
+ constants::DW_FORM_udata => {
+ let data = input.read_uleb128()?;
+ AttributeValue::Udata(data)
+ }
+ constants::DW_FORM_sdata => {
+ let data = input.read_sleb128()?;
+ AttributeValue::Sdata(data)
+ }
+ constants::DW_FORM_exprloc => {
+ let block = length_uleb128_value(input)?;
+ AttributeValue::Exprloc(Expression(block))
+ }
+ constants::DW_FORM_flag => {
+ let present = input.read_u8()?;
+ AttributeValue::Flag(present != 0)
+ }
+ constants::DW_FORM_flag_present => {
+ // DW_FORM_flag_present is a compile-time constant: the flag is always
+ // true, and no value is stored in the serialized DIE; only the
+ // abbreviation records its presence.
+ AttributeValue::Flag(true)
+ }
+ constants::DW_FORM_sec_offset => {
+ let offset = input.read_offset(encoding.format)?;
+ AttributeValue::SecOffset(offset)
+ }
+ constants::DW_FORM_ref1 => {
+ let reference = input.read_u8().map(R::Offset::from_u8)?;
+ AttributeValue::UnitRef(UnitOffset(reference))
+ }
+ constants::DW_FORM_ref2 => {
+ let reference = input.read_u16().map(R::Offset::from_u16)?;
+ AttributeValue::UnitRef(UnitOffset(reference))
+ }
+ constants::DW_FORM_ref4 => {
+ let reference = input.read_u32().map(R::Offset::from_u32)?;
+ AttributeValue::UnitRef(UnitOffset(reference))
+ }
+ constants::DW_FORM_ref8 => {
+ let reference = input.read_u64().and_then(R::Offset::from_u64)?;
+ AttributeValue::UnitRef(UnitOffset(reference))
+ }
+ constants::DW_FORM_ref_udata => {
+ let reference = input.read_uleb128().and_then(R::Offset::from_u64)?;
+ AttributeValue::UnitRef(UnitOffset(reference))
+ }
+ constants::DW_FORM_ref_addr => {
+ // This is an offset, but DWARF version 2 specifies that DW_FORM_ref_addr
+ // has the same size as an address on the target system. This was changed
+ // in DWARF version 3.
+ let offset = if encoding.version == 2 {
+ input.read_sized_offset(encoding.address_size)?
+ } else {
+ input.read_offset(encoding.format)?
+ };
+ AttributeValue::DebugInfoRef(DebugInfoOffset(offset))
+ }
+ constants::DW_FORM_ref_sig8 => {
+ let signature = input.read_u64()?;
+ AttributeValue::DebugTypesRef(DebugTypeSignature(signature))
+ }
+ constants::DW_FORM_ref_sup4 => {
+ let offset = input.read_u32().map(R::Offset::from_u32)?;
+ AttributeValue::DebugInfoRefSup(DebugInfoOffset(offset))
+ }
+ constants::DW_FORM_ref_sup8 => {
+ let offset = input.read_u64().and_then(R::Offset::from_u64)?;
+ AttributeValue::DebugInfoRefSup(DebugInfoOffset(offset))
+ }
+ constants::DW_FORM_GNU_ref_alt => {
+ let offset = input.read_offset(encoding.format)?;
+ AttributeValue::DebugInfoRefSup(DebugInfoOffset(offset))
+ }
+ constants::DW_FORM_string => {
+ let string = input.read_null_terminated_slice()?;
+ AttributeValue::String(string)
+ }
+ constants::DW_FORM_strp => {
+ let offset = input.read_offset(encoding.format)?;
+ AttributeValue::DebugStrRef(DebugStrOffset(offset))
+ }
+ constants::DW_FORM_strp_sup | constants::DW_FORM_GNU_strp_alt => {
+ let offset = input.read_offset(encoding.format)?;
+ AttributeValue::DebugStrRefSup(DebugStrOffset(offset))
+ }
+ constants::DW_FORM_line_strp => {
+ let offset = input.read_offset(encoding.format)?;
+ AttributeValue::DebugLineStrRef(DebugLineStrOffset(offset))
+ }
+ constants::DW_FORM_implicit_const => {
+ let data = spec
+ .implicit_const_value()
+ .ok_or(Error::InvalidImplicitConst)?;
+ AttributeValue::Sdata(data)
+ }
+ constants::DW_FORM_strx | constants::DW_FORM_GNU_str_index => {
+ let index = input.read_uleb128().and_then(R::Offset::from_u64)?;
+ AttributeValue::DebugStrOffsetsIndex(DebugStrOffsetsIndex(index))
+ }
+ constants::DW_FORM_strx1 => {
+ let index = input.read_u8().map(R::Offset::from_u8)?;
+ AttributeValue::DebugStrOffsetsIndex(DebugStrOffsetsIndex(index))
+ }
+ constants::DW_FORM_strx2 => {
+ let index = input.read_u16().map(R::Offset::from_u16)?;
+ AttributeValue::DebugStrOffsetsIndex(DebugStrOffsetsIndex(index))
+ }
+ constants::DW_FORM_strx3 => {
+ let index = input.read_uint(3).and_then(R::Offset::from_u64)?;
+ AttributeValue::DebugStrOffsetsIndex(DebugStrOffsetsIndex(index))
+ }
+ constants::DW_FORM_strx4 => {
+ let index = input.read_u32().map(R::Offset::from_u32)?;
+ AttributeValue::DebugStrOffsetsIndex(DebugStrOffsetsIndex(index))
+ }
+ constants::DW_FORM_addrx | constants::DW_FORM_GNU_addr_index => {
+ let index = input.read_uleb128().and_then(R::Offset::from_u64)?;
+ AttributeValue::DebugAddrIndex(DebugAddrIndex(index))
+ }
+ constants::DW_FORM_addrx1 => {
+ let index = input.read_u8().map(R::Offset::from_u8)?;
+ AttributeValue::DebugAddrIndex(DebugAddrIndex(index))
+ }
+ constants::DW_FORM_addrx2 => {
+ let index = input.read_u16().map(R::Offset::from_u16)?;
+ AttributeValue::DebugAddrIndex(DebugAddrIndex(index))
+ }
+ constants::DW_FORM_addrx3 => {
+ let index = input.read_uint(3).and_then(R::Offset::from_u64)?;
+ AttributeValue::DebugAddrIndex(DebugAddrIndex(index))
+ }
+ constants::DW_FORM_addrx4 => {
+ let index = input.read_u32().map(R::Offset::from_u32)?;
+ AttributeValue::DebugAddrIndex(DebugAddrIndex(index))
+ }
+ constants::DW_FORM_loclistx => {
+ let index = input.read_uleb128().and_then(R::Offset::from_u64)?;
+ AttributeValue::DebugLocListsIndex(DebugLocListsIndex(index))
+ }
+ constants::DW_FORM_rnglistx => {
+ let index = input.read_uleb128().and_then(R::Offset::from_u64)?;
+ AttributeValue::DebugRngListsIndex(DebugRngListsIndex(index))
+ }
+ _ => {
+ return Err(Error::UnknownForm);
+ }
+ };
+ let attr = Attribute {
+ name: spec.name(),
+ value,
+ };
+ return Ok(attr);
+ }
+}
+
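+/// Skip over the values of the given attribute specifications without
+/// materializing them, batching consecutive fixed-size forms into a single
+/// `skip` call where possible.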
+pub(crate) fn skip_attributes<'unit, R: Reader>(
+ input: &mut R,
+ encoding: Encoding,
+ specs: &[AttributeSpecification],
+) -> Result<()> {
+ let mut skip_bytes = R::Offset::from_u8(0);
+ for spec in specs {
+ let mut form = spec.form();
+ loop {
+ if let Some(len) = get_attribute_size(form, encoding) {
+ // We know the length of this attribute. Accumulate that length.
+ skip_bytes += R::Offset::from_u8(len);
+ break;
+ }
+
+ // We have encountered a variable-length attribute.
+ if skip_bytes != R::Offset::from_u8(0) {
+ // Skip the accumulated skip bytes and then read the attribute normally.
+ input.skip(skip_bytes)?;
+ skip_bytes = R::Offset::from_u8(0);
+ }
+
+ match form {
+ constants::DW_FORM_indirect => {
+ let dynamic_form = input.read_uleb128_u16()?;
+ form = constants::DwForm(dynamic_form);
+ continue;
+ }
+ constants::DW_FORM_block1 => {
+ skip_bytes = input.read_u8().map(R::Offset::from_u8)?;
+ }
+ constants::DW_FORM_block2 => {
+ skip_bytes = input.read_u16().map(R::Offset::from_u16)?;
+ }
+ constants::DW_FORM_block4 => {
+ skip_bytes = input.read_u32().map(R::Offset::from_u32)?;
+ }
+ constants::DW_FORM_block | constants::DW_FORM_exprloc => {
+ skip_bytes = input.read_uleb128().and_then(R::Offset::from_u64)?;
+ }
+ constants::DW_FORM_string => {
+ let _ = input.read_null_terminated_slice()?;
+ }
+ constants::DW_FORM_udata
+ | constants::DW_FORM_sdata
+ | constants::DW_FORM_ref_udata
+ | constants::DW_FORM_strx
+ | constants::DW_FORM_GNU_str_index
+ | constants::DW_FORM_addrx
+ | constants::DW_FORM_GNU_addr_index
+ | constants::DW_FORM_loclistx
+ | constants::DW_FORM_rnglistx => {
+ input.skip_leb128()?;
+ }
+ _ => {
+ return Err(Error::UnknownForm);
+ }
+ };
+ break;
+ }
+ }
+ if skip_bytes != R::Offset::from_u8(0) {
+ // Skip the remaining accumulated skip bytes.
+ input.skip(skip_bytes)?;
+ }
+ Ok(())
+}
+
+/// An iterator over a particular entry's attributes.
+///
+/// See [the documentation for
+/// `DebuggingInformationEntry::attrs()`](./struct.DebuggingInformationEntry.html#method.attrs)
+/// for details.
+///
+/// Can be [used with
+/// `FallibleIterator`](./index.html#using-with-fallibleiterator).
+#[derive(Clone, Copy, Debug)]
+pub struct AttrsIter<'abbrev, 'entry, 'unit, R: Reader> {
+ input: R,
+ attributes: &'abbrev [AttributeSpecification],
+ entry: &'entry DebuggingInformationEntry<'abbrev, 'unit, R>,
+}
+
+impl<'abbrev, 'entry, 'unit, R: Reader> AttrsIter<'abbrev, 'entry, 'unit, R> {
+ /// Advance the iterator and return the next attribute.
+ ///
+ /// Returns `None` when iteration is finished. If an error
+ /// occurs while parsing the next attribute, then this error
+ /// is returned, and all subsequent calls return `None`.
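+ ///
+ /// A hedged sketch of a typical loop over an entry's attributes (the entry
+ /// is assumed to come from a cursor or tree traversal):
+ ///
+ /// ```
+ /// # fn example<R: gimli::Reader>(
+ /// # entry: &gimli::DebuggingInformationEntry<R>,
+ /// # ) -> Result<(), gimli::Error> {
+ /// let mut attrs = entry.attrs();
+ /// while let Some(attr) = attrs.next()? {
+ /// println!("{:?} = {:?}", attr.name(), attr.value());
+ /// }
+ /// # Ok(())
+ /// # }
+ /// ```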
+ #[allow(clippy::inline_always)]
+ #[inline(always)]
+ pub fn next(&mut self) -> Result<Option<Attribute<R>>> {
+ if self.attributes.is_empty() {
+ // Now that we have parsed all of the attributes, we know where
+ // either (1) this entry's children start, if the abbreviation says
+ // this entry has children, or (2) this entry's siblings begin.
+ if let Some(end) = self.entry.attrs_len.get() {
+ debug_assert_eq!(end, self.input.offset_from(&self.entry.attrs_slice));
+ } else {
+ self.entry
+ .attrs_len
+ .set(Some(self.input.offset_from(&self.entry.attrs_slice)));
+ }
+
+ return Ok(None);
+ }
+
+ let spec = self.attributes[0];
+ let rest_spec = &self.attributes[1..];
+ match parse_attribute(&mut self.input, self.entry.unit.encoding(), spec) {
+ Ok(attr) => {
+ self.attributes = rest_spec;
+ Ok(Some(attr))
+ }
+ Err(e) => {
+ self.input.empty();
+ Err(e)
+ }
+ }
+ }
+}
+
+#[cfg(feature = "fallible-iterator")]
+impl<'abbrev, 'entry, 'unit, R: Reader> fallible_iterator::FallibleIterator
+ for AttrsIter<'abbrev, 'entry, 'unit, R>
+{
+ type Item = Attribute<R>;
+ type Error = Error;
+
+ fn next(&mut self) -> ::core::result::Result<Option<Self::Item>, Self::Error> {
+ AttrsIter::next(self)
+ }
+}
+
+/// A raw reader of the data that defines the Debugging Information Entries.
+///
+/// `EntriesRaw` provides primitives to read the components of Debugging Information
+/// Entries (DIEs). A DIE consists of an abbreviation code (read with `read_abbreviation`)
+/// followed by a number of attributes (read with `read_attribute`).
+/// The user must provide the control flow to read these correctly.
+/// In particular, all attributes must always be read before reading another
+/// abbreviation code.
+///
+/// `EntriesRaw` lacks some features of `EntriesCursor`, such as the ability to skip
+/// to the next sibling DIE. However, this also allows it to optimize better, since it
+/// does not need to perform the extra bookkeeping required to support these features,
+/// and thus it is suitable for cases where performance is important.
+///
+/// ## Example Usage
+/// ```rust,no_run
+/// # fn example() -> Result<(), gimli::Error> {
+/// # let debug_info = gimli::DebugInfo::new(&[], gimli::LittleEndian);
+/// # let get_some_unit = || debug_info.units().next().unwrap().unwrap();
+/// let unit = get_some_unit();
+/// # let debug_abbrev = gimli::DebugAbbrev::new(&[], gimli::LittleEndian);
+/// # let get_abbrevs_for_unit = |_| unit.abbreviations(&debug_abbrev).unwrap();
+/// let abbrevs = get_abbrevs_for_unit(&unit);
+///
+/// let mut entries = unit.entries_raw(&abbrevs, None)?;
+/// while !entries.is_empty() {
+/// let abbrev = if let Some(abbrev) = entries.read_abbreviation()? {
+/// abbrev
+/// } else {
+/// // Null entry with no attributes.
+/// continue
+/// };
+/// match abbrev.tag() {
+/// gimli::DW_TAG_subprogram => {
+/// // Loop over attributes for DIEs we care about.
+/// for spec in abbrev.attributes() {
+/// let attr = entries.read_attribute(*spec)?;
+/// match attr.name() {
+/// // Handle attributes.
+/// _ => {}
+/// }
+/// }
+/// }
+/// _ => {
+/// // Skip attributes for DIEs we don't care about.
+ /// entries.skip_attributes(abbrev.attributes())?;
+/// }
+/// }
+/// }
+/// # unreachable!()
+/// # }
+/// ```
+#[derive(Clone, Debug)]
+pub struct EntriesRaw<'abbrev, 'unit, R>
+where
+ R: Reader,
+{
+ input: R,
+ unit: &'unit UnitHeader<R>,
+ abbreviations: &'abbrev Abbreviations,
+ depth: isize,
+}
+
+impl<'abbrev, 'unit, R: Reader> EntriesRaw<'abbrev, 'unit, R> {
+ /// Return true if there is no more input.
+ #[inline]
+ pub fn is_empty(&self) -> bool {
+ self.input.is_empty()
+ }
+
+ /// Return the unit offset at which the reader will read next.
+ ///
+ /// If you want the offset of the next entry, then this must be called prior to reading
+ /// the next entry.
+ pub fn next_offset(&self) -> UnitOffset<R::Offset> {
+ UnitOffset(self.unit.header_size() + self.input.offset_from(&self.unit.entries_buf))
+ }
+
+ /// Return the depth of the next entry.
+ ///
+ /// This depth is updated when `read_abbreviation` is called, and is updated
+ /// based on null entries and the `has_children` field in the abbreviation.
+ #[inline]
+ pub fn next_depth(&self) -> isize {
+ self.depth
+ }
+
+ /// Read an abbreviation code and lookup the corresponding `Abbreviation`.
+ ///
+ /// Returns `Ok(None)` for null entries.
+ #[inline]
+ pub fn read_abbreviation(&mut self) -> Result<Option<&'abbrev Abbreviation>> {
+ let code = self.input.read_uleb128()?;
+ if code == 0 {
+ self.depth -= 1;
+ return Ok(None);
+ };
+ let abbrev = self
+ .abbreviations
+ .get(code)
+ .ok_or(Error::UnknownAbbreviation)?;
+ if abbrev.has_children() {
+ self.depth += 1;
+ }
+ Ok(Some(abbrev))
+ }
+
+ /// Read an attribute.
+ #[inline]
+ pub fn read_attribute(&mut self, spec: AttributeSpecification) -> Result<Attribute<R>> {
+ parse_attribute(&mut self.input, self.unit.encoding(), spec)
+ }
+
+ /// Skip all the attributes of an abbreviation.
+ #[inline]
+ pub fn skip_attributes(&mut self, specs: &[AttributeSpecification]) -> Result<()> {
+ skip_attributes(&mut self.input, self.unit.encoding(), specs)
+ }
+}
+
+/// A cursor into the Debugging Information Entries tree for a compilation unit.
+///
+/// The `EntriesCursor` can traverse the DIE tree in DFS order using `next_dfs()`,
+/// or skip to the next sibling of the entry the cursor is currently pointing to
+/// using `next_sibling()`.
+///
+/// It is also possible to traverse the DIE tree at a lower abstraction level
+/// using `next_entry()`. This method does not skip over null entries, or provide
+/// any indication of the current tree depth. In this case, you must use `current()`
+/// to obtain the current entry, and `current().has_children()` to determine if
+/// the entry following the current entry will be a sibling or child. `current()`
+/// will return `None` if the current entry is a null entry, which signifies the
+/// end of the current tree depth.
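+ ///
+ /// A minimal sketch of the low-level `next_entry()` loop described above
+ /// (`unit` and `abbrevs` are assumed to have been parsed as in the examples
+ /// on `next_dfs()` and `next_sibling()`):
+ ///
+ /// ```
+ /// # fn example<R: gimli::Reader>(
+ /// # unit: &gimli::UnitHeader<R>,
+ /// # abbrevs: &gimli::Abbreviations,
+ /// # ) -> Result<(), gimli::Error> {
+ /// let mut cursor = unit.entries(abbrevs);
+ /// while cursor.next_entry()?.is_some() {
+ /// match cursor.current() {
+ /// Some(entry) => println!("entry: {:?}", entry.tag()),
+ /// None => println!("null entry: end of the current sibling list"),
+ /// }
+ /// }
+ /// # Ok(())
+ /// # }
+ /// ```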
+#[derive(Clone, Debug)]
+pub struct EntriesCursor<'abbrev, 'unit, R>
+where
+ R: Reader,
+{
+ input: R,
+ unit: &'unit UnitHeader<R>,
+ abbreviations: &'abbrev Abbreviations,
+ cached_current: Option<DebuggingInformationEntry<'abbrev, 'unit, R>>,
+ delta_depth: isize,
+}
+
+impl<'abbrev, 'unit, R: Reader> EntriesCursor<'abbrev, 'unit, R> {
+ /// Get a reference to the entry that the cursor is currently pointing to.
+ ///
+ /// If the cursor is not pointing at an entry, or if the current entry is a
+ /// null entry, then `None` is returned.
+ #[inline]
+ pub fn current(&self) -> Option<&DebuggingInformationEntry<'abbrev, 'unit, R>> {
+ self.cached_current.as_ref()
+ }
+
+ /// Move the cursor to the next DIE in the tree.
+ ///
+ /// Returns `Some` if there is a next entry, even if this entry is null.
+ /// If there is no next entry, then `None` is returned.
+ pub fn next_entry(&mut self) -> Result<Option<()>> {
+ if let Some(ref current) = self.cached_current {
+ self.input = current.after_attrs()?;
+ }
+
+ if self.input.is_empty() {
+ self.cached_current = None;
+ self.delta_depth = 0;
+ return Ok(None);
+ }
+
+ match DebuggingInformationEntry::parse(&mut self.input, self.unit, self.abbreviations) {
+ Ok(Some(entry)) => {
+ self.delta_depth = entry.has_children() as isize;
+ self.cached_current = Some(entry);
+ Ok(Some(()))
+ }
+ Ok(None) => {
+ self.delta_depth = -1;
+ self.cached_current = None;
+ Ok(Some(()))
+ }
+ Err(e) => {
+ self.input.empty();
+ self.delta_depth = 0;
+ self.cached_current = None;
+ Err(e)
+ }
+ }
+ }
+
+ /// Move the cursor to the next DIE in the tree in DFS order.
+ ///
+ /// Upon successful movement of the cursor, return the delta traversal
+ /// depth and the entry:
+ ///
+ /// * If we moved down into the previous current entry's children, we get
+ /// `Some((1, entry))`.
+ ///
+ /// * If we moved to the previous current entry's sibling, we get
+ /// `Some((0, entry))`.
+ ///
+ /// * If the previous entry does not have any siblings and we move up to
+ /// its parent's next sibling, then we get `Some((-1, entry))`. Note that
+ /// if the parent doesn't have a next sibling, then it could go up to the
+ /// parent's parent's next sibling and return `Some((-2, entry))`, etc.
+ ///
+ /// If there is no next entry, then `None` is returned.
+ ///
+ /// Here is an example that finds the first entry in a compilation unit that
+ /// does not have any children.
+ ///
+ /// ```
+ /// # use gimli::{DebugAbbrev, DebugInfo, LittleEndian};
+ /// # let info_buf = [
+ /// # // Compilation unit header
+ /// #
+ /// # // 32-bit unit length = 25
+ /// # 0x19, 0x00, 0x00, 0x00,
+ /// # // Version 4
+ /// # 0x04, 0x00,
+ /// # // debug_abbrev_offset
+ /// # 0x00, 0x00, 0x00, 0x00,
+ /// # // Address size
+ /// # 0x04,
+ /// #
+ /// # // DIEs
+ /// #
+ /// # // Abbreviation code
+ /// # 0x01,
+ /// # // Attribute of form DW_FORM_string = "foo\0"
+ /// # 0x66, 0x6f, 0x6f, 0x00,
+ /// #
+ /// # // Children
+ /// #
+ /// # // Abbreviation code
+ /// # 0x01,
+ /// # // Attribute of form DW_FORM_string = "foo\0"
+ /// # 0x66, 0x6f, 0x6f, 0x00,
+ /// #
+ /// # // Children
+ /// #
+ /// # // Abbreviation code
+ /// # 0x01,
+ /// # // Attribute of form DW_FORM_string = "foo\0"
+ /// # 0x66, 0x6f, 0x6f, 0x00,
+ /// #
+ /// # // Children
+ /// #
+ /// # // End of children
+ /// # 0x00,
+ /// #
+ /// # // End of children
+ /// # 0x00,
+ /// #
+ /// # // End of children
+ /// # 0x00,
+ /// # ];
+ /// # let debug_info = DebugInfo::new(&info_buf, LittleEndian);
+ /// #
+ /// # let abbrev_buf = [
+ /// # // Code
+ /// # 0x01,
+ /// # // DW_TAG_subprogram
+ /// # 0x2e,
+ /// # // DW_CHILDREN_yes
+ /// # 0x01,
+ /// # // Begin attributes
+ /// # // Attribute name = DW_AT_name
+ /// # 0x03,
+ /// # // Attribute form = DW_FORM_string
+ /// # 0x08,
+ /// # // End attributes
+ /// # 0x00,
+ /// # 0x00,
+ /// # // Null terminator
+ /// # 0x00
+ /// # ];
+ /// # let debug_abbrev = DebugAbbrev::new(&abbrev_buf, LittleEndian);
+ /// #
+ /// # let get_some_unit = || debug_info.units().next().unwrap().unwrap();
+ ///
+ /// let unit = get_some_unit();
+ /// # let get_abbrevs_for_unit = |_| unit.abbreviations(&debug_abbrev).unwrap();
+ /// let abbrevs = get_abbrevs_for_unit(&unit);
+ ///
+ /// let mut first_entry_with_no_children = None;
+ /// let mut cursor = unit.entries(&abbrevs);
+ ///
+ /// // Move the cursor to the root.
+ /// assert!(cursor.next_dfs().unwrap().is_some());
+ ///
+ /// // Traverse the DIE tree in depth-first search order.
+ /// let mut depth = 0;
+ /// while let Some((delta_depth, current)) = cursor.next_dfs().expect("Should parse next dfs") {
+ /// // Update depth value, and break out of the loop when we
+ /// // return to the original starting position.
+ /// depth += delta_depth;
+ /// if depth <= 0 {
+ /// break;
+ /// }
+ ///
+ /// first_entry_with_no_children = Some(current.clone());
+ /// }
+ ///
+ /// println!("The first entry with no children is {:?}",
+ /// first_entry_with_no_children.unwrap());
+ /// ```
+ #[allow(clippy::type_complexity)]
+ pub fn next_dfs(
+ &mut self,
+ ) -> Result<Option<(isize, &DebuggingInformationEntry<'abbrev, 'unit, R>)>> {
+ let mut delta_depth = self.delta_depth;
+ loop {
+ // The next entry should be the one we want.
+ if self.next_entry()?.is_some() {
+ if let Some(ref entry) = self.cached_current {
+ return Ok(Some((delta_depth, entry)));
+ }
+
+ // next_entry() read a null entry.
+ delta_depth += self.delta_depth;
+ } else {
+ return Ok(None);
+ }
+ }
+ }
+
+ /// Move the cursor to the next sibling DIE of the current one.
+ ///
+ /// Returns `Ok(Some(entry))` when the cursor has been moved to
+ /// the next sibling, `Ok(None)` when there is no next sibling.
+ ///
+ /// The depth of the cursor is never changed if this method returns `Ok`.
+ /// Once `Ok(None)` is returned, this method will continue to return
+ /// `Ok(None)` until either `next_entry` or `next_dfs` is called.
+ ///
+ /// Here is an example that iterates over all of the direct children of the
+ /// root entry:
+ ///
+ /// ```
+ /// # use gimli::{DebugAbbrev, DebugInfo, LittleEndian};
+ /// # let info_buf = [
+ /// # // Compilation unit header
+ /// #
+ /// # // 32-bit unit length = 25
+ /// # 0x19, 0x00, 0x00, 0x00,
+ /// # // Version 4
+ /// # 0x04, 0x00,
+ /// # // debug_abbrev_offset
+ /// # 0x00, 0x00, 0x00, 0x00,
+ /// # // Address size
+ /// # 0x04,
+ /// #
+ /// # // DIEs
+ /// #
+ /// # // Abbreviation code
+ /// # 0x01,
+ /// # // Attribute of form DW_FORM_string = "foo\0"
+ /// # 0x66, 0x6f, 0x6f, 0x00,
+ /// #
+ /// # // Children
+ /// #
+ /// # // Abbreviation code
+ /// # 0x01,
+ /// # // Attribute of form DW_FORM_string = "foo\0"
+ /// # 0x66, 0x6f, 0x6f, 0x00,
+ /// #
+ /// # // Children
+ /// #
+ /// # // Abbreviation code
+ /// # 0x01,
+ /// # // Attribute of form DW_FORM_string = "foo\0"
+ /// # 0x66, 0x6f, 0x6f, 0x00,
+ /// #
+ /// # // Children
+ /// #
+ /// # // End of children
+ /// # 0x00,
+ /// #
+ /// # // End of children
+ /// # 0x00,
+ /// #
+ /// # // End of children
+ /// # 0x00,
+ /// # ];
+ /// # let debug_info = DebugInfo::new(&info_buf, LittleEndian);
+ /// #
+ /// # let get_some_unit = || debug_info.units().next().unwrap().unwrap();
+ ///
+ /// # let abbrev_buf = [
+ /// # // Code
+ /// # 0x01,
+ /// # // DW_TAG_subprogram
+ /// # 0x2e,
+ /// # // DW_CHILDREN_yes
+ /// # 0x01,
+ /// # // Begin attributes
+ /// # // Attribute name = DW_AT_name
+ /// # 0x03,
+ /// # // Attribute form = DW_FORM_string
+ /// # 0x08,
+ /// # // End attributes
+ /// # 0x00,
+ /// # 0x00,
+ /// # // Null terminator
+ /// # 0x00
+ /// # ];
+ /// # let debug_abbrev = DebugAbbrev::new(&abbrev_buf, LittleEndian);
+ /// #
+ /// let unit = get_some_unit();
+ /// # let get_abbrevs_for_unit = |_| unit.abbreviations(&debug_abbrev).unwrap();
+ /// let abbrevs = get_abbrevs_for_unit(&unit);
+ ///
+ /// let mut cursor = unit.entries(&abbrevs);
+ ///
+ /// // Move the cursor to the root.
+ /// assert!(cursor.next_dfs().unwrap().is_some());
+ ///
+ /// // Move the cursor to the root's first child.
+ /// assert!(cursor.next_dfs().unwrap().is_some());
+ ///
+ /// // Iterate the root's children.
+ /// loop {
+ /// {
+ /// let current = cursor.current().expect("Should be at an entry");
+ /// println!("{:?} is a child of the root", current);
+ /// }
+ ///
+ /// if cursor.next_sibling().expect("Should parse next sibling").is_none() {
+ /// break;
+ /// }
+ /// }
+ /// ```
+ pub fn next_sibling(
+ &mut self,
+ ) -> Result<Option<&DebuggingInformationEntry<'abbrev, 'unit, R>>> {
+ if self.current().is_none() {
+ // We're already at the null for the end of the sibling list.
+ return Ok(None);
+ }
+
+ // Loop until we find an entry at the current level.
+ let mut depth = 0;
+ loop {
+ // Use is_some() and unwrap() to keep the borrow checker happy.
+ if self.current().is_some() && self.current().unwrap().has_children() {
+ if let Some(sibling_input) = self.current().unwrap().sibling() {
+ // Fast path: this entry has a DW_AT_sibling
+ // attribute pointing to its sibling, so jump
+ // to it (which keeps us at the same depth).
+ self.input = sibling_input;
+ self.cached_current = None;
+ } else {
+ // This entry has children, so the next entry is
+ // down one level.
+ depth += 1;
+ }
+ }
+
+ if self.next_entry()?.is_none() {
+ // End of input.
+ return Ok(None);
+ }
+
+ if depth == 0 {
+ // Found an entry at the current level.
+ return Ok(self.current());
+ }
+
+ if self.current().is_none() {
+ // A null entry means the end of a child list, so we're
+ // back up a level.
+ depth -= 1;
+ }
+ }
+ }
+}
+
+/// The state information for a tree view of the Debugging Information Entries.
+///
+/// The `EntriesTree` can be used to recursively iterate through the DIE
+/// tree, following the parent/child relationships. The `EntriesTree` contains
+/// shared state for all nodes in the tree, avoiding any duplicate parsing of
+/// entries during the traversal.
+///
+/// ## Example Usage
+/// ```rust,no_run
+/// # fn example() -> Result<(), gimli::Error> {
+/// # let debug_info = gimli::DebugInfo::new(&[], gimli::LittleEndian);
+/// # let get_some_unit = || debug_info.units().next().unwrap().unwrap();
+/// let unit = get_some_unit();
+/// # let debug_abbrev = gimli::DebugAbbrev::new(&[], gimli::LittleEndian);
+/// # let get_abbrevs_for_unit = |_| unit.abbreviations(&debug_abbrev).unwrap();
+/// let abbrevs = get_abbrevs_for_unit(&unit);
+///
+/// let mut tree = unit.entries_tree(&abbrevs, None)?;
+/// let root = tree.root()?;
+/// process_tree(root)?;
+/// # unreachable!()
+/// # }
+///
+/// fn process_tree<R>(mut node: gimli::EntriesTreeNode<R>) -> gimli::Result<()>
+/// where R: gimli::Reader
+/// {
+/// {
+/// // Examine the entry attributes.
+/// let mut attrs = node.entry().attrs();
+/// while let Some(attr) = attrs.next()? {
+/// }
+/// }
+/// let mut children = node.children();
+/// while let Some(child) = children.next()? {
+/// // Recursively process a child.
+ /// process_tree(child)?;
+/// }
+/// Ok(())
+/// }
+/// ```
+#[derive(Clone, Debug)]
+pub struct EntriesTree<'abbrev, 'unit, R>
+where
+ R: Reader,
+{
+ root: R,
+ unit: &'unit UnitHeader<R>,
+ abbreviations: &'abbrev Abbreviations,
+ input: R,
+ entry: Option<DebuggingInformationEntry<'abbrev, 'unit, R>>,
+ depth: isize,
+}
+
+impl<'abbrev, 'unit, R: Reader> EntriesTree<'abbrev, 'unit, R> {
+ fn new(root: R, unit: &'unit UnitHeader<R>, abbreviations: &'abbrev Abbreviations) -> Self {
+ let input = root.clone();
+ EntriesTree {
+ root,
+ unit,
+ abbreviations,
+ input,
+ entry: None,
+ depth: 0,
+ }
+ }
+
+ /// Returns the root node of the tree.
+ pub fn root<'me>(&'me mut self) -> Result<EntriesTreeNode<'abbrev, 'unit, 'me, R>> {
+ self.input = self.root.clone();
+ self.entry =
+ DebuggingInformationEntry::parse(&mut self.input, self.unit, self.abbreviations)?;
+ if self.entry.is_none() {
+ return Err(Error::UnexpectedNull);
+ }
+ self.depth = 0;
+ Ok(EntriesTreeNode::new(self, 1))
+ }
+
+ /// Move the cursor to the next entry at the specified depth.
+ ///
+ /// Requires `depth <= self.depth + 1`.
+ ///
+ /// Returns `true` if successful.
+ fn next(&mut self, depth: isize) -> Result<bool> {
+ if self.depth < depth {
+ debug_assert_eq!(self.depth + 1, depth);
+
+ match self.entry {
+ Some(ref entry) => {
+ if !entry.has_children() {
+ return Ok(false);
+ }
+ self.depth += 1;
+ self.input = entry.after_attrs()?;
+ }
+ None => return Ok(false),
+ }
+
+ if self.input.is_empty() {
+ self.entry = None;
+ return Ok(false);
+ }
+
+ return match DebuggingInformationEntry::parse(
+ &mut self.input,
+ self.unit,
+ self.abbreviations,
+ ) {
+ Ok(entry) => {
+ self.entry = entry;
+ Ok(self.entry.is_some())
+ }
+ Err(e) => {
+ self.input.empty();
+ self.entry = None;
+ Err(e)
+ }
+ };
+ }
+
+ loop {
+ match self.entry {
+ Some(ref entry) => {
+ if entry.has_children() {
+ if let Some(sibling_input) = entry.sibling() {
+ // Fast path: this entry has a DW_AT_sibling
+ // attribute pointing to its sibling, so jump
+ // to it (which keeps us at the same depth).
+ self.input = sibling_input;
+ } else {
+ // This entry has children, so the next entry is
+ // down one level.
+ self.depth += 1;
+ self.input = entry.after_attrs()?;
+ }
+ } else {
+ // This entry has no children, so next entry is at same depth.
+ self.input = entry.after_attrs()?;
+ }
+ }
+ None => {
+ // This entry is a null, so next entry is up one level.
+ self.depth -= 1;
+ }
+ }
+
+ if self.input.is_empty() {
+ self.entry = None;
+ return Ok(false);
+ }
+
+ match DebuggingInformationEntry::parse(&mut self.input, self.unit, self.abbreviations) {
+ Ok(entry) => {
+ self.entry = entry;
+ if self.depth == depth {
+ return Ok(self.entry.is_some());
+ }
+ }
+ Err(e) => {
+ self.input.empty();
+ self.entry = None;
+ return Err(e);
+ }
+ }
+ }
+ }
+}
+
+/// A node in the Debugging Information Entry tree.
+///
+/// The root node of a tree can be obtained
+/// via [`EntriesTree::root`](./struct.EntriesTree.html#method.root).
+#[derive(Debug)]
+pub struct EntriesTreeNode<'abbrev, 'unit, 'tree, R: Reader> {
+ tree: &'tree mut EntriesTree<'abbrev, 'unit, R>,
+ depth: isize,
+}
+
+impl<'abbrev, 'unit, 'tree, R: Reader> EntriesTreeNode<'abbrev, 'unit, 'tree, R> {
+ fn new(
+ tree: &'tree mut EntriesTree<'abbrev, 'unit, R>,
+ depth: isize,
+ ) -> EntriesTreeNode<'abbrev, 'unit, 'tree, R> {
+ debug_assert!(tree.entry.is_some());
+ EntriesTreeNode { tree, depth }
+ }
+
+ /// Returns the current entry in the tree.
+ pub fn entry(&self) -> &DebuggingInformationEntry<'abbrev, 'unit, R> {
+ // We never create a node without an entry.
+ self.tree.entry.as_ref().unwrap()
+ }
+
+ /// Create an iterator for the children of the current entry.
+ ///
+ /// The current entry can no longer be accessed after creating the
+ /// iterator.
+ pub fn children(self) -> EntriesTreeIter<'abbrev, 'unit, 'tree, R> {
+ EntriesTreeIter::new(self.tree, self.depth)
+ }
+}
+
+/// An iterator that allows traversal of the children of an
+/// `EntriesTreeNode`.
+///
+/// The items returned by this iterator are also `EntriesTreeNode`s,
+/// which allow recursive traversal of grandchildren, etc.
+#[derive(Debug)]
+pub struct EntriesTreeIter<'abbrev, 'unit, 'tree, R: Reader> {
+ tree: &'tree mut EntriesTree<'abbrev, 'unit, R>,
+ depth: isize,
+ empty: bool,
+}
+
+impl<'abbrev, 'unit, 'tree, R: Reader> EntriesTreeIter<'abbrev, 'unit, 'tree, R> {
+ fn new(
+ tree: &'tree mut EntriesTree<'abbrev, 'unit, R>,
+ depth: isize,
+ ) -> EntriesTreeIter<'abbrev, 'unit, 'tree, R> {
+ EntriesTreeIter {
+ tree,
+ depth,
+ empty: false,
+ }
+ }
+
+ /// Returns an `EntriesTreeNode` for the next child entry.
+ ///
+ /// Returns `None` if there are no more children.
+ pub fn next<'me>(&'me mut self) -> Result<Option<EntriesTreeNode<'abbrev, 'unit, 'me, R>>> {
+ if self.empty {
+ Ok(None)
+ } else if self.tree.next(self.depth)? {
+ Ok(Some(EntriesTreeNode::new(self.tree, self.depth + 1)))
+ } else {
+ self.empty = true;
+ Ok(None)
+ }
+ }
+}
+
+/// Parse a type unit header's unique type signature. Callers should handle
+/// unique-ness checking.
+fn parse_type_signature<R: Reader>(input: &mut R) -> Result<DebugTypeSignature> {
+ input.read_u64().map(DebugTypeSignature)
+}
+
+/// Parse a type unit header's type offset.
+fn parse_type_offset<R: Reader>(input: &mut R, format: Format) -> Result<UnitOffset<R::Offset>> {
+ input.read_offset(format).map(UnitOffset)
+}
+
+/// The `DebugTypes` struct represents the DWARF type information
+/// found in the `.debug_types` section.
+#[derive(Debug, Default, Clone, Copy)]
+pub struct DebugTypes<R> {
+ debug_types_section: R,
+}
+
+impl<'input, Endian> DebugTypes<EndianSlice<'input, Endian>>
+where
+ Endian: Endianity,
+{
+ /// Construct a new `DebugTypes` instance from the data in the `.debug_types`
+ /// section.
+ ///
+ /// It is the caller's responsibility to read the `.debug_types` section and
+ /// present it as a `&[u8]` slice. That means using some ELF loader on
+ /// Linux, a Mach-O loader on macOS, etc.
+ ///
+ /// ```
+ /// use gimli::{DebugTypes, LittleEndian};
+ ///
+ /// # let buf = [0x00, 0x01, 0x02, 0x03];
+ /// # let read_debug_types_section_somehow = || &buf;
+ /// let debug_types = DebugTypes::new(read_debug_types_section_somehow(), LittleEndian);
+ /// ```
+ pub fn new(debug_types_section: &'input [u8], endian: Endian) -> Self {
+ Self::from(EndianSlice::new(debug_types_section, endian))
+ }
+}
+
+impl<T> DebugTypes<T> {
+ /// Create a `DebugTypes` section that references the data in `self`.
+ ///
+ /// This is useful when `R` implements `Reader` but `T` does not.
+ ///
+ /// ## Example Usage
+ ///
+ /// ```rust,no_run
+ /// # let load_section = || unimplemented!();
+ /// // Read the DWARF section into a `Vec` with whatever object loader you're using.
+ /// let owned_section: gimli::DebugTypes<Vec<u8>> = load_section();
+ /// // Create a reference to the DWARF section.
+ /// let section = owned_section.borrow(|section| {
+ /// gimli::EndianSlice::new(&section, gimli::LittleEndian)
+ /// });
+ /// ```
+ pub fn borrow<'a, F, R>(&'a self, mut borrow: F) -> DebugTypes<R>
+ where
+ F: FnMut(&'a T) -> R,
+ {
+ borrow(&self.debug_types_section).into()
+ }
+}
+
+impl<R> Section<R> for DebugTypes<R> {
+ fn id() -> SectionId {
+ SectionId::DebugTypes
+ }
+
+ fn reader(&self) -> &R {
+ &self.debug_types_section
+ }
+}
+
+impl<R> From<R> for DebugTypes<R> {
+ fn from(debug_types_section: R) -> Self {
+ DebugTypes {
+ debug_types_section,
+ }
+ }
+}
+
+impl<R: Reader> DebugTypes<R> {
+ /// Iterate the type-units in this `.debug_types` section.
+ ///
+ /// ```
+ /// use gimli::{DebugTypes, LittleEndian};
+ ///
+ /// # let buf = [];
+ /// # let read_debug_types_section_somehow = || &buf;
+ /// let debug_types = DebugTypes::new(read_debug_types_section_somehow(), LittleEndian);
+ ///
+ /// let mut iter = debug_types.units();
+ /// while let Some(unit) = iter.next().unwrap() {
+ /// println!("unit's length is {}", unit.unit_length());
+ /// }
+ /// ```
+ ///
+ /// Can be [used with
+ /// `FallibleIterator`](./index.html#using-with-fallibleiterator).
+ pub fn units(&self) -> DebugTypesUnitHeadersIter<R> {
+ DebugTypesUnitHeadersIter {
+ input: self.debug_types_section.clone(),
+ offset: DebugTypesOffset(R::Offset::from_u8(0)),
+ }
+ }
+}
+
+/// An iterator over the type-units of this `.debug_types` section.
+///
+/// See the [documentation on
+/// `DebugTypes::units`](./struct.DebugTypes.html#method.units) for
+/// more detail.
+#[derive(Clone, Debug)]
+pub struct DebugTypesUnitHeadersIter<R: Reader> {
+ input: R,
+ offset: DebugTypesOffset<R::Offset>,
+}
+
+impl<R: Reader> DebugTypesUnitHeadersIter<R> {
+ /// Advance the iterator to the next type unit header.
+ pub fn next(&mut self) -> Result<Option<UnitHeader<R>>> {
+ if self.input.is_empty() {
+ Ok(None)
+ } else {
+ let len = self.input.len();
+ match parse_unit_header(&mut self.input, self.offset.into()) {
+ Ok(header) => {
+ self.offset.0 += len - self.input.len();
+ Ok(Some(header))
+ }
+ Err(e) => {
+ self.input.empty();
+ Err(e)
+ }
+ }
+ }
+ }
+}
+
+#[cfg(feature = "fallible-iterator")]
+impl<R: Reader> fallible_iterator::FallibleIterator for DebugTypesUnitHeadersIter<R> {
+ type Item = UnitHeader<R>;
+ type Error = Error;
+
+ fn next(&mut self) -> ::core::result::Result<Option<Self::Item>, Self::Error> {
+ DebugTypesUnitHeadersIter::next(self)
+ }
+}
+
+#[cfg(test)]
+// Tests require leb128::write.
+#[cfg(feature = "write")]
+mod tests {
+ use super::*;
+ use crate::constants;
+ use crate::constants::*;
+ use crate::endianity::{Endianity, LittleEndian};
+ use crate::leb128;
+ use crate::read::abbrev::tests::AbbrevSectionMethods;
+ use crate::read::{
+ Abbreviation, AttributeSpecification, DebugAbbrev, EndianSlice, Error, Result,
+ };
+ use crate::test_util::GimliSectionMethods;
+ use alloc::vec::Vec;
+ use core::cell::Cell;
+ use test_assembler::{Endian, Label, LabelMaker, Section};
+
+ // Mixin methods for `Section` to help define binary test data.
+
+ trait UnitSectionMethods {
+ fn unit<'input, E>(self, unit: &mut UnitHeader<EndianSlice<'input, E>>) -> Self
+ where
+ E: Endianity;
+ fn die<F>(self, code: u64, attr: F) -> Self
+ where
+ F: Fn(Section) -> Section;
+ fn die_null(self) -> Self;
+ fn attr_string(self, s: &str) -> Self;
+ fn attr_ref1(self, o: u8) -> Self;
+ fn offset(self, offset: usize, format: Format) -> Self;
+ }
+
+ impl UnitSectionMethods for Section {
+ fn unit<'input, E>(self, unit: &mut UnitHeader<EndianSlice<'input, E>>) -> Self
+ where
+ E: Endianity,
+ {
+ let size = self.size();
+ let length = Label::new();
+ let start = Label::new();
+ let end = Label::new();
+
+ let section = match unit.format() {
+ Format::Dwarf32 => self.L32(&length),
+ Format::Dwarf64 => self.L32(0xffff_ffff).L64(&length),
+ };
+
+ let section = match unit.version() {
+ 2 | 3 | 4 => section
+ .mark(&start)
+ .L16(unit.version())
+ .offset(unit.debug_abbrev_offset.0, unit.format())
+ .D8(unit.address_size()),
+ 5 => section
+ .mark(&start)
+ .L16(unit.version())
+ .D8(unit.type_().dw_ut().0)
+ .D8(unit.address_size())
+ .offset(unit.debug_abbrev_offset.0, unit.format()),
+ _ => unreachable!(),
+ };
+
+ let section = match unit.type_() {
+ UnitType::Compilation | UnitType::Partial => {
+ unit.unit_offset = DebugInfoOffset(size as usize).into();
+ section
+ }
+ UnitType::Type {
+ type_signature,
+ type_offset,
+ }
+ | UnitType::SplitType {
+ type_signature,
+ type_offset,
+ } => {
+ if unit.version() == 5 {
+ unit.unit_offset = DebugInfoOffset(size as usize).into();
+ } else {
+ unit.unit_offset = DebugTypesOffset(size as usize).into();
+ }
+ section
+ .L64(type_signature.0)
+ .offset(type_offset.0, unit.format())
+ }
+ UnitType::Skeleton(dwo_id) | UnitType::SplitCompilation(dwo_id) => {
+ unit.unit_offset = DebugInfoOffset(size as usize).into();
+ section.L64(dwo_id.0)
+ }
+ };
+
+ let section = section.append_bytes(unit.entries_buf.into()).mark(&end);
+
+ unit.unit_length = (&end - &start) as usize;
+ length.set_const(unit.unit_length as u64);
+
+ section
+ }
+
+ fn die<F>(self, code: u64, attr: F) -> Self
+ where
+ F: Fn(Section) -> Section,
+ {
+ let section = self.uleb(code);
+ attr(section)
+ }
+
+ fn die_null(self) -> Self {
+ self.D8(0)
+ }
+
+ fn attr_string(self, attr: &str) -> Self {
+ self.append_bytes(attr.as_bytes()).D8(0)
+ }
+
+ fn attr_ref1(self, attr: u8) -> Self {
+ self.D8(attr)
+ }
+
+ fn offset(self, offset: usize, format: Format) -> Self {
+ match format {
+ Format::Dwarf32 => self.L32(offset as u32),
+ Format::Dwarf64 => self.L64(offset as u64),
+ }
+ }
+ }
+
+ /// Ensure that `UnitHeader<R>` is covariant with respect to `R`.
+ #[test]
+ fn test_unit_header_variance() {
+ /// This only needs to compile.
+ fn _f<'a: 'b, 'b, E: Endianity>(
+ x: UnitHeader<EndianSlice<'a, E>>,
+ ) -> UnitHeader<EndianSlice<'b, E>> {
+ x
+ }
+ }
+
+ #[test]
+ fn test_parse_debug_abbrev_offset_32() {
+ let section = Section::with_endian(Endian::Little).L32(0x0403_0201);
+ let buf = section.get_contents().unwrap();
+ let buf = &mut EndianSlice::new(&buf, LittleEndian);
+
+ match parse_debug_abbrev_offset(buf, Format::Dwarf32) {
+ Ok(val) => assert_eq!(val, DebugAbbrevOffset(0x0403_0201)),
+ otherwise => panic!("Unexpected result: {:?}", otherwise),
+ };
+ }
+
+ #[test]
+ fn test_parse_debug_abbrev_offset_32_incomplete() {
+ let buf = [0x01, 0x02];
+ let buf = &mut EndianSlice::new(&buf, LittleEndian);
+
+ match parse_debug_abbrev_offset(buf, Format::Dwarf32) {
+ Err(Error::UnexpectedEof(_)) => assert!(true),
+ otherwise => panic!("Unexpected result: {:?}", otherwise),
+ };
+ }
+
+ #[test]
+ #[cfg(target_pointer_width = "64")]
+ fn test_parse_debug_abbrev_offset_64() {
+ let section = Section::with_endian(Endian::Little).L64(0x0807_0605_0403_0201);
+ let buf = section.get_contents().unwrap();
+ let buf = &mut EndianSlice::new(&buf, LittleEndian);
+
+ match parse_debug_abbrev_offset(buf, Format::Dwarf64) {
+ Ok(val) => assert_eq!(val, DebugAbbrevOffset(0x0807_0605_0403_0201)),
+ otherwise => panic!("Unexpected result: {:?}", otherwise),
+ };
+ }
+
+ #[test]
+ fn test_parse_debug_abbrev_offset_64_incomplete() {
+ let buf = [0x01, 0x02];
+ let buf = &mut EndianSlice::new(&buf, LittleEndian);
+
+ match parse_debug_abbrev_offset(buf, Format::Dwarf64) {
+ Err(Error::UnexpectedEof(_)) => assert!(true),
+ otherwise => panic!("Unexpected result: {:?}", otherwise),
+ };
+ }
+
+ #[test]
+ fn test_parse_debug_info_offset_32() {
+ let section = Section::with_endian(Endian::Little).L32(0x0403_0201);
+ let buf = section.get_contents().unwrap();
+ let buf = &mut EndianSlice::new(&buf, LittleEndian);
+
+ match parse_debug_info_offset(buf, Format::Dwarf32) {
+ Ok(val) => assert_eq!(val, DebugInfoOffset(0x0403_0201)),
+ otherwise => panic!("Unexpected result: {:?}", otherwise),
+ };
+ }
+
+ #[test]
+ fn test_parse_debug_info_offset_32_incomplete() {
+ let buf = [0x01, 0x02];
+ let buf = &mut EndianSlice::new(&buf, LittleEndian);
+
+ match parse_debug_info_offset(buf, Format::Dwarf32) {
+ Err(Error::UnexpectedEof(_)) => assert!(true),
+ otherwise => panic!("Unexpected result: {:?}", otherwise),
+ };
+ }
+
+ #[test]
+ #[cfg(target_pointer_width = "64")]
+ fn test_parse_debug_info_offset_64() {
+ let section = Section::with_endian(Endian::Little).L64(0x0807_0605_0403_0201);
+ let buf = section.get_contents().unwrap();
+ let buf = &mut EndianSlice::new(&buf, LittleEndian);
+
+ match parse_debug_info_offset(buf, Format::Dwarf64) {
+ Ok(val) => assert_eq!(val, DebugInfoOffset(0x0807_0605_0403_0201)),
+ otherwise => panic!("Unexpected result: {:?}", otherwise),
+ };
+ }
+
+ #[test]
+ fn test_parse_debug_info_offset_64_incomplete() {
+ let buf = [0x01, 0x02];
+ let buf = &mut EndianSlice::new(&buf, LittleEndian);
+
+ match parse_debug_info_offset(buf, Format::Dwarf64) {
+ Err(Error::UnexpectedEof(_)) => assert!(true),
+ otherwise => panic!("Unexpected result: {:?}", otherwise),
+ };
+ }
+
+ #[test]
+ #[cfg(target_pointer_width = "64")]
+ fn test_units() {
+ let expected_rest = &[1, 2, 3, 4, 5, 6, 7, 8, 9];
+ let mut unit64 = UnitHeader {
+ encoding: Encoding {
+ format: Format::Dwarf64,
+ version: 4,
+ address_size: 8,
+ },
+ unit_length: 0,
+ unit_type: UnitType::Compilation,
+ debug_abbrev_offset: DebugAbbrevOffset(0x0102_0304_0506_0708),
+ unit_offset: DebugInfoOffset(0).into(),
+ entries_buf: EndianSlice::new(expected_rest, LittleEndian),
+ };
+ let mut unit32 = UnitHeader {
+ encoding: Encoding {
+ format: Format::Dwarf32,
+ version: 4,
+ address_size: 4,
+ },
+ unit_length: 0,
+ unit_type: UnitType::Compilation,
+ debug_abbrev_offset: DebugAbbrevOffset(0x0807_0605),
+ unit_offset: DebugInfoOffset(0).into(),
+ entries_buf: EndianSlice::new(expected_rest, LittleEndian),
+ };
+ let section = Section::with_endian(Endian::Little)
+ .unit(&mut unit64)
+ .unit(&mut unit32);
+ let buf = section.get_contents().unwrap();
+
+ let debug_info = DebugInfo::new(&buf, LittleEndian);
+ let mut units = debug_info.units();
+
+ assert_eq!(units.next(), Ok(Some(unit64)));
+ assert_eq!(units.next(), Ok(Some(unit32)));
+ assert_eq!(units.next(), Ok(None));
+ }
+
+ #[test]
+ fn test_unit_version_unknown_version() {
+ let buf = [0x02, 0x00, 0x00, 0x00, 0xab, 0xcd];
+ let rest = &mut EndianSlice::new(&buf, LittleEndian);
+
+ match parse_unit_header(rest, DebugInfoOffset(0).into()) {
+ Err(Error::UnknownVersion(0xcdab)) => assert!(true),
+ otherwise => panic!("Unexpected result: {:?}", otherwise),
+ };
+
+ let buf = [0x02, 0x00, 0x00, 0x00, 0x1, 0x0];
+ let rest = &mut EndianSlice::new(&buf, LittleEndian);
+
+ match parse_unit_header(rest, DebugInfoOffset(0).into()) {
+ Err(Error::UnknownVersion(1)) => assert!(true),
+ otherwise => panic!("Unexpected result: {:?}", otherwise),
+ };
+ }
+
+ #[test]
+ fn test_unit_version_incomplete() {
+ let buf = [0x01, 0x00, 0x00, 0x00, 0x04];
+ let rest = &mut EndianSlice::new(&buf, LittleEndian);
+
+ match parse_unit_header(rest, DebugInfoOffset(0).into()) {
+ Err(Error::UnexpectedEof(_)) => assert!(true),
+ otherwise => panic!("Unexpected result: {:?}", otherwise),
+ };
+ }
+
+ #[test]
+ fn test_parse_unit_header_32_ok() {
+ let expected_rest = &[1, 2, 3, 4, 5, 6, 7, 8, 9];
+ let encoding = Encoding {
+ format: Format::Dwarf32,
+ version: 4,
+ address_size: 4,
+ };
+ let mut expected_unit = UnitHeader {
+ encoding,
+ unit_length: 0,
+ unit_type: UnitType::Compilation,
+ debug_abbrev_offset: DebugAbbrevOffset(0x0807_0605),
+ unit_offset: DebugInfoOffset(0).into(),
+ entries_buf: EndianSlice::new(expected_rest, LittleEndian),
+ };
+ let section = Section::with_endian(Endian::Little)
+ .unit(&mut expected_unit)
+ .append_bytes(expected_rest);
+ let buf = section.get_contents().unwrap();
+ let rest = &mut EndianSlice::new(&buf, LittleEndian);
+
+ assert_eq!(
+ parse_unit_header(rest, DebugInfoOffset(0).into()),
+ Ok(expected_unit)
+ );
+ assert_eq!(*rest, EndianSlice::new(expected_rest, LittleEndian));
+ }
+
+ #[test]
+ #[cfg(target_pointer_width = "64")]
+ fn test_parse_unit_header_64_ok() {
+ let expected_rest = &[1, 2, 3, 4, 5, 6, 7, 8, 9];
+ let encoding = Encoding {
+ format: Format::Dwarf64,
+ version: 4,
+ address_size: 8,
+ };
+ let mut expected_unit = UnitHeader {
+ encoding,
+ unit_length: 0,
+ unit_type: UnitType::Compilation,
+ debug_abbrev_offset: DebugAbbrevOffset(0x0102_0304_0506_0708),
+ unit_offset: DebugInfoOffset(0).into(),
+ entries_buf: EndianSlice::new(expected_rest, LittleEndian),
+ };
+ let section = Section::with_endian(Endian::Little)
+ .unit(&mut expected_unit)
+ .append_bytes(expected_rest);
+ let buf = section.get_contents().unwrap();
+ let rest = &mut EndianSlice::new(&buf, LittleEndian);
+
+ assert_eq!(
+ parse_unit_header(rest, DebugInfoOffset(0).into()),
+ Ok(expected_unit)
+ );
+ assert_eq!(*rest, EndianSlice::new(expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_v5_unit_header_32_ok() {
+ let expected_rest = &[1, 2, 3, 4, 5, 6, 7, 8, 9];
+ let encoding = Encoding {
+ format: Format::Dwarf32,
+ version: 5,
+ address_size: 4,
+ };
+ let mut expected_unit = UnitHeader {
+ encoding,
+ unit_length: 0,
+ unit_type: UnitType::Compilation,
+ debug_abbrev_offset: DebugAbbrevOffset(0x0807_0605),
+ unit_offset: DebugInfoOffset(0).into(),
+ entries_buf: EndianSlice::new(expected_rest, LittleEndian),
+ };
+ let section = Section::with_endian(Endian::Little)
+ .unit(&mut expected_unit)
+ .append_bytes(expected_rest);
+ let buf = section.get_contents().unwrap();
+ let rest = &mut EndianSlice::new(&buf, LittleEndian);
+
+ assert_eq!(
+ parse_unit_header(rest, DebugInfoOffset(0).into()),
+ Ok(expected_unit)
+ );
+ assert_eq!(*rest, EndianSlice::new(expected_rest, LittleEndian));
+ }
+
+ #[test]
+ #[cfg(target_pointer_width = "64")]
+ fn test_parse_v5_unit_header_64_ok() {
+ let expected_rest = &[1, 2, 3, 4, 5, 6, 7, 8, 9];
+ let encoding = Encoding {
+ format: Format::Dwarf64,
+ version: 5,
+ address_size: 8,
+ };
+ let mut expected_unit = UnitHeader {
+ encoding,
+ unit_length: 0,
+ unit_type: UnitType::Compilation,
+ debug_abbrev_offset: DebugAbbrevOffset(0x0102_0304_0506_0708),
+ unit_offset: DebugInfoOffset(0).into(),
+ entries_buf: EndianSlice::new(expected_rest, LittleEndian),
+ };
+ let section = Section::with_endian(Endian::Little)
+ .unit(&mut expected_unit)
+ .append_bytes(expected_rest);
+ let buf = section.get_contents().unwrap();
+ let rest = &mut EndianSlice::new(&buf, LittleEndian);
+
+ assert_eq!(
+ parse_unit_header(rest, DebugInfoOffset(0).into()),
+ Ok(expected_unit)
+ );
+ assert_eq!(*rest, EndianSlice::new(expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_v5_partial_unit_header_32_ok() {
+ let expected_rest = &[1, 2, 3, 4, 5, 6, 7, 8, 9];
+ let encoding = Encoding {
+ format: Format::Dwarf32,
+ version: 5,
+ address_size: 4,
+ };
+ let mut expected_unit = UnitHeader {
+ encoding,
+ unit_length: 0,
+ unit_type: UnitType::Partial,
+ debug_abbrev_offset: DebugAbbrevOffset(0x0807_0605),
+ unit_offset: DebugInfoOffset(0).into(),
+ entries_buf: EndianSlice::new(expected_rest, LittleEndian),
+ };
+ let section = Section::with_endian(Endian::Little)
+ .unit(&mut expected_unit)
+ .append_bytes(expected_rest);
+ let buf = section.get_contents().unwrap();
+ let rest = &mut EndianSlice::new(&buf, LittleEndian);
+
+ assert_eq!(
+ parse_unit_header(rest, DebugInfoOffset(0).into()),
+ Ok(expected_unit)
+ );
+ assert_eq!(*rest, EndianSlice::new(expected_rest, LittleEndian));
+ }
+
+ #[test]
+ #[cfg(target_pointer_width = "64")]
+ fn test_parse_v5_partial_unit_header_64_ok() {
+ let expected_rest = &[1, 2, 3, 4, 5, 6, 7, 8, 9];
+ let encoding = Encoding {
+ format: Format::Dwarf64,
+ version: 5,
+ address_size: 8,
+ };
+ let mut expected_unit = UnitHeader {
+ encoding,
+ unit_length: 0,
+ unit_type: UnitType::Partial,
+ debug_abbrev_offset: DebugAbbrevOffset(0x0102_0304_0506_0708),
+ unit_offset: DebugInfoOffset(0).into(),
+ entries_buf: EndianSlice::new(expected_rest, LittleEndian),
+ };
+ let section = Section::with_endian(Endian::Little)
+ .unit(&mut expected_unit)
+ .append_bytes(expected_rest);
+ let buf = section.get_contents().unwrap();
+ let rest = &mut EndianSlice::new(&buf, LittleEndian);
+
+ assert_eq!(
+ parse_unit_header(rest, DebugInfoOffset(0).into()),
+ Ok(expected_unit)
+ );
+ assert_eq!(*rest, EndianSlice::new(expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_v5_skeleton_unit_header_32_ok() {
+ let expected_rest = &[1, 2, 3, 4, 5, 6, 7, 8, 9];
+ let encoding = Encoding {
+ format: Format::Dwarf32,
+ version: 5,
+ address_size: 4,
+ };
+ let mut expected_unit = UnitHeader {
+ encoding,
+ unit_length: 0,
+ unit_type: UnitType::Skeleton(DwoId(0x0706_5040_0302_1000)),
+ debug_abbrev_offset: DebugAbbrevOffset(0x0807_0605),
+ unit_offset: DebugInfoOffset(0).into(),
+ entries_buf: EndianSlice::new(expected_rest, LittleEndian),
+ };
+ let section = Section::with_endian(Endian::Little)
+ .unit(&mut expected_unit)
+ .append_bytes(expected_rest);
+ let buf = section.get_contents().unwrap();
+ let rest = &mut EndianSlice::new(&buf, LittleEndian);
+
+ assert_eq!(
+ parse_unit_header(rest, DebugInfoOffset(0).into()),
+ Ok(expected_unit)
+ );
+ assert_eq!(*rest, EndianSlice::new(expected_rest, LittleEndian));
+ }
+
+ #[test]
+ #[cfg(target_pointer_width = "64")]
+ fn test_parse_v5_skeleton_unit_header_64_ok() {
+ let expected_rest = &[1, 2, 3, 4, 5, 6, 7, 8, 9];
+ let encoding = Encoding {
+ format: Format::Dwarf64,
+ version: 5,
+ address_size: 8,
+ };
+ let mut expected_unit = UnitHeader {
+ encoding,
+ unit_length: 0,
+ unit_type: UnitType::Skeleton(DwoId(0x0706_5040_0302_1000)),
+ debug_abbrev_offset: DebugAbbrevOffset(0x0102_0304_0506_0708),
+ unit_offset: DebugInfoOffset(0).into(),
+ entries_buf: EndianSlice::new(expected_rest, LittleEndian),
+ };
+ let section = Section::with_endian(Endian::Little)
+ .unit(&mut expected_unit)
+ .append_bytes(expected_rest);
+ let buf = section.get_contents().unwrap();
+ let rest = &mut EndianSlice::new(&buf, LittleEndian);
+
+ assert_eq!(
+ parse_unit_header(rest, DebugInfoOffset(0).into()),
+ Ok(expected_unit)
+ );
+ assert_eq!(*rest, EndianSlice::new(expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_v5_split_compilation_unit_header_32_ok() {
+ let expected_rest = &[1, 2, 3, 4, 5, 6, 7, 8, 9];
+ let encoding = Encoding {
+ format: Format::Dwarf32,
+ version: 5,
+ address_size: 4,
+ };
+ let mut expected_unit = UnitHeader {
+ encoding,
+ unit_length: 0,
+ unit_type: UnitType::SplitCompilation(DwoId(0x0706_5040_0302_1000)),
+ debug_abbrev_offset: DebugAbbrevOffset(0x0807_0605),
+ unit_offset: DebugInfoOffset(0).into(),
+ entries_buf: EndianSlice::new(expected_rest, LittleEndian),
+ };
+ let section = Section::with_endian(Endian::Little)
+ .unit(&mut expected_unit)
+ .append_bytes(expected_rest);
+ let buf = section.get_contents().unwrap();
+ let rest = &mut EndianSlice::new(&buf, LittleEndian);
+
+ assert_eq!(
+ parse_unit_header(rest, DebugInfoOffset(0).into()),
+ Ok(expected_unit)
+ );
+ assert_eq!(*rest, EndianSlice::new(expected_rest, LittleEndian));
+ }
+
+ #[test]
+ #[cfg(target_pointer_width = "64")]
+ fn test_parse_v5_split_compilation_unit_header_64_ok() {
+ let expected_rest = &[1, 2, 3, 4, 5, 6, 7, 8, 9];
+ let encoding = Encoding {
+ format: Format::Dwarf64,
+ version: 5,
+ address_size: 8,
+ };
+ let mut expected_unit = UnitHeader {
+ encoding,
+ unit_length: 0,
+ unit_type: UnitType::SplitCompilation(DwoId(0x0706_5040_0302_1000)),
+ debug_abbrev_offset: DebugAbbrevOffset(0x0102_0304_0506_0708),
+ unit_offset: DebugInfoOffset(0).into(),
+ entries_buf: EndianSlice::new(expected_rest, LittleEndian),
+ };
+ let section = Section::with_endian(Endian::Little)
+ .unit(&mut expected_unit)
+ .append_bytes(expected_rest);
+ let buf = section.get_contents().unwrap();
+ let rest = &mut EndianSlice::new(&buf, LittleEndian);
+
+ assert_eq!(
+ parse_unit_header(rest, DebugInfoOffset(0).into()),
+ Ok(expected_unit)
+ );
+ assert_eq!(*rest, EndianSlice::new(expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_type_offset_32_ok() {
+ let buf = [0x12, 0x34, 0x56, 0x78, 0x00];
+ let rest = &mut EndianSlice::new(&buf, LittleEndian);
+
+ match parse_type_offset(rest, Format::Dwarf32) {
+ Ok(offset) => {
+ assert_eq!(rest.len(), 1);
+ assert_eq!(UnitOffset(0x7856_3412), offset);
+ }
+ otherwise => panic!("Unexpected result: {:?}", otherwise),
+ }
+ }
+
+ #[test]
+ #[cfg(target_pointer_width = "64")]
+ fn test_parse_type_offset_64_ok() {
+ let buf = [0x12, 0x34, 0x56, 0x78, 0x9a, 0xbc, 0xde, 0xff, 0x00];
+ let rest = &mut EndianSlice::new(&buf, LittleEndian);
+
+ match parse_type_offset(rest, Format::Dwarf64) {
+ Ok(offset) => {
+ assert_eq!(rest.len(), 1);
+ assert_eq!(UnitOffset(0xffde_bc9a_7856_3412), offset);
+ }
+ otherwise => panic!("Unexpected result: {:?}", otherwise),
+ }
+ }
+
+ #[test]
+ fn test_parse_type_offset_incomplete() {
+ // Need at least 4 bytes.
+ let buf = [0xff, 0xff, 0xff];
+ let rest = &mut EndianSlice::new(&buf, LittleEndian);
+
+ match parse_type_offset(rest, Format::Dwarf32) {
+ Err(Error::UnexpectedEof(_)) => assert!(true),
+ otherwise => panic!("Unexpected result: {:?}", otherwise),
+ };
+ }
+
+ #[test]
+ fn test_parse_type_unit_header_32_ok() {
+ let expected_rest = &[1, 2, 3, 4, 5, 6, 7, 8, 9];
+ let encoding = Encoding {
+ format: Format::Dwarf32,
+ version: 4,
+ address_size: 8,
+ };
+ let mut expected_unit = UnitHeader {
+ encoding,
+ unit_length: 0,
+ unit_type: UnitType::Type {
+ type_signature: DebugTypeSignature(0xdead_beef_dead_beef),
+ type_offset: UnitOffset(0x7856_3412),
+ },
+ debug_abbrev_offset: DebugAbbrevOffset(0x0807_0605),
+ unit_offset: DebugTypesOffset(0).into(),
+ entries_buf: EndianSlice::new(expected_rest, LittleEndian),
+ };
+ let section = Section::with_endian(Endian::Little)
+ .unit(&mut expected_unit)
+ .append_bytes(expected_rest);
+ let buf = section.get_contents().unwrap();
+ let rest = &mut EndianSlice::new(&buf, LittleEndian);
+
+ assert_eq!(
+ parse_unit_header(rest, DebugTypesOffset(0).into()),
+ Ok(expected_unit)
+ );
+ assert_eq!(*rest, EndianSlice::new(expected_rest, LittleEndian));
+ }
+
+ #[test]
+ #[cfg(target_pointer_width = "64")]
+ fn test_parse_type_unit_header_64_ok() {
+ let expected_rest = &[1, 2, 3, 4, 5, 6, 7, 8, 9];
+ let encoding = Encoding {
+ format: Format::Dwarf64,
+ version: 4,
+ address_size: 8,
+ };
+ let mut expected_unit = UnitHeader {
+ encoding,
+ unit_length: 0,
+ unit_type: UnitType::Type {
+ type_signature: DebugTypeSignature(0xdead_beef_dead_beef),
+ type_offset: UnitOffset(0x7856_3412_7856_3412),
+ },
+ debug_abbrev_offset: DebugAbbrevOffset(0x0807_0605),
+ unit_offset: DebugTypesOffset(0).into(),
+ entries_buf: EndianSlice::new(expected_rest, LittleEndian),
+ };
+ let section = Section::with_endian(Endian::Little)
+ .unit(&mut expected_unit)
+ .append_bytes(expected_rest);
+ let buf = section.get_contents().unwrap();
+ let rest = &mut EndianSlice::new(&buf, LittleEndian);
+
+ assert_eq!(
+ parse_unit_header(rest, DebugTypesOffset(0).into()),
+ Ok(expected_unit)
+ );
+ assert_eq!(*rest, EndianSlice::new(expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_v5_type_unit_header_32_ok() {
+ let expected_rest = &[1, 2, 3, 4, 5, 6, 7, 8, 9];
+ let encoding = Encoding {
+ format: Format::Dwarf32,
+ version: 5,
+ address_size: 8,
+ };
+ let mut expected_unit = UnitHeader {
+ encoding,
+ unit_length: 0,
+ unit_type: UnitType::Type {
+ type_signature: DebugTypeSignature(0xdead_beef_dead_beef),
+ type_offset: UnitOffset(0x7856_3412),
+ },
+ debug_abbrev_offset: DebugAbbrevOffset(0x0807_0605),
+ unit_offset: DebugInfoOffset(0).into(),
+ entries_buf: EndianSlice::new(expected_rest, LittleEndian),
+ };
+ let section = Section::with_endian(Endian::Little)
+ .unit(&mut expected_unit)
+ .append_bytes(expected_rest);
+ let buf = section.get_contents().unwrap();
+ let rest = &mut EndianSlice::new(&buf, LittleEndian);
+
+ assert_eq!(
+ parse_unit_header(rest, DebugInfoOffset(0).into()),
+ Ok(expected_unit)
+ );
+ assert_eq!(*rest, EndianSlice::new(expected_rest, LittleEndian));
+ }
+
+ #[test]
+ #[cfg(target_pointer_width = "64")]
+ fn test_parse_v5_type_unit_header_64_ok() {
+ let expected_rest = &[1, 2, 3, 4, 5, 6, 7, 8, 9];
+ let encoding = Encoding {
+ format: Format::Dwarf64,
+ version: 5,
+ address_size: 8,
+ };
+ let mut expected_unit = UnitHeader {
+ encoding,
+ unit_length: 0,
+ unit_type: UnitType::Type {
+ type_signature: DebugTypeSignature(0xdead_beef_dead_beef),
+ type_offset: UnitOffset(0x7856_3412_7856_3412),
+ },
+ debug_abbrev_offset: DebugAbbrevOffset(0x0807_0605),
+ unit_offset: DebugInfoOffset(0).into(),
+ entries_buf: EndianSlice::new(expected_rest, LittleEndian),
+ };
+ let section = Section::with_endian(Endian::Little)
+ .unit(&mut expected_unit)
+ .append_bytes(expected_rest);
+ let buf = section.get_contents().unwrap();
+ let rest = &mut EndianSlice::new(&buf, LittleEndian);
+
+ assert_eq!(
+ parse_unit_header(rest, DebugInfoOffset(0).into()),
+ Ok(expected_unit)
+ );
+ assert_eq!(*rest, EndianSlice::new(expected_rest, LittleEndian));
+ }
+
+ #[test]
+ fn test_parse_v5_split_type_unit_header_32_ok() {
+ let expected_rest = &[1, 2, 3, 4, 5, 6, 7, 8, 9];
+ let encoding = Encoding {
+ format: Format::Dwarf32,
+ version: 5,
+ address_size: 8,
+ };
+ let mut expected_unit = UnitHeader {
+ encoding,
+ unit_length: 0,
+ unit_type: UnitType::SplitType {
+ type_signature: DebugTypeSignature(0xdead_beef_dead_beef),
+ type_offset: UnitOffset(0x7856_3412),
+ },
+ debug_abbrev_offset: DebugAbbrevOffset(0x0807_0605),
+ unit_offset: DebugInfoOffset(0).into(),
+ entries_buf: EndianSlice::new(expected_rest, LittleEndian),
+ };
+ let section = Section::with_endian(Endian::Little)
+ .unit(&mut expected_unit)
+ .append_bytes(expected_rest);
+ let buf = section.get_contents().unwrap();
+ let rest = &mut EndianSlice::new(&buf, LittleEndian);
+
+ assert_eq!(
+ parse_unit_header(rest, DebugInfoOffset(0).into()),
+ Ok(expected_unit)
+ );
+ assert_eq!(*rest, EndianSlice::new(expected_rest, LittleEndian));
+ }
+
+ #[test]
+ #[cfg(target_pointer_width = "64")]
+ fn test_parse_v5_split_type_unit_header_64_ok() {
+ let expected_rest = &[1, 2, 3, 4, 5, 6, 7, 8, 9];
+ let encoding = Encoding {
+ format: Format::Dwarf64,
+ version: 5,
+ address_size: 8,
+ };
+ let mut expected_unit = UnitHeader {
+ encoding,
+ unit_length: 0,
+ unit_type: UnitType::SplitType {
+ type_signature: DebugTypeSignature(0xdead_beef_dead_beef),
+ type_offset: UnitOffset(0x7856_3412_7856_3412),
+ },
+ debug_abbrev_offset: DebugAbbrevOffset(0x0807_0605),
+ unit_offset: DebugInfoOffset(0).into(),
+ entries_buf: EndianSlice::new(expected_rest, LittleEndian),
+ };
+ let section = Section::with_endian(Endian::Little)
+ .unit(&mut expected_unit)
+ .append_bytes(expected_rest);
+ let buf = section.get_contents().unwrap();
+ let rest = &mut EndianSlice::new(&buf, LittleEndian);
+
+ assert_eq!(
+ parse_unit_header(rest, DebugInfoOffset(0).into()),
+ Ok(expected_unit)
+ );
+ assert_eq!(*rest, EndianSlice::new(expected_rest, LittleEndian));
+ }
+
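+ // Helper: build a little-endian test section with `f` and return its raw bytes.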
+ fn section_contents<F>(f: F) -> Vec<u8>
+ where
+ F: Fn(Section) -> Section,
+ {
+ f(Section::with_endian(Endian::Little))
+ .get_contents()
+ .unwrap()
+ }
+
+ #[test]
+ fn test_attribute_value() {
+ let mut unit = test_parse_attribute_unit_default();
+ let endian = unit.entries_buf.endian();
+
+ let block_data = &[1, 2, 3, 4];
+ let buf = section_contents(|s| s.uleb(block_data.len() as u64).append_bytes(block_data));
+ let block = EndianSlice::new(&buf, endian);
+
+ let buf = section_contents(|s| s.L32(0x0102_0304));
+ let data4 = EndianSlice::new(&buf, endian);
+
+ let buf = section_contents(|s| s.L64(0x0102_0304_0506_0708));
+ let data8 = EndianSlice::new(&buf, endian);
+
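+ // Each row is (format, version, attribute name, form, input bytes, expected raw
+ // value, expected converted value). In DWARF 2 and 3, DW_FORM_data4/data8 whose
+ // size matches the format's offset size are read as section offsets; DWARF 4
+ // treats them as plain constants.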
+ let tests = [
+ (
+ Format::Dwarf32,
+ 2,
+ constants::DW_AT_data_member_location,
+ constants::DW_FORM_block,
+ block,
+ AttributeValue::Block(EndianSlice::new(block_data, endian)),
+ AttributeValue::Exprloc(Expression(EndianSlice::new(block_data, endian))),
+ ),
+ (
+ Format::Dwarf32,
+ 2,
+ constants::DW_AT_data_member_location,
+ constants::DW_FORM_data4,
+ data4,
+ AttributeValue::SecOffset(0x0102_0304),
+ AttributeValue::LocationListsRef(LocationListsOffset(0x0102_0304)),
+ ),
+ (
+ Format::Dwarf64,
+ 2,
+ constants::DW_AT_data_member_location,
+ constants::DW_FORM_data4,
+ data4,
+ AttributeValue::Data4(0x0102_0304),
+ AttributeValue::Udata(0x0102_0304),
+ ),
+ (
+ Format::Dwarf32,
+ 4,
+ constants::DW_AT_data_member_location,
+ constants::DW_FORM_data4,
+ data4,
+ AttributeValue::Data4(0x0102_0304),
+ AttributeValue::Udata(0x0102_0304),
+ ),
+ (
+ Format::Dwarf32,
+ 2,
+ constants::DW_AT_data_member_location,
+ constants::DW_FORM_data8,
+ data8,
+ AttributeValue::Data8(0x0102_0304_0506_0708),
+ AttributeValue::Udata(0x0102_0304_0506_0708),
+ ),
+ #[cfg(target_pointer_width = "64")]
+ (
+ Format::Dwarf64,
+ 2,
+ constants::DW_AT_data_member_location,
+ constants::DW_FORM_data8,
+ data8,
+ AttributeValue::SecOffset(0x0102_0304_0506_0708),
+ AttributeValue::LocationListsRef(LocationListsOffset(0x0102_0304_0506_0708)),
+ ),
+ (
+ Format::Dwarf64,
+ 4,
+ constants::DW_AT_data_member_location,
+ constants::DW_FORM_data8,
+ data8,
+ AttributeValue::Data8(0x0102_0304_0506_0708),
+ AttributeValue::Udata(0x0102_0304_0506_0708),
+ ),
+ (
+ Format::Dwarf32,
+ 4,
+ constants::DW_AT_location,
+ constants::DW_FORM_data4,
+ data4,
+ AttributeValue::SecOffset(0x0102_0304),
+ AttributeValue::LocationListsRef(LocationListsOffset(0x0102_0304)),
+ ),
+ #[cfg(target_pointer_width = "64")]
+ (
+ Format::Dwarf64,
+ 4,
+ constants::DW_AT_location,
+ constants::DW_FORM_data8,
+ data8,
+ AttributeValue::SecOffset(0x0102_0304_0506_0708),
+ AttributeValue::LocationListsRef(LocationListsOffset(0x0102_0304_0506_0708)),
+ ),
+ (
+ Format::Dwarf32,
+ 4,
+ constants::DW_AT_str_offsets_base,
+ constants::DW_FORM_sec_offset,
+ data4,
+ AttributeValue::SecOffset(0x0102_0304),
+ AttributeValue::DebugStrOffsetsBase(DebugStrOffsetsBase(0x0102_0304)),
+ ),
+ (
+ Format::Dwarf32,
+ 4,
+ constants::DW_AT_stmt_list,
+ constants::DW_FORM_sec_offset,
+ data4,
+ AttributeValue::SecOffset(0x0102_0304),
+ AttributeValue::DebugLineRef(DebugLineOffset(0x0102_0304)),
+ ),
+ (
+ Format::Dwarf32,
+ 4,
+ constants::DW_AT_addr_base,
+ constants::DW_FORM_sec_offset,
+ data4,
+ AttributeValue::SecOffset(0x0102_0304),
+ AttributeValue::DebugAddrBase(DebugAddrBase(0x0102_0304)),
+ ),
+ (
+ Format::Dwarf32,
+ 4,
+ constants::DW_AT_rnglists_base,
+ constants::DW_FORM_sec_offset,
+ data4,
+ AttributeValue::SecOffset(0x0102_0304),
+ AttributeValue::DebugRngListsBase(DebugRngListsBase(0x0102_0304)),
+ ),
+ (
+ Format::Dwarf32,
+ 4,
+ constants::DW_AT_loclists_base,
+ constants::DW_FORM_sec_offset,
+ data4,
+ AttributeValue::SecOffset(0x0102_0304),
+ AttributeValue::DebugLocListsBase(DebugLocListsBase(0x0102_0304)),
+ ),
+ ];
+
+ for test in tests.iter() {
+ let (format, version, name, form, mut input, expect_raw, expect_value) = *test;
+ unit.encoding.format = format;
+ unit.encoding.version = version;
+ let spec = AttributeSpecification::new(name, form, None);
+ let attribute =
+ parse_attribute(&mut input, unit.encoding(), spec).expect("Should parse attribute");
+ assert_eq!(attribute.raw_value(), expect_raw);
+ assert_eq!(attribute.value(), expect_value);
+ }
+ }
+
+ #[test]
+ fn test_attribute_udata_sdata_value() {
+ #[allow(clippy::type_complexity)]
+ let tests: &[(
+ AttributeValue<EndianSlice<LittleEndian>>,
+ Option<u64>,
+ Option<i64>,
+ )] = &[
+ (AttributeValue::Data1(1), Some(1), Some(1)),
+ (
+ AttributeValue::Data1(core::u8::MAX),
+ Some(u64::from(core::u8::MAX)),
+ Some(-1),
+ ),
+ (AttributeValue::Data2(1), Some(1), Some(1)),
+ (
+ AttributeValue::Data2(core::u16::MAX),
+ Some(u64::from(core::u16::MAX)),
+ Some(-1),
+ ),
+ (AttributeValue::Data4(1), Some(1), Some(1)),
+ (
+ AttributeValue::Data4(core::u32::MAX),
+ Some(u64::from(core::u32::MAX)),
+ Some(-1),
+ ),
+ (AttributeValue::Data8(1), Some(1), Some(1)),
+ (
+ AttributeValue::Data8(core::u64::MAX),
+ Some(core::u64::MAX),
+ Some(-1),
+ ),
+ (AttributeValue::Sdata(1), Some(1), Some(1)),
+ (AttributeValue::Sdata(-1), None, Some(-1)),
+ (AttributeValue::Udata(1), Some(1), Some(1)),
+ (AttributeValue::Udata(1u64 << 63), Some(1u64 << 63), None),
+ ];
+ for test in tests.iter() {
+ let (value, expect_udata, expect_sdata) = *test;
+ let attribute = Attribute {
+ name: DW_AT_data_member_location,
+ value,
+ };
+ assert_eq!(attribute.udata_value(), expect_udata);
+ assert_eq!(attribute.sdata_value(), expect_sdata);
+ }
+ }
+
+ fn test_parse_attribute_unit<Endian>(
+ address_size: u8,
+ format: Format,
+ endian: Endian,
+ ) -> UnitHeader<EndianSlice<'static, Endian>>
+ where
+ Endian: Endianity,
+ {
+ let encoding = Encoding {
+ format,
+ version: 4,
+ address_size,
+ };
+ UnitHeader::new(
+ encoding,
+ 7,
+ UnitType::Compilation,
+ DebugAbbrevOffset(0x0807_0605),
+ DebugInfoOffset(0).into(),
+ EndianSlice::new(&[], endian),
+ )
+ }
+
+ fn test_parse_attribute_unit_default() -> UnitHeader<EndianSlice<'static, LittleEndian>> {
+ test_parse_attribute_unit(4, Format::Dwarf32, LittleEndian)
+ }
+
+ fn test_parse_attribute<'input, Endian>(
+ buf: &'input [u8],
+ len: usize,
+ unit: &UnitHeader<EndianSlice<'input, Endian>>,
+ form: constants::DwForm,
+ value: AttributeValue<EndianSlice<'input, Endian>>,
+ ) where
+ Endian: Endianity,
+ {
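+ // `len` is the number of bytes the attribute's form is expected to consume from `buf`.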
+ let spec = AttributeSpecification::new(constants::DW_AT_low_pc, form, None);
+
+ let expect = Attribute {
+ name: constants::DW_AT_low_pc,
+ value,
+ };
+
+ let rest = &mut EndianSlice::new(buf, Endian::default());
+ match parse_attribute(rest, unit.encoding(), spec) {
+ Ok(attr) => {
+ assert_eq!(attr, expect);
+ assert_eq!(*rest, EndianSlice::new(&buf[len..], Endian::default()));
+ if let Some(size) = spec.size(unit) {
+ assert_eq!(rest.len() + size, buf.len());
+ }
+ }
+ otherwise => {
+ panic!("Unexpected parse result = {:#?}", otherwise);
+ }
+ };
+ }
+
+ #[test]
+ fn test_parse_attribute_addr() {
+ let buf = [0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08];
+ let unit = test_parse_attribute_unit(4, Format::Dwarf32, LittleEndian);
+ let form = constants::DW_FORM_addr;
+ let value = AttributeValue::Addr(0x0403_0201);
+ test_parse_attribute(&buf, 4, &unit, form, value);
+ }
+
+ #[test]
+ fn test_parse_attribute_addr8() {
+ let buf = [0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08];
+ let unit = test_parse_attribute_unit(8, Format::Dwarf32, LittleEndian);
+ let form = constants::DW_FORM_addr;
+ let value = AttributeValue::Addr(0x0807_0605_0403_0201);
+ test_parse_attribute(&buf, 8, &unit, form, value);
+ }
+
+ #[test]
+ fn test_parse_attribute_block1() {
+ // Length of data (3), three bytes of data, two bytes of left over input.
+ let buf = [0x03, 0x09, 0x09, 0x09, 0x00, 0x00];
+ let unit = test_parse_attribute_unit_default();
+ let form = constants::DW_FORM_block1;
+ let value = AttributeValue::Block(EndianSlice::new(&buf[1..4], LittleEndian));
+ test_parse_attribute(&buf, 4, &unit, form, value);
+ }
+
+ #[test]
+ fn test_parse_attribute_block2() {
+ // Two byte length of data (2), two bytes of data, two bytes of left over input.
+ let buf = [0x02, 0x00, 0x09, 0x09, 0x00, 0x00];
+ let unit = test_parse_attribute_unit_default();
+ let form = constants::DW_FORM_block2;
+ let value = AttributeValue::Block(EndianSlice::new(&buf[2..4], LittleEndian));
+ test_parse_attribute(&buf, 4, &unit, form, value);
+ }
+
+ #[test]
+ fn test_parse_attribute_block4() {
+ // Four byte length of data (2), two bytes of data, no left over input.
+ let buf = [0x02, 0x00, 0x00, 0x00, 0x99, 0x99];
+ let unit = test_parse_attribute_unit_default();
+ let form = constants::DW_FORM_block4;
+ let value = AttributeValue::Block(EndianSlice::new(&buf[4..], LittleEndian));
+ test_parse_attribute(&buf, 6, &unit, form, value);
+ }
+
+ #[test]
+ fn test_parse_attribute_block() {
+ // LEB length of data (2, one byte), two bytes of data, no left over input.
+ let buf = [0x02, 0x99, 0x99];
+ let unit = test_parse_attribute_unit_default();
+ let form = constants::DW_FORM_block;
+ let value = AttributeValue::Block(EndianSlice::new(&buf[1..], LittleEndian));
+ test_parse_attribute(&buf, 3, &unit, form, value);
+ }
+
+ #[test]
+ fn test_parse_attribute_data1() {
+ let buf = [0x03];
+ let unit = test_parse_attribute_unit_default();
+ let form = constants::DW_FORM_data1;
+ let value = AttributeValue::Data1(0x03);
+ test_parse_attribute(&buf, 1, &unit, form, value);
+ }
+
+ #[test]
+ fn test_parse_attribute_data2() {
+ let buf = [0x02, 0x01, 0x0];
+ let unit = test_parse_attribute_unit_default();
+ let form = constants::DW_FORM_data2;
+ let value = AttributeValue::Data2(0x0102);
+ test_parse_attribute(&buf, 2, &unit, form, value);
+ }
+
+ #[test]
+ fn test_parse_attribute_data4() {
+ let buf = [0x01, 0x02, 0x03, 0x04, 0x99, 0x99];
+ let unit = test_parse_attribute_unit_default();
+ let form = constants::DW_FORM_data4;
+ let value = AttributeValue::Data4(0x0403_0201);
+ test_parse_attribute(&buf, 4, &unit, form, value);
+ }
+
+ #[test]
+ fn test_parse_attribute_data8() {
+ let buf = [0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x99, 0x99];
+ let unit = test_parse_attribute_unit_default();
+ let form = constants::DW_FORM_data8;
+ let value = AttributeValue::Data8(0x0807_0605_0403_0201);
+ test_parse_attribute(&buf, 8, &unit, form, value);
+ }
+
+ #[test]
+ fn test_parse_attribute_udata() {
+ let mut buf = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
+
+ let bytes_written = {
+ let mut writable = &mut buf[..];
+ leb128::write::unsigned(&mut writable, 4097).expect("should write ok")
+ };
+
+ let unit = test_parse_attribute_unit_default();
+ let form = constants::DW_FORM_udata;
+ let value = AttributeValue::Udata(4097);
+ test_parse_attribute(&buf, bytes_written, &unit, form, value);
+ }
+
+ #[test]
+ fn test_parse_attribute_sdata() {
+ let mut buf = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
+
+ let bytes_written = {
+ let mut writable = &mut buf[..];
+ leb128::write::signed(&mut writable, -4097).expect("should write ok")
+ };
+
+ let unit = test_parse_attribute_unit_default();
+ let form = constants::DW_FORM_sdata;
+ let value = AttributeValue::Sdata(-4097);
+ test_parse_attribute(&buf, bytes_written, &unit, form, value);
+ }
+
+ #[test]
+ fn test_parse_attribute_exprloc() {
+ // LEB length of data (2, one byte), two bytes of data, one byte left over input.
+ let buf = [0x02, 0x99, 0x99, 0x11];
+ let unit = test_parse_attribute_unit_default();
+ let form = constants::DW_FORM_exprloc;
+ let value = AttributeValue::Exprloc(Expression(EndianSlice::new(&buf[1..3], LittleEndian)));
+ test_parse_attribute(&buf, 3, &unit, form, value);
+ }
+
+ #[test]
+ fn test_parse_attribute_flag_true() {
+ let buf = [0x42];
+ let unit = test_parse_attribute_unit_default();
+ let form = constants::DW_FORM_flag;
+ let value = AttributeValue::Flag(true);
+ test_parse_attribute(&buf, 1, &unit, form, value);
+ }
+
+ #[test]
+ fn test_parse_attribute_flag_false() {
+ let buf = [0x00];
+ let unit = test_parse_attribute_unit_default();
+ let form = constants::DW_FORM_flag;
+ let value = AttributeValue::Flag(false);
+ test_parse_attribute(&buf, 1, &unit, form, value);
+ }
+
+ #[test]
+ fn test_parse_attribute_flag_present() {
+ let buf = [0x01, 0x02, 0x03, 0x04];
+ let unit = test_parse_attribute_unit_default();
+ let form = constants::DW_FORM_flag_present;
+ let value = AttributeValue::Flag(true);
+ // DW_FORM_flag_present does not consume any bytes of the input stream.
+ test_parse_attribute(&buf, 0, &unit, form, value);
+ }
+
+ #[test]
+ fn test_parse_attribute_sec_offset_32() {
+ let buf = [0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x10];
+ let unit = test_parse_attribute_unit(4, Format::Dwarf32, LittleEndian);
+ let form = constants::DW_FORM_sec_offset;
+ let value = AttributeValue::SecOffset(0x0403_0201);
+ test_parse_attribute(&buf, 4, &unit, form, value);
+ }
+
+ #[test]
+ #[cfg(target_pointer_width = "64")]
+ fn test_parse_attribute_sec_offset_64() {
+ let buf = [0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x10];
+ let unit = test_parse_attribute_unit(4, Format::Dwarf64, LittleEndian);
+ let form = constants::DW_FORM_sec_offset;
+ let value = AttributeValue::SecOffset(0x0807_0605_0403_0201);
+ test_parse_attribute(&buf, 8, &unit, form, value);
+ }
+
+ #[test]
+ fn test_parse_attribute_ref1() {
+ let buf = [0x03];
+ let unit = test_parse_attribute_unit_default();
+ let form = constants::DW_FORM_ref1;
+ let value = AttributeValue::UnitRef(UnitOffset(3));
+ test_parse_attribute(&buf, 1, &unit, form, value);
+ }
+
+ #[test]
+ fn test_parse_attribute_ref2() {
+ let buf = [0x02, 0x01, 0x0];
+ let unit = test_parse_attribute_unit_default();
+ let form = constants::DW_FORM_ref2;
+ let value = AttributeValue::UnitRef(UnitOffset(258));
+ test_parse_attribute(&buf, 2, &unit, form, value);
+ }
+
+ #[test]
+ fn test_parse_attribute_ref4() {
+ let buf = [0x01, 0x02, 0x03, 0x04, 0x99, 0x99];
+ let unit = test_parse_attribute_unit_default();
+ let form = constants::DW_FORM_ref4;
+ let value = AttributeValue::UnitRef(UnitOffset(0x0403_0201));
+ test_parse_attribute(&buf, 4, &unit, form, value);
+ }
+
+ #[test]
+ #[cfg(target_pointer_width = "64")]
+ fn test_parse_attribute_ref8() {
+ let buf = [0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x99, 0x99];
+ let unit = test_parse_attribute_unit_default();
+ let form = constants::DW_FORM_ref8;
+ let value = AttributeValue::UnitRef(UnitOffset(0x0807_0605_0403_0201));
+ test_parse_attribute(&buf, 8, &unit, form, value);
+ }
+
+ #[test]
+ fn test_parse_attribute_ref_sup4() {
+ let buf = [0x01, 0x02, 0x03, 0x04, 0x99, 0x99];
+ let unit = test_parse_attribute_unit_default();
+ let form = constants::DW_FORM_ref_sup4;
+ let value = AttributeValue::DebugInfoRefSup(DebugInfoOffset(0x0403_0201));
+ test_parse_attribute(&buf, 4, &unit, form, value);
+ }
+
+ #[test]
+ #[cfg(target_pointer_width = "64")]
+ fn test_parse_attribute_ref_sup8() {
+ let buf = [0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x99, 0x99];
+ let unit = test_parse_attribute_unit_default();
+ let form = constants::DW_FORM_ref_sup8;
+ let value = AttributeValue::DebugInfoRefSup(DebugInfoOffset(0x0807_0605_0403_0201));
+ test_parse_attribute(&buf, 8, &unit, form, value);
+ }
+
+ #[test]
+ fn test_parse_attribute_refudata() {
+ let mut buf = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
+
+ let bytes_written = {
+ let mut writable = &mut buf[..];
+ leb128::write::unsigned(&mut writable, 4097).expect("should write ok")
+ };
+
+ let unit = test_parse_attribute_unit_default();
+ let form = constants::DW_FORM_ref_udata;
+ let value = AttributeValue::UnitRef(UnitOffset(4097));
+ test_parse_attribute(&buf, bytes_written, &unit, form, value);
+ }
+
+ #[test]
+ fn test_parse_attribute_refaddr_32() {
+ let buf = [0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x99, 0x99];
+ let unit = test_parse_attribute_unit(4, Format::Dwarf32, LittleEndian);
+ let form = constants::DW_FORM_ref_addr;
+ let value = AttributeValue::DebugInfoRef(DebugInfoOffset(0x0403_0201));
+ test_parse_attribute(&buf, 4, &unit, form, value);
+ }
+
+ #[test]
+ #[cfg(target_pointer_width = "64")]
+ fn test_parse_attribute_refaddr_64() {
+ let buf = [0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x99, 0x99];
+ let unit = test_parse_attribute_unit(4, Format::Dwarf64, LittleEndian);
+ let form = constants::DW_FORM_ref_addr;
+ let value = AttributeValue::DebugInfoRef(DebugInfoOffset(0x0807_0605_0403_0201));
+ test_parse_attribute(&buf, 8, &unit, form, value);
+ }
+
+ #[test]
+ fn test_parse_attribute_refaddr_version2() {
+ let buf = [0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x99, 0x99];
+ let mut unit = test_parse_attribute_unit(4, Format::Dwarf32, LittleEndian);
+ unit.encoding.version = 2;
+ let form = constants::DW_FORM_ref_addr;
+ let value = AttributeValue::DebugInfoRef(DebugInfoOffset(0x0403_0201));
+ test_parse_attribute(&buf, 4, &unit, form, value);
+ }
+
+ #[test]
+ #[cfg(target_pointer_width = "64")]
+ fn test_parse_attribute_refaddr8_version2() {
+ let buf = [0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x99, 0x99];
+ let mut unit = test_parse_attribute_unit(8, Format::Dwarf32, LittleEndian);
+ unit.encoding.version = 2;
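+ // In DWARF 2, DW_FORM_ref_addr is address-sized rather than offset-sized, so an
+ // 8-byte address size reads an 8-byte reference even in the 32-bit format.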
+ let form = constants::DW_FORM_ref_addr;
+ let value = AttributeValue::DebugInfoRef(DebugInfoOffset(0x0807_0605_0403_0201));
+ test_parse_attribute(&buf, 8, &unit, form, value);
+ }
+
+ #[test]
+ fn test_parse_attribute_gnu_ref_alt_32() {
+ let buf = [0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x99, 0x99];
+ let unit = test_parse_attribute_unit(4, Format::Dwarf32, LittleEndian);
+ let form = constants::DW_FORM_GNU_ref_alt;
+ let value = AttributeValue::DebugInfoRefSup(DebugInfoOffset(0x0403_0201));
+ test_parse_attribute(&buf, 4, &unit, form, value);
+ }
+
+ #[test]
+ #[cfg(target_pointer_width = "64")]
+ fn test_parse_attribute_gnu_ref_alt_64() {
+ let buf = [0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x99, 0x99];
+ let unit = test_parse_attribute_unit(4, Format::Dwarf64, LittleEndian);
+ let form = constants::DW_FORM_GNU_ref_alt;
+ let value = AttributeValue::DebugInfoRefSup(DebugInfoOffset(0x0807_0605_0403_0201));
+ test_parse_attribute(&buf, 8, &unit, form, value);
+ }
+
+ #[test]
+ fn test_parse_attribute_refsig8() {
+ let buf = [0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x99, 0x99];
+ let unit = test_parse_attribute_unit_default();
+ let form = constants::DW_FORM_ref_sig8;
+ let value = AttributeValue::DebugTypesRef(DebugTypeSignature(0x0807_0605_0403_0201));
+ test_parse_attribute(&buf, 8, &unit, form, value);
+ }
+
+ #[test]
+ fn test_parse_attribute_string() {
+ let buf = [0x01, 0x02, 0x03, 0x04, 0x05, 0x0, 0x99, 0x99];
+ let unit = test_parse_attribute_unit_default();
+ let form = constants::DW_FORM_string;
+ let value = AttributeValue::String(EndianSlice::new(&buf[..5], LittleEndian));
+ test_parse_attribute(&buf, 6, &unit, form, value);
+ }
+
+ #[test]
+ fn test_parse_attribute_strp_32() {
+ let buf = [0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x99, 0x99];
+ let unit = test_parse_attribute_unit(4, Format::Dwarf32, LittleEndian);
+ let form = constants::DW_FORM_strp;
+ let value = AttributeValue::DebugStrRef(DebugStrOffset(0x0403_0201));
+ test_parse_attribute(&buf, 4, &unit, form, value);
+ }
+
+ #[test]
+ #[cfg(target_pointer_width = "64")]
+ fn test_parse_attribute_strp_64() {
+ let buf = [0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x99, 0x99];
+ let unit = test_parse_attribute_unit(4, Format::Dwarf64, LittleEndian);
+ let form = constants::DW_FORM_strp;
+ let value = AttributeValue::DebugStrRef(DebugStrOffset(0x0807_0605_0403_0201));
+ test_parse_attribute(&buf, 8, &unit, form, value);
+ }
+
+ #[test]
+ fn test_parse_attribute_strp_sup_32() {
+ let buf = [0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x99, 0x99];
+ let unit = test_parse_attribute_unit(4, Format::Dwarf32, LittleEndian);
+ let form = constants::DW_FORM_strp_sup;
+ let value = AttributeValue::DebugStrRefSup(DebugStrOffset(0x0403_0201));
+ test_parse_attribute(&buf, 4, &unit, form, value);
+ }
+
+ #[test]
+ #[cfg(target_pointer_width = "64")]
+ fn test_parse_attribute_strp_sup_64() {
+ let buf = [0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x99, 0x99];
+ let unit = test_parse_attribute_unit(4, Format::Dwarf64, LittleEndian);
+ let form = constants::DW_FORM_strp_sup;
+ let value = AttributeValue::DebugStrRefSup(DebugStrOffset(0x0807_0605_0403_0201));
+ test_parse_attribute(&buf, 8, &unit, form, value);
+ }
+
+ #[test]
+ fn test_parse_attribute_gnu_strp_alt_32() {
+ let buf = [0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x99, 0x99];
+ let unit = test_parse_attribute_unit(4, Format::Dwarf32, LittleEndian);
+ let form = constants::DW_FORM_GNU_strp_alt;
+ let value = AttributeValue::DebugStrRefSup(DebugStrOffset(0x0403_0201));
+ test_parse_attribute(&buf, 4, &unit, form, value);
+ }
+
+ #[test]
+ #[cfg(target_pointer_width = "64")]
+ fn test_parse_attribute_gnu_strp_alt_64() {
+ let buf = [0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x99, 0x99];
+ let unit = test_parse_attribute_unit(4, Format::Dwarf64, LittleEndian);
+ let form = constants::DW_FORM_GNU_strp_alt;
+ let value = AttributeValue::DebugStrRefSup(DebugStrOffset(0x0807_0605_0403_0201));
+ test_parse_attribute(&buf, 8, &unit, form, value);
+ }
+
+ #[test]
+ fn test_parse_attribute_strx() {
+ let mut buf = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
+
+ let bytes_written = {
+ let mut writable = &mut buf[..];
+ leb128::write::unsigned(&mut writable, 4097).expect("should write ok")
+ };
+
+ let unit = test_parse_attribute_unit_default();
+ let form = constants::DW_FORM_strx;
+ let value = AttributeValue::DebugStrOffsetsIndex(DebugStrOffsetsIndex(4097));
+ test_parse_attribute(&buf, bytes_written, &unit, form, value);
+ }
+
+ #[test]
+ fn test_parse_attribute_strx1() {
+ let buf = [0x01, 0x99, 0x99];
+ let unit = test_parse_attribute_unit(4, Format::Dwarf64, LittleEndian);
+ let form = constants::DW_FORM_strx1;
+ let value = AttributeValue::DebugStrOffsetsIndex(DebugStrOffsetsIndex(0x01));
+ test_parse_attribute(&buf, 1, &unit, form, value);
+ }
+
+ #[test]
+ fn test_parse_attribute_strx2() {
+ let buf = [0x01, 0x02, 0x99, 0x99];
+ let unit = test_parse_attribute_unit(4, Format::Dwarf64, LittleEndian);
+ let form = constants::DW_FORM_strx2;
+ let value = AttributeValue::DebugStrOffsetsIndex(DebugStrOffsetsIndex(0x0201));
+ test_parse_attribute(&buf, 2, &unit, form, value);
+ }
+
+ #[test]
+ fn test_parse_attribute_strx3() {
+ let buf = [0x01, 0x02, 0x03, 0x99, 0x99];
+ let unit = test_parse_attribute_unit(4, Format::Dwarf64, LittleEndian);
+ let form = constants::DW_FORM_strx3;
+ let value = AttributeValue::DebugStrOffsetsIndex(DebugStrOffsetsIndex(0x03_0201));
+ test_parse_attribute(&buf, 3, &unit, form, value);
+ }
+
+ #[test]
+ fn test_parse_attribute_strx4() {
+ let buf = [0x01, 0x02, 0x03, 0x04, 0x99, 0x99];
+ let unit = test_parse_attribute_unit(4, Format::Dwarf64, LittleEndian);
+ let form = constants::DW_FORM_strx4;
+ let value = AttributeValue::DebugStrOffsetsIndex(DebugStrOffsetsIndex(0x0403_0201));
+ test_parse_attribute(&buf, 4, &unit, form, value);
+ }
+
+ #[test]
+ fn test_parse_attribute_addrx() {
+ let mut buf = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
+
+ let bytes_written = {
+ let mut writable = &mut buf[..];
+ leb128::write::unsigned(&mut writable, 4097).expect("should write ok")
+ };
+
+ let unit = test_parse_attribute_unit_default();
+ let form = constants::DW_FORM_addrx;
+ let value = AttributeValue::DebugAddrIndex(DebugAddrIndex(4097));
+ test_parse_attribute(&buf, bytes_written, &unit, form, value);
+ }
+
+ #[test]
+ fn test_parse_attribute_addrx1() {
+ let buf = [0x01, 0x99, 0x99];
+ let unit = test_parse_attribute_unit(4, Format::Dwarf64, LittleEndian);
+ let form = constants::DW_FORM_addrx1;
+ let value = AttributeValue::DebugAddrIndex(DebugAddrIndex(0x01));
+ test_parse_attribute(&buf, 1, &unit, form, value);
+ }
+
+ #[test]
+ fn test_parse_attribute_addrx2() {
+ let buf = [0x01, 0x02, 0x99, 0x99];
+ let unit = test_parse_attribute_unit(4, Format::Dwarf64, LittleEndian);
+ let form = constants::DW_FORM_addrx2;
+ let value = AttributeValue::DebugAddrIndex(DebugAddrIndex(0x0201));
+ test_parse_attribute(&buf, 2, &unit, form, value);
+ }
+
+ #[test]
+ fn test_parse_attribute_addrx3() {
+ let buf = [0x01, 0x02, 0x03, 0x99, 0x99];
+ let unit = test_parse_attribute_unit(4, Format::Dwarf64, LittleEndian);
+ let form = constants::DW_FORM_addrx3;
+ let value = AttributeValue::DebugAddrIndex(DebugAddrIndex(0x03_0201));
+ test_parse_attribute(&buf, 3, &unit, form, value);
+ }
+
+ #[test]
+ fn test_parse_attribute_addrx4() {
+ let buf = [0x01, 0x02, 0x03, 0x04, 0x99, 0x99];
+ let unit = test_parse_attribute_unit(4, Format::Dwarf64, LittleEndian);
+ let form = constants::DW_FORM_addrx4;
+ let value = AttributeValue::DebugAddrIndex(DebugAddrIndex(0x0403_0201));
+ test_parse_attribute(&buf, 4, &unit, form, value);
+ }
+
+ #[test]
+ fn test_parse_attribute_loclistx() {
+ let mut buf = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
+
+ let bytes_written = {
+ let mut writable = &mut buf[..];
+ leb128::write::unsigned(&mut writable, 4097).expect("should write ok")
+ };
+
+ let unit = test_parse_attribute_unit_default();
+ let form = constants::DW_FORM_loclistx;
+ let value = AttributeValue::DebugLocListsIndex(DebugLocListsIndex(4097));
+ test_parse_attribute(&buf, bytes_written, &unit, form, value);
+ }
+
+ #[test]
+ fn test_parse_attribute_rnglistx() {
+ let mut buf = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
+
+ let bytes_written = {
+ let mut writable = &mut buf[..];
+ leb128::write::unsigned(&mut writable, 4097).expect("should write ok")
+ };
+
+ let unit = test_parse_attribute_unit_default();
+ let form = constants::DW_FORM_rnglistx;
+ let value = AttributeValue::DebugRngListsIndex(DebugRngListsIndex(4097));
+ test_parse_attribute(&buf, bytes_written, &unit, form, value);
+ }
+
+ #[test]
+ fn test_parse_attribute_indirect() {
+ let mut buf = [0; 100];
+
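+ // DW_FORM_indirect encodes the actual form as a ULEB128, followed by a value in that form.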
+ let bytes_written = {
+ let mut writable = &mut buf[..];
+ leb128::write::unsigned(&mut writable, constants::DW_FORM_udata.0.into())
+ .expect("should write udata")
+ + leb128::write::unsigned(&mut writable, 9_999_999).expect("should write value")
+ };
+
+ let unit = test_parse_attribute_unit_default();
+ let form = constants::DW_FORM_indirect;
+ let value = AttributeValue::Udata(9_999_999);
+ test_parse_attribute(&buf, bytes_written, &unit, form, value);
+ }
+
+ #[test]
+ fn test_parse_attribute_indirect_implicit_const() {
+ let encoding = Encoding {
+ format: Format::Dwarf32,
+ version: 4,
+ address_size: 4,
+ };
+ let mut buf = [0; 100];
+ let mut writable = &mut buf[..];
+ leb128::write::unsigned(&mut writable, constants::DW_FORM_implicit_const.0.into())
+ .expect("should write implicit_const");
+
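+ // DW_FORM_implicit_const takes its value from the abbreviation itself, so it is
+ // not a valid target for DW_FORM_indirect.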
+ let input = &mut EndianSlice::new(&buf, LittleEndian);
+ let spec =
+ AttributeSpecification::new(constants::DW_AT_low_pc, constants::DW_FORM_indirect, None);
+ assert_eq!(
+ parse_attribute(input, encoding, spec),
+ Err(Error::InvalidImplicitConst)
+ );
+ }
+
+ #[test]
+ fn test_attrs_iter() {
+ let encoding = Encoding {
+ format: Format::Dwarf32,
+ version: 4,
+ address_size: 4,
+ };
+ let unit = UnitHeader::new(
+ encoding,
+ 7,
+ UnitType::Compilation,
+ DebugAbbrevOffset(0x0807_0605),
+ DebugInfoOffset(0).into(),
+ EndianSlice::new(&[], LittleEndian),
+ );
+
+ let abbrev = Abbreviation::new(
+ 42,
+ constants::DW_TAG_subprogram,
+ constants::DW_CHILDREN_yes,
+ vec![
+ AttributeSpecification::new(constants::DW_AT_name, constants::DW_FORM_string, None),
+ AttributeSpecification::new(constants::DW_AT_low_pc, constants::DW_FORM_addr, None),
+ AttributeSpecification::new(
+ constants::DW_AT_high_pc,
+ constants::DW_FORM_addr,
+ None,
+ ),
+ ]
+ .into(),
+ );
+
+ // "foo", 42, 1337, 4 dangling bytes of 0xaa where children would be
+ let buf = [
+ 0x66, 0x6f, 0x6f, 0x00, 0x2a, 0x00, 0x00, 0x00, 0x39, 0x05, 0x00, 0x00, 0xaa, 0xaa,
+ 0xaa, 0xaa,
+ ];
+
+ let entry = DebuggingInformationEntry {
+ offset: UnitOffset(0),
+ attrs_slice: EndianSlice::new(&buf, LittleEndian),
+ attrs_len: Cell::new(None),
+ abbrev: &abbrev,
+ unit: &unit,
+ };
+
+ let mut attrs = AttrsIter {
+ input: EndianSlice::new(&buf, LittleEndian),
+ attributes: abbrev.attributes(),
+ entry: &entry,
+ };
+
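+ // `attrs_len` is cached lazily: it is only set once the iterator has consumed all
+ // of the entry's attributes.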
+ match attrs.next() {
+ Ok(Some(attr)) => {
+ assert_eq!(
+ attr,
+ Attribute {
+ name: constants::DW_AT_name,
+ value: AttributeValue::String(EndianSlice::new(b"foo", LittleEndian)),
+ }
+ );
+ }
+ otherwise => {
+ panic!("Unexpected parse result = {:#?}", otherwise);
+ }
+ }
+
+ assert!(entry.attrs_len.get().is_none());
+
+ match attrs.next() {
+ Ok(Some(attr)) => {
+ assert_eq!(
+ attr,
+ Attribute {
+ name: constants::DW_AT_low_pc,
+ value: AttributeValue::Addr(0x2a),
+ }
+ );
+ }
+ otherwise => {
+ panic!("Unexpected parse result = {:#?}", otherwise);
+ }
+ }
+
+ assert!(entry.attrs_len.get().is_none());
+
+ match attrs.next() {
+ Ok(Some(attr)) => {
+ assert_eq!(
+ attr,
+ Attribute {
+ name: constants::DW_AT_high_pc,
+ value: AttributeValue::Addr(0x539),
+ }
+ );
+ }
+ otherwise => {
+ panic!("Unexpected parse result = {:#?}", otherwise);
+ }
+ }
+
+ assert!(entry.attrs_len.get().is_none());
+
+ assert!(attrs.next().expect("should parse next").is_none());
+ assert!(entry.attrs_len.get().is_some());
+ assert_eq!(
+ entry.attrs_len.get().expect("should have entry.attrs_len"),
+ buf.len() - 4
+ )
+ }
+
+ #[test]
+ fn test_attrs_iter_incomplete() {
+ let encoding = Encoding {
+ format: Format::Dwarf32,
+ version: 4,
+ address_size: 4,
+ };
+ let unit = UnitHeader::new(
+ encoding,
+ 7,
+ UnitType::Compilation,
+ DebugAbbrevOffset(0x0807_0605),
+ DebugInfoOffset(0).into(),
+ EndianSlice::new(&[], LittleEndian),
+ );
+
+ let abbrev = Abbreviation::new(
+ 42,
+ constants::DW_TAG_subprogram,
+ constants::DW_CHILDREN_yes,
+ vec![
+ AttributeSpecification::new(constants::DW_AT_name, constants::DW_FORM_string, None),
+ AttributeSpecification::new(constants::DW_AT_low_pc, constants::DW_FORM_addr, None),
+ AttributeSpecification::new(
+ constants::DW_AT_high_pc,
+ constants::DW_FORM_addr,
+ None,
+ ),
+ ]
+ .into(),
+ );
+
+ // "foo"
+ let buf = [0x66, 0x6f, 0x6f, 0x00];
+
+ let entry = DebuggingInformationEntry {
+ offset: UnitOffset(0),
+ attrs_slice: EndianSlice::new(&buf, LittleEndian),
+ attrs_len: Cell::new(None),
+ abbrev: &abbrev,
+ unit: &unit,
+ };
+
+ let mut attrs = AttrsIter {
+ input: EndianSlice::new(&buf, LittleEndian),
+ attributes: abbrev.attributes(),
+ entry: &entry,
+ };
+
+ match attrs.next() {
+ Ok(Some(attr)) => {
+ assert_eq!(
+ attr,
+ Attribute {
+ name: constants::DW_AT_name,
+ value: AttributeValue::String(EndianSlice::new(b"foo", LittleEndian)),
+ }
+ );
+ }
+ otherwise => {
+ panic!("Unexpected parse result = {:#?}", otherwise);
+ }
+ }
+
+ assert!(entry.attrs_len.get().is_none());
+
+ // Return error for incomplete attribute.
+ assert!(attrs.next().is_err());
+ assert!(entry.attrs_len.get().is_none());
+
+ // Return error for all subsequent calls.
+ assert!(attrs.next().is_err());
+ assert!(attrs.next().is_err());
+ assert!(attrs.next().is_err());
+ assert!(attrs.next().is_err());
+ assert!(entry.attrs_len.get().is_none());
+ }
+
+ fn assert_entry_name<Endian>(entry: &DebuggingInformationEntry<EndianSlice<Endian>>, name: &str)
+ where
+ Endian: Endianity,
+ {
+ let value = entry
+ .attr_value(constants::DW_AT_name)
+ .expect("Should have parsed the name attribute")
+ .expect("Should have found the name attribute");
+
+ assert_eq!(
+ value,
+ AttributeValue::String(EndianSlice::new(name.as_bytes(), Endian::default()))
+ );
+ }
+
+ fn assert_current_name<Endian>(cursor: &EntriesCursor<EndianSlice<Endian>>, name: &str)
+ where
+ Endian: Endianity,
+ {
+ let entry = cursor.current().expect("Should have an entry result");
+ assert_entry_name(entry, name);
+ }
+
+ fn assert_next_entry<Endian>(cursor: &mut EntriesCursor<EndianSlice<Endian>>, name: &str)
+ where
+ Endian: Endianity,
+ {
+ cursor
+ .next_entry()
+ .expect("Should parse next entry")
+ .expect("Should have an entry");
+ assert_current_name(cursor, name);
+ }
+
+ fn assert_next_entry_null<Endian>(cursor: &mut EntriesCursor<EndianSlice<Endian>>)
+ where
+ Endian: Endianity,
+ {
+ cursor
+ .next_entry()
+ .expect("Should parse next entry")
+ .expect("Should have an entry");
+ assert!(cursor.current().is_none());
+ }
+
+ fn assert_next_dfs<Endian>(
+ cursor: &mut EntriesCursor<EndianSlice<Endian>>,
+ name: &str,
+ depth: isize,
+ ) where
+ Endian: Endianity,
+ {
+ {
+ let (val, entry) = cursor
+ .next_dfs()
+ .expect("Should parse next dfs")
+ .expect("Should not be done with traversal");
+ assert_eq!(val, depth);
+ assert_entry_name(entry, name);
+ }
+ assert_current_name(cursor, name);
+ }
+
+ fn assert_next_sibling<Endian>(cursor: &mut EntriesCursor<EndianSlice<Endian>>, name: &str)
+ where
+ Endian: Endianity,
+ {
+ {
+ let entry = cursor
+ .next_sibling()
+ .expect("Should parse next sibling")
+ .expect("Should not be done with traversal");
+ assert_entry_name(entry, name);
+ }
+ assert_current_name(cursor, name);
+ }
+
+ fn assert_valid_sibling_ptr<Endian>(cursor: &EntriesCursor<EndianSlice<Endian>>)
+ where
+ Endian: Endianity,
+ {
+ let sibling_ptr = cursor
+ .current()
+ .expect("Should have current entry")
+ .attr_value(constants::DW_AT_sibling);
+ match sibling_ptr {
+ Ok(Some(AttributeValue::UnitRef(offset))) => {
+ cursor
+ .unit
+ .range_from(offset..)
+ .expect("Sibling offset should be valid");
+ }
+ _ => panic!("Invalid sibling pointer {:?}", sibling_ptr),
+ }
+ }
+
+ fn entries_cursor_tests_abbrev_buf() -> Vec<u8> {
+ #[rustfmt::skip]
+ let section = Section::with_endian(Endian::Little)
+ .abbrev(1, DW_TAG_subprogram, DW_CHILDREN_yes)
+ .abbrev_attr(DW_AT_name, DW_FORM_string)
+ .abbrev_attr_null()
+ .abbrev_null();
+ section.get_contents().unwrap()
+ }
+
+ fn entries_cursor_tests_debug_info_buf() -> Vec<u8> {
+ #[rustfmt::skip]
+ let section = Section::with_endian(Endian::Little)
+ .die(1, |s| s.attr_string("001"))
+ .die(1, |s| s.attr_string("002"))
+ .die(1, |s| s.attr_string("003"))
+ .die_null()
+ .die_null()
+ .die(1, |s| s.attr_string("004"))
+ .die(1, |s| s.attr_string("005"))
+ .die_null()
+ .die(1, |s| s.attr_string("006"))
+ .die_null()
+ .die_null()
+ .die(1, |s| s.attr_string("007"))
+ .die(1, |s| s.attr_string("008"))
+ .die(1, |s| s.attr_string("009"))
+ .die_null()
+ .die_null()
+ .die_null()
+ .die(1, |s| s.attr_string("010"))
+ .die_null()
+ .die_null();
+ let entries_buf = section.get_contents().unwrap();
+
+ let encoding = Encoding {
+ format: Format::Dwarf32,
+ version: 4,
+ address_size: 4,
+ };
+ let mut unit = UnitHeader {
+ encoding,
+ unit_length: 0,
+ unit_type: UnitType::Compilation,
+ debug_abbrev_offset: DebugAbbrevOffset(0),
+ unit_offset: DebugInfoOffset(0).into(),
+ entries_buf: EndianSlice::new(&entries_buf, LittleEndian),
+ };
+ let section = Section::with_endian(Endian::Little).unit(&mut unit);
+ section.get_contents().unwrap()
+ }
+
+ #[test]
+ fn test_cursor_next_entry_incomplete() {
+ #[rustfmt::skip]
+ let section = Section::with_endian(Endian::Little)
+ .die(1, |s| s.attr_string("001"))
+ .die(1, |s| s.attr_string("002"))
+ .die(1, |s| s);
+ let entries_buf = section.get_contents().unwrap();
+
+ let encoding = Encoding {
+ format: Format::Dwarf32,
+ version: 4,
+ address_size: 4,
+ };
+ let mut unit = UnitHeader {
+ encoding,
+ unit_length: 0,
+ unit_type: UnitType::Compilation,
+ debug_abbrev_offset: DebugAbbrevOffset(0),
+ unit_offset: DebugInfoOffset(0).into(),
+ entries_buf: EndianSlice::new(&entries_buf, LittleEndian),
+ };
+ let section = Section::with_endian(Endian::Little).unit(&mut unit);
+ let info_buf = &section.get_contents().unwrap();
+ let debug_info = DebugInfo::new(info_buf, LittleEndian);
+
+ let unit = debug_info
+ .units()
+ .next()
+ .expect("should have a unit result")
+ .expect("and it should be ok");
+
+ let abbrevs_buf = &entries_cursor_tests_abbrev_buf();
+ let debug_abbrev = DebugAbbrev::new(abbrevs_buf, LittleEndian);
+
+ let abbrevs = unit
+ .abbreviations(&debug_abbrev)
+ .expect("Should parse abbreviations");
+
+ let mut cursor = unit.entries(&abbrevs);
+
+ assert_next_entry(&mut cursor, "001");
+ assert_next_entry(&mut cursor, "002");
+
+ {
+ // The abbreviation code is present, but none of the attribute values are.
+ cursor
+ .next_entry()
+ .expect("Should parse next entry")
+ .expect("Should have an entry");
+ let entry = cursor.current().expect("Should have an entry result");
+ assert!(entry.attrs().next().is_err());
+ }
+
+ assert!(cursor.next_entry().is_err());
+ assert!(cursor.next_entry().is_err());
+ }
+
+ #[test]
+ fn test_cursor_next_entry() {
+ let info_buf = &entries_cursor_tests_debug_info_buf();
+ let debug_info = DebugInfo::new(info_buf, LittleEndian);
+
+ let unit = debug_info
+ .units()
+ .next()
+ .expect("should have a unit result")
+ .expect("and it should be ok");
+
+ let abbrevs_buf = &entries_cursor_tests_abbrev_buf();
+ let debug_abbrev = DebugAbbrev::new(abbrevs_buf, LittleEndian);
+
+ let abbrevs = unit
+ .abbreviations(&debug_abbrev)
+ .expect("Should parse abbreviations");
+
+ let mut cursor = unit.entries(&abbrevs);
+
+ assert_next_entry(&mut cursor, "001");
+ assert_next_entry(&mut cursor, "002");
+ assert_next_entry(&mut cursor, "003");
+ assert_next_entry_null(&mut cursor);
+ assert_next_entry_null(&mut cursor);
+ assert_next_entry(&mut cursor, "004");
+ assert_next_entry(&mut cursor, "005");
+ assert_next_entry_null(&mut cursor);
+ assert_next_entry(&mut cursor, "006");
+ assert_next_entry_null(&mut cursor);
+ assert_next_entry_null(&mut cursor);
+ assert_next_entry(&mut cursor, "007");
+ assert_next_entry(&mut cursor, "008");
+ assert_next_entry(&mut cursor, "009");
+ assert_next_entry_null(&mut cursor);
+ assert_next_entry_null(&mut cursor);
+ assert_next_entry_null(&mut cursor);
+ assert_next_entry(&mut cursor, "010");
+ assert_next_entry_null(&mut cursor);
+ assert_next_entry_null(&mut cursor);
+
+ assert!(cursor
+ .next_entry()
+ .expect("Should parse next entry")
+ .is_none());
+ assert!(cursor.current().is_none());
+ }
+
+ #[test]
+ fn test_cursor_next_dfs() {
+ let info_buf = &entries_cursor_tests_debug_info_buf();
+ let debug_info = DebugInfo::new(info_buf, LittleEndian);
+
+ let unit = debug_info
+ .units()
+ .next()
+ .expect("should have a unit result")
+ .expect("and it should be ok");
+
+ let abbrevs_buf = &entries_cursor_tests_abbrev_buf();
+ let debug_abbrev = DebugAbbrev::new(abbrevs_buf, LittleEndian);
+
+ let abbrevs = unit
+ .abbreviations(&debug_abbrev)
+ .expect("Should parse abbreviations");
+
+ let mut cursor = unit.entries(&abbrevs);
+
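+ // The numeric argument is the depth delta reported by `next_dfs` relative to the previous entry.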
+ assert_next_dfs(&mut cursor, "001", 0);
+ assert_next_dfs(&mut cursor, "002", 1);
+ assert_next_dfs(&mut cursor, "003", 1);
+ assert_next_dfs(&mut cursor, "004", -1);
+ assert_next_dfs(&mut cursor, "005", 1);
+ assert_next_dfs(&mut cursor, "006", 0);
+ assert_next_dfs(&mut cursor, "007", -1);
+ assert_next_dfs(&mut cursor, "008", 1);
+ assert_next_dfs(&mut cursor, "009", 1);
+ assert_next_dfs(&mut cursor, "010", -2);
+
+ assert!(cursor.next_dfs().expect("Should parse next dfs").is_none());
+ assert!(cursor.current().is_none());
+ }
+
+ #[test]
+ fn test_cursor_next_sibling_no_sibling_ptr() {
+ let info_buf = &entries_cursor_tests_debug_info_buf();
+ let debug_info = DebugInfo::new(info_buf, LittleEndian);
+
+ let unit = debug_info
+ .units()
+ .next()
+ .expect("should have a unit result")
+ .expect("and it should be ok");
+
+ let abbrevs_buf = &entries_cursor_tests_abbrev_buf();
+ let debug_abbrev = DebugAbbrev::new(abbrevs_buf, LittleEndian);
+
+ let abbrevs = unit
+ .abbreviations(&debug_abbrev)
+ .expect("Should parse abbreviations");
+
+ let mut cursor = unit.entries(&abbrevs);
+
+ assert_next_dfs(&mut cursor, "001", 0);
+
+ // Down to the first child of the root entry.
+
+ assert_next_dfs(&mut cursor, "002", 1);
+
+ // Now iterate all children of the root via `next_sibling`.
+
+ assert_next_sibling(&mut cursor, "004");
+ assert_next_sibling(&mut cursor, "007");
+ assert_next_sibling(&mut cursor, "010");
+
+ // There should be no more siblings.
+
+ assert!(cursor
+ .next_sibling()
+ .expect("Should parse next sibling")
+ .is_none());
+ assert!(cursor.current().is_none());
+ }
+
+ #[test]
+ fn test_cursor_next_sibling_continuation() {
+ let info_buf = &entries_cursor_tests_debug_info_buf();
+ let debug_info = DebugInfo::new(info_buf, LittleEndian);
+
+ let unit = debug_info
+ .units()
+ .next()
+ .expect("should have a unit result")
+ .expect("and it should be ok");
+
+ let abbrevs_buf = &entries_cursor_tests_abbrev_buf();
+ let debug_abbrev = DebugAbbrev::new(abbrevs_buf, LittleEndian);
+
+ let abbrevs = unit
+ .abbreviations(&debug_abbrev)
+ .expect("Should parse abbreviations");
+
+ let mut cursor = unit.entries(&abbrevs);
+
+ assert_next_dfs(&mut cursor, "001", 0);
+
+ // Down to the first child of the root entry.
+
+ assert_next_dfs(&mut cursor, "002", 1);
+
+ // Get the next sibling, then iterate its children.
+
+ assert_next_sibling(&mut cursor, "004");
+ assert_next_dfs(&mut cursor, "005", 1);
+ assert_next_sibling(&mut cursor, "006");
+ assert!(cursor
+ .next_sibling()
+ .expect("Should parse next sibling")
+ .is_none());
+ assert!(cursor
+ .next_sibling()
+ .expect("Should parse next sibling")
+ .is_none());
+ assert!(cursor
+ .next_sibling()
+ .expect("Should parse next sibling")
+ .is_none());
+ assert!(cursor
+ .next_sibling()
+ .expect("Should parse next sibling")
+ .is_none());
+
+ // And we should be able to continue with the children of the root entry.
+
+ assert_next_dfs(&mut cursor, "007", -1);
+ assert_next_sibling(&mut cursor, "010");
+
+ // There should be no more siblings.
+
+ assert!(cursor
+ .next_sibling()
+ .expect("Should parse next sibling")
+ .is_none());
+ assert!(cursor.current().is_none());
+ }
+
+ fn entries_cursor_sibling_abbrev_buf() -> Vec<u8> {
+ #[rustfmt::skip]
+ let section = Section::with_endian(Endian::Little)
+ .abbrev(1, DW_TAG_subprogram, DW_CHILDREN_yes)
+ .abbrev_attr(DW_AT_name, DW_FORM_string)
+ .abbrev_attr(DW_AT_sibling, DW_FORM_ref1)
+ .abbrev_attr_null()
+ .abbrev(2, DW_TAG_subprogram, DW_CHILDREN_yes)
+ .abbrev_attr(DW_AT_name, DW_FORM_string)
+ .abbrev_attr_null()
+ .abbrev_null();
+ section.get_contents().unwrap()
+ }
+
+ fn entries_cursor_sibling_entries_buf(header_size: usize) -> Vec<u8> {
+ let start = Label::new();
+ let sibling004_ref = Label::new();
+ let sibling004 = Label::new();
+ let sibling009_ref = Label::new();
+ let sibling009 = Label::new();
+
+ #[rustfmt::skip]
+ let section = Section::with_endian(Endian::Little)
+ .mark(&start)
+ .die(2, |s| s.attr_string("001"))
+ // Valid sibling attribute.
+ .die(1, |s| s.attr_string("002").D8(&sibling004_ref))
+ // Invalid abbreviation code, to ensure that the sibling attribute was used to skip this entry.
+ .die(10, |s| s.attr_string("003"))
+ .die_null()
+ .die_null()
+ .mark(&sibling004)
+ // Invalid sibling attribute.
+ .die(1, |s| s.attr_string("004").attr_ref1(255))
+ .die(2, |s| s.attr_string("005"))
+ .die_null()
+ .die_null()
+ // Sibling attribute in child only.
+ .die(2, |s| s.attr_string("006"))
+ // Valid sibling attribute.
+ .die(1, |s| s.attr_string("007").D8(&sibling009_ref))
+ // Invalid abbreviation code, to ensure that the sibling attribute was used to skip this entry.
+ .die(10, |s| s.attr_string("008"))
+ .die_null()
+ .die_null()
+ .mark(&sibling009)
+ .die(2, |s| s.attr_string("009"))
+ .die_null()
+ .die_null()
+ // No sibling attribute.
+ .die(2, |s| s.attr_string("010"))
+ .die(2, |s| s.attr_string("011"))
+ .die_null()
+ .die_null()
+ .die_null();
+
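+ // DW_AT_sibling references are unit-relative offsets, so include the size of the
+ // unit header when resolving the labels.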
+ let offset = header_size as u64 + (&sibling004 - &start) as u64;
+ sibling004_ref.set_const(offset);
+
+ let offset = header_size as u64 + (&sibling009 - &start) as u64;
+ sibling009_ref.set_const(offset);
+
+ section.get_contents().unwrap()
+ }
+
+ fn test_cursor_next_sibling_with_ptr(cursor: &mut EntriesCursor<EndianSlice<LittleEndian>>) {
+ assert_next_dfs(cursor, "001", 0);
+
+ // Down to the first child of the root.
+
+ assert_next_dfs(cursor, "002", 1);
+
+ // Now iterate all children of the root via `next_sibling`.
+
+ assert_valid_sibling_ptr(cursor);
+ assert_next_sibling(cursor, "004");
+ assert_next_sibling(cursor, "006");
+ assert_next_sibling(cursor, "010");
+
+ // There should be no more siblings.
+
+ assert!(cursor
+ .next_sibling()
+ .expect("Should parse next sibling")
+ .is_none());
+ assert!(cursor.current().is_none());
+ }
+
+ #[test]
+ fn test_debug_info_next_sibling_with_ptr() {
+ let encoding = Encoding {
+ format: Format::Dwarf32,
+ version: 4,
+ address_size: 4,
+ };
+
+ let mut unit = UnitHeader {
+ encoding,
+ unit_length: 0,
+ unit_type: UnitType::Compilation,
+ debug_abbrev_offset: DebugAbbrevOffset(0),
+ unit_offset: DebugInfoOffset(0).into(),
+ entries_buf: EndianSlice::new(&[], LittleEndian),
+ };
+ let header_size = unit.size_of_header();
+ let entries_buf = entries_cursor_sibling_entries_buf(header_size);
+ unit.entries_buf = EndianSlice::new(&entries_buf, LittleEndian);
+ let section = Section::with_endian(Endian::Little).unit(&mut unit);
+ let info_buf = section.get_contents().unwrap();
+ let debug_info = DebugInfo::new(&info_buf, LittleEndian);
+
+ let unit = debug_info
+ .units()
+ .next()
+ .expect("should have a unit result")
+ .expect("and it should be ok");
+
+ let abbrev_buf = entries_cursor_sibling_abbrev_buf();
+ let debug_abbrev = DebugAbbrev::new(&abbrev_buf, LittleEndian);
+
+ let abbrevs = unit
+ .abbreviations(&debug_abbrev)
+ .expect("Should parse abbreviations");
+
+ let mut cursor = unit.entries(&abbrevs);
+ test_cursor_next_sibling_with_ptr(&mut cursor);
+ }
+
+ #[test]
+ fn test_debug_types_next_sibling_with_ptr() {
+ let encoding = Encoding {
+ format: Format::Dwarf32,
+ version: 4,
+ address_size: 4,
+ };
+ let mut unit = UnitHeader {
+ encoding,
+ unit_length: 0,
+ unit_type: UnitType::Type {
+ type_signature: DebugTypeSignature(0),
+ type_offset: UnitOffset(0),
+ },
+ debug_abbrev_offset: DebugAbbrevOffset(0),
+ unit_offset: DebugTypesOffset(0).into(),
+ entries_buf: EndianSlice::new(&[], LittleEndian),
+ };
+ let header_size = unit.size_of_header();
+ let entries_buf = entries_cursor_sibling_entries_buf(header_size);
+ unit.entries_buf = EndianSlice::new(&entries_buf, LittleEndian);
+ let section = Section::with_endian(Endian::Little).unit(&mut unit);
+ let info_buf = section.get_contents().unwrap();
+ let debug_types = DebugTypes::new(&info_buf, LittleEndian);
+
+ let unit = debug_types
+ .units()
+ .next()
+ .expect("should have a unit result")
+ .expect("and it should be ok");
+
+ let abbrev_buf = entries_cursor_sibling_abbrev_buf();
+ let debug_abbrev = DebugAbbrev::new(&abbrev_buf, LittleEndian);
+
+ let abbrevs = unit
+ .abbreviations(&debug_abbrev)
+ .expect("Should parse abbreviations");
+
+ let mut cursor = unit.entries(&abbrevs);
+ test_cursor_next_sibling_with_ptr(&mut cursor);
+ }
+
+ #[test]
+ fn test_entries_at_offset() {
+ let info_buf = &entries_cursor_tests_debug_info_buf();
+ let debug_info = DebugInfo::new(info_buf, LittleEndian);
+
+ let unit = debug_info
+ .units()
+ .next()
+ .expect("should have a unit result")
+ .expect("and it should be ok");
+
+ let abbrevs_buf = &entries_cursor_tests_abbrev_buf();
+ let debug_abbrev = DebugAbbrev::new(abbrevs_buf, LittleEndian);
+
+ let abbrevs = unit
+ .abbreviations(&debug_abbrev)
+ .expect("Should parse abbreviations");
+
+ let mut cursor = unit
+ .entries_at_offset(&abbrevs, UnitOffset(unit.header_size()))
+ .unwrap();
+ assert_next_entry(&mut cursor, "001");
+
+ let cursor = unit.entries_at_offset(&abbrevs, UnitOffset(0));
+ match cursor {
+ Err(Error::OffsetOutOfBounds) => {}
+ otherwise => {
+ panic!("Unexpected parse result = {:#?}", otherwise);
+ }
+ }
+ }
+
+ fn entries_tree_tests_debug_abbrevs_buf() -> Vec<u8> {
+ #[rustfmt::skip]
+ let section = Section::with_endian(Endian::Little)
+ .abbrev(1, DW_TAG_subprogram, DW_CHILDREN_yes)
+ .abbrev_attr(DW_AT_name, DW_FORM_string)
+ .abbrev_attr_null()
+ .abbrev(2, DW_TAG_subprogram, DW_CHILDREN_no)
+ .abbrev_attr(DW_AT_name, DW_FORM_string)
+ .abbrev_attr_null()
+ .abbrev_null()
+ .get_contents()
+ .unwrap();
+ section
+ }
+
+ fn entries_tree_tests_debug_info_buf(header_size: usize) -> (Vec<u8>, UnitOffset) {
+ let start = Label::new();
+ let entry2 = Label::new();
+ #[rustfmt::skip]
+ let section = Section::with_endian(Endian::Little)
+ .mark(&start)
+ .die(1, |s| s.attr_string("root"))
+ .die(1, |s| s.attr_string("1"))
+ .die(1, |s| s.attr_string("1a"))
+ .die_null()
+ .die(2, |s| s.attr_string("1b"))
+ .die_null()
+ .mark(&entry2)
+ .die(1, |s| s.attr_string("2"))
+ .die(1, |s| s.attr_string("2a"))
+ .die(1, |s| s.attr_string("2a1"))
+ .die_null()
+ .die_null()
+ .die(1, |s| s.attr_string("2b"))
+ .die(2, |s| s.attr_string("2b1"))
+ .die_null()
+ .die_null()
+ .die(1, |s| s.attr_string("3"))
+ .die(1, |s| s.attr_string("3a"))
+ .die(2, |s| s.attr_string("3a1"))
+ .die(2, |s| s.attr_string("3a2"))
+ .die_null()
+ .die(2, |s| s.attr_string("3b"))
+ .die_null()
+ .die(2, |s| s.attr_string("final"))
+ .die_null()
+ .get_contents()
+ .unwrap();
+ let entry2 = UnitOffset(header_size + (&entry2 - &start) as usize);
+ (section, entry2)
+ }
+
+ #[test]
+ fn test_entries_tree() {
+ fn assert_entry<'input, 'abbrev, 'unit, 'tree, Endian>(
+ node: Result<
+ Option<EntriesTreeNode<'abbrev, 'unit, 'tree, EndianSlice<'input, Endian>>>,
+ >,
+ name: &str,
+ ) -> EntriesTreeIter<'abbrev, 'unit, 'tree, EndianSlice<'input, Endian>>
+ where
+ Endian: Endianity,
+ {
+ let node = node
+ .expect("Should parse entry")
+ .expect("Should have entry");
+ assert_entry_name(node.entry(), name);
+ node.children()
+ }
+
+ fn assert_null<E: Endianity>(node: Result<Option<EntriesTreeNode<EndianSlice<E>>>>) {
+ match node {
+ Ok(None) => {}
+ otherwise => {
+ panic!("Unexpected parse result = {:#?}", otherwise);
+ }
+ }
+ }
+
+ let abbrevs_buf = entries_tree_tests_debug_abbrevs_buf();
+ let debug_abbrev = DebugAbbrev::new(&abbrevs_buf, LittleEndian);
+
+ let encoding = Encoding {
+ format: Format::Dwarf32,
+ version: 4,
+ address_size: 4,
+ };
+ let mut unit = UnitHeader {
+ encoding,
+ unit_length: 0,
+ unit_type: UnitType::Compilation,
+ debug_abbrev_offset: DebugAbbrevOffset(0),
+ unit_offset: DebugInfoOffset(0).into(),
+ entries_buf: EndianSlice::new(&[], LittleEndian),
+ };
+ let header_size = unit.size_of_header();
+ let (entries_buf, entry2) = entries_tree_tests_debug_info_buf(header_size);
+ unit.entries_buf = EndianSlice::new(&entries_buf, LittleEndian);
+ let info_buf = Section::with_endian(Endian::Little)
+ .unit(&mut unit)
+ .get_contents()
+ .unwrap();
+ let debug_info = DebugInfo::new(&info_buf, LittleEndian);
+
+ let unit = debug_info
+ .units()
+ .next()
+ .expect("Should parse unit")
+ .expect("and it should be some");
+ let abbrevs = unit
+ .abbreviations(&debug_abbrev)
+ .expect("Should parse abbreviations");
+ let mut tree = unit
+ .entries_tree(&abbrevs, None)
+ .expect("Should have entries tree");
+
+ // Test we can restart iteration of the tree.
+ {
+ let mut iter = assert_entry(tree.root().map(Some), "root");
+ assert_entry(iter.next(), "1");
+ }
+ {
+ let mut iter = assert_entry(tree.root().map(Some), "root");
+ assert_entry(iter.next(), "1");
+ }
+
+ let mut iter = assert_entry(tree.root().map(Some), "root");
+ {
+ // Test iteration with children.
+ let mut iter = assert_entry(iter.next(), "1");
+ {
+ // Test iteration with children flag, but no children.
+ let mut iter = assert_entry(iter.next(), "1a");
+ assert_null(iter.next());
+ assert_null(iter.next());
+ }
+ {
+ // Test iteration without children flag.
+ let mut iter = assert_entry(iter.next(), "1b");
+ assert_null(iter.next());
+ assert_null(iter.next());
+ }
+ assert_null(iter.next());
+ assert_null(iter.next());
+ }
+ {
+ // Test skipping over children.
+ let mut iter = assert_entry(iter.next(), "2");
+ assert_entry(iter.next(), "2a");
+ assert_entry(iter.next(), "2b");
+ assert_null(iter.next());
+ }
+ {
+ // Test skipping after partial iteration.
+ let mut iter = assert_entry(iter.next(), "3");
+ {
+ let mut iter = assert_entry(iter.next(), "3a");
+ assert_entry(iter.next(), "3a1");
+ // Parent iter should be able to skip over "3a2".
+ }
+ assert_entry(iter.next(), "3b");
+ assert_null(iter.next());
+ }
+ assert_entry(iter.next(), "final");
+ assert_null(iter.next());
+
+ // Test starting at an offset.
+ let mut tree = unit
+ .entries_tree(&abbrevs, Some(entry2))
+ .expect("Should have entries tree");
+ let mut iter = assert_entry(tree.root().map(Some), "2");
+ assert_entry(iter.next(), "2a");
+ assert_entry(iter.next(), "2b");
+ assert_null(iter.next());
+ }
+
+ #[test]
+ fn test_entries_raw() {
+ fn assert_abbrev<'input, 'abbrev, 'unit, Endian>(
+ entries: &mut EntriesRaw<'abbrev, 'unit, EndianSlice<'input, Endian>>,
+ tag: DwTag,
+ ) -> &'abbrev Abbreviation
+ where
+ Endian: Endianity,
+ {
+ let abbrev = entries
+ .read_abbreviation()
+ .expect("Should parse abbrev")
+ .expect("Should have abbrev");
+ assert_eq!(abbrev.tag(), tag);
+ abbrev
+ }
+
+ fn assert_null<'input, 'abbrev, 'unit, Endian>(
+ entries: &mut EntriesRaw<'abbrev, 'unit, EndianSlice<'input, Endian>>,
+ ) where
+ Endian: Endianity,
+ {
+ match entries.read_abbreviation() {
+ Ok(None) => {}
+ otherwise => {
+ panic!("Unexpected parse result = {:#?}", otherwise);
+ }
+ }
+ }
+
+ fn assert_attr<'input, 'abbrev, 'unit, Endian>(
+ entries: &mut EntriesRaw<'abbrev, 'unit, EndianSlice<'input, Endian>>,
+ spec: Option<AttributeSpecification>,
+ name: DwAt,
+ value: &str,
+ ) where
+ Endian: Endianity,
+ {
+ let spec = spec.expect("Should have attribute specification");
+ let attr = entries
+ .read_attribute(spec)
+ .expect("Should parse attribute");
+ assert_eq!(attr.name(), name);
+ assert_eq!(
+ attr.value(),
+ AttributeValue::String(EndianSlice::new(value.as_bytes(), Endian::default()))
+ );
+ }
+
+ #[rustfmt::skip]
+ let section = Section::with_endian(Endian::Little)
+ .abbrev(1, DW_TAG_subprogram, DW_CHILDREN_yes)
+ .abbrev_attr(DW_AT_name, DW_FORM_string)
+ .abbrev_attr(DW_AT_linkage_name, DW_FORM_string)
+ .abbrev_attr_null()
+ .abbrev(2, DW_TAG_variable, DW_CHILDREN_no)
+ .abbrev_attr(DW_AT_name, DW_FORM_string)
+ .abbrev_attr_null()
+ .abbrev_null();
+ let abbrevs_buf = section.get_contents().unwrap();
+ let debug_abbrev = DebugAbbrev::new(&abbrevs_buf, LittleEndian);
+
+ #[rustfmt::skip]
+ let section = Section::with_endian(Endian::Little)
+ .die(1, |s| s.attr_string("f1").attr_string("l1"))
+ .die(2, |s| s.attr_string("v1"))
+ .die(2, |s| s.attr_string("v2"))
+ .die(1, |s| s.attr_string("f2").attr_string("l2"))
+ .die_null()
+ .die_null();
+ let entries_buf = section.get_contents().unwrap();
+
+ let encoding = Encoding {
+ format: Format::Dwarf32,
+ version: 4,
+ address_size: 4,
+ };
+ let mut unit = UnitHeader {
+ encoding,
+ unit_length: 0,
+ unit_type: UnitType::Compilation,
+ debug_abbrev_offset: DebugAbbrevOffset(0),
+ unit_offset: DebugInfoOffset(0).into(),
+ entries_buf: EndianSlice::new(&entries_buf, LittleEndian),
+ };
+ let section = Section::with_endian(Endian::Little).unit(&mut unit);
+ let info_buf = section.get_contents().unwrap();
+ let debug_info = DebugInfo::new(&info_buf, LittleEndian);
+
+ let unit = debug_info
+ .units()
+ .next()
+ .expect("should have a unit result")
+ .expect("and it should be ok");
+
+ let abbrevs = unit
+ .abbreviations(&debug_abbrev)
+ .expect("Should parse abbreviations");
+
+ let mut entries = unit
+ .entries_raw(&abbrevs, None)
+ .expect("Should have entries");
+
+ assert_eq!(entries.next_depth(), 0);
+ let abbrev = assert_abbrev(&mut entries, DW_TAG_subprogram);
+ let mut attrs = abbrev.attributes().iter().copied();
+ assert_attr(&mut entries, attrs.next(), DW_AT_name, "f1");
+ assert_attr(&mut entries, attrs.next(), DW_AT_linkage_name, "l1");
+ assert!(attrs.next().is_none());
+
+ assert_eq!(entries.next_depth(), 1);
+ let abbrev = assert_abbrev(&mut entries, DW_TAG_variable);
+ let mut attrs = abbrev.attributes().iter().copied();
+ assert_attr(&mut entries, attrs.next(), DW_AT_name, "v1");
+ assert!(attrs.next().is_none());
+
+ assert_eq!(entries.next_depth(), 1);
+ let abbrev = assert_abbrev(&mut entries, DW_TAG_variable);
+ let mut attrs = abbrev.attributes().iter().copied();
+ assert_attr(&mut entries, attrs.next(), DW_AT_name, "v2");
+ assert!(attrs.next().is_none());
+
+ assert_eq!(entries.next_depth(), 1);
+ let abbrev = assert_abbrev(&mut entries, DW_TAG_subprogram);
+ let mut attrs = abbrev.attributes().iter().copied();
+ assert_attr(&mut entries, attrs.next(), DW_AT_name, "f2");
+ assert_attr(&mut entries, attrs.next(), DW_AT_linkage_name, "l2");
+ assert!(attrs.next().is_none());
+
+ assert_eq!(entries.next_depth(), 2);
+ assert_null(&mut entries);
+
+ assert_eq!(entries.next_depth(), 1);
+ assert_null(&mut entries);
+
+ assert_eq!(entries.next_depth(), 0);
+ assert!(entries.is_empty());
+ }
+
+ #[test]
+ fn test_debug_info_offset() {
+ let padding = &[0; 10];
+ let entries = &[0; 20];
+ let encoding = Encoding {
+ format: Format::Dwarf32,
+ version: 4,
+ address_size: 4,
+ };
+ let mut unit = UnitHeader {
+ encoding,
+ unit_length: 0,
+ unit_type: UnitType::Compilation,
+ debug_abbrev_offset: DebugAbbrevOffset(0),
+ unit_offset: DebugInfoOffset(0).into(),
+ entries_buf: EndianSlice::new(entries, LittleEndian),
+ };
+ Section::with_endian(Endian::Little)
+ .append_bytes(padding)
+ .unit(&mut unit);
+ let offset = padding.len();
+ let header_length = unit.size_of_header();
+ let length = unit.length_including_self();
+ assert_eq!(DebugInfoOffset(0).to_unit_offset(&unit), None);
+ assert_eq!(DebugInfoOffset(offset - 1).to_unit_offset(&unit), None);
+ assert_eq!(DebugInfoOffset(offset).to_unit_offset(&unit), None);
+ assert_eq!(
+ DebugInfoOffset(offset + header_length - 1).to_unit_offset(&unit),
+ None
+ );
+ assert_eq!(
+ DebugInfoOffset(offset + header_length).to_unit_offset(&unit),
+ Some(UnitOffset(header_length))
+ );
+ assert_eq!(
+ DebugInfoOffset(offset + length - 1).to_unit_offset(&unit),
+ Some(UnitOffset(length - 1))
+ );
+ assert_eq!(DebugInfoOffset(offset + length).to_unit_offset(&unit), None);
+ assert_eq!(
+ UnitOffset(header_length).to_debug_info_offset(&unit),
+ Some(DebugInfoOffset(offset + header_length))
+ );
+ assert_eq!(
+ UnitOffset(length - 1).to_debug_info_offset(&unit),
+ Some(DebugInfoOffset(offset + length - 1))
+ );
+ }
+
+ #[test]
+ fn test_debug_types_offset() {
+ let padding = &[0; 10];
+ let entries = &[0; 20];
+ let encoding = Encoding {
+ format: Format::Dwarf32,
+ version: 4,
+ address_size: 4,
+ };
+ let mut unit = UnitHeader {
+ encoding,
+ unit_length: 0,
+ unit_type: UnitType::Type {
+ type_signature: DebugTypeSignature(0),
+ type_offset: UnitOffset(0),
+ },
+ debug_abbrev_offset: DebugAbbrevOffset(0),
+ unit_offset: DebugTypesOffset(0).into(),
+ entries_buf: EndianSlice::new(entries, LittleEndian),
+ };
+ Section::with_endian(Endian::Little)
+ .append_bytes(padding)
+ .unit(&mut unit);
+ let offset = padding.len();
+ let header_length = unit.size_of_header();
+ let length = unit.length_including_self();
+ assert_eq!(DebugTypesOffset(0).to_unit_offset(&unit), None);
+ assert_eq!(DebugTypesOffset(offset - 1).to_unit_offset(&unit), None);
+ assert_eq!(DebugTypesOffset(offset).to_unit_offset(&unit), None);
+ assert_eq!(
+ DebugTypesOffset(offset + header_length - 1).to_unit_offset(&unit),
+ None
+ );
+ assert_eq!(
+ DebugTypesOffset(offset + header_length).to_unit_offset(&unit),
+ Some(UnitOffset(header_length))
+ );
+ assert_eq!(
+ DebugTypesOffset(offset + length - 1).to_unit_offset(&unit),
+ Some(UnitOffset(length - 1))
+ );
+ assert_eq!(
+ DebugTypesOffset(offset + length).to_unit_offset(&unit),
+ None
+ );
+ assert_eq!(
+ UnitOffset(header_length).to_debug_types_offset(&unit),
+ Some(DebugTypesOffset(offset + header_length))
+ );
+ assert_eq!(
+ UnitOffset(length - 1).to_debug_types_offset(&unit),
+ Some(DebugTypesOffset(offset + length - 1))
+ );
+ }
+
+ #[test]
+ fn test_length_including_self() {
+ let encoding = Encoding {
+ format: Format::Dwarf32,
+ version: 4,
+ address_size: 4,
+ };
+ let mut unit = UnitHeader {
+ encoding,
+ unit_length: 0,
+ unit_type: UnitType::Compilation,
+ debug_abbrev_offset: DebugAbbrevOffset(0),
+ unit_offset: DebugInfoOffset(0).into(),
+ entries_buf: EndianSlice::new(&[], LittleEndian),
+ };
+ unit.encoding.format = Format::Dwarf32;
+ assert_eq!(unit.length_including_self(), 4);
+ unit.encoding.format = Format::Dwarf64;
+ assert_eq!(unit.length_including_self(), 12);
+ unit.unit_length = 10;
+ assert_eq!(unit.length_including_self(), 22);
+ }
+
+ #[test]
+ fn test_parse_type_unit_abbrevs() {
+ let types_buf = [
+ // Type unit header
+ 0x25, 0x00, 0x00, 0x00, // 32-bit unit length = 37
+ 0x04, 0x00, // Version 4
+ 0x00, 0x00, 0x00, 0x00, // debug_abbrev_offset
+ 0x04, // Address size
+ 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, // Type signature
+ 0x01, 0x02, 0x03, 0x04, // Type offset
+ // DIEs
+ 0x01, // Abbreviation code
+ 0x66, 0x6f, 0x6f, 0x00, // DW_FORM_string attribute = "foo\0"
+ // Children of the first DIE:
+ 0x01, // Abbreviation code
+ 0x66, 0x6f, 0x6f, 0x00, // DW_FORM_string attribute = "foo\0"
+ // Children of the second DIE:
+ 0x01, // Abbreviation code
+ 0x66, 0x6f, 0x6f, 0x00, // DW_FORM_string attribute = "foo\0"
+ 0x00, // End of children
+ 0x00, // End of children
+ 0x00, // End of children
+ ];
+ let debug_types = DebugTypes::new(&types_buf, LittleEndian);
+
+ let abbrev_buf = [
+ 0x01, // Code
+ 0x2e, // DW_TAG_subprogram
+ 0x01, // DW_CHILDREN_yes
+ // Begin attributes
+ 0x03, // Attribute name = DW_AT_name
+ 0x08, // Attribute form = DW_FORM_string
+ 0x00, 0x00, // End of attributes
+ 0x00, // Null terminator (end of abbreviations)
+ ];
+
+ let get_some_type_unit = || debug_types.units().next().unwrap().unwrap();
+
+ let unit = get_some_type_unit();
+
+ let read_debug_abbrev_section_somehow = || &abbrev_buf;
+ let debug_abbrev = DebugAbbrev::new(read_debug_abbrev_section_somehow(), LittleEndian);
+ let _abbrevs_for_unit = unit.abbreviations(&debug_abbrev).unwrap();
+ }
+}
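The raw-entries test above doubles as a usage guide: the caller interleaves `read_abbreviation` and `read_attribute` calls itself and tracks nesting with `next_depth`. A minimal consumer-side sketch using only the calls exercised by that test (the function name is illustrative):

    fn walk_raw<R: gimli::Reader>(
        debug_info: &gimli::DebugInfo<R>,
        debug_abbrev: &gimli::DebugAbbrev<R>,
    ) -> gimli::Result<()> {
        let mut units = debug_info.units();
        while let Some(unit) = units.next()? {
            let abbrevs = unit.abbreviations(debug_abbrev)?;
            let mut entries = unit.entries_raw(&abbrevs, None)?;
            while !entries.is_empty() {
                let _depth = entries.next_depth();
                // `None` is a null entry terminating a sibling list.
                let abbrev = match entries.read_abbreviation()? {
                    Some(abbrev) => abbrev,
                    None => continue,
                };
                // Every attribute must be read before the next abbreviation.
                for spec in abbrev.attributes() {
                    let _attr = entries.read_attribute(*spec)?;
                }
            }
        }
        Ok(())
    }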
diff --git a/vendor/gimli-0.26.2/src/read/util.rs b/vendor/gimli-0.26.2/src/read/util.rs
new file mode 100644
index 000000000..16eafdde4
--- /dev/null
+++ b/vendor/gimli-0.26.2/src/read/util.rs
@@ -0,0 +1,250 @@
+#[cfg(feature = "read")]
+use alloc::boxed::Box;
+#[cfg(feature = "read")]
+use alloc::vec::Vec;
+use core::fmt;
+use core::mem::MaybeUninit;
+use core::ops;
+use core::ptr;
+use core::slice;
+
+mod sealed {
+ // SAFETY: Implementer must not modify the content in storage.
+ pub unsafe trait Sealed {
+ type Storage;
+
+ fn new_storage() -> Self::Storage;
+
+ fn grow(_storage: &mut Self::Storage, _additional: usize) -> Result<(), CapacityFull> {
+ Err(CapacityFull)
+ }
+ }
+
+ #[derive(Clone, Copy, Debug)]
+ pub struct CapacityFull;
+}
+
+use sealed::*;
+
+/// Marker trait for types that can be used as backing storage when a growable array type is needed.
+///
+/// This trait is sealed and cannot be implemented for types outside this crate.
+pub trait ArrayLike: Sealed {
+ /// Type of the elements being stored.
+ type Item;
+
+ #[doc(hidden)]
+ fn as_slice(storage: &Self::Storage) -> &[MaybeUninit<Self::Item>];
+
+ #[doc(hidden)]
+ fn as_mut_slice(storage: &mut Self::Storage) -> &mut [MaybeUninit<Self::Item>];
+}
+
+// Use macro since const generics can't be used due to MSRV.
+macro_rules! impl_array {
+ () => {};
+ ($n:literal $($rest:tt)*) => {
+ // SAFETY: does not modify the content in storage.
+ unsafe impl<T> Sealed for [T; $n] {
+ type Storage = [MaybeUninit<T>; $n];
+
+ fn new_storage() -> Self::Storage {
+ // SAFETY: An uninitialized `[MaybeUninit<_>; _]` is valid.
+ unsafe { MaybeUninit::uninit().assume_init() }
+ }
+ }
+
+ impl<T> ArrayLike for [T; $n] {
+ type Item = T;
+
+ fn as_slice(storage: &Self::Storage) -> &[MaybeUninit<T>] {
+ storage
+ }
+
+ fn as_mut_slice(storage: &mut Self::Storage) -> &mut [MaybeUninit<T>] {
+ storage
+ }
+ }
+
+ impl_array!($($rest)*);
+ }
+}
+
+impl_array!(0 1 2 3 4 8 16 32 64 128 192);
+
+#[cfg(feature = "read")]
+unsafe impl<T> Sealed for Vec<T> {
+ type Storage = Box<[MaybeUninit<T>]>;
+
+ fn new_storage() -> Self::Storage {
+ Box::new([])
+ }
+
+ fn grow(storage: &mut Self::Storage, additional: usize) -> Result<(), CapacityFull> {
+ let mut vec: Vec<_> = core::mem::replace(storage, Box::new([])).into();
+ vec.reserve(additional);
+ // SAFETY: This is a `Vec` of `MaybeUninit`.
+ unsafe { vec.set_len(vec.capacity()) };
+ *storage = vec.into_boxed_slice();
+ Ok(())
+ }
+}
+
+#[cfg(feature = "read")]
+impl<T> ArrayLike for Vec<T> {
+ type Item = T;
+
+ fn as_slice(storage: &Self::Storage) -> &[MaybeUninit<T>] {
+ storage
+ }
+
+ fn as_mut_slice(storage: &mut Self::Storage) -> &mut [MaybeUninit<T>] {
+ storage
+ }
+}
+
+pub(crate) struct ArrayVec<A: ArrayLike> {
+ storage: A::Storage,
+ len: usize,
+}
+
+impl<A: ArrayLike> ArrayVec<A> {
+ pub fn new() -> Self {
+ Self {
+ storage: A::new_storage(),
+ len: 0,
+ }
+ }
+
+ pub fn clear(&mut self) {
+ let ptr: *mut [A::Item] = &mut **self;
+ // Set length first so the type invariant is upheld even if `drop_in_place` panics.
+ self.len = 0;
+ // SAFETY: `ptr` contains valid elements only and we "forget" them by setting the length.
+ unsafe { ptr::drop_in_place(ptr) };
+ }
+
+ pub fn try_push(&mut self, value: A::Item) -> Result<(), CapacityFull> {
+ let mut storage = A::as_mut_slice(&mut self.storage);
+ if self.len >= storage.len() {
+ A::grow(&mut self.storage, 1)?;
+ storage = A::as_mut_slice(&mut self.storage);
+ }
+
+ storage[self.len] = MaybeUninit::new(value);
+ self.len += 1;
+ Ok(())
+ }
+
+ pub fn try_insert(&mut self, index: usize, element: A::Item) -> Result<(), CapacityFull> {
+ assert!(index <= self.len);
+
+ let mut storage = A::as_mut_slice(&mut self.storage);
+ if self.len >= storage.len() {
+ A::grow(&mut self.storage, 1)?;
+ storage = A::as_mut_slice(&mut self.storage);
+ }
+
+ // SAFETY: storage[index] is filled later.
+ unsafe {
+ let p = storage.as_mut_ptr().add(index);
+ core::ptr::copy(p as *const _, p.add(1), self.len - index);
+ }
+ storage[index] = MaybeUninit::new(element);
+ self.len += 1;
+ Ok(())
+ }
+
+ pub fn pop(&mut self) -> Option<A::Item> {
+ if self.len == 0 {
+ None
+ } else {
+ self.len -= 1;
+ // SAFETY: this element is valid and we "forget" it by setting the length.
+ Some(unsafe { A::as_slice(&mut self.storage)[self.len].as_ptr().read() })
+ }
+ }
+
+ pub fn swap_remove(&mut self, index: usize) -> A::Item {
+ assert!(self.len > 0);
+ A::as_mut_slice(&mut self.storage).swap(index, self.len - 1);
+ self.pop().unwrap()
+ }
+}
+
+#[cfg(feature = "read")]
+impl<T> ArrayVec<Vec<T>> {
+ pub fn into_vec(mut self) -> Vec<T> {
+ let len = core::mem::replace(&mut self.len, 0);
+ let storage = core::mem::replace(&mut self.storage, Box::new([]));
+ let slice = Box::leak(storage);
+ debug_assert!(len <= slice.len());
+ // SAFETY: valid elements.
+ unsafe { Vec::from_raw_parts(slice.as_mut_ptr() as *mut T, len, slice.len()) }
+ }
+}
+
+impl<A: ArrayLike> Drop for ArrayVec<A> {
+ fn drop(&mut self) {
+ self.clear();
+ }
+}
+
+impl<A: ArrayLike> Default for ArrayVec<A> {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+
+impl<A: ArrayLike> ops::Deref for ArrayVec<A> {
+ type Target = [A::Item];
+
+ fn deref(&self) -> &[A::Item] {
+ let slice = &A::as_slice(&self.storage);
+ debug_assert!(self.len <= slice.len());
+ // SAFETY: valid elements.
+ unsafe { slice::from_raw_parts(slice.as_ptr() as _, self.len) }
+ }
+}
+
+impl<A: ArrayLike> ops::DerefMut for ArrayVec<A> {
+ fn deref_mut(&mut self) -> &mut [A::Item] {
+ let slice = &mut A::as_mut_slice(&mut self.storage);
+ debug_assert!(self.len <= slice.len());
+ // SAFETY: valid elements.
+ unsafe { slice::from_raw_parts_mut(slice.as_mut_ptr() as _, self.len) }
+ }
+}
+
+impl<A: ArrayLike> Clone for ArrayVec<A>
+where
+ A::Item: Clone,
+{
+ fn clone(&self) -> Self {
+ let mut new = Self::default();
+ for value in &**self {
+ new.try_push(value.clone()).unwrap();
+ }
+ new
+ }
+}
+
+impl<A: ArrayLike> PartialEq for ArrayVec<A>
+where
+ A::Item: PartialEq,
+{
+ fn eq(&self, other: &Self) -> bool {
+ **self == **other
+ }
+}
+
+impl<A: ArrayLike> Eq for ArrayVec<A> where A::Item: Eq {}
+
+impl<A: ArrayLike> fmt::Debug for ArrayVec<A>
+where
+ A::Item: fmt::Debug,
+{
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Debug::fmt(&**self, f)
+ }
+}
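`ArrayVec` is `pub(crate)`, so it is only reachable from inside the crate; this illustrative sketch (assuming the `read` feature for the `Vec`-backed case) shows the intended semantics: array-backed storage refuses to grow, the boxed slice behind `Vec<T>` is regrown on demand, and `Deref` exposes the usual slice API.

    fn arrayvec_sketch() {
        // Fixed backing: capacity is the array length and `grow` always fails.
        let mut fixed = ArrayVec::<[u32; 2]>::new();
        assert!(fixed.try_push(1).is_ok());
        assert!(fixed.try_push(2).is_ok());
        assert!(fixed.try_push(3).is_err()); // CapacityFull

        // Growable backing: `try_push` reallocates the boxed slice as needed.
        let mut growable = ArrayVec::<Vec<u32>>::new();
        for i in 0..100 {
            growable.try_push(i).unwrap();
        }
        assert_eq!(growable.pop(), Some(99));
        assert_eq!(growable.len(), 99); // slice methods come from `Deref`
    }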
diff --git a/vendor/gimli-0.26.2/src/read/value.rs b/vendor/gimli-0.26.2/src/read/value.rs
new file mode 100644
index 000000000..6f43ebb26
--- /dev/null
+++ b/vendor/gimli-0.26.2/src/read/value.rs
@@ -0,0 +1,1621 @@
+//! Definitions for values used in DWARF expressions.
+
+use crate::constants;
+#[cfg(feature = "read")]
+use crate::read::{AttributeValue, DebuggingInformationEntry};
+use crate::read::{Error, Reader, Result};
+
+/// Convert a u64 to an i64, with sign extension if required.
+///
+/// This is primarily used when needing to treat `Value::Generic`
+/// as a signed value.
+#[inline]
+fn sign_extend(value: u64, mask: u64) -> i64 {
+ let value = (value & mask) as i64;
+ let sign = ((mask >> 1) + 1) as i64;
+ (value ^ sign).wrapping_sub(sign)
+}
+
+#[inline]
+fn mask_bit_size(addr_mask: u64) -> u32 {
+ 64 - addr_mask.leading_zeros()
+}
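A quick worked example of the two helpers above (illustrative sketch only): with a 32-bit address mask, the all-ones pattern sign-extends to -1, and the mask is 32 bits wide.

    fn sign_extend_sketch() {
        let addr_mask = 0xffff_ffffu64; // 32-bit target
        assert_eq!(sign_extend(0xffff_ffff, addr_mask), -1);
        assert_eq!(sign_extend(0x7fff_ffff, addr_mask), 0x7fff_ffff);
        assert_eq!(mask_bit_size(addr_mask), 32);
    }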
+
+/// The type of an entry on the DWARF stack.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum ValueType {
+ /// The generic type, which is address-sized and of unspecified sign,
+ /// as specified in the DWARF 5 standard, section 2.5.1.
+ /// This type is also used to represent address base types.
+ Generic,
+ /// Signed 8-bit integer type.
+ I8,
+ /// Unsigned 8-bit integer type.
+ U8,
+ /// Signed 16-bit integer type.
+ I16,
+ /// Unsigned 16-bit integer type.
+ U16,
+ /// Signed 32-bit integer type.
+ I32,
+ /// Unsigned 32-bit integer type.
+ U32,
+ /// Signed 64-bit integer type.
+ I64,
+ /// Unsigned 64-bit integer type.
+ U64,
+ /// 32-bit floating point type.
+ F32,
+ /// 64-bit floating point type.
+ F64,
+}
+
+/// The value of an entry on the DWARF stack.
+#[derive(Debug, Clone, Copy, PartialEq)]
+pub enum Value {
+ /// A generic value, which is address-sized and of unspecified sign.
+ Generic(u64),
+ /// A signed 8-bit integer value.
+ I8(i8),
+ /// An unsigned 8-bit integer value.
+ U8(u8),
+ /// A signed 16-bit integer value.
+ I16(i16),
+ /// An unsigned 16-bit integer value.
+ U16(u16),
+ /// A signed 32-bit integer value.
+ I32(i32),
+ /// An unsigned 32-bit integer value.
+ U32(u32),
+ /// A signed 64-bit integer value.
+ I64(i64),
+ /// An unsigned 64-bit integer value.
+ U64(u64),
+ /// A 32-bit floating point value.
+ F32(f32),
+ /// A 64-bit floating point value.
+ F64(f64),
+}
+
+impl ValueType {
+ /// The size in bits of a value for this type.
+ pub fn bit_size(self, addr_mask: u64) -> u32 {
+ match self {
+ ValueType::Generic => mask_bit_size(addr_mask),
+ ValueType::I8 | ValueType::U8 => 8,
+ ValueType::I16 | ValueType::U16 => 16,
+ ValueType::I32 | ValueType::U32 | ValueType::F32 => 32,
+ ValueType::I64 | ValueType::U64 | ValueType::F64 => 64,
+ }
+ }
+
+ /// Construct a `ValueType` from the attributes of a base type DIE.
+ pub fn from_encoding(encoding: constants::DwAte, byte_size: u64) -> Option<ValueType> {
+ Some(match (encoding, byte_size) {
+ (constants::DW_ATE_signed, 1) => ValueType::I8,
+ (constants::DW_ATE_signed, 2) => ValueType::I16,
+ (constants::DW_ATE_signed, 4) => ValueType::I32,
+ (constants::DW_ATE_signed, 8) => ValueType::I64,
+ (constants::DW_ATE_unsigned, 1) => ValueType::U8,
+ (constants::DW_ATE_unsigned, 2) => ValueType::U16,
+ (constants::DW_ATE_unsigned, 4) => ValueType::U32,
+ (constants::DW_ATE_unsigned, 8) => ValueType::U64,
+ (constants::DW_ATE_float, 4) => ValueType::F32,
+ (constants::DW_ATE_float, 8) => ValueType::F64,
+ _ => return None,
+ })
+ }
+
+ /// Construct a `ValueType` from a base type DIE.
+ #[cfg(feature = "read")]
+ pub fn from_entry<R: Reader>(
+ entry: &DebuggingInformationEntry<R>,
+ ) -> Result<Option<ValueType>> {
+ if entry.tag() != constants::DW_TAG_base_type {
+ return Ok(None);
+ }
+ let mut encoding = None;
+ let mut byte_size = None;
+ let mut endianity = constants::DW_END_default;
+ let mut attrs = entry.attrs();
+ while let Some(attr) = attrs.next()? {
+ match attr.name() {
+ constants::DW_AT_byte_size => byte_size = attr.udata_value(),
+ constants::DW_AT_encoding => {
+ if let AttributeValue::Encoding(x) = attr.value() {
+ encoding = Some(x);
+ }
+ }
+ constants::DW_AT_endianity => {
+ if let AttributeValue::Endianity(x) = attr.value() {
+ endianity = x;
+ }
+ }
+ _ => {}
+ }
+ }
+
+ if endianity != constants::DW_END_default {
+ // TODO: we could check if it matches the reader endianity,
+ // but normally it would use DW_END_default in that case.
+ return Ok(None);
+ }
+
+ if let (Some(encoding), Some(byte_size)) = (encoding, byte_size) {
+ Ok(ValueType::from_encoding(encoding, byte_size))
+ } else {
+ Ok(None)
+ }
+ }
+}
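An illustrative sketch of `from_encoding` and `bit_size`: only the signed, unsigned, and float encodings at their natural byte sizes are recognised, and the width of `Generic` is derived from the address mask (`DW_ATE_boolean` is used here only as an example of an unsupported encoding).

    fn value_type_sketch() {
        assert_eq!(
            ValueType::from_encoding(constants::DW_ATE_signed, 4),
            Some(ValueType::I32)
        );
        assert_eq!(
            ValueType::from_encoding(constants::DW_ATE_float, 8),
            Some(ValueType::F64)
        );
        // Unsupported encodings or unusual sizes yield `None`.
        assert_eq!(ValueType::from_encoding(constants::DW_ATE_boolean, 1), None);
        // A 32-bit address mask makes the generic type 32 bits wide.
        assert_eq!(ValueType::Generic.bit_size(0xffff_ffff), 32);
    }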
+
+impl Value {
+ /// Return the `ValueType` corresponding to this `Value`.
+ pub fn value_type(&self) -> ValueType {
+ match *self {
+ Value::Generic(_) => ValueType::Generic,
+ Value::I8(_) => ValueType::I8,
+ Value::U8(_) => ValueType::U8,
+ Value::I16(_) => ValueType::I16,
+ Value::U16(_) => ValueType::U16,
+ Value::I32(_) => ValueType::I32,
+ Value::U32(_) => ValueType::U32,
+ Value::I64(_) => ValueType::I64,
+ Value::U64(_) => ValueType::U64,
+ Value::F32(_) => ValueType::F32,
+ Value::F64(_) => ValueType::F64,
+ }
+ }
+
+ /// Read a `Value` with the given `value_type` from a `Reader`.
+ pub fn parse<R: Reader>(value_type: ValueType, mut bytes: R) -> Result<Value> {
+ let value = match value_type {
+ ValueType::I8 => Value::I8(bytes.read_i8()?),
+ ValueType::U8 => Value::U8(bytes.read_u8()?),
+ ValueType::I16 => Value::I16(bytes.read_i16()?),
+ ValueType::U16 => Value::U16(bytes.read_u16()?),
+ ValueType::I32 => Value::I32(bytes.read_i32()?),
+ ValueType::U32 => Value::U32(bytes.read_u32()?),
+ ValueType::I64 => Value::I64(bytes.read_i64()?),
+ ValueType::U64 => Value::U64(bytes.read_u64()?),
+ ValueType::F32 => Value::F32(bytes.read_f32()?),
+ ValueType::F64 => Value::F64(bytes.read_f64()?),
+ _ => return Err(Error::UnsupportedTypeOperation),
+ };
+ Ok(value)
+ }
+
+ /// Convert a `Value` to a `u64`.
+ ///
+ /// The `ValueType` of `self` must be integral.
+ /// Values are sign extended if the source value is signed.
+ pub fn to_u64(self, addr_mask: u64) -> Result<u64> {
+ let value = match self {
+ Value::Generic(value) => value & addr_mask,
+ Value::I8(value) => value as u64,
+ Value::U8(value) => u64::from(value),
+ Value::I16(value) => value as u64,
+ Value::U16(value) => u64::from(value),
+ Value::I32(value) => value as u64,
+ Value::U32(value) => u64::from(value),
+ Value::I64(value) => value as u64,
+ Value::U64(value) => value as u64,
+ _ => return Err(Error::IntegralTypeRequired),
+ };
+ Ok(value)
+ }
+
+ /// Create a `Value` with the given `value_type` from a `u64` value.
+ ///
+ /// The `value_type` may be integral or floating point.
+ /// The result is truncated if the `u64` value does
+ /// not fit the bounds of the `value_type`.
+ pub fn from_u64(value_type: ValueType, value: u64) -> Result<Value> {
+ let value = match value_type {
+ ValueType::Generic => Value::Generic(value),
+ ValueType::I8 => Value::I8(value as i8),
+ ValueType::U8 => Value::U8(value as u8),
+ ValueType::I16 => Value::I16(value as i16),
+ ValueType::U16 => Value::U16(value as u16),
+ ValueType::I32 => Value::I32(value as i32),
+ ValueType::U32 => Value::U32(value as u32),
+ ValueType::I64 => Value::I64(value as i64),
+ ValueType::U64 => Value::U64(value),
+ ValueType::F32 => Value::F32(value as f32),
+ ValueType::F64 => Value::F64(value as f64),
+ };
+ Ok(value)
+ }
+
+ /// Create a `Value` with the given `value_type` from a `f32` value.
+ ///
+ /// The `value_type` may be integral or floating point.
+ /// The result is not defined if the `f32` value does
+ /// not fit the bounds of the `value_type`.
+ fn from_f32(value_type: ValueType, value: f32) -> Result<Value> {
+ let value = match value_type {
+ ValueType::Generic => Value::Generic(value as u64),
+ ValueType::I8 => Value::I8(value as i8),
+ ValueType::U8 => Value::U8(value as u8),
+ ValueType::I16 => Value::I16(value as i16),
+ ValueType::U16 => Value::U16(value as u16),
+ ValueType::I32 => Value::I32(value as i32),
+ ValueType::U32 => Value::U32(value as u32),
+ ValueType::I64 => Value::I64(value as i64),
+ ValueType::U64 => Value::U64(value as u64),
+ ValueType::F32 => Value::F32(value),
+ ValueType::F64 => Value::F64(f64::from(value)),
+ };
+ Ok(value)
+ }
+
+ /// Create a `Value` with the given `value_type` from a `f64` value.
+ ///
+ /// The `value_type` may be integral or floating point.
+ /// The result is not defined if the `f64` value does
+ /// not fit the bounds of the `value_type`.
+ fn from_f64(value_type: ValueType, value: f64) -> Result<Value> {
+ let value = match value_type {
+ ValueType::Generic => Value::Generic(value as u64),
+ ValueType::I8 => Value::I8(value as i8),
+ ValueType::U8 => Value::U8(value as u8),
+ ValueType::I16 => Value::I16(value as i16),
+ ValueType::U16 => Value::U16(value as u16),
+ ValueType::I32 => Value::I32(value as i32),
+ ValueType::U32 => Value::U32(value as u32),
+ ValueType::I64 => Value::I64(value as i64),
+ ValueType::U64 => Value::U64(value as u64),
+ ValueType::F32 => Value::F32(value as f32),
+ ValueType::F64 => Value::F64(value),
+ };
+ Ok(value)
+ }
+
+ /// Convert a `Value` to the given `value_type`.
+ ///
+ /// When converting between integral types, the result is truncated
+ /// if the source value does not fit the bounds of the `value_type`.
+ /// When converting from floating point types, the result is not defined
+ /// if the source value does not fit the bounds of the `value_type`.
+ ///
+ /// This corresponds to the DWARF `DW_OP_convert` operation.
+ pub fn convert(self, value_type: ValueType, addr_mask: u64) -> Result<Value> {
+ match self {
+ Value::F32(value) => Value::from_f32(value_type, value),
+ Value::F64(value) => Value::from_f64(value_type, value),
+ _ => Value::from_u64(value_type, self.to_u64(addr_mask)?),
+ }
+ }
+
+ /// Reinterpret the bits in a `Value` as the given `value_type`.
+ ///
+ /// The source and result value types must have equal sizes.
+ ///
+ /// This corresponds to the DWARF `DW_OP_reinterpret` operation.
+ pub fn reinterpret(self, value_type: ValueType, addr_mask: u64) -> Result<Value> {
+ if self.value_type().bit_size(addr_mask) != value_type.bit_size(addr_mask) {
+ return Err(Error::TypeMismatch);
+ }
+ let bits = match self {
+ Value::Generic(value) => value,
+ Value::I8(value) => value as u64,
+ Value::U8(value) => u64::from(value),
+ Value::I16(value) => value as u64,
+ Value::U16(value) => u64::from(value),
+ Value::I32(value) => value as u64,
+ Value::U32(value) => u64::from(value),
+ Value::I64(value) => value as u64,
+ Value::U64(value) => value,
+ Value::F32(value) => u64::from(f32::to_bits(value)),
+ Value::F64(value) => f64::to_bits(value),
+ };
+ let value = match value_type {
+ ValueType::Generic => Value::Generic(bits),
+ ValueType::I8 => Value::I8(bits as i8),
+ ValueType::U8 => Value::U8(bits as u8),
+ ValueType::I16 => Value::I16(bits as i16),
+ ValueType::U16 => Value::U16(bits as u16),
+ ValueType::I32 => Value::I32(bits as i32),
+ ValueType::U32 => Value::U32(bits as u32),
+ ValueType::I64 => Value::I64(bits as i64),
+ ValueType::U64 => Value::U64(bits),
+ ValueType::F32 => Value::F32(f32::from_bits(bits as u32)),
+ ValueType::F64 => Value::F64(f64::from_bits(bits)),
+ };
+ Ok(value)
+ }
+
+ /// Perform an absolute value operation.
+ ///
+ /// If the value type is `Generic`, then it is interpreted as a signed value.
+ ///
+ /// This corresponds to the DWARF `DW_OP_abs` operation.
+ pub fn abs(self, addr_mask: u64) -> Result<Value> {
+ // wrapping_abs() can be used because DWARF specifies that the result is undefined
+ // for negative minimal values.
+ let value = match self {
+ Value::Generic(value) => {
+ Value::Generic(sign_extend(value, addr_mask).wrapping_abs() as u64)
+ }
+ Value::I8(value) => Value::I8(value.wrapping_abs()),
+ Value::I16(value) => Value::I16(value.wrapping_abs()),
+ Value::I32(value) => Value::I32(value.wrapping_abs()),
+ Value::I64(value) => Value::I64(value.wrapping_abs()),
+ // f32/f64::abs() is not available in libcore
+ Value::F32(value) => Value::F32(if value < 0. { -value } else { value }),
+ Value::F64(value) => Value::F64(if value < 0. { -value } else { value }),
+ Value::U8(_) | Value::U16(_) | Value::U32(_) | Value::U64(_) => self,
+ };
+ Ok(value)
+ }
+
+ /// Perform a negation operation.
+ ///
+ /// If the value type is `Generic`, then it is interpreted as a signed value.
+ ///
+ /// This corresponds to the DWARF `DW_OP_neg` operation.
+ pub fn neg(self, addr_mask: u64) -> Result<Value> {
+ // wrapping_neg() can be used because DWARF specifies that the result is undefined
+ // for negative minimal values.
+ let value = match self {
+ Value::Generic(value) => {
+ Value::Generic(sign_extend(value, addr_mask).wrapping_neg() as u64)
+ }
+ Value::I8(value) => Value::I8(value.wrapping_neg()),
+ Value::I16(value) => Value::I16(value.wrapping_neg()),
+ Value::I32(value) => Value::I32(value.wrapping_neg()),
+ Value::I64(value) => Value::I64(value.wrapping_neg()),
+ Value::F32(value) => Value::F32(-value),
+ Value::F64(value) => Value::F64(-value),
+ // It's unclear if these should implicitly convert to a signed value.
+ // For now, we don't support them.
+ Value::U8(_) | Value::U16(_) | Value::U32(_) | Value::U64(_) => {
+ return Err(Error::UnsupportedTypeOperation);
+ }
+ };
+ Ok(value)
+ }
+
+ /// Perform an addition operation.
+ ///
+ /// This operation requires matching types.
+ ///
+ /// This corresponds to the DWARF `DW_OP_plus` operation.
+ pub fn add(self, rhs: Value, addr_mask: u64) -> Result<Value> {
+ let value = match (self, rhs) {
+ (Value::Generic(v1), Value::Generic(v2)) => {
+ Value::Generic(v1.wrapping_add(v2) & addr_mask)
+ }
+ (Value::I8(v1), Value::I8(v2)) => Value::I8(v1.wrapping_add(v2)),
+ (Value::U8(v1), Value::U8(v2)) => Value::U8(v1.wrapping_add(v2)),
+ (Value::I16(v1), Value::I16(v2)) => Value::I16(v1.wrapping_add(v2)),
+ (Value::U16(v1), Value::U16(v2)) => Value::U16(v1.wrapping_add(v2)),
+ (Value::I32(v1), Value::I32(v2)) => Value::I32(v1.wrapping_add(v2)),
+ (Value::U32(v1), Value::U32(v2)) => Value::U32(v1.wrapping_add(v2)),
+ (Value::I64(v1), Value::I64(v2)) => Value::I64(v1.wrapping_add(v2)),
+ (Value::U64(v1), Value::U64(v2)) => Value::U64(v1.wrapping_add(v2)),
+ (Value::F32(v1), Value::F32(v2)) => Value::F32(v1 + v2),
+ (Value::F64(v1), Value::F64(v2)) => Value::F64(v1 + v2),
+ _ => return Err(Error::TypeMismatch),
+ };
+ Ok(value)
+ }
+
+ /// Perform a subtraction operation.
+ ///
+ /// This operation requires matching types.
+ ///
+ /// This corresponds to the DWARF `DW_OP_minus` operation.
+ pub fn sub(self, rhs: Value, addr_mask: u64) -> Result<Value> {
+ let value = match (self, rhs) {
+ (Value::Generic(v1), Value::Generic(v2)) => {
+ Value::Generic(v1.wrapping_sub(v2) & addr_mask)
+ }
+ (Value::I8(v1), Value::I8(v2)) => Value::I8(v1.wrapping_sub(v2)),
+ (Value::U8(v1), Value::U8(v2)) => Value::U8(v1.wrapping_sub(v2)),
+ (Value::I16(v1), Value::I16(v2)) => Value::I16(v1.wrapping_sub(v2)),
+ (Value::U16(v1), Value::U16(v2)) => Value::U16(v1.wrapping_sub(v2)),
+ (Value::I32(v1), Value::I32(v2)) => Value::I32(v1.wrapping_sub(v2)),
+ (Value::U32(v1), Value::U32(v2)) => Value::U32(v1.wrapping_sub(v2)),
+ (Value::I64(v1), Value::I64(v2)) => Value::I64(v1.wrapping_sub(v2)),
+ (Value::U64(v1), Value::U64(v2)) => Value::U64(v1.wrapping_sub(v2)),
+ (Value::F32(v1), Value::F32(v2)) => Value::F32(v1 - v2),
+ (Value::F64(v1), Value::F64(v2)) => Value::F64(v1 - v2),
+ _ => return Err(Error::TypeMismatch),
+ };
+ Ok(value)
+ }
+
+ /// Perform a multiplication operation.
+ ///
+ /// This operation requires matching types.
+ ///
+ /// This corresponds to the DWARF `DW_OP_mul` operation.
+ pub fn mul(self, rhs: Value, addr_mask: u64) -> Result<Value> {
+ let value = match (self, rhs) {
+ (Value::Generic(v1), Value::Generic(v2)) => {
+ Value::Generic(v1.wrapping_mul(v2) & addr_mask)
+ }
+ (Value::I8(v1), Value::I8(v2)) => Value::I8(v1.wrapping_mul(v2)),
+ (Value::U8(v1), Value::U8(v2)) => Value::U8(v1.wrapping_mul(v2)),
+ (Value::I16(v1), Value::I16(v2)) => Value::I16(v1.wrapping_mul(v2)),
+ (Value::U16(v1), Value::U16(v2)) => Value::U16(v1.wrapping_mul(v2)),
+ (Value::I32(v1), Value::I32(v2)) => Value::I32(v1.wrapping_mul(v2)),
+ (Value::U32(v1), Value::U32(v2)) => Value::U32(v1.wrapping_mul(v2)),
+ (Value::I64(v1), Value::I64(v2)) => Value::I64(v1.wrapping_mul(v2)),
+ (Value::U64(v1), Value::U64(v2)) => Value::U64(v1.wrapping_mul(v2)),
+ (Value::F32(v1), Value::F32(v2)) => Value::F32(v1 * v2),
+ (Value::F64(v1), Value::F64(v2)) => Value::F64(v1 * v2),
+ _ => return Err(Error::TypeMismatch),
+ };
+ Ok(value)
+ }
+
+ /// Perform a division operation.
+ ///
+ /// This operation requires matching types.
+ /// If the value type is `Generic`, then it is interpreted as a signed value.
+ ///
+ /// This corresponds to the DWARF `DW_OP_div` operation.
+ pub fn div(self, rhs: Value, addr_mask: u64) -> Result<Value> {
+ match rhs {
+ Value::Generic(v2) if sign_extend(v2, addr_mask) == 0 => {
+ return Err(Error::DivisionByZero);
+ }
+ Value::I8(0)
+ | Value::U8(0)
+ | Value::I16(0)
+ | Value::U16(0)
+ | Value::I32(0)
+ | Value::U32(0)
+ | Value::I64(0)
+ | Value::U64(0) => {
+ return Err(Error::DivisionByZero);
+ }
+ _ => {}
+ }
+ let value = match (self, rhs) {
+ (Value::Generic(v1), Value::Generic(v2)) => {
+ // Signed division
+ Value::Generic(
+ sign_extend(v1, addr_mask).wrapping_div(sign_extend(v2, addr_mask)) as u64,
+ )
+ }
+ (Value::I8(v1), Value::I8(v2)) => Value::I8(v1.wrapping_div(v2)),
+ (Value::U8(v1), Value::U8(v2)) => Value::U8(v1.wrapping_div(v2)),
+ (Value::I16(v1), Value::I16(v2)) => Value::I16(v1.wrapping_div(v2)),
+ (Value::U16(v1), Value::U16(v2)) => Value::U16(v1.wrapping_div(v2)),
+ (Value::I32(v1), Value::I32(v2)) => Value::I32(v1.wrapping_div(v2)),
+ (Value::U32(v1), Value::U32(v2)) => Value::U32(v1.wrapping_div(v2)),
+ (Value::I64(v1), Value::I64(v2)) => Value::I64(v1.wrapping_div(v2)),
+ (Value::U64(v1), Value::U64(v2)) => Value::U64(v1.wrapping_div(v2)),
+ (Value::F32(v1), Value::F32(v2)) => Value::F32(v1 / v2),
+ (Value::F64(v1), Value::F64(v2)) => Value::F64(v1 / v2),
+ _ => return Err(Error::TypeMismatch),
+ };
+ Ok(value)
+ }
+
+ /// Perform a remainder operation.
+ ///
+ /// This operation requires matching integral types.
+ /// If the value type is `Generic`, then it is interpreted as an unsigned value.
+ ///
+ /// This corresponds to the DWARF `DW_OP_mod` operation.
+ pub fn rem(self, rhs: Value, addr_mask: u64) -> Result<Value> {
+ match rhs {
+ Value::Generic(rhs) if (rhs & addr_mask) == 0 => {
+ return Err(Error::DivisionByZero);
+ }
+ Value::I8(0)
+ | Value::U8(0)
+ | Value::I16(0)
+ | Value::U16(0)
+ | Value::I32(0)
+ | Value::U32(0)
+ | Value::I64(0)
+ | Value::U64(0) => {
+ return Err(Error::DivisionByZero);
+ }
+ _ => {}
+ }
+ let value = match (self, rhs) {
+ (Value::Generic(v1), Value::Generic(v2)) => {
+ // Unsigned modulus
+ Value::Generic((v1 & addr_mask).wrapping_rem(v2 & addr_mask))
+ }
+ (Value::I8(v1), Value::I8(v2)) => Value::I8(v1.wrapping_rem(v2)),
+ (Value::U8(v1), Value::U8(v2)) => Value::U8(v1.wrapping_rem(v2)),
+ (Value::I16(v1), Value::I16(v2)) => Value::I16(v1.wrapping_rem(v2)),
+ (Value::U16(v1), Value::U16(v2)) => Value::U16(v1.wrapping_rem(v2)),
+ (Value::I32(v1), Value::I32(v2)) => Value::I32(v1.wrapping_rem(v2)),
+ (Value::U32(v1), Value::U32(v2)) => Value::U32(v1.wrapping_rem(v2)),
+ (Value::I64(v1), Value::I64(v2)) => Value::I64(v1.wrapping_rem(v2)),
+ (Value::U64(v1), Value::U64(v2)) => Value::U64(v1.wrapping_rem(v2)),
+ (Value::F32(_), Value::F32(_)) => return Err(Error::IntegralTypeRequired),
+ (Value::F64(_), Value::F64(_)) => return Err(Error::IntegralTypeRequired),
+ _ => return Err(Error::TypeMismatch),
+ };
+ Ok(value)
+ }
+
+ /// Perform a bitwise not operation.
+ ///
+ /// This operation requires matching integral types.
+ ///
+ /// This corresponds to the DWARF `DW_OP_not` operation.
+ pub fn not(self, addr_mask: u64) -> Result<Value> {
+ let value_type = self.value_type();
+ let v = self.to_u64(addr_mask)?;
+ Value::from_u64(value_type, !v)
+ }
+
+ /// Perform a bitwise and operation.
+ ///
+ /// This operation requires matching integral types.
+ ///
+ /// This corresponds to the DWARF `DW_OP_and` operation.
+ pub fn and(self, rhs: Value, addr_mask: u64) -> Result<Value> {
+ let value_type = self.value_type();
+ if value_type != rhs.value_type() {
+ return Err(Error::TypeMismatch);
+ }
+ let v1 = self.to_u64(addr_mask)?;
+ let v2 = rhs.to_u64(addr_mask)?;
+ Value::from_u64(value_type, v1 & v2)
+ }
+
+ /// Perform a bitwise or operation.
+ ///
+ /// This operation requires matching integral types.
+ ///
+ /// This corresponds to the DWARF `DW_OP_or` operation.
+ pub fn or(self, rhs: Value, addr_mask: u64) -> Result<Value> {
+ let value_type = self.value_type();
+ if value_type != rhs.value_type() {
+ return Err(Error::TypeMismatch);
+ }
+ let v1 = self.to_u64(addr_mask)?;
+ let v2 = rhs.to_u64(addr_mask)?;
+ Value::from_u64(value_type, v1 | v2)
+ }
+
+ /// Perform a bitwise exclusive-or operation.
+ ///
+ /// This operation requires matching integral types.
+ ///
+ /// This corresponds to the DWARF `DW_OP_xor` operation.
+ pub fn xor(self, rhs: Value, addr_mask: u64) -> Result<Value> {
+ let value_type = self.value_type();
+ if value_type != rhs.value_type() {
+ return Err(Error::TypeMismatch);
+ }
+ let v1 = self.to_u64(addr_mask)?;
+ let v2 = rhs.to_u64(addr_mask)?;
+ Value::from_u64(value_type, v1 ^ v2)
+ }
+
+ /// Convert value to bit length suitable for a shift operation.
+ ///
+ /// If the value is negative then an error is returned.
+ fn shift_length(self) -> Result<u64> {
+ let value = match self {
+ Value::Generic(value) => value,
+ Value::I8(value) if value >= 0 => value as u64,
+ Value::U8(value) => u64::from(value),
+ Value::I16(value) if value >= 0 => value as u64,
+ Value::U16(value) => u64::from(value),
+ Value::I32(value) if value >= 0 => value as u64,
+ Value::U32(value) => u64::from(value),
+ Value::I64(value) if value >= 0 => value as u64,
+ Value::U64(value) => value,
+ _ => return Err(Error::InvalidShiftExpression),
+ };
+ Ok(value)
+ }
+
+ /// Perform a shift left operation.
+ ///
+ /// This operation requires integral types.
+ /// If the shift length exceeds the type size, then 0 is returned.
+ /// If the shift length is negative then an error is returned.
+ ///
+ /// This corresponds to the DWARF `DW_OP_shl` operation.
+ pub fn shl(self, rhs: Value, addr_mask: u64) -> Result<Value> {
+ let v2 = rhs.shift_length()?;
+ let value = match self {
+ Value::Generic(v1) => Value::Generic(if v2 >= u64::from(mask_bit_size(addr_mask)) {
+ 0
+ } else {
+ (v1 & addr_mask) << v2
+ }),
+ Value::I8(v1) => Value::I8(if v2 >= 8 { 0 } else { v1 << v2 }),
+ Value::U8(v1) => Value::U8(if v2 >= 8 { 0 } else { v1 << v2 }),
+ Value::I16(v1) => Value::I16(if v2 >= 16 { 0 } else { v1 << v2 }),
+ Value::U16(v1) => Value::U16(if v2 >= 16 { 0 } else { v1 << v2 }),
+ Value::I32(v1) => Value::I32(if v2 >= 32 { 0 } else { v1 << v2 }),
+ Value::U32(v1) => Value::U32(if v2 >= 32 { 0 } else { v1 << v2 }),
+ Value::I64(v1) => Value::I64(if v2 >= 64 { 0 } else { v1 << v2 }),
+ Value::U64(v1) => Value::U64(if v2 >= 64 { 0 } else { v1 << v2 }),
+ _ => return Err(Error::IntegralTypeRequired),
+ };
+ Ok(value)
+ }
+
+ /// Perform a logical shift right operation.
+ ///
+ /// This operation requires an unsigned integral type for the value.
+ /// If the value type is `Generic`, then it is interpreted as an unsigned value.
+ ///
+ /// This operation requires an integral type for the shift length.
+ /// If the shift length exceeds the type size, then 0 is returned.
+ /// If the shift length is negative then an error is returned.
+ ///
+ /// This corresponds to the DWARF `DW_OP_shr` operation.
+ pub fn shr(self, rhs: Value, addr_mask: u64) -> Result<Value> {
+ let v2 = rhs.shift_length()?;
+ let value = match self {
+ Value::Generic(v1) => Value::Generic(if v2 >= u64::from(mask_bit_size(addr_mask)) {
+ 0
+ } else {
+ (v1 & addr_mask) >> v2
+ }),
+ Value::U8(v1) => Value::U8(if v2 >= 8 { 0 } else { v1 >> v2 }),
+ Value::U16(v1) => Value::U16(if v2 >= 16 { 0 } else { v1 >> v2 }),
+ Value::U32(v1) => Value::U32(if v2 >= 32 { 0 } else { v1 >> v2 }),
+ Value::U64(v1) => Value::U64(if v2 >= 64 { 0 } else { v1 >> v2 }),
+ // It's unclear if signed values should implicitly convert to an unsigned value.
+ // For now, we don't support them.
+ Value::I8(_) | Value::I16(_) | Value::I32(_) | Value::I64(_) => {
+ return Err(Error::UnsupportedTypeOperation);
+ }
+ _ => return Err(Error::IntegralTypeRequired),
+ };
+ Ok(value)
+ }
+
+ /// Perform an arithmetic shift right operation.
+ ///
+ /// This operation requires a signed integral type for the value.
+ /// If the value type is `Generic`, then it is interpreted as a signed value.
+ ///
+ /// This operation requires an integral type for the shift length.
+ /// If the shift length exceeds the type size, then 0 is returned for positive values,
+ /// and -1 is returned for negative values.
+ /// If the shift length is negative then an error is returned.
+ ///
+ /// This corresponds to the DWARF `DW_OP_shra` operation.
+ pub fn shra(self, rhs: Value, addr_mask: u64) -> Result<Value> {
+ let v2 = rhs.shift_length()?;
+ let value = match self {
+ Value::Generic(v1) => {
+ let v1 = sign_extend(v1, addr_mask);
+ let value = if v2 >= u64::from(mask_bit_size(addr_mask)) {
+ if v1 < 0 {
+ !0
+ } else {
+ 0
+ }
+ } else {
+ (v1 >> v2) as u64
+ };
+ Value::Generic(value)
+ }
+ Value::I8(v1) => Value::I8(if v2 >= 8 {
+ if v1 < 0 {
+ !0
+ } else {
+ 0
+ }
+ } else {
+ v1 >> v2
+ }),
+ Value::I16(v1) => Value::I16(if v2 >= 16 {
+ if v1 < 0 {
+ !0
+ } else {
+ 0
+ }
+ } else {
+ v1 >> v2
+ }),
+ Value::I32(v1) => Value::I32(if v2 >= 32 {
+ if v1 < 0 {
+ !0
+ } else {
+ 0
+ }
+ } else {
+ v1 >> v2
+ }),
+ Value::I64(v1) => Value::I64(if v2 >= 64 {
+ if v1 < 0 {
+ !0
+ } else {
+ 0
+ }
+ } else {
+ v1 >> v2
+ }),
+ // It's unclear if unsigned values should implicitly convert to a signed value.
+ // For now, we don't support them.
+ Value::U8(_) | Value::U16(_) | Value::U32(_) | Value::U64(_) => {
+ return Err(Error::UnsupportedTypeOperation);
+ }
+ _ => return Err(Error::IntegralTypeRequired),
+ };
+ Ok(value)
+ }
+
+ /// Perform the `==` relational operation.
+ ///
+ /// This operation requires matching types.
+ /// If the value type is `Generic`, then it is interpreted as a signed value.
+ ///
+ /// This corresponds to the DWARF `DW_OP_eq` operation.
+ pub fn eq(self, rhs: Value, addr_mask: u64) -> Result<Value> {
+ let value = match (self, rhs) {
+ (Value::Generic(v1), Value::Generic(v2)) => {
+ sign_extend(v1, addr_mask) == sign_extend(v2, addr_mask)
+ }
+ (Value::I8(v1), Value::I8(v2)) => v1 == v2,
+ (Value::U8(v1), Value::U8(v2)) => v1 == v2,
+ (Value::I16(v1), Value::I16(v2)) => v1 == v2,
+ (Value::U16(v1), Value::U16(v2)) => v1 == v2,
+ (Value::I32(v1), Value::I32(v2)) => v1 == v2,
+ (Value::U32(v1), Value::U32(v2)) => v1 == v2,
+ (Value::I64(v1), Value::I64(v2)) => v1 == v2,
+ (Value::U64(v1), Value::U64(v2)) => v1 == v2,
+ (Value::F32(v1), Value::F32(v2)) => v1 == v2,
+ (Value::F64(v1), Value::F64(v2)) => v1 == v2,
+ _ => return Err(Error::TypeMismatch),
+ };
+ Ok(Value::Generic(value as u64))
+ }
+
+ /// Perform the `>=` relational operation.
+ ///
+ /// This operation requires matching types.
+ /// If the value type is `Generic`, then it is interpreted as a signed value.
+ ///
+ /// This corresponds to the DWARF `DW_OP_ge` operation.
+ pub fn ge(self, rhs: Value, addr_mask: u64) -> Result<Value> {
+ let value = match (self, rhs) {
+ (Value::Generic(v1), Value::Generic(v2)) => {
+ sign_extend(v1, addr_mask) >= sign_extend(v2, addr_mask)
+ }
+ (Value::I8(v1), Value::I8(v2)) => v1 >= v2,
+ (Value::U8(v1), Value::U8(v2)) => v1 >= v2,
+ (Value::I16(v1), Value::I16(v2)) => v1 >= v2,
+ (Value::U16(v1), Value::U16(v2)) => v1 >= v2,
+ (Value::I32(v1), Value::I32(v2)) => v1 >= v2,
+ (Value::U32(v1), Value::U32(v2)) => v1 >= v2,
+ (Value::I64(v1), Value::I64(v2)) => v1 >= v2,
+ (Value::U64(v1), Value::U64(v2)) => v1 >= v2,
+ (Value::F32(v1), Value::F32(v2)) => v1 >= v2,
+ (Value::F64(v1), Value::F64(v2)) => v1 >= v2,
+ _ => return Err(Error::TypeMismatch),
+ };
+ Ok(Value::Generic(value as u64))
+ }
+
+ /// Perform the `>` relational operation.
+ ///
+ /// This operation requires matching types.
+ /// If the value type is `Generic`, then it is interpreted as a signed value.
+ ///
+ /// This corresponds to the DWARF `DW_OP_gt` operation.
+ pub fn gt(self, rhs: Value, addr_mask: u64) -> Result<Value> {
+ let value = match (self, rhs) {
+ (Value::Generic(v1), Value::Generic(v2)) => {
+ sign_extend(v1, addr_mask) > sign_extend(v2, addr_mask)
+ }
+ (Value::I8(v1), Value::I8(v2)) => v1 > v2,
+ (Value::U8(v1), Value::U8(v2)) => v1 > v2,
+ (Value::I16(v1), Value::I16(v2)) => v1 > v2,
+ (Value::U16(v1), Value::U16(v2)) => v1 > v2,
+ (Value::I32(v1), Value::I32(v2)) => v1 > v2,
+ (Value::U32(v1), Value::U32(v2)) => v1 > v2,
+ (Value::I64(v1), Value::I64(v2)) => v1 > v2,
+ (Value::U64(v1), Value::U64(v2)) => v1 > v2,
+ (Value::F32(v1), Value::F32(v2)) => v1 > v2,
+ (Value::F64(v1), Value::F64(v2)) => v1 > v2,
+ _ => return Err(Error::TypeMismatch),
+ };
+ Ok(Value::Generic(value as u64))
+ }
+
+ /// Perform the `<=` relational operation.
+ ///
+ /// This operation requires matching types.
+ /// If the value type is `Generic`, then it is interpreted as a signed value.
+ ///
+ /// This corresponds to the DWARF `DW_OP_le` operation.
+ pub fn le(self, rhs: Value, addr_mask: u64) -> Result<Value> {
+ let value = match (self, rhs) {
+ (Value::Generic(v1), Value::Generic(v2)) => {
+ sign_extend(v1, addr_mask) <= sign_extend(v2, addr_mask)
+ }
+ (Value::I8(v1), Value::I8(v2)) => v1 <= v2,
+ (Value::U8(v1), Value::U8(v2)) => v1 <= v2,
+ (Value::I16(v1), Value::I16(v2)) => v1 <= v2,
+ (Value::U16(v1), Value::U16(v2)) => v1 <= v2,
+ (Value::I32(v1), Value::I32(v2)) => v1 <= v2,
+ (Value::U32(v1), Value::U32(v2)) => v1 <= v2,
+ (Value::I64(v1), Value::I64(v2)) => v1 <= v2,
+ (Value::U64(v1), Value::U64(v2)) => v1 <= v2,
+ (Value::F32(v1), Value::F32(v2)) => v1 <= v2,
+ (Value::F64(v1), Value::F64(v2)) => v1 <= v2,
+ _ => return Err(Error::TypeMismatch),
+ };
+ Ok(Value::Generic(value as u64))
+ }
+
+ /// Perform the `<` relational operation.
+ ///
+ /// This operation requires matching types.
+ /// If the value type is `Generic`, then it is interpreted as a signed value.
+ ///
+ /// This corresponds to the DWARF `DW_OP_lt` operation.
+ pub fn lt(self, rhs: Value, addr_mask: u64) -> Result<Value> {
+ let value = match (self, rhs) {
+ (Value::Generic(v1), Value::Generic(v2)) => {
+ sign_extend(v1, addr_mask) < sign_extend(v2, addr_mask)
+ }
+ (Value::I8(v1), Value::I8(v2)) => v1 < v2,
+ (Value::U8(v1), Value::U8(v2)) => v1 < v2,
+ (Value::I16(v1), Value::I16(v2)) => v1 < v2,
+ (Value::U16(v1), Value::U16(v2)) => v1 < v2,
+ (Value::I32(v1), Value::I32(v2)) => v1 < v2,
+ (Value::U32(v1), Value::U32(v2)) => v1 < v2,
+ (Value::I64(v1), Value::I64(v2)) => v1 < v2,
+ (Value::U64(v1), Value::U64(v2)) => v1 < v2,
+ (Value::F32(v1), Value::F32(v2)) => v1 < v2,
+ (Value::F64(v1), Value::F64(v2)) => v1 < v2,
+ _ => return Err(Error::TypeMismatch),
+ };
+ Ok(Value::Generic(value as u64))
+ }
+
+ /// Perform the `!=` relational operation.
+ ///
+ /// This operation requires matching types.
+ /// If the value type is `Generic`, then it is interpreted as a signed value.
+ ///
+ /// This corresponds to the DWARF `DW_OP_ne` operation.
+ pub fn ne(self, rhs: Value, addr_mask: u64) -> Result<Value> {
+ let value = match (self, rhs) {
+ (Value::Generic(v1), Value::Generic(v2)) => {
+ sign_extend(v1, addr_mask) != sign_extend(v2, addr_mask)
+ }
+ (Value::I8(v1), Value::I8(v2)) => v1 != v2,
+ (Value::U8(v1), Value::U8(v2)) => v1 != v2,
+ (Value::I16(v1), Value::I16(v2)) => v1 != v2,
+ (Value::U16(v1), Value::U16(v2)) => v1 != v2,
+ (Value::I32(v1), Value::I32(v2)) => v1 != v2,
+ (Value::U32(v1), Value::U32(v2)) => v1 != v2,
+ (Value::I64(v1), Value::I64(v2)) => v1 != v2,
+ (Value::U64(v1), Value::U64(v2)) => v1 != v2,
+ (Value::F32(v1), Value::F32(v2)) => v1 != v2,
+ (Value::F64(v1), Value::F64(v2)) => v1 != v2,
+ _ => return Err(Error::TypeMismatch),
+ };
+ Ok(Value::Generic(value as u64))
+ }
+}
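An illustrative sketch of typed stack arithmetic with a 32-bit address mask, using only the operations defined above: `Generic` arithmetic wraps within the mask, mixed operand types are rejected rather than implicitly converted, and `convert` truncates while `reinterpret` preserves the bit pattern.

    fn value_arithmetic_sketch() -> Result<()> {
        let addr_mask = 0xffff_ffff;
        assert_eq!(
            Value::Generic(0xffff_ffff).add(Value::Generic(1), addr_mask)?,
            Value::Generic(0)
        );
        assert_eq!(
            Value::I32(1).add(Value::U32(1), addr_mask),
            Err(Error::TypeMismatch)
        );
        assert_eq!(
            Value::I32(-1).convert(ValueType::U8, addr_mask)?,
            Value::U8(0xff)
        );
        assert_eq!(
            Value::F32(1.0).reinterpret(ValueType::U32, addr_mask)?,
            Value::U32(0x3f80_0000)
        );
        Ok(())
    }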
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::common::{DebugAbbrevOffset, DebugInfoOffset, Encoding, Format};
+ use crate::endianity::LittleEndian;
+ use crate::read::{
+ Abbreviation, AttributeSpecification, DebuggingInformationEntry, EndianSlice, UnitHeader,
+ UnitOffset, UnitType,
+ };
+
+ #[test]
+ #[rustfmt::skip]
+ fn valuetype_from_encoding() {
+ let encoding = Encoding {
+ format: Format::Dwarf32,
+ version: 4,
+ address_size: 4,
+ };
+ let unit = UnitHeader::new(
+ encoding,
+ 7,
+ UnitType::Compilation,
+ DebugAbbrevOffset(0),
+ DebugInfoOffset(0).into(),
+ EndianSlice::new(&[], LittleEndian),
+ );
+
+ let abbrev = Abbreviation::new(
+ 42,
+ constants::DW_TAG_base_type,
+ constants::DW_CHILDREN_no,
+ vec![
+ AttributeSpecification::new(
+ constants::DW_AT_byte_size,
+ constants::DW_FORM_udata,
+ None,
+ ),
+ AttributeSpecification::new(
+ constants::DW_AT_encoding,
+ constants::DW_FORM_udata,
+ None,
+ ),
+ AttributeSpecification::new(
+ constants::DW_AT_endianity,
+ constants::DW_FORM_udata,
+ None,
+ ),
+ ].into(),
+ );
+
+ for &(attrs, result) in &[
+ ([0x01, constants::DW_ATE_signed.0, constants::DW_END_default.0], ValueType::I8),
+ ([0x02, constants::DW_ATE_signed.0, constants::DW_END_default.0], ValueType::I16),
+ ([0x04, constants::DW_ATE_signed.0, constants::DW_END_default.0], ValueType::I32),
+ ([0x08, constants::DW_ATE_signed.0, constants::DW_END_default.0], ValueType::I64),
+ ([0x01, constants::DW_ATE_unsigned.0, constants::DW_END_default.0], ValueType::U8),
+ ([0x02, constants::DW_ATE_unsigned.0, constants::DW_END_default.0], ValueType::U16),
+ ([0x04, constants::DW_ATE_unsigned.0, constants::DW_END_default.0], ValueType::U32),
+ ([0x08, constants::DW_ATE_unsigned.0, constants::DW_END_default.0], ValueType::U64),
+ ([0x04, constants::DW_ATE_float.0, constants::DW_END_default.0], ValueType::F32),
+ ([0x08, constants::DW_ATE_float.0, constants::DW_END_default.0], ValueType::F64),
+ ] {
+ let entry = DebuggingInformationEntry::new(
+ UnitOffset(0),
+ EndianSlice::new(&attrs, LittleEndian),
+ &abbrev,
+ &unit,
+ );
+ assert_eq!(ValueType::from_entry(&entry), Ok(Some(result)));
+ }
+
+ for attrs in &[
+ [0x03, constants::DW_ATE_signed.0, constants::DW_END_default.0],
+ [0x02, constants::DW_ATE_signed.0, constants::DW_END_big.0],
+ ] {
+ let entry = DebuggingInformationEntry::new(
+ UnitOffset(0),
+ EndianSlice::new(attrs, LittleEndian),
+ &abbrev,
+ &unit,
+ );
+ assert_eq!(ValueType::from_entry(&entry), Ok(None));
+ }
+ }
+
+ #[test]
+ fn value_convert() {
+ let addr_mask = !0 >> 32;
+ for &(v, t, result) in &[
+ (Value::Generic(1), ValueType::I8, Ok(Value::I8(1))),
+ (Value::I8(1), ValueType::U8, Ok(Value::U8(1))),
+ (Value::U8(1), ValueType::I16, Ok(Value::I16(1))),
+ (Value::I16(1), ValueType::U16, Ok(Value::U16(1))),
+ (Value::U16(1), ValueType::I32, Ok(Value::I32(1))),
+ (Value::I32(1), ValueType::U32, Ok(Value::U32(1))),
+ (Value::U32(1), ValueType::F32, Ok(Value::F32(1.))),
+ (Value::F32(1.), ValueType::I64, Ok(Value::I64(1))),
+ (Value::I64(1), ValueType::U64, Ok(Value::U64(1))),
+ (Value::U64(1), ValueType::F64, Ok(Value::F64(1.))),
+ (Value::F64(1.), ValueType::Generic, Ok(Value::Generic(1))),
+ ] {
+ assert_eq!(v.convert(t, addr_mask), result);
+ }
+ }
+
+ #[test]
+ #[rustfmt::skip]
+ fn value_reinterpret() {
+ let addr_mask = !0 >> 32;
+ for &(v, t, result) in &[
+ // 8-bit
+ (Value::I8(-1), ValueType::U8, Ok(Value::U8(0xff))),
+ (Value::U8(0xff), ValueType::I8, Ok(Value::I8(-1))),
+ // 16-bit
+ (Value::I16(1), ValueType::U16, Ok(Value::U16(1))),
+ (Value::U16(1), ValueType::I16, Ok(Value::I16(1))),
+ // 32-bit
+ (Value::Generic(1), ValueType::I32, Ok(Value::I32(1))),
+ (Value::I32(1), ValueType::U32, Ok(Value::U32(1))),
+ (Value::U32(0x3f80_0000), ValueType::F32, Ok(Value::F32(1.0))),
+ (Value::F32(1.0), ValueType::Generic, Ok(Value::Generic(0x3f80_0000))),
+ // Type mismatches
+ (Value::Generic(1), ValueType::U8, Err(Error::TypeMismatch)),
+ (Value::U8(1), ValueType::U16, Err(Error::TypeMismatch)),
+ (Value::U16(1), ValueType::U32, Err(Error::TypeMismatch)),
+ (Value::U32(1), ValueType::U64, Err(Error::TypeMismatch)),
+ (Value::U64(1), ValueType::Generic, Err(Error::TypeMismatch)),
+ ] {
+ assert_eq!(v.reinterpret(t, addr_mask), result);
+ }
+
+ let addr_mask = !0;
+ for &(v, t, result) in &[
+ // 64-bit
+ (Value::Generic(1), ValueType::I64, Ok(Value::I64(1))),
+ (Value::I64(1), ValueType::U64, Ok(Value::U64(1))),
+ (Value::U64(0x3ff0_0000_0000_0000), ValueType::F64, Ok(Value::F64(1.0))),
+ (Value::F64(1.0), ValueType::Generic, Ok(Value::Generic(0x3ff0_0000_0000_0000))),
+ ] {
+ assert_eq!(v.reinterpret(t, addr_mask), result);
+ }
+ }
+
+ #[test]
+ #[rustfmt::skip]
+ fn value_abs() {
+ let addr_mask = 0xffff_ffff;
+ for &(v, result) in &[
+ (Value::Generic(0xffff_ffff), Ok(Value::Generic(1))),
+ (Value::I8(-1), Ok(Value::I8(1))),
+ (Value::U8(1), Ok(Value::U8(1))),
+ (Value::I16(-1), Ok(Value::I16(1))),
+ (Value::U16(1), Ok(Value::U16(1))),
+ (Value::I32(-1), Ok(Value::I32(1))),
+ (Value::U32(1), Ok(Value::U32(1))),
+ (Value::I64(-1), Ok(Value::I64(1))),
+ (Value::U64(1), Ok(Value::U64(1))),
+ (Value::F32(-1.), Ok(Value::F32(1.))),
+ (Value::F64(-1.), Ok(Value::F64(1.))),
+ ] {
+ assert_eq!(v.abs(addr_mask), result);
+ }
+ }
+
+ #[test]
+ #[rustfmt::skip]
+ fn value_neg() {
+ let addr_mask = 0xffff_ffff;
+ for &(v, result) in &[
+ (Value::Generic(0xffff_ffff), Ok(Value::Generic(1))),
+ (Value::I8(1), Ok(Value::I8(-1))),
+ (Value::U8(1), Err(Error::UnsupportedTypeOperation)),
+ (Value::I16(1), Ok(Value::I16(-1))),
+ (Value::U16(1), Err(Error::UnsupportedTypeOperation)),
+ (Value::I32(1), Ok(Value::I32(-1))),
+ (Value::U32(1), Err(Error::UnsupportedTypeOperation)),
+ (Value::I64(1), Ok(Value::I64(-1))),
+ (Value::U64(1), Err(Error::UnsupportedTypeOperation)),
+ (Value::F32(1.), Ok(Value::F32(-1.))),
+ (Value::F64(1.), Ok(Value::F64(-1.))),
+ ] {
+ assert_eq!(v.neg(addr_mask), result);
+ }
+ }
+
+ #[test]
+ #[rustfmt::skip]
+ fn value_add() {
+ let addr_mask = 0xffff_ffff;
+ for &(v1, v2, result) in &[
+ (Value::Generic(1), Value::Generic(2), Ok(Value::Generic(3))),
+ (Value::I8(-1), Value::I8(2), Ok(Value::I8(1))),
+ (Value::U8(1), Value::U8(2), Ok(Value::U8(3))),
+ (Value::I16(-1), Value::I16(2), Ok(Value::I16(1))),
+ (Value::U16(1), Value::U16(2), Ok(Value::U16(3))),
+ (Value::I32(-1), Value::I32(2), Ok(Value::I32(1))),
+ (Value::U32(1), Value::U32(2), Ok(Value::U32(3))),
+ (Value::I64(-1), Value::I64(2), Ok(Value::I64(1))),
+ (Value::U64(1), Value::U64(2), Ok(Value::U64(3))),
+ (Value::F32(-1.), Value::F32(2.), Ok(Value::F32(1.))),
+ (Value::F64(-1.), Value::F64(2.), Ok(Value::F64(1.))),
+ (Value::Generic(1), Value::U32(2), Err(Error::TypeMismatch)),
+ ] {
+ assert_eq!(v1.add(v2, addr_mask), result);
+ }
+ }
+
+ #[test]
+ #[rustfmt::skip]
+ fn value_sub() {
+ let addr_mask = 0xffff_ffff;
+ for &(v1, v2, result) in &[
+ (Value::Generic(3), Value::Generic(2), Ok(Value::Generic(1))),
+ (Value::I8(-1), Value::I8(2), Ok(Value::I8(-3))),
+ (Value::U8(3), Value::U8(2), Ok(Value::U8(1))),
+ (Value::I16(-1), Value::I16(2), Ok(Value::I16(-3))),
+ (Value::U16(3), Value::U16(2), Ok(Value::U16(1))),
+ (Value::I32(-1), Value::I32(2), Ok(Value::I32(-3))),
+ (Value::U32(3), Value::U32(2), Ok(Value::U32(1))),
+ (Value::I64(-1), Value::I64(2), Ok(Value::I64(-3))),
+ (Value::U64(3), Value::U64(2), Ok(Value::U64(1))),
+ (Value::F32(-1.), Value::F32(2.), Ok(Value::F32(-3.))),
+ (Value::F64(-1.), Value::F64(2.), Ok(Value::F64(-3.))),
+ (Value::Generic(3), Value::U32(2), Err(Error::TypeMismatch)),
+ ] {
+ assert_eq!(v1.sub(v2, addr_mask), result);
+ }
+ }
+
+ #[test]
+ #[rustfmt::skip]
+ fn value_mul() {
+ let addr_mask = 0xffff_ffff;
+ for &(v1, v2, result) in &[
+ (Value::Generic(2), Value::Generic(3), Ok(Value::Generic(6))),
+ (Value::I8(-2), Value::I8(3), Ok(Value::I8(-6))),
+ (Value::U8(2), Value::U8(3), Ok(Value::U8(6))),
+ (Value::I16(-2), Value::I16(3), Ok(Value::I16(-6))),
+ (Value::U16(2), Value::U16(3), Ok(Value::U16(6))),
+ (Value::I32(-2), Value::I32(3), Ok(Value::I32(-6))),
+ (Value::U32(2), Value::U32(3), Ok(Value::U32(6))),
+ (Value::I64(-2), Value::I64(3), Ok(Value::I64(-6))),
+ (Value::U64(2), Value::U64(3), Ok(Value::U64(6))),
+ (Value::F32(-2.), Value::F32(3.), Ok(Value::F32(-6.))),
+ (Value::F64(-2.), Value::F64(3.), Ok(Value::F64(-6.))),
+ (Value::Generic(2), Value::U32(3), Err(Error::TypeMismatch)),
+ ] {
+ assert_eq!(v1.mul(v2, addr_mask), result);
+ }
+ }
+
+ #[test]
+ #[rustfmt::skip]
+ fn value_div() {
+ let addr_mask = 0xffff_ffff;
+ for &(v1, v2, result) in &[
+ (Value::Generic(6), Value::Generic(3), Ok(Value::Generic(2))),
+ (Value::I8(-6), Value::I8(3), Ok(Value::I8(-2))),
+ (Value::U8(6), Value::U8(3), Ok(Value::U8(2))),
+ (Value::I16(-6), Value::I16(3), Ok(Value::I16(-2))),
+ (Value::U16(6), Value::U16(3), Ok(Value::U16(2))),
+ (Value::I32(-6), Value::I32(3), Ok(Value::I32(-2))),
+ (Value::U32(6), Value::U32(3), Ok(Value::U32(2))),
+ (Value::I64(-6), Value::I64(3), Ok(Value::I64(-2))),
+ (Value::U64(6), Value::U64(3), Ok(Value::U64(2))),
+ (Value::F32(-6.), Value::F32(3.), Ok(Value::F32(-2.))),
+ (Value::F64(-6.), Value::F64(3.), Ok(Value::F64(-2.))),
+ (Value::Generic(6), Value::U32(3), Err(Error::TypeMismatch)),
+ ] {
+ assert_eq!(v1.div(v2, addr_mask), result);
+ }
+ for &(v1, v2, result) in &[
+ (Value::Generic(6), Value::Generic(0), Err(Error::DivisionByZero)),
+ (Value::I8(-6), Value::I8(0), Err(Error::DivisionByZero)),
+ (Value::U8(6), Value::U8(0), Err(Error::DivisionByZero)),
+ (Value::I16(-6), Value::I16(0), Err(Error::DivisionByZero)),
+ (Value::U16(6), Value::U16(0), Err(Error::DivisionByZero)),
+ (Value::I32(-6), Value::I32(0), Err(Error::DivisionByZero)),
+ (Value::U32(6), Value::U32(0), Err(Error::DivisionByZero)),
+ (Value::I64(-6), Value::I64(0), Err(Error::DivisionByZero)),
+ (Value::U64(6), Value::U64(0), Err(Error::DivisionByZero)),
+ (Value::F32(-6.), Value::F32(0.), Ok(Value::F32(-6. / 0.))),
+ (Value::F64(-6.), Value::F64(0.), Ok(Value::F64(-6. / 0.))),
+ ] {
+ assert_eq!(v1.div(v2, addr_mask), result);
+ }
+ }
+
+ #[test]
+ #[rustfmt::skip]
+ fn value_rem() {
+ let addr_mask = 0xffff_ffff;
+ for &(v1, v2, result) in &[
+ (Value::Generic(3), Value::Generic(2), Ok(Value::Generic(1))),
+ (Value::I8(-3), Value::I8(2), Ok(Value::I8(-1))),
+ (Value::U8(3), Value::U8(2), Ok(Value::U8(1))),
+ (Value::I16(-3), Value::I16(2), Ok(Value::I16(-1))),
+ (Value::U16(3), Value::U16(2), Ok(Value::U16(1))),
+ (Value::I32(-3), Value::I32(2), Ok(Value::I32(-1))),
+ (Value::U32(3), Value::U32(2), Ok(Value::U32(1))),
+ (Value::I64(-3), Value::I64(2), Ok(Value::I64(-1))),
+ (Value::U64(3), Value::U64(2), Ok(Value::U64(1))),
+ (Value::F32(-3.), Value::F32(2.), Err(Error::IntegralTypeRequired)),
+ (Value::F64(-3.), Value::F64(2.), Err(Error::IntegralTypeRequired)),
+ (Value::Generic(3), Value::U32(2), Err(Error::TypeMismatch)),
+ ] {
+ assert_eq!(v1.rem(v2, addr_mask), result);
+ }
+ for &(v1, v2, result) in &[
+ (Value::Generic(3), Value::Generic(0), Err(Error::DivisionByZero)),
+ (Value::I8(-3), Value::I8(0), Err(Error::DivisionByZero)),
+ (Value::U8(3), Value::U8(0), Err(Error::DivisionByZero)),
+ (Value::I16(-3), Value::I16(0), Err(Error::DivisionByZero)),
+ (Value::U16(3), Value::U16(0), Err(Error::DivisionByZero)),
+ (Value::I32(-3), Value::I32(0), Err(Error::DivisionByZero)),
+ (Value::U32(3), Value::U32(0), Err(Error::DivisionByZero)),
+ (Value::I64(-3), Value::I64(0), Err(Error::DivisionByZero)),
+ (Value::U64(3), Value::U64(0), Err(Error::DivisionByZero)),
+ ] {
+ assert_eq!(v1.rem(v2, addr_mask), result);
+ }
+ }
+
+ #[test]
+ #[rustfmt::skip]
+ fn value_not() {
+ let addr_mask = 0xffff_ffff;
+ for &(v, result) in &[
+ (Value::Generic(1), Ok(Value::Generic(!1))),
+ (Value::I8(1), Ok(Value::I8(!1))),
+ (Value::U8(1), Ok(Value::U8(!1))),
+ (Value::I16(1), Ok(Value::I16(!1))),
+ (Value::U16(1), Ok(Value::U16(!1))),
+ (Value::I32(1), Ok(Value::I32(!1))),
+ (Value::U32(1), Ok(Value::U32(!1))),
+ (Value::I64(1), Ok(Value::I64(!1))),
+ (Value::U64(1), Ok(Value::U64(!1))),
+ (Value::F32(1.), Err(Error::IntegralTypeRequired)),
+ (Value::F64(1.), Err(Error::IntegralTypeRequired)),
+ ] {
+ assert_eq!(v.not(addr_mask), result);
+ }
+ }
+
+ #[test]
+ #[rustfmt::skip]
+ fn value_and() {
+ let addr_mask = 0xffff_ffff;
+ for &(v1, v2, result) in &[
+ (Value::Generic(3), Value::Generic(5), Ok(Value::Generic(1))),
+ (Value::I8(3), Value::I8(5), Ok(Value::I8(1))),
+ (Value::U8(3), Value::U8(5), Ok(Value::U8(1))),
+ (Value::I16(3), Value::I16(5), Ok(Value::I16(1))),
+ (Value::U16(3), Value::U16(5), Ok(Value::U16(1))),
+ (Value::I32(3), Value::I32(5), Ok(Value::I32(1))),
+ (Value::U32(3), Value::U32(5), Ok(Value::U32(1))),
+ (Value::I64(3), Value::I64(5), Ok(Value::I64(1))),
+ (Value::U64(3), Value::U64(5), Ok(Value::U64(1))),
+ (Value::F32(3.), Value::F32(5.), Err(Error::IntegralTypeRequired)),
+ (Value::F64(3.), Value::F64(5.), Err(Error::IntegralTypeRequired)),
+ (Value::Generic(3), Value::U32(5), Err(Error::TypeMismatch)),
+ ] {
+ assert_eq!(v1.and(v2, addr_mask), result);
+ }
+ }
+
+ #[test]
+ #[rustfmt::skip]
+ fn value_or() {
+ let addr_mask = 0xffff_ffff;
+ for &(v1, v2, result) in &[
+ (Value::Generic(3), Value::Generic(5), Ok(Value::Generic(7))),
+ (Value::I8(3), Value::I8(5), Ok(Value::I8(7))),
+ (Value::U8(3), Value::U8(5), Ok(Value::U8(7))),
+ (Value::I16(3), Value::I16(5), Ok(Value::I16(7))),
+ (Value::U16(3), Value::U16(5), Ok(Value::U16(7))),
+ (Value::I32(3), Value::I32(5), Ok(Value::I32(7))),
+ (Value::U32(3), Value::U32(5), Ok(Value::U32(7))),
+ (Value::I64(3), Value::I64(5), Ok(Value::I64(7))),
+ (Value::U64(3), Value::U64(5), Ok(Value::U64(7))),
+ (Value::F32(3.), Value::F32(5.), Err(Error::IntegralTypeRequired)),
+ (Value::F64(3.), Value::F64(5.), Err(Error::IntegralTypeRequired)),
+ (Value::Generic(3), Value::U32(5), Err(Error::TypeMismatch)),
+ ] {
+ assert_eq!(v1.or(v2, addr_mask), result);
+ }
+ }
+
+ #[test]
+ #[rustfmt::skip]
+ fn value_xor() {
+ let addr_mask = 0xffff_ffff;
+ for &(v1, v2, result) in &[
+ (Value::Generic(3), Value::Generic(5), Ok(Value::Generic(6))),
+ (Value::I8(3), Value::I8(5), Ok(Value::I8(6))),
+ (Value::U8(3), Value::U8(5), Ok(Value::U8(6))),
+ (Value::I16(3), Value::I16(5), Ok(Value::I16(6))),
+ (Value::U16(3), Value::U16(5), Ok(Value::U16(6))),
+ (Value::I32(3), Value::I32(5), Ok(Value::I32(6))),
+ (Value::U32(3), Value::U32(5), Ok(Value::U32(6))),
+ (Value::I64(3), Value::I64(5), Ok(Value::I64(6))),
+ (Value::U64(3), Value::U64(5), Ok(Value::U64(6))),
+ (Value::F32(3.), Value::F32(5.), Err(Error::IntegralTypeRequired)),
+ (Value::F64(3.), Value::F64(5.), Err(Error::IntegralTypeRequired)),
+ (Value::Generic(3), Value::U32(5), Err(Error::TypeMismatch)),
+ ] {
+ assert_eq!(v1.xor(v2, addr_mask), result);
+ }
+ }
+
+ #[test]
+ #[rustfmt::skip]
+ fn value_shl() {
+ let addr_mask = 0xffff_ffff;
+ for &(v1, v2, result) in &[
+ // One of each type
+ (Value::Generic(3), Value::Generic(5), Ok(Value::Generic(96))),
+ (Value::I8(3), Value::U8(5), Ok(Value::I8(96))),
+ (Value::U8(3), Value::I8(5), Ok(Value::U8(96))),
+ (Value::I16(3), Value::U16(5), Ok(Value::I16(96))),
+ (Value::U16(3), Value::I16(5), Ok(Value::U16(96))),
+ (Value::I32(3), Value::U32(5), Ok(Value::I32(96))),
+ (Value::U32(3), Value::I32(5), Ok(Value::U32(96))),
+ (Value::I64(3), Value::U64(5), Ok(Value::I64(96))),
+ (Value::U64(3), Value::I64(5), Ok(Value::U64(96))),
+ (Value::F32(3.), Value::U8(5), Err(Error::IntegralTypeRequired)),
+ (Value::F64(3.), Value::U8(5), Err(Error::IntegralTypeRequired)),
+ // Invalid shifts
+ (Value::U8(3), Value::I8(-5), Err(Error::InvalidShiftExpression)),
+ (Value::U8(3), Value::I16(-5), Err(Error::InvalidShiftExpression)),
+ (Value::U8(3), Value::I32(-5), Err(Error::InvalidShiftExpression)),
+ (Value::U8(3), Value::I64(-5), Err(Error::InvalidShiftExpression)),
+ (Value::U8(3), Value::F32(5.), Err(Error::InvalidShiftExpression)),
+ (Value::U8(3), Value::F64(5.), Err(Error::InvalidShiftExpression)),
+ // Large shifts
+ (Value::Generic(3), Value::Generic(32), Ok(Value::Generic(0))),
+ (Value::I8(3), Value::U8(8), Ok(Value::I8(0))),
+ (Value::U8(3), Value::I8(9), Ok(Value::U8(0))),
+ (Value::I16(3), Value::U16(17), Ok(Value::I16(0))),
+ (Value::U16(3), Value::I16(16), Ok(Value::U16(0))),
+ (Value::I32(3), Value::U32(32), Ok(Value::I32(0))),
+ (Value::U32(3), Value::I32(33), Ok(Value::U32(0))),
+ (Value::I64(3), Value::U64(65), Ok(Value::I64(0))),
+ (Value::U64(3), Value::I64(64), Ok(Value::U64(0))),
+ ] {
+ assert_eq!(v1.shl(v2, addr_mask), result);
+ }
+ }
+
+ #[test]
+ #[rustfmt::skip]
+ fn value_shr() {
+ let addr_mask = 0xffff_ffff;
+ for &(v1, v2, result) in &[
+ // One of each type
+ (Value::Generic(96), Value::Generic(5), Ok(Value::Generic(3))),
+ (Value::I8(96), Value::U8(5), Err(Error::UnsupportedTypeOperation)),
+ (Value::U8(96), Value::I8(5), Ok(Value::U8(3))),
+ (Value::I16(96), Value::U16(5), Err(Error::UnsupportedTypeOperation)),
+ (Value::U16(96), Value::I16(5), Ok(Value::U16(3))),
+ (Value::I32(96), Value::U32(5), Err(Error::UnsupportedTypeOperation)),
+ (Value::U32(96), Value::I32(5), Ok(Value::U32(3))),
+ (Value::I64(96), Value::U64(5), Err(Error::UnsupportedTypeOperation)),
+ (Value::U64(96), Value::I64(5), Ok(Value::U64(3))),
+ (Value::F32(96.), Value::U8(5), Err(Error::IntegralTypeRequired)),
+ (Value::F64(96.), Value::U8(5), Err(Error::IntegralTypeRequired)),
+ // Invalid shifts
+ (Value::U8(96), Value::I8(-5), Err(Error::InvalidShiftExpression)),
+ (Value::U8(96), Value::I16(-5), Err(Error::InvalidShiftExpression)),
+ (Value::U8(96), Value::I32(-5), Err(Error::InvalidShiftExpression)),
+ (Value::U8(96), Value::I64(-5), Err(Error::InvalidShiftExpression)),
+ (Value::U8(96), Value::F32(5.), Err(Error::InvalidShiftExpression)),
+ (Value::U8(96), Value::F64(5.), Err(Error::InvalidShiftExpression)),
+ // Large shifts
+ (Value::Generic(96), Value::Generic(32), Ok(Value::Generic(0))),
+ (Value::U8(96), Value::I8(9), Ok(Value::U8(0))),
+ (Value::U16(96), Value::I16(16), Ok(Value::U16(0))),
+ (Value::U32(96), Value::I32(33), Ok(Value::U32(0))),
+ (Value::U64(96), Value::I64(64), Ok(Value::U64(0))),
+ ] {
+ assert_eq!(v1.shr(v2, addr_mask), result);
+ }
+ }
+
+ #[test]
+ #[rustfmt::skip]
+ fn value_shra() {
+ let addr_mask = 0xffff_ffff;
+ for &(v1, v2, result) in &[
+ // One of each type
+ (Value::Generic(u64::from(-96i32 as u32)), Value::Generic(5), Ok(Value::Generic(-3i64 as u64))),
+ (Value::I8(-96), Value::U8(5), Ok(Value::I8(-3))),
+ (Value::U8(96), Value::I8(5), Err(Error::UnsupportedTypeOperation)),
+ (Value::I16(-96), Value::U16(5), Ok(Value::I16(-3))),
+ (Value::U16(96), Value::I16(5), Err(Error::UnsupportedTypeOperation)),
+ (Value::I32(-96), Value::U32(5), Ok(Value::I32(-3))),
+ (Value::U32(96), Value::I32(5), Err(Error::UnsupportedTypeOperation)),
+ (Value::I64(-96), Value::U64(5), Ok(Value::I64(-3))),
+ (Value::U64(96), Value::I64(5), Err(Error::UnsupportedTypeOperation)),
+ (Value::F32(96.), Value::U8(5), Err(Error::IntegralTypeRequired)),
+ (Value::F64(96.), Value::U8(5), Err(Error::IntegralTypeRequired)),
+ // Invalid shifts
+ (Value::U8(96), Value::I8(-5), Err(Error::InvalidShiftExpression)),
+ (Value::U8(96), Value::I16(-5), Err(Error::InvalidShiftExpression)),
+ (Value::U8(96), Value::I32(-5), Err(Error::InvalidShiftExpression)),
+ (Value::U8(96), Value::I64(-5), Err(Error::InvalidShiftExpression)),
+ (Value::U8(96), Value::F32(5.), Err(Error::InvalidShiftExpression)),
+ (Value::U8(96), Value::F64(5.), Err(Error::InvalidShiftExpression)),
+ // Large shifts
+ (Value::Generic(96), Value::Generic(32), Ok(Value::Generic(0))),
+ (Value::I8(96), Value::U8(8), Ok(Value::I8(0))),
+ (Value::I8(-96), Value::U8(8), Ok(Value::I8(-1))),
+ (Value::I16(96), Value::U16(17), Ok(Value::I16(0))),
+ (Value::I16(-96), Value::U16(17), Ok(Value::I16(-1))),
+ (Value::I32(96), Value::U32(32), Ok(Value::I32(0))),
+ (Value::I32(-96), Value::U32(32), Ok(Value::I32(-1))),
+ (Value::I64(96), Value::U64(65), Ok(Value::I64(0))),
+ (Value::I64(-96), Value::U64(65), Ok(Value::I64(-1))),
+ ] {
+ assert_eq!(v1.shra(v2, addr_mask), result);
+ }
+ }
+
+ #[test]
+ fn value_eq() {
+ let addr_mask = 0xffff_ffff;
+ for &(v1, v2, result) in &[
+ (Value::Generic(3), Value::Generic(3), Ok(Value::Generic(1))),
+ (Value::Generic(!3), Value::Generic(3), Ok(Value::Generic(0))),
+ (Value::I8(3), Value::I8(3), Ok(Value::Generic(1))),
+ (Value::I8(!3), Value::I8(3), Ok(Value::Generic(0))),
+ (Value::U8(3), Value::U8(3), Ok(Value::Generic(1))),
+ (Value::U8(!3), Value::U8(3), Ok(Value::Generic(0))),
+ (Value::I16(3), Value::I16(3), Ok(Value::Generic(1))),
+ (Value::I16(!3), Value::I16(3), Ok(Value::Generic(0))),
+ (Value::U16(3), Value::U16(3), Ok(Value::Generic(1))),
+ (Value::U16(!3), Value::U16(3), Ok(Value::Generic(0))),
+ (Value::I32(3), Value::I32(3), Ok(Value::Generic(1))),
+ (Value::I32(!3), Value::I32(3), Ok(Value::Generic(0))),
+ (Value::U32(3), Value::U32(3), Ok(Value::Generic(1))),
+ (Value::U32(!3), Value::U32(3), Ok(Value::Generic(0))),
+ (Value::I64(3), Value::I64(3), Ok(Value::Generic(1))),
+ (Value::I64(!3), Value::I64(3), Ok(Value::Generic(0))),
+ (Value::U64(3), Value::U64(3), Ok(Value::Generic(1))),
+ (Value::U64(!3), Value::U64(3), Ok(Value::Generic(0))),
+ (Value::F32(3.), Value::F32(3.), Ok(Value::Generic(1))),
+ (Value::F32(-3.), Value::F32(3.), Ok(Value::Generic(0))),
+ (Value::F64(3.), Value::F64(3.), Ok(Value::Generic(1))),
+ (Value::F64(-3.), Value::F64(3.), Ok(Value::Generic(0))),
+ (Value::Generic(3), Value::U32(3), Err(Error::TypeMismatch)),
+ ] {
+ assert_eq!(v1.eq(v2, addr_mask), result);
+ }
+ }
+
+ #[test]
+ fn value_ne() {
+ let addr_mask = 0xffff_ffff;
+ for &(v1, v2, result) in &[
+ (Value::Generic(3), Value::Generic(3), Ok(Value::Generic(0))),
+ (Value::Generic(!3), Value::Generic(3), Ok(Value::Generic(1))),
+ (Value::I8(3), Value::I8(3), Ok(Value::Generic(0))),
+ (Value::I8(!3), Value::I8(3), Ok(Value::Generic(1))),
+ (Value::U8(3), Value::U8(3), Ok(Value::Generic(0))),
+ (Value::U8(!3), Value::U8(3), Ok(Value::Generic(1))),
+ (Value::I16(3), Value::I16(3), Ok(Value::Generic(0))),
+ (Value::I16(!3), Value::I16(3), Ok(Value::Generic(1))),
+ (Value::U16(3), Value::U16(3), Ok(Value::Generic(0))),
+ (Value::U16(!3), Value::U16(3), Ok(Value::Generic(1))),
+ (Value::I32(3), Value::I32(3), Ok(Value::Generic(0))),
+ (Value::I32(!3), Value::I32(3), Ok(Value::Generic(1))),
+ (Value::U32(3), Value::U32(3), Ok(Value::Generic(0))),
+ (Value::U32(!3), Value::U32(3), Ok(Value::Generic(1))),
+ (Value::I64(3), Value::I64(3), Ok(Value::Generic(0))),
+ (Value::I64(!3), Value::I64(3), Ok(Value::Generic(1))),
+ (Value::U64(3), Value::U64(3), Ok(Value::Generic(0))),
+ (Value::U64(!3), Value::U64(3), Ok(Value::Generic(1))),
+ (Value::F32(3.), Value::F32(3.), Ok(Value::Generic(0))),
+ (Value::F32(-3.), Value::F32(3.), Ok(Value::Generic(1))),
+ (Value::F64(3.), Value::F64(3.), Ok(Value::Generic(0))),
+ (Value::F64(-3.), Value::F64(3.), Ok(Value::Generic(1))),
+ (Value::Generic(3), Value::U32(3), Err(Error::TypeMismatch)),
+ ] {
+ assert_eq!(v1.ne(v2, addr_mask), result);
+ }
+ }
+
+ #[test]
+ fn value_ge() {
+ let addr_mask = 0xffff_ffff;
+ for &(v1, v2, result) in &[
+ (Value::Generic(3), Value::Generic(!3), Ok(Value::Generic(1))),
+ (Value::Generic(!3), Value::Generic(3), Ok(Value::Generic(0))),
+ (Value::I8(3), Value::I8(!3), Ok(Value::Generic(1))),
+ (Value::I8(!3), Value::I8(3), Ok(Value::Generic(0))),
+ (Value::U8(3), Value::U8(!3), Ok(Value::Generic(0))),
+ (Value::U8(!3), Value::U8(3), Ok(Value::Generic(1))),
+ (Value::I16(3), Value::I16(!3), Ok(Value::Generic(1))),
+ (Value::I16(!3), Value::I16(3), Ok(Value::Generic(0))),
+ (Value::U16(3), Value::U16(!3), Ok(Value::Generic(0))),
+ (Value::U16(!3), Value::U16(3), Ok(Value::Generic(1))),
+ (Value::I32(3), Value::I32(!3), Ok(Value::Generic(1))),
+ (Value::I32(!3), Value::I32(3), Ok(Value::Generic(0))),
+ (Value::U32(3), Value::U32(!3), Ok(Value::Generic(0))),
+ (Value::U32(!3), Value::U32(3), Ok(Value::Generic(1))),
+ (Value::I64(3), Value::I64(!3), Ok(Value::Generic(1))),
+ (Value::I64(!3), Value::I64(3), Ok(Value::Generic(0))),
+ (Value::U64(3), Value::U64(!3), Ok(Value::Generic(0))),
+ (Value::U64(!3), Value::U64(3), Ok(Value::Generic(1))),
+ (Value::F32(3.), Value::F32(-3.), Ok(Value::Generic(1))),
+ (Value::F32(-3.), Value::F32(3.), Ok(Value::Generic(0))),
+ (Value::F64(3.), Value::F64(-3.), Ok(Value::Generic(1))),
+ (Value::F64(-3.), Value::F64(3.), Ok(Value::Generic(0))),
+ (Value::Generic(3), Value::U32(3), Err(Error::TypeMismatch)),
+ ] {
+ assert_eq!(v1.ge(v2, addr_mask), result);
+ }
+ }
+
+ #[test]
+ fn value_gt() {
+ let addr_mask = 0xffff_ffff;
+ for &(v1, v2, result) in &[
+ (Value::Generic(3), Value::Generic(!3), Ok(Value::Generic(1))),
+ (Value::Generic(!3), Value::Generic(3), Ok(Value::Generic(0))),
+ (Value::I8(3), Value::I8(!3), Ok(Value::Generic(1))),
+ (Value::I8(!3), Value::I8(3), Ok(Value::Generic(0))),
+ (Value::U8(3), Value::U8(!3), Ok(Value::Generic(0))),
+ (Value::U8(!3), Value::U8(3), Ok(Value::Generic(1))),
+ (Value::I16(3), Value::I16(!3), Ok(Value::Generic(1))),
+ (Value::I16(!3), Value::I16(3), Ok(Value::Generic(0))),
+ (Value::U16(3), Value::U16(!3), Ok(Value::Generic(0))),
+ (Value::U16(!3), Value::U16(3), Ok(Value::Generic(1))),
+ (Value::I32(3), Value::I32(!3), Ok(Value::Generic(1))),
+ (Value::I32(!3), Value::I32(3), Ok(Value::Generic(0))),
+ (Value::U32(3), Value::U32(!3), Ok(Value::Generic(0))),
+ (Value::U32(!3), Value::U32(3), Ok(Value::Generic(1))),
+ (Value::I64(3), Value::I64(!3), Ok(Value::Generic(1))),
+ (Value::I64(!3), Value::I64(3), Ok(Value::Generic(0))),
+ (Value::U64(3), Value::U64(!3), Ok(Value::Generic(0))),
+ (Value::U64(!3), Value::U64(3), Ok(Value::Generic(1))),
+ (Value::F32(3.), Value::F32(-3.), Ok(Value::Generic(1))),
+ (Value::F32(-3.), Value::F32(3.), Ok(Value::Generic(0))),
+ (Value::F64(3.), Value::F64(-3.), Ok(Value::Generic(1))),
+ (Value::F64(-3.), Value::F64(3.), Ok(Value::Generic(0))),
+ (Value::Generic(3), Value::U32(3), Err(Error::TypeMismatch)),
+ ] {
+ assert_eq!(v1.gt(v2, addr_mask), result);
+ }
+ }
+
+ #[test]
+ fn value_le() {
+ let addr_mask = 0xffff_ffff;
+ for &(v1, v2, result) in &[
+ (Value::Generic(3), Value::Generic(!3), Ok(Value::Generic(0))),
+ (Value::Generic(!3), Value::Generic(3), Ok(Value::Generic(1))),
+ (Value::I8(3), Value::I8(!3), Ok(Value::Generic(0))),
+ (Value::I8(!3), Value::I8(3), Ok(Value::Generic(1))),
+ (Value::U8(3), Value::U8(!3), Ok(Value::Generic(1))),
+ (Value::U8(!3), Value::U8(3), Ok(Value::Generic(0))),
+ (Value::I16(3), Value::I16(!3), Ok(Value::Generic(0))),
+ (Value::I16(!3), Value::I16(3), Ok(Value::Generic(1))),
+ (Value::U16(3), Value::U16(!3), Ok(Value::Generic(1))),
+ (Value::U16(!3), Value::U16(3), Ok(Value::Generic(0))),
+ (Value::I32(3), Value::I32(!3), Ok(Value::Generic(0))),
+ (Value::I32(!3), Value::I32(3), Ok(Value::Generic(1))),
+ (Value::U32(3), Value::U32(!3), Ok(Value::Generic(1))),
+ (Value::U32(!3), Value::U32(3), Ok(Value::Generic(0))),
+ (Value::I64(3), Value::I64(!3), Ok(Value::Generic(0))),
+ (Value::I64(!3), Value::I64(3), Ok(Value::Generic(1))),
+ (Value::U64(3), Value::U64(!3), Ok(Value::Generic(1))),
+ (Value::U64(!3), Value::U64(3), Ok(Value::Generic(0))),
+ (Value::F32(3.), Value::F32(-3.), Ok(Value::Generic(0))),
+ (Value::F32(-3.), Value::F32(3.), Ok(Value::Generic(1))),
+ (Value::F64(3.), Value::F64(-3.), Ok(Value::Generic(0))),
+ (Value::F64(-3.), Value::F64(3.), Ok(Value::Generic(1))),
+ (Value::Generic(3), Value::U32(3), Err(Error::TypeMismatch)),
+ ] {
+ assert_eq!(v1.le(v2, addr_mask), result);
+ }
+ }
+
+ #[test]
+ fn value_lt() {
+ let addr_mask = 0xffff_ffff;
+ for &(v1, v2, result) in &[
+ (Value::Generic(3), Value::Generic(!3), Ok(Value::Generic(0))),
+ (Value::Generic(!3), Value::Generic(3), Ok(Value::Generic(1))),
+ (Value::I8(3), Value::I8(!3), Ok(Value::Generic(0))),
+ (Value::I8(!3), Value::I8(3), Ok(Value::Generic(1))),
+ (Value::U8(3), Value::U8(!3), Ok(Value::Generic(1))),
+ (Value::U8(!3), Value::U8(3), Ok(Value::Generic(0))),
+ (Value::I16(3), Value::I16(!3), Ok(Value::Generic(0))),
+ (Value::I16(!3), Value::I16(3), Ok(Value::Generic(1))),
+ (Value::U16(3), Value::U16(!3), Ok(Value::Generic(1))),
+ (Value::U16(!3), Value::U16(3), Ok(Value::Generic(0))),
+ (Value::I32(3), Value::I32(!3), Ok(Value::Generic(0))),
+ (Value::I32(!3), Value::I32(3), Ok(Value::Generic(1))),
+ (Value::U32(3), Value::U32(!3), Ok(Value::Generic(1))),
+ (Value::U32(!3), Value::U32(3), Ok(Value::Generic(0))),
+ (Value::I64(3), Value::I64(!3), Ok(Value::Generic(0))),
+ (Value::I64(!3), Value::I64(3), Ok(Value::Generic(1))),
+ (Value::U64(3), Value::U64(!3), Ok(Value::Generic(1))),
+ (Value::U64(!3), Value::U64(3), Ok(Value::Generic(0))),
+ (Value::F32(3.), Value::F32(-3.), Ok(Value::Generic(0))),
+ (Value::F32(-3.), Value::F32(3.), Ok(Value::Generic(1))),
+ (Value::F64(3.), Value::F64(-3.), Ok(Value::Generic(0))),
+ (Value::F64(-3.), Value::F64(3.), Ok(Value::Generic(1))),
+ (Value::Generic(3), Value::U32(3), Err(Error::TypeMismatch)),
+ ] {
+ assert_eq!(v1.lt(v2, addr_mask), result);
+ }
+ }
+}
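The operator tests above all exercise the same contract: both operands must carry the same `Value` type (anything else is `Error::TypeMismatch`), remainder and the bitwise operators require integral types, and integer division by zero is rejected while float division by zero is not. A minimal external sketch of the same calls, assuming the usual crate-root re-exports of `Value` and `Error`:

use gimli::{Error, Value};

fn main() {
    // Same mask the tests use: treat addresses as 32 bits wide.
    let addr_mask = 0xffff_ffff;
    assert_eq!(Value::U16(1).add(Value::U16(2), addr_mask), Ok(Value::U16(3)));
    // Mixing a Generic operand with a typed one is a type mismatch.
    assert_eq!(
        Value::Generic(1).add(Value::U32(2), addr_mask),
        Err(Error::TypeMismatch)
    );
}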
diff --git a/vendor/gimli-0.26.2/src/test_util.rs b/vendor/gimli-0.26.2/src/test_util.rs
new file mode 100644
index 000000000..706aaf934
--- /dev/null
+++ b/vendor/gimli-0.26.2/src/test_util.rs
@@ -0,0 +1,53 @@
+#![allow(missing_docs)]
+
+use crate::Format;
+use test_assembler::{Label, Section};
+
+pub trait GimliSectionMethods {
+ fn sleb(self, val: i64) -> Self;
+ fn uleb(self, val: u64) -> Self;
+ fn initial_length(self, format: Format, length: &Label, start: &Label) -> Self;
+ fn word(self, size: u8, val: u64) -> Self;
+ fn word_label(self, size: u8, val: &Label) -> Self;
+}
+
+impl GimliSectionMethods for Section {
+ fn sleb(mut self, mut val: i64) -> Self {
+ while val & !0x3f != 0 && val | 0x3f != -1 {
+ self = self.D8(val as u8 | 0x80);
+ val >>= 7;
+ }
+ self.D8(val as u8 & 0x7f)
+ }
+
+ fn uleb(mut self, mut val: u64) -> Self {
+ while val & !0x7f != 0 {
+ self = self.D8(val as u8 | 0x80);
+ val >>= 7;
+ }
+ self.D8(val as u8)
+ }
+
+ fn initial_length(self, format: Format, length: &Label, start: &Label) -> Self {
+ match format {
+ Format::Dwarf32 => self.D32(length).mark(start),
+ Format::Dwarf64 => self.D32(0xffff_ffff).D64(length).mark(start),
+ }
+ }
+
+ fn word(self, size: u8, val: u64) -> Self {
+ match size {
+ 4 => self.D32(val as u32),
+ 8 => self.D64(val),
+ _ => panic!("unsupported word size"),
+ }
+ }
+
+ fn word_label(self, size: u8, val: &Label) -> Self {
+ match size {
+ 4 => self.D32(val),
+ 8 => self.D64(val),
+ _ => panic!("unsupported word size"),
+ }
+ }
+}
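The `uleb`/`sleb` helpers above emit standard LEB128: seven payload bits per byte, continuation bit set on every byte except the last, and for the signed form the loop also stops once only sign bits remain. A small standalone sketch of the unsigned case with a worked value (hypothetical helper, not part of the vendored crate):

fn uleb128(mut val: u64) -> Vec<u8> {
    let mut out = Vec::new();
    while val & !0x7f != 0 {
        out.push(val as u8 | 0x80); // low seven bits plus continuation bit
        val >>= 7;
    }
    out.push(val as u8); // final byte, continuation bit clear
    out
}

fn main() {
    // 300 = 0b1_0010_1100 -> 0xAC (low seven bits, continued), then 0x02.
    assert_eq!(uleb128(300), vec![0xac, 0x02]);
}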
diff --git a/vendor/gimli-0.26.2/src/write/abbrev.rs b/vendor/gimli-0.26.2/src/write/abbrev.rs
new file mode 100644
index 000000000..7cdfa969c
--- /dev/null
+++ b/vendor/gimli-0.26.2/src/write/abbrev.rs
@@ -0,0 +1,188 @@
+use alloc::vec::Vec;
+use indexmap::IndexSet;
+use std::ops::{Deref, DerefMut};
+
+use crate::common::{DebugAbbrevOffset, SectionId};
+use crate::constants;
+use crate::write::{Result, Section, Writer};
+
+/// A table of abbreviations that will be stored in a `.debug_abbrev` section.
+// Requirements:
+// - values are `Abbreviation`
+// - insertion returns an abbreviation code for use in writing a DIE
+// - inserting a duplicate returns the code of the existing value
+#[derive(Debug, Default)]
+pub(crate) struct AbbreviationTable {
+ abbrevs: IndexSet<Abbreviation>,
+}
+
+impl AbbreviationTable {
+ /// Add an abbreviation to the table and return its code.
+ pub fn add(&mut self, abbrev: Abbreviation) -> u64 {
+ let (code, _) = self.abbrevs.insert_full(abbrev);
+ // Code must be non-zero
+ (code + 1) as u64
+ }
+
+ /// Write the abbreviation table to the `.debug_abbrev` section.
+ pub fn write<W: Writer>(&self, w: &mut DebugAbbrev<W>) -> Result<()> {
+ for (code, abbrev) in self.abbrevs.iter().enumerate() {
+ w.write_uleb128((code + 1) as u64)?;
+ abbrev.write(w)?;
+ }
+ // Null abbreviation code
+ w.write_u8(0)
+ }
+}
+
+/// An abbreviation describes the shape of a `DebuggingInformationEntry`'s type:
+/// its tag type, whether it has children, and its set of attributes.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub(crate) struct Abbreviation {
+ tag: constants::DwTag,
+ has_children: bool,
+ attributes: Vec<AttributeSpecification>,
+}
+
+impl Abbreviation {
+ /// Construct a new `Abbreviation`.
+ #[inline]
+ pub fn new(
+ tag: constants::DwTag,
+ has_children: bool,
+ attributes: Vec<AttributeSpecification>,
+ ) -> Abbreviation {
+ Abbreviation {
+ tag,
+ has_children,
+ attributes,
+ }
+ }
+
+ /// Write the abbreviation to the `.debug_abbrev` section.
+ pub fn write<W: Writer>(&self, w: &mut DebugAbbrev<W>) -> Result<()> {
+ w.write_uleb128(self.tag.0.into())?;
+ w.write_u8(if self.has_children {
+ constants::DW_CHILDREN_yes.0
+ } else {
+ constants::DW_CHILDREN_no.0
+ })?;
+ for attr in &self.attributes {
+ attr.write(w)?;
+ }
+ // Null name and form
+ w.write_u8(0)?;
+ w.write_u8(0)
+ }
+}
+
+/// The description of an attribute in an abbreviated type.
+// TODO: support implicit const
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub(crate) struct AttributeSpecification {
+ name: constants::DwAt,
+ form: constants::DwForm,
+}
+
+impl AttributeSpecification {
+ /// Construct a new `AttributeSpecification`.
+ #[inline]
+ pub fn new(name: constants::DwAt, form: constants::DwForm) -> AttributeSpecification {
+ AttributeSpecification { name, form }
+ }
+
+ /// Write the attribute specification to the `.debug_abbrev` section.
+ #[inline]
+ pub fn write<W: Writer>(&self, w: &mut DebugAbbrev<W>) -> Result<()> {
+ w.write_uleb128(self.name.0.into())?;
+ w.write_uleb128(self.form.0.into())
+ }
+}
+
+define_section!(
+ DebugAbbrev,
+ DebugAbbrevOffset,
+ "A writable `.debug_abbrev` section."
+);
+
+#[cfg(test)]
+#[cfg(feature = "read")]
+mod tests {
+ use super::*;
+ use crate::constants;
+ use crate::read;
+ use crate::write::EndianVec;
+ use crate::LittleEndian;
+
+ #[test]
+ fn test_abbreviation_table() {
+ let mut abbrevs = AbbreviationTable::default();
+ let abbrev1 = Abbreviation::new(
+ constants::DW_TAG_subprogram,
+ false,
+ vec![AttributeSpecification::new(
+ constants::DW_AT_name,
+ constants::DW_FORM_string,
+ )],
+ );
+ let abbrev2 = Abbreviation::new(
+ constants::DW_TAG_compile_unit,
+ true,
+ vec![
+ AttributeSpecification::new(constants::DW_AT_producer, constants::DW_FORM_strp),
+ AttributeSpecification::new(constants::DW_AT_language, constants::DW_FORM_data2),
+ ],
+ );
+ let code1 = abbrevs.add(abbrev1.clone());
+ assert_eq!(code1, 1);
+ let code2 = abbrevs.add(abbrev2.clone());
+ assert_eq!(code2, 2);
+ assert_eq!(abbrevs.add(abbrev1.clone()), code1);
+ assert_eq!(abbrevs.add(abbrev2.clone()), code2);
+
+ let mut debug_abbrev = DebugAbbrev::from(EndianVec::new(LittleEndian));
+ let debug_abbrev_offset = debug_abbrev.offset();
+ assert_eq!(debug_abbrev_offset, DebugAbbrevOffset(0));
+ abbrevs.write(&mut debug_abbrev).unwrap();
+ assert_eq!(debug_abbrev.offset(), DebugAbbrevOffset(17));
+
+ let read_debug_abbrev = read::DebugAbbrev::new(debug_abbrev.slice(), LittleEndian);
+ let read_abbrevs = read_debug_abbrev
+ .abbreviations(debug_abbrev_offset)
+ .unwrap();
+
+ let read_abbrev1 = read_abbrevs.get(code1).unwrap();
+ assert_eq!(abbrev1.tag, read_abbrev1.tag());
+ assert_eq!(abbrev1.has_children, read_abbrev1.has_children());
+ assert_eq!(abbrev1.attributes.len(), read_abbrev1.attributes().len());
+ assert_eq!(
+ abbrev1.attributes[0].name,
+ read_abbrev1.attributes()[0].name()
+ );
+ assert_eq!(
+ abbrev1.attributes[0].form,
+ read_abbrev1.attributes()[0].form()
+ );
+
+ let read_abbrev2 = read_abbrevs.get(code2).unwrap();
+ assert_eq!(abbrev2.tag, read_abbrev2.tag());
+ assert_eq!(abbrev2.has_children, read_abbrev2.has_children());
+ assert_eq!(abbrev2.attributes.len(), read_abbrev2.attributes().len());
+ assert_eq!(
+ abbrev2.attributes[0].name,
+ read_abbrev2.attributes()[0].name()
+ );
+ assert_eq!(
+ abbrev2.attributes[0].form,
+ read_abbrev2.attributes()[0].form()
+ );
+ assert_eq!(
+ abbrev2.attributes[1].name,
+ read_abbrev2.attributes()[1].name()
+ );
+ assert_eq!(
+ abbrev2.attributes[1].form,
+ read_abbrev2.attributes()[1].form()
+ );
+ }
+}
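Because the table is backed by an `IndexSet`, abbreviation codes are just one-based insertion indices, and adding a duplicate hands back the existing code, exactly as the unit test above checks. A short in-crate sketch of that contract (the type is `pub(crate)`, so this is only meaningful inside gimli's write module):

let mut table = AbbreviationTable::default();
let abbrev = Abbreviation::new(constants::DW_TAG_base_type, false, Vec::new());
let code = table.add(abbrev.clone());
assert_eq!(code, 1);                 // codes start at 1; 0 is the null terminator
assert_eq!(table.add(abbrev), code); // duplicates reuse the existing code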
diff --git a/vendor/gimli-0.26.2/src/write/cfi.rs b/vendor/gimli-0.26.2/src/write/cfi.rs
new file mode 100644
index 000000000..718cb69ad
--- /dev/null
+++ b/vendor/gimli-0.26.2/src/write/cfi.rs
@@ -0,0 +1,1025 @@
+use alloc::vec::Vec;
+use indexmap::IndexSet;
+use std::ops::{Deref, DerefMut};
+
+use crate::common::{DebugFrameOffset, EhFrameOffset, Encoding, Format, Register, SectionId};
+use crate::constants;
+use crate::write::{Address, BaseId, Error, Expression, Result, Section, Writer};
+
+define_section!(
+ DebugFrame,
+ DebugFrameOffset,
+ "A writable `.debug_frame` section."
+);
+
+define_section!(EhFrame, EhFrameOffset, "A writable `.eh_frame` section.");
+
+define_id!(CieId, "An identifier for a CIE in a `FrameTable`.");
+
+/// A table of frame description entries.
+#[derive(Debug, Default)]
+pub struct FrameTable {
+ /// Base id for CIEs.
+ base_id: BaseId,
+ /// The common information entries.
+ cies: IndexSet<CommonInformationEntry>,
+ /// The frame description entries.
+ fdes: Vec<(CieId, FrameDescriptionEntry)>,
+}
+
+impl FrameTable {
+ /// Add a CIE and return its id.
+ ///
+ /// If the CIE already exists, then return the id of the existing CIE.
+ pub fn add_cie(&mut self, cie: CommonInformationEntry) -> CieId {
+ let (index, _) = self.cies.insert_full(cie);
+ CieId::new(self.base_id, index)
+ }
+
+ /// The number of CIEs.
+ pub fn cie_count(&self) -> usize {
+ self.cies.len()
+ }
+
+ /// Add an FDE.
+ ///
+ /// Does not check for duplicates.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the CIE id is invalid.
+ pub fn add_fde(&mut self, cie: CieId, fde: FrameDescriptionEntry) {
+ debug_assert_eq!(self.base_id, cie.base_id);
+ self.fdes.push((cie, fde));
+ }
+
+ /// The number of FDEs.
+ pub fn fde_count(&self) -> usize {
+ self.fdes.len()
+ }
+
+ /// Write the frame table entries to the given `.debug_frame` section.
+ pub fn write_debug_frame<W: Writer>(&self, w: &mut DebugFrame<W>) -> Result<()> {
+ self.write(&mut w.0, false)
+ }
+
+ /// Write the frame table entries to the given `.eh_frame` section.
+ pub fn write_eh_frame<W: Writer>(&self, w: &mut EhFrame<W>) -> Result<()> {
+ self.write(&mut w.0, true)
+ }
+
+ fn write<W: Writer>(&self, w: &mut W, eh_frame: bool) -> Result<()> {
+ let mut cie_offsets = vec![None; self.cies.len()];
+ for (cie_id, fde) in &self.fdes {
+ let cie_index = cie_id.index;
+ let cie = self.cies.get_index(cie_index).unwrap();
+ let cie_offset = match cie_offsets[cie_index] {
+ Some(offset) => offset,
+ None => {
+ // Only write CIEs as they are referenced.
+ let offset = cie.write(w, eh_frame)?;
+ cie_offsets[cie_index] = Some(offset);
+ offset
+ }
+ };
+
+ fde.write(w, eh_frame, cie_offset, cie)?;
+ }
+ // TODO: write length 0 terminator for eh_frame?
+ Ok(())
+ }
+}
+
+/// A common information entry. This contains information that is shared between FDEs.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct CommonInformationEntry {
+ encoding: Encoding,
+
+ /// A constant that is factored out of code offsets.
+ ///
+ /// This should be set to the minimum instruction length.
+ /// Writing a code offset that is not a multiple of this factor will generate an error.
+ code_alignment_factor: u8,
+
+ /// A constant that is factored out of data offsets.
+ ///
+ /// This should be set to the minimum data alignment for the frame.
+ /// Writing a data offset that is not a multiple of this factor will generate an error.
+ data_alignment_factor: i8,
+
+ /// The return address register. This might not correspond to an actual machine register.
+ return_address_register: Register,
+
+ /// The address of the personality function and its encoding.
+ pub personality: Option<(constants::DwEhPe, Address)>,
+
+ /// The encoding to use for the LSDA address in FDEs.
+ ///
+ /// If set, then all FDEs that use this CIE must have an LSDA address.
+ pub lsda_encoding: Option<constants::DwEhPe>,
+
+ /// The encoding to use for addresses in FDEs.
+ pub fde_address_encoding: constants::DwEhPe,
+
+ /// True for signal trampolines.
+ pub signal_trampoline: bool,
+
+ /// The initial instructions upon entry to this function.
+ instructions: Vec<CallFrameInstruction>,
+}
+
+impl CommonInformationEntry {
+ /// Create a new common information entry.
+ ///
+ /// The encoding version must be a CFI version, not a DWARF version.
+ pub fn new(
+ encoding: Encoding,
+ code_alignment_factor: u8,
+ data_alignment_factor: i8,
+ return_address_register: Register,
+ ) -> Self {
+ CommonInformationEntry {
+ encoding,
+ code_alignment_factor,
+ data_alignment_factor,
+ return_address_register,
+ personality: None,
+ lsda_encoding: None,
+ fde_address_encoding: constants::DW_EH_PE_absptr,
+ signal_trampoline: false,
+ instructions: Vec::new(),
+ }
+ }
+
+ /// Add an initial instruction.
+ pub fn add_instruction(&mut self, instruction: CallFrameInstruction) {
+ self.instructions.push(instruction);
+ }
+
+ fn has_augmentation(&self) -> bool {
+ self.personality.is_some()
+ || self.lsda_encoding.is_some()
+ || self.signal_trampoline
+ || self.fde_address_encoding != constants::DW_EH_PE_absptr
+ }
+
+ /// Returns the section offset of the CIE.
+ fn write<W: Writer>(&self, w: &mut W, eh_frame: bool) -> Result<usize> {
+ let encoding = self.encoding;
+ let offset = w.len();
+
+ let length_offset = w.write_initial_length(encoding.format)?;
+ let length_base = w.len();
+
+ if eh_frame {
+ w.write_u32(0)?;
+ } else {
+ match encoding.format {
+ Format::Dwarf32 => w.write_u32(0xffff_ffff)?,
+ Format::Dwarf64 => w.write_u64(0xffff_ffff_ffff_ffff)?,
+ }
+ }
+
+ if eh_frame {
+ if encoding.version != 1 {
+ return Err(Error::UnsupportedVersion(encoding.version));
+ };
+ } else {
+ match encoding.version {
+ 1 | 3 | 4 => {}
+ _ => return Err(Error::UnsupportedVersion(encoding.version)),
+ };
+ }
+ w.write_u8(encoding.version as u8)?;
+
+ let augmentation = self.has_augmentation();
+ if augmentation {
+ w.write_u8(b'z')?;
+ if self.lsda_encoding.is_some() {
+ w.write_u8(b'L')?;
+ }
+ if self.personality.is_some() {
+ w.write_u8(b'P')?;
+ }
+ if self.fde_address_encoding != constants::DW_EH_PE_absptr {
+ w.write_u8(b'R')?;
+ }
+ if self.signal_trampoline {
+ w.write_u8(b'S')?;
+ }
+ }
+ w.write_u8(0)?;
+
+ if encoding.version >= 4 {
+ w.write_u8(encoding.address_size)?;
+ // TODO: segment_selector_size
+ w.write_u8(0)?;
+ }
+
+ w.write_uleb128(self.code_alignment_factor.into())?;
+ w.write_sleb128(self.data_alignment_factor.into())?;
+
+ if !eh_frame && encoding.version == 1 {
+ let register = self.return_address_register.0 as u8;
+ if u16::from(register) != self.return_address_register.0 {
+ return Err(Error::ValueTooLarge);
+ }
+ w.write_u8(register)?;
+ } else {
+ w.write_uleb128(self.return_address_register.0.into())?;
+ }
+
+ if augmentation {
+ let augmentation_length_offset = w.len();
+ w.write_u8(0)?;
+ let augmentation_length_base = w.len();
+
+ if let Some(eh_pe) = self.lsda_encoding {
+ w.write_u8(eh_pe.0)?;
+ }
+ if let Some((eh_pe, address)) = self.personality {
+ w.write_u8(eh_pe.0)?;
+ w.write_eh_pointer(address, eh_pe, encoding.address_size)?;
+ }
+ if self.fde_address_encoding != constants::DW_EH_PE_absptr {
+ w.write_u8(self.fde_address_encoding.0)?;
+ }
+
+ let augmentation_length = (w.len() - augmentation_length_base) as u64;
+ debug_assert!(augmentation_length < 0x80);
+ w.write_udata_at(augmentation_length_offset, augmentation_length, 1)?;
+ }
+
+ for instruction in &self.instructions {
+ instruction.write(w, encoding, self)?;
+ }
+
+ write_nop(
+ w,
+ encoding.format.word_size() as usize + w.len() - length_base,
+ encoding.address_size,
+ )?;
+
+ let length = (w.len() - length_base) as u64;
+ w.write_initial_length_at(length_offset, length, encoding.format)?;
+
+ Ok(offset)
+ }
+}
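`has_augmentation` above gates the optional augmentation data: when any optional field is set the CIE writes 'z', then 'L', 'P', 'R', and 'S' for whichever of the LSDA encoding, the personality routine, a non-`absptr` FDE address encoding, and the signal-trampoline flag apply. A sketch of a CIE that yields the full "zLPRS" string, mirroring `cie3` in the tests further down and assuming an `Encoding { version: 1, .. }` value in scope:

let mut cie = CommonInformationEntry::new(encoding, 1, -8, X86_64::RA);
cie.lsda_encoding = Some(constants::DW_EH_PE_pcrel);
cie.personality = Some((constants::DW_EH_PE_pcrel, Address::Constant(0x1234)));
cie.fde_address_encoding = constants::DW_EH_PE_pcrel;
cie.signal_trampoline = true;
// Written to .eh_frame, this CIE carries the augmentation string "zLPRS".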
+
+/// A frame description entry. There should be one FDE per function.
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct FrameDescriptionEntry {
+ /// The initial address of the function.
+ address: Address,
+
+ /// The length in bytes of the function.
+ length: u32,
+
+ /// The address of the LSDA.
+ pub lsda: Option<Address>,
+
+ /// The instructions for this function, ordered by offset.
+ instructions: Vec<(u32, CallFrameInstruction)>,
+}
+
+impl FrameDescriptionEntry {
+ /// Create a new frame description entry for a function.
+ pub fn new(address: Address, length: u32) -> Self {
+ FrameDescriptionEntry {
+ address,
+ length,
+ lsda: None,
+ instructions: Vec::new(),
+ }
+ }
+
+ /// Add an instruction.
+ ///
+ /// Instructions must be added in increasing order of offset, or writing will fail.
+ pub fn add_instruction(&mut self, offset: u32, instruction: CallFrameInstruction) {
+ debug_assert!(self.instructions.last().map(|x| x.0).unwrap_or(0) <= offset);
+ self.instructions.push((offset, instruction));
+ }
+
+ fn write<W: Writer>(
+ &self,
+ w: &mut W,
+ eh_frame: bool,
+ cie_offset: usize,
+ cie: &CommonInformationEntry,
+ ) -> Result<()> {
+ let encoding = cie.encoding;
+ let length_offset = w.write_initial_length(encoding.format)?;
+ let length_base = w.len();
+
+ if eh_frame {
+ // .eh_frame uses a relative offset which doesn't need relocation.
+ w.write_udata((w.len() - cie_offset) as u64, 4)?;
+ } else {
+ w.write_offset(
+ cie_offset,
+ SectionId::DebugFrame,
+ encoding.format.word_size(),
+ )?;
+ }
+
+ if cie.fde_address_encoding != constants::DW_EH_PE_absptr {
+ w.write_eh_pointer(
+ self.address,
+ cie.fde_address_encoding,
+ encoding.address_size,
+ )?;
+ w.write_eh_pointer_data(
+ self.length.into(),
+ cie.fde_address_encoding.format(),
+ encoding.address_size,
+ )?;
+ } else {
+ w.write_address(self.address, encoding.address_size)?;
+ w.write_udata(self.length.into(), encoding.address_size)?;
+ }
+
+ if cie.has_augmentation() {
+ let mut augmentation_length = 0u64;
+ if self.lsda.is_some() {
+ augmentation_length += u64::from(encoding.address_size);
+ }
+ w.write_uleb128(augmentation_length)?;
+
+ debug_assert_eq!(self.lsda.is_some(), cie.lsda_encoding.is_some());
+ if let (Some(lsda), Some(lsda_encoding)) = (self.lsda, cie.lsda_encoding) {
+ w.write_eh_pointer(lsda, lsda_encoding, encoding.address_size)?;
+ }
+ }
+
+ let mut prev_offset = 0;
+ for (offset, instruction) in &self.instructions {
+ write_advance_loc(w, cie.code_alignment_factor, prev_offset, *offset)?;
+ prev_offset = *offset;
+ instruction.write(w, encoding, cie)?;
+ }
+
+ write_nop(
+ w,
+ encoding.format.word_size() as usize + w.len() - length_base,
+ encoding.address_size,
+ )?;
+
+ let length = (w.len() - length_base) as u64;
+ w.write_initial_length_at(length_offset, length, encoding.format)?;
+
+ Ok(())
+ }
+}
+
+/// An instruction in a frame description entry.
+///
+/// This may be a CFA definition, a register rule, or some other directive.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum CallFrameInstruction {
+ /// Define the CFA rule to use the provided register and offset.
+ Cfa(Register, i32),
+ /// Update the CFA rule to use the provided register. The offset is unchanged.
+ CfaRegister(Register),
+ /// Update the CFA rule to use the provided offset. The register is unchanged.
+ CfaOffset(i32),
+ /// Define the CFA rule to use the provided expression.
+ CfaExpression(Expression),
+
+ /// Restore the initial rule for the register.
+ Restore(Register),
+ /// The previous value of the register is not recoverable.
+ Undefined(Register),
+ /// The register has not been modified.
+ SameValue(Register),
+ /// The previous value of the register is saved at address CFA + offset.
+ Offset(Register, i32),
+ /// The previous value of the register is CFA + offset.
+ ValOffset(Register, i32),
+ /// The previous value of the register is stored in another register.
+ Register(Register, Register),
+ /// The previous value of the register is saved at address given by the expression.
+ Expression(Register, Expression),
+ /// The previous value of the register is given by the expression.
+ ValExpression(Register, Expression),
+
+ /// Push all register rules onto a stack.
+ RememberState,
+ /// Pop all register rules off the stack.
+ RestoreState,
+ /// The size of the arguments that have been pushed onto the stack.
+ ArgsSize(u32),
+}
+
+impl CallFrameInstruction {
+ fn write<W: Writer>(
+ &self,
+ w: &mut W,
+ encoding: Encoding,
+ cie: &CommonInformationEntry,
+ ) -> Result<()> {
+ match *self {
+ CallFrameInstruction::Cfa(register, offset) => {
+ if offset < 0 {
+ let offset = factored_data_offset(offset, cie.data_alignment_factor)?;
+ w.write_u8(constants::DW_CFA_def_cfa_sf.0)?;
+ w.write_uleb128(register.0.into())?;
+ w.write_sleb128(offset.into())?;
+ } else {
+ // Unfactored offset.
+ w.write_u8(constants::DW_CFA_def_cfa.0)?;
+ w.write_uleb128(register.0.into())?;
+ w.write_uleb128(offset as u64)?;
+ }
+ }
+ CallFrameInstruction::CfaRegister(register) => {
+ w.write_u8(constants::DW_CFA_def_cfa_register.0)?;
+ w.write_uleb128(register.0.into())?;
+ }
+ CallFrameInstruction::CfaOffset(offset) => {
+ if offset < 0 {
+ let offset = factored_data_offset(offset, cie.data_alignment_factor)?;
+ w.write_u8(constants::DW_CFA_def_cfa_offset_sf.0)?;
+ w.write_sleb128(offset.into())?;
+ } else {
+ // Unfactored offset.
+ w.write_u8(constants::DW_CFA_def_cfa_offset.0)?;
+ w.write_uleb128(offset as u64)?;
+ }
+ }
+ CallFrameInstruction::CfaExpression(ref expression) => {
+ w.write_u8(constants::DW_CFA_def_cfa_expression.0)?;
+ w.write_uleb128(expression.size(encoding, None) as u64)?;
+ expression.write(w, None, encoding, None)?;
+ }
+ CallFrameInstruction::Restore(register) => {
+ if register.0 < 0x40 {
+ w.write_u8(constants::DW_CFA_restore.0 | register.0 as u8)?;
+ } else {
+ w.write_u8(constants::DW_CFA_restore_extended.0)?;
+ w.write_uleb128(register.0.into())?;
+ }
+ }
+ CallFrameInstruction::Undefined(register) => {
+ w.write_u8(constants::DW_CFA_undefined.0)?;
+ w.write_uleb128(register.0.into())?;
+ }
+ CallFrameInstruction::SameValue(register) => {
+ w.write_u8(constants::DW_CFA_same_value.0)?;
+ w.write_uleb128(register.0.into())?;
+ }
+ CallFrameInstruction::Offset(register, offset) => {
+ let offset = factored_data_offset(offset, cie.data_alignment_factor)?;
+ if offset < 0 {
+ w.write_u8(constants::DW_CFA_offset_extended_sf.0)?;
+ w.write_uleb128(register.0.into())?;
+ w.write_sleb128(offset.into())?;
+ } else if register.0 < 0x40 {
+ w.write_u8(constants::DW_CFA_offset.0 | register.0 as u8)?;
+ w.write_uleb128(offset as u64)?;
+ } else {
+ w.write_u8(constants::DW_CFA_offset_extended.0)?;
+ w.write_uleb128(register.0.into())?;
+ w.write_uleb128(offset as u64)?;
+ }
+ }
+ CallFrameInstruction::ValOffset(register, offset) => {
+ let offset = factored_data_offset(offset, cie.data_alignment_factor)?;
+ if offset < 0 {
+ w.write_u8(constants::DW_CFA_val_offset_sf.0)?;
+ w.write_uleb128(register.0.into())?;
+ w.write_sleb128(offset.into())?;
+ } else {
+ w.write_u8(constants::DW_CFA_val_offset.0)?;
+ w.write_uleb128(register.0.into())?;
+ w.write_uleb128(offset as u64)?;
+ }
+ }
+ CallFrameInstruction::Register(register1, register2) => {
+ w.write_u8(constants::DW_CFA_register.0)?;
+ w.write_uleb128(register1.0.into())?;
+ w.write_uleb128(register2.0.into())?;
+ }
+ CallFrameInstruction::Expression(register, ref expression) => {
+ w.write_u8(constants::DW_CFA_expression.0)?;
+ w.write_uleb128(register.0.into())?;
+ w.write_uleb128(expression.size(encoding, None) as u64)?;
+ expression.write(w, None, encoding, None)?;
+ }
+ CallFrameInstruction::ValExpression(register, ref expression) => {
+ w.write_u8(constants::DW_CFA_val_expression.0)?;
+ w.write_uleb128(register.0.into())?;
+ w.write_uleb128(expression.size(encoding, None) as u64)?;
+ expression.write(w, None, encoding, None)?;
+ }
+ CallFrameInstruction::RememberState => {
+ w.write_u8(constants::DW_CFA_remember_state.0)?;
+ }
+ CallFrameInstruction::RestoreState => {
+ w.write_u8(constants::DW_CFA_restore_state.0)?;
+ }
+ CallFrameInstruction::ArgsSize(size) => {
+ w.write_u8(constants::DW_CFA_GNU_args_size.0)?;
+ w.write_uleb128(size.into())?;
+ }
+ }
+ Ok(())
+ }
+}
+
+fn write_advance_loc<W: Writer>(
+ w: &mut W,
+ code_alignment_factor: u8,
+ prev_offset: u32,
+ offset: u32,
+) -> Result<()> {
+ if offset == prev_offset {
+ return Ok(());
+ }
+ let delta = factored_code_delta(prev_offset, offset, code_alignment_factor)?;
+ if delta < 0x40 {
+ w.write_u8(constants::DW_CFA_advance_loc.0 | delta as u8)?;
+ } else if delta < 0x100 {
+ w.write_u8(constants::DW_CFA_advance_loc1.0)?;
+ w.write_u8(delta as u8)?;
+ } else if delta < 0x10000 {
+ w.write_u8(constants::DW_CFA_advance_loc2.0)?;
+ w.write_u16(delta as u16)?;
+ } else {
+ w.write_u8(constants::DW_CFA_advance_loc4.0)?;
+ w.write_u32(delta)?;
+ }
+ Ok(())
+}
+
+fn write_nop<W: Writer>(w: &mut W, len: usize, align: u8) -> Result<()> {
+ debug_assert_eq!(align & (align - 1), 0);
+ let tail_len = (!len + 1) & (align as usize - 1);
+ for _ in 0..tail_len {
+ w.write_u8(constants::DW_CFA_nop.0)?;
+ }
+ Ok(())
+}
+
+fn factored_code_delta(prev_offset: u32, offset: u32, factor: u8) -> Result<u32> {
+ if offset < prev_offset {
+ return Err(Error::InvalidFrameCodeOffset(offset));
+ }
+ let delta = offset - prev_offset;
+ let factor = u32::from(factor);
+ let factored_delta = delta / factor;
+ if delta != factored_delta * factor {
+ return Err(Error::InvalidFrameCodeOffset(offset));
+ }
+ Ok(factored_delta)
+}
+
+fn factored_data_offset(offset: i32, factor: i8) -> Result<i32> {
+ let factor = i32::from(factor);
+ let factored_offset = offset / factor;
+ if offset != factored_offset * factor {
+ return Err(Error::InvalidFrameDataOffset(offset));
+ }
+ Ok(factored_offset)
+}
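The three helpers above do the byte-level bookkeeping: `write_advance_loc` picks the smallest `DW_CFA_advance_loc*` form that fits the factored delta, `write_nop` pads each entry to a multiple of the address size, and the `factored_*` functions reject offsets that are not exact multiples of the CIE alignment factors. A few worked values, as an in-module sketch:

// Advancing from offset 4 to 10 with code_alignment_factor = 2 factors exactly to 3,
// which is below 0x40 and therefore fits the compact DW_CFA_advance_loc form.
assert_eq!(factored_code_delta(4, 10, 2), Ok(3));
// A data offset of -16 with data_alignment_factor = -8 factors to 2;
// -12 would not divide evenly and yields InvalidFrameDataOffset instead.
assert_eq!(factored_data_offset(-16, -8), Ok(2));
// Padding: a 13-byte body with 4-byte alignment needs (!13 + 1) & 3 == 3 nop bytes.
assert_eq!((!13usize + 1) & (4 - 1), 3);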
+
+#[cfg(feature = "read")]
+pub(crate) mod convert {
+ use super::*;
+ use crate::read::{self, Reader};
+ use crate::write::{ConvertError, ConvertResult};
+ use std::collections::{hash_map, HashMap};
+
+ impl FrameTable {
+ /// Create a frame table by reading the data in the given section.
+ ///
+ /// `convert_address` is a function to convert read addresses into the `Address`
+ /// type. For non-relocatable addresses, this function may simply return
+ /// `Address::Constant(address)`. For relocatable addresses, it is the caller's
+ /// responsibility to determine the symbol and addend corresponding to the address
+ /// and return `Address::Symbol { symbol, addend }`.
+ pub fn from<R, Section>(
+ frame: &Section,
+ convert_address: &dyn Fn(u64) -> Option<Address>,
+ ) -> ConvertResult<FrameTable>
+ where
+ R: Reader<Offset = usize>,
+ Section: read::UnwindSection<R>,
+ Section::Offset: read::UnwindOffset<usize>,
+ {
+ let bases = read::BaseAddresses::default().set_eh_frame(0);
+
+ let mut frame_table = FrameTable::default();
+
+ let mut cie_ids = HashMap::new();
+ let mut entries = frame.entries(&bases);
+ while let Some(entry) = entries.next()? {
+ let partial = match entry {
+ read::CieOrFde::Cie(_) => continue,
+ read::CieOrFde::Fde(partial) => partial,
+ };
+
+ // TODO: is it worth caching the parsed CIEs? It would be better if FDEs only
+ // stored a reference.
+ let from_fde = partial.parse(Section::cie_from_offset)?;
+ let from_cie = from_fde.cie();
+ let cie_id = match cie_ids.entry(from_cie.offset()) {
+ hash_map::Entry::Occupied(o) => *o.get(),
+ hash_map::Entry::Vacant(e) => {
+ let cie =
+ CommonInformationEntry::from(from_cie, frame, &bases, convert_address)?;
+ let cie_id = frame_table.add_cie(cie);
+ e.insert(cie_id);
+ cie_id
+ }
+ };
+ let fde = FrameDescriptionEntry::from(&from_fde, frame, &bases, convert_address)?;
+ frame_table.add_fde(cie_id, fde);
+ }
+
+ Ok(frame_table)
+ }
+ }
+
+ impl CommonInformationEntry {
+ fn from<R, Section>(
+ from_cie: &read::CommonInformationEntry<R>,
+ frame: &Section,
+ bases: &read::BaseAddresses,
+ convert_address: &dyn Fn(u64) -> Option<Address>,
+ ) -> ConvertResult<CommonInformationEntry>
+ where
+ R: Reader<Offset = usize>,
+ Section: read::UnwindSection<R>,
+ Section::Offset: read::UnwindOffset<usize>,
+ {
+ let mut cie = CommonInformationEntry::new(
+ from_cie.encoding(),
+ from_cie.code_alignment_factor() as u8,
+ from_cie.data_alignment_factor() as i8,
+ from_cie.return_address_register(),
+ );
+
+ cie.personality = match from_cie.personality_with_encoding() {
+ // We treat these the same because the encoding already determines
+ // whether it is indirect.
+ Some((eh_pe, read::Pointer::Direct(p)))
+ | Some((eh_pe, read::Pointer::Indirect(p))) => {
+ let address = convert_address(p).ok_or(ConvertError::InvalidAddress)?;
+ Some((eh_pe, address))
+ }
+ _ => None,
+ };
+ cie.lsda_encoding = from_cie.lsda_encoding();
+ cie.fde_address_encoding = from_cie
+ .fde_address_encoding()
+ .unwrap_or(constants::DW_EH_PE_absptr);
+ cie.signal_trampoline = from_cie.is_signal_trampoline();
+
+ let mut offset = 0;
+ let mut from_instructions = from_cie.instructions(frame, bases);
+ while let Some(from_instruction) = from_instructions.next()? {
+ if let Some(instruction) = CallFrameInstruction::from(
+ from_instruction,
+ from_cie,
+ convert_address,
+ &mut offset,
+ )? {
+ cie.instructions.push(instruction);
+ }
+ }
+ Ok(cie)
+ }
+ }
+
+ impl FrameDescriptionEntry {
+ fn from<R, Section>(
+ from_fde: &read::FrameDescriptionEntry<R>,
+ frame: &Section,
+ bases: &read::BaseAddresses,
+ convert_address: &dyn Fn(u64) -> Option<Address>,
+ ) -> ConvertResult<FrameDescriptionEntry>
+ where
+ R: Reader<Offset = usize>,
+ Section: read::UnwindSection<R>,
+ Section::Offset: read::UnwindOffset<usize>,
+ {
+ let address =
+ convert_address(from_fde.initial_address()).ok_or(ConvertError::InvalidAddress)?;
+ let length = from_fde.len() as u32;
+ let mut fde = FrameDescriptionEntry::new(address, length);
+
+ match from_fde.lsda() {
+ // We treat these the same because the encoding already determines
+ // whether it is indirect.
+ Some(read::Pointer::Direct(p)) | Some(read::Pointer::Indirect(p)) => {
+ let address = convert_address(p).ok_or(ConvertError::InvalidAddress)?;
+ fde.lsda = Some(address);
+ }
+ None => {}
+ }
+
+ let from_cie = from_fde.cie();
+ let mut offset = 0;
+ let mut from_instructions = from_fde.instructions(frame, bases);
+ while let Some(from_instruction) = from_instructions.next()? {
+ if let Some(instruction) = CallFrameInstruction::from(
+ from_instruction,
+ from_cie,
+ convert_address,
+ &mut offset,
+ )? {
+ fde.instructions.push((offset, instruction));
+ }
+ }
+
+ Ok(fde)
+ }
+ }
+
+ impl CallFrameInstruction {
+ fn from<R: Reader<Offset = usize>>(
+ from_instruction: read::CallFrameInstruction<R>,
+ from_cie: &read::CommonInformationEntry<R>,
+ convert_address: &dyn Fn(u64) -> Option<Address>,
+ offset: &mut u32,
+ ) -> ConvertResult<Option<CallFrameInstruction>> {
+ let convert_expression =
+ |x| Expression::from(x, from_cie.encoding(), None, None, None, convert_address);
+ // TODO: validate integer type conversions
+ Ok(Some(match from_instruction {
+ read::CallFrameInstruction::SetLoc { .. } => {
+ return Err(ConvertError::UnsupportedCfiInstruction);
+ }
+ read::CallFrameInstruction::AdvanceLoc { delta } => {
+ *offset += delta * from_cie.code_alignment_factor() as u32;
+ return Ok(None);
+ }
+ read::CallFrameInstruction::DefCfa { register, offset } => {
+ CallFrameInstruction::Cfa(register, offset as i32)
+ }
+ read::CallFrameInstruction::DefCfaSf {
+ register,
+ factored_offset,
+ } => {
+ let offset = factored_offset * from_cie.data_alignment_factor();
+ CallFrameInstruction::Cfa(register, offset as i32)
+ }
+ read::CallFrameInstruction::DefCfaRegister { register } => {
+ CallFrameInstruction::CfaRegister(register)
+ }
+
+ read::CallFrameInstruction::DefCfaOffset { offset } => {
+ CallFrameInstruction::CfaOffset(offset as i32)
+ }
+ read::CallFrameInstruction::DefCfaOffsetSf { factored_offset } => {
+ let offset = factored_offset * from_cie.data_alignment_factor();
+ CallFrameInstruction::CfaOffset(offset as i32)
+ }
+ read::CallFrameInstruction::DefCfaExpression { expression } => {
+ CallFrameInstruction::CfaExpression(convert_expression(expression)?)
+ }
+ read::CallFrameInstruction::Undefined { register } => {
+ CallFrameInstruction::Undefined(register)
+ }
+ read::CallFrameInstruction::SameValue { register } => {
+ CallFrameInstruction::SameValue(register)
+ }
+ read::CallFrameInstruction::Offset {
+ register,
+ factored_offset,
+ } => {
+ let offset = factored_offset as i64 * from_cie.data_alignment_factor();
+ CallFrameInstruction::Offset(register, offset as i32)
+ }
+ read::CallFrameInstruction::OffsetExtendedSf {
+ register,
+ factored_offset,
+ } => {
+ let offset = factored_offset * from_cie.data_alignment_factor();
+ CallFrameInstruction::Offset(register, offset as i32)
+ }
+ read::CallFrameInstruction::ValOffset {
+ register,
+ factored_offset,
+ } => {
+ let offset = factored_offset as i64 * from_cie.data_alignment_factor();
+ CallFrameInstruction::ValOffset(register, offset as i32)
+ }
+ read::CallFrameInstruction::ValOffsetSf {
+ register,
+ factored_offset,
+ } => {
+ let offset = factored_offset * from_cie.data_alignment_factor();
+ CallFrameInstruction::ValOffset(register, offset as i32)
+ }
+ read::CallFrameInstruction::Register {
+ dest_register,
+ src_register,
+ } => CallFrameInstruction::Register(dest_register, src_register),
+ read::CallFrameInstruction::Expression {
+ register,
+ expression,
+ } => CallFrameInstruction::Expression(register, convert_expression(expression)?),
+ read::CallFrameInstruction::ValExpression {
+ register,
+ expression,
+ } => CallFrameInstruction::ValExpression(register, convert_expression(expression)?),
+ read::CallFrameInstruction::Restore { register } => {
+ CallFrameInstruction::Restore(register)
+ }
+ read::CallFrameInstruction::RememberState => CallFrameInstruction::RememberState,
+ read::CallFrameInstruction::RestoreState => CallFrameInstruction::RestoreState,
+ read::CallFrameInstruction::ArgsSize { size } => {
+ CallFrameInstruction::ArgsSize(size as u32)
+ }
+ read::CallFrameInstruction::Nop => return Ok(None),
+ }))
+ }
+ }
+}
+
+#[cfg(test)]
+#[cfg(feature = "read")]
+mod tests {
+ use super::*;
+ use crate::arch::X86_64;
+ use crate::read;
+ use crate::write::EndianVec;
+ use crate::LittleEndian;
+
+ #[test]
+ fn test_frame_table() {
+ for &version in &[1, 3, 4] {
+ for &address_size in &[4, 8] {
+ for &format in &[Format::Dwarf32, Format::Dwarf64] {
+ let encoding = Encoding {
+ format,
+ version,
+ address_size,
+ };
+ let mut frames = FrameTable::default();
+
+ let cie1 = CommonInformationEntry::new(encoding, 1, 8, X86_64::RA);
+ let cie1_id = frames.add_cie(cie1.clone());
+ assert_eq!(cie1_id, frames.add_cie(cie1.clone()));
+
+ let mut cie2 = CommonInformationEntry::new(encoding, 1, 8, X86_64::RA);
+ cie2.lsda_encoding = Some(constants::DW_EH_PE_absptr);
+ cie2.personality =
+ Some((constants::DW_EH_PE_absptr, Address::Constant(0x1234)));
+ cie2.signal_trampoline = true;
+ let cie2_id = frames.add_cie(cie2.clone());
+ assert_ne!(cie1_id, cie2_id);
+ assert_eq!(cie2_id, frames.add_cie(cie2.clone()));
+
+ let fde1 = FrameDescriptionEntry::new(Address::Constant(0x1000), 0x10);
+ frames.add_fde(cie1_id, fde1.clone());
+
+ let fde2 = FrameDescriptionEntry::new(Address::Constant(0x2000), 0x20);
+ frames.add_fde(cie1_id, fde2.clone());
+
+ let mut fde3 = FrameDescriptionEntry::new(Address::Constant(0x3000), 0x30);
+ fde3.lsda = Some(Address::Constant(0x3300));
+ frames.add_fde(cie2_id, fde3.clone());
+
+ let mut fde4 = FrameDescriptionEntry::new(Address::Constant(0x4000), 0x40);
+ fde4.lsda = Some(Address::Constant(0x4400));
+ frames.add_fde(cie2_id, fde4.clone());
+
+ let mut cie3 = CommonInformationEntry::new(encoding, 1, 8, X86_64::RA);
+ cie3.fde_address_encoding = constants::DW_EH_PE_pcrel;
+ cie3.lsda_encoding = Some(constants::DW_EH_PE_pcrel);
+ cie3.personality = Some((constants::DW_EH_PE_pcrel, Address::Constant(0x1235)));
+ cie3.signal_trampoline = true;
+ let cie3_id = frames.add_cie(cie3.clone());
+ assert_ne!(cie2_id, cie3_id);
+ assert_eq!(cie3_id, frames.add_cie(cie3.clone()));
+
+ let mut fde5 = FrameDescriptionEntry::new(Address::Constant(0x5000), 0x50);
+ fde5.lsda = Some(Address::Constant(0x5500));
+ frames.add_fde(cie3_id, fde5.clone());
+
+ // Test writing `.debug_frame`.
+ let mut debug_frame = DebugFrame::from(EndianVec::new(LittleEndian));
+ frames.write_debug_frame(&mut debug_frame).unwrap();
+
+ let mut read_debug_frame =
+ read::DebugFrame::new(debug_frame.slice(), LittleEndian);
+ read_debug_frame.set_address_size(address_size);
+ let convert_frames = FrameTable::from(&read_debug_frame, &|address| {
+ Some(Address::Constant(address))
+ })
+ .unwrap();
+ assert_eq!(frames.cies, convert_frames.cies);
+ assert_eq!(frames.fdes.len(), convert_frames.fdes.len());
+ for (a, b) in frames.fdes.iter().zip(convert_frames.fdes.iter()) {
+ assert_eq!(a.1, b.1);
+ }
+
+ if version == 1 {
+ // Test writing `.eh_frame`.
+ let mut eh_frame = EhFrame::from(EndianVec::new(LittleEndian));
+ frames.write_eh_frame(&mut eh_frame).unwrap();
+
+ let mut read_eh_frame = read::EhFrame::new(eh_frame.slice(), LittleEndian);
+ read_eh_frame.set_address_size(address_size);
+ let convert_frames = FrameTable::from(&read_eh_frame, &|address| {
+ Some(Address::Constant(address))
+ })
+ .unwrap();
+ assert_eq!(frames.cies, convert_frames.cies);
+ assert_eq!(frames.fdes.len(), convert_frames.fdes.len());
+ for (a, b) in frames.fdes.iter().zip(convert_frames.fdes.iter()) {
+ assert_eq!(a.1, b.1);
+ }
+ }
+ }
+ }
+ }
+ }
+
+ #[test]
+ fn test_frame_instruction() {
+ let mut expression = Expression::new();
+ expression.op_constu(0);
+
+ let cie_instructions = [
+ CallFrameInstruction::Cfa(X86_64::RSP, 8),
+ CallFrameInstruction::Offset(X86_64::RA, -8),
+ ];
+
+ let fde_instructions = [
+ (0, CallFrameInstruction::Cfa(X86_64::RSP, 0)),
+ (0, CallFrameInstruction::Cfa(X86_64::RSP, -8)),
+ (2, CallFrameInstruction::CfaRegister(X86_64::RBP)),
+ (4, CallFrameInstruction::CfaOffset(8)),
+ (4, CallFrameInstruction::CfaOffset(0)),
+ (4, CallFrameInstruction::CfaOffset(-8)),
+ (6, CallFrameInstruction::CfaExpression(expression.clone())),
+ (8, CallFrameInstruction::Restore(Register(1))),
+ (8, CallFrameInstruction::Restore(Register(101))),
+ (10, CallFrameInstruction::Undefined(Register(2))),
+ (12, CallFrameInstruction::SameValue(Register(3))),
+ (14, CallFrameInstruction::Offset(Register(4), 16)),
+ (14, CallFrameInstruction::Offset(Register(104), 16)),
+ (16, CallFrameInstruction::ValOffset(Register(5), -24)),
+ (16, CallFrameInstruction::ValOffset(Register(5), 24)),
+ (18, CallFrameInstruction::Register(Register(6), Register(7))),
+ (
+ 20,
+ CallFrameInstruction::Expression(Register(8), expression.clone()),
+ ),
+ (
+ 22,
+ CallFrameInstruction::ValExpression(Register(9), expression.clone()),
+ ),
+ (24 + 0x80, CallFrameInstruction::RememberState),
+ (26 + 0x280, CallFrameInstruction::RestoreState),
+ (28 + 0x20280, CallFrameInstruction::ArgsSize(23)),
+ ];
+
+ for &version in &[1, 3, 4] {
+ for &address_size in &[4, 8] {
+ for &format in &[Format::Dwarf32, Format::Dwarf64] {
+ let encoding = Encoding {
+ format,
+ version,
+ address_size,
+ };
+ let mut frames = FrameTable::default();
+
+ let mut cie = CommonInformationEntry::new(encoding, 2, 8, X86_64::RA);
+ for i in &cie_instructions {
+ cie.add_instruction(i.clone());
+ }
+ let cie_id = frames.add_cie(cie);
+
+ let mut fde = FrameDescriptionEntry::new(Address::Constant(0x1000), 0x10);
+ for (o, i) in &fde_instructions {
+ fde.add_instruction(*o, i.clone());
+ }
+ frames.add_fde(cie_id, fde);
+
+ let mut debug_frame = DebugFrame::from(EndianVec::new(LittleEndian));
+ frames.write_debug_frame(&mut debug_frame).unwrap();
+
+ let mut read_debug_frame =
+ read::DebugFrame::new(debug_frame.slice(), LittleEndian);
+ read_debug_frame.set_address_size(address_size);
+ let frames = FrameTable::from(&read_debug_frame, &|address| {
+ Some(Address::Constant(address))
+ })
+ .unwrap();
+
+ assert_eq!(
+ &frames.cies.get_index(0).unwrap().instructions,
+ &cie_instructions
+ );
+ assert_eq!(&frames.fdes[0].1.instructions, &fde_instructions);
+ }
+ }
+ }
+ }
+}
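The round-trip tests above exercise the write-side call frame information API end to end. As a standalone illustration (not part of this diff), the sketch below builds a one-CIE/one-FDE table and serializes it to `.eh_frame`; it assumes gimli 0.26's crate-root re-exports (`Encoding`, `Format`, `LittleEndian`, `X86_64`) and the `write` feature.

    use gimli::write::{
        Address, CallFrameInstruction, CommonInformationEntry, EhFrame, EndianVec,
        FrameDescriptionEntry, FrameTable,
    };
    use gimli::{Encoding, Format, LittleEndian, X86_64};

    fn emit_eh_frame() -> Result<Vec<u8>, gimli::write::Error> {
        // `.eh_frame` uses CIE version 1 (see the `version == 1` branch in the test above).
        let encoding = Encoding {
            format: Format::Dwarf32,
            version: 1,
            address_size: 8,
        };

        // One CIE shared by every FDE, with the same instructions as the tests above:
        // define the CFA as RSP + 8 and record where the return address is saved.
        let mut cie = CommonInformationEntry::new(encoding, 1, 8, X86_64::RA);
        cie.add_instruction(CallFrameInstruction::Cfa(X86_64::RSP, 8));
        cie.add_instruction(CallFrameInstruction::Offset(X86_64::RA, -8));

        let mut frames = FrameTable::default();
        let cie_id = frames.add_cie(cie);

        // One FDE covering 0x10 bytes of code starting at address 0x1000,
        // adjusting the CFA offset partway through the function.
        let mut fde = FrameDescriptionEntry::new(Address::Constant(0x1000), 0x10);
        fde.add_instruction(2, CallFrameInstruction::CfaOffset(16));
        frames.add_fde(cie_id, fde);

        let mut eh_frame = EhFrame::from(EndianVec::new(LittleEndian));
        frames.write_eh_frame(&mut eh_frame)?;
        Ok(eh_frame.slice().to_vec())
    }

The same table can also be written to `.debug_frame` via `write_debug_frame`, as the test does for every encoding.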
diff --git a/vendor/gimli-0.26.2/src/write/dwarf.rs b/vendor/gimli-0.26.2/src/write/dwarf.rs
new file mode 100644
index 000000000..ea507126a
--- /dev/null
+++ b/vendor/gimli-0.26.2/src/write/dwarf.rs
@@ -0,0 +1,138 @@
+use alloc::vec::Vec;
+
+use crate::common::Encoding;
+use crate::write::{
+ AbbreviationTable, LineProgram, LineStringTable, Result, Sections, StringTable, Unit,
+ UnitTable, Writer,
+};
+
+/// Writable DWARF information for more than one unit.
+#[derive(Debug, Default)]
+pub struct Dwarf {
+ /// A table of units. These are primarily stored in the `.debug_info` section,
+ /// but they also contain information that is stored in other sections.
+ pub units: UnitTable,
+
+ /// Extra line number programs that are not associated with a unit.
+ ///
+ /// These should only be used when generating DWARF5 line-only debug
+ /// information.
+ pub line_programs: Vec<LineProgram>,
+
+ /// A table of strings that will be stored in the `.debug_line_str` section.
+ pub line_strings: LineStringTable,
+
+ /// A table of strings that will be stored in the `.debug_str` section.
+ pub strings: StringTable,
+}
+
+impl Dwarf {
+ /// Create a new `Dwarf` instance.
+ #[inline]
+ pub fn new() -> Self {
+ Self::default()
+ }
+
+ /// Write the DWARF information to the given sections.
+ pub fn write<W: Writer>(&mut self, sections: &mut Sections<W>) -> Result<()> {
+ let line_strings = self.line_strings.write(&mut sections.debug_line_str)?;
+ let strings = self.strings.write(&mut sections.debug_str)?;
+ self.units.write(sections, &line_strings, &strings)?;
+ for line_program in &self.line_programs {
+ line_program.write(
+ &mut sections.debug_line,
+ line_program.encoding(),
+ &line_strings,
+ &strings,
+ )?;
+ }
+ Ok(())
+ }
+}
+
+/// Writable DWARF information for a single unit.
+#[derive(Debug)]
+pub struct DwarfUnit {
+ /// A unit. This is primarily stored in the `.debug_info` section,
+ /// but also contains information that is stored in other sections.
+ pub unit: Unit,
+
+ /// A table of strings that will be stored in the `.debug_line_str` section.
+ pub line_strings: LineStringTable,
+
+ /// A table of strings that will be stored in the `.debug_str` section.
+ pub strings: StringTable,
+}
+
+impl DwarfUnit {
+ /// Create a new `DwarfUnit`.
+ ///
+ /// Note: you should set `self.unit.line_program` after creation.
+ /// This cannot be done earlier because it may need to reference
+ /// `self.line_strings`.
+ pub fn new(encoding: Encoding) -> Self {
+ let unit = Unit::new(encoding, LineProgram::none());
+ DwarfUnit {
+ unit,
+ line_strings: LineStringTable::default(),
+ strings: StringTable::default(),
+ }
+ }
+
+ /// Write the DWARF information to the given sections.
+ pub fn write<W: Writer>(&mut self, sections: &mut Sections<W>) -> Result<()> {
+ let line_strings = self.line_strings.write(&mut sections.debug_line_str)?;
+ let strings = self.strings.write(&mut sections.debug_str)?;
+
+ let abbrev_offset = sections.debug_abbrev.offset();
+ let mut abbrevs = AbbreviationTable::default();
+
+ self.unit.write(
+ sections,
+ abbrev_offset,
+ &mut abbrevs,
+ &line_strings,
+ &strings,
+ )?;
+ // No references should exist because we didn't give out any UnitId.
+ assert!(sections.debug_info_refs.is_empty());
+ assert!(sections.debug_loc_refs.is_empty());
+ assert!(sections.debug_loclists_refs.is_empty());
+
+ abbrevs.write(&mut sections.debug_abbrev)?;
+ Ok(())
+ }
+}
+
+#[cfg(feature = "read")]
+pub(crate) mod convert {
+ use super::*;
+ use crate::read::{self, Reader};
+ use crate::write::{Address, ConvertResult};
+
+ impl Dwarf {
+ /// Create a `write::Dwarf` by converting a `read::Dwarf`.
+ ///
+ /// `convert_address` is a function to convert read addresses into the `Address`
+ /// type. For non-relocatable addresses, this function may simply return
+ /// `Address::Constant(address)`. For relocatable addresses, it is the caller's
+ /// responsibility to determine the symbol and addend corresponding to the address
+ /// and return `Address::Symbol { symbol, addend }`.
+ pub fn from<R: Reader<Offset = usize>>(
+ dwarf: &read::Dwarf<R>,
+ convert_address: &dyn Fn(u64) -> Option<Address>,
+ ) -> ConvertResult<Dwarf> {
+ let mut line_strings = LineStringTable::default();
+ let mut strings = StringTable::default();
+ let units = UnitTable::from(dwarf, &mut line_strings, &mut strings, convert_address)?;
+ // TODO: convert the line programs that were not referenced by a unit.
+ let line_programs = Vec::new();
+ Ok(Dwarf {
+ units,
+ line_programs,
+ line_strings,
+ strings,
+ })
+ }
+ }
+}
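For context (not part of the diff), a minimal sketch of driving this module from user code: build a single-unit `DwarfUnit`, set an attribute on its root DIE, and serialize every section into in-memory buffers. It assumes the public gimli 0.26 write API (`Unit::root`, `DebuggingInformationEntry::set`, `Sections::new`, `Sections::for_each`) behaves as in the released crate.

    use gimli::write::{AttributeValue, DwarfUnit, EndianVec, Sections};
    use gimli::{Encoding, Format, LittleEndian};

    fn build_unit() -> Result<(), gimli::write::Error> {
        let encoding = Encoding {
            format: Format::Dwarf32,
            version: 4,
            address_size: 8,
        };
        let mut dwarf = DwarfUnit::new(encoding);

        // Attach an attribute to the root DIE of the unit.
        let root = dwarf.unit.root();
        dwarf.unit.get_mut(root).set(
            gimli::DW_AT_producer,
            AttributeValue::String(b"example producer".to_vec()),
        );

        // Serialize all sections into `EndianVec` buffers.
        let mut sections = Sections::new(EndianVec::new(LittleEndian));
        dwarf.write(&mut sections)?;

        // Hand the encoded bytes to an object-file writer, or just inspect them.
        // (`for_each` visiting each section with its `SectionId` is assumed public API.)
        sections.for_each(|id, data| {
            println!("{}: {} bytes", id.name(), data.slice().len());
            Ok::<(), gimli::write::Error>(())
        })
    }

`Dwarf::from` above goes the other way: it rebuilds these tables from a `read::Dwarf`, using a caller-supplied `convert_address` closure such as `&|addr| Some(Address::Constant(addr))` for non-relocatable input.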
diff --git a/vendor/gimli-0.26.2/src/write/endian_vec.rs b/vendor/gimli-0.26.2/src/write/endian_vec.rs
new file mode 100644
index 000000000..7b040606a
--- /dev/null
+++ b/vendor/gimli-0.26.2/src/write/endian_vec.rs
@@ -0,0 +1,117 @@
+use alloc::vec::Vec;
+use std::mem;
+
+use crate::endianity::Endianity;
+use crate::write::{Error, Result, Writer};
+
+/// A `Vec<u8>` with endianity metadata.
+///
+/// This implements the `Writer` trait, which is used for all writing of DWARF sections.
+#[derive(Debug, Clone)]
+pub struct EndianVec<Endian>
+where
+ Endian: Endianity,
+{
+ vec: Vec<u8>,
+ endian: Endian,
+}
+
+impl<Endian> EndianVec<Endian>
+where
+ Endian: Endianity,
+{
+ /// Construct an empty `EndianVec` with the given endianity.
+ pub fn new(endian: Endian) -> EndianVec<Endian> {
+ EndianVec {
+ vec: Vec::new(),
+ endian,
+ }
+ }
+
+ /// Return a reference to the raw slice.
+ pub fn slice(&self) -> &[u8] {
+ &self.vec
+ }
+
+ /// Convert into a `Vec<u8>`.
+ pub fn into_vec(self) -> Vec<u8> {
+ self.vec
+ }
+
+ /// Take any written data out of the `EndianVec`, leaving an empty `Vec` in its place.
+ pub fn take(&mut self) -> Vec<u8> {
+ let mut vec = Vec::new();
+ mem::swap(&mut self.vec, &mut vec);
+ vec
+ }
+}
+
+impl<Endian> Writer for EndianVec<Endian>
+where
+ Endian: Endianity,
+{
+ type Endian = Endian;
+
+ #[inline]
+ fn endian(&self) -> Self::Endian {
+ self.endian
+ }
+
+ #[inline]
+ fn len(&self) -> usize {
+ self.vec.len()
+ }
+
+ fn write(&mut self, bytes: &[u8]) -> Result<()> {
+ self.vec.extend(bytes);
+ Ok(())
+ }
+
+ fn write_at(&mut self, offset: usize, bytes: &[u8]) -> Result<()> {
+ if offset > self.vec.len() {
+ return Err(Error::OffsetOutOfBounds);
+ }
+ let to = &mut self.vec[offset..];
+ if bytes.len() > to.len() {
+ return Err(Error::LengthOutOfBounds);
+ }
+ let to = &mut to[..bytes.len()];
+ to.copy_from_slice(bytes);
+ Ok(())
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::LittleEndian;
+
+ #[test]
+ fn test_endian_vec() {
+ let mut w = EndianVec::new(LittleEndian);
+ assert_eq!(w.endian(), LittleEndian);
+ assert_eq!(w.len(), 0);
+
+ w.write(&[1, 2]).unwrap();
+ assert_eq!(w.slice(), &[1, 2]);
+ assert_eq!(w.len(), 2);
+
+ w.write(&[3, 4, 5]).unwrap();
+ assert_eq!(w.slice(), &[1, 2, 3, 4, 5]);
+ assert_eq!(w.len(), 5);
+
+ w.write_at(0, &[6, 7]).unwrap();
+ assert_eq!(w.slice(), &[6, 7, 3, 4, 5]);
+ assert_eq!(w.len(), 5);
+
+ w.write_at(3, &[8, 9]).unwrap();
+ assert_eq!(w.slice(), &[6, 7, 3, 8, 9]);
+ assert_eq!(w.len(), 5);
+
+ assert_eq!(w.write_at(4, &[6, 7]), Err(Error::LengthOutOfBounds));
+ assert_eq!(w.write_at(5, &[6, 7]), Err(Error::LengthOutOfBounds));
+ assert_eq!(w.write_at(6, &[6, 7]), Err(Error::OffsetOutOfBounds));
+
+ assert_eq!(w.into_vec(), vec![6, 7, 3, 8, 9]);
+ }
+}
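One use of `write_at` is the reserve-then-patch pattern used by the line program writer below (`write_udata_at`, `write_initial_length_at`): write a placeholder where a length field belongs, emit the body, then patch the placeholder once the size is known. A minimal sketch of that pattern, using only methods implemented in this file:

    use gimli::write::{EndianVec, Writer};
    use gimli::LittleEndian;

    fn length_prefixed_block() -> Result<Vec<u8>, gimli::write::Error> {
        let mut w = EndianVec::new(LittleEndian);

        // Reserve 4 bytes for a length that is not known yet.
        let length_offset = w.len();
        w.write(&[0u8; 4])?;
        let body_start = w.len();

        // Write the body.
        w.write(b"some section payload")?;

        // Patch the reserved slot with the real body length.
        let body_len = (w.len() - body_start) as u32;
        w.write_at(length_offset, &body_len.to_le_bytes())?;

        Ok(w.into_vec())
    }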
diff --git a/vendor/gimli-0.26.2/src/write/line.rs b/vendor/gimli-0.26.2/src/write/line.rs
new file mode 100644
index 000000000..310170d9a
--- /dev/null
+++ b/vendor/gimli-0.26.2/src/write/line.rs
@@ -0,0 +1,1960 @@
+use alloc::vec::Vec;
+use indexmap::{IndexMap, IndexSet};
+use std::ops::{Deref, DerefMut};
+
+use crate::common::{DebugLineOffset, Encoding, Format, LineEncoding, SectionId};
+use crate::constants;
+use crate::leb128;
+use crate::write::{
+ Address, DebugLineStrOffsets, DebugStrOffsets, Error, LineStringId, LineStringTable, Result,
+ Section, StringId, Writer,
+};
+
+/// The number assigned to the first special opcode.
+//
+// We output all instructions for all DWARF versions, since readers
+// should be able to ignore instructions they don't support.
+const OPCODE_BASE: u8 = 13;
+
+/// A line number program.
+#[derive(Debug, Clone)]
+pub struct LineProgram {
+ /// True if this line program was created with `LineProgram::none()`.
+ none: bool,
+ encoding: Encoding,
+ line_encoding: LineEncoding,
+
+ /// A list of source directory path names.
+ ///
+ /// If a path is relative, then the directory is located relative to the working
+ /// directory of the compilation unit.
+ ///
+ /// The first entry is for the working directory of the compilation unit.
+ directories: IndexSet<LineString>,
+
+ /// A list of source file entries.
+ ///
+ /// Each entry has a path name and a directory.
+ ///
+ /// If a path is relative, then the file is located relative to the
+ /// directory. Otherwise the directory is meaningless.
+ ///
+ /// Does not include comp_file, even for version >= 5.
+ files: IndexMap<(LineString, DirectoryId), FileInfo>,
+
+ /// The primary source file of the compilation unit.
+ /// This is required for version >= 5, but we never reference it elsewhere
+ /// because DWARF defines DW_AT_decl_file=0 to mean not specified.
+ comp_file: (LineString, FileInfo),
+
+ /// True if the file entries may have valid timestamps.
+ ///
+ /// Entries may still have a timestamp of 0 even if this is set.
+ /// For version <= 4, this is ignored.
+ /// For version 5, this controls whether to emit `DW_LNCT_timestamp`.
+ pub file_has_timestamp: bool,
+
+ /// True if the file entries may have valid sizes.
+ ///
+ /// Entries may still have a size of 0 even if this is set.
+ /// For version <= 4, this is ignored.
+ /// For version 5, this controls whether to emit `DW_LNCT_size`.
+ pub file_has_size: bool,
+
+ /// True if the file entries have valid MD5 checksums.
+ ///
+ /// For version <= 4, this is ignored.
+ /// For version 5, this controls whether to emit `DW_LNCT_MD5`.
+ pub file_has_md5: bool,
+
+ prev_row: LineRow,
+ row: LineRow,
+ // TODO: this probably should be either rows or sequences instead
+ instructions: Vec<LineInstruction>,
+ in_sequence: bool,
+}
+
+impl LineProgram {
+ /// Create a new `LineProgram`.
+ ///
+ /// `comp_dir` defines the working directory of the compilation unit,
+ /// and must be the same as the `DW_AT_comp_dir` attribute
+ /// of the compilation unit DIE.
+ ///
+ /// `comp_file` and `comp_file_info` define the primary source file
+ /// of the compilation unit and must be the same as the `DW_AT_name`
+ /// attribute of the compilation unit DIE.
+ ///
+ /// # Panics
+ ///
+ /// Panics if `line_encoding.line_base` > 0.
+ ///
+ /// Panics if `line_encoding.line_base` + `line_encoding.line_range` <= 0.
+ ///
+ /// Panics if `comp_dir` is empty or contains a null byte.
+ ///
+ /// Panics if `comp_file` is empty or contains a null byte.
+ #[allow(clippy::too_many_arguments)]
+ #[allow(clippy::new_ret_no_self)]
+ pub fn new(
+ encoding: Encoding,
+ line_encoding: LineEncoding,
+ comp_dir: LineString,
+ comp_file: LineString,
+ comp_file_info: Option<FileInfo>,
+ ) -> LineProgram {
+ // We require a special opcode for a line advance of 0.
+ // See the debug_asserts in generate_row().
+ assert!(line_encoding.line_base <= 0);
+ assert!(line_encoding.line_base + line_encoding.line_range as i8 > 0);
+ let mut program = LineProgram {
+ none: false,
+ encoding,
+ line_encoding,
+ directories: IndexSet::new(),
+ files: IndexMap::new(),
+ comp_file: (comp_file, comp_file_info.unwrap_or_default()),
+ prev_row: LineRow::initial_state(line_encoding),
+ row: LineRow::initial_state(line_encoding),
+ instructions: Vec::new(),
+ in_sequence: false,
+ file_has_timestamp: false,
+ file_has_size: false,
+ file_has_md5: false,
+ };
+ // For all DWARF versions, directory index 0 is comp_dir.
+ // For version <= 4, the entry is implicit. We still add
+ // it here so that we use it, but we don't emit it.
+ program.add_directory(comp_dir);
+ program
+ }
+
+ /// Create a new `LineProgram` with no fields set.
+ ///
+ /// This can be used when the `LineProgram` will not be used.
+ ///
+ /// You should not attempt to add files or line instructions to
+ /// this line program, or write it to the `.debug_line` section.
+ pub fn none() -> Self {
+ let line_encoding = LineEncoding::default();
+ LineProgram {
+ none: true,
+ encoding: Encoding {
+ format: Format::Dwarf32,
+ version: 2,
+ address_size: 0,
+ },
+ line_encoding,
+ directories: IndexSet::new(),
+ files: IndexMap::new(),
+ comp_file: (LineString::String(Vec::new()), FileInfo::default()),
+ prev_row: LineRow::initial_state(line_encoding),
+ row: LineRow::initial_state(line_encoding),
+ instructions: Vec::new(),
+ in_sequence: false,
+ file_has_timestamp: false,
+ file_has_size: false,
+ file_has_md5: false,
+ }
+ }
+
+ /// Return true if this line program was created with `LineProgram::none()`.
+ #[inline]
+ pub fn is_none(&self) -> bool {
+ self.none
+ }
+
+ /// Return the encoding parameters for this line program.
+ #[inline]
+ pub fn encoding(&self) -> Encoding {
+ self.encoding
+ }
+
+ /// Return the DWARF version for this line program.
+ #[inline]
+ pub fn version(&self) -> u16 {
+ self.encoding.version
+ }
+
+ /// Return the address size in bytes for this line program.
+ #[inline]
+ pub fn address_size(&self) -> u8 {
+ self.encoding.address_size
+ }
+
+ /// Return the DWARF format for this line program.
+ #[inline]
+ pub fn format(&self) -> Format {
+ self.encoding.format
+ }
+
+ /// Return the id for the working directory of the compilation unit.
+ #[inline]
+ pub fn default_directory(&self) -> DirectoryId {
+ DirectoryId(0)
+ }
+
+ /// Add a directory entry and return its id.
+ ///
+ /// If the directory already exists, then return the id of the existing entry.
+ ///
+ /// If the path is relative, then the directory is located relative to the working
+ /// directory of the compilation unit.
+ ///
+ /// # Panics
+ ///
+ /// Panics if `directory` is empty or contains a null byte.
+ pub fn add_directory(&mut self, directory: LineString) -> DirectoryId {
+ if let LineString::String(ref val) = directory {
+ // For DWARF version <= 4, directories must not be empty.
+ // The first directory isn't emitted so skip the check for it.
+ if self.encoding.version <= 4 && !self.directories.is_empty() {
+ assert!(!val.is_empty());
+ }
+ assert!(!val.contains(&0));
+ }
+ let (index, _) = self.directories.insert_full(directory);
+ DirectoryId(index)
+ }
+
+ /// Get a reference to a directory entry.
+ ///
+ /// # Panics
+ ///
+ /// Panics if `id` is invalid.
+ pub fn get_directory(&self, id: DirectoryId) -> &LineString {
+ self.directories.get_index(id.0).unwrap()
+ }
+
+ /// Add a file entry and return its id.
+ ///
+ /// If the file already exists, then return the id of the existing entry.
+ ///
+ /// If the file path is relative, then the file is located relative
+ /// to the directory. Otherwise the directory is meaningless, but it
+ /// is still used as a key for file entries.
+ ///
+ /// If `info` is `None`, then new entries are assigned
+ /// default information, and existing entries are unmodified.
+ ///
+ /// If `info` is not `None`, then it is always assigned to the
+ /// entry, even if the entry already exists.
+ ///
+ /// # Panics
+ ///
+ /// Panics if `file` is empty or contains a null byte.
+ pub fn add_file(
+ &mut self,
+ file: LineString,
+ directory: DirectoryId,
+ info: Option<FileInfo>,
+ ) -> FileId {
+ if let LineString::String(ref val) = file {
+ assert!(!val.is_empty());
+ assert!(!val.contains(&0));
+ }
+
+ let key = (file, directory);
+ let index = if let Some(info) = info {
+ let (index, _) = self.files.insert_full(key, info);
+ index
+ } else {
+ let entry = self.files.entry(key);
+ let index = entry.index();
+ entry.or_insert(FileInfo::default());
+ index
+ };
+ FileId::new(index)
+ }
+
+ /// Get a reference to a file entry.
+ ///
+ /// # Panics
+ ///
+ /// Panics if `id` is invalid.
+ pub fn get_file(&self, id: FileId) -> (&LineString, DirectoryId) {
+ match id.index() {
+ None => (&self.comp_file.0, DirectoryId(0)),
+ Some(index) => self
+ .files
+ .get_index(index)
+ .map(|entry| (&(entry.0).0, (entry.0).1))
+ .unwrap(),
+ }
+ }
+
+ /// Get a reference to the info for a file entry.
+ ///
+ /// # Panics
+ ///
+ /// Panics if `id` is invalid.
+ pub fn get_file_info(&self, id: FileId) -> &FileInfo {
+ match id.index() {
+ None => &self.comp_file.1,
+ Some(index) => self.files.get_index(index).map(|entry| entry.1).unwrap(),
+ }
+ }
+
+ /// Get a mutable reference to the info for a file entry.
+ ///
+ /// # Panics
+ ///
+ /// Panics if `id` is invalid.
+ pub fn get_file_info_mut(&mut self, id: FileId) -> &mut FileInfo {
+ match id.index() {
+ None => &mut self.comp_file.1,
+ Some(index) => self
+ .files
+ .get_index_mut(index)
+ .map(|entry| entry.1)
+ .unwrap(),
+ }
+ }
+
+ /// Begin a new sequence and set its base address.
+ ///
+ /// # Panics
+ ///
+ /// Panics if a sequence has already begun.
+ pub fn begin_sequence(&mut self, address: Option<Address>) {
+ assert!(!self.in_sequence);
+ self.in_sequence = true;
+ if let Some(address) = address {
+ self.instructions.push(LineInstruction::SetAddress(address));
+ }
+ }
+
+ /// End the sequence, and reset the row to its default values.
+ ///
+ /// Only the `address_offset` and `op_index` fields of the current row are used.
+ ///
+ /// # Panics
+ ///
+ /// Panics if a sequence has not begun.
+ pub fn end_sequence(&mut self, address_offset: u64) {
+ assert!(self.in_sequence);
+ self.in_sequence = false;
+ self.row.address_offset = address_offset;
+ let op_advance = self.op_advance();
+ if op_advance != 0 {
+ self.instructions
+ .push(LineInstruction::AdvancePc(op_advance));
+ }
+ self.instructions.push(LineInstruction::EndSequence);
+ self.prev_row = LineRow::initial_state(self.line_encoding);
+ self.row = LineRow::initial_state(self.line_encoding);
+ }
+
+ /// Return true if a sequence has begun.
+ #[inline]
+ pub fn in_sequence(&self) -> bool {
+ self.in_sequence
+ }
+
+ /// Returns a reference to the data for the current row.
+ #[inline]
+ pub fn row(&mut self) -> &mut LineRow {
+ &mut self.row
+ }
+
+ /// Generates the line number information instructions for the current row.
+ ///
+ /// After the instructions are generated, it sets `discriminator` to 0, and sets
+ /// `basic_block`, `prologue_end`, and `epilogue_begin` to false.
+ ///
+ /// # Panics
+ ///
+ /// Panics if a sequence has not begun.
+ /// Panics if the `address_offset` decreases.
+ pub fn generate_row(&mut self) {
+ assert!(self.in_sequence);
+
+ // Output fields that are reset on every row.
+ if self.row.discriminator != 0 {
+ self.instructions
+ .push(LineInstruction::SetDiscriminator(self.row.discriminator));
+ self.row.discriminator = 0;
+ }
+ if self.row.basic_block {
+ self.instructions.push(LineInstruction::SetBasicBlock);
+ self.row.basic_block = false;
+ }
+ if self.row.prologue_end {
+ self.instructions.push(LineInstruction::SetPrologueEnd);
+ self.row.prologue_end = false;
+ }
+ if self.row.epilogue_begin {
+ self.instructions.push(LineInstruction::SetEpilogueBegin);
+ self.row.epilogue_begin = false;
+ }
+
+ // Output fields that are not reset on every row.
+ if self.row.is_statement != self.prev_row.is_statement {
+ self.instructions.push(LineInstruction::NegateStatement);
+ }
+ if self.row.file != self.prev_row.file {
+ self.instructions
+ .push(LineInstruction::SetFile(self.row.file));
+ }
+ if self.row.column != self.prev_row.column {
+ self.instructions
+ .push(LineInstruction::SetColumn(self.row.column));
+ }
+ if self.row.isa != self.prev_row.isa {
+ self.instructions
+ .push(LineInstruction::SetIsa(self.row.isa));
+ }
+
+ // Advance the line, address, and operation index.
+ let line_base = i64::from(self.line_encoding.line_base) as u64;
+ let line_range = u64::from(self.line_encoding.line_range);
+ let line_advance = self.row.line as i64 - self.prev_row.line as i64;
+ let op_advance = self.op_advance();
+
+ // Default to special advances of 0.
+ let special_base = u64::from(OPCODE_BASE);
+ // TODO: handle lack of special opcodes for 0 line advance
+ debug_assert!(self.line_encoding.line_base <= 0);
+ debug_assert!(self.line_encoding.line_base + self.line_encoding.line_range as i8 >= 0);
+ let special_default = special_base.wrapping_sub(line_base);
+ let mut special = special_default;
+ let mut use_special = false;
+
+ if line_advance != 0 {
+ let special_line = (line_advance as u64).wrapping_sub(line_base);
+ if special_line < line_range {
+ special = special_base + special_line;
+ use_special = true;
+ } else {
+ self.instructions
+ .push(LineInstruction::AdvanceLine(line_advance));
+ }
+ }
+
+ if op_advance != 0 {
+ // Using ConstAddPc can save a byte.
+ let (special_op_advance, const_add_pc) = if special + op_advance * line_range <= 255 {
+ (op_advance, false)
+ } else {
+ let op_range = (255 - special_base) / line_range;
+ (op_advance - op_range, true)
+ };
+
+ let special_op = special_op_advance * line_range;
+ if special + special_op <= 255 {
+ special += special_op;
+ use_special = true;
+ if const_add_pc {
+ self.instructions.push(LineInstruction::ConstAddPc);
+ }
+ } else {
+ self.instructions
+ .push(LineInstruction::AdvancePc(op_advance));
+ }
+ }
+
+ if use_special && special != special_default {
+ debug_assert!(special >= special_base);
+ debug_assert!(special <= 255);
+ self.instructions
+ .push(LineInstruction::Special(special as u8));
+ } else {
+ self.instructions.push(LineInstruction::Copy);
+ }
+
+ self.prev_row = self.row;
+ }
+
+ fn op_advance(&self) -> u64 {
+ debug_assert!(self.row.address_offset >= self.prev_row.address_offset);
+ let mut address_advance = self.row.address_offset - self.prev_row.address_offset;
+ if self.line_encoding.minimum_instruction_length != 1 {
+ debug_assert_eq!(
+ self.row.address_offset % u64::from(self.line_encoding.minimum_instruction_length),
+ 0
+ );
+ address_advance /= u64::from(self.line_encoding.minimum_instruction_length);
+ }
+ address_advance * u64::from(self.line_encoding.maximum_operations_per_instruction)
+ + self.row.op_index
+ - self.prev_row.op_index
+ }
+
+ /// Returns true if the line number program has no instructions.
+ ///
+ /// Does not check the file or directory entries.
+ #[inline]
+ pub fn is_empty(&self) -> bool {
+ self.instructions.is_empty()
+ }
+
+ /// Write the line number program to the given section.
+ ///
+ /// # Panics
+ ///
+ /// Panics if `self.is_none()`.
+ pub fn write<W: Writer>(
+ &self,
+ w: &mut DebugLine<W>,
+ encoding: Encoding,
+ debug_line_str_offsets: &DebugLineStrOffsets,
+ debug_str_offsets: &DebugStrOffsets,
+ ) -> Result<DebugLineOffset> {
+ assert!(!self.is_none());
+
+ if encoding.version < self.version()
+ || encoding.format != self.format()
+ || encoding.address_size != self.address_size()
+ {
+ return Err(Error::IncompatibleLineProgramEncoding);
+ }
+
+ let offset = w.offset();
+
+ let length_offset = w.write_initial_length(self.format())?;
+ let length_base = w.len();
+
+ if self.version() < 2 || self.version() > 5 {
+ return Err(Error::UnsupportedVersion(self.version()));
+ }
+ w.write_u16(self.version())?;
+
+ if self.version() >= 5 {
+ w.write_u8(self.address_size())?;
+ // Segment selector size.
+ w.write_u8(0)?;
+ }
+
+ let header_length_offset = w.len();
+ w.write_udata(0, self.format().word_size())?;
+ let header_length_base = w.len();
+
+ w.write_u8(self.line_encoding.minimum_instruction_length)?;
+ if self.version() >= 4 {
+ w.write_u8(self.line_encoding.maximum_operations_per_instruction)?;
+ } else if self.line_encoding.maximum_operations_per_instruction != 1 {
+ return Err(Error::NeedVersion(4));
+ };
+ w.write_u8(if self.line_encoding.default_is_stmt {
+ 1
+ } else {
+ 0
+ })?;
+ w.write_u8(self.line_encoding.line_base as u8)?;
+ w.write_u8(self.line_encoding.line_range)?;
+ w.write_u8(OPCODE_BASE)?;
+ w.write(&[0, 1, 1, 1, 1, 0, 0, 0, 1, 0, 0, 1])?;
+
+ if self.version() <= 4 {
+ // The first directory is stored as DW_AT_comp_dir.
+ for dir in self.directories.iter().skip(1) {
+ dir.write(
+ w,
+ constants::DW_FORM_string,
+ self.encoding,
+ debug_line_str_offsets,
+ debug_str_offsets,
+ )?;
+ }
+ w.write_u8(0)?;
+
+ for ((file, dir), info) in self.files.iter() {
+ file.write(
+ w,
+ constants::DW_FORM_string,
+ self.encoding,
+ debug_line_str_offsets,
+ debug_str_offsets,
+ )?;
+ w.write_uleb128(dir.0 as u64)?;
+ w.write_uleb128(info.timestamp)?;
+ w.write_uleb128(info.size)?;
+ }
+ w.write_u8(0)?;
+ } else {
+ // Directory entry formats (only ever 1).
+ w.write_u8(1)?;
+ w.write_uleb128(u64::from(constants::DW_LNCT_path.0))?;
+ let dir_form = self.directories.get_index(0).unwrap().form();
+ w.write_uleb128(dir_form.0.into())?;
+
+ // Directory entries.
+ w.write_uleb128(self.directories.len() as u64)?;
+ for dir in self.directories.iter() {
+ dir.write(
+ w,
+ dir_form,
+ self.encoding,
+ debug_line_str_offsets,
+ debug_str_offsets,
+ )?;
+ }
+
+ // File name entry formats.
+ let count = 2
+ + if self.file_has_timestamp { 1 } else { 0 }
+ + if self.file_has_size { 1 } else { 0 }
+ + if self.file_has_md5 { 1 } else { 0 };
+ w.write_u8(count)?;
+ w.write_uleb128(u64::from(constants::DW_LNCT_path.0))?;
+ let file_form = self.comp_file.0.form();
+ w.write_uleb128(file_form.0.into())?;
+ w.write_uleb128(u64::from(constants::DW_LNCT_directory_index.0))?;
+ w.write_uleb128(constants::DW_FORM_udata.0.into())?;
+ if self.file_has_timestamp {
+ w.write_uleb128(u64::from(constants::DW_LNCT_timestamp.0))?;
+ w.write_uleb128(constants::DW_FORM_udata.0.into())?;
+ }
+ if self.file_has_size {
+ w.write_uleb128(u64::from(constants::DW_LNCT_size.0))?;
+ w.write_uleb128(constants::DW_FORM_udata.0.into())?;
+ }
+ if self.file_has_md5 {
+ w.write_uleb128(u64::from(constants::DW_LNCT_MD5.0))?;
+ w.write_uleb128(constants::DW_FORM_data16.0.into())?;
+ }
+
+ // File name entries.
+ w.write_uleb128(self.files.len() as u64 + 1)?;
+ let mut write_file = |file: &LineString, dir: DirectoryId, info: &FileInfo| {
+ file.write(
+ w,
+ file_form,
+ self.encoding,
+ debug_line_str_offsets,
+ debug_str_offsets,
+ )?;
+ w.write_uleb128(dir.0 as u64)?;
+ if self.file_has_timestamp {
+ w.write_uleb128(info.timestamp)?;
+ }
+ if self.file_has_size {
+ w.write_uleb128(info.size)?;
+ }
+ if self.file_has_md5 {
+ w.write(&info.md5)?;
+ }
+ Ok(())
+ };
+ write_file(&self.comp_file.0, DirectoryId(0), &self.comp_file.1)?;
+ for ((file, dir), info) in self.files.iter() {
+ write_file(file, *dir, info)?;
+ }
+ }
+
+ let header_length = (w.len() - header_length_base) as u64;
+ w.write_udata_at(
+ header_length_offset,
+ header_length,
+ self.format().word_size(),
+ )?;
+
+ for instruction in &self.instructions {
+ instruction.write(w, self.address_size())?;
+ }
+
+ let length = (w.len() - length_base) as u64;
+ w.write_initial_length_at(length_offset, length, self.format())?;
+
+ Ok(offset)
+ }
+}
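As a usage sketch (not part of the diff): a caller creates the program, registers files, and emits one sequence at a time, letting `generate_row` fold small line/address advances into special opcodes. The example below attaches the program to a `DwarfUnit` (from write/dwarf.rs above); it assumes `unit.line_program` is a public field, as the `DwarfUnit::new` docs suggest.

    use gimli::write::{Address, DwarfUnit, EndianVec, LineProgram, LineString, Sections};
    use gimli::{Encoding, Format, LineEncoding, LittleEndian};

    fn build_line_program() -> Result<(), gimli::write::Error> {
        let encoding = Encoding {
            format: Format::Dwarf32,
            version: 4,
            address_size: 8,
        };

        // comp_dir / comp_file mirror DW_AT_comp_dir / DW_AT_name of the unit DIE.
        let mut program = LineProgram::new(
            encoding,
            LineEncoding::default(),
            LineString::String(b"/src".to_vec()),
            LineString::String(b"main.c".to_vec()),
            None,
        );
        let dir = program.default_directory();
        let file = program.add_file(LineString::String(b"main.c".to_vec()), dir, None);

        // One sequence: two rows, then end past the last instruction byte.
        program.begin_sequence(Some(Address::Constant(0x1000)));
        program.row().file = file;
        program.row().line = 1;
        program.generate_row();
        program.row().address_offset = 4;
        program.row().line = 2;
        program.generate_row();
        program.end_sequence(8);

        // Attach it to a unit; `DwarfUnit::write` emits `.debug_line` along with the
        // other sections. (`line_program` as a public field of `Unit` is an assumption.)
        let mut dwarf = DwarfUnit::new(encoding);
        dwarf.unit.line_program = program;
        let mut sections = Sections::new(EndianVec::new(LittleEndian));
        dwarf.write(&mut sections)
    }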
+
+/// A row in the line number table that corresponds to a machine instruction.
+#[derive(Debug, Clone, Copy)]
+pub struct LineRow {
+ /// The offset of the instruction from the start address of the sequence.
+ pub address_offset: u64,
+ /// The index of an operation within a VLIW instruction.
+ ///
+ /// The index of the first operation is 0.
+ /// Set to 0 for non-VLIW instructions.
+ pub op_index: u64,
+
+ /// The source file corresponding to the instruction.
+ pub file: FileId,
+ /// The line number within the source file.
+ ///
+ /// Lines are numbered beginning at 1. Set to 0 if there is no source line.
+ pub line: u64,
+ /// The column number within the source line.
+ ///
+ /// Columns are numbered beginning at 1. Set to 0 for the "left edge" of the line.
+ pub column: u64,
+ /// An additional discriminator used to distinguish between source locations.
+ /// This value is assigned arbitrarily by the DWARF producer.
+ pub discriminator: u64,
+
+ /// Set to true if the instruction is a recommended breakpoint for a statement.
+ pub is_statement: bool,
+ /// Set to true if the instruction is the beginning of a basic block.
+ pub basic_block: bool,
+ /// Set to true if the instruction is a recommended breakpoint at the entry of a
+ /// function.
+ pub prologue_end: bool,
+ /// Set to true if the instruction is a recommended breakpoint prior to the exit of
+ /// a function.
+ pub epilogue_begin: bool,
+
+ /// The instruction set architecture of the instruction.
+ ///
+ /// Set to 0 for the default ISA. Other values are defined by the architecture ABI.
+ pub isa: u64,
+}
+
+impl LineRow {
+ /// Return the initial state as specified in the DWARF standard.
+ fn initial_state(line_encoding: LineEncoding) -> Self {
+ LineRow {
+ address_offset: 0,
+ op_index: 0,
+
+ file: FileId::initial_state(),
+ line: 1,
+ column: 0,
+ discriminator: 0,
+
+ is_statement: line_encoding.default_is_stmt,
+ basic_block: false,
+ prologue_end: false,
+ epilogue_begin: false,
+
+ isa: 0,
+ }
+ }
+}
+
+/// An instruction in a line number program.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+enum LineInstruction {
+ // Special opcodes
+ Special(u8),
+
+ // Standard opcodes
+ Copy,
+ AdvancePc(u64),
+ AdvanceLine(i64),
+ SetFile(FileId),
+ SetColumn(u64),
+ NegateStatement,
+ SetBasicBlock,
+ ConstAddPc,
+ // DW_LNS_fixed_advance_pc is not supported.
+ SetPrologueEnd,
+ SetEpilogueBegin,
+ SetIsa(u64),
+
+ // Extended opcodes
+ EndSequence,
+ // TODO: this doubles the size of this enum.
+ SetAddress(Address),
+ // DW_LNE_define_file is not supported.
+ SetDiscriminator(u64),
+}
+
+impl LineInstruction {
+ /// Write the line number instruction to the given section.
+ fn write<W: Writer>(self, w: &mut DebugLine<W>, address_size: u8) -> Result<()> {
+ use self::LineInstruction::*;
+ match self {
+ Special(val) => w.write_u8(val)?,
+ Copy => w.write_u8(constants::DW_LNS_copy.0)?,
+ AdvancePc(val) => {
+ w.write_u8(constants::DW_LNS_advance_pc.0)?;
+ w.write_uleb128(val)?;
+ }
+ AdvanceLine(val) => {
+ w.write_u8(constants::DW_LNS_advance_line.0)?;
+ w.write_sleb128(val)?;
+ }
+ SetFile(val) => {
+ w.write_u8(constants::DW_LNS_set_file.0)?;
+ w.write_uleb128(val.raw())?;
+ }
+ SetColumn(val) => {
+ w.write_u8(constants::DW_LNS_set_column.0)?;
+ w.write_uleb128(val)?;
+ }
+ NegateStatement => w.write_u8(constants::DW_LNS_negate_stmt.0)?,
+ SetBasicBlock => w.write_u8(constants::DW_LNS_set_basic_block.0)?,
+ ConstAddPc => w.write_u8(constants::DW_LNS_const_add_pc.0)?,
+ SetPrologueEnd => w.write_u8(constants::DW_LNS_set_prologue_end.0)?,
+ SetEpilogueBegin => w.write_u8(constants::DW_LNS_set_epilogue_begin.0)?,
+ SetIsa(val) => {
+ w.write_u8(constants::DW_LNS_set_isa.0)?;
+ w.write_uleb128(val)?;
+ }
+ EndSequence => {
+ w.write_u8(0)?;
+ w.write_uleb128(1)?;
+ w.write_u8(constants::DW_LNE_end_sequence.0)?;
+ }
+ SetAddress(address) => {
+ w.write_u8(0)?;
+ w.write_uleb128(1 + u64::from(address_size))?;
+ w.write_u8(constants::DW_LNE_set_address.0)?;
+ w.write_address(address, address_size)?;
+ }
+ SetDiscriminator(val) => {
+ let mut bytes = [0u8; 10];
+ // bytes is long enough so this will never fail.
+ let len = leb128::write::unsigned(&mut { &mut bytes[..] }, val).unwrap();
+ w.write_u8(0)?;
+ w.write_uleb128(1 + len as u64)?;
+ w.write_u8(constants::DW_LNE_set_discriminator.0)?;
+ w.write(&bytes[..len])?;
+ }
+ }
+ Ok(())
+ }
+}
+
+/// A string value for use in defining paths in line number programs.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum LineString {
+ /// A slice of bytes representing a string. Must not include null bytes.
+ /// Not guaranteed to be UTF-8 or any other particular encoding.
+ String(Vec<u8>),
+
+ /// A reference to a string in the `.debug_str` section.
+ StringRef(StringId),
+
+ /// A reference to a string in the `.debug_line_str` section.
+ LineStringRef(LineStringId),
+}
+
+impl LineString {
+ /// Create a `LineString` using the normal form for the given encoding.
+ pub fn new<T>(val: T, encoding: Encoding, line_strings: &mut LineStringTable) -> Self
+ where
+ T: Into<Vec<u8>>,
+ {
+ let val = val.into();
+ if encoding.version <= 4 {
+ LineString::String(val)
+ } else {
+ LineString::LineStringRef(line_strings.add(val))
+ }
+ }
+
+ fn form(&self) -> constants::DwForm {
+ match *self {
+ LineString::String(..) => constants::DW_FORM_string,
+ LineString::StringRef(..) => constants::DW_FORM_strp,
+ LineString::LineStringRef(..) => constants::DW_FORM_line_strp,
+ }
+ }
+
+ fn write<W: Writer>(
+ &self,
+ w: &mut DebugLine<W>,
+ form: constants::DwForm,
+ encoding: Encoding,
+ debug_line_str_offsets: &DebugLineStrOffsets,
+ debug_str_offsets: &DebugStrOffsets,
+ ) -> Result<()> {
+ if form != self.form() {
+ return Err(Error::LineStringFormMismatch);
+ }
+
+ match *self {
+ LineString::String(ref val) => {
+ if encoding.version <= 4 {
+ debug_assert!(!val.is_empty());
+ }
+ w.write(val)?;
+ w.write_u8(0)?;
+ }
+ LineString::StringRef(val) => {
+ if encoding.version < 5 {
+ return Err(Error::NeedVersion(5));
+ }
+ w.write_offset(
+ debug_str_offsets.get(val).0,
+ SectionId::DebugStr,
+ encoding.format.word_size(),
+ )?;
+ }
+ LineString::LineStringRef(val) => {
+ if encoding.version < 5 {
+ return Err(Error::NeedVersion(5));
+ }
+ w.write_offset(
+ debug_line_str_offsets.get(val).0,
+ SectionId::DebugLineStr,
+ encoding.format.word_size(),
+ )?;
+ }
+ }
+ Ok(())
+ }
+}
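A small aside (not from the diff): `LineString::new` is the version-aware constructor, storing paths inline for DWARF <= 4 and interning them in `.debug_line_str` for DWARF 5, matching the forms handled by `write` above.

    use gimli::write::{LineString, LineStringTable};
    use gimli::{Encoding, Format};

    fn choose_line_string_form() {
        let mut line_strings = LineStringTable::default();

        // DWARF 4: the path is stored inline (DW_FORM_string).
        let v4 = Encoding {
            format: Format::Dwarf32,
            version: 4,
            address_size: 8,
        };
        let inline = LineString::new("util.c", v4, &mut line_strings);
        assert!(matches!(inline, LineString::String(_)));

        // DWARF 5: the path is interned and referenced (DW_FORM_line_strp).
        let v5 = Encoding {
            format: Format::Dwarf32,
            version: 5,
            address_size: 8,
        };
        let referenced = LineString::new("util.c", v5, &mut line_strings);
        assert!(matches!(referenced, LineString::LineStringRef(_)));
    }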
+
+/// An identifier for a directory in a `LineProgram`.
+///
+/// Defaults to the working directory of the compilation unit.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct DirectoryId(usize);
+
+// Force FileId access via the methods.
+mod id {
+ /// An identifier for a file in a `LineProgram`.
+ #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+ pub struct FileId(usize);
+
+ impl FileId {
+ /// Create a FileId given an index into `LineProgram::files`.
+ pub(crate) fn new(index: usize) -> Self {
+ FileId(index + 1)
+ }
+
+ /// The index of the file in `LineProgram::files`.
+ pub(super) fn index(self) -> Option<usize> {
+ if self.0 == 0 {
+ None
+ } else {
+ Some(self.0 - 1)
+ }
+ }
+
+ /// The initial state of the file register.
+ pub(super) fn initial_state() -> Self {
+ FileId(1)
+ }
+
+ /// The raw value used when writing.
+ pub(crate) fn raw(self) -> u64 {
+ self.0 as u64
+ }
+
+ /// The id for file index 0 in DWARF version 5.
+ /// Only used when converting.
+ // Used for tests only.
+ #[allow(unused)]
+ pub(super) fn zero() -> Self {
+ FileId(0)
+ }
+ }
+}
+pub use self::id::*;
+
+/// Extra information for a file in a `LineProgram`.
+#[derive(Debug, Default, Clone, Copy, PartialEq, Eq)]
+pub struct FileInfo {
+ /// The implementation defined timestamp of the last modification of the file,
+ /// or 0 if not available.
+ pub timestamp: u64,
+
+ /// The size of the file in bytes, or 0 if not available.
+ pub size: u64,
+
+ /// A 16-byte MD5 digest of the file contents.
+ ///
+ /// Only used if version >= 5 and `LineProgram::file_has_md5` is `true`.
+ pub md5: [u8; 16],
+}
+
+define_section!(
+ DebugLine,
+ DebugLineOffset,
+ "A writable `.debug_line` section."
+);
+
+#[cfg(feature = "read")]
+mod convert {
+ use super::*;
+ use crate::read::{self, Reader};
+ use crate::write::{self, ConvertError, ConvertResult};
+
+ impl LineProgram {
+ /// Create a line number program by reading the data from the given program.
+ ///
+ /// Return the program and a mapping from file index to `FileId`.
+ pub fn from<R: Reader<Offset = usize>>(
+ mut from_program: read::IncompleteLineProgram<R>,
+ dwarf: &read::Dwarf<R>,
+ line_strings: &mut write::LineStringTable,
+ strings: &mut write::StringTable,
+ convert_address: &dyn Fn(u64) -> Option<Address>,
+ ) -> ConvertResult<(LineProgram, Vec<FileId>)> {
+ // Create mappings in case the source has duplicate files or directories.
+ let mut dirs = Vec::new();
+ let mut files = Vec::new();
+
+ let mut program = {
+ let from_header = from_program.header();
+ let encoding = from_header.encoding();
+
+ let comp_dir = match from_header.directory(0) {
+ Some(comp_dir) => LineString::from(comp_dir, dwarf, line_strings, strings)?,
+ None => LineString::new(&[][..], encoding, line_strings),
+ };
+
+ let (comp_name, comp_file_info) = match from_header.file(0) {
+ Some(comp_file) => {
+ if comp_file.directory_index() != 0 {
+ return Err(ConvertError::InvalidDirectoryIndex);
+ }
+ (
+ LineString::from(comp_file.path_name(), dwarf, line_strings, strings)?,
+ Some(FileInfo {
+ timestamp: comp_file.timestamp(),
+ size: comp_file.size(),
+ md5: *comp_file.md5(),
+ }),
+ )
+ }
+ None => (LineString::new(&[][..], encoding, line_strings), None),
+ };
+
+ if from_header.line_base() > 0 {
+ return Err(ConvertError::InvalidLineBase);
+ }
+ let mut program = LineProgram::new(
+ encoding,
+ from_header.line_encoding(),
+ comp_dir,
+ comp_name,
+ comp_file_info,
+ );
+
+ let file_skip;
+ if from_header.version() <= 4 {
+ // The first directory is implicit.
+ dirs.push(DirectoryId(0));
+ // A file index of 0 is invalid for version <= 4, but putting
+ // something there makes the indexing easier.
+ file_skip = 0;
+ files.push(FileId::zero());
+ } else {
+ // We don't add the first file to `files`, but still allow
+ // it to be referenced from converted instructions.
+ file_skip = 1;
+ files.push(FileId::zero());
+ }
+
+ for from_dir in from_header.include_directories() {
+ let from_dir =
+ LineString::from(from_dir.clone(), dwarf, line_strings, strings)?;
+ dirs.push(program.add_directory(from_dir));
+ }
+
+ program.file_has_timestamp = from_header.file_has_timestamp();
+ program.file_has_size = from_header.file_has_size();
+ program.file_has_md5 = from_header.file_has_md5();
+ for from_file in from_header.file_names().iter().skip(file_skip) {
+ let from_name =
+ LineString::from(from_file.path_name(), dwarf, line_strings, strings)?;
+ let from_dir = from_file.directory_index();
+ if from_dir >= dirs.len() as u64 {
+ return Err(ConvertError::InvalidDirectoryIndex);
+ }
+ let from_dir = dirs[from_dir as usize];
+ let from_info = Some(FileInfo {
+ timestamp: from_file.timestamp(),
+ size: from_file.size(),
+ md5: *from_file.md5(),
+ });
+ files.push(program.add_file(from_name, from_dir, from_info));
+ }
+
+ program
+ };
+
+ // We can't use `from_program.rows()` because that wouldn't let
+ // us preserve address relocations.
+ let mut from_row = read::LineRow::new(from_program.header());
+ let mut instructions = from_program.header().instructions();
+ let mut address = None;
+ while let Some(instruction) = instructions.next_instruction(from_program.header())? {
+ match instruction {
+ read::LineInstruction::SetAddress(val) => {
+ if program.in_sequence() {
+ return Err(ConvertError::UnsupportedLineInstruction);
+ }
+ match convert_address(val) {
+ Some(val) => address = Some(val),
+ None => return Err(ConvertError::InvalidAddress),
+ }
+ from_row.execute(read::LineInstruction::SetAddress(0), &mut from_program);
+ }
+ read::LineInstruction::DefineFile(_) => {
+ return Err(ConvertError::UnsupportedLineInstruction);
+ }
+ _ => {
+ if from_row.execute(instruction, &mut from_program) {
+ if !program.in_sequence() {
+ program.begin_sequence(address);
+ address = None;
+ }
+ if from_row.end_sequence() {
+ program.end_sequence(from_row.address());
+ } else {
+ program.row().address_offset = from_row.address();
+ program.row().op_index = from_row.op_index();
+ program.row().file = {
+ let file = from_row.file_index();
+ if file >= files.len() as u64 {
+ return Err(ConvertError::InvalidFileIndex);
+ }
+ if file == 0 && program.version() <= 4 {
+ return Err(ConvertError::InvalidFileIndex);
+ }
+ files[file as usize]
+ };
+ program.row().line = match from_row.line() {
+ Some(line) => line.get(),
+ None => 0,
+ };
+ program.row().column = match from_row.column() {
+ read::ColumnType::LeftEdge => 0,
+ read::ColumnType::Column(val) => val.get(),
+ };
+ program.row().discriminator = from_row.discriminator();
+ program.row().is_statement = from_row.is_stmt();
+ program.row().basic_block = from_row.basic_block();
+ program.row().prologue_end = from_row.prologue_end();
+ program.row().epilogue_begin = from_row.epilogue_begin();
+ program.row().isa = from_row.isa();
+ program.generate_row();
+ }
+ from_row.reset(from_program.header());
+ }
+ }
+ };
+ }
+ Ok((program, files))
+ }
+ }
+
+ impl LineString {
+ fn from<R: Reader<Offset = usize>>(
+ from_attr: read::AttributeValue<R>,
+ dwarf: &read::Dwarf<R>,
+ line_strings: &mut write::LineStringTable,
+ strings: &mut write::StringTable,
+ ) -> ConvertResult<LineString> {
+ Ok(match from_attr {
+ read::AttributeValue::String(r) => LineString::String(r.to_slice()?.to_vec()),
+ read::AttributeValue::DebugStrRef(offset) => {
+ let r = dwarf.debug_str.get_str(offset)?;
+ let id = strings.add(r.to_slice()?);
+ LineString::StringRef(id)
+ }
+ read::AttributeValue::DebugLineStrRef(offset) => {
+ let r = dwarf.debug_line_str.get_str(offset)?;
+ let id = line_strings.add(r.to_slice()?);
+ LineString::LineStringRef(id)
+ }
+ _ => return Err(ConvertError::UnsupportedLineStringForm),
+ })
+ }
+ }
+}
+
+#[cfg(test)]
+#[cfg(feature = "read")]
+mod tests {
+ use super::*;
+ use crate::read;
+ use crate::write::{DebugLineStr, DebugStr, EndianVec, StringTable};
+ use crate::LittleEndian;
+
+ #[test]
+ fn test_line_program_table() {
+ let dir1 = LineString::String(b"dir1".to_vec());
+ let file1 = LineString::String(b"file1".to_vec());
+ let dir2 = LineString::String(b"dir2".to_vec());
+ let file2 = LineString::String(b"file2".to_vec());
+
+ let mut programs = Vec::new();
+ for &version in &[2, 3, 4, 5] {
+ for &address_size in &[4, 8] {
+ for &format in &[Format::Dwarf32, Format::Dwarf64] {
+ let encoding = Encoding {
+ format,
+ version,
+ address_size,
+ };
+ let mut program = LineProgram::new(
+ encoding,
+ LineEncoding::default(),
+ dir1.clone(),
+ file1.clone(),
+ None,
+ );
+
+ {
+ assert_eq!(&dir1, program.get_directory(program.default_directory()));
+ program.file_has_timestamp = true;
+ program.file_has_size = true;
+ if encoding.version >= 5 {
+ program.file_has_md5 = true;
+ }
+
+ let dir_id = program.add_directory(dir2.clone());
+ assert_eq!(&dir2, program.get_directory(dir_id));
+ assert_eq!(dir_id, program.add_directory(dir2.clone()));
+
+ let file_info = FileInfo {
+ timestamp: 1,
+ size: 2,
+ md5: if encoding.version >= 5 {
+ [3; 16]
+ } else {
+ [0; 16]
+ },
+ };
+ let file_id = program.add_file(file2.clone(), dir_id, Some(file_info));
+ assert_eq!((&file2, dir_id), program.get_file(file_id));
+ assert_eq!(file_info, *program.get_file_info(file_id));
+
+ program.get_file_info_mut(file_id).size = 3;
+ assert_ne!(file_info, *program.get_file_info(file_id));
+ assert_eq!(file_id, program.add_file(file2.clone(), dir_id, None));
+ assert_ne!(file_info, *program.get_file_info(file_id));
+ assert_eq!(
+ file_id,
+ program.add_file(file2.clone(), dir_id, Some(file_info))
+ );
+ assert_eq!(file_info, *program.get_file_info(file_id));
+
+ programs.push((program, file_id, encoding));
+ }
+ }
+ }
+ }
+
+ let debug_line_str_offsets = DebugLineStrOffsets::none();
+ let debug_str_offsets = DebugStrOffsets::none();
+ let mut debug_line = DebugLine::from(EndianVec::new(LittleEndian));
+ let mut debug_line_offsets = Vec::new();
+ for (program, _, encoding) in &programs {
+ debug_line_offsets.push(
+ program
+ .write(
+ &mut debug_line,
+ *encoding,
+ &debug_line_str_offsets,
+ &debug_str_offsets,
+ )
+ .unwrap(),
+ );
+ }
+
+ let read_debug_line = read::DebugLine::new(debug_line.slice(), LittleEndian);
+
+ let convert_address = &|address| Some(Address::Constant(address));
+ for ((program, file_id, encoding), offset) in programs.iter().zip(debug_line_offsets.iter())
+ {
+ let read_program = read_debug_line
+ .program(
+ *offset,
+ encoding.address_size,
+ Some(read::EndianSlice::new(b"dir1", LittleEndian)),
+ Some(read::EndianSlice::new(b"file1", LittleEndian)),
+ )
+ .unwrap();
+
+ let dwarf = read::Dwarf::default();
+ let mut convert_line_strings = LineStringTable::default();
+ let mut convert_strings = StringTable::default();
+ let (convert_program, convert_files) = LineProgram::from(
+ read_program,
+ &dwarf,
+ &mut convert_line_strings,
+ &mut convert_strings,
+ convert_address,
+ )
+ .unwrap();
+ assert_eq!(convert_program.version(), program.version());
+ assert_eq!(convert_program.address_size(), program.address_size());
+ assert_eq!(convert_program.format(), program.format());
+
+ let convert_file_id = convert_files[file_id.raw() as usize];
+ let (file, dir) = program.get_file(*file_id);
+ let (convert_file, convert_dir) = convert_program.get_file(convert_file_id);
+ assert_eq!(file, convert_file);
+ assert_eq!(
+ program.get_directory(dir),
+ convert_program.get_directory(convert_dir)
+ );
+ assert_eq!(
+ program.get_file_info(*file_id),
+ convert_program.get_file_info(convert_file_id)
+ );
+ }
+ }
+
+ #[test]
+ fn test_line_row() {
+ let dir1 = &b"dir1"[..];
+ let file1 = &b"file1"[..];
+ let file2 = &b"file2"[..];
+ let convert_address = &|address| Some(Address::Constant(address));
+
+ let debug_line_str_offsets = DebugLineStrOffsets::none();
+ let debug_str_offsets = DebugStrOffsets::none();
+
+ for &version in &[2, 3, 4, 5] {
+ for &address_size in &[4, 8] {
+ for &format in &[Format::Dwarf32, Format::Dwarf64] {
+ let encoding = Encoding {
+ format,
+ version,
+ address_size,
+ };
+ let line_base = -5;
+ let line_range = 14;
+ let neg_line_base = (-line_base) as u8;
+ let mut program = LineProgram::new(
+ encoding,
+ LineEncoding {
+ line_base,
+ line_range,
+ ..Default::default()
+ },
+ LineString::String(dir1.to_vec()),
+ LineString::String(file1.to_vec()),
+ None,
+ );
+ let dir_id = program.default_directory();
+ program.add_file(LineString::String(file1.to_vec()), dir_id, None);
+ let file_id =
+ program.add_file(LineString::String(file2.to_vec()), dir_id, None);
+
+ // Test sequences.
+ {
+ let mut program = program.clone();
+ let address = Address::Constant(0x12);
+ program.begin_sequence(Some(address));
+ assert_eq!(
+ program.instructions,
+ vec![LineInstruction::SetAddress(address)]
+ );
+ }
+
+ {
+ let mut program = program.clone();
+ program.begin_sequence(None);
+ assert_eq!(program.instructions, Vec::new());
+ }
+
+ {
+ let mut program = program.clone();
+ program.begin_sequence(None);
+ program.end_sequence(0x1234);
+ assert_eq!(
+ program.instructions,
+ vec![
+ LineInstruction::AdvancePc(0x1234),
+ LineInstruction::EndSequence
+ ]
+ );
+ }
+
+ // Create a base program.
+ program.begin_sequence(None);
+ program.row.line = 0x1000;
+ program.generate_row();
+ let base_row = program.row;
+ let base_instructions = program.instructions.clone();
+
+ // Create test cases.
+ let mut tests = Vec::new();
+
+ let row = base_row;
+ tests.push((row, vec![LineInstruction::Copy]));
+
+ let mut row = base_row;
+ row.line -= u64::from(neg_line_base);
+ tests.push((row, vec![LineInstruction::Special(OPCODE_BASE)]));
+
+ let mut row = base_row;
+ row.line += u64::from(line_range) - 1;
+ row.line -= u64::from(neg_line_base);
+ tests.push((
+ row,
+ vec![LineInstruction::Special(OPCODE_BASE + line_range - 1)],
+ ));
+
+ let mut row = base_row;
+ row.line += u64::from(line_range);
+ row.line -= u64::from(neg_line_base);
+ tests.push((
+ row,
+ vec![
+ LineInstruction::AdvanceLine(i64::from(line_range - neg_line_base)),
+ LineInstruction::Copy,
+ ],
+ ));
+
+ let mut row = base_row;
+ row.address_offset = 1;
+ row.line -= u64::from(neg_line_base);
+ tests.push((
+ row,
+ vec![LineInstruction::Special(OPCODE_BASE + line_range)],
+ ));
+
+ let op_range = (255 - OPCODE_BASE) / line_range;
+ let mut row = base_row;
+ row.address_offset = u64::from(op_range);
+ row.line -= u64::from(neg_line_base);
+ tests.push((
+ row,
+ vec![LineInstruction::Special(
+ OPCODE_BASE + op_range * line_range,
+ )],
+ ));
+
+ let mut row = base_row;
+ row.address_offset = u64::from(op_range);
+ row.line += u64::from(255 - OPCODE_BASE - op_range * line_range);
+ row.line -= u64::from(neg_line_base);
+ tests.push((row, vec![LineInstruction::Special(255)]));
+
+ let mut row = base_row;
+ row.address_offset = u64::from(op_range);
+ row.line += u64::from(255 - OPCODE_BASE - op_range * line_range) + 1;
+ row.line -= u64::from(neg_line_base);
+ tests.push((
+ row,
+ vec![LineInstruction::ConstAddPc, LineInstruction::Copy],
+ ));
+
+ let mut row = base_row;
+ row.address_offset = u64::from(op_range);
+ row.line += u64::from(255 - OPCODE_BASE - op_range * line_range) + 2;
+ row.line -= u64::from(neg_line_base);
+ tests.push((
+ row,
+ vec![
+ LineInstruction::ConstAddPc,
+ LineInstruction::Special(OPCODE_BASE + 6),
+ ],
+ ));
+
+ let mut row = base_row;
+ row.address_offset = u64::from(op_range) * 2;
+ row.line += u64::from(255 - OPCODE_BASE - op_range * line_range);
+ row.line -= u64::from(neg_line_base);
+ tests.push((
+ row,
+ vec![LineInstruction::ConstAddPc, LineInstruction::Special(255)],
+ ));
+
+ let mut row = base_row;
+ row.address_offset = u64::from(op_range) * 2;
+ row.line += u64::from(255 - OPCODE_BASE - op_range * line_range) + 1;
+ row.line -= u64::from(neg_line_base);
+ tests.push((
+ row,
+ vec![
+ LineInstruction::AdvancePc(row.address_offset),
+ LineInstruction::Copy,
+ ],
+ ));
+
+ let mut row = base_row;
+ row.address_offset = u64::from(op_range) * 2;
+ row.line += u64::from(255 - OPCODE_BASE - op_range * line_range) + 2;
+ row.line -= u64::from(neg_line_base);
+ tests.push((
+ row,
+ vec![
+ LineInstruction::AdvancePc(row.address_offset),
+ LineInstruction::Special(OPCODE_BASE + 6),
+ ],
+ ));
+
+ let mut row = base_row;
+ row.address_offset = 0x1234;
+ tests.push((
+ row,
+ vec![LineInstruction::AdvancePc(0x1234), LineInstruction::Copy],
+ ));
+
+ let mut row = base_row;
+ row.line += 0x1234;
+ tests.push((
+ row,
+ vec![LineInstruction::AdvanceLine(0x1234), LineInstruction::Copy],
+ ));
+
+ let mut row = base_row;
+ row.file = file_id;
+ tests.push((
+ row,
+ vec![LineInstruction::SetFile(file_id), LineInstruction::Copy],
+ ));
+
+ let mut row = base_row;
+ row.column = 0x1234;
+ tests.push((
+ row,
+ vec![LineInstruction::SetColumn(0x1234), LineInstruction::Copy],
+ ));
+
+ let mut row = base_row;
+ row.discriminator = 0x1234;
+ tests.push((
+ row,
+ vec![
+ LineInstruction::SetDiscriminator(0x1234),
+ LineInstruction::Copy,
+ ],
+ ));
+
+ let mut row = base_row;
+ row.is_statement = !row.is_statement;
+ tests.push((
+ row,
+ vec![LineInstruction::NegateStatement, LineInstruction::Copy],
+ ));
+
+ let mut row = base_row;
+ row.basic_block = true;
+ tests.push((
+ row,
+ vec![LineInstruction::SetBasicBlock, LineInstruction::Copy],
+ ));
+
+ let mut row = base_row;
+ row.prologue_end = true;
+ tests.push((
+ row,
+ vec![LineInstruction::SetPrologueEnd, LineInstruction::Copy],
+ ));
+
+ let mut row = base_row;
+ row.epilogue_begin = true;
+ tests.push((
+ row,
+ vec![LineInstruction::SetEpilogueBegin, LineInstruction::Copy],
+ ));
+
+ let mut row = base_row;
+ row.isa = 0x1234;
+ tests.push((
+ row,
+ vec![LineInstruction::SetIsa(0x1234), LineInstruction::Copy],
+ ));
+
+ for test in tests {
+ // Test generate_row().
+ let mut program = program.clone();
+ program.row = test.0;
+ program.generate_row();
+ assert_eq!(
+ &program.instructions[base_instructions.len()..],
+ &test.1[..]
+ );
+
+ // Test LineProgram::from().
+ let mut debug_line = DebugLine::from(EndianVec::new(LittleEndian));
+ let debug_line_offset = program
+ .write(
+ &mut debug_line,
+ encoding,
+ &debug_line_str_offsets,
+ &debug_str_offsets,
+ )
+ .unwrap();
+
+ let read_debug_line =
+ read::DebugLine::new(debug_line.slice(), LittleEndian);
+ let read_program = read_debug_line
+ .program(
+ debug_line_offset,
+ address_size,
+ Some(read::EndianSlice::new(dir1, LittleEndian)),
+ Some(read::EndianSlice::new(file1, LittleEndian)),
+ )
+ .unwrap();
+
+ let dwarf = read::Dwarf::default();
+ let mut convert_line_strings = LineStringTable::default();
+ let mut convert_strings = StringTable::default();
+ let (convert_program, _convert_files) = LineProgram::from(
+ read_program,
+ &dwarf,
+ &mut convert_line_strings,
+ &mut convert_strings,
+ convert_address,
+ )
+ .unwrap();
+ assert_eq!(
+ &convert_program.instructions[base_instructions.len()..],
+ &test.1[..]
+ );
+ }
+ }
+ }
+ }
+ }
+
+ #[test]
+ fn test_line_instruction() {
+ let dir1 = &b"dir1"[..];
+ let file1 = &b"file1"[..];
+
+ let debug_line_str_offsets = DebugLineStrOffsets::none();
+ let debug_str_offsets = DebugStrOffsets::none();
+
+ for &version in &[2, 3, 4, 5] {
+ for &address_size in &[4, 8] {
+ for &format in &[Format::Dwarf32, Format::Dwarf64] {
+ let encoding = Encoding {
+ format,
+ version,
+ address_size,
+ };
+ let mut program = LineProgram::new(
+ encoding,
+ LineEncoding::default(),
+ LineString::String(dir1.to_vec()),
+ LineString::String(file1.to_vec()),
+ None,
+ );
+ let dir_id = program.default_directory();
+ let file_id =
+ program.add_file(LineString::String(file1.to_vec()), dir_id, None);
+
+ for &(ref inst, ref expect_inst) in &[
+ (
+ LineInstruction::Special(OPCODE_BASE),
+ read::LineInstruction::Special(OPCODE_BASE),
+ ),
+ (
+ LineInstruction::Special(255),
+ read::LineInstruction::Special(255),
+ ),
+ (LineInstruction::Copy, read::LineInstruction::Copy),
+ (
+ LineInstruction::AdvancePc(0x12),
+ read::LineInstruction::AdvancePc(0x12),
+ ),
+ (
+ LineInstruction::AdvanceLine(0x12),
+ read::LineInstruction::AdvanceLine(0x12),
+ ),
+ (
+ LineInstruction::SetFile(file_id),
+ read::LineInstruction::SetFile(file_id.raw()),
+ ),
+ (
+ LineInstruction::SetColumn(0x12),
+ read::LineInstruction::SetColumn(0x12),
+ ),
+ (
+ LineInstruction::NegateStatement,
+ read::LineInstruction::NegateStatement,
+ ),
+ (
+ LineInstruction::SetBasicBlock,
+ read::LineInstruction::SetBasicBlock,
+ ),
+ (
+ LineInstruction::ConstAddPc,
+ read::LineInstruction::ConstAddPc,
+ ),
+ (
+ LineInstruction::SetPrologueEnd,
+ read::LineInstruction::SetPrologueEnd,
+ ),
+ (
+ LineInstruction::SetEpilogueBegin,
+ read::LineInstruction::SetEpilogueBegin,
+ ),
+ (
+ LineInstruction::SetIsa(0x12),
+ read::LineInstruction::SetIsa(0x12),
+ ),
+ (
+ LineInstruction::EndSequence,
+ read::LineInstruction::EndSequence,
+ ),
+ (
+ LineInstruction::SetAddress(Address::Constant(0x12)),
+ read::LineInstruction::SetAddress(0x12),
+ ),
+ (
+ LineInstruction::SetDiscriminator(0x12),
+ read::LineInstruction::SetDiscriminator(0x12),
+ ),
+ ][..]
+ {
+ let mut program = program.clone();
+ program.instructions.push(*inst);
+
+ let mut debug_line = DebugLine::from(EndianVec::new(LittleEndian));
+ let debug_line_offset = program
+ .write(
+ &mut debug_line,
+ encoding,
+ &debug_line_str_offsets,
+ &debug_str_offsets,
+ )
+ .unwrap();
+
+ let read_debug_line =
+ read::DebugLine::new(debug_line.slice(), LittleEndian);
+ let read_program = read_debug_line
+ .program(
+ debug_line_offset,
+ address_size,
+ Some(read::EndianSlice::new(dir1, LittleEndian)),
+ Some(read::EndianSlice::new(file1, LittleEndian)),
+ )
+ .unwrap();
+ let read_header = read_program.header();
+ let mut read_insts = read_header.instructions();
+ assert_eq!(
+ *expect_inst,
+ read_insts.next_instruction(read_header).unwrap().unwrap()
+ );
+ assert_eq!(None, read_insts.next_instruction(read_header).unwrap());
+ }
+ }
+ }
+ }
+ }
+
+ // Test that the address/line advance is correct. We don't test for optimality.
+ #[test]
+ #[allow(clippy::useless_vec)]
+ fn test_advance() {
+ let encoding = Encoding {
+ format: Format::Dwarf32,
+ version: 4,
+ address_size: 8,
+ };
+
+ let dir1 = &b"dir1"[..];
+ let file1 = &b"file1"[..];
+
+ let addresses = 0..50;
+ let lines = -10..25i64;
+
+ let debug_line_str_offsets = DebugLineStrOffsets::none();
+ let debug_str_offsets = DebugStrOffsets::none();
+
+ for minimum_instruction_length in vec![1, 4] {
+ for maximum_operations_per_instruction in vec![1, 3] {
+ for line_base in vec![-5, 0] {
+ for line_range in vec![10, 20] {
+ let line_encoding = LineEncoding {
+ minimum_instruction_length,
+ maximum_operations_per_instruction,
+ line_base,
+ line_range,
+ default_is_stmt: true,
+ };
+ let mut program = LineProgram::new(
+ encoding,
+ line_encoding,
+ LineString::String(dir1.to_vec()),
+ LineString::String(file1.to_vec()),
+ None,
+ );
+ for address_advance in addresses.clone() {
+ program.begin_sequence(Some(Address::Constant(0x1000)));
+ program.row().line = 0x10000;
+ program.generate_row();
+ for line_advance in lines.clone() {
+ {
+ let row = program.row();
+ row.address_offset +=
+ address_advance * u64::from(minimum_instruction_length);
+ row.line = row.line.wrapping_add(line_advance as u64);
+ }
+ program.generate_row();
+ }
+ let address_offset = program.row().address_offset
+ + u64::from(minimum_instruction_length);
+ program.end_sequence(address_offset);
+ }
+
+ let mut debug_line = DebugLine::from(EndianVec::new(LittleEndian));
+ let debug_line_offset = program
+ .write(
+ &mut debug_line,
+ encoding,
+ &debug_line_str_offsets,
+ &debug_str_offsets,
+ )
+ .unwrap();
+
+ let read_debug_line =
+ read::DebugLine::new(debug_line.slice(), LittleEndian);
+ let read_program = read_debug_line
+ .program(
+ debug_line_offset,
+ 8,
+ Some(read::EndianSlice::new(dir1, LittleEndian)),
+ Some(read::EndianSlice::new(file1, LittleEndian)),
+ )
+ .unwrap();
+
+ let mut rows = read_program.rows();
+ for address_advance in addresses.clone() {
+ let mut address;
+ let mut line;
+ {
+ let row = rows.next_row().unwrap().unwrap().1;
+ address = row.address();
+ line = row.line().unwrap().get();
+ }
+ assert_eq!(address, 0x1000);
+ assert_eq!(line, 0x10000);
+ for line_advance in lines.clone() {
+ let row = rows.next_row().unwrap().unwrap().1;
+ assert_eq!(
+ row.address() - address,
+ address_advance * u64::from(minimum_instruction_length)
+ );
+ assert_eq!(
+ (row.line().unwrap().get() as i64) - (line as i64),
+ line_advance
+ );
+ address = row.address();
+ line = row.line().unwrap().get();
+ }
+ let row = rows.next_row().unwrap().unwrap().1;
+ assert!(row.end_sequence());
+ }
+ }
+ }
+ }
+ }
+ }
+
+ #[test]
+ fn test_line_string() {
+ let version = 5;
+
+ let file = b"file1";
+
+ let mut strings = StringTable::default();
+ let string_id = strings.add("file2");
+ let mut debug_str = DebugStr::from(EndianVec::new(LittleEndian));
+ let debug_str_offsets = strings.write(&mut debug_str).unwrap();
+
+ let mut line_strings = LineStringTable::default();
+ let line_string_id = line_strings.add("file3");
+ let mut debug_line_str = DebugLineStr::from(EndianVec::new(LittleEndian));
+ let debug_line_str_offsets = line_strings.write(&mut debug_line_str).unwrap();
+
+ for &address_size in &[4, 8] {
+ for &format in &[Format::Dwarf32, Format::Dwarf64] {
+ let encoding = Encoding {
+ format,
+ version,
+ address_size,
+ };
+
+ for (file, expect_file) in vec![
+ (
+ LineString::String(file.to_vec()),
+ read::AttributeValue::String(read::EndianSlice::new(file, LittleEndian)),
+ ),
+ (
+ LineString::StringRef(string_id),
+ read::AttributeValue::DebugStrRef(debug_str_offsets.get(string_id)),
+ ),
+ (
+ LineString::LineStringRef(line_string_id),
+ read::AttributeValue::DebugLineStrRef(
+ debug_line_str_offsets.get(line_string_id),
+ ),
+ ),
+ ] {
+ let program = LineProgram::new(
+ encoding,
+ LineEncoding::default(),
+ LineString::String(b"dir".to_vec()),
+ file,
+ None,
+ );
+
+ let mut debug_line = DebugLine::from(EndianVec::new(LittleEndian));
+ let debug_line_offset = program
+ .write(
+ &mut debug_line,
+ encoding,
+ &debug_line_str_offsets,
+ &debug_str_offsets,
+ )
+ .unwrap();
+
+ let read_debug_line = read::DebugLine::new(debug_line.slice(), LittleEndian);
+ let read_program = read_debug_line
+ .program(debug_line_offset, address_size, None, None)
+ .unwrap();
+ let read_header = read_program.header();
+ assert_eq!(read_header.file(0).unwrap().path_name(), expect_file);
+ }
+ }
+ }
+ }
+
+ #[test]
+ fn test_missing_comp_dir() {
+ let debug_line_str_offsets = DebugLineStrOffsets::none();
+ let debug_str_offsets = DebugStrOffsets::none();
+
+ for &version in &[2, 3, 4, 5] {
+ for &address_size in &[4, 8] {
+ for &format in &[Format::Dwarf32, Format::Dwarf64] {
+ let encoding = Encoding {
+ format,
+ version,
+ address_size,
+ };
+ let program = LineProgram::new(
+ encoding,
+ LineEncoding::default(),
+ LineString::String(Vec::new()),
+ LineString::String(Vec::new()),
+ None,
+ );
+
+ let mut debug_line = DebugLine::from(EndianVec::new(LittleEndian));
+ let debug_line_offset = program
+ .write(
+ &mut debug_line,
+ encoding,
+ &debug_line_str_offsets,
+ &debug_str_offsets,
+ )
+ .unwrap();
+
+ let read_debug_line = read::DebugLine::new(debug_line.slice(), LittleEndian);
+ let read_program = read_debug_line
+ .program(
+ debug_line_offset,
+ address_size,
+ // Testing missing comp_dir/comp_name.
+ None,
+ None,
+ )
+ .unwrap();
+
+ let dwarf = read::Dwarf::default();
+ let mut convert_line_strings = LineStringTable::default();
+ let mut convert_strings = StringTable::default();
+ let convert_address = &|address| Some(Address::Constant(address));
+ LineProgram::from(
+ read_program,
+ &dwarf,
+ &mut convert_line_strings,
+ &mut convert_strings,
+ convert_address,
+ )
+ .unwrap();
+ }
+ }
+ }
+ }
+}
diff --git a/vendor/gimli-0.26.2/src/write/loc.rs b/vendor/gimli-0.26.2/src/write/loc.rs
new file mode 100644
index 000000000..ea0ecb1cf
--- /dev/null
+++ b/vendor/gimli-0.26.2/src/write/loc.rs
@@ -0,0 +1,549 @@
+use alloc::vec::Vec;
+use indexmap::IndexSet;
+use std::ops::{Deref, DerefMut};
+
+use crate::common::{Encoding, LocationListsOffset, SectionId};
+use crate::write::{
+ Address, BaseId, DebugInfoReference, Error, Expression, Result, Section, Sections, UnitOffsets,
+ Writer,
+};
+
+define_section!(
+ DebugLoc,
+ LocationListsOffset,
+ "A writable `.debug_loc` section."
+);
+define_section!(
+ DebugLocLists,
+ LocationListsOffset,
+ "A writable `.debug_loclists` section."
+);
+
+define_offsets!(
+ LocationListOffsets: LocationListId => LocationListsOffset,
+ "The section offsets of a series of location lists within the `.debug_loc` or `.debug_loclists` sections."
+);
+
+define_id!(
+ LocationListId,
+ "An identifier for a location list in a `LocationListTable`."
+);
+
+/// A table of location lists that will be stored in a `.debug_loc` or `.debug_loclists` section.
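+///
+/// A minimal usage sketch (illustrative only; it assumes the unit's `locations`
+/// table, `AttributeValue::LocationListRef`, and `DW_AT_location` from the rest of
+/// this crate, and mirrors the module-level example; the root DIE stands in for a
+/// variable DIE):
+///
+/// ```rust
+/// use gimli::write::{
+///     Address, AttributeValue, DwarfUnit, EndianVec, Error, Expression, Location,
+///     LocationList, Sections,
+/// };
+///
+/// fn example() -> Result<(), Error> {
+///     let encoding = gimli::Encoding {
+///         format: gimli::Format::Dwarf32,
+///         version: 5,
+///         address_size: 8,
+///     };
+///     let mut dwarf = DwarfUnit::new(encoding);
+///
+///     // A single-entry list: the object lives at a constant address for 0x10 bytes.
+///     let mut data = Expression::new();
+///     data.op_addr(Address::Constant(0x2000));
+///     let loc_list = LocationList(vec![Location::StartLength {
+///         begin: Address::Constant(0x1000),
+///         length: 0x10,
+///         data,
+///     }]);
+///
+///     // Add the list to the unit's table and reference it from a DIE attribute.
+///     // A real producer would set this on a variable or parameter DIE.
+///     let loc_list_id = dwarf.unit.locations.add(loc_list);
+///     let root = dwarf.unit.root();
+///     dwarf.unit.get_mut(root).set(
+///         gimli::DW_AT_location,
+///         AttributeValue::LocationListRef(loc_list_id),
+///     );
+///
+///     // Writing the unit also writes `.debug_loc` or `.debug_loclists` as needed.
+///     let mut sections = Sections::new(EndianVec::new(gimli::LittleEndian));
+///     dwarf.write(&mut sections)?;
+///     Ok(())
+/// }
+/// # example().unwrap();
+/// ```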
+#[derive(Debug, Default)]
+pub struct LocationListTable {
+ base_id: BaseId,
+ locations: IndexSet<LocationList>,
+}
+
+impl LocationListTable {
+ /// Add a location list to the table.
+ pub fn add(&mut self, loc_list: LocationList) -> LocationListId {
+ let (index, _) = self.locations.insert_full(loc_list);
+ LocationListId::new(self.base_id, index)
+ }
+
+ /// Write the location list table to the appropriate section for the given DWARF version.
+ pub(crate) fn write<W: Writer>(
+ &self,
+ sections: &mut Sections<W>,
+ encoding: Encoding,
+ unit_offsets: Option<&UnitOffsets>,
+ ) -> Result<LocationListOffsets> {
+ if self.locations.is_empty() {
+ return Ok(LocationListOffsets::none());
+ }
+
+ match encoding.version {
+ 2..=4 => self.write_loc(
+ &mut sections.debug_loc,
+ &mut sections.debug_loc_refs,
+ encoding,
+ unit_offsets,
+ ),
+ 5 => self.write_loclists(
+ &mut sections.debug_loclists,
+ &mut sections.debug_loclists_refs,
+ encoding,
+ unit_offsets,
+ ),
+ _ => Err(Error::UnsupportedVersion(encoding.version)),
+ }
+ }
+
+ /// Write the location list table to the `.debug_loc` section.
+ fn write_loc<W: Writer>(
+ &self,
+ w: &mut DebugLoc<W>,
+ refs: &mut Vec<DebugInfoReference>,
+ encoding: Encoding,
+ unit_offsets: Option<&UnitOffsets>,
+ ) -> Result<LocationListOffsets> {
+ let address_size = encoding.address_size;
+ let mut offsets = Vec::new();
+ for loc_list in self.locations.iter() {
+ offsets.push(w.offset());
+ for loc in &loc_list.0 {
+ // Note that we must ensure none of the ranges have both begin == 0 and end == 0.
+ // We do this by ensuring that begin != end, which is a bit more restrictive
+ // than required, but still seems reasonable.
+ match *loc {
+ Location::BaseAddress { address } => {
+ let marker = !0 >> (64 - address_size * 8);
+ w.write_udata(marker, address_size)?;
+ w.write_address(address, address_size)?;
+ }
+ Location::OffsetPair {
+ begin,
+ end,
+ ref data,
+ } => {
+ if begin == end {
+ return Err(Error::InvalidRange);
+ }
+ w.write_udata(begin, address_size)?;
+ w.write_udata(end, address_size)?;
+ write_expression(&mut w.0, refs, encoding, unit_offsets, data)?;
+ }
+ Location::StartEnd {
+ begin,
+ end,
+ ref data,
+ } => {
+ if begin == end {
+ return Err(Error::InvalidRange);
+ }
+ w.write_address(begin, address_size)?;
+ w.write_address(end, address_size)?;
+ write_expression(&mut w.0, refs, encoding, unit_offsets, data)?;
+ }
+ Location::StartLength {
+ begin,
+ length,
+ ref data,
+ } => {
+ let end = match begin {
+ Address::Constant(begin) => Address::Constant(begin + length),
+ Address::Symbol { symbol, addend } => Address::Symbol {
+ symbol,
+ addend: addend + length as i64,
+ },
+ };
+ if begin == end {
+ return Err(Error::InvalidRange);
+ }
+ w.write_address(begin, address_size)?;
+ w.write_address(end, address_size)?;
+ write_expression(&mut w.0, refs, encoding, unit_offsets, data)?;
+ }
+ Location::DefaultLocation { .. } => {
+ return Err(Error::InvalidRange);
+ }
+ }
+ }
+ w.write_udata(0, address_size)?;
+ w.write_udata(0, address_size)?;
+ }
+ Ok(LocationListOffsets {
+ base_id: self.base_id,
+ offsets,
+ })
+ }
+
+ /// Write the location list table to the `.debug_loclists` section.
+ fn write_loclists<W: Writer>(
+ &self,
+ w: &mut DebugLocLists<W>,
+ refs: &mut Vec<DebugInfoReference>,
+ encoding: Encoding,
+ unit_offsets: Option<&UnitOffsets>,
+ ) -> Result<LocationListOffsets> {
+ let mut offsets = Vec::new();
+
+ if encoding.version != 5 {
+ return Err(Error::NeedVersion(5));
+ }
+
+ let length_offset = w.write_initial_length(encoding.format)?;
+ let length_base = w.len();
+
+ w.write_u16(encoding.version)?;
+ w.write_u8(encoding.address_size)?;
+ w.write_u8(0)?; // segment_selector_size
+ w.write_u32(0)?; // offset_entry_count (when set to zero DW_FORM_rnglistx can't be used, see section 7.28)
+ // FIXME implement DW_FORM_rnglistx writing and implement the offset entry list
+
+ for loc_list in self.locations.iter() {
+ offsets.push(w.offset());
+ for loc in &loc_list.0 {
+ match *loc {
+ Location::BaseAddress { address } => {
+ w.write_u8(crate::constants::DW_LLE_base_address.0)?;
+ w.write_address(address, encoding.address_size)?;
+ }
+ Location::OffsetPair {
+ begin,
+ end,
+ ref data,
+ } => {
+ w.write_u8(crate::constants::DW_LLE_offset_pair.0)?;
+ w.write_uleb128(begin)?;
+ w.write_uleb128(end)?;
+ write_expression(&mut w.0, refs, encoding, unit_offsets, data)?;
+ }
+ Location::StartEnd {
+ begin,
+ end,
+ ref data,
+ } => {
+ w.write_u8(crate::constants::DW_LLE_start_end.0)?;
+ w.write_address(begin, encoding.address_size)?;
+ w.write_address(end, encoding.address_size)?;
+ write_expression(&mut w.0, refs, encoding, unit_offsets, data)?;
+ }
+ Location::StartLength {
+ begin,
+ length,
+ ref data,
+ } => {
+ w.write_u8(crate::constants::DW_LLE_start_length.0)?;
+ w.write_address(begin, encoding.address_size)?;
+ w.write_uleb128(length)?;
+ write_expression(&mut w.0, refs, encoding, unit_offsets, data)?;
+ }
+ Location::DefaultLocation { ref data } => {
+ w.write_u8(crate::constants::DW_LLE_default_location.0)?;
+ write_expression(&mut w.0, refs, encoding, unit_offsets, data)?;
+ }
+ }
+ }
+
+ w.write_u8(crate::constants::DW_LLE_end_of_list.0)?;
+ }
+
+ let length = (w.len() - length_base) as u64;
+ w.write_initial_length_at(length_offset, length, encoding.format)?;
+
+ Ok(LocationListOffsets {
+ base_id: self.base_id,
+ offsets,
+ })
+ }
+}
+
+/// A location list that will be stored in a `.debug_loc` or `.debug_loclists` section.
+#[derive(Clone, Debug, Eq, PartialEq, Hash)]
+pub struct LocationList(pub Vec<Location>);
+
+/// A single location.
+#[derive(Clone, Debug, Eq, PartialEq, Hash)]
+pub enum Location {
+ /// DW_LLE_base_address
+ BaseAddress {
+ /// Base address.
+ address: Address,
+ },
+ /// DW_LLE_offset_pair
+ OffsetPair {
+ /// Start of range relative to base address.
+ begin: u64,
+ /// End of range relative to base address.
+ end: u64,
+ /// Location description.
+ data: Expression,
+ },
+ /// DW_LLE_start_end
+ StartEnd {
+ /// Start of range.
+ begin: Address,
+ /// End of range.
+ end: Address,
+ /// Location description.
+ data: Expression,
+ },
+ /// DW_LLE_start_length
+ StartLength {
+ /// Start of range.
+ begin: Address,
+ /// Length of range.
+ length: u64,
+ /// Location description.
+ data: Expression,
+ },
+ /// DW_LLE_default_location
+ DefaultLocation {
+ /// Location description.
+ data: Expression,
+ },
+}
+
+fn write_expression<W: Writer>(
+ w: &mut W,
+ refs: &mut Vec<DebugInfoReference>,
+ encoding: Encoding,
+ unit_offsets: Option<&UnitOffsets>,
+ val: &Expression,
+) -> Result<()> {
+ let size = val.size(encoding, unit_offsets) as u64;
+ if encoding.version <= 4 {
+ w.write_udata(size, 2)?;
+ } else {
+ w.write_uleb128(size)?;
+ }
+ val.write(w, Some(refs), encoding, unit_offsets)?;
+ Ok(())
+}
+
+#[cfg(feature = "read")]
+mod convert {
+ use super::*;
+
+ use crate::read::{self, Reader};
+ use crate::write::{ConvertError, ConvertResult, ConvertUnitContext};
+
+ impl LocationList {
+        /// Create a location list by reading the data from the given location list iterator.
+ pub(crate) fn from<R: Reader<Offset = usize>>(
+ mut from: read::RawLocListIter<R>,
+ context: &ConvertUnitContext<R>,
+ ) -> ConvertResult<Self> {
+ let mut have_base_address = context.base_address != Address::Constant(0);
+ let convert_address =
+ |x| (context.convert_address)(x).ok_or(ConvertError::InvalidAddress);
+ let convert_expression = |x| {
+ Expression::from(
+ x,
+ context.unit.encoding(),
+ Some(context.dwarf),
+ Some(context.unit),
+ Some(context.entry_ids),
+ context.convert_address,
+ )
+ };
+ let mut loc_list = Vec::new();
+ while let Some(from_loc) = from.next()? {
+ let loc = match from_loc {
+ read::RawLocListEntry::AddressOrOffsetPair { begin, end, data } => {
+ // These were parsed as addresses, even if they are offsets.
+ let begin = convert_address(begin)?;
+ let end = convert_address(end)?;
+ let data = convert_expression(data)?;
+ match (begin, end) {
+ (Address::Constant(begin_offset), Address::Constant(end_offset)) => {
+ if have_base_address {
+ Location::OffsetPair {
+ begin: begin_offset,
+ end: end_offset,
+ data,
+ }
+ } else {
+ Location::StartEnd { begin, end, data }
+ }
+ }
+ _ => {
+ if have_base_address {
+ // At least one of begin/end is an address, but we also have
+ // a base address. Adding addresses is undefined.
+ return Err(ConvertError::InvalidRangeRelativeAddress);
+ }
+ Location::StartEnd { begin, end, data }
+ }
+ }
+ }
+ read::RawLocListEntry::BaseAddress { addr } => {
+ have_base_address = true;
+ let address = convert_address(addr)?;
+ Location::BaseAddress { address }
+ }
+ read::RawLocListEntry::BaseAddressx { addr } => {
+ have_base_address = true;
+ let address = convert_address(context.dwarf.address(context.unit, addr)?)?;
+ Location::BaseAddress { address }
+ }
+ read::RawLocListEntry::StartxEndx { begin, end, data } => {
+ let begin = convert_address(context.dwarf.address(context.unit, begin)?)?;
+ let end = convert_address(context.dwarf.address(context.unit, end)?)?;
+ let data = convert_expression(data)?;
+ Location::StartEnd { begin, end, data }
+ }
+ read::RawLocListEntry::StartxLength {
+ begin,
+ length,
+ data,
+ } => {
+ let begin = convert_address(context.dwarf.address(context.unit, begin)?)?;
+ let data = convert_expression(data)?;
+ Location::StartLength {
+ begin,
+ length,
+ data,
+ }
+ }
+ read::RawLocListEntry::OffsetPair { begin, end, data } => {
+ let data = convert_expression(data)?;
+ Location::OffsetPair { begin, end, data }
+ }
+ read::RawLocListEntry::StartEnd { begin, end, data } => {
+ let begin = convert_address(begin)?;
+ let end = convert_address(end)?;
+ let data = convert_expression(data)?;
+ Location::StartEnd { begin, end, data }
+ }
+ read::RawLocListEntry::StartLength {
+ begin,
+ length,
+ data,
+ } => {
+ let begin = convert_address(begin)?;
+ let data = convert_expression(data)?;
+ Location::StartLength {
+ begin,
+ length,
+ data,
+ }
+ }
+ read::RawLocListEntry::DefaultLocation { data } => {
+ let data = convert_expression(data)?;
+ Location::DefaultLocation { data }
+ }
+ };
+            // In some cases, existing data may contain entries with begin == end;
+            // filter these out.
+ match loc {
+ Location::StartLength { length, .. } if length == 0 => continue,
+ Location::StartEnd { begin, end, .. } if begin == end => continue,
+ Location::OffsetPair { begin, end, .. } if begin == end => continue,
+ _ => (),
+ }
+ loc_list.push(loc);
+ }
+ Ok(LocationList(loc_list))
+ }
+ }
+}
+
+#[cfg(test)]
+#[cfg(feature = "read")]
+mod tests {
+ use super::*;
+ use crate::common::{
+ DebugAbbrevOffset, DebugAddrBase, DebugInfoOffset, DebugLocListsBase, DebugRngListsBase,
+ DebugStrOffsetsBase, Format,
+ };
+ use crate::read;
+ use crate::write::{
+ ConvertUnitContext, EndianVec, LineStringTable, RangeListTable, StringTable,
+ };
+ use crate::LittleEndian;
+ use std::collections::HashMap;
+
+ #[test]
+ fn test_loc_list() {
+ let mut line_strings = LineStringTable::default();
+ let mut strings = StringTable::default();
+ let mut expression = Expression::new();
+ expression.op_constu(0);
+
+ for &version in &[2, 3, 4, 5] {
+ for &address_size in &[4, 8] {
+ for &format in &[Format::Dwarf32, Format::Dwarf64] {
+ let encoding = Encoding {
+ format,
+ version,
+ address_size,
+ };
+
+ let mut loc_list = LocationList(vec![
+ Location::StartLength {
+ begin: Address::Constant(6666),
+ length: 7777,
+ data: expression.clone(),
+ },
+ Location::StartEnd {
+ begin: Address::Constant(4444),
+ end: Address::Constant(5555),
+ data: expression.clone(),
+ },
+ Location::BaseAddress {
+ address: Address::Constant(1111),
+ },
+ Location::OffsetPair {
+ begin: 2222,
+ end: 3333,
+ data: expression.clone(),
+ },
+ ]);
+ if version >= 5 {
+ loc_list.0.push(Location::DefaultLocation {
+ data: expression.clone(),
+ });
+ }
+
+ let mut locations = LocationListTable::default();
+ let loc_list_id = locations.add(loc_list.clone());
+
+ let mut sections = Sections::new(EndianVec::new(LittleEndian));
+ let loc_list_offsets = locations.write(&mut sections, encoding, None).unwrap();
+ assert!(sections.debug_loc_refs.is_empty());
+ assert!(sections.debug_loclists_refs.is_empty());
+
+ let read_debug_loc =
+ read::DebugLoc::new(sections.debug_loc.slice(), LittleEndian);
+ let read_debug_loclists =
+ read::DebugLocLists::new(sections.debug_loclists.slice(), LittleEndian);
+ let read_loc = read::LocationLists::new(read_debug_loc, read_debug_loclists);
+ let offset = loc_list_offsets.get(loc_list_id);
+ let read_loc_list = read_loc.raw_locations(offset, encoding).unwrap();
+
+ let dwarf = read::Dwarf {
+ locations: read_loc,
+ ..Default::default()
+ };
+ let unit = read::Unit {
+ header: read::UnitHeader::new(
+ encoding,
+ 0,
+ read::UnitType::Compilation,
+ DebugAbbrevOffset(0),
+ DebugInfoOffset(0).into(),
+ read::EndianSlice::default(),
+ ),
+ abbreviations: read::Abbreviations::default(),
+ name: None,
+ comp_dir: None,
+ low_pc: 0,
+ str_offsets_base: DebugStrOffsetsBase(0),
+ addr_base: DebugAddrBase(0),
+ loclists_base: DebugLocListsBase(0),
+ rnglists_base: DebugRngListsBase(0),
+ line_program: None,
+ dwo_id: None,
+ };
+ let context = ConvertUnitContext {
+ dwarf: &dwarf,
+ unit: &unit,
+ line_strings: &mut line_strings,
+ strings: &mut strings,
+ ranges: &mut RangeListTable::default(),
+ locations: &mut locations,
+ convert_address: &|address| Some(Address::Constant(address)),
+ base_address: Address::Constant(0),
+ line_program_offset: None,
+ line_program_files: Vec::new(),
+ entry_ids: &HashMap::new(),
+ };
+ let convert_loc_list = LocationList::from(read_loc_list, &context).unwrap();
+
+ if version <= 4 {
+ loc_list.0[0] = Location::StartEnd {
+ begin: Address::Constant(6666),
+ end: Address::Constant(6666 + 7777),
+ data: expression.clone(),
+ };
+ }
+ assert_eq!(loc_list, convert_loc_list);
+ }
+ }
+ }
+ }
+}
diff --git a/vendor/gimli-0.26.2/src/write/mod.rs b/vendor/gimli-0.26.2/src/write/mod.rs
new file mode 100644
index 000000000..47ba6319d
--- /dev/null
+++ b/vendor/gimli-0.26.2/src/write/mod.rs
@@ -0,0 +1,425 @@
+//! Write DWARF debugging information.
+//!
+//! ## API Structure
+//!
+//! This module works by building up a representation of the debugging information
+//! in memory, and then writing it all at once. It supports two major use cases:
+//!
+//! * Use the [`DwarfUnit`](./struct.DwarfUnit.html) type when writing DWARF
+//! for a single compilation unit.
+//!
+//! * Use the [`Dwarf`](./struct.Dwarf.html) type when writing DWARF for multiple
+//! compilation units.
+//!
+//! The module also supports reading in DWARF debugging information and writing it out
+//! again, possibly after modifying it. Create a [`read::Dwarf`](../read/struct.Dwarf.html)
+//! instance, and then use [`Dwarf::from`](./struct.Dwarf.html#method.from) to convert
+//! it to a writable instance.
+//!
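+//! For example, a minimal round-trip sketch (illustrative only; it assumes the
+//! `Dwarf::from` and `Dwarf::write` methods provided elsewhere in this module, and
+//! an empty `read::Dwarf` stands in for one parsed from an object file):
+//!
+//! ```rust
+//! use gimli::write::{Address, Dwarf, EndianVec, Sections};
+//!
+//! fn round_trip() -> Result<(), Box<dyn std::error::Error>> {
+//!     // An empty `read::Dwarf`; a real producer would load one from object file sections.
+//!     let read_dwarf = gimli::read::Dwarf::<gimli::EndianSlice<gimli::LittleEndian>>::default();
+//!     // Convert to the writable representation, mapping addresses one-to-one.
+//!     let mut write_dwarf = Dwarf::from(&read_dwarf, &|address| Some(Address::Constant(address)))?;
+//!     // ... modify `write_dwarf` here ...
+//!     let mut sections = Sections::new(EndianVec::new(gimli::LittleEndian));
+//!     write_dwarf.write(&mut sections)?;
+//!     Ok(())
+//! }
+//! # fn main() {
+//! #     round_trip().unwrap();
+//! # }
+//! ```
+//!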
+//! ## Example Usage
+//!
+//! Write a compilation unit containing only the top level DIE.
+//!
+//! ```rust
+//! use gimli::write::{
+//! Address, AttributeValue, DwarfUnit, EndianVec, Error, Range, RangeList, Sections,
+//! };
+//!
+//! fn example() -> Result<(), Error> {
+//! // Choose the encoding parameters.
+//! let encoding = gimli::Encoding {
+//! format: gimli::Format::Dwarf32,
+//! version: 5,
+//! address_size: 8,
+//! };
+//! // Create a container for a single compilation unit.
+//! let mut dwarf = DwarfUnit::new(encoding);
+//! // Set a range attribute on the root DIE.
+//! let range_list = RangeList(vec![Range::StartLength {
+//! begin: Address::Constant(0x100),
+//! length: 42,
+//! }]);
+//! let range_list_id = dwarf.unit.ranges.add(range_list);
+//! let root = dwarf.unit.root();
+//! dwarf.unit.get_mut(root).set(
+//! gimli::DW_AT_ranges,
+//! AttributeValue::RangeListRef(range_list_id),
+//! );
+//! // Create a `Vec` for each DWARF section.
+//! let mut sections = Sections::new(EndianVec::new(gimli::LittleEndian));
+//! // Finally, write the DWARF data to the sections.
+//! dwarf.write(&mut sections)?;
+//! sections.for_each(|id, data| {
+//! // Here you can add the data to the output object file.
+//! Ok(())
+//! })
+//! }
+//! # fn main() {
+//! # example().unwrap();
+//! # }
+//! ```
+
+use std::error;
+use std::fmt;
+use std::result;
+
+use crate::constants;
+
+mod endian_vec;
+pub use self::endian_vec::*;
+
+mod writer;
+pub use self::writer::*;
+
+#[macro_use]
+mod section;
+pub use self::section::*;
+
+macro_rules! define_id {
+ ($name:ident, $docs:expr) => {
+ #[doc=$docs]
+ #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+ pub struct $name {
+ base_id: BaseId,
+ index: usize,
+ }
+
+ impl $name {
+ #[inline]
+ fn new(base_id: BaseId, index: usize) -> Self {
+ $name { base_id, index }
+ }
+ }
+ };
+}
+
+macro_rules! define_offsets {
+ ($offsets:ident: $id:ident => $offset:ident, $off_doc:expr) => {
+ #[doc=$off_doc]
+ #[derive(Debug)]
+ pub struct $offsets {
+ base_id: BaseId,
+ // We know ids start at 0.
+ offsets: Vec<$offset>,
+ }
+
+ impl $offsets {
+ /// Return an empty list of offsets.
+ #[inline]
+ pub fn none() -> Self {
+ $offsets {
+ base_id: BaseId::default(),
+ offsets: Vec::new(),
+ }
+ }
+
+            /// Get the offset for the given `id`.
+ ///
+ /// # Panics
+ ///
+ /// Panics if `id` is invalid.
+ #[inline]
+ pub fn get(&self, id: $id) -> $offset {
+ debug_assert_eq!(self.base_id, id.base_id);
+ self.offsets[id.index]
+ }
+
+ /// Return the number of offsets.
+ #[inline]
+ pub fn count(&self) -> usize {
+ self.offsets.len()
+ }
+ }
+ };
+}
+
+mod abbrev;
+pub use self::abbrev::*;
+
+mod cfi;
+pub use self::cfi::*;
+
+mod dwarf;
+pub use self::dwarf::*;
+
+mod line;
+pub use self::line::*;
+
+mod loc;
+pub use self::loc::*;
+
+mod op;
+pub use self::op::*;
+
+mod range;
+pub use self::range::*;
+
+mod str;
+pub use self::str::*;
+
+mod unit;
+pub use self::unit::*;
+
+/// An error that occurred when writing.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum Error {
+ /// The given offset is out of bounds.
+ OffsetOutOfBounds,
+ /// The given length is out of bounds.
+ LengthOutOfBounds,
+    /// The attribute value is invalid for writing.
+ InvalidAttributeValue,
+ /// The value is too large for the encoding form.
+ ValueTooLarge,
+ /// Unsupported word size.
+ UnsupportedWordSize(u8),
+ /// Unsupported DWARF version.
+ UnsupportedVersion(u16),
+ /// The unit length is too large for the requested DWARF format.
+ InitialLengthOverflow,
+ /// The address is invalid.
+ InvalidAddress,
+ /// The reference is invalid.
+ InvalidReference,
+ /// A requested feature requires a different DWARF version.
+ NeedVersion(u16),
+ /// Strings in line number program have mismatched forms.
+ LineStringFormMismatch,
+ /// The range is empty or otherwise invalid.
+ InvalidRange,
+ /// The line number program encoding is incompatible with the unit encoding.
+ IncompatibleLineProgramEncoding,
+ /// Could not encode code offset for a frame instruction.
+ InvalidFrameCodeOffset(u32),
+ /// Could not encode data offset for a frame instruction.
+ InvalidFrameDataOffset(i32),
+ /// Unsupported eh_frame pointer encoding.
+ UnsupportedPointerEncoding(constants::DwEhPe),
+ /// Unsupported reference in CFI expression.
+ UnsupportedCfiExpressionReference,
+ /// Unsupported forward reference in expression.
+ UnsupportedExpressionForwardReference,
+}
+
+impl fmt::Display for Error {
+ fn fmt(&self, f: &mut fmt::Formatter) -> result::Result<(), fmt::Error> {
+ match *self {
+ Error::OffsetOutOfBounds => write!(f, "The given offset is out of bounds."),
+ Error::LengthOutOfBounds => write!(f, "The given length is out of bounds."),
+ Error::InvalidAttributeValue => {
+ write!(f, "The attribute value is an invalid for writing.")
+ }
+ Error::ValueTooLarge => write!(f, "The value is too large for the encoding form."),
+ Error::UnsupportedWordSize(size) => write!(f, "Unsupported word size: {}", size),
+ Error::UnsupportedVersion(version) => {
+ write!(f, "Unsupported DWARF version: {}", version)
+ }
+ Error::InitialLengthOverflow => write!(
+ f,
+ "The unit length is too large for the requested DWARF format."
+ ),
+ Error::InvalidAddress => write!(f, "The address is invalid."),
+ Error::InvalidReference => write!(f, "The reference is invalid."),
+ Error::NeedVersion(version) => write!(
+ f,
+ "A requested feature requires a DWARF version {}.",
+ version
+ ),
+ Error::LineStringFormMismatch => {
+ write!(f, "Strings in line number program have mismatched forms.")
+ }
+ Error::InvalidRange => write!(f, "The range is empty or otherwise invalid."),
+ Error::IncompatibleLineProgramEncoding => write!(
+ f,
+ "The line number program encoding is incompatible with the unit encoding."
+ ),
+ Error::InvalidFrameCodeOffset(offset) => write!(
+ f,
+ "Could not encode code offset ({}) for a frame instruction.",
+ offset,
+ ),
+ Error::InvalidFrameDataOffset(offset) => write!(
+ f,
+ "Could not encode data offset ({}) for a frame instruction.",
+ offset,
+ ),
+ Error::UnsupportedPointerEncoding(eh_pe) => {
+ write!(f, "Unsupported eh_frame pointer encoding ({}).", eh_pe)
+ }
+ Error::UnsupportedCfiExpressionReference => {
+ write!(f, "Unsupported reference in CFI expression.")
+ }
+ Error::UnsupportedExpressionForwardReference => {
+ write!(f, "Unsupported forward reference in expression.")
+ }
+ }
+ }
+}
+
+impl error::Error for Error {}
+
+/// The result of a write.
+pub type Result<T> = result::Result<T, Error>;
+
+/// An address.
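+///
+/// A small sketch of the two forms (the symbol index and addend below are arbitrary
+/// placeholders; their meaning is decided by the writer):
+///
+/// ```rust
+/// use gimli::write::Address;
+///
+/// // A fixed address that needs no relocation.
+/// let fixed = Address::Constant(0x1000);
+/// // An address expressed as symbol-table index 3 plus an addend of 0x10.
+/// let relocatable = Address::Symbol { symbol: 3, addend: 0x10 };
+/// assert_ne!(fixed, relocatable);
+/// ```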
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum Address {
+ /// A fixed address that does not require relocation.
+ Constant(u64),
+ /// An address that is relative to a symbol which may be relocated.
+ Symbol {
+ /// The symbol that the address is relative to.
+ ///
+ /// The meaning of this value is decided by the writer, but
+ /// will typically be an index into a symbol table.
+ symbol: usize,
+ /// The offset of the address relative to the symbol.
+ ///
+ /// This will typically be used as the addend in a relocation.
+ addend: i64,
+ },
+}
+
+/// A reference to a `.debug_info` entry.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum Reference {
+ /// An external symbol.
+ ///
+ /// The meaning of this value is decided by the writer, but
+ /// will typically be an index into a symbol table.
+ Symbol(usize),
+ /// An entry in the same section.
+ ///
+ /// This only supports references in units that are emitted together.
+ Entry(UnitId, UnitEntryId),
+}
+
+// This type is only used in debug assertions.
+#[cfg(not(debug_assertions))]
+type BaseId = ();
+
+#[cfg(debug_assertions)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+struct BaseId(usize);
+
+#[cfg(debug_assertions)]
+impl Default for BaseId {
+ fn default() -> Self {
+ use std::sync::atomic;
+ static BASE_ID: atomic::AtomicUsize = atomic::AtomicUsize::new(0);
+ BaseId(BASE_ID.fetch_add(1, atomic::Ordering::Relaxed))
+ }
+}
+
+#[cfg(feature = "read")]
+mod convert {
+ use super::*;
+ use crate::read;
+
+ pub(crate) use super::unit::convert::*;
+
+ /// An error that occurred when converting a read value into a write value.
+ #[derive(Debug, Clone, Copy, PartialEq, Eq)]
+ pub enum ConvertError {
+ /// An error occurred when reading.
+ Read(read::Error),
+ /// Writing of this attribute value is not implemented yet.
+ UnsupportedAttributeValue,
+ /// This attribute value is an invalid name/form combination.
+ InvalidAttributeValue,
+ /// A `.debug_info` reference does not refer to a valid entry.
+ InvalidDebugInfoOffset,
+ /// An address could not be converted.
+ InvalidAddress,
+ /// Writing this line number instruction is not implemented yet.
+ UnsupportedLineInstruction,
+ /// Writing this form of line string is not implemented yet.
+ UnsupportedLineStringForm,
+ /// A `.debug_line` file index is invalid.
+ InvalidFileIndex,
+ /// A `.debug_line` directory index is invalid.
+ InvalidDirectoryIndex,
+ /// A `.debug_line` line base is invalid.
+ InvalidLineBase,
+ /// A `.debug_line` reference is invalid.
+ InvalidLineRef,
+ /// A `.debug_info` unit entry reference is invalid.
+ InvalidUnitRef,
+ /// A `.debug_info` reference is invalid.
+ InvalidDebugInfoRef,
+ /// Invalid relative address in a range list.
+ InvalidRangeRelativeAddress,
+ /// Writing this CFI instruction is not implemented yet.
+ UnsupportedCfiInstruction,
+ /// Writing indirect pointers is not implemented yet.
+ UnsupportedIndirectAddress,
+ /// Writing this expression operation is not implemented yet.
+ UnsupportedOperation,
+ /// Operation branch target is invalid.
+ InvalidBranchTarget,
+ /// Writing this unit type is not supported yet.
+ UnsupportedUnitType,
+ }
+
+ impl fmt::Display for ConvertError {
+ fn fmt(&self, f: &mut fmt::Formatter) -> result::Result<(), fmt::Error> {
+ use self::ConvertError::*;
+ match *self {
+ Read(ref e) => e.fmt(f),
+ UnsupportedAttributeValue => {
+ write!(f, "Writing of this attribute value is not implemented yet.")
+ }
+ InvalidAttributeValue => write!(
+ f,
+ "This attribute value is an invalid name/form combination."
+ ),
+ InvalidDebugInfoOffset => write!(
+ f,
+ "A `.debug_info` reference does not refer to a valid entry."
+ ),
+ InvalidAddress => write!(f, "An address could not be converted."),
+ UnsupportedLineInstruction => write!(
+ f,
+ "Writing this line number instruction is not implemented yet."
+ ),
+ UnsupportedLineStringForm => write!(
+ f,
+ "Writing this form of line string is not implemented yet."
+ ),
+ InvalidFileIndex => write!(f, "A `.debug_line` file index is invalid."),
+ InvalidDirectoryIndex => write!(f, "A `.debug_line` directory index is invalid."),
+ InvalidLineBase => write!(f, "A `.debug_line` line base is invalid."),
+ InvalidLineRef => write!(f, "A `.debug_line` reference is invalid."),
+ InvalidUnitRef => write!(f, "A `.debug_info` unit entry reference is invalid."),
+ InvalidDebugInfoRef => write!(f, "A `.debug_info` reference is invalid."),
+ InvalidRangeRelativeAddress => {
+ write!(f, "Invalid relative address in a range list.")
+ }
+ UnsupportedCfiInstruction => {
+ write!(f, "Writing this CFI instruction is not implemented yet.")
+ }
+ UnsupportedIndirectAddress => {
+ write!(f, "Writing indirect pointers is not implemented yet.")
+ }
+ UnsupportedOperation => write!(
+ f,
+ "Writing this expression operation is not implemented yet."
+ ),
+ InvalidBranchTarget => write!(f, "Operation branch target is invalid."),
+ UnsupportedUnitType => write!(f, "Writing this unit type is not supported yet."),
+ }
+ }
+ }
+
+ impl error::Error for ConvertError {}
+
+ impl From<read::Error> for ConvertError {
+ fn from(e: read::Error) -> Self {
+ ConvertError::Read(e)
+ }
+ }
+
+ /// The result of a conversion.
+ pub type ConvertResult<T> = result::Result<T, ConvertError>;
+}
+#[cfg(feature = "read")]
+pub use self::convert::*;
diff --git a/vendor/gimli-0.26.2/src/write/op.rs b/vendor/gimli-0.26.2/src/write/op.rs
new file mode 100644
index 000000000..c70eec2dd
--- /dev/null
+++ b/vendor/gimli-0.26.2/src/write/op.rs
@@ -0,0 +1,1621 @@
+use alloc::boxed::Box;
+use alloc::vec::Vec;
+
+use crate::common::{Encoding, Register};
+use crate::constants::{self, DwOp};
+use crate::leb128::write::{sleb128_size, uleb128_size};
+use crate::write::{
+ Address, DebugInfoReference, Error, Reference, Result, UnitEntryId, UnitOffsets, Writer,
+};
+
+/// The bytecode for a DWARF expression or location description.
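+///
+/// A short sketch of building a simple location description (the frame-base offset
+/// of -8 is an arbitrary placeholder):
+///
+/// ```rust
+/// use gimli::write::Expression;
+///
+/// let mut expression = Expression::new();
+/// // The object is in memory at frame base - 8.
+/// expression.op_fbreg(-8);
+/// ```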
+#[derive(Debug, Default, Clone, PartialEq, Eq, Hash)]
+pub struct Expression {
+ operations: Vec<Operation>,
+}
+
+impl Expression {
+ /// Create an empty expression.
+ #[inline]
+ pub fn new() -> Self {
+ Self::default()
+ }
+
+ /// Create an expression from raw bytecode.
+ ///
+ /// This does not support operations that require references, such as `DW_OP_addr`.
+ #[inline]
+ pub fn raw(bytecode: Vec<u8>) -> Self {
+ Expression {
+ operations: vec![Operation::Raw(bytecode)],
+ }
+ }
+
+ /// Add an operation to the expression.
+ ///
+ /// This should only be used for operations that have no explicit operands.
+ pub fn op(&mut self, opcode: DwOp) {
+ self.operations.push(Operation::Simple(opcode));
+ }
+
+ /// Add a `DW_OP_addr` operation to the expression.
+ pub fn op_addr(&mut self, address: Address) {
+ self.operations.push(Operation::Address(address));
+ }
+
+ /// Add a `DW_OP_constu` operation to the expression.
+ ///
+ /// This may be emitted as a smaller equivalent operation.
+ pub fn op_constu(&mut self, value: u64) {
+ self.operations.push(Operation::UnsignedConstant(value));
+ }
+
+ /// Add a `DW_OP_consts` operation to the expression.
+ ///
+ /// This may be emitted as a smaller equivalent operation.
+ pub fn op_consts(&mut self, value: i64) {
+ self.operations.push(Operation::SignedConstant(value));
+ }
+
+ /// Add a `DW_OP_const_type` or `DW_OP_GNU_const_type` operation to the expression.
+ pub fn op_const_type(&mut self, base: UnitEntryId, value: Box<[u8]>) {
+ self.operations.push(Operation::ConstantType(base, value));
+ }
+
+ /// Add a `DW_OP_fbreg` operation to the expression.
+ pub fn op_fbreg(&mut self, offset: i64) {
+ self.operations.push(Operation::FrameOffset(offset));
+ }
+
+ /// Add a `DW_OP_bregx` operation to the expression.
+ ///
+ /// This may be emitted as a smaller equivalent operation.
+ pub fn op_breg(&mut self, register: Register, offset: i64) {
+ self.operations
+ .push(Operation::RegisterOffset(register, offset));
+ }
+
+ /// Add a `DW_OP_regval_type` or `DW_OP_GNU_regval_type` operation to the expression.
+ ///
+ /// This may be emitted as a smaller equivalent operation.
+ pub fn op_regval_type(&mut self, register: Register, base: UnitEntryId) {
+ self.operations
+ .push(Operation::RegisterType(register, base));
+ }
+
+ /// Add a `DW_OP_pick` operation to the expression.
+ ///
+ /// This may be emitted as a `DW_OP_dup` or `DW_OP_over` operation.
+ pub fn op_pick(&mut self, index: u8) {
+ self.operations.push(Operation::Pick(index));
+ }
+
+ /// Add a `DW_OP_deref` operation to the expression.
+ pub fn op_deref(&mut self) {
+ self.operations.push(Operation::Deref { space: false });
+ }
+
+ /// Add a `DW_OP_xderef` operation to the expression.
+ pub fn op_xderef(&mut self) {
+ self.operations.push(Operation::Deref { space: true });
+ }
+
+ /// Add a `DW_OP_deref_size` operation to the expression.
+ pub fn op_deref_size(&mut self, size: u8) {
+ self.operations
+ .push(Operation::DerefSize { size, space: false });
+ }
+
+ /// Add a `DW_OP_xderef_size` operation to the expression.
+ pub fn op_xderef_size(&mut self, size: u8) {
+ self.operations
+ .push(Operation::DerefSize { size, space: true });
+ }
+
+ /// Add a `DW_OP_deref_type` or `DW_OP_GNU_deref_type` operation to the expression.
+ pub fn op_deref_type(&mut self, size: u8, base: UnitEntryId) {
+ self.operations.push(Operation::DerefType {
+ size,
+ base,
+ space: false,
+ });
+ }
+
+ /// Add a `DW_OP_xderef_type` operation to the expression.
+ pub fn op_xderef_type(&mut self, size: u8, base: UnitEntryId) {
+ self.operations.push(Operation::DerefType {
+ size,
+ base,
+ space: true,
+ });
+ }
+
+ /// Add a `DW_OP_plus_uconst` operation to the expression.
+ pub fn op_plus_uconst(&mut self, value: u64) {
+ self.operations.push(Operation::PlusConstant(value));
+ }
+
+ /// Add a `DW_OP_skip` operation to the expression.
+ ///
+ /// Returns the index of the operation. The caller must call `set_target` with
+ /// this index to set the target of the branch.
+ pub fn op_skip(&mut self) -> usize {
+ let index = self.next_index();
+ self.operations.push(Operation::Skip(!0));
+ index
+ }
+
+ /// Add a `DW_OP_bra` operation to the expression.
+ ///
+ /// Returns the index of the operation. The caller must call `set_target` with
+ /// this index to set the target of the branch.
+ pub fn op_bra(&mut self) -> usize {
+ let index = self.next_index();
+ self.operations.push(Operation::Branch(!0));
+ index
+ }
+
+ /// Return the index that will be assigned to the next operation.
+ ///
+ /// This can be passed to `set_target`.
+ #[inline]
+ pub fn next_index(&self) -> usize {
+ self.operations.len()
+ }
+
+    /// Set the target of a `DW_OP_skip` or `DW_OP_bra` operation.
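+    ///
+    /// A short sketch (illustrative only) that selects between two constants with a
+    /// conditional branch:
+    ///
+    /// ```rust
+    /// use gimli::write::Expression;
+    ///
+    /// let mut expression = Expression::new();
+    /// // Condition; a real expression would compute this.
+    /// expression.op_constu(1);
+    /// // If the condition is non-zero, branch to the "then" value.
+    /// let branch = expression.op_bra();
+    /// // "else": push 10, then skip over the "then" value.
+    /// expression.op_constu(10);
+    /// let skip = expression.op_skip();
+    /// // "then": push 20.
+    /// let then_index = expression.next_index();
+    /// expression.set_target(branch, then_index);
+    /// expression.op_constu(20);
+    /// // Both paths join here.
+    /// let end_index = expression.next_index();
+    /// expression.set_target(skip, end_index);
+    /// expression.op(gimli::DW_OP_stack_value);
+    /// ```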
+ pub fn set_target(&mut self, operation: usize, new_target: usize) {
+ debug_assert!(new_target <= self.next_index());
+ debug_assert_ne!(operation, new_target);
+ match self.operations[operation] {
+ Operation::Skip(ref mut target) | Operation::Branch(ref mut target) => {
+ *target = new_target;
+ }
+ _ => unimplemented!(),
+ }
+ }
+
+ /// Add a `DW_OP_call4` operation to the expression.
+ pub fn op_call(&mut self, entry: UnitEntryId) {
+ self.operations.push(Operation::Call(entry));
+ }
+
+ /// Add a `DW_OP_call_ref` operation to the expression.
+ pub fn op_call_ref(&mut self, entry: Reference) {
+ self.operations.push(Operation::CallRef(entry));
+ }
+
+ /// Add a `DW_OP_convert` or `DW_OP_GNU_convert` operation to the expression.
+ ///
+ /// `base` is the DIE of the base type, or `None` for the generic type.
+ pub fn op_convert(&mut self, base: Option<UnitEntryId>) {
+ self.operations.push(Operation::Convert(base));
+ }
+
+ /// Add a `DW_OP_reinterpret` or `DW_OP_GNU_reinterpret` operation to the expression.
+ ///
+ /// `base` is the DIE of the base type, or `None` for the generic type.
+ pub fn op_reinterpret(&mut self, base: Option<UnitEntryId>) {
+ self.operations.push(Operation::Reinterpret(base));
+ }
+
+ /// Add a `DW_OP_entry_value` or `DW_OP_GNU_entry_value` operation to the expression.
+ pub fn op_entry_value(&mut self, expression: Expression) {
+ self.operations.push(Operation::EntryValue(expression));
+ }
+
+ /// Add a `DW_OP_regx` operation to the expression.
+ ///
+ /// This may be emitted as a smaller equivalent operation.
+ pub fn op_reg(&mut self, register: Register) {
+ self.operations.push(Operation::Register(register));
+ }
+
+ /// Add a `DW_OP_implicit_value` operation to the expression.
+ pub fn op_implicit_value(&mut self, data: Box<[u8]>) {
+ self.operations.push(Operation::ImplicitValue(data));
+ }
+
+ /// Add a `DW_OP_implicit_pointer` or `DW_OP_GNU_implicit_pointer` operation to the expression.
+ pub fn op_implicit_pointer(&mut self, entry: Reference, byte_offset: i64) {
+ self.operations
+ .push(Operation::ImplicitPointer { entry, byte_offset });
+ }
+
+ /// Add a `DW_OP_piece` operation to the expression.
+ pub fn op_piece(&mut self, size_in_bytes: u64) {
+ self.operations.push(Operation::Piece { size_in_bytes });
+ }
+
+ /// Add a `DW_OP_bit_piece` operation to the expression.
+ pub fn op_bit_piece(&mut self, size_in_bits: u64, bit_offset: u64) {
+ self.operations.push(Operation::BitPiece {
+ size_in_bits,
+ bit_offset,
+ });
+ }
+
+ /// Add a `DW_OP_GNU_parameter_ref` operation to the expression.
+ pub fn op_gnu_parameter_ref(&mut self, entry: UnitEntryId) {
+ self.operations.push(Operation::ParameterRef(entry));
+ }
+
+ /// Add a `DW_OP_WASM_location 0x0` operation to the expression.
+ pub fn op_wasm_local(&mut self, index: u32) {
+ self.operations.push(Operation::WasmLocal(index));
+ }
+
+ /// Add a `DW_OP_WASM_location 0x1` operation to the expression.
+ pub fn op_wasm_global(&mut self, index: u32) {
+ self.operations.push(Operation::WasmGlobal(index));
+ }
+
+ /// Add a `DW_OP_WASM_location 0x2` operation to the expression.
+ pub fn op_wasm_stack(&mut self, index: u32) {
+ self.operations.push(Operation::WasmStack(index));
+ }
+
+ pub(crate) fn size(&self, encoding: Encoding, unit_offsets: Option<&UnitOffsets>) -> usize {
+ let mut size = 0;
+ for operation in &self.operations {
+ size += operation.size(encoding, unit_offsets);
+ }
+ size
+ }
+
+ pub(crate) fn write<W: Writer>(
+ &self,
+ w: &mut W,
+ mut refs: Option<&mut Vec<DebugInfoReference>>,
+ encoding: Encoding,
+ unit_offsets: Option<&UnitOffsets>,
+ ) -> Result<()> {
+ // TODO: only calculate offsets if needed?
+ let mut offsets = Vec::with_capacity(self.operations.len());
+ let mut offset = w.len();
+ for operation in &self.operations {
+ offsets.push(offset);
+ offset += operation.size(encoding, unit_offsets);
+ }
+ offsets.push(offset);
+ for (operation, offset) in self.operations.iter().zip(offsets.iter().copied()) {
+ let refs = match refs {
+ Some(ref mut refs) => Some(&mut **refs),
+ None => None,
+ };
+ debug_assert_eq!(w.len(), offset);
+ operation.write(w, refs, encoding, unit_offsets, &offsets)?;
+ }
+ Ok(())
+ }
+}
+
+/// A single DWARF operation.
+//
+// This type is intentionally not public so that we can change the
+// representation of expressions as needed.
+//
+// Variants are listed in the order they appear in Section 2.5.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+enum Operation {
+ /// Raw bytecode.
+ ///
+ /// Does not support references.
+ Raw(Vec<u8>),
+ /// An operation that has no explicit operands.
+ ///
+ /// Represents:
+ /// - `DW_OP_drop`, `DW_OP_swap`, `DW_OP_rot`
+ /// - `DW_OP_push_object_address`, `DW_OP_form_tls_address`, `DW_OP_call_frame_cfa`
+ /// - `DW_OP_abs`, `DW_OP_and`, `DW_OP_div`, `DW_OP_minus`, `DW_OP_mod`, `DW_OP_mul`,
+ /// `DW_OP_neg`, `DW_OP_not`, `DW_OP_or`, `DW_OP_plus`, `DW_OP_shl`, `DW_OP_shr`,
+ /// `DW_OP_shra`, `DW_OP_xor`
+ /// - `DW_OP_le`, `DW_OP_ge`, `DW_OP_eq`, `DW_OP_lt`, `DW_OP_gt`, `DW_OP_ne`
+ /// - `DW_OP_nop`
+ /// - `DW_OP_stack_value`
+ Simple(DwOp),
+ /// Relocate the address if needed, and push it on the stack.
+ ///
+ /// Represents `DW_OP_addr`.
+ Address(Address),
+ /// Push an unsigned constant value on the stack.
+ ///
+ /// Represents `DW_OP_constu`.
+ UnsignedConstant(u64),
+ /// Push a signed constant value on the stack.
+ ///
+ /// Represents `DW_OP_consts`.
+ SignedConstant(i64),
+ /* TODO: requires .debug_addr write support
+    /// Read the address at the given index in `.debug_addr`, relocate the address if needed,
+ /// and push it on the stack.
+ ///
+ /// Represents `DW_OP_addrx`.
+ AddressIndex(DebugAddrIndex<Offset>),
+    /// Read the address at the given index in `.debug_addr`, and push it on the stack.
+ /// Do not relocate the address.
+ ///
+ /// Represents `DW_OP_constx`.
+ ConstantIndex(DebugAddrIndex<Offset>),
+ */
+ /// Interpret the value bytes as a constant of a given type, and push it on the stack.
+ ///
+ /// Represents `DW_OP_const_type`.
+ ConstantType(UnitEntryId, Box<[u8]>),
+ /// Compute the frame base (using `DW_AT_frame_base`), add the
+ /// given offset, and then push the resulting sum on the stack.
+ ///
+ /// Represents `DW_OP_fbreg`.
+ FrameOffset(i64),
+ /// Find the contents of the given register, add the offset, and then
+ /// push the resulting sum on the stack.
+ ///
+ /// Represents `DW_OP_bregx`.
+ RegisterOffset(Register, i64),
+ /// Interpret the contents of the given register as a value of the given type,
+ /// and push it on the stack.
+ ///
+ /// Represents `DW_OP_regval_type`.
+ RegisterType(Register, UnitEntryId),
+ /// Copy the item at a stack index and push it on top of the stack.
+ ///
+ /// Represents `DW_OP_pick`, `DW_OP_dup`, and `DW_OP_over`.
+ Pick(u8),
+ /// Pop the topmost value of the stack, dereference it, and push the
+ /// resulting value.
+ ///
+ /// Represents `DW_OP_deref` and `DW_OP_xderef`.
+ Deref {
+ /// True if the dereference operation takes an address space
+ /// argument from the stack; false otherwise.
+ space: bool,
+ },
+ /// Pop the topmost value of the stack, dereference it to obtain a value
+ /// of the given size, and push the resulting value.
+ ///
+ /// Represents `DW_OP_deref_size` and `DW_OP_xderef_size`.
+ DerefSize {
+ /// True if the dereference operation takes an address space
+ /// argument from the stack; false otherwise.
+ space: bool,
+ /// The size of the data to dereference.
+ size: u8,
+ },
+ /// Pop the topmost value of the stack, dereference it to obtain a value
+ /// of the given type, and push the resulting value.
+ ///
+ /// Represents `DW_OP_deref_type` and `DW_OP_xderef_type`.
+ DerefType {
+ /// True if the dereference operation takes an address space
+ /// argument from the stack; false otherwise.
+ space: bool,
+ /// The size of the data to dereference.
+ size: u8,
+        /// The DIE of the base type.
+ base: UnitEntryId,
+ },
+ /// Add an unsigned constant to the topmost value on the stack.
+ ///
+ /// Represents `DW_OP_plus_uconst`.
+ PlusConstant(u64),
+ /// Unconditional branch to the target location.
+ ///
+ /// The value is the index within the expression of the operation to branch to.
+ /// This will be converted to a relative offset when writing.
+ ///
+ /// Represents `DW_OP_skip`.
+ Skip(usize),
+ /// Branch to the target location if the top of stack is nonzero.
+ ///
+ /// The value is the index within the expression of the operation to branch to.
+ /// This will be converted to a relative offset when writing.
+ ///
+ /// Represents `DW_OP_bra`.
+ Branch(usize),
+ /// Evaluate a DWARF expression as a subroutine.
+ ///
+ /// The expression comes from the `DW_AT_location` attribute of the indicated DIE.
+ ///
+ /// Represents `DW_OP_call4`.
+ Call(UnitEntryId),
+ /// Evaluate an external DWARF expression as a subroutine.
+ ///
+ /// The expression comes from the `DW_AT_location` attribute of the indicated DIE,
+ /// which may be in another compilation unit or shared object.
+ ///
+ /// Represents `DW_OP_call_ref`.
+ CallRef(Reference),
+ /// Pop the top stack entry, convert it to a different type, and push it on the stack.
+ ///
+ /// Represents `DW_OP_convert`.
+ Convert(Option<UnitEntryId>),
+ /// Pop the top stack entry, reinterpret the bits in its value as a different type,
+ /// and push it on the stack.
+ ///
+ /// Represents `DW_OP_reinterpret`.
+ Reinterpret(Option<UnitEntryId>),
+ /// Evaluate an expression at the entry to the current subprogram, and push it on the stack.
+ ///
+ /// Represents `DW_OP_entry_value`.
+ EntryValue(Expression),
+ // FIXME: EntryRegister
+ /// Indicate that this piece's location is in the given register.
+ ///
+ /// Completes the piece or expression.
+ ///
+ /// Represents `DW_OP_regx`.
+ Register(Register),
+ /// The object has no location, but has a known constant value.
+ ///
+ /// Completes the piece or expression.
+ ///
+ /// Represents `DW_OP_implicit_value`.
+ ImplicitValue(Box<[u8]>),
+ /// The object is a pointer to a value which has no actual location, such as
+ /// an implicit value or a stack value.
+ ///
+ /// Completes the piece or expression.
+ ///
+ /// Represents `DW_OP_implicit_pointer`.
+ ImplicitPointer {
+ /// The DIE of the value that this is an implicit pointer into.
+ entry: Reference,
+ /// The byte offset into the value that the implicit pointer points to.
+ byte_offset: i64,
+ },
+ /// Terminate a piece.
+ ///
+ /// Represents `DW_OP_piece`.
+ Piece {
+ /// The size of this piece in bytes.
+ size_in_bytes: u64,
+ },
+ /// Terminate a piece with a size in bits.
+ ///
+ /// Represents `DW_OP_bit_piece`.
+ BitPiece {
+ /// The size of this piece in bits.
+ size_in_bits: u64,
+ /// The bit offset of this piece.
+ bit_offset: u64,
+ },
+ /// This represents a parameter that was optimized out.
+ ///
+ /// The entry is the definition of the parameter, and is matched to
+ /// the `DW_TAG_GNU_call_site_parameter` in the caller that also
+ /// points to the same definition of the parameter.
+ ///
+ /// Represents `DW_OP_GNU_parameter_ref`.
+ ParameterRef(UnitEntryId),
+ /// The index of a local in the currently executing function.
+ ///
+ /// Represents `DW_OP_WASM_location 0x00`.
+ WasmLocal(u32),
+ /// The index of a global.
+ ///
+ /// Represents `DW_OP_WASM_location 0x01`.
+ WasmGlobal(u32),
+ /// The index of an item on the operand stack.
+ ///
+ /// Represents `DW_OP_WASM_location 0x02`.
+ WasmStack(u32),
+}
+
+impl Operation {
+ fn size(&self, encoding: Encoding, unit_offsets: Option<&UnitOffsets>) -> usize {
+ let base_size = |base| {
+ // Errors are handled during writes.
+ match unit_offsets {
+ Some(offsets) => uleb128_size(offsets.unit_offset(base)),
+ None => 0,
+ }
+ };
+ 1 + match *self {
+ Operation::Raw(ref bytecode) => return bytecode.len(),
+ Operation::Simple(_) => 0,
+ Operation::Address(_) => encoding.address_size as usize,
+ Operation::UnsignedConstant(value) => {
+ if value < 32 {
+ 0
+ } else {
+ uleb128_size(value)
+ }
+ }
+ Operation::SignedConstant(value) => sleb128_size(value),
+ Operation::ConstantType(base, ref value) => base_size(base) + 1 + value.len(),
+ Operation::FrameOffset(offset) => sleb128_size(offset),
+ Operation::RegisterOffset(register, offset) => {
+ if register.0 < 32 {
+ sleb128_size(offset)
+ } else {
+ uleb128_size(register.0.into()) + sleb128_size(offset)
+ }
+ }
+ Operation::RegisterType(register, base) => {
+ uleb128_size(register.0.into()) + base_size(base)
+ }
+ Operation::Pick(index) => {
+ if index > 1 {
+ 1
+ } else {
+ 0
+ }
+ }
+ Operation::Deref { .. } => 0,
+ Operation::DerefSize { .. } => 1,
+ Operation::DerefType { base, .. } => 1 + base_size(base),
+ Operation::PlusConstant(value) => uleb128_size(value),
+ Operation::Skip(_) => 2,
+ Operation::Branch(_) => 2,
+ Operation::Call(_) => 4,
+ Operation::CallRef(_) => encoding.format.word_size() as usize,
+ Operation::Convert(base) => match base {
+ Some(base) => base_size(base),
+ None => 1,
+ },
+ Operation::Reinterpret(base) => match base {
+ Some(base) => base_size(base),
+ None => 1,
+ },
+ Operation::EntryValue(ref expression) => {
+ let length = expression.size(encoding, unit_offsets);
+ uleb128_size(length as u64) + length
+ }
+ Operation::Register(register) => {
+ if register.0 < 32 {
+ 0
+ } else {
+ uleb128_size(register.0.into())
+ }
+ }
+ Operation::ImplicitValue(ref data) => uleb128_size(data.len() as u64) + data.len(),
+ Operation::ImplicitPointer { byte_offset, .. } => {
+ encoding.format.word_size() as usize + sleb128_size(byte_offset)
+ }
+ Operation::Piece { size_in_bytes } => uleb128_size(size_in_bytes),
+ Operation::BitPiece {
+ size_in_bits,
+ bit_offset,
+ } => uleb128_size(size_in_bits) + uleb128_size(bit_offset),
+ Operation::ParameterRef(_) => 4,
+ Operation::WasmLocal(index)
+ | Operation::WasmGlobal(index)
+ | Operation::WasmStack(index) => 1 + uleb128_size(index.into()),
+ }
+ }
+
+ pub(crate) fn write<W: Writer>(
+ &self,
+ w: &mut W,
+ refs: Option<&mut Vec<DebugInfoReference>>,
+ encoding: Encoding,
+ unit_offsets: Option<&UnitOffsets>,
+ offsets: &[usize],
+ ) -> Result<()> {
+ let entry_offset = |entry| match unit_offsets {
+ Some(offsets) => {
+ let offset = offsets.unit_offset(entry);
+ if offset == 0 {
+ Err(Error::UnsupportedExpressionForwardReference)
+ } else {
+ Ok(offset)
+ }
+ }
+ None => Err(Error::UnsupportedCfiExpressionReference),
+ };
+ match *self {
+ Operation::Raw(ref bytecode) => w.write(bytecode)?,
+ Operation::Simple(opcode) => w.write_u8(opcode.0)?,
+ Operation::Address(address) => {
+ w.write_u8(constants::DW_OP_addr.0)?;
+ w.write_address(address, encoding.address_size)?;
+ }
+ Operation::UnsignedConstant(value) => {
+ if value < 32 {
+ w.write_u8(constants::DW_OP_lit0.0 + value as u8)?;
+ } else {
+ w.write_u8(constants::DW_OP_constu.0)?;
+ w.write_uleb128(value)?;
+ }
+ }
+ Operation::SignedConstant(value) => {
+ w.write_u8(constants::DW_OP_consts.0)?;
+ w.write_sleb128(value)?;
+ }
+ Operation::ConstantType(base, ref value) => {
+ if encoding.version >= 5 {
+ w.write_u8(constants::DW_OP_const_type.0)?;
+ } else {
+ w.write_u8(constants::DW_OP_GNU_const_type.0)?;
+ }
+ w.write_uleb128(entry_offset(base)?)?;
+ w.write_udata(value.len() as u64, 1)?;
+ w.write(&value)?;
+ }
+ Operation::FrameOffset(offset) => {
+ w.write_u8(constants::DW_OP_fbreg.0)?;
+ w.write_sleb128(offset)?;
+ }
+ Operation::RegisterOffset(register, offset) => {
+ if register.0 < 32 {
+ w.write_u8(constants::DW_OP_breg0.0 + register.0 as u8)?;
+ } else {
+ w.write_u8(constants::DW_OP_bregx.0)?;
+ w.write_uleb128(register.0.into())?;
+ }
+ w.write_sleb128(offset)?;
+ }
+ Operation::RegisterType(register, base) => {
+ if encoding.version >= 5 {
+ w.write_u8(constants::DW_OP_regval_type.0)?;
+ } else {
+ w.write_u8(constants::DW_OP_GNU_regval_type.0)?;
+ }
+ w.write_uleb128(register.0.into())?;
+ w.write_uleb128(entry_offset(base)?)?;
+ }
+ Operation::Pick(index) => match index {
+ 0 => w.write_u8(constants::DW_OP_dup.0)?,
+ 1 => w.write_u8(constants::DW_OP_over.0)?,
+ _ => {
+ w.write_u8(constants::DW_OP_pick.0)?;
+ w.write_u8(index)?;
+ }
+ },
+ Operation::Deref { space } => {
+ if space {
+ w.write_u8(constants::DW_OP_xderef.0)?;
+ } else {
+ w.write_u8(constants::DW_OP_deref.0)?;
+ }
+ }
+ Operation::DerefSize { space, size } => {
+ if space {
+ w.write_u8(constants::DW_OP_xderef_size.0)?;
+ } else {
+ w.write_u8(constants::DW_OP_deref_size.0)?;
+ }
+ w.write_u8(size)?;
+ }
+ Operation::DerefType { space, size, base } => {
+ if space {
+ w.write_u8(constants::DW_OP_xderef_type.0)?;
+ } else {
+ if encoding.version >= 5 {
+ w.write_u8(constants::DW_OP_deref_type.0)?;
+ } else {
+ w.write_u8(constants::DW_OP_GNU_deref_type.0)?;
+ }
+ }
+ w.write_u8(size)?;
+ w.write_uleb128(entry_offset(base)?)?;
+ }
+ Operation::PlusConstant(value) => {
+ w.write_u8(constants::DW_OP_plus_uconst.0)?;
+ w.write_uleb128(value)?;
+ }
+ Operation::Skip(target) => {
+ w.write_u8(constants::DW_OP_skip.0)?;
+ let offset = offsets[target] as i64 - (w.len() as i64 + 2);
+ w.write_sdata(offset, 2)?;
+ }
+ Operation::Branch(target) => {
+ w.write_u8(constants::DW_OP_bra.0)?;
+ let offset = offsets[target] as i64 - (w.len() as i64 + 2);
+ w.write_sdata(offset, 2)?;
+ }
+ Operation::Call(entry) => {
+ w.write_u8(constants::DW_OP_call4.0)?;
+ // TODO: this probably won't work in practice, because we may
+ // only know the offsets of base type DIEs at this point.
+ w.write_udata(entry_offset(entry)?, 4)?;
+ }
+ Operation::CallRef(entry) => {
+ w.write_u8(constants::DW_OP_call_ref.0)?;
+ let size = encoding.format.word_size();
+ match entry {
+ Reference::Symbol(symbol) => w.write_reference(symbol, size)?,
+ Reference::Entry(unit, entry) => {
+ let refs = refs.ok_or(Error::InvalidReference)?;
+ refs.push(DebugInfoReference {
+ offset: w.len(),
+ unit,
+ entry,
+ size,
+ });
+ w.write_udata(0, size)?;
+ }
+ }
+ }
+ Operation::Convert(base) => {
+ if encoding.version >= 5 {
+ w.write_u8(constants::DW_OP_convert.0)?;
+ } else {
+ w.write_u8(constants::DW_OP_GNU_convert.0)?;
+ }
+ match base {
+ Some(base) => w.write_uleb128(entry_offset(base)?)?,
+ None => w.write_u8(0)?,
+ }
+ }
+ Operation::Reinterpret(base) => {
+ if encoding.version >= 5 {
+ w.write_u8(constants::DW_OP_reinterpret.0)?;
+ } else {
+ w.write_u8(constants::DW_OP_GNU_reinterpret.0)?;
+ }
+ match base {
+ Some(base) => w.write_uleb128(entry_offset(base)?)?,
+ None => w.write_u8(0)?,
+ }
+ }
+ Operation::EntryValue(ref expression) => {
+ if encoding.version >= 5 {
+ w.write_u8(constants::DW_OP_entry_value.0)?;
+ } else {
+ w.write_u8(constants::DW_OP_GNU_entry_value.0)?;
+ }
+ let length = expression.size(encoding, unit_offsets);
+ w.write_uleb128(length as u64)?;
+ expression.write(w, refs, encoding, unit_offsets)?;
+ }
+ Operation::Register(register) => {
+ if register.0 < 32 {
+ w.write_u8(constants::DW_OP_reg0.0 + register.0 as u8)?;
+ } else {
+ w.write_u8(constants::DW_OP_regx.0)?;
+ w.write_uleb128(register.0.into())?;
+ }
+ }
+ Operation::ImplicitValue(ref data) => {
+ w.write_u8(constants::DW_OP_implicit_value.0)?;
+ w.write_uleb128(data.len() as u64)?;
+ w.write(&data)?;
+ }
+ Operation::ImplicitPointer { entry, byte_offset } => {
+ if encoding.version >= 5 {
+ w.write_u8(constants::DW_OP_implicit_pointer.0)?;
+ } else {
+ w.write_u8(constants::DW_OP_GNU_implicit_pointer.0)?;
+ }
+ let size = if encoding.version == 2 {
+ encoding.address_size
+ } else {
+ encoding.format.word_size()
+ };
+ match entry {
+ Reference::Symbol(symbol) => {
+ w.write_reference(symbol, size)?;
+ }
+ Reference::Entry(unit, entry) => {
+ let refs = refs.ok_or(Error::InvalidReference)?;
+ refs.push(DebugInfoReference {
+ offset: w.len(),
+ unit,
+ entry,
+ size,
+ });
+ w.write_udata(0, size)?;
+ }
+ }
+ w.write_sleb128(byte_offset)?;
+ }
+ Operation::Piece { size_in_bytes } => {
+ w.write_u8(constants::DW_OP_piece.0)?;
+ w.write_uleb128(size_in_bytes)?;
+ }
+ Operation::BitPiece {
+ size_in_bits,
+ bit_offset,
+ } => {
+ w.write_u8(constants::DW_OP_bit_piece.0)?;
+ w.write_uleb128(size_in_bits)?;
+ w.write_uleb128(bit_offset)?;
+ }
+ Operation::ParameterRef(entry) => {
+ w.write_u8(constants::DW_OP_GNU_parameter_ref.0)?;
+ w.write_udata(entry_offset(entry)?, 4)?;
+ }
+ Operation::WasmLocal(index) => {
+ w.write(&[constants::DW_OP_WASM_location.0, 0])?;
+ w.write_uleb128(index.into())?;
+ }
+ Operation::WasmGlobal(index) => {
+ w.write(&[constants::DW_OP_WASM_location.0, 1])?;
+ w.write_uleb128(index.into())?;
+ }
+ Operation::WasmStack(index) => {
+ w.write(&[constants::DW_OP_WASM_location.0, 2])?;
+ w.write_uleb128(index.into())?;
+ }
+ }
+ Ok(())
+ }
+}
+
+#[cfg(feature = "read")]
+pub(crate) mod convert {
+ use super::*;
+ use crate::common::UnitSectionOffset;
+ use crate::read::{self, Reader};
+ use crate::write::{ConvertError, ConvertResult, UnitEntryId, UnitId};
+ use std::collections::HashMap;
+
+ impl Expression {
+ /// Create an expression from the input expression.
+ pub fn from<R: Reader<Offset = usize>>(
+ from_expression: read::Expression<R>,
+ encoding: Encoding,
+ dwarf: Option<&read::Dwarf<R>>,
+ unit: Option<&read::Unit<R>>,
+ entry_ids: Option<&HashMap<UnitSectionOffset, (UnitId, UnitEntryId)>>,
+ convert_address: &dyn Fn(u64) -> Option<Address>,
+ ) -> ConvertResult<Expression> {
+ let convert_unit_offset = |offset: read::UnitOffset| -> ConvertResult<_> {
+ let entry_ids = entry_ids.ok_or(ConvertError::UnsupportedOperation)?;
+ let unit = unit.ok_or(ConvertError::UnsupportedOperation)?;
+ let id = entry_ids
+ .get(&offset.to_unit_section_offset(unit))
+ .ok_or(ConvertError::InvalidUnitRef)?;
+ Ok(id.1)
+ };
+ let convert_debug_info_offset = |offset| -> ConvertResult<_> {
+ // TODO: support relocations
+ let entry_ids = entry_ids.ok_or(ConvertError::UnsupportedOperation)?;
+ let id = entry_ids
+ .get(&UnitSectionOffset::DebugInfoOffset(offset))
+ .ok_or(ConvertError::InvalidDebugInfoRef)?;
+ Ok(Reference::Entry(id.0, id.1))
+ };
+
+ // Calculate offsets for use in branch/skip operations.
+ let mut offsets = Vec::new();
+ let mut offset = 0;
+ let mut from_operations = from_expression.clone().operations(encoding);
+ while let Some(_) = from_operations.next()? {
+ offsets.push(offset);
+ offset = from_operations.offset_from(&from_expression);
+ }
+ offsets.push(from_expression.0.len());
+
+ let mut from_operations = from_expression.clone().operations(encoding);
+ let mut operations = Vec::new();
+ while let Some(from_operation) = from_operations.next()? {
+ let operation = match from_operation {
+ read::Operation::Deref {
+ base_type,
+ size,
+ space,
+ } => {
+ if base_type.0 != 0 {
+ let base = convert_unit_offset(base_type)?;
+ Operation::DerefType { space, size, base }
+ } else if size != encoding.address_size {
+ Operation::DerefSize { space, size }
+ } else {
+ Operation::Deref { space }
+ }
+ }
+ read::Operation::Drop => Operation::Simple(constants::DW_OP_drop),
+ read::Operation::Pick { index } => Operation::Pick(index),
+ read::Operation::Swap => Operation::Simple(constants::DW_OP_swap),
+ read::Operation::Rot => Operation::Simple(constants::DW_OP_rot),
+ read::Operation::Abs => Operation::Simple(constants::DW_OP_abs),
+ read::Operation::And => Operation::Simple(constants::DW_OP_and),
+ read::Operation::Div => Operation::Simple(constants::DW_OP_div),
+ read::Operation::Minus => Operation::Simple(constants::DW_OP_minus),
+ read::Operation::Mod => Operation::Simple(constants::DW_OP_mod),
+ read::Operation::Mul => Operation::Simple(constants::DW_OP_mul),
+ read::Operation::Neg => Operation::Simple(constants::DW_OP_neg),
+ read::Operation::Not => Operation::Simple(constants::DW_OP_not),
+ read::Operation::Or => Operation::Simple(constants::DW_OP_or),
+ read::Operation::Plus => Operation::Simple(constants::DW_OP_plus),
+ read::Operation::PlusConstant { value } => Operation::PlusConstant(value),
+ read::Operation::Shl => Operation::Simple(constants::DW_OP_shl),
+ read::Operation::Shr => Operation::Simple(constants::DW_OP_shr),
+ read::Operation::Shra => Operation::Simple(constants::DW_OP_shra),
+ read::Operation::Xor => Operation::Simple(constants::DW_OP_xor),
+ read::Operation::Eq => Operation::Simple(constants::DW_OP_eq),
+ read::Operation::Ge => Operation::Simple(constants::DW_OP_ge),
+ read::Operation::Gt => Operation::Simple(constants::DW_OP_gt),
+ read::Operation::Le => Operation::Simple(constants::DW_OP_le),
+ read::Operation::Lt => Operation::Simple(constants::DW_OP_lt),
+ read::Operation::Ne => Operation::Simple(constants::DW_OP_ne),
+ read::Operation::Bra { target } => {
+ let offset = from_operations
+ .offset_from(&from_expression)
+ .wrapping_add(i64::from(target) as usize);
+ let index = offsets
+ .binary_search(&offset)
+ .map_err(|_| ConvertError::InvalidBranchTarget)?;
+ Operation::Branch(index)
+ }
+ read::Operation::Skip { target } => {
+ let offset = from_operations
+ .offset_from(&from_expression)
+ .wrapping_add(i64::from(target) as usize);
+ let index = offsets
+ .binary_search(&offset)
+ .map_err(|_| ConvertError::InvalidBranchTarget)?;
+ Operation::Skip(index)
+ }
+ read::Operation::UnsignedConstant { value } => {
+ Operation::UnsignedConstant(value)
+ }
+ read::Operation::SignedConstant { value } => Operation::SignedConstant(value),
+ read::Operation::Register { register } => Operation::Register(register),
+ read::Operation::RegisterOffset {
+ register,
+ offset,
+ base_type,
+ } => {
+ if base_type.0 != 0 {
+ Operation::RegisterType(register, convert_unit_offset(base_type)?)
+ } else {
+ Operation::RegisterOffset(register, offset)
+ }
+ }
+ read::Operation::FrameOffset { offset } => Operation::FrameOffset(offset),
+ read::Operation::Nop => Operation::Simple(constants::DW_OP_nop),
+ read::Operation::PushObjectAddress => {
+ Operation::Simple(constants::DW_OP_push_object_address)
+ }
+ read::Operation::Call { offset } => match offset {
+ read::DieReference::UnitRef(offset) => {
+ Operation::Call(convert_unit_offset(offset)?)
+ }
+ read::DieReference::DebugInfoRef(offset) => {
+ Operation::CallRef(convert_debug_info_offset(offset)?)
+ }
+ },
+ read::Operation::TLS => Operation::Simple(constants::DW_OP_form_tls_address),
+ read::Operation::CallFrameCFA => {
+ Operation::Simple(constants::DW_OP_call_frame_cfa)
+ }
+ read::Operation::Piece {
+ size_in_bits,
+ bit_offset: None,
+ } => Operation::Piece {
+ size_in_bytes: size_in_bits / 8,
+ },
+ read::Operation::Piece {
+ size_in_bits,
+ bit_offset: Some(bit_offset),
+ } => Operation::BitPiece {
+ size_in_bits,
+ bit_offset,
+ },
+ read::Operation::ImplicitValue { data } => {
+ Operation::ImplicitValue(data.to_slice()?.into_owned().into())
+ }
+ read::Operation::StackValue => Operation::Simple(constants::DW_OP_stack_value),
+ read::Operation::ImplicitPointer { value, byte_offset } => {
+ let entry = convert_debug_info_offset(value)?;
+ Operation::ImplicitPointer { entry, byte_offset }
+ }
+ read::Operation::EntryValue { expression } => {
+ let expression = Expression::from(
+ read::Expression(expression),
+ encoding,
+ dwarf,
+ unit,
+ entry_ids,
+ convert_address,
+ )?;
+ Operation::EntryValue(expression)
+ }
+ read::Operation::ParameterRef { offset } => {
+ let entry = convert_unit_offset(offset)?;
+ Operation::ParameterRef(entry)
+ }
+ read::Operation::Address { address } => {
+ let address =
+ convert_address(address).ok_or(ConvertError::InvalidAddress)?;
+ Operation::Address(address)
+ }
+ read::Operation::AddressIndex { index } => {
+ let dwarf = dwarf.ok_or(ConvertError::UnsupportedOperation)?;
+ let unit = unit.ok_or(ConvertError::UnsupportedOperation)?;
+ let val = dwarf.address(unit, index)?;
+ let address = convert_address(val).ok_or(ConvertError::InvalidAddress)?;
+ Operation::Address(address)
+ }
+ read::Operation::ConstantIndex { index } => {
+ let dwarf = dwarf.ok_or(ConvertError::UnsupportedOperation)?;
+ let unit = unit.ok_or(ConvertError::UnsupportedOperation)?;
+ let val = dwarf.address(unit, index)?;
+ Operation::UnsignedConstant(val)
+ }
+ read::Operation::TypedLiteral { base_type, value } => {
+ let entry = convert_unit_offset(base_type)?;
+ Operation::ConstantType(entry, value.to_slice()?.into_owned().into())
+ }
+ read::Operation::Convert { base_type } => {
+ if base_type.0 == 0 {
+ Operation::Convert(None)
+ } else {
+ let entry = convert_unit_offset(base_type)?;
+ Operation::Convert(Some(entry))
+ }
+ }
+ read::Operation::Reinterpret { base_type } => {
+ if base_type.0 == 0 {
+ Operation::Reinterpret(None)
+ } else {
+ let entry = convert_unit_offset(base_type)?;
+ Operation::Reinterpret(Some(entry))
+ }
+ }
+ read::Operation::WasmLocal { index } => Operation::WasmLocal(index),
+ read::Operation::WasmGlobal { index } => Operation::WasmGlobal(index),
+ read::Operation::WasmStack { index } => Operation::WasmStack(index),
+ };
+ operations.push(operation);
+ }
+ Ok(Expression { operations })
+ }
+ }
+}
+
+#[cfg(test)]
+#[cfg(feature = "read")]
+mod tests {
+ use super::*;
+ use crate::common::{
+ DebugAbbrevOffset, DebugAddrBase, DebugInfoOffset, DebugLocListsBase, DebugRngListsBase,
+ DebugStrOffsetsBase, Format, SectionId,
+ };
+ use crate::read;
+ use crate::write::{
+ DebugLineStrOffsets, DebugStrOffsets, EndianVec, LineProgram, Sections, Unit, UnitTable,
+ };
+ use crate::LittleEndian;
+ use std::collections::HashMap;
+
+ #[test]
+ fn test_operation() {
+ for &version in &[3, 4, 5] {
+ for &address_size in &[4, 8] {
+ for &format in &[Format::Dwarf32, Format::Dwarf64] {
+ let encoding = Encoding {
+ format,
+ version,
+ address_size,
+ };
+
+ let mut units = UnitTable::default();
+ let unit_id = units.add(Unit::new(encoding, LineProgram::none()));
+ let unit = units.get_mut(unit_id);
+ let entry_id = unit.add(unit.root(), constants::DW_TAG_base_type);
+ let reference = Reference::Entry(unit_id, entry_id);
+
+ let mut sections = Sections::new(EndianVec::new(LittleEndian));
+ let debug_line_str_offsets = DebugLineStrOffsets::none();
+ let debug_str_offsets = DebugStrOffsets::none();
+ let debug_info_offsets = units
+ .write(&mut sections, &debug_line_str_offsets, &debug_str_offsets)
+ .unwrap();
+ let unit_offsets = debug_info_offsets.unit_offsets(unit_id);
+ let debug_info_offset = unit_offsets.debug_info_offset(entry_id);
+ let entry_offset =
+ read::UnitOffset(unit_offsets.unit_offset(entry_id) as usize);
+
+ let mut reg_expression = Expression::new();
+ reg_expression.op_reg(Register(23));
+
+ let operations: &[(&dyn Fn(&mut Expression), Operation, read::Operation<_>)] =
+ &[
+ (
+ &|x| x.op_deref(),
+ Operation::Deref { space: false },
+ read::Operation::Deref {
+ base_type: read::UnitOffset(0),
+ size: address_size,
+ space: false,
+ },
+ ),
+ (
+ &|x| x.op_xderef(),
+ Operation::Deref { space: true },
+ read::Operation::Deref {
+ base_type: read::UnitOffset(0),
+ size: address_size,
+ space: true,
+ },
+ ),
+ (
+ &|x| x.op_deref_size(2),
+ Operation::DerefSize {
+ space: false,
+ size: 2,
+ },
+ read::Operation::Deref {
+ base_type: read::UnitOffset(0),
+ size: 2,
+ space: false,
+ },
+ ),
+ (
+ &|x| x.op_xderef_size(2),
+ Operation::DerefSize {
+ space: true,
+ size: 2,
+ },
+ read::Operation::Deref {
+ base_type: read::UnitOffset(0),
+ size: 2,
+ space: true,
+ },
+ ),
+ (
+ &|x| x.op_deref_type(2, entry_id),
+ Operation::DerefType {
+ space: false,
+ size: 2,
+ base: entry_id,
+ },
+ read::Operation::Deref {
+ base_type: entry_offset,
+ size: 2,
+ space: false,
+ },
+ ),
+ (
+ &|x| x.op_xderef_type(2, entry_id),
+ Operation::DerefType {
+ space: true,
+ size: 2,
+ base: entry_id,
+ },
+ read::Operation::Deref {
+ base_type: entry_offset,
+ size: 2,
+ space: true,
+ },
+ ),
+ (
+ &|x| x.op(constants::DW_OP_drop),
+ Operation::Simple(constants::DW_OP_drop),
+ read::Operation::Drop,
+ ),
+ (
+ &|x| x.op_pick(0),
+ Operation::Pick(0),
+ read::Operation::Pick { index: 0 },
+ ),
+ (
+ &|x| x.op_pick(1),
+ Operation::Pick(1),
+ read::Operation::Pick { index: 1 },
+ ),
+ (
+ &|x| x.op_pick(2),
+ Operation::Pick(2),
+ read::Operation::Pick { index: 2 },
+ ),
+ (
+ &|x| x.op(constants::DW_OP_swap),
+ Operation::Simple(constants::DW_OP_swap),
+ read::Operation::Swap,
+ ),
+ (
+ &|x| x.op(constants::DW_OP_rot),
+ Operation::Simple(constants::DW_OP_rot),
+ read::Operation::Rot,
+ ),
+ (
+ &|x| x.op(constants::DW_OP_abs),
+ Operation::Simple(constants::DW_OP_abs),
+ read::Operation::Abs,
+ ),
+ (
+ &|x| x.op(constants::DW_OP_and),
+ Operation::Simple(constants::DW_OP_and),
+ read::Operation::And,
+ ),
+ (
+ &|x| x.op(constants::DW_OP_div),
+ Operation::Simple(constants::DW_OP_div),
+ read::Operation::Div,
+ ),
+ (
+ &|x| x.op(constants::DW_OP_minus),
+ Operation::Simple(constants::DW_OP_minus),
+ read::Operation::Minus,
+ ),
+ (
+ &|x| x.op(constants::DW_OP_mod),
+ Operation::Simple(constants::DW_OP_mod),
+ read::Operation::Mod,
+ ),
+ (
+ &|x| x.op(constants::DW_OP_mul),
+ Operation::Simple(constants::DW_OP_mul),
+ read::Operation::Mul,
+ ),
+ (
+ &|x| x.op(constants::DW_OP_neg),
+ Operation::Simple(constants::DW_OP_neg),
+ read::Operation::Neg,
+ ),
+ (
+ &|x| x.op(constants::DW_OP_not),
+ Operation::Simple(constants::DW_OP_not),
+ read::Operation::Not,
+ ),
+ (
+ &|x| x.op(constants::DW_OP_or),
+ Operation::Simple(constants::DW_OP_or),
+ read::Operation::Or,
+ ),
+ (
+ &|x| x.op(constants::DW_OP_plus),
+ Operation::Simple(constants::DW_OP_plus),
+ read::Operation::Plus,
+ ),
+ (
+ &|x| x.op_plus_uconst(23),
+ Operation::PlusConstant(23),
+ read::Operation::PlusConstant { value: 23 },
+ ),
+ (
+ &|x| x.op(constants::DW_OP_shl),
+ Operation::Simple(constants::DW_OP_shl),
+ read::Operation::Shl,
+ ),
+ (
+ &|x| x.op(constants::DW_OP_shr),
+ Operation::Simple(constants::DW_OP_shr),
+ read::Operation::Shr,
+ ),
+ (
+ &|x| x.op(constants::DW_OP_shra),
+ Operation::Simple(constants::DW_OP_shra),
+ read::Operation::Shra,
+ ),
+ (
+ &|x| x.op(constants::DW_OP_xor),
+ Operation::Simple(constants::DW_OP_xor),
+ read::Operation::Xor,
+ ),
+ (
+ &|x| x.op(constants::DW_OP_eq),
+ Operation::Simple(constants::DW_OP_eq),
+ read::Operation::Eq,
+ ),
+ (
+ &|x| x.op(constants::DW_OP_ge),
+ Operation::Simple(constants::DW_OP_ge),
+ read::Operation::Ge,
+ ),
+ (
+ &|x| x.op(constants::DW_OP_gt),
+ Operation::Simple(constants::DW_OP_gt),
+ read::Operation::Gt,
+ ),
+ (
+ &|x| x.op(constants::DW_OP_le),
+ Operation::Simple(constants::DW_OP_le),
+ read::Operation::Le,
+ ),
+ (
+ &|x| x.op(constants::DW_OP_lt),
+ Operation::Simple(constants::DW_OP_lt),
+ read::Operation::Lt,
+ ),
+ (
+ &|x| x.op(constants::DW_OP_ne),
+ Operation::Simple(constants::DW_OP_ne),
+ read::Operation::Ne,
+ ),
+ (
+ &|x| x.op_constu(23),
+ Operation::UnsignedConstant(23),
+ read::Operation::UnsignedConstant { value: 23 },
+ ),
+ (
+ &|x| x.op_consts(-23),
+ Operation::SignedConstant(-23),
+ read::Operation::SignedConstant { value: -23 },
+ ),
+ (
+ &|x| x.op_reg(Register(23)),
+ Operation::Register(Register(23)),
+ read::Operation::Register {
+ register: Register(23),
+ },
+ ),
+ (
+ &|x| x.op_reg(Register(123)),
+ Operation::Register(Register(123)),
+ read::Operation::Register {
+ register: Register(123),
+ },
+ ),
+ (
+ &|x| x.op_breg(Register(23), 34),
+ Operation::RegisterOffset(Register(23), 34),
+ read::Operation::RegisterOffset {
+ register: Register(23),
+ offset: 34,
+ base_type: read::UnitOffset(0),
+ },
+ ),
+ (
+ &|x| x.op_breg(Register(123), 34),
+ Operation::RegisterOffset(Register(123), 34),
+ read::Operation::RegisterOffset {
+ register: Register(123),
+ offset: 34,
+ base_type: read::UnitOffset(0),
+ },
+ ),
+ (
+ &|x| x.op_regval_type(Register(23), entry_id),
+ Operation::RegisterType(Register(23), entry_id),
+ read::Operation::RegisterOffset {
+ register: Register(23),
+ offset: 0,
+ base_type: entry_offset,
+ },
+ ),
+ (
+ &|x| x.op_fbreg(34),
+ Operation::FrameOffset(34),
+ read::Operation::FrameOffset { offset: 34 },
+ ),
+ (
+ &|x| x.op(constants::DW_OP_nop),
+ Operation::Simple(constants::DW_OP_nop),
+ read::Operation::Nop,
+ ),
+ (
+ &|x| x.op(constants::DW_OP_push_object_address),
+ Operation::Simple(constants::DW_OP_push_object_address),
+ read::Operation::PushObjectAddress,
+ ),
+ (
+ &|x| x.op_call(entry_id),
+ Operation::Call(entry_id),
+ read::Operation::Call {
+ offset: read::DieReference::UnitRef(entry_offset),
+ },
+ ),
+ (
+ &|x| x.op_call_ref(reference),
+ Operation::CallRef(reference),
+ read::Operation::Call {
+ offset: read::DieReference::DebugInfoRef(debug_info_offset),
+ },
+ ),
+ (
+ &|x| x.op(constants::DW_OP_form_tls_address),
+ Operation::Simple(constants::DW_OP_form_tls_address),
+ read::Operation::TLS,
+ ),
+ (
+ &|x| x.op(constants::DW_OP_call_frame_cfa),
+ Operation::Simple(constants::DW_OP_call_frame_cfa),
+ read::Operation::CallFrameCFA,
+ ),
+ (
+ &|x| x.op_piece(23),
+ Operation::Piece { size_in_bytes: 23 },
+ read::Operation::Piece {
+ size_in_bits: 23 * 8,
+ bit_offset: None,
+ },
+ ),
+ (
+ &|x| x.op_bit_piece(23, 34),
+ Operation::BitPiece {
+ size_in_bits: 23,
+ bit_offset: 34,
+ },
+ read::Operation::Piece {
+ size_in_bits: 23,
+ bit_offset: Some(34),
+ },
+ ),
+ (
+ &|x| x.op_implicit_value(vec![23].into()),
+ Operation::ImplicitValue(vec![23].into()),
+ read::Operation::ImplicitValue {
+ data: read::EndianSlice::new(&[23], LittleEndian),
+ },
+ ),
+ (
+ &|x| x.op(constants::DW_OP_stack_value),
+ Operation::Simple(constants::DW_OP_stack_value),
+ read::Operation::StackValue,
+ ),
+ (
+ &|x| x.op_implicit_pointer(reference, 23),
+ Operation::ImplicitPointer {
+ entry: reference,
+ byte_offset: 23,
+ },
+ read::Operation::ImplicitPointer {
+ value: debug_info_offset,
+ byte_offset: 23,
+ },
+ ),
+ (
+ &|x| x.op_entry_value(reg_expression.clone()),
+ Operation::EntryValue(reg_expression.clone()),
+ read::Operation::EntryValue {
+ expression: read::EndianSlice::new(
+ &[constants::DW_OP_reg23.0],
+ LittleEndian,
+ ),
+ },
+ ),
+ (
+ &|x| x.op_gnu_parameter_ref(entry_id),
+ Operation::ParameterRef(entry_id),
+ read::Operation::ParameterRef {
+ offset: entry_offset,
+ },
+ ),
+ (
+ &|x| x.op_addr(Address::Constant(23)),
+ Operation::Address(Address::Constant(23)),
+ read::Operation::Address { address: 23 },
+ ),
+ (
+ &|x| x.op_const_type(entry_id, vec![23].into()),
+ Operation::ConstantType(entry_id, vec![23].into()),
+ read::Operation::TypedLiteral {
+ base_type: entry_offset,
+ value: read::EndianSlice::new(&[23], LittleEndian),
+ },
+ ),
+ (
+ &|x| x.op_convert(None),
+ Operation::Convert(None),
+ read::Operation::Convert {
+ base_type: read::UnitOffset(0),
+ },
+ ),
+ (
+ &|x| x.op_convert(Some(entry_id)),
+ Operation::Convert(Some(entry_id)),
+ read::Operation::Convert {
+ base_type: entry_offset,
+ },
+ ),
+ (
+ &|x| x.op_reinterpret(None),
+ Operation::Reinterpret(None),
+ read::Operation::Reinterpret {
+ base_type: read::UnitOffset(0),
+ },
+ ),
+ (
+ &|x| x.op_reinterpret(Some(entry_id)),
+ Operation::Reinterpret(Some(entry_id)),
+ read::Operation::Reinterpret {
+ base_type: entry_offset,
+ },
+ ),
+ (
+ &|x| x.op_wasm_local(1000),
+ Operation::WasmLocal(1000),
+ read::Operation::WasmLocal { index: 1000 },
+ ),
+ (
+ &|x| x.op_wasm_global(1000),
+ Operation::WasmGlobal(1000),
+ read::Operation::WasmGlobal { index: 1000 },
+ ),
+ (
+ &|x| x.op_wasm_stack(1000),
+ Operation::WasmStack(1000),
+ read::Operation::WasmStack { index: 1000 },
+ ),
+ ];
+
+ let mut expression = Expression::new();
+ let start_index = expression.next_index();
+ for (f, o, _) in operations {
+ f(&mut expression);
+ assert_eq!(expression.operations.last(), Some(o));
+ }
+
+ let bra_index = expression.op_bra();
+ let skip_index = expression.op_skip();
+ expression.op(constants::DW_OP_nop);
+ let end_index = expression.next_index();
+ expression.set_target(bra_index, start_index);
+ expression.set_target(skip_index, end_index);
+
+ let mut w = EndianVec::new(LittleEndian);
+ let mut refs = Vec::new();
+ expression
+ .write(&mut w, Some(&mut refs), encoding, Some(&unit_offsets))
+ .unwrap();
+ for r in &refs {
+ assert_eq!(r.unit, unit_id);
+ assert_eq!(r.entry, entry_id);
+ w.write_offset_at(
+ r.offset,
+ debug_info_offset.0,
+ SectionId::DebugInfo,
+ r.size,
+ )
+ .unwrap();
+ }
+
+ let read_expression =
+ read::Expression(read::EndianSlice::new(w.slice(), LittleEndian));
+ let mut read_operations = read_expression.operations(encoding);
+ for (_, _, operation) in operations {
+ assert_eq!(read_operations.next(), Ok(Some(*operation)));
+ }
+
+ // 4 = DW_OP_skip + i16 + DW_OP_nop
+ assert_eq!(
+ read_operations.next(),
+ Ok(Some(read::Operation::Bra {
+ target: -(w.len() as i16) + 4
+ }))
+ );
+ // 1 = DW_OP_nop
+ assert_eq!(
+ read_operations.next(),
+ Ok(Some(read::Operation::Skip { target: 1 }))
+ );
+ assert_eq!(read_operations.next(), Ok(Some(read::Operation::Nop)));
+ assert_eq!(read_operations.next(), Ok(None));
+
+ // Fake the unit.
+ let unit = read::Unit {
+ header: read::UnitHeader::new(
+ encoding,
+ 0,
+ read::UnitType::Compilation,
+ DebugAbbrevOffset(0),
+ DebugInfoOffset(0).into(),
+ read::EndianSlice::new(&[], LittleEndian),
+ ),
+ abbreviations: read::Abbreviations::default(),
+ name: None,
+ comp_dir: None,
+ low_pc: 0,
+ str_offsets_base: DebugStrOffsetsBase(0),
+ addr_base: DebugAddrBase(0),
+ loclists_base: DebugLocListsBase(0),
+ rnglists_base: DebugRngListsBase(0),
+ line_program: None,
+ dwo_id: None,
+ };
+
+ let mut entry_ids = HashMap::new();
+ entry_ids.insert(debug_info_offset.into(), (unit_id, entry_id));
+ let convert_expression = Expression::from(
+ read_expression,
+ encoding,
+ None, /* dwarf */
+ Some(&unit),
+ Some(&entry_ids),
+ &|address| Some(Address::Constant(address)),
+ )
+ .unwrap();
+ let mut convert_operations = convert_expression.operations.iter();
+ for (_, operation, _) in operations {
+ assert_eq!(convert_operations.next(), Some(operation));
+ }
+ assert_eq!(
+ convert_operations.next(),
+ Some(&Operation::Branch(start_index))
+ );
+ assert_eq!(convert_operations.next(), Some(&Operation::Skip(end_index)));
+ assert_eq!(
+ convert_operations.next(),
+ Some(&Operation::Simple(constants::DW_OP_nop))
+ );
+ }
+ }
+ }
+ }
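+
+    // A minimal round-trip sketch: an expression that contains no DIE
+    // references (here `DW_OP_fbreg` and `DW_OP_plus_uconst`) can be written
+    // with `refs` and `unit_offsets` both set to `None`.
+    #[test]
+    fn test_operation_simple_roundtrip() {
+        let encoding = Encoding {
+            format: Format::Dwarf32,
+            version: 4,
+            address_size: 8,
+        };
+
+        let mut expression = Expression::new();
+        expression.op_fbreg(-8);
+        expression.op_plus_uconst(16);
+
+        let mut w = EndianVec::new(LittleEndian);
+        expression.write(&mut w, None, encoding, None).unwrap();
+
+        let read_expression =
+            read::Expression(read::EndianSlice::new(w.slice(), LittleEndian));
+        let mut read_operations = read_expression.operations(encoding);
+        assert_eq!(
+            read_operations.next(),
+            Ok(Some(read::Operation::FrameOffset { offset: -8 }))
+        );
+        assert_eq!(
+            read_operations.next(),
+            Ok(Some(read::Operation::PlusConstant { value: 16 }))
+        );
+        assert_eq!(read_operations.next(), Ok(None));
+    }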
+}
diff --git a/vendor/gimli-0.26.2/src/write/range.rs b/vendor/gimli-0.26.2/src/write/range.rs
new file mode 100644
index 000000000..b44ce1b7b
--- /dev/null
+++ b/vendor/gimli-0.26.2/src/write/range.rs
@@ -0,0 +1,415 @@
+use alloc::vec::Vec;
+use indexmap::IndexSet;
+use std::ops::{Deref, DerefMut};
+
+use crate::common::{Encoding, RangeListsOffset, SectionId};
+use crate::write::{Address, BaseId, Error, Result, Section, Sections, Writer};
+
+define_section!(
+ DebugRanges,
+ RangeListsOffset,
+ "A writable `.debug_ranges` section."
+);
+define_section!(
+ DebugRngLists,
+ RangeListsOffset,
+ "A writable `.debug_rnglists` section."
+);
+
+define_offsets!(
+ RangeListOffsets: RangeListId => RangeListsOffset,
+ "The section offsets of a series of range lists within the `.debug_ranges` or `.debug_rnglists` sections."
+);
+
+define_id!(
+ RangeListId,
+ "An identifier for a range list in a `RangeListTable`."
+);
+
+/// A table of range lists that will be stored in a `.debug_ranges` or `.debug_rnglists` section.
+#[derive(Debug, Default)]
+pub struct RangeListTable {
+ base_id: BaseId,
+ ranges: IndexSet<RangeList>,
+}
+
+impl RangeListTable {
+ /// Add a range list to the table.
+ pub fn add(&mut self, range_list: RangeList) -> RangeListId {
+ let (index, _) = self.ranges.insert_full(range_list);
+ RangeListId::new(self.base_id, index)
+ }
+
+ /// Write the range list table to the appropriate section for the given DWARF version.
+ pub(crate) fn write<W: Writer>(
+ &self,
+ sections: &mut Sections<W>,
+ encoding: Encoding,
+ ) -> Result<RangeListOffsets> {
+ if self.ranges.is_empty() {
+ return Ok(RangeListOffsets::none());
+ }
+
+ match encoding.version {
+ 2..=4 => self.write_ranges(&mut sections.debug_ranges, encoding.address_size),
+ 5 => self.write_rnglists(&mut sections.debug_rnglists, encoding),
+ _ => Err(Error::UnsupportedVersion(encoding.version)),
+ }
+ }
+
+ /// Write the range list table to the `.debug_ranges` section.
+ fn write_ranges<W: Writer>(
+ &self,
+ w: &mut DebugRanges<W>,
+ address_size: u8,
+ ) -> Result<RangeListOffsets> {
+ let mut offsets = Vec::new();
+ for range_list in self.ranges.iter() {
+ offsets.push(w.offset());
+ for range in &range_list.0 {
+ // Note that we must ensure none of the ranges have both begin == 0 and end == 0.
+ // We do this by ensuring that begin != end, which is a bit more restrictive
+ // than required, but still seems reasonable.
+ match *range {
+ Range::BaseAddress { address } => {
+ let marker = !0 >> (64 - address_size * 8);
+ w.write_udata(marker, address_size)?;
+ w.write_address(address, address_size)?;
+ }
+ Range::OffsetPair { begin, end } => {
+ if begin == end {
+ return Err(Error::InvalidRange);
+ }
+ w.write_udata(begin, address_size)?;
+ w.write_udata(end, address_size)?;
+ }
+ Range::StartEnd { begin, end } => {
+ if begin == end {
+ return Err(Error::InvalidRange);
+ }
+ w.write_address(begin, address_size)?;
+ w.write_address(end, address_size)?;
+ }
+ Range::StartLength { begin, length } => {
+ let end = match begin {
+ Address::Constant(begin) => Address::Constant(begin + length),
+ Address::Symbol { symbol, addend } => Address::Symbol {
+ symbol,
+ addend: addend + length as i64,
+ },
+ };
+ if begin == end {
+ return Err(Error::InvalidRange);
+ }
+ w.write_address(begin, address_size)?;
+ w.write_address(end, address_size)?;
+ }
+ }
+ }
+ w.write_udata(0, address_size)?;
+ w.write_udata(0, address_size)?;
+ }
+ Ok(RangeListOffsets {
+ base_id: self.base_id,
+ offsets,
+ })
+ }
+
+ /// Write the range list table to the `.debug_rnglists` section.
+ fn write_rnglists<W: Writer>(
+ &self,
+ w: &mut DebugRngLists<W>,
+ encoding: Encoding,
+ ) -> Result<RangeListOffsets> {
+ let mut offsets = Vec::new();
+
+ if encoding.version != 5 {
+ return Err(Error::NeedVersion(5));
+ }
+
+ let length_offset = w.write_initial_length(encoding.format)?;
+ let length_base = w.len();
+
+ w.write_u16(encoding.version)?;
+ w.write_u8(encoding.address_size)?;
+ w.write_u8(0)?; // segment_selector_size
+ w.write_u32(0)?; // offset_entry_count (when set to zero DW_FORM_rnglistx can't be used, see section 7.28)
+ // FIXME implement DW_FORM_rnglistx writing and implement the offset entry list
+
+ for range_list in self.ranges.iter() {
+ offsets.push(w.offset());
+ for range in &range_list.0 {
+ match *range {
+ Range::BaseAddress { address } => {
+ w.write_u8(crate::constants::DW_RLE_base_address.0)?;
+ w.write_address(address, encoding.address_size)?;
+ }
+ Range::OffsetPair { begin, end } => {
+ w.write_u8(crate::constants::DW_RLE_offset_pair.0)?;
+ w.write_uleb128(begin)?;
+ w.write_uleb128(end)?;
+ }
+ Range::StartEnd { begin, end } => {
+ w.write_u8(crate::constants::DW_RLE_start_end.0)?;
+ w.write_address(begin, encoding.address_size)?;
+ w.write_address(end, encoding.address_size)?;
+ }
+ Range::StartLength { begin, length } => {
+ w.write_u8(crate::constants::DW_RLE_start_length.0)?;
+ w.write_address(begin, encoding.address_size)?;
+ w.write_uleb128(length)?;
+ }
+ }
+ }
+
+ w.write_u8(crate::constants::DW_RLE_end_of_list.0)?;
+ }
+
+ let length = (w.len() - length_base) as u64;
+ w.write_initial_length_at(length_offset, length, encoding.format)?;
+
+ Ok(RangeListOffsets {
+ base_id: self.base_id,
+ offsets,
+ })
+ }
+}
+
+/// A range list that will be stored in a `.debug_ranges` or `.debug_rnglists` section.
+#[derive(Clone, Debug, Eq, PartialEq, Hash)]
+pub struct RangeList(pub Vec<Range>);
+
+/// A single range.
+#[derive(Clone, Debug, Eq, PartialEq, Hash)]
+pub enum Range {
+ /// DW_RLE_base_address
+ BaseAddress {
+ /// Base address.
+ address: Address,
+ },
+ /// DW_RLE_offset_pair
+ OffsetPair {
+ /// Start of range relative to base address.
+ begin: u64,
+ /// End of range relative to base address.
+ end: u64,
+ },
+ /// DW_RLE_start_end
+ StartEnd {
+ /// Start of range.
+ begin: Address,
+ /// End of range.
+ end: Address,
+ },
+ /// DW_RLE_start_length
+ StartLength {
+ /// Start of range.
+ begin: Address,
+ /// Length of range.
+ length: u64,
+ },
+}
+
+#[cfg(feature = "read")]
+mod convert {
+ use super::*;
+
+ use crate::read::{self, Reader};
+ use crate::write::{ConvertError, ConvertResult, ConvertUnitContext};
+
+ impl RangeList {
+        /// Create a range list by reading the data from the given range list iterator.
+ pub(crate) fn from<R: Reader<Offset = usize>>(
+ mut from: read::RawRngListIter<R>,
+ context: &ConvertUnitContext<R>,
+ ) -> ConvertResult<Self> {
+ let mut have_base_address = context.base_address != Address::Constant(0);
+ let convert_address =
+ |x| (context.convert_address)(x).ok_or(ConvertError::InvalidAddress);
+ let mut ranges = Vec::new();
+ while let Some(from_range) = from.next()? {
+ let range = match from_range {
+ read::RawRngListEntry::AddressOrOffsetPair { begin, end } => {
+ // These were parsed as addresses, even if they are offsets.
+ let begin = convert_address(begin)?;
+ let end = convert_address(end)?;
+ match (begin, end) {
+ (Address::Constant(begin_offset), Address::Constant(end_offset)) => {
+ if have_base_address {
+ Range::OffsetPair {
+ begin: begin_offset,
+ end: end_offset,
+ }
+ } else {
+ Range::StartEnd { begin, end }
+ }
+ }
+ _ => {
+ if have_base_address {
+ // At least one of begin/end is an address, but we also have
+ // a base address. Adding addresses is undefined.
+ return Err(ConvertError::InvalidRangeRelativeAddress);
+ }
+ Range::StartEnd { begin, end }
+ }
+ }
+ }
+ read::RawRngListEntry::BaseAddress { addr } => {
+ have_base_address = true;
+ let address = convert_address(addr)?;
+ Range::BaseAddress { address }
+ }
+ read::RawRngListEntry::BaseAddressx { addr } => {
+ have_base_address = true;
+ let address = convert_address(context.dwarf.address(context.unit, addr)?)?;
+ Range::BaseAddress { address }
+ }
+ read::RawRngListEntry::StartxEndx { begin, end } => {
+ let begin = convert_address(context.dwarf.address(context.unit, begin)?)?;
+ let end = convert_address(context.dwarf.address(context.unit, end)?)?;
+ Range::StartEnd { begin, end }
+ }
+ read::RawRngListEntry::StartxLength { begin, length } => {
+ let begin = convert_address(context.dwarf.address(context.unit, begin)?)?;
+ Range::StartLength { begin, length }
+ }
+ read::RawRngListEntry::OffsetPair { begin, end } => {
+ Range::OffsetPair { begin, end }
+ }
+ read::RawRngListEntry::StartEnd { begin, end } => {
+ let begin = convert_address(begin)?;
+ let end = convert_address(end)?;
+ Range::StartEnd { begin, end }
+ }
+ read::RawRngListEntry::StartLength { begin, length } => {
+ let begin = convert_address(begin)?;
+ Range::StartLength { begin, length }
+ }
+ };
+                // Filter out empty ranges.
+ match range {
+ Range::StartLength { length, .. } if length == 0 => continue,
+ Range::StartEnd { begin, end, .. } if begin == end => continue,
+ Range::OffsetPair { begin, end, .. } if begin == end => continue,
+ _ => (),
+ }
+ ranges.push(range);
+ }
+ Ok(RangeList(ranges))
+ }
+ }
+}
+
+#[cfg(test)]
+#[cfg(feature = "read")]
+mod tests {
+ use super::*;
+ use crate::common::{
+ DebugAbbrevOffset, DebugAddrBase, DebugInfoOffset, DebugLocListsBase, DebugRngListsBase,
+ DebugStrOffsetsBase, Format,
+ };
+ use crate::read;
+ use crate::write::{
+ ConvertUnitContext, EndianVec, LineStringTable, LocationListTable, Range, RangeListTable,
+ StringTable,
+ };
+ use crate::LittleEndian;
+ use std::collections::HashMap;
+
+ #[test]
+ fn test_range() {
+ let mut line_strings = LineStringTable::default();
+ let mut strings = StringTable::default();
+
+ for &version in &[2, 3, 4, 5] {
+ for &address_size in &[4, 8] {
+ for &format in &[Format::Dwarf32, Format::Dwarf64] {
+ let encoding = Encoding {
+ format,
+ version,
+ address_size,
+ };
+
+ let mut range_list = RangeList(vec![
+ Range::StartLength {
+ begin: Address::Constant(6666),
+ length: 7777,
+ },
+ Range::StartEnd {
+ begin: Address::Constant(4444),
+ end: Address::Constant(5555),
+ },
+ Range::BaseAddress {
+ address: Address::Constant(1111),
+ },
+ Range::OffsetPair {
+ begin: 2222,
+ end: 3333,
+ },
+ ]);
+
+ let mut ranges = RangeListTable::default();
+ let range_list_id = ranges.add(range_list.clone());
+
+ let mut sections = Sections::new(EndianVec::new(LittleEndian));
+ let range_list_offsets = ranges.write(&mut sections, encoding).unwrap();
+
+ let read_debug_ranges =
+ read::DebugRanges::new(sections.debug_ranges.slice(), LittleEndian);
+ let read_debug_rnglists =
+ read::DebugRngLists::new(sections.debug_rnglists.slice(), LittleEndian);
+ let read_ranges = read::RangeLists::new(read_debug_ranges, read_debug_rnglists);
+ let offset = range_list_offsets.get(range_list_id);
+ let read_range_list = read_ranges.raw_ranges(offset, encoding).unwrap();
+
+ let dwarf = read::Dwarf {
+ ranges: read_ranges,
+ ..Default::default()
+ };
+ let unit = read::Unit {
+ header: read::UnitHeader::new(
+ encoding,
+ 0,
+ read::UnitType::Compilation,
+ DebugAbbrevOffset(0),
+ DebugInfoOffset(0).into(),
+ read::EndianSlice::default(),
+ ),
+ abbreviations: read::Abbreviations::default(),
+ name: None,
+ comp_dir: None,
+ low_pc: 0,
+ str_offsets_base: DebugStrOffsetsBase(0),
+ addr_base: DebugAddrBase(0),
+ loclists_base: DebugLocListsBase(0),
+ rnglists_base: DebugRngListsBase(0),
+ line_program: None,
+ dwo_id: None,
+ };
+ let context = ConvertUnitContext {
+ dwarf: &dwarf,
+ unit: &unit,
+ line_strings: &mut line_strings,
+ strings: &mut strings,
+ ranges: &mut ranges,
+ locations: &mut LocationListTable::default(),
+ convert_address: &|address| Some(Address::Constant(address)),
+ base_address: Address::Constant(0),
+ line_program_offset: None,
+ line_program_files: Vec::new(),
+ entry_ids: &HashMap::new(),
+ };
+ let convert_range_list = RangeList::from(read_range_list, &context).unwrap();
+
+ if version <= 4 {
+ range_list.0[0] = Range::StartEnd {
+ begin: Address::Constant(6666),
+ end: Address::Constant(6666 + 7777),
+ };
+ }
+ assert_eq!(range_list, convert_range_list);
+ }
+ }
+ }
+ }
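+
+    // A small sketch of the `begin != end` restriction in `write_ranges`: for
+    // DWARF versions 2-4, writing an empty range is rejected so that no entry
+    // can look like the `0, 0` end-of-list marker.
+    #[test]
+    fn test_range_empty_start_end() {
+        let encoding = Encoding {
+            format: Format::Dwarf32,
+            version: 4,
+            address_size: 8,
+        };
+        let mut ranges = RangeListTable::default();
+        ranges.add(RangeList(vec![Range::StartEnd {
+            begin: Address::Constant(0x1000),
+            end: Address::Constant(0x1000),
+        }]));
+        let mut sections = Sections::new(EndianVec::new(LittleEndian));
+        assert!(matches!(
+            ranges.write(&mut sections, encoding),
+            Err(Error::InvalidRange)
+        ));
+    }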
+}
diff --git a/vendor/gimli-0.26.2/src/write/section.rs b/vendor/gimli-0.26.2/src/write/section.rs
new file mode 100644
index 000000000..e8f3378cd
--- /dev/null
+++ b/vendor/gimli-0.26.2/src/write/section.rs
@@ -0,0 +1,172 @@
+use std::ops::DerefMut;
+use std::result;
+use std::vec::Vec;
+
+use crate::common::SectionId;
+use crate::write::{
+ DebugAbbrev, DebugFrame, DebugInfo, DebugInfoReference, DebugLine, DebugLineStr, DebugLoc,
+ DebugLocLists, DebugRanges, DebugRngLists, DebugStr, EhFrame, Writer,
+};
+
+macro_rules! define_section {
+ ($name:ident, $offset:ident, $docs:expr) => {
+ #[doc=$docs]
+ #[derive(Debug, Default)]
+ pub struct $name<W: Writer>(pub W);
+
+ impl<W: Writer> $name<W> {
+ /// Return the offset of the next write.
+ pub fn offset(&self) -> $offset {
+ $offset(self.len())
+ }
+ }
+
+ impl<W: Writer> From<W> for $name<W> {
+ #[inline]
+ fn from(w: W) -> Self {
+ $name(w)
+ }
+ }
+
+ impl<W: Writer> Deref for $name<W> {
+ type Target = W;
+
+ #[inline]
+ fn deref(&self) -> &W {
+ &self.0
+ }
+ }
+
+ impl<W: Writer> DerefMut for $name<W> {
+ #[inline]
+ fn deref_mut(&mut self) -> &mut W {
+ &mut self.0
+ }
+ }
+
+ impl<W: Writer> Section<W> for $name<W> {
+ #[inline]
+ fn id(&self) -> SectionId {
+ SectionId::$name
+ }
+ }
+ };
+}
+
+/// Functionality common to all writable DWARF sections.
+pub trait Section<W: Writer>: DerefMut<Target = W> {
+ /// Returns the DWARF section kind for this type.
+ fn id(&self) -> SectionId;
+
+ /// Returns the ELF section name for this type.
+ fn name(&self) -> &'static str {
+ self.id().name()
+ }
+}
+
+/// All of the writable DWARF sections.
+#[derive(Debug, Default)]
+pub struct Sections<W: Writer> {
+ /// The `.debug_abbrev` section.
+ pub debug_abbrev: DebugAbbrev<W>,
+ /// The `.debug_info` section.
+ pub debug_info: DebugInfo<W>,
+ /// The `.debug_line` section.
+ pub debug_line: DebugLine<W>,
+ /// The `.debug_line_str` section.
+ pub debug_line_str: DebugLineStr<W>,
+ /// The `.debug_ranges` section.
+ pub debug_ranges: DebugRanges<W>,
+ /// The `.debug_rnglists` section.
+ pub debug_rnglists: DebugRngLists<W>,
+ /// The `.debug_loc` section.
+ pub debug_loc: DebugLoc<W>,
+ /// The `.debug_loclists` section.
+ pub debug_loclists: DebugLocLists<W>,
+ /// The `.debug_str` section.
+ pub debug_str: DebugStr<W>,
+ /// The `.debug_frame` section.
+ pub debug_frame: DebugFrame<W>,
+ /// The `.eh_frame` section.
+ pub eh_frame: EhFrame<W>,
+ /// Unresolved references in the `.debug_info` section.
+ pub(crate) debug_info_refs: Vec<DebugInfoReference>,
+ /// Unresolved references in the `.debug_loc` section.
+ pub(crate) debug_loc_refs: Vec<DebugInfoReference>,
+ /// Unresolved references in the `.debug_loclists` section.
+ pub(crate) debug_loclists_refs: Vec<DebugInfoReference>,
+}
+
+impl<W: Writer + Clone> Sections<W> {
+ /// Create a new `Sections` using clones of the given `section`.
+ pub fn new(section: W) -> Self {
+ Sections {
+ debug_abbrev: DebugAbbrev(section.clone()),
+ debug_info: DebugInfo(section.clone()),
+ debug_line: DebugLine(section.clone()),
+ debug_line_str: DebugLineStr(section.clone()),
+ debug_ranges: DebugRanges(section.clone()),
+ debug_rnglists: DebugRngLists(section.clone()),
+ debug_loc: DebugLoc(section.clone()),
+ debug_loclists: DebugLocLists(section.clone()),
+ debug_str: DebugStr(section.clone()),
+ debug_frame: DebugFrame(section.clone()),
+ eh_frame: EhFrame(section.clone()),
+ debug_info_refs: Vec::new(),
+ debug_loc_refs: Vec::new(),
+ debug_loclists_refs: Vec::new(),
+ }
+ }
+}
+
+impl<W: Writer> Sections<W> {
+ /// For each section, call `f` once with a shared reference.
+ pub fn for_each<F, E>(&self, mut f: F) -> result::Result<(), E>
+ where
+ F: FnMut(SectionId, &W) -> result::Result<(), E>,
+ {
+ macro_rules! f {
+ ($s:expr) => {
+ f($s.id(), &$s)
+ };
+ }
+ // Ordered so that earlier sections do not reference later sections.
+ f!(self.debug_abbrev)?;
+ f!(self.debug_str)?;
+ f!(self.debug_line_str)?;
+ f!(self.debug_line)?;
+ f!(self.debug_ranges)?;
+ f!(self.debug_rnglists)?;
+ f!(self.debug_loc)?;
+ f!(self.debug_loclists)?;
+ f!(self.debug_info)?;
+ f!(self.debug_frame)?;
+ f!(self.eh_frame)?;
+ Ok(())
+ }
+
+ /// For each section, call `f` once with a mutable reference.
+ pub fn for_each_mut<F, E>(&mut self, mut f: F) -> result::Result<(), E>
+ where
+ F: FnMut(SectionId, &mut W) -> result::Result<(), E>,
+ {
+ macro_rules! f {
+ ($s:expr) => {
+ f($s.id(), &mut $s)
+ };
+ }
+ // Ordered so that earlier sections do not reference later sections.
+ f!(self.debug_abbrev)?;
+ f!(self.debug_str)?;
+ f!(self.debug_line_str)?;
+ f!(self.debug_line)?;
+ f!(self.debug_ranges)?;
+ f!(self.debug_rnglists)?;
+ f!(self.debug_loc)?;
+ f!(self.debug_loclists)?;
+ f!(self.debug_info)?;
+ f!(self.debug_frame)?;
+ f!(self.eh_frame)?;
+ Ok(())
+ }
+}
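+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::write::EndianVec;
+    use crate::LittleEndian;
+
+    // A minimal sketch of `for_each`, using the `EndianVec`/`LittleEndian`
+    // writer from this crate: each of the eleven sections is visited exactly
+    // once, ordered so that earlier sections do not reference later ones
+    // (e.g. `.debug_abbrev` comes before `.debug_info`).
+    #[test]
+    fn test_for_each_order() {
+        let sections = Sections::new(EndianVec::new(LittleEndian));
+        let mut ids = Vec::new();
+        sections
+            .for_each(|id, _| -> Result<(), ()> {
+                ids.push(id);
+                Ok(())
+            })
+            .unwrap();
+        assert_eq!(ids.len(), 11);
+        assert_eq!(ids[0], SectionId::DebugAbbrev);
+        assert_eq!(ids[ids.len() - 1], SectionId::EhFrame);
+    }
+}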
diff --git a/vendor/gimli-0.26.2/src/write/str.rs b/vendor/gimli-0.26.2/src/write/str.rs
new file mode 100644
index 000000000..83285c035
--- /dev/null
+++ b/vendor/gimli-0.26.2/src/write/str.rs
@@ -0,0 +1,172 @@
+use alloc::vec::Vec;
+use indexmap::IndexSet;
+use std::ops::{Deref, DerefMut};
+
+use crate::common::{DebugLineStrOffset, DebugStrOffset, SectionId};
+use crate::write::{BaseId, Result, Section, Writer};
+
+// Requirements:
+// - values are `[u8]`, null bytes are not allowed
+// - insertion returns a fixed id
+// - inserting a duplicate returns the id of the existing value
+// - able to convert an id to a section offset
+// Optional?
+// - able to get an existing value given an id
+//
+// Limitations of current implementation (using IndexSet):
+// - inserting requires either an allocation for duplicates,
+// or a double lookup for non-duplicates
+// - doesn't preserve offsets when updating an existing `.debug_str` section
+//
+// Possible changes:
+// - calculate offsets as we add values, and use that as the id.
+// This would avoid the need for DebugStrOffsets but would make it
+// hard to implement `get`.
+macro_rules! define_string_table {
+ ($name:ident, $id:ident, $section:ident, $offsets:ident, $docs:expr) => {
+ #[doc=$docs]
+ #[derive(Debug, Default)]
+ pub struct $name {
+ base_id: BaseId,
+ strings: IndexSet<Vec<u8>>,
+ }
+
+ impl $name {
+ /// Add a string to the string table and return its id.
+ ///
+ /// If the string already exists, then return the id of the existing string.
+ ///
+ /// # Panics
+ ///
+ /// Panics if `bytes` contains a null byte.
+ pub fn add<T>(&mut self, bytes: T) -> $id
+ where
+ T: Into<Vec<u8>>,
+ {
+ let bytes = bytes.into();
+ assert!(!bytes.contains(&0));
+ let (index, _) = self.strings.insert_full(bytes);
+ $id::new(self.base_id, index)
+ }
+
+ /// Return the number of strings in the table.
+ #[inline]
+ pub fn count(&self) -> usize {
+ self.strings.len()
+ }
+
+ /// Get a reference to a string in the table.
+ ///
+ /// # Panics
+ ///
+ /// Panics if `id` is invalid.
+ pub fn get(&self, id: $id) -> &[u8] {
+ debug_assert_eq!(self.base_id, id.base_id);
+ self.strings.get_index(id.index).map(Vec::as_slice).unwrap()
+ }
+
+            /// Write the string table to the given section.
+ ///
+ /// Returns the offsets at which the strings are written.
+ pub fn write<W: Writer>(&self, w: &mut $section<W>) -> Result<$offsets> {
+ let mut offsets = Vec::new();
+ for bytes in self.strings.iter() {
+ offsets.push(w.offset());
+ w.write(bytes)?;
+ w.write_u8(0)?;
+ }
+
+ Ok($offsets {
+ base_id: self.base_id,
+ offsets,
+ })
+ }
+ }
+ };
+}
+
+define_id!(StringId, "An identifier for a string in a `StringTable`.");
+
+define_string_table!(
+ StringTable,
+ StringId,
+ DebugStr,
+ DebugStrOffsets,
+ "A table of strings that will be stored in a `.debug_str` section."
+);
+
+define_section!(DebugStr, DebugStrOffset, "A writable `.debug_str` section.");
+
+define_offsets!(
+ DebugStrOffsets: StringId => DebugStrOffset,
+ "The section offsets of all strings within a `.debug_str` section."
+);
+
+define_id!(
+ LineStringId,
+ "An identifier for a string in a `LineStringTable`."
+);
+
+define_string_table!(
+ LineStringTable,
+ LineStringId,
+ DebugLineStr,
+ DebugLineStrOffsets,
+ "A table of strings that will be stored in a `.debug_line_str` section."
+);
+
+define_section!(
+ DebugLineStr,
+ DebugLineStrOffset,
+ "A writable `.debug_line_str` section."
+);
+
+define_offsets!(
+ DebugLineStrOffsets: LineStringId => DebugLineStrOffset,
+ "The section offsets of all strings within a `.debug_line_str` section."
+);
+
+#[cfg(test)]
+#[cfg(feature = "read")]
+mod tests {
+ use super::*;
+ use crate::read;
+ use crate::write::EndianVec;
+ use crate::LittleEndian;
+
+ #[test]
+ fn test_string_table() {
+ let mut strings = StringTable::default();
+ assert_eq!(strings.count(), 0);
+ let id1 = strings.add(&b"one"[..]);
+ let id2 = strings.add(&b"two"[..]);
+ assert_eq!(strings.add(&b"one"[..]), id1);
+ assert_eq!(strings.add(&b"two"[..]), id2);
+ assert_eq!(strings.get(id1), &b"one"[..]);
+ assert_eq!(strings.get(id2), &b"two"[..]);
+ assert_eq!(strings.count(), 2);
+
+ let mut debug_str = DebugStr::from(EndianVec::new(LittleEndian));
+ let offsets = strings.write(&mut debug_str).unwrap();
+ assert_eq!(debug_str.slice(), b"one\0two\0");
+ assert_eq!(offsets.get(id1), DebugStrOffset(0));
+ assert_eq!(offsets.get(id2), DebugStrOffset(4));
+ assert_eq!(offsets.count(), 2);
+ }
+
+ #[test]
+ fn test_string_table_read() {
+ let mut strings = StringTable::default();
+ let id1 = strings.add(&b"one"[..]);
+ let id2 = strings.add(&b"two"[..]);
+
+ let mut debug_str = DebugStr::from(EndianVec::new(LittleEndian));
+ let offsets = strings.write(&mut debug_str).unwrap();
+
+ let read_debug_str = read::DebugStr::new(debug_str.slice(), LittleEndian);
+ let str1 = read_debug_str.get_str(offsets.get(id1)).unwrap();
+ let str2 = read_debug_str.get_str(offsets.get(id2)).unwrap();
+ assert_eq!(str1.slice(), &b"one"[..]);
+ assert_eq!(str2.slice(), &b"two"[..]);
+ }
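+
+    // The same pattern applies to `LineStringTable`, which targets the
+    // `.debug_line_str` section instead; a minimal sketch:
+    #[test]
+    fn test_line_string_table() {
+        let mut strings = LineStringTable::default();
+        let id1 = strings.add(&b"dir/file.c"[..]);
+        assert_eq!(strings.add(&b"dir/file.c"[..]), id1);
+
+        let mut debug_line_str = DebugLineStr::from(EndianVec::new(LittleEndian));
+        let offsets = strings.write(&mut debug_line_str).unwrap();
+        assert_eq!(debug_line_str.slice(), b"dir/file.c\0");
+        assert_eq!(offsets.get(id1), DebugLineStrOffset(0));
+    }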
+}
diff --git a/vendor/gimli-0.26.2/src/write/unit.rs b/vendor/gimli-0.26.2/src/write/unit.rs
new file mode 100644
index 000000000..bf85ff421
--- /dev/null
+++ b/vendor/gimli-0.26.2/src/write/unit.rs
@@ -0,0 +1,3157 @@
+use alloc::vec::Vec;
+use std::ops::{Deref, DerefMut};
+use std::{slice, usize};
+
+use crate::common::{
+ DebugAbbrevOffset, DebugInfoOffset, DebugLineOffset, DebugMacinfoOffset, DebugMacroOffset,
+ DebugStrOffset, DebugTypeSignature, DwoId, Encoding, Format, SectionId,
+};
+use crate::constants;
+use crate::leb128::write::{sleb128_size, uleb128_size};
+use crate::write::{
+ Abbreviation, AbbreviationTable, Address, AttributeSpecification, BaseId, DebugLineStrOffsets,
+ DebugStrOffsets, Error, Expression, FileId, LineProgram, LineStringId, LocationListId,
+ LocationListOffsets, LocationListTable, RangeListId, RangeListOffsets, RangeListTable,
+ Reference, Result, Section, Sections, StringId, Writer,
+};
+
+define_id!(UnitId, "An identifier for a unit in a `UnitTable`.");
+
+define_id!(UnitEntryId, "An identifier for an entry in a `Unit`.");
+
+/// A table of units that will be stored in the `.debug_info` section.
+#[derive(Debug, Default)]
+pub struct UnitTable {
+ base_id: BaseId,
+ units: Vec<Unit>,
+}
+
+impl UnitTable {
+ /// Create a new unit and add it to the table.
+ ///
+    /// The unit's `address_size` (from its `Encoding`) must be in bytes.
+ ///
+ /// Returns the `UnitId` of the new unit.
+ #[inline]
+ pub fn add(&mut self, unit: Unit) -> UnitId {
+ let id = UnitId::new(self.base_id, self.units.len());
+ self.units.push(unit);
+ id
+ }
+
+ /// Return the number of units.
+ #[inline]
+ pub fn count(&self) -> usize {
+ self.units.len()
+ }
+
+ /// Return the id of a unit.
+ ///
+ /// # Panics
+ ///
+ /// Panics if `index >= self.count()`.
+ #[inline]
+ pub fn id(&self, index: usize) -> UnitId {
+ assert!(index < self.count());
+ UnitId::new(self.base_id, index)
+ }
+
+ /// Get a reference to a unit.
+ ///
+ /// # Panics
+ ///
+ /// Panics if `id` is invalid.
+ #[inline]
+ pub fn get(&self, id: UnitId) -> &Unit {
+ debug_assert_eq!(self.base_id, id.base_id);
+ &self.units[id.index]
+ }
+
+ /// Get a mutable reference to a unit.
+ ///
+ /// # Panics
+ ///
+ /// Panics if `id` is invalid.
+ #[inline]
+ pub fn get_mut(&mut self, id: UnitId) -> &mut Unit {
+ debug_assert_eq!(self.base_id, id.base_id);
+ &mut self.units[id.index]
+ }
+
+ /// Write the units to the given sections.
+ ///
+ /// `strings` must contain the `.debug_str` offsets of the corresponding
+ /// `StringTable`.
+ pub fn write<W: Writer>(
+ &mut self,
+ sections: &mut Sections<W>,
+ line_strings: &DebugLineStrOffsets,
+ strings: &DebugStrOffsets,
+ ) -> Result<DebugInfoOffsets> {
+ let mut offsets = DebugInfoOffsets {
+ base_id: self.base_id,
+ units: Vec::new(),
+ };
+ for unit in &mut self.units {
+ // TODO: maybe share abbreviation tables
+ let abbrev_offset = sections.debug_abbrev.offset();
+ let mut abbrevs = AbbreviationTable::default();
+
+ offsets.units.push(unit.write(
+ sections,
+ abbrev_offset,
+ &mut abbrevs,
+ line_strings,
+ strings,
+ )?);
+
+ abbrevs.write(&mut sections.debug_abbrev)?;
+ }
+
+ write_section_refs(
+ &mut sections.debug_info_refs,
+ &mut sections.debug_info.0,
+ &offsets,
+ )?;
+ write_section_refs(
+ &mut sections.debug_loc_refs,
+ &mut sections.debug_loc.0,
+ &offsets,
+ )?;
+ write_section_refs(
+ &mut sections.debug_loclists_refs,
+ &mut sections.debug_loclists.0,
+ &offsets,
+ )?;
+
+ Ok(offsets)
+ }
+}
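+
+// A typical flow, sketched with the `EndianVec`/`LittleEndian` writer from
+// this crate and a caller-provided `encoding`:
+//
+//     let mut units = UnitTable::default();
+//     let unit_id = units.add(Unit::new(encoding, LineProgram::none()));
+//     // ... add entries below `units.get(unit_id).root()` and set attributes ...
+//     let mut sections = Sections::new(EndianVec::new(LittleEndian));
+//     let debug_info_offsets = units.write(
+//         &mut sections,
+//         &DebugLineStrOffsets::none(),
+//         &DebugStrOffsets::none(),
+//     )?;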
+
+fn write_section_refs<W: Writer>(
+ references: &mut Vec<DebugInfoReference>,
+ w: &mut W,
+ offsets: &DebugInfoOffsets,
+) -> Result<()> {
+ for r in references.drain(..) {
+ let entry_offset = offsets.entry(r.unit, r.entry).0;
+ debug_assert_ne!(entry_offset, 0);
+ w.write_offset_at(r.offset, entry_offset, SectionId::DebugInfo, r.size)?;
+ }
+ Ok(())
+}
+
+/// A unit's debugging information.
+#[derive(Debug)]
+pub struct Unit {
+ base_id: BaseId,
+ /// The encoding parameters for this unit.
+ encoding: Encoding,
+ /// The line number program for this unit.
+ pub line_program: LineProgram,
+ /// A table of range lists used by this unit.
+ pub ranges: RangeListTable,
+ /// A table of location lists used by this unit.
+ pub locations: LocationListTable,
+ /// All entries in this unit. The order is unrelated to the tree order.
+ // Requirements:
+ // - entries form a tree
+ // - entries can be added in any order
+ // - entries have a fixed id
+ // - able to quickly lookup an entry from its id
+    // Limitations of current implementation:
+ // - mutable iteration of children is messy due to borrow checker
+ entries: Vec<DebuggingInformationEntry>,
+ /// The index of the root entry in entries.
+ root: UnitEntryId,
+}
+
+impl Unit {
+ /// Create a new `Unit`.
+ pub fn new(encoding: Encoding, line_program: LineProgram) -> Self {
+ let base_id = BaseId::default();
+ let ranges = RangeListTable::default();
+ let locations = LocationListTable::default();
+ let mut entries = Vec::new();
+ let root = DebuggingInformationEntry::new(
+ base_id,
+ &mut entries,
+ None,
+ constants::DW_TAG_compile_unit,
+ );
+ Unit {
+ base_id,
+ encoding,
+ line_program,
+ ranges,
+ locations,
+ entries,
+ root,
+ }
+ }
+
+ /// Return the encoding parameters for this unit.
+ #[inline]
+ pub fn encoding(&self) -> Encoding {
+ self.encoding
+ }
+
+ /// Return the DWARF version for this unit.
+ #[inline]
+ pub fn version(&self) -> u16 {
+ self.encoding.version
+ }
+
+ /// Return the address size in bytes for this unit.
+ #[inline]
+ pub fn address_size(&self) -> u8 {
+ self.encoding.address_size
+ }
+
+ /// Return the DWARF format for this unit.
+ #[inline]
+ pub fn format(&self) -> Format {
+ self.encoding.format
+ }
+
+ /// Return the number of `DebuggingInformationEntry`s created for this unit.
+ ///
+ /// This includes entries that no longer have a parent.
+ #[inline]
+ pub fn count(&self) -> usize {
+ self.entries.len()
+ }
+
+ /// Return the id of the root entry.
+ #[inline]
+ pub fn root(&self) -> UnitEntryId {
+ self.root
+ }
+
+ /// Add a new `DebuggingInformationEntry` to this unit and return its id.
+ ///
+ /// The `parent` must be within the same unit.
+ ///
+ /// # Panics
+ ///
+ /// Panics if `parent` is invalid.
+ #[inline]
+ pub fn add(&mut self, parent: UnitEntryId, tag: constants::DwTag) -> UnitEntryId {
+ debug_assert_eq!(self.base_id, parent.base_id);
+ DebuggingInformationEntry::new(self.base_id, &mut self.entries, Some(parent), tag)
+ }
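+
+    // For example (a sketch, assuming an `AttributeValue::String(Vec<u8>)`
+    // variant for the attribute value):
+    //
+    //     let subprogram = unit.add(unit.root(), constants::DW_TAG_subprogram);
+    //     unit.get_mut(subprogram)
+    //         .set(constants::DW_AT_name, AttributeValue::String(b"main".to_vec()));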
+
+ /// Get a reference to an entry.
+ ///
+ /// # Panics
+ ///
+ /// Panics if `id` is invalid.
+ #[inline]
+ pub fn get(&self, id: UnitEntryId) -> &DebuggingInformationEntry {
+ debug_assert_eq!(self.base_id, id.base_id);
+ &self.entries[id.index]
+ }
+
+ /// Get a mutable reference to an entry.
+ ///
+ /// # Panics
+ ///
+ /// Panics if `id` is invalid.
+ #[inline]
+ pub fn get_mut(&mut self, id: UnitEntryId) -> &mut DebuggingInformationEntry {
+ debug_assert_eq!(self.base_id, id.base_id);
+ &mut self.entries[id.index]
+ }
+
+ /// Return true if `self.line_program` is used by a DIE.
+ fn line_program_in_use(&self) -> bool {
+ if self.line_program.is_none() {
+ return false;
+ }
+ if !self.line_program.is_empty() {
+ return true;
+ }
+
+ for entry in &self.entries {
+ for attr in &entry.attrs {
+ if let AttributeValue::FileIndex(Some(_)) = attr.value {
+ return true;
+ }
+ }
+ }
+
+ false
+ }
+
+ /// Write the unit to the given sections.
+ pub(crate) fn write<W: Writer>(
+ &mut self,
+ sections: &mut Sections<W>,
+ abbrev_offset: DebugAbbrevOffset,
+ abbrevs: &mut AbbreviationTable,
+ line_strings: &DebugLineStrOffsets,
+ strings: &DebugStrOffsets,
+ ) -> Result<UnitOffsets> {
+ let line_program = if self.line_program_in_use() {
+ self.entries[self.root.index]
+ .set(constants::DW_AT_stmt_list, AttributeValue::LineProgramRef);
+ Some(self.line_program.write(
+ &mut sections.debug_line,
+ self.encoding,
+ line_strings,
+ strings,
+ )?)
+ } else {
+ self.entries[self.root.index].delete(constants::DW_AT_stmt_list);
+ None
+ };
+
+ // TODO: use .debug_types for type units in DWARF v4.
+ let w = &mut sections.debug_info;
+
+ let mut offsets = UnitOffsets {
+ base_id: self.base_id,
+ unit: w.offset(),
+ // Entries can be written in any order, so create the complete vec now.
+ entries: vec![EntryOffset::none(); self.entries.len()],
+ };
+
+ let length_offset = w.write_initial_length(self.format())?;
+ let length_base = w.len();
+
+ w.write_u16(self.version())?;
+ if 2 <= self.version() && self.version() <= 4 {
+ w.write_offset(
+ abbrev_offset.0,
+ SectionId::DebugAbbrev,
+ self.format().word_size(),
+ )?;
+ w.write_u8(self.address_size())?;
+ } else if self.version() == 5 {
+ w.write_u8(constants::DW_UT_compile.0)?;
+ w.write_u8(self.address_size())?;
+ w.write_offset(
+ abbrev_offset.0,
+ SectionId::DebugAbbrev,
+ self.format().word_size(),
+ )?;
+ } else {
+ return Err(Error::UnsupportedVersion(self.version()));
+ }
+
+ // Calculate all DIE offsets, so that we are able to output references to them.
+ // However, references to base types in expressions use ULEB128, so base types
+ // must be moved to the front before we can calculate offsets.
+ self.reorder_base_types();
+ let mut offset = w.len();
+ self.entries[self.root.index].calculate_offsets(
+ self,
+ &mut offset,
+ &mut offsets,
+ abbrevs,
+ )?;
+
+ let range_lists = self.ranges.write(sections, self.encoding)?;
+ // Location lists can't be written until we have DIE offsets.
+ let loc_lists = self
+ .locations
+ .write(sections, self.encoding, Some(&offsets))?;
+
+ let w = &mut sections.debug_info;
+ let mut unit_refs = Vec::new();
+ self.entries[self.root.index].write(
+ w,
+ &mut sections.debug_info_refs,
+ &mut unit_refs,
+ self,
+ &mut offsets,
+ abbrevs,
+ line_program,
+ line_strings,
+ strings,
+ &range_lists,
+ &loc_lists,
+ )?;
+
+ let length = (w.len() - length_base) as u64;
+ w.write_initial_length_at(length_offset, length, self.format())?;
+
+ for (offset, entry) in unit_refs {
+ // This does not need relocation.
+ w.write_udata_at(
+ offset.0,
+ offsets.unit_offset(entry),
+ self.format().word_size(),
+ )?;
+ }
+
+ Ok(offsets)
+ }
+
+ /// Reorder base types to come first so that typed stack operations
+ /// can get their offset.
+ fn reorder_base_types(&mut self) {
+ let root = &self.entries[self.root.index];
+ let mut root_children = Vec::with_capacity(root.children.len());
+ for entry in &root.children {
+ if self.entries[entry.index].tag == constants::DW_TAG_base_type {
+ root_children.push(*entry);
+ }
+ }
+ for entry in &root.children {
+ if self.entries[entry.index].tag != constants::DW_TAG_base_type {
+ root_children.push(*entry);
+ }
+ }
+ self.entries[self.root.index].children = root_children;
+ }
+}
+
+/// A Debugging Information Entry (DIE).
+///
+/// DIEs have a set of attributes and optionally have children DIEs as well.
+///
+/// DIEs form a tree without any cycles. This is enforced by specifying the
+/// parent when creating a DIE, and disallowing changes of parent.
+#[derive(Debug)]
+pub struct DebuggingInformationEntry {
+ id: UnitEntryId,
+ parent: Option<UnitEntryId>,
+ tag: constants::DwTag,
+ /// Whether to emit `DW_AT_sibling`.
+ sibling: bool,
+ attrs: Vec<Attribute>,
+ children: Vec<UnitEntryId>,
+}
+
+impl DebuggingInformationEntry {
+ /// Create a new `DebuggingInformationEntry`.
+ ///
+ /// # Panics
+ ///
+ /// Panics if `parent` is invalid.
+ #[allow(clippy::new_ret_no_self)]
+ fn new(
+ base_id: BaseId,
+ entries: &mut Vec<DebuggingInformationEntry>,
+ parent: Option<UnitEntryId>,
+ tag: constants::DwTag,
+ ) -> UnitEntryId {
+ let id = UnitEntryId::new(base_id, entries.len());
+ entries.push(DebuggingInformationEntry {
+ id,
+ parent,
+ tag,
+ sibling: false,
+ attrs: Vec::new(),
+ children: Vec::new(),
+ });
+ if let Some(parent) = parent {
+ debug_assert_eq!(base_id, parent.base_id);
+ assert_ne!(parent, id);
+ entries[parent.index].children.push(id);
+ }
+ id
+ }
+
+ /// Return the id of this entry.
+ #[inline]
+ pub fn id(&self) -> UnitEntryId {
+ self.id
+ }
+
+ /// Return the parent of this entry.
+ #[inline]
+ pub fn parent(&self) -> Option<UnitEntryId> {
+ self.parent
+ }
+
+ /// Return the tag of this entry.
+ #[inline]
+ pub fn tag(&self) -> constants::DwTag {
+ self.tag
+ }
+
+ /// Return `true` if a `DW_AT_sibling` attribute will be emitted.
+ #[inline]
+ pub fn sibling(&self) -> bool {
+ self.sibling
+ }
+
+ /// Set whether a `DW_AT_sibling` attribute will be emitted.
+ ///
+ /// The attribute will only be emitted if the DIE has children.
+ #[inline]
+ pub fn set_sibling(&mut self, sibling: bool) {
+ self.sibling = sibling;
+ }
+
+ /// Iterate over the attributes of this entry.
+ #[inline]
+ pub fn attrs(&self) -> slice::Iter<Attribute> {
+ self.attrs.iter()
+ }
+
+ /// Iterate over the attributes of this entry for modification.
+ #[inline]
+ pub fn attrs_mut(&mut self) -> slice::IterMut<Attribute> {
+ self.attrs.iter_mut()
+ }
+
+ /// Get an attribute.
+ pub fn get(&self, name: constants::DwAt) -> Option<&AttributeValue> {
+ self.attrs
+ .iter()
+ .find(|attr| attr.name == name)
+ .map(|attr| &attr.value)
+ }
+
+ /// Get an attribute for modification.
+ pub fn get_mut(&mut self, name: constants::DwAt) -> Option<&mut AttributeValue> {
+ self.attrs
+ .iter_mut()
+ .find(|attr| attr.name == name)
+ .map(|attr| &mut attr.value)
+ }
+
+ /// Set an attribute.
+ ///
+ /// Replaces any existing attribute with the same name.
+ ///
+ /// # Panics
+ ///
+ /// Panics if `name` is `DW_AT_sibling`. Use `set_sibling` instead.
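+ ///
+ /// # Example
+ ///
+ /// A brief sketch (not from the original documentation); `entry` is assumed
+ /// to be a `&mut DebuggingInformationEntry`:
+ ///
+ /// ```ignore
+ /// entry.set(constants::DW_AT_byte_size, AttributeValue::Udata(4));
+ /// // Setting the same name again replaces the earlier value.
+ /// entry.set(constants::DW_AT_byte_size, AttributeValue::Udata(8));
+ /// assert_eq!(entry.get(constants::DW_AT_byte_size), Some(&AttributeValue::Udata(8)));
+ /// ```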
+ pub fn set(&mut self, name: constants::DwAt, value: AttributeValue) {
+ assert_ne!(name, constants::DW_AT_sibling);
+ if let Some(attr) = self.attrs.iter_mut().find(|attr| attr.name == name) {
+ attr.value = value;
+ return;
+ }
+ self.attrs.push(Attribute { name, value });
+ }
+
+ /// Delete an attribute.
+ ///
+ /// Removes any existing attributes with the same name.
+ pub fn delete(&mut self, name: constants::DwAt) {
+ self.attrs.retain(|x| x.name != name);
+ }
+
+ /// Iterate over the children of this entry.
+ ///
+ /// Note: use `Unit::add` to add a new child to this entry.
+ #[inline]
+ pub fn children(&self) -> slice::Iter<UnitEntryId> {
+ self.children.iter()
+ }
+
+ /// Delete a child entry and all of its children.
+ pub fn delete_child(&mut self, id: UnitEntryId) {
+ self.children.retain(|&child| child != id);
+ }
+
+ /// Return the abbreviation for this DIE.
+ fn abbreviation(&self, encoding: Encoding) -> Result<Abbreviation> {
+ let mut attrs = Vec::new();
+
+ if self.sibling && !self.children.is_empty() {
+ let form = match encoding.format {
+ Format::Dwarf32 => constants::DW_FORM_ref4,
+ Format::Dwarf64 => constants::DW_FORM_ref8,
+ };
+ attrs.push(AttributeSpecification::new(constants::DW_AT_sibling, form));
+ }
+
+ for attr in &self.attrs {
+ attrs.push(attr.specification(encoding)?);
+ }
+
+ Ok(Abbreviation::new(
+ self.tag,
+ !self.children.is_empty(),
+ attrs,
+ ))
+ }
+
+ fn calculate_offsets(
+ &self,
+ unit: &Unit,
+ offset: &mut usize,
+ offsets: &mut UnitOffsets,
+ abbrevs: &mut AbbreviationTable,
+ ) -> Result<()> {
+ offsets.entries[self.id.index].offset = DebugInfoOffset(*offset);
+ offsets.entries[self.id.index].abbrev = abbrevs.add(self.abbreviation(unit.encoding())?);
+ *offset += self.size(unit, offsets);
+ if !self.children.is_empty() {
+ for child in &self.children {
+ unit.entries[child.index].calculate_offsets(unit, offset, offsets, abbrevs)?;
+ }
+ // Null child
+ *offset += 1;
+ }
+ Ok(())
+ }
+
+ fn size(&self, unit: &Unit, offsets: &UnitOffsets) -> usize {
+ let mut size = uleb128_size(offsets.abbrev(self.id));
+ if self.sibling && !self.children.is_empty() {
+ size += unit.format().word_size() as usize;
+ }
+ for attr in &self.attrs {
+ size += attr.value.size(unit, offsets);
+ }
+ size
+ }
+
+ /// Write the entry to the given sections.
+ #[allow(clippy::too_many_arguments)]
+ fn write<W: Writer>(
+ &self,
+ w: &mut DebugInfo<W>,
+ debug_info_refs: &mut Vec<DebugInfoReference>,
+ unit_refs: &mut Vec<(DebugInfoOffset, UnitEntryId)>,
+ unit: &Unit,
+ offsets: &mut UnitOffsets,
+ abbrevs: &mut AbbreviationTable,
+ line_program: Option<DebugLineOffset>,
+ line_strings: &DebugLineStrOffsets,
+ strings: &DebugStrOffsets,
+ range_lists: &RangeListOffsets,
+ loc_lists: &LocationListOffsets,
+ ) -> Result<()> {
+ debug_assert_eq!(offsets.debug_info_offset(self.id), w.offset());
+ w.write_uleb128(offsets.abbrev(self.id))?;
+
+ let sibling_offset = if self.sibling && !self.children.is_empty() {
+ let offset = w.offset();
+ w.write_udata(0, unit.format().word_size())?;
+ Some(offset)
+ } else {
+ None
+ };
+
+ for attr in &self.attrs {
+ attr.value.write(
+ w,
+ debug_info_refs,
+ unit_refs,
+ unit,
+ offsets,
+ line_program,
+ line_strings,
+ strings,
+ range_lists,
+ loc_lists,
+ )?;
+ }
+
+ if !self.children.is_empty() {
+ for child in &self.children {
+ unit.entries[child.index].write(
+ w,
+ debug_info_refs,
+ unit_refs,
+ unit,
+ offsets,
+ abbrevs,
+ line_program,
+ line_strings,
+ strings,
+ range_lists,
+ loc_lists,
+ )?;
+ }
+ // Null child
+ w.write_u8(0)?;
+ }
+
+ if let Some(offset) = sibling_offset {
+ let next_offset = (w.offset().0 - offsets.unit.0) as u64;
+ // This does not need relocation.
+ w.write_udata_at(offset.0, next_offset, unit.format().word_size())?;
+ }
+ Ok(())
+ }
+}
+
+/// An attribute in a `DebuggingInformationEntry`, consisting of a name and
+/// associated value.
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct Attribute {
+ name: constants::DwAt,
+ value: AttributeValue,
+}
+
+impl Attribute {
+ /// Get the name of this attribute.
+ #[inline]
+ pub fn name(&self) -> constants::DwAt {
+ self.name
+ }
+
+ /// Get the value of this attribute.
+ #[inline]
+ pub fn get(&self) -> &AttributeValue {
+ &self.value
+ }
+
+ /// Set the value of this attribute.
+ #[inline]
+ pub fn set(&mut self, value: AttributeValue) {
+ self.value = value;
+ }
+
+ /// Return the specification (name and form) for this attribute.
+ fn specification(&self, encoding: Encoding) -> Result<AttributeSpecification> {
+ Ok(AttributeSpecification::new(
+ self.name,
+ self.value.form(encoding)?,
+ ))
+ }
+}
+
+/// The value of an attribute in a `DebuggingInformationEntry`.
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum AttributeValue {
+ /// "Refers to some location in the address space of the described program."
+ Address(Address),
+
+ /// A slice of an arbitrary number of bytes.
+ Block(Vec<u8>),
+
+ /// A one byte constant data value. How to interpret the byte depends on context.
+ ///
+ /// From section 7 of the standard: "Depending on context, it may be a
+ /// signed integer, an unsigned integer, a floating-point constant, or
+ /// anything else."
+ Data1(u8),
+
+ /// A two byte constant data value. How to interpret the bytes depends on context.
+ ///
+ /// This value will be converted to the target endian before writing.
+ ///
+ /// From section 7 of the standard: "Depending on context, it may be a
+ /// signed integer, an unsigned integer, a floating-point constant, or
+ /// anything else."
+ Data2(u16),
+
+ /// A four byte constant data value. How to interpret the bytes depends on context.
+ ///
+ /// This value will be converted to the target endian before writing.
+ ///
+ /// From section 7 of the standard: "Depending on context, it may be a
+ /// signed integer, an unsigned integer, a floating-point constant, or
+ /// anything else."
+ Data4(u32),
+
+ /// An eight byte constant data value. How to interpret the bytes depends on context.
+ ///
+ /// This value will be converted to the target endian before writing.
+ ///
+ /// From section 7 of the standard: "Depending on context, it may be a
+ /// signed integer, an unsigned integer, a floating-point constant, or
+ /// anything else."
+ Data8(u64),
+
+ /// A signed integer constant.
+ Sdata(i64),
+
+ /// An unsigned integer constant.
+ Udata(u64),
+
+ /// "The information bytes contain a DWARF expression (see Section 2.5) or
+ /// location description (see Section 2.6)."
+ Exprloc(Expression),
+
+ /// A boolean that indicates presence or absence of the attribute.
+ Flag(bool),
+
+ /// An attribute that is always present.
+ FlagPresent,
+
+ /// A reference to a `DebuggingInformationEntry` in this unit.
+ UnitRef(UnitEntryId),
+
+ /// A reference to a `DebuggingInformationEntry` in a potentially different unit.
+ DebugInfoRef(Reference),
+
+ /// An offset into the `.debug_info` section of the supplementary object file.
+ ///
+ /// The API does not currently assist with generating this offset.
+ /// This variant will be removed from the API once support for writing
+ /// supplementary object files is implemented.
+ DebugInfoRefSup(DebugInfoOffset),
+
+ /// A reference to a line number program.
+ LineProgramRef,
+
+ /// A reference to a location list.
+ LocationListRef(LocationListId),
+
+ /// An offset into the `.debug_macinfo` section.
+ ///
+ /// The API does not currently assist with generating this offset.
+ /// This variant will be removed from the API once support for writing
+ /// `.debug_macinfo` sections is implemented.
+ DebugMacinfoRef(DebugMacinfoOffset),
+
+ /// An offset into the `.debug_macro` section.
+ ///
+ /// The API does not currently assist with generating this offset.
+ /// This variant will be removed from the API once support for writing
+ /// `.debug_macro` sections is implemented.
+ DebugMacroRef(DebugMacroOffset),
+
+ /// A reference to a range list.
+ RangeListRef(RangeListId),
+
+ /// A type signature.
+ ///
+ /// The API does not currently assist with generating this signature.
+ /// This variant will be removed from the API once support for writing
+ /// `.debug_types` sections is implemented.
+ DebugTypesRef(DebugTypeSignature),
+
+ /// A reference to a string in the `.debug_str` section.
+ StringRef(StringId),
+
+ /// An offset into the `.debug_str` section of the supplementary object file.
+ ///
+ /// The API does not currently assist with generating this offset.
+ /// This variant will be removed from the API once support for writing
+ /// supplementary object files is implemented.
+ DebugStrRefSup(DebugStrOffset),
+
+ /// A reference to a string in the `.debug_line_str` section.
+ LineStringRef(LineStringId),
+
+ /// A slice of bytes representing a string. Must not include null bytes.
+ /// Not guaranteed to be UTF-8 or anything like that.
+ String(Vec<u8>),
+
+ /// The value of a `DW_AT_encoding` attribute.
+ Encoding(constants::DwAte),
+
+ /// The value of a `DW_AT_decimal_sign` attribute.
+ DecimalSign(constants::DwDs),
+
+ /// The value of a `DW_AT_endianity` attribute.
+ Endianity(constants::DwEnd),
+
+ /// The value of a `DW_AT_accessibility` attribute.
+ Accessibility(constants::DwAccess),
+
+ /// The value of a `DW_AT_visibility` attribute.
+ Visibility(constants::DwVis),
+
+ /// The value of a `DW_AT_virtuality` attribute.
+ Virtuality(constants::DwVirtuality),
+
+ /// The value of a `DW_AT_language` attribute.
+ Language(constants::DwLang),
+
+ /// The value of a `DW_AT_address_class` attribute.
+ AddressClass(constants::DwAddr),
+
+ /// The value of a `DW_AT_identifier_case` attribute.
+ IdentifierCase(constants::DwId),
+
+ /// The value of a `DW_AT_calling_convention` attribute.
+ CallingConvention(constants::DwCc),
+
+ /// The value of a `DW_AT_inline` attribute.
+ Inline(constants::DwInl),
+
+ /// The value of a `DW_AT_ordering` attribute.
+ Ordering(constants::DwOrd),
+
+ /// An index into the filename entries from the line number information
+ /// table for the unit containing this value.
+ FileIndex(Option<FileId>),
+}
+
+impl AttributeValue {
+ /// Return the form that will be used to encode this value.
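+ ///
+ /// A hedged sketch (not from the original documentation) showing that the
+ /// chosen form can depend on the DWARF version in `encoding`:
+ ///
+ /// ```ignore
+ /// use gimli::write::AttributeValue;
+ /// use gimli::{constants, Encoding, Format};
+ ///
+ /// let v3 = Encoding { format: Format::Dwarf32, version: 3, address_size: 8 };
+ /// let v5 = Encoding { format: Format::Dwarf32, version: 5, address_size: 8 };
+ /// // Section offsets use data4/data8 before DWARF 4 and DW_FORM_sec_offset afterwards.
+ /// assert_eq!(AttributeValue::LineProgramRef.form(v3).unwrap(), constants::DW_FORM_data4);
+ /// assert_eq!(AttributeValue::LineProgramRef.form(v5).unwrap(), constants::DW_FORM_sec_offset);
+ /// ```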
+ pub fn form(&self, encoding: Encoding) -> Result<constants::DwForm> {
+ // TODO: missing forms:
+ // - DW_FORM_indirect
+ // - DW_FORM_implicit_const
+ // - DW_FORM_block1/block2/block4
+ // - DW_FORM_strx/strx1/strx2/strx3/strx4
+ // - DW_FORM_addrx/addrx1/addrx2/addrx3/addrx4
+ // - DW_FORM_data16
+ // - DW_FORM_line_strp
+ // - DW_FORM_loclistx
+ // - DW_FORM_rnglistx
+ let form = match *self {
+ AttributeValue::Address(_) => constants::DW_FORM_addr,
+ AttributeValue::Block(_) => constants::DW_FORM_block,
+ AttributeValue::Data1(_) => constants::DW_FORM_data1,
+ AttributeValue::Data2(_) => constants::DW_FORM_data2,
+ AttributeValue::Data4(_) => constants::DW_FORM_data4,
+ AttributeValue::Data8(_) => constants::DW_FORM_data8,
+ AttributeValue::Exprloc(_) => constants::DW_FORM_exprloc,
+ AttributeValue::Flag(_) => constants::DW_FORM_flag,
+ AttributeValue::FlagPresent => constants::DW_FORM_flag_present,
+ AttributeValue::UnitRef(_) => {
+ // Using a fixed size format lets us write a placeholder before we know
+ // the value.
+ match encoding.format {
+ Format::Dwarf32 => constants::DW_FORM_ref4,
+ Format::Dwarf64 => constants::DW_FORM_ref8,
+ }
+ }
+ AttributeValue::DebugInfoRef(_) => constants::DW_FORM_ref_addr,
+ AttributeValue::DebugInfoRefSup(_) => {
+ // TODO: should this depend on the size of supplementary section?
+ match encoding.format {
+ Format::Dwarf32 => constants::DW_FORM_ref_sup4,
+ Format::Dwarf64 => constants::DW_FORM_ref_sup8,
+ }
+ }
+ AttributeValue::LineProgramRef
+ | AttributeValue::LocationListRef(_)
+ | AttributeValue::DebugMacinfoRef(_)
+ | AttributeValue::DebugMacroRef(_)
+ | AttributeValue::RangeListRef(_) => {
+ if encoding.version == 2 || encoding.version == 3 {
+ match encoding.format {
+ Format::Dwarf32 => constants::DW_FORM_data4,
+ Format::Dwarf64 => constants::DW_FORM_data8,
+ }
+ } else {
+ constants::DW_FORM_sec_offset
+ }
+ }
+ AttributeValue::DebugTypesRef(_) => constants::DW_FORM_ref_sig8,
+ AttributeValue::StringRef(_) => constants::DW_FORM_strp,
+ AttributeValue::DebugStrRefSup(_) => constants::DW_FORM_strp_sup,
+ AttributeValue::LineStringRef(_) => constants::DW_FORM_line_strp,
+ AttributeValue::String(_) => constants::DW_FORM_string,
+ AttributeValue::Encoding(_)
+ | AttributeValue::DecimalSign(_)
+ | AttributeValue::Endianity(_)
+ | AttributeValue::Accessibility(_)
+ | AttributeValue::Visibility(_)
+ | AttributeValue::Virtuality(_)
+ | AttributeValue::Language(_)
+ | AttributeValue::AddressClass(_)
+ | AttributeValue::IdentifierCase(_)
+ | AttributeValue::CallingConvention(_)
+ | AttributeValue::Inline(_)
+ | AttributeValue::Ordering(_)
+ | AttributeValue::FileIndex(_)
+ | AttributeValue::Udata(_) => constants::DW_FORM_udata,
+ AttributeValue::Sdata(_) => constants::DW_FORM_sdata,
+ };
+ Ok(form)
+ }
+
+ fn size(&self, unit: &Unit, offsets: &UnitOffsets) -> usize {
+ macro_rules! debug_assert_form {
+ ($form:expr) => {
+ debug_assert_eq!(self.form(unit.encoding()).unwrap(), $form)
+ };
+ }
+ match *self {
+ AttributeValue::Address(_) => {
+ debug_assert_form!(constants::DW_FORM_addr);
+ unit.address_size() as usize
+ }
+ AttributeValue::Block(ref val) => {
+ debug_assert_form!(constants::DW_FORM_block);
+ uleb128_size(val.len() as u64) + val.len()
+ }
+ AttributeValue::Data1(_) => {
+ debug_assert_form!(constants::DW_FORM_data1);
+ 1
+ }
+ AttributeValue::Data2(_) => {
+ debug_assert_form!(constants::DW_FORM_data2);
+ 2
+ }
+ AttributeValue::Data4(_) => {
+ debug_assert_form!(constants::DW_FORM_data4);
+ 4
+ }
+ AttributeValue::Data8(_) => {
+ debug_assert_form!(constants::DW_FORM_data8);
+ 8
+ }
+ AttributeValue::Sdata(val) => {
+ debug_assert_form!(constants::DW_FORM_sdata);
+ sleb128_size(val)
+ }
+ AttributeValue::Udata(val) => {
+ debug_assert_form!(constants::DW_FORM_udata);
+ uleb128_size(val)
+ }
+ AttributeValue::Exprloc(ref val) => {
+ debug_assert_form!(constants::DW_FORM_exprloc);
+ let size = val.size(unit.encoding(), Some(offsets));
+ uleb128_size(size as u64) + size
+ }
+ AttributeValue::Flag(_) => {
+ debug_assert_form!(constants::DW_FORM_flag);
+ 1
+ }
+ AttributeValue::FlagPresent => {
+ debug_assert_form!(constants::DW_FORM_flag_present);
+ 0
+ }
+ AttributeValue::UnitRef(_) => {
+ match unit.format() {
+ Format::Dwarf32 => debug_assert_form!(constants::DW_FORM_ref4),
+ Format::Dwarf64 => debug_assert_form!(constants::DW_FORM_ref8),
+ }
+ unit.format().word_size() as usize
+ }
+ AttributeValue::DebugInfoRef(_) => {
+ debug_assert_form!(constants::DW_FORM_ref_addr);
+ if unit.version() == 2 {
+ unit.address_size() as usize
+ } else {
+ unit.format().word_size() as usize
+ }
+ }
+ AttributeValue::DebugInfoRefSup(_) => {
+ match unit.format() {
+ Format::Dwarf32 => debug_assert_form!(constants::DW_FORM_ref_sup4),
+ Format::Dwarf64 => debug_assert_form!(constants::DW_FORM_ref_sup8),
+ }
+ unit.format().word_size() as usize
+ }
+ AttributeValue::LineProgramRef => {
+ if unit.version() >= 4 {
+ debug_assert_form!(constants::DW_FORM_sec_offset);
+ }
+ unit.format().word_size() as usize
+ }
+ AttributeValue::LocationListRef(_) => {
+ if unit.version() >= 4 {
+ debug_assert_form!(constants::DW_FORM_sec_offset);
+ }
+ unit.format().word_size() as usize
+ }
+ AttributeValue::DebugMacinfoRef(_) => {
+ if unit.version() >= 4 {
+ debug_assert_form!(constants::DW_FORM_sec_offset);
+ }
+ unit.format().word_size() as usize
+ }
+ AttributeValue::DebugMacroRef(_) => {
+ if unit.version() >= 4 {
+ debug_assert_form!(constants::DW_FORM_sec_offset);
+ }
+ unit.format().word_size() as usize
+ }
+ AttributeValue::RangeListRef(_) => {
+ if unit.version() >= 4 {
+ debug_assert_form!(constants::DW_FORM_sec_offset);
+ }
+ unit.format().word_size() as usize
+ }
+ AttributeValue::DebugTypesRef(_) => {
+ debug_assert_form!(constants::DW_FORM_ref_sig8);
+ 8
+ }
+ AttributeValue::StringRef(_) => {
+ debug_assert_form!(constants::DW_FORM_strp);
+ unit.format().word_size() as usize
+ }
+ AttributeValue::DebugStrRefSup(_) => {
+ debug_assert_form!(constants::DW_FORM_strp_sup);
+ unit.format().word_size() as usize
+ }
+ AttributeValue::LineStringRef(_) => {
+ debug_assert_form!(constants::DW_FORM_line_strp);
+ unit.format().word_size() as usize
+ }
+ AttributeValue::String(ref val) => {
+ debug_assert_form!(constants::DW_FORM_string);
+ val.len() + 1
+ }
+ AttributeValue::Encoding(val) => {
+ debug_assert_form!(constants::DW_FORM_udata);
+ uleb128_size(val.0 as u64)
+ }
+ AttributeValue::DecimalSign(val) => {
+ debug_assert_form!(constants::DW_FORM_udata);
+ uleb128_size(val.0 as u64)
+ }
+ AttributeValue::Endianity(val) => {
+ debug_assert_form!(constants::DW_FORM_udata);
+ uleb128_size(val.0 as u64)
+ }
+ AttributeValue::Accessibility(val) => {
+ debug_assert_form!(constants::DW_FORM_udata);
+ uleb128_size(val.0 as u64)
+ }
+ AttributeValue::Visibility(val) => {
+ debug_assert_form!(constants::DW_FORM_udata);
+ uleb128_size(val.0 as u64)
+ }
+ AttributeValue::Virtuality(val) => {
+ debug_assert_form!(constants::DW_FORM_udata);
+ uleb128_size(val.0 as u64)
+ }
+ AttributeValue::Language(val) => {
+ debug_assert_form!(constants::DW_FORM_udata);
+ uleb128_size(val.0 as u64)
+ }
+ AttributeValue::AddressClass(val) => {
+ debug_assert_form!(constants::DW_FORM_udata);
+ uleb128_size(val.0 as u64)
+ }
+ AttributeValue::IdentifierCase(val) => {
+ debug_assert_form!(constants::DW_FORM_udata);
+ uleb128_size(val.0 as u64)
+ }
+ AttributeValue::CallingConvention(val) => {
+ debug_assert_form!(constants::DW_FORM_udata);
+ uleb128_size(val.0 as u64)
+ }
+ AttributeValue::Inline(val) => {
+ debug_assert_form!(constants::DW_FORM_udata);
+ uleb128_size(val.0 as u64)
+ }
+ AttributeValue::Ordering(val) => {
+ debug_assert_form!(constants::DW_FORM_udata);
+ uleb128_size(val.0 as u64)
+ }
+ AttributeValue::FileIndex(val) => {
+ debug_assert_form!(constants::DW_FORM_udata);
+ uleb128_size(val.map(FileId::raw).unwrap_or(0))
+ }
+ }
+ }
+
+ /// Write the attribute value to the given sections.
+ #[allow(clippy::cyclomatic_complexity, clippy::too_many_arguments)]
+ fn write<W: Writer>(
+ &self,
+ w: &mut DebugInfo<W>,
+ debug_info_refs: &mut Vec<DebugInfoReference>,
+ unit_refs: &mut Vec<(DebugInfoOffset, UnitEntryId)>,
+ unit: &Unit,
+ offsets: &UnitOffsets,
+ line_program: Option<DebugLineOffset>,
+ line_strings: &DebugLineStrOffsets,
+ strings: &DebugStrOffsets,
+ range_lists: &RangeListOffsets,
+ loc_lists: &LocationListOffsets,
+ ) -> Result<()> {
+ macro_rules! debug_assert_form {
+ ($form:expr) => {
+ debug_assert_eq!(self.form(unit.encoding()).unwrap(), $form)
+ };
+ }
+ match *self {
+ AttributeValue::Address(val) => {
+ debug_assert_form!(constants::DW_FORM_addr);
+ w.write_address(val, unit.address_size())?;
+ }
+ AttributeValue::Block(ref val) => {
+ debug_assert_form!(constants::DW_FORM_block);
+ w.write_uleb128(val.len() as u64)?;
+ w.write(&val)?;
+ }
+ AttributeValue::Data1(val) => {
+ debug_assert_form!(constants::DW_FORM_data1);
+ w.write_u8(val)?;
+ }
+ AttributeValue::Data2(val) => {
+ debug_assert_form!(constants::DW_FORM_data2);
+ w.write_u16(val)?;
+ }
+ AttributeValue::Data4(val) => {
+ debug_assert_form!(constants::DW_FORM_data4);
+ w.write_u32(val)?;
+ }
+ AttributeValue::Data8(val) => {
+ debug_assert_form!(constants::DW_FORM_data8);
+ w.write_u64(val)?;
+ }
+ AttributeValue::Sdata(val) => {
+ debug_assert_form!(constants::DW_FORM_sdata);
+ w.write_sleb128(val)?;
+ }
+ AttributeValue::Udata(val) => {
+ debug_assert_form!(constants::DW_FORM_udata);
+ w.write_uleb128(val)?;
+ }
+ AttributeValue::Exprloc(ref val) => {
+ debug_assert_form!(constants::DW_FORM_exprloc);
+ w.write_uleb128(val.size(unit.encoding(), Some(offsets)) as u64)?;
+ val.write(
+ &mut w.0,
+ Some(debug_info_refs),
+ unit.encoding(),
+ Some(offsets),
+ )?;
+ }
+ AttributeValue::Flag(val) => {
+ debug_assert_form!(constants::DW_FORM_flag);
+ w.write_u8(val as u8)?;
+ }
+ AttributeValue::FlagPresent => {
+ debug_assert_form!(constants::DW_FORM_flag_present);
+ }
+ AttributeValue::UnitRef(id) => {
+ match unit.format() {
+ Format::Dwarf32 => debug_assert_form!(constants::DW_FORM_ref4),
+ Format::Dwarf64 => debug_assert_form!(constants::DW_FORM_ref8),
+ }
+ unit_refs.push((w.offset(), id));
+ w.write_udata(0, unit.format().word_size())?;
+ }
+ AttributeValue::DebugInfoRef(reference) => {
+ debug_assert_form!(constants::DW_FORM_ref_addr);
+ let size = if unit.version() == 2 {
+ unit.address_size()
+ } else {
+ unit.format().word_size()
+ };
+ match reference {
+ Reference::Symbol(symbol) => w.write_reference(symbol, size)?,
+ Reference::Entry(unit, entry) => {
+ debug_info_refs.push(DebugInfoReference {
+ offset: w.len(),
+ unit,
+ entry,
+ size,
+ });
+ w.write_udata(0, size)?;
+ }
+ }
+ }
+ AttributeValue::DebugInfoRefSup(val) => {
+ match unit.format() {
+ Format::Dwarf32 => debug_assert_form!(constants::DW_FORM_ref_sup4),
+ Format::Dwarf64 => debug_assert_form!(constants::DW_FORM_ref_sup8),
+ }
+ w.write_udata(val.0 as u64, unit.format().word_size())?;
+ }
+ AttributeValue::LineProgramRef => {
+ if unit.version() >= 4 {
+ debug_assert_form!(constants::DW_FORM_sec_offset);
+ }
+ match line_program {
+ Some(line_program) => {
+ w.write_offset(
+ line_program.0,
+ SectionId::DebugLine,
+ unit.format().word_size(),
+ )?;
+ }
+ None => return Err(Error::InvalidAttributeValue),
+ }
+ }
+ AttributeValue::LocationListRef(val) => {
+ if unit.version() >= 4 {
+ debug_assert_form!(constants::DW_FORM_sec_offset);
+ }
+ let section = if unit.version() <= 4 {
+ SectionId::DebugLoc
+ } else {
+ SectionId::DebugLocLists
+ };
+ w.write_offset(loc_lists.get(val).0, section, unit.format().word_size())?;
+ }
+ AttributeValue::DebugMacinfoRef(val) => {
+ if unit.version() >= 4 {
+ debug_assert_form!(constants::DW_FORM_sec_offset);
+ }
+ w.write_offset(val.0, SectionId::DebugMacinfo, unit.format().word_size())?;
+ }
+ AttributeValue::DebugMacroRef(val) => {
+ if unit.version() >= 4 {
+ debug_assert_form!(constants::DW_FORM_sec_offset);
+ }
+ w.write_offset(val.0, SectionId::DebugMacro, unit.format().word_size())?;
+ }
+ AttributeValue::RangeListRef(val) => {
+ if unit.version() >= 4 {
+ debug_assert_form!(constants::DW_FORM_sec_offset);
+ }
+ let section = if unit.version() <= 4 {
+ SectionId::DebugRanges
+ } else {
+ SectionId::DebugRngLists
+ };
+ w.write_offset(range_lists.get(val).0, section, unit.format().word_size())?;
+ }
+ AttributeValue::DebugTypesRef(val) => {
+ debug_assert_form!(constants::DW_FORM_ref_sig8);
+ w.write_u64(val.0)?;
+ }
+ AttributeValue::StringRef(val) => {
+ debug_assert_form!(constants::DW_FORM_strp);
+ w.write_offset(
+ strings.get(val).0,
+ SectionId::DebugStr,
+ unit.format().word_size(),
+ )?;
+ }
+ AttributeValue::DebugStrRefSup(val) => {
+ debug_assert_form!(constants::DW_FORM_strp_sup);
+ w.write_udata(val.0 as u64, unit.format().word_size())?;
+ }
+ AttributeValue::LineStringRef(val) => {
+ debug_assert_form!(constants::DW_FORM_line_strp);
+ w.write_offset(
+ line_strings.get(val).0,
+ SectionId::DebugLineStr,
+ unit.format().word_size(),
+ )?;
+ }
+ AttributeValue::String(ref val) => {
+ debug_assert_form!(constants::DW_FORM_string);
+ w.write(&val)?;
+ w.write_u8(0)?;
+ }
+ AttributeValue::Encoding(val) => {
+ debug_assert_form!(constants::DW_FORM_udata);
+ w.write_uleb128(u64::from(val.0))?;
+ }
+ AttributeValue::DecimalSign(val) => {
+ debug_assert_form!(constants::DW_FORM_udata);
+ w.write_uleb128(u64::from(val.0))?;
+ }
+ AttributeValue::Endianity(val) => {
+ debug_assert_form!(constants::DW_FORM_udata);
+ w.write_uleb128(u64::from(val.0))?;
+ }
+ AttributeValue::Accessibility(val) => {
+ debug_assert_form!(constants::DW_FORM_udata);
+ w.write_uleb128(u64::from(val.0))?;
+ }
+ AttributeValue::Visibility(val) => {
+ debug_assert_form!(constants::DW_FORM_udata);
+ w.write_uleb128(u64::from(val.0))?;
+ }
+ AttributeValue::Virtuality(val) => {
+ debug_assert_form!(constants::DW_FORM_udata);
+ w.write_uleb128(u64::from(val.0))?;
+ }
+ AttributeValue::Language(val) => {
+ debug_assert_form!(constants::DW_FORM_udata);
+ w.write_uleb128(u64::from(val.0))?;
+ }
+ AttributeValue::AddressClass(val) => {
+ debug_assert_form!(constants::DW_FORM_udata);
+ w.write_uleb128(val.0)?;
+ }
+ AttributeValue::IdentifierCase(val) => {
+ debug_assert_form!(constants::DW_FORM_udata);
+ w.write_uleb128(u64::from(val.0))?;
+ }
+ AttributeValue::CallingConvention(val) => {
+ debug_assert_form!(constants::DW_FORM_udata);
+ w.write_uleb128(u64::from(val.0))?;
+ }
+ AttributeValue::Inline(val) => {
+ debug_assert_form!(constants::DW_FORM_udata);
+ w.write_uleb128(u64::from(val.0))?;
+ }
+ AttributeValue::Ordering(val) => {
+ debug_assert_form!(constants::DW_FORM_udata);
+ w.write_uleb128(u64::from(val.0))?;
+ }
+ AttributeValue::FileIndex(val) => {
+ debug_assert_form!(constants::DW_FORM_udata);
+ w.write_uleb128(val.map(FileId::raw).unwrap_or(0))?;
+ }
+ }
+ Ok(())
+ }
+}
+
+define_section!(
+ DebugInfo,
+ DebugInfoOffset,
+ "A writable `.debug_info` section."
+);
+
+/// The section offsets of all elements within a `.debug_info` section.
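+///
+/// A sketch (not from the original documentation) of how these offsets are
+/// typically obtained and queried; it assumes that `units`, `sections`, and
+/// the string offset tables already exist, and that `UnitTable::write`
+/// returns a `DebugInfoOffsets` as elsewhere in this module:
+///
+/// ```ignore
+/// let offsets = units.write(&mut sections, &debug_line_str_offsets, &debug_str_offsets)?;
+/// // Resolve the absolute `.debug_info` offset of an entry, e.g. for relocations.
+/// let entry_offset = offsets.entry(unit_id, entry_id);
+/// ```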
+#[derive(Debug, Default)]
+pub struct DebugInfoOffsets {
+ base_id: BaseId,
+ units: Vec<UnitOffsets>,
+}
+
+impl DebugInfoOffsets {
+ #[cfg(test)]
+ pub(crate) fn unit_offsets(&self, unit: UnitId) -> &UnitOffsets {
+ debug_assert_eq!(self.base_id, unit.base_id);
+ &self.units[unit.index]
+ }
+
+ /// Get the `.debug_info` section offset for the given unit.
+ #[inline]
+ pub fn unit(&self, unit: UnitId) -> DebugInfoOffset {
+ debug_assert_eq!(self.base_id, unit.base_id);
+ self.units[unit.index].unit
+ }
+
+ /// Get the `.debug_info` section offset for the given entry.
+ #[inline]
+ pub fn entry(&self, unit: UnitId, entry: UnitEntryId) -> DebugInfoOffset {
+ debug_assert_eq!(self.base_id, unit.base_id);
+ self.units[unit.index].debug_info_offset(entry)
+ }
+}
+
+/// The section offsets of all elements of a unit within a `.debug_info` section.
+#[derive(Debug)]
+pub(crate) struct UnitOffsets {
+ base_id: BaseId,
+ unit: DebugInfoOffset,
+ entries: Vec<EntryOffset>,
+}
+
+impl UnitOffsets {
+ #[cfg(test)]
+ fn none() -> Self {
+ UnitOffsets {
+ base_id: BaseId::default(),
+ unit: DebugInfoOffset(0),
+ entries: Vec::new(),
+ }
+ }
+
+ /// Get the .debug_info offset for the given entry.
+ #[inline]
+ pub(crate) fn debug_info_offset(&self, entry: UnitEntryId) -> DebugInfoOffset {
+ debug_assert_eq!(self.base_id, entry.base_id);
+ let offset = self.entries[entry.index].offset;
+ debug_assert_ne!(offset.0, 0);
+ offset
+ }
+
+ /// Get the unit offset for the given entry.
+ #[inline]
+ pub(crate) fn unit_offset(&self, entry: UnitEntryId) -> u64 {
+ let offset = self.debug_info_offset(entry);
+ (offset.0 - self.unit.0) as u64
+ }
+
+ /// Get the abbreviation code for the given entry.
+ #[inline]
+ pub(crate) fn abbrev(&self, entry: UnitEntryId) -> u64 {
+ debug_assert_eq!(self.base_id, entry.base_id);
+ self.entries[entry.index].abbrev
+ }
+}
+
+#[derive(Debug, Clone, Copy)]
+pub(crate) struct EntryOffset {
+ offset: DebugInfoOffset,
+ abbrev: u64,
+}
+
+impl EntryOffset {
+ fn none() -> Self {
+ EntryOffset {
+ offset: DebugInfoOffset(0),
+ abbrev: 0,
+ }
+ }
+}
+
+/// A reference to a `.debug_info` entry that has yet to be resolved.
+#[derive(Debug, Clone, Copy)]
+pub(crate) struct DebugInfoReference {
+ /// The offset within the section of the reference.
+ pub offset: usize,
+ /// The size of the reference.
+ pub size: u8,
+ /// The unit containing the entry.
+ pub unit: UnitId,
+ /// The entry being referenced.
+ pub entry: UnitEntryId,
+}
+
+#[cfg(feature = "read")]
+pub(crate) mod convert {
+ use super::*;
+ use crate::common::UnitSectionOffset;
+ use crate::read::{self, Reader};
+ use crate::write::{self, ConvertError, ConvertResult, LocationList, RangeList};
+ use std::collections::HashMap;
+
+ pub(crate) struct ConvertUnit<R: Reader<Offset = usize>> {
+ from_unit: read::Unit<R>,
+ base_id: BaseId,
+ encoding: Encoding,
+ entries: Vec<DebuggingInformationEntry>,
+ entry_offsets: Vec<read::UnitOffset>,
+ root: UnitEntryId,
+ }
+
+ pub(crate) struct ConvertUnitContext<'a, R: Reader<Offset = usize>> {
+ pub dwarf: &'a read::Dwarf<R>,
+ pub unit: &'a read::Unit<R>,
+ pub line_strings: &'a mut write::LineStringTable,
+ pub strings: &'a mut write::StringTable,
+ pub ranges: &'a mut write::RangeListTable,
+ pub locations: &'a mut write::LocationListTable,
+ pub convert_address: &'a dyn Fn(u64) -> Option<Address>,
+ pub base_address: Address,
+ pub line_program_offset: Option<DebugLineOffset>,
+ pub line_program_files: Vec<FileId>,
+ pub entry_ids: &'a HashMap<UnitSectionOffset, (UnitId, UnitEntryId)>,
+ }
+
+ impl UnitTable {
+ /// Create a unit table by reading the data in the given sections.
+ ///
+ /// This also updates the given tables with the values that are referenced from
+ /// attributes in this section.
+ ///
+ /// `convert_address` is a function to convert read addresses into the `Address`
+ /// type. For non-relocatable addresses, this function may simply return
+ /// `Address::Constant(address)`. For relocatable addresses, it is the caller's
+ /// responsibility to determine the symbol and addend corresponding to the address
+ /// and return `Address::Symbol { symbol, addend }`.
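+ ///
+ /// A small sketch (not from the original documentation, mirroring the tests
+ /// below); `dwarf`, `line_strings`, and `strings` are assumed to exist:
+ ///
+ /// ```ignore
+ /// // For non-relocatable input, every read address can simply become a constant.
+ /// let units = UnitTable::from(
+ ///     &dwarf,
+ ///     &mut line_strings,
+ ///     &mut strings,
+ ///     &|address| Some(Address::Constant(address)),
+ /// )?;
+ /// ```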
+ pub fn from<R: Reader<Offset = usize>>(
+ dwarf: &read::Dwarf<R>,
+ line_strings: &mut write::LineStringTable,
+ strings: &mut write::StringTable,
+ convert_address: &dyn Fn(u64) -> Option<Address>,
+ ) -> ConvertResult<UnitTable> {
+ let base_id = BaseId::default();
+ let mut unit_entries = Vec::new();
+ let mut entry_ids = HashMap::new();
+
+ let mut from_units = dwarf.units();
+ while let Some(from_unit) = from_units.next()? {
+ let unit_id = UnitId::new(base_id, unit_entries.len());
+ unit_entries.push(Unit::convert_entries(
+ from_unit,
+ unit_id,
+ &mut entry_ids,
+ dwarf,
+ )?);
+ }
+
+ // Attributes must be converted in a separate pass so that we can handle
+ // references to other compilation units.
+ let mut units = Vec::new();
+ for unit_entries in unit_entries.drain(..) {
+ units.push(Unit::convert_attributes(
+ unit_entries,
+ &entry_ids,
+ dwarf,
+ line_strings,
+ strings,
+ convert_address,
+ )?);
+ }
+
+ Ok(UnitTable { base_id, units })
+ }
+ }
+
+ impl Unit {
+ /// Create a unit by reading the data in the input sections.
+ ///
+ /// Does not add entry attributes.
+ #[allow(clippy::too_many_arguments)]
+ pub(crate) fn convert_entries<R: Reader<Offset = usize>>(
+ from_header: read::UnitHeader<R>,
+ unit_id: UnitId,
+ entry_ids: &mut HashMap<UnitSectionOffset, (UnitId, UnitEntryId)>,
+ dwarf: &read::Dwarf<R>,
+ ) -> ConvertResult<ConvertUnit<R>> {
+ match from_header.type_() {
+ read::UnitType::Compilation => (),
+ _ => return Err(ConvertError::UnsupportedUnitType),
+ }
+ let base_id = BaseId::default();
+
+ let from_unit = dwarf.unit(from_header)?;
+ let encoding = from_unit.encoding();
+
+ let mut entries = Vec::new();
+ let mut entry_offsets = Vec::new();
+
+ let mut from_tree = from_unit.entries_tree(None)?;
+ let from_root = from_tree.root()?;
+ let root = DebuggingInformationEntry::convert_entry(
+ from_root,
+ &from_unit,
+ base_id,
+ &mut entries,
+ &mut entry_offsets,
+ entry_ids,
+ None,
+ unit_id,
+ )?;
+
+ Ok(ConvertUnit {
+ from_unit,
+ base_id,
+ encoding,
+ entries,
+ entry_offsets,
+ root,
+ })
+ }
+
+ /// Create entry attributes by reading the data in the input sections.
+ fn convert_attributes<R: Reader<Offset = usize>>(
+ unit: ConvertUnit<R>,
+ entry_ids: &HashMap<UnitSectionOffset, (UnitId, UnitEntryId)>,
+ dwarf: &read::Dwarf<R>,
+ line_strings: &mut write::LineStringTable,
+ strings: &mut write::StringTable,
+ convert_address: &dyn Fn(u64) -> Option<Address>,
+ ) -> ConvertResult<Unit> {
+ let from_unit = unit.from_unit;
+ let base_address =
+ convert_address(from_unit.low_pc).ok_or(ConvertError::InvalidAddress)?;
+
+ let (line_program_offset, line_program, line_program_files) =
+ match from_unit.line_program {
+ Some(ref from_program) => {
+ let from_program = from_program.clone();
+ let line_program_offset = from_program.header().offset();
+ let (line_program, line_program_files) = LineProgram::from(
+ from_program,
+ dwarf,
+ line_strings,
+ strings,
+ convert_address,
+ )?;
+ (Some(line_program_offset), line_program, line_program_files)
+ }
+ None => (None, LineProgram::none(), Vec::new()),
+ };
+
+ let mut ranges = RangeListTable::default();
+ let mut locations = LocationListTable::default();
+
+ let mut context = ConvertUnitContext {
+ entry_ids,
+ dwarf,
+ unit: &from_unit,
+ line_strings,
+ strings,
+ ranges: &mut ranges,
+ locations: &mut locations,
+ convert_address,
+ base_address,
+ line_program_offset,
+ line_program_files,
+ };
+
+ let mut entries = unit.entries;
+ for entry in &mut entries {
+ entry.convert_attributes(&mut context, &unit.entry_offsets)?;
+ }
+
+ Ok(Unit {
+ base_id: unit.base_id,
+ encoding: unit.encoding,
+ line_program,
+ ranges,
+ locations,
+ entries,
+ root: unit.root,
+ })
+ }
+ }
+
+ impl DebuggingInformationEntry {
+ /// Create an entry by reading the data in the input sections.
+ ///
+ /// Does not add the entry attributes.
+ fn convert_entry<R: Reader<Offset = usize>>(
+ from: read::EntriesTreeNode<R>,
+ from_unit: &read::Unit<R>,
+ base_id: BaseId,
+ entries: &mut Vec<DebuggingInformationEntry>,
+ entry_offsets: &mut Vec<read::UnitOffset>,
+ entry_ids: &mut HashMap<UnitSectionOffset, (UnitId, UnitEntryId)>,
+ parent: Option<UnitEntryId>,
+ unit_id: UnitId,
+ ) -> ConvertResult<UnitEntryId> {
+ let from_entry = from.entry();
+ let id = DebuggingInformationEntry::new(base_id, entries, parent, from_entry.tag());
+ let offset = from_entry.offset();
+ entry_offsets.push(offset);
+ entry_ids.insert(offset.to_unit_section_offset(from_unit), (unit_id, id));
+
+ let mut from_children = from.children();
+ while let Some(from_child) = from_children.next()? {
+ DebuggingInformationEntry::convert_entry(
+ from_child,
+ from_unit,
+ base_id,
+ entries,
+ entry_offsets,
+ entry_ids,
+ Some(id),
+ unit_id,
+ )?;
+ }
+ Ok(id)
+ }
+
+ /// Create an entry's attributes by reading the data in the input sections.
+ fn convert_attributes<R: Reader<Offset = usize>>(
+ &mut self,
+ context: &mut ConvertUnitContext<R>,
+ entry_offsets: &[read::UnitOffset],
+ ) -> ConvertResult<()> {
+ let offset = entry_offsets[self.id.index];
+ let from = context.unit.entry(offset)?;
+ let mut from_attrs = from.attrs();
+ while let Some(from_attr) = from_attrs.next()? {
+ if from_attr.name() == constants::DW_AT_sibling {
+ // This may point to a null entry, so we have to treat it differently.
+ self.set_sibling(true);
+ } else if let Some(attr) = Attribute::from(context, &from_attr)? {
+ self.set(attr.name, attr.value);
+ }
+ }
+ Ok(())
+ }
+ }
+
+ impl Attribute {
+ /// Create an attribute by reading the data in the given sections.
+ pub(crate) fn from<R: Reader<Offset = usize>>(
+ context: &mut ConvertUnitContext<R>,
+ from: &read::Attribute<R>,
+ ) -> ConvertResult<Option<Attribute>> {
+ let value = AttributeValue::from(context, from.value())?;
+ Ok(value.map(|value| Attribute {
+ name: from.name(),
+ value,
+ }))
+ }
+ }
+
+ impl AttributeValue {
+ /// Create an attribute value by reading the data in the given sections.
+ pub(crate) fn from<R: Reader<Offset = usize>>(
+ context: &mut ConvertUnitContext<R>,
+ from: read::AttributeValue<R>,
+ ) -> ConvertResult<Option<AttributeValue>> {
+ let to = match from {
+ read::AttributeValue::Addr(val) => match (context.convert_address)(val) {
+ Some(val) => AttributeValue::Address(val),
+ None => return Err(ConvertError::InvalidAddress),
+ },
+ read::AttributeValue::Block(r) => AttributeValue::Block(r.to_slice()?.into()),
+ read::AttributeValue::Data1(val) => AttributeValue::Data1(val),
+ read::AttributeValue::Data2(val) => AttributeValue::Data2(val),
+ read::AttributeValue::Data4(val) => AttributeValue::Data4(val),
+ read::AttributeValue::Data8(val) => AttributeValue::Data8(val),
+ read::AttributeValue::Sdata(val) => AttributeValue::Sdata(val),
+ read::AttributeValue::Udata(val) => AttributeValue::Udata(val),
+ read::AttributeValue::Exprloc(expression) => {
+ let expression = Expression::from(
+ expression,
+ context.unit.encoding(),
+ Some(context.dwarf),
+ Some(context.unit),
+ Some(context.entry_ids),
+ context.convert_address,
+ )?;
+ AttributeValue::Exprloc(expression)
+ }
+ // TODO: it would be nice to preserve the flag form.
+ read::AttributeValue::Flag(val) => AttributeValue::Flag(val),
+ read::AttributeValue::DebugAddrBase(_base) => {
+ // We convert all address indices to addresses,
+ // so this is unneeded.
+ return Ok(None);
+ }
+ read::AttributeValue::DebugAddrIndex(index) => {
+ let val = context.dwarf.address(context.unit, index)?;
+ match (context.convert_address)(val) {
+ Some(val) => AttributeValue::Address(val),
+ None => return Err(ConvertError::InvalidAddress),
+ }
+ }
+ read::AttributeValue::UnitRef(val) => {
+ if !context.unit.header.is_valid_offset(val) {
+ return Err(ConvertError::InvalidUnitRef);
+ }
+ let id = context
+ .entry_ids
+ .get(&val.to_unit_section_offset(context.unit))
+ .ok_or(ConvertError::InvalidUnitRef)?;
+ AttributeValue::UnitRef(id.1)
+ }
+ read::AttributeValue::DebugInfoRef(val) => {
+ // TODO: support relocation of this value
+ let id = context
+ .entry_ids
+ .get(&UnitSectionOffset::DebugInfoOffset(val))
+ .ok_or(ConvertError::InvalidDebugInfoRef)?;
+ AttributeValue::DebugInfoRef(Reference::Entry(id.0, id.1))
+ }
+ read::AttributeValue::DebugInfoRefSup(val) => AttributeValue::DebugInfoRefSup(val),
+ read::AttributeValue::DebugLineRef(val) => {
+ // The CU DIE should reference only the line program that we've already
+ // converted, so check that this offset matches it.
+ if Some(val) == context.line_program_offset {
+ AttributeValue::LineProgramRef
+ } else {
+ return Err(ConvertError::InvalidLineRef);
+ }
+ }
+ read::AttributeValue::DebugMacinfoRef(val) => AttributeValue::DebugMacinfoRef(val),
+ read::AttributeValue::DebugMacroRef(val) => AttributeValue::DebugMacroRef(val),
+ read::AttributeValue::LocationListsRef(val) => {
+ let iter = context
+ .dwarf
+ .locations
+ .raw_locations(val, context.unit.encoding())?;
+ let loc_list = LocationList::from(iter, context)?;
+ let loc_id = context.locations.add(loc_list);
+ AttributeValue::LocationListRef(loc_id)
+ }
+ read::AttributeValue::DebugLocListsBase(_base) => {
+ // We convert all location list indices to offsets,
+ // so this is unneeded.
+ return Ok(None);
+ }
+ read::AttributeValue::DebugLocListsIndex(index) => {
+ let offset = context.dwarf.locations_offset(context.unit, index)?;
+ let iter = context
+ .dwarf
+ .locations
+ .raw_locations(offset, context.unit.encoding())?;
+ let loc_list = LocationList::from(iter, context)?;
+ let loc_id = context.locations.add(loc_list);
+ AttributeValue::LocationListRef(loc_id)
+ }
+ read::AttributeValue::RangeListsRef(offset) => {
+ let offset = context.dwarf.ranges_offset_from_raw(context.unit, offset);
+ let iter = context.dwarf.raw_ranges(context.unit, offset)?;
+ let range_list = RangeList::from(iter, context)?;
+ let range_id = context.ranges.add(range_list);
+ AttributeValue::RangeListRef(range_id)
+ }
+ read::AttributeValue::DebugRngListsBase(_base) => {
+ // We convert all range list indices to offsets,
+ // so this is unneeded.
+ return Ok(None);
+ }
+ read::AttributeValue::DebugRngListsIndex(index) => {
+ let offset = context.dwarf.ranges_offset(context.unit, index)?;
+ let iter = context
+ .dwarf
+ .ranges
+ .raw_ranges(offset, context.unit.encoding())?;
+ let range_list = RangeList::from(iter, context)?;
+ let range_id = context.ranges.add(range_list);
+ AttributeValue::RangeListRef(range_id)
+ }
+ read::AttributeValue::DebugTypesRef(val) => AttributeValue::DebugTypesRef(val),
+ read::AttributeValue::DebugStrRef(offset) => {
+ let r = context.dwarf.string(offset)?;
+ let id = context.strings.add(r.to_slice()?);
+ AttributeValue::StringRef(id)
+ }
+ read::AttributeValue::DebugStrRefSup(val) => AttributeValue::DebugStrRefSup(val),
+ read::AttributeValue::DebugStrOffsetsBase(_base) => {
+ // We convert all string offsets to `.debug_str` references,
+ // so this is unneeded.
+ return Ok(None);
+ }
+ read::AttributeValue::DebugStrOffsetsIndex(index) => {
+ let offset = context.dwarf.string_offset(context.unit, index)?;
+ let r = context.dwarf.string(offset)?;
+ let id = context.strings.add(r.to_slice()?);
+ AttributeValue::StringRef(id)
+ }
+ read::AttributeValue::DebugLineStrRef(offset) => {
+ let r = context.dwarf.line_string(offset)?;
+ let id = context.line_strings.add(r.to_slice()?);
+ AttributeValue::LineStringRef(id)
+ }
+ read::AttributeValue::String(r) => AttributeValue::String(r.to_slice()?.into()),
+ read::AttributeValue::Encoding(val) => AttributeValue::Encoding(val),
+ read::AttributeValue::DecimalSign(val) => AttributeValue::DecimalSign(val),
+ read::AttributeValue::Endianity(val) => AttributeValue::Endianity(val),
+ read::AttributeValue::Accessibility(val) => AttributeValue::Accessibility(val),
+ read::AttributeValue::Visibility(val) => AttributeValue::Visibility(val),
+ read::AttributeValue::Virtuality(val) => AttributeValue::Virtuality(val),
+ read::AttributeValue::Language(val) => AttributeValue::Language(val),
+ read::AttributeValue::AddressClass(val) => AttributeValue::AddressClass(val),
+ read::AttributeValue::IdentifierCase(val) => AttributeValue::IdentifierCase(val),
+ read::AttributeValue::CallingConvention(val) => {
+ AttributeValue::CallingConvention(val)
+ }
+ read::AttributeValue::Inline(val) => AttributeValue::Inline(val),
+ read::AttributeValue::Ordering(val) => AttributeValue::Ordering(val),
+ read::AttributeValue::FileIndex(val) => {
+ if val == 0 {
+ // 0 means not specified, even for version 5.
+ AttributeValue::FileIndex(None)
+ } else {
+ match context.line_program_files.get(val as usize) {
+ Some(id) => AttributeValue::FileIndex(Some(*id)),
+ None => return Err(ConvertError::InvalidFileIndex),
+ }
+ }
+ }
+ // Should always be a more specific section reference.
+ read::AttributeValue::SecOffset(_) => {
+ return Err(ConvertError::InvalidAttributeValue);
+ }
+ read::AttributeValue::DwoId(DwoId(val)) => AttributeValue::Udata(val),
+ };
+ Ok(Some(to))
+ }
+ }
+}
+
+#[cfg(test)]
+#[cfg(feature = "read")]
+mod tests {
+ use super::*;
+ use crate::common::{
+ DebugAddrBase, DebugLocListsBase, DebugRngListsBase, DebugStrOffsetsBase, LineEncoding,
+ };
+ use crate::constants;
+ use crate::read;
+ use crate::write::{
+ DebugLine, DebugLineStr, DebugStr, DwarfUnit, EndianVec, LineString, LineStringTable,
+ Location, LocationList, LocationListTable, Range, RangeList, RangeListOffsets,
+ RangeListTable, StringTable,
+ };
+ use crate::LittleEndian;
+ use std::collections::HashMap;
+ use std::mem;
+
+ #[test]
+ #[allow(clippy::cyclomatic_complexity)]
+ fn test_unit_table() {
+ let mut strings = StringTable::default();
+
+ let mut units = UnitTable::default();
+ let unit_id1 = units.add(Unit::new(
+ Encoding {
+ version: 4,
+ address_size: 8,
+ format: Format::Dwarf32,
+ },
+ LineProgram::none(),
+ ));
+ let unit2 = units.add(Unit::new(
+ Encoding {
+ version: 2,
+ address_size: 4,
+ format: Format::Dwarf64,
+ },
+ LineProgram::none(),
+ ));
+ let unit3 = units.add(Unit::new(
+ Encoding {
+ version: 5,
+ address_size: 4,
+ format: Format::Dwarf32,
+ },
+ LineProgram::none(),
+ ));
+ assert_eq!(units.count(), 3);
+ {
+ let unit1 = units.get_mut(unit_id1);
+ assert_eq!(unit1.version(), 4);
+ assert_eq!(unit1.address_size(), 8);
+ assert_eq!(unit1.format(), Format::Dwarf32);
+ assert_eq!(unit1.count(), 1);
+
+ let root_id = unit1.root();
+ assert_eq!(root_id, UnitEntryId::new(unit1.base_id, 0));
+ {
+ let root = unit1.get_mut(root_id);
+ assert_eq!(root.id(), root_id);
+ assert!(root.parent().is_none());
+ assert_eq!(root.tag(), constants::DW_TAG_compile_unit);
+
+ // Test get/get_mut
+ assert!(root.get(constants::DW_AT_producer).is_none());
+ assert!(root.get_mut(constants::DW_AT_producer).is_none());
+ let mut producer = AttributeValue::String(b"root"[..].into());
+ root.set(constants::DW_AT_producer, producer.clone());
+ assert_eq!(root.get(constants::DW_AT_producer), Some(&producer));
+ assert_eq!(root.get_mut(constants::DW_AT_producer), Some(&mut producer));
+
+ // Test attrs
+ let mut attrs = root.attrs();
+ let attr = attrs.next().unwrap();
+ assert_eq!(attr.name(), constants::DW_AT_producer);
+ assert_eq!(attr.get(), &producer);
+ assert!(attrs.next().is_none());
+ }
+
+ let child1 = unit1.add(root_id, constants::DW_TAG_subprogram);
+ assert_eq!(child1, UnitEntryId::new(unit1.base_id, 1));
+ {
+ let child1 = unit1.get_mut(child1);
+ assert_eq!(child1.parent(), Some(root_id));
+
+ let tmp = AttributeValue::String(b"tmp"[..].into());
+ child1.set(constants::DW_AT_name, tmp.clone());
+ assert_eq!(child1.get(constants::DW_AT_name), Some(&tmp));
+
+ // Test attrs_mut
+ let name = AttributeValue::StringRef(strings.add(&b"child1"[..]));
+ {
+ let attr = child1.attrs_mut().next().unwrap();
+ assert_eq!(attr.name(), constants::DW_AT_name);
+ attr.set(name.clone());
+ }
+ assert_eq!(child1.get(constants::DW_AT_name), Some(&name));
+ }
+
+ let child2 = unit1.add(root_id, constants::DW_TAG_subprogram);
+ assert_eq!(child2, UnitEntryId::new(unit1.base_id, 2));
+ {
+ let child2 = unit1.get_mut(child2);
+ assert_eq!(child2.parent(), Some(root_id));
+
+ let tmp = AttributeValue::String(b"tmp"[..].into());
+ child2.set(constants::DW_AT_name, tmp.clone());
+ assert_eq!(child2.get(constants::DW_AT_name), Some(&tmp));
+
+ // Test replace
+ let name = AttributeValue::StringRef(strings.add(&b"child2"[..]));
+ child2.set(constants::DW_AT_name, name.clone());
+ assert_eq!(child2.get(constants::DW_AT_name), Some(&name));
+ }
+
+ {
+ let root = unit1.get(root_id);
+ assert_eq!(
+ root.children().cloned().collect::<Vec<_>>(),
+ vec![child1, child2]
+ );
+ }
+ }
+ {
+ let unit2 = units.get(unit2);
+ assert_eq!(unit2.version(), 2);
+ assert_eq!(unit2.address_size(), 4);
+ assert_eq!(unit2.format(), Format::Dwarf64);
+ assert_eq!(unit2.count(), 1);
+
+ let root = unit2.root();
+ assert_eq!(root, UnitEntryId::new(unit2.base_id, 0));
+ let root = unit2.get(root);
+ assert_eq!(root.id(), UnitEntryId::new(unit2.base_id, 0));
+ assert!(root.parent().is_none());
+ assert_eq!(root.tag(), constants::DW_TAG_compile_unit);
+ }
+
+ let mut sections = Sections::new(EndianVec::new(LittleEndian));
+ let debug_line_str_offsets = DebugLineStrOffsets::none();
+ let debug_str_offsets = strings.write(&mut sections.debug_str).unwrap();
+ units
+ .write(&mut sections, &debug_line_str_offsets, &debug_str_offsets)
+ .unwrap();
+
+ println!("{:?}", sections.debug_str);
+ println!("{:?}", sections.debug_info);
+ println!("{:?}", sections.debug_abbrev);
+
+ let dwarf = read::Dwarf {
+ debug_abbrev: read::DebugAbbrev::new(sections.debug_abbrev.slice(), LittleEndian),
+ debug_info: read::DebugInfo::new(sections.debug_info.slice(), LittleEndian),
+ debug_str: read::DebugStr::new(sections.debug_str.slice(), LittleEndian),
+ ..Default::default()
+ };
+ let mut read_units = dwarf.units();
+
+ {
+ let read_unit1 = read_units.next().unwrap().unwrap();
+ let unit1 = units.get(unit_id1);
+ assert_eq!(unit1.version(), read_unit1.version());
+ assert_eq!(unit1.address_size(), read_unit1.address_size());
+ assert_eq!(unit1.format(), read_unit1.format());
+
+ let read_unit1 = dwarf.unit(read_unit1).unwrap();
+ let mut read_entries = read_unit1.entries();
+
+ let root = unit1.get(unit1.root());
+ {
+ let (depth, read_root) = read_entries.next_dfs().unwrap().unwrap();
+ assert_eq!(depth, 0);
+ assert_eq!(root.tag(), read_root.tag());
+ assert!(read_root.has_children());
+
+ let producer = match root.get(constants::DW_AT_producer).unwrap() {
+ AttributeValue::String(ref producer) => &**producer,
+ otherwise => panic!("unexpected {:?}", otherwise),
+ };
+ assert_eq!(producer, b"root");
+ let read_producer = read_root
+ .attr_value(constants::DW_AT_producer)
+ .unwrap()
+ .unwrap();
+ assert_eq!(
+ dwarf
+ .attr_string(&read_unit1, read_producer)
+ .unwrap()
+ .slice(),
+ producer
+ );
+ }
+
+ let mut children = root.children().cloned();
+
+ {
+ let child = children.next().unwrap();
+ assert_eq!(child, UnitEntryId::new(unit1.base_id, 1));
+ let child = unit1.get(child);
+ let (depth, read_child) = read_entries.next_dfs().unwrap().unwrap();
+ assert_eq!(depth, 1);
+ assert_eq!(child.tag(), read_child.tag());
+ assert!(!read_child.has_children());
+
+ let name = match child.get(constants::DW_AT_name).unwrap() {
+ AttributeValue::StringRef(name) => *name,
+ otherwise => panic!("unexpected {:?}", otherwise),
+ };
+ let name = strings.get(name);
+ assert_eq!(name, b"child1");
+ let read_name = read_child
+ .attr_value(constants::DW_AT_name)
+ .unwrap()
+ .unwrap();
+ assert_eq!(
+ dwarf.attr_string(&read_unit1, read_name).unwrap().slice(),
+ name
+ );
+ }
+
+ {
+ let child = children.next().unwrap();
+ assert_eq!(child, UnitEntryId::new(unit1.base_id, 2));
+ let child = unit1.get(child);
+ let (depth, read_child) = read_entries.next_dfs().unwrap().unwrap();
+ assert_eq!(depth, 0);
+ assert_eq!(child.tag(), read_child.tag());
+ assert!(!read_child.has_children());
+
+ let name = match child.get(constants::DW_AT_name).unwrap() {
+ AttributeValue::StringRef(name) => *name,
+ otherwise => panic!("unexpected {:?}", otherwise),
+ };
+ let name = strings.get(name);
+ assert_eq!(name, b"child2");
+ let read_name = read_child
+ .attr_value(constants::DW_AT_name)
+ .unwrap()
+ .unwrap();
+ assert_eq!(
+ dwarf.attr_string(&read_unit1, read_name).unwrap().slice(),
+ name
+ );
+ }
+
+ assert!(read_entries.next_dfs().unwrap().is_none());
+ }
+
+ {
+ let read_unit2 = read_units.next().unwrap().unwrap();
+ let unit2 = units.get(unit2);
+ assert_eq!(unit2.version(), read_unit2.version());
+ assert_eq!(unit2.address_size(), read_unit2.address_size());
+ assert_eq!(unit2.format(), read_unit2.format());
+
+ let abbrevs = dwarf.abbreviations(&read_unit2).unwrap();
+ let mut read_entries = read_unit2.entries(&abbrevs);
+
+ {
+ let root = unit2.get(unit2.root());
+ let (depth, read_root) = read_entries.next_dfs().unwrap().unwrap();
+ assert_eq!(depth, 0);
+ assert_eq!(root.tag(), read_root.tag());
+ assert!(!read_root.has_children());
+ }
+
+ assert!(read_entries.next_dfs().unwrap().is_none());
+ }
+
+ {
+ let read_unit3 = read_units.next().unwrap().unwrap();
+ let unit3 = units.get(unit3);
+ assert_eq!(unit3.version(), read_unit3.version());
+ assert_eq!(unit3.address_size(), read_unit3.address_size());
+ assert_eq!(unit3.format(), read_unit3.format());
+
+ let abbrevs = dwarf.abbreviations(&read_unit3).unwrap();
+ let mut read_entries = read_unit3.entries(&abbrevs);
+
+ {
+ let root = unit3.get(unit3.root());
+ let (depth, read_root) = read_entries.next_dfs().unwrap().unwrap();
+ assert_eq!(depth, 0);
+ assert_eq!(root.tag(), read_root.tag());
+ assert!(!read_root.has_children());
+ }
+
+ assert!(read_entries.next_dfs().unwrap().is_none());
+ }
+
+ assert!(read_units.next().unwrap().is_none());
+
+ let mut convert_line_strings = LineStringTable::default();
+ let mut convert_strings = StringTable::default();
+ let convert_units = UnitTable::from(
+ &dwarf,
+ &mut convert_line_strings,
+ &mut convert_strings,
+ &|address| Some(Address::Constant(address)),
+ )
+ .unwrap();
+ assert_eq!(convert_units.count(), units.count());
+
+ for i in 0..convert_units.count() {
+ let unit_id = units.id(i);
+ let unit = units.get(unit_id);
+ let convert_unit_id = convert_units.id(i);
+ let convert_unit = convert_units.get(convert_unit_id);
+ assert_eq!(convert_unit.version(), unit.version());
+ assert_eq!(convert_unit.address_size(), unit.address_size());
+ assert_eq!(convert_unit.format(), unit.format());
+ assert_eq!(convert_unit.count(), unit.count());
+
+ let root = unit.get(unit.root());
+ let convert_root = convert_unit.get(convert_unit.root());
+ assert_eq!(convert_root.tag(), root.tag());
+ for (convert_attr, attr) in convert_root.attrs().zip(root.attrs()) {
+ assert_eq!(convert_attr, attr);
+ }
+ }
+ }
+
+ #[test]
+ fn test_attribute_value() {
+ // Create a string table and a string with a non-zero id/offset.
+ let mut strings = StringTable::default();
+ strings.add("string one");
+ let string_id = strings.add("string two");
+ let mut debug_str = DebugStr::from(EndianVec::new(LittleEndian));
+ let debug_str_offsets = strings.write(&mut debug_str).unwrap();
+ let read_debug_str = read::DebugStr::new(debug_str.slice(), LittleEndian);
+
+ let mut line_strings = LineStringTable::default();
+ line_strings.add("line string one");
+ let line_string_id = line_strings.add("line string two");
+ let mut debug_line_str = DebugLineStr::from(EndianVec::new(LittleEndian));
+ let debug_line_str_offsets = line_strings.write(&mut debug_line_str).unwrap();
+ let read_debug_line_str =
+ read::DebugLineStr::from(read::EndianSlice::new(debug_line_str.slice(), LittleEndian));
+
+ let data = vec![1, 2, 3, 4];
+ let read_data = read::EndianSlice::new(&[1, 2, 3, 4], LittleEndian);
+
+ let mut expression = Expression::new();
+ expression.op_constu(57);
+ let read_expression = read::Expression(read::EndianSlice::new(
+ &[constants::DW_OP_constu.0, 57],
+ LittleEndian,
+ ));
+
+ let mut ranges = RangeListTable::default();
+ let range_id = ranges.add(RangeList(vec![Range::StartEnd {
+ begin: Address::Constant(0x1234),
+ end: Address::Constant(0x2345),
+ }]));
+
+ let mut locations = LocationListTable::default();
+ let loc_id = locations.add(LocationList(vec![Location::StartEnd {
+ begin: Address::Constant(0x1234),
+ end: Address::Constant(0x2345),
+ data: expression.clone(),
+ }]));
+
+ for &version in &[2, 3, 4, 5] {
+ for &address_size in &[4, 8] {
+ for &format in &[Format::Dwarf32, Format::Dwarf64] {
+ let encoding = Encoding {
+ format,
+ version,
+ address_size,
+ };
+
+ let mut sections = Sections::new(EndianVec::new(LittleEndian));
+ let range_list_offsets = ranges.write(&mut sections, encoding).unwrap();
+ let loc_list_offsets = locations.write(&mut sections, encoding, None).unwrap();
+
+ let read_debug_ranges =
+ read::DebugRanges::new(sections.debug_ranges.slice(), LittleEndian);
+ let read_debug_rnglists =
+ read::DebugRngLists::new(sections.debug_rnglists.slice(), LittleEndian);
+
+ let read_debug_loc =
+ read::DebugLoc::new(sections.debug_loc.slice(), LittleEndian);
+ let read_debug_loclists =
+ read::DebugLocLists::new(sections.debug_loclists.slice(), LittleEndian);
+
+ let mut units = UnitTable::default();
+ let unit = units.add(Unit::new(encoding, LineProgram::none()));
+ let unit = units.get(unit);
+ let encoding = Encoding {
+ format,
+ version,
+ address_size,
+ };
+ let from_unit = read::UnitHeader::new(
+ encoding,
+ 0,
+ read::UnitType::Compilation,
+ DebugAbbrevOffset(0),
+ DebugInfoOffset(0).into(),
+ read::EndianSlice::new(&[], LittleEndian),
+ );
+
+ for &(ref name, ref value, ref expect_value) in &[
+ (
+ constants::DW_AT_name,
+ AttributeValue::Address(Address::Constant(0x1234)),
+ read::AttributeValue::Addr(0x1234),
+ ),
+ (
+ constants::DW_AT_name,
+ AttributeValue::Block(data.clone()),
+ read::AttributeValue::Block(read_data),
+ ),
+ (
+ constants::DW_AT_name,
+ AttributeValue::Data1(0x12),
+ read::AttributeValue::Data1(0x12),
+ ),
+ (
+ constants::DW_AT_name,
+ AttributeValue::Data2(0x1234),
+ read::AttributeValue::Data2(0x1234),
+ ),
+ (
+ constants::DW_AT_name,
+ AttributeValue::Data4(0x1234),
+ read::AttributeValue::Data4(0x1234),
+ ),
+ (
+ constants::DW_AT_name,
+ AttributeValue::Data8(0x1234),
+ read::AttributeValue::Data8(0x1234),
+ ),
+ (
+ constants::DW_AT_name,
+ AttributeValue::Sdata(0x1234),
+ read::AttributeValue::Sdata(0x1234),
+ ),
+ (
+ constants::DW_AT_name,
+ AttributeValue::Udata(0x1234),
+ read::AttributeValue::Udata(0x1234),
+ ),
+ (
+ constants::DW_AT_name,
+ AttributeValue::Exprloc(expression.clone()),
+ read::AttributeValue::Exprloc(read_expression),
+ ),
+ (
+ constants::DW_AT_name,
+ AttributeValue::Flag(false),
+ read::AttributeValue::Flag(false),
+ ),
+ /*
+ (
+ constants::DW_AT_name,
+ AttributeValue::FlagPresent,
+ read::AttributeValue::Flag(true),
+ ),
+ */
+ (
+ constants::DW_AT_name,
+ AttributeValue::DebugInfoRefSup(DebugInfoOffset(0x1234)),
+ read::AttributeValue::DebugInfoRefSup(DebugInfoOffset(0x1234)),
+ ),
+ (
+ constants::DW_AT_location,
+ AttributeValue::LocationListRef(loc_id),
+ read::AttributeValue::SecOffset(loc_list_offsets.get(loc_id).0),
+ ),
+ (
+ constants::DW_AT_macro_info,
+ AttributeValue::DebugMacinfoRef(DebugMacinfoOffset(0x1234)),
+ read::AttributeValue::SecOffset(0x1234),
+ ),
+ (
+ constants::DW_AT_macros,
+ AttributeValue::DebugMacroRef(DebugMacroOffset(0x1234)),
+ read::AttributeValue::SecOffset(0x1234),
+ ),
+ (
+ constants::DW_AT_ranges,
+ AttributeValue::RangeListRef(range_id),
+ read::AttributeValue::SecOffset(range_list_offsets.get(range_id).0),
+ ),
+ (
+ constants::DW_AT_name,
+ AttributeValue::DebugTypesRef(DebugTypeSignature(0x1234)),
+ read::AttributeValue::DebugTypesRef(DebugTypeSignature(0x1234)),
+ ),
+ (
+ constants::DW_AT_name,
+ AttributeValue::StringRef(string_id),
+ read::AttributeValue::DebugStrRef(debug_str_offsets.get(string_id)),
+ ),
+ (
+ constants::DW_AT_name,
+ AttributeValue::DebugStrRefSup(DebugStrOffset(0x1234)),
+ read::AttributeValue::DebugStrRefSup(DebugStrOffset(0x1234)),
+ ),
+ (
+ constants::DW_AT_name,
+ AttributeValue::LineStringRef(line_string_id),
+ read::AttributeValue::DebugLineStrRef(
+ debug_line_str_offsets.get(line_string_id),
+ ),
+ ),
+ (
+ constants::DW_AT_name,
+ AttributeValue::String(data.clone()),
+ read::AttributeValue::String(read_data),
+ ),
+ (
+ constants::DW_AT_encoding,
+ AttributeValue::Encoding(constants::DwAte(0x12)),
+ read::AttributeValue::Udata(0x12),
+ ),
+ (
+ constants::DW_AT_decimal_sign,
+ AttributeValue::DecimalSign(constants::DwDs(0x12)),
+ read::AttributeValue::Udata(0x12),
+ ),
+ (
+ constants::DW_AT_endianity,
+ AttributeValue::Endianity(constants::DwEnd(0x12)),
+ read::AttributeValue::Udata(0x12),
+ ),
+ (
+ constants::DW_AT_accessibility,
+ AttributeValue::Accessibility(constants::DwAccess(0x12)),
+ read::AttributeValue::Udata(0x12),
+ ),
+ (
+ constants::DW_AT_visibility,
+ AttributeValue::Visibility(constants::DwVis(0x12)),
+ read::AttributeValue::Udata(0x12),
+ ),
+ (
+ constants::DW_AT_virtuality,
+ AttributeValue::Virtuality(constants::DwVirtuality(0x12)),
+ read::AttributeValue::Udata(0x12),
+ ),
+ (
+ constants::DW_AT_language,
+ AttributeValue::Language(constants::DwLang(0x12)),
+ read::AttributeValue::Udata(0x12),
+ ),
+ (
+ constants::DW_AT_address_class,
+ AttributeValue::AddressClass(constants::DwAddr(0x12)),
+ read::AttributeValue::Udata(0x12),
+ ),
+ (
+ constants::DW_AT_identifier_case,
+ AttributeValue::IdentifierCase(constants::DwId(0x12)),
+ read::AttributeValue::Udata(0x12),
+ ),
+ (
+ constants::DW_AT_calling_convention,
+ AttributeValue::CallingConvention(constants::DwCc(0x12)),
+ read::AttributeValue::Udata(0x12),
+ ),
+ (
+ constants::DW_AT_ordering,
+ AttributeValue::Ordering(constants::DwOrd(0x12)),
+ read::AttributeValue::Udata(0x12),
+ ),
+ (
+ constants::DW_AT_inline,
+ AttributeValue::Inline(constants::DwInl(0x12)),
+ read::AttributeValue::Udata(0x12),
+ ),
+ ][..]
+ {
+ let form = value.form(encoding).unwrap();
+ let attr = Attribute {
+ name: *name,
+ value: value.clone(),
+ };
+
+ let offsets = UnitOffsets::none();
+ let line_program_offset = None;
+ let mut debug_info_refs = Vec::new();
+ let mut unit_refs = Vec::new();
+ let mut debug_info = DebugInfo::from(EndianVec::new(LittleEndian));
+ attr.value
+ .write(
+ &mut debug_info,
+ &mut debug_info_refs,
+ &mut unit_refs,
+ &unit,
+ &offsets,
+ line_program_offset,
+ &debug_line_str_offsets,
+ &debug_str_offsets,
+ &range_list_offsets,
+ &loc_list_offsets,
+ )
+ .unwrap();
+
+ let spec = read::AttributeSpecification::new(*name, form, None);
+ let mut r = read::EndianSlice::new(debug_info.slice(), LittleEndian);
+ let read_attr = read::parse_attribute(&mut r, encoding, spec).unwrap();
+ let read_value = &read_attr.raw_value();
+ // read::AttributeValue is invariant in the lifetime of R.
+ // The lifetimes here are all okay, so transmute it.
+ let read_value = unsafe {
+ mem::transmute::<
+ &read::AttributeValue<read::EndianSlice<LittleEndian>>,
+ &read::AttributeValue<read::EndianSlice<LittleEndian>>,
+ >(read_value)
+ };
+ assert_eq!(read_value, expect_value);
+
+ let dwarf = read::Dwarf {
+ debug_str: read_debug_str.clone(),
+ debug_line_str: read_debug_line_str.clone(),
+ ranges: read::RangeLists::new(read_debug_ranges, read_debug_rnglists),
+ locations: read::LocationLists::new(
+ read_debug_loc,
+ read_debug_loclists,
+ ),
+ ..Default::default()
+ };
+
+ let unit = read::Unit {
+ header: from_unit,
+ abbreviations: read::Abbreviations::default(),
+ name: None,
+ comp_dir: None,
+ low_pc: 0,
+ str_offsets_base: DebugStrOffsetsBase(0),
+ addr_base: DebugAddrBase(0),
+ loclists_base: DebugLocListsBase(0),
+ rnglists_base: DebugRngListsBase(0),
+ line_program: None,
+ dwo_id: None,
+ };
+
+ let mut context = convert::ConvertUnitContext {
+ dwarf: &dwarf,
+ unit: &unit,
+ line_strings: &mut line_strings,
+ strings: &mut strings,
+ ranges: &mut ranges,
+ locations: &mut locations,
+ convert_address: &|address| Some(Address::Constant(address)),
+ base_address: Address::Constant(0),
+ line_program_offset: None,
+ line_program_files: Vec::new(),
+ entry_ids: &HashMap::new(),
+ };
+
+ let convert_attr =
+ Attribute::from(&mut context, &read_attr).unwrap().unwrap();
+ assert_eq!(convert_attr, attr);
+ }
+ }
+ }
+ }
+ }
+
+ #[test]
+ #[allow(clippy::cyclomatic_complexity)]
+ fn test_unit_ref() {
+ let mut units = UnitTable::default();
+ let unit_id1 = units.add(Unit::new(
+ Encoding {
+ version: 4,
+ address_size: 8,
+ format: Format::Dwarf32,
+ },
+ LineProgram::none(),
+ ));
+ assert_eq!(unit_id1, units.id(0));
+ let unit_id2 = units.add(Unit::new(
+ Encoding {
+ version: 2,
+ address_size: 4,
+ format: Format::Dwarf64,
+ },
+ LineProgram::none(),
+ ));
+ assert_eq!(unit_id2, units.id(1));
+ let unit1_child1 = UnitEntryId::new(units.get(unit_id1).base_id, 1);
+ let unit1_child2 = UnitEntryId::new(units.get(unit_id1).base_id, 2);
+ let unit2_child1 = UnitEntryId::new(units.get(unit_id2).base_id, 1);
+ let unit2_child2 = UnitEntryId::new(units.get(unit_id2).base_id, 2);
+ {
+ let unit1 = units.get_mut(unit_id1);
+ let root = unit1.root();
+ let child_id1 = unit1.add(root, constants::DW_TAG_subprogram);
+ assert_eq!(child_id1, unit1_child1);
+ let child_id2 = unit1.add(root, constants::DW_TAG_subprogram);
+ assert_eq!(child_id2, unit1_child2);
+ {
+ let child1 = unit1.get_mut(child_id1);
+ child1.set(constants::DW_AT_type, AttributeValue::UnitRef(child_id2));
+ }
+ {
+ let child2 = unit1.get_mut(child_id2);
+ child2.set(
+ constants::DW_AT_type,
+ AttributeValue::DebugInfoRef(Reference::Entry(unit_id2, unit2_child1)),
+ );
+ }
+ }
+ {
+ let unit2 = units.get_mut(unit_id2);
+ let root = unit2.root();
+ let child_id1 = unit2.add(root, constants::DW_TAG_subprogram);
+ assert_eq!(child_id1, unit2_child1);
+ let child_id2 = unit2.add(root, constants::DW_TAG_subprogram);
+ assert_eq!(child_id2, unit2_child2);
+ {
+ let child1 = unit2.get_mut(child_id1);
+ child1.set(constants::DW_AT_type, AttributeValue::UnitRef(child_id2));
+ }
+ {
+ let child2 = unit2.get_mut(child_id2);
+ child2.set(
+ constants::DW_AT_type,
+ AttributeValue::DebugInfoRef(Reference::Entry(unit_id1, unit1_child1)),
+ );
+ }
+ }
+
+ let debug_line_str_offsets = DebugLineStrOffsets::none();
+ let debug_str_offsets = DebugStrOffsets::none();
+ let mut sections = Sections::new(EndianVec::new(LittleEndian));
+ let debug_info_offsets = units
+ .write(&mut sections, &debug_line_str_offsets, &debug_str_offsets)
+ .unwrap();
+
+ println!("{:?}", sections.debug_info);
+ println!("{:?}", sections.debug_abbrev);
+
+ let dwarf = read::Dwarf {
+ debug_abbrev: read::DebugAbbrev::new(sections.debug_abbrev.slice(), LittleEndian),
+ debug_info: read::DebugInfo::new(sections.debug_info.slice(), LittleEndian),
+ ..Default::default()
+ };
+
+ let mut read_units = dwarf.units();
+ {
+ let read_unit1 = read_units.next().unwrap().unwrap();
+ assert_eq!(
+ read_unit1.offset(),
+ debug_info_offsets.unit(unit_id1).into()
+ );
+
+ let abbrevs = dwarf.abbreviations(&read_unit1).unwrap();
+ let mut read_entries = read_unit1.entries(&abbrevs);
+ {
+ let (_, _read_root) = read_entries.next_dfs().unwrap().unwrap();
+ }
+ {
+ let (_, read_child1) = read_entries.next_dfs().unwrap().unwrap();
+ let offset = debug_info_offsets
+ .entry(unit_id1, unit1_child2)
+ .to_unit_offset(&read_unit1)
+ .unwrap();
+ assert_eq!(
+ read_child1.attr_value(constants::DW_AT_type).unwrap(),
+ Some(read::AttributeValue::UnitRef(offset))
+ );
+ }
+ {
+ let (_, read_child2) = read_entries.next_dfs().unwrap().unwrap();
+ let offset = debug_info_offsets.entry(unit_id2, unit2_child1);
+ assert_eq!(
+ read_child2.attr_value(constants::DW_AT_type).unwrap(),
+ Some(read::AttributeValue::DebugInfoRef(offset))
+ );
+ }
+ }
+ {
+ let read_unit2 = read_units.next().unwrap().unwrap();
+ assert_eq!(
+ read_unit2.offset(),
+ debug_info_offsets.unit(unit_id2).into()
+ );
+
+ let abbrevs = dwarf.abbreviations(&read_unit2).unwrap();
+ let mut read_entries = read_unit2.entries(&abbrevs);
+ {
+ let (_, _read_root) = read_entries.next_dfs().unwrap().unwrap();
+ }
+ {
+ let (_, read_child1) = read_entries.next_dfs().unwrap().unwrap();
+ let offset = debug_info_offsets
+ .entry(unit_id2, unit2_child2)
+ .to_unit_offset(&read_unit2)
+ .unwrap();
+ assert_eq!(
+ read_child1.attr_value(constants::DW_AT_type).unwrap(),
+ Some(read::AttributeValue::UnitRef(offset))
+ );
+ }
+ {
+ let (_, read_child2) = read_entries.next_dfs().unwrap().unwrap();
+ let offset = debug_info_offsets.entry(unit_id1, unit1_child1);
+ assert_eq!(
+ read_child2.attr_value(constants::DW_AT_type).unwrap(),
+ Some(read::AttributeValue::DebugInfoRef(offset))
+ );
+ }
+ }
+
+ let mut convert_line_strings = LineStringTable::default();
+ let mut convert_strings = StringTable::default();
+ let convert_units = UnitTable::from(
+ &dwarf,
+ &mut convert_line_strings,
+ &mut convert_strings,
+ &|address| Some(Address::Constant(address)),
+ )
+ .unwrap();
+ assert_eq!(convert_units.count(), units.count());
+
+ for i in 0..convert_units.count() {
+ let unit = units.get(units.id(i));
+ let convert_unit = convert_units.get(convert_units.id(i));
+ assert_eq!(convert_unit.version(), unit.version());
+ assert_eq!(convert_unit.address_size(), unit.address_size());
+ assert_eq!(convert_unit.format(), unit.format());
+ assert_eq!(convert_unit.count(), unit.count());
+
+ let root = unit.get(unit.root());
+ let convert_root = convert_unit.get(convert_unit.root());
+ assert_eq!(convert_root.tag(), root.tag());
+ for (convert_attr, attr) in convert_root.attrs().zip(root.attrs()) {
+ assert_eq!(convert_attr, attr);
+ }
+
+ let child1 = unit.get(UnitEntryId::new(unit.base_id, 1));
+ let convert_child1 = convert_unit.get(UnitEntryId::new(convert_unit.base_id, 1));
+ assert_eq!(convert_child1.tag(), child1.tag());
+ for (convert_attr, attr) in convert_child1.attrs().zip(child1.attrs()) {
+ assert_eq!(convert_attr.name, attr.name);
+ match (convert_attr.value.clone(), attr.value.clone()) {
+ (
+ AttributeValue::DebugInfoRef(Reference::Entry(convert_unit, convert_entry)),
+ AttributeValue::DebugInfoRef(Reference::Entry(unit, entry)),
+ ) => {
+ assert_eq!(convert_unit.index, unit.index);
+ assert_eq!(convert_entry.index, entry.index);
+ }
+ (AttributeValue::UnitRef(convert_id), AttributeValue::UnitRef(id)) => {
+ assert_eq!(convert_id.index, id.index);
+ }
+ (convert_value, value) => assert_eq!(convert_value, value),
+ }
+ }
+
+ let child2 = unit.get(UnitEntryId::new(unit.base_id, 2));
+ let convert_child2 = convert_unit.get(UnitEntryId::new(convert_unit.base_id, 2));
+ assert_eq!(convert_child2.tag(), child2.tag());
+ for (convert_attr, attr) in convert_child2.attrs().zip(child2.attrs()) {
+ assert_eq!(convert_attr.name, attr.name);
+ match (convert_attr.value.clone(), attr.value.clone()) {
+ (
+ AttributeValue::DebugInfoRef(Reference::Entry(convert_unit, convert_entry)),
+ AttributeValue::DebugInfoRef(Reference::Entry(unit, entry)),
+ ) => {
+ assert_eq!(convert_unit.index, unit.index);
+ assert_eq!(convert_entry.index, entry.index);
+ }
+ (AttributeValue::UnitRef(convert_id), AttributeValue::UnitRef(id)) => {
+ assert_eq!(convert_id.index, id.index);
+ }
+ (convert_value, value) => assert_eq!(convert_value, value),
+ }
+ }
+ }
+ }
+
+ #[test]
+ fn test_sibling() {
+ fn add_child(
+ unit: &mut Unit,
+ parent: UnitEntryId,
+ tag: constants::DwTag,
+ name: &str,
+ ) -> UnitEntryId {
+ let id = unit.add(parent, tag);
+ let child = unit.get_mut(id);
+ child.set(constants::DW_AT_name, AttributeValue::String(name.into()));
+ child.set_sibling(true);
+ id
+ }
+
+ fn add_children(units: &mut UnitTable, unit_id: UnitId) {
+ let unit = units.get_mut(unit_id);
+ let root = unit.root();
+ let child1 = add_child(unit, root, constants::DW_TAG_subprogram, "child1");
+ add_child(unit, child1, constants::DW_TAG_variable, "grandchild1");
+ add_child(unit, root, constants::DW_TAG_subprogram, "child2");
+ add_child(unit, root, constants::DW_TAG_subprogram, "child3");
+ }
+
+ fn next_child<R: read::Reader<Offset = usize>>(
+ entries: &mut read::EntriesCursor<R>,
+ ) -> (read::UnitOffset, Option<read::UnitOffset>) {
+ let (_, entry) = entries.next_dfs().unwrap().unwrap();
+ let offset = entry.offset();
+ let sibling =
+ entry
+ .attr_value(constants::DW_AT_sibling)
+ .unwrap()
+ .map(|attr| match attr {
+ read::AttributeValue::UnitRef(offset) => offset,
+ _ => panic!("bad sibling value"),
+ });
+ (offset, sibling)
+ }
+
+ fn check_sibling<R: read::Reader<Offset = usize>>(
+ unit: &read::UnitHeader<R>,
+ debug_abbrev: &read::DebugAbbrev<R>,
+ ) {
+ let abbrevs = unit.abbreviations(debug_abbrev).unwrap();
+ let mut entries = unit.entries(&abbrevs);
+ // root
+ entries.next_dfs().unwrap().unwrap();
+ // child1
+ let (_, sibling1) = next_child(&mut entries);
+ // grandchild1
+ entries.next_dfs().unwrap().unwrap();
+ // child2
+ let (offset2, sibling2) = next_child(&mut entries);
+ // child3
+ let (_, _) = next_child(&mut entries);
+ assert_eq!(sibling1, Some(offset2));
+ assert_eq!(sibling2, None);
+ }
+
+ let encoding = Encoding {
+ format: Format::Dwarf32,
+ version: 4,
+ address_size: 8,
+ };
+ let mut units = UnitTable::default();
+ let unit_id1 = units.add(Unit::new(encoding, LineProgram::none()));
+ add_children(&mut units, unit_id1);
+ let unit_id2 = units.add(Unit::new(encoding, LineProgram::none()));
+ add_children(&mut units, unit_id2);
+
+ let debug_line_str_offsets = DebugLineStrOffsets::none();
+ let debug_str_offsets = DebugStrOffsets::none();
+ let mut sections = Sections::new(EndianVec::new(LittleEndian));
+ units
+ .write(&mut sections, &debug_line_str_offsets, &debug_str_offsets)
+ .unwrap();
+
+ println!("{:?}", sections.debug_info);
+ println!("{:?}", sections.debug_abbrev);
+
+ let read_debug_info = read::DebugInfo::new(sections.debug_info.slice(), LittleEndian);
+ let read_debug_abbrev = read::DebugAbbrev::new(sections.debug_abbrev.slice(), LittleEndian);
+ let mut read_units = read_debug_info.units();
+ check_sibling(&read_units.next().unwrap().unwrap(), &read_debug_abbrev);
+ check_sibling(&read_units.next().unwrap().unwrap(), &read_debug_abbrev);
+ }
+
+ #[test]
+ fn test_line_ref() {
+ for &version in &[2, 3, 4, 5] {
+ for &address_size in &[4, 8] {
+ for &format in &[Format::Dwarf32, Format::Dwarf64] {
+ let encoding = Encoding {
+ format,
+ version,
+ address_size,
+ };
+
+ // The line program we'll be referencing.
+ let mut line_program = LineProgram::new(
+ encoding,
+ LineEncoding::default(),
+ LineString::String(b"comp_dir".to_vec()),
+ LineString::String(b"comp_name".to_vec()),
+ None,
+ );
+ let dir = line_program.default_directory();
+ let file1 =
+ line_program.add_file(LineString::String(b"file1".to_vec()), dir, None);
+ let file2 =
+ line_program.add_file(LineString::String(b"file2".to_vec()), dir, None);
+
+ // Write, read, and convert the line program, so that we have the info
+ // required to convert the attributes.
+ let line_strings = DebugLineStrOffsets::none();
+ let strings = DebugStrOffsets::none();
+ let mut debug_line = DebugLine::from(EndianVec::new(LittleEndian));
+ let line_program_offset = line_program
+ .write(&mut debug_line, encoding, &line_strings, &strings)
+ .unwrap();
+ let read_debug_line = read::DebugLine::new(debug_line.slice(), LittleEndian);
+ let read_line_program = read_debug_line
+ .program(
+ line_program_offset,
+ address_size,
+ Some(read::EndianSlice::new(b"comp_dir", LittleEndian)),
+ Some(read::EndianSlice::new(b"comp_name", LittleEndian)),
+ )
+ .unwrap();
+ let dwarf = read::Dwarf::default();
+ let mut convert_line_strings = LineStringTable::default();
+ let mut convert_strings = StringTable::default();
+ let (_, line_program_files) = LineProgram::from(
+ read_line_program,
+ &dwarf,
+ &mut convert_line_strings,
+ &mut convert_strings,
+ &|address| Some(Address::Constant(address)),
+ )
+ .unwrap();
+
+ // Fake the unit.
+ let mut units = UnitTable::default();
+ let unit = units.add(Unit::new(encoding, LineProgram::none()));
+ let unit = units.get(unit);
+ let from_unit = read::UnitHeader::new(
+ encoding,
+ 0,
+ read::UnitType::Compilation,
+ DebugAbbrevOffset(0),
+ DebugInfoOffset(0).into(),
+ read::EndianSlice::new(&[], LittleEndian),
+ );
+
+ for &(ref name, ref value, ref expect_value) in &[
+ (
+ constants::DW_AT_stmt_list,
+ AttributeValue::LineProgramRef,
+ read::AttributeValue::SecOffset(line_program_offset.0),
+ ),
+ (
+ constants::DW_AT_decl_file,
+ AttributeValue::FileIndex(Some(file1)),
+ read::AttributeValue::Udata(file1.raw()),
+ ),
+ (
+ constants::DW_AT_decl_file,
+ AttributeValue::FileIndex(Some(file2)),
+ read::AttributeValue::Udata(file2.raw()),
+ ),
+ ][..]
+ {
+ let mut ranges = RangeListTable::default();
+ let mut locations = LocationListTable::default();
+ let mut strings = StringTable::default();
+ let mut line_strings = LineStringTable::default();
+
+ let form = value.form(encoding).unwrap();
+ let attr = Attribute {
+ name: *name,
+ value: value.clone(),
+ };
+
+ let mut debug_info_refs = Vec::new();
+ let mut unit_refs = Vec::new();
+ let mut debug_info = DebugInfo::from(EndianVec::new(LittleEndian));
+ let offsets = UnitOffsets::none();
+ let debug_line_str_offsets = DebugLineStrOffsets::none();
+ let debug_str_offsets = DebugStrOffsets::none();
+ let range_list_offsets = RangeListOffsets::none();
+ let loc_list_offsets = LocationListOffsets::none();
+ attr.value
+ .write(
+ &mut debug_info,
+ &mut debug_info_refs,
+ &mut unit_refs,
+ &unit,
+ &offsets,
+ Some(line_program_offset),
+ &debug_line_str_offsets,
+ &debug_str_offsets,
+ &range_list_offsets,
+ &loc_list_offsets,
+ )
+ .unwrap();
+
+ let spec = read::AttributeSpecification::new(*name, form, None);
+ let mut r = read::EndianSlice::new(debug_info.slice(), LittleEndian);
+ let read_attr = read::parse_attribute(&mut r, encoding, spec).unwrap();
+ let read_value = &read_attr.raw_value();
+ // read::AttributeValue is invariant in the lifetime of R.
+ // The lifetimes here are all okay, so transmute it.
+ let read_value = unsafe {
+ mem::transmute::<
+ &read::AttributeValue<read::EndianSlice<LittleEndian>>,
+ &read::AttributeValue<read::EndianSlice<LittleEndian>>,
+ >(read_value)
+ };
+ assert_eq!(read_value, expect_value);
+
+ let unit = read::Unit {
+ header: from_unit,
+ abbreviations: read::Abbreviations::default(),
+ name: None,
+ comp_dir: None,
+ low_pc: 0,
+ str_offsets_base: DebugStrOffsetsBase(0),
+ addr_base: DebugAddrBase(0),
+ loclists_base: DebugLocListsBase(0),
+ rnglists_base: DebugRngListsBase(0),
+ line_program: None,
+ dwo_id: None,
+ };
+
+ let mut context = convert::ConvertUnitContext {
+ dwarf: &dwarf,
+ unit: &unit,
+ line_strings: &mut line_strings,
+ strings: &mut strings,
+ ranges: &mut ranges,
+ locations: &mut locations,
+ convert_address: &|address| Some(Address::Constant(address)),
+ base_address: Address::Constant(0),
+ line_program_offset: Some(line_program_offset),
+ line_program_files: line_program_files.clone(),
+ entry_ids: &HashMap::new(),
+ };
+
+ let convert_attr =
+ Attribute::from(&mut context, &read_attr).unwrap().unwrap();
+ assert_eq!(convert_attr, attr);
+ }
+ }
+ }
+ }
+ }
+
+ #[test]
+ fn test_line_program_used() {
+ for used in vec![false, true] {
+ let encoding = Encoding {
+ format: Format::Dwarf32,
+ version: 5,
+ address_size: 8,
+ };
+
+ let line_program = LineProgram::new(
+ encoding,
+ LineEncoding::default(),
+ LineString::String(b"comp_dir".to_vec()),
+ LineString::String(b"comp_name".to_vec()),
+ None,
+ );
+
+ let mut unit = Unit::new(encoding, line_program);
+ let file_id = if used { Some(FileId::new(0)) } else { None };
+ let root = unit.root();
+ unit.get_mut(root).set(
+ constants::DW_AT_decl_file,
+ AttributeValue::FileIndex(file_id),
+ );
+
+ let mut units = UnitTable::default();
+ units.add(unit);
+
+ let debug_line_str_offsets = DebugLineStrOffsets::none();
+ let debug_str_offsets = DebugStrOffsets::none();
+ let mut sections = Sections::new(EndianVec::new(LittleEndian));
+ units
+ .write(&mut sections, &debug_line_str_offsets, &debug_str_offsets)
+ .unwrap();
+ assert_eq!(!used, sections.debug_line.slice().is_empty());
+ }
+ }
+
+ #[test]
+ fn test_delete_child() {
+ fn set_name(unit: &mut Unit, id: UnitEntryId, name: &str) {
+ let entry = unit.get_mut(id);
+ entry.set(constants::DW_AT_name, AttributeValue::String(name.into()));
+ }
+ fn check_name<R: read::Reader>(
+ entry: &read::DebuggingInformationEntry<R>,
+ debug_str: &read::DebugStr<R>,
+ name: &str,
+ ) {
+ let name_attr = entry.attr(constants::DW_AT_name).unwrap().unwrap();
+ let entry_name = name_attr.string_value(debug_str).unwrap();
+ let entry_name_str = entry_name.to_string().unwrap();
+ assert_eq!(entry_name_str, name);
+ }
+ let encoding = Encoding {
+ format: Format::Dwarf32,
+ version: 4,
+ address_size: 8,
+ };
+ let mut dwarf = DwarfUnit::new(encoding);
+ let root = dwarf.unit.root();
+
+ // Add and delete entries in the root unit
+ let child1 = dwarf.unit.add(root, constants::DW_TAG_subprogram);
+ set_name(&mut dwarf.unit, child1, "child1");
+ let grandchild1 = dwarf.unit.add(child1, constants::DW_TAG_variable);
+ set_name(&mut dwarf.unit, grandchild1, "grandchild1");
+ let child2 = dwarf.unit.add(root, constants::DW_TAG_subprogram);
+ set_name(&mut dwarf.unit, child2, "child2");
+ // This deletes both `child1` and its child `grandchild1`
+ dwarf.unit.get_mut(root).delete_child(child1);
+ let child3 = dwarf.unit.add(root, constants::DW_TAG_subprogram);
+ set_name(&mut dwarf.unit, child3, "child3");
+ let child4 = dwarf.unit.add(root, constants::DW_TAG_subprogram);
+ set_name(&mut dwarf.unit, child4, "child4");
+ let grandchild4 = dwarf.unit.add(child4, constants::DW_TAG_variable);
+ set_name(&mut dwarf.unit, grandchild4, "grandchild4");
+ dwarf.unit.get_mut(child4).delete_child(grandchild4);
+
+ let mut sections = Sections::new(EndianVec::new(LittleEndian));
+
+ // Write DWARF data which should only include `child2`, `child3` and `child4`
+ dwarf.write(&mut sections).unwrap();
+
+ let read_debug_info = read::DebugInfo::new(sections.debug_info.slice(), LittleEndian);
+ let read_debug_abbrev = read::DebugAbbrev::new(sections.debug_abbrev.slice(), LittleEndian);
+ let read_debug_str = read::DebugStr::new(sections.debug_str.slice(), LittleEndian);
+ let read_unit = read_debug_info.units().next().unwrap().unwrap();
+ let abbrevs = read_unit.abbreviations(&read_debug_abbrev).unwrap();
+ let mut entries = read_unit.entries(&abbrevs);
+ // root
+ entries.next_dfs().unwrap().unwrap();
+ // child2
+ let (_, read_child2) = entries.next_dfs().unwrap().unwrap();
+ check_name(read_child2, &read_debug_str, "child2");
+ // child3
+ let (_, read_child3) = entries.next_dfs().unwrap().unwrap();
+ check_name(read_child3, &read_debug_str, "child3");
+ // child4
+ let (_, read_child4) = entries.next_dfs().unwrap().unwrap();
+ check_name(read_child4, &read_debug_str, "child4");
+ // There should be no more entries
+ assert!(entries.next_dfs().unwrap().is_none());
+ }
+}
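For orientation, here is a minimal sketch (not part of the vendored diff) of the write-then-read round trip that the tests above exercise, using the same gimli 0.26 write and read APIs; the entry name and tag below are illustrative.

use gimli::write::{AttributeValue, DwarfUnit, EndianVec, Sections};
use gimli::{constants, read, Encoding, Format, LittleEndian};

fn round_trip_sketch() {
    let encoding = Encoding { format: Format::Dwarf32, version: 4, address_size: 8 };
    // Build a unit with one DW_TAG_subprogram child.
    let mut dwarf = DwarfUnit::new(encoding);
    let root = dwarf.unit.root();
    let subprogram = dwarf.unit.add(root, constants::DW_TAG_subprogram);
    dwarf.unit.get_mut(subprogram).set(
        constants::DW_AT_name,
        AttributeValue::String(b"example".to_vec()),
    );

    // Serialize all sections, then parse them back with the read API.
    let mut sections = Sections::new(EndianVec::new(LittleEndian));
    dwarf.write(&mut sections).unwrap();
    let debug_info = read::DebugInfo::new(sections.debug_info.slice(), LittleEndian);
    let debug_abbrev = read::DebugAbbrev::new(sections.debug_abbrev.slice(), LittleEndian);
    let unit = debug_info.units().next().unwrap().unwrap();
    let abbrevs = unit.abbreviations(&debug_abbrev).unwrap();
    let mut entries = unit.entries(&abbrevs);
    entries.next_dfs().unwrap().unwrap(); // root (DW_TAG_compile_unit)
    let (_, entry) = entries.next_dfs().unwrap().unwrap();
    assert_eq!(entry.tag(), constants::DW_TAG_subprogram);
}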
diff --git a/vendor/gimli-0.26.2/src/write/writer.rs b/vendor/gimli-0.26.2/src/write/writer.rs
new file mode 100644
index 000000000..0785d1686
--- /dev/null
+++ b/vendor/gimli-0.26.2/src/write/writer.rs
@@ -0,0 +1,497 @@
+use crate::common::{Format, SectionId};
+use crate::constants;
+use crate::endianity::Endianity;
+use crate::leb128;
+use crate::write::{Address, Error, Result};
+
+/// A trait for writing the data to a DWARF section.
+///
+/// All write operations append to the section unless otherwise specified.
+#[allow(clippy::len_without_is_empty)]
+pub trait Writer {
+ /// The endianity of bytes that are written.
+ type Endian: Endianity;
+
+ /// Return the endianity of bytes that are written.
+ fn endian(&self) -> Self::Endian;
+
+ /// Return the current section length.
+ ///
+ /// This may be used as an offset for future `write_at` calls.
+ fn len(&self) -> usize;
+
+ /// Write a slice.
+ fn write(&mut self, bytes: &[u8]) -> Result<()>;
+
+ /// Write a slice at a given offset.
+ ///
+ /// The write must not extend past the current section length.
+ fn write_at(&mut self, offset: usize, bytes: &[u8]) -> Result<()>;
+
+ /// Write an address.
+ ///
+ /// If the writer supports relocations, then it must provide its own implementation
+ /// of this method.
+ // TODO: use write_reference instead?
+ fn write_address(&mut self, address: Address, size: u8) -> Result<()> {
+ match address {
+ Address::Constant(val) => self.write_udata(val, size),
+ Address::Symbol { .. } => Err(Error::InvalidAddress),
+ }
+ }
+
+ /// Write an address with a `.eh_frame` pointer encoding.
+ ///
+ /// The given size is only used for `DW_EH_PE_absptr` formats.
+ ///
+ /// If the writer supports relocations, then it must provide its own implementation
+ /// of this method.
+ fn write_eh_pointer(
+ &mut self,
+ address: Address,
+ eh_pe: constants::DwEhPe,
+ size: u8,
+ ) -> Result<()> {
+ match address {
+ Address::Constant(val) => {
+ // Indirect doesn't matter here.
+ let val = match eh_pe.application() {
+ constants::DW_EH_PE_absptr => val,
+ constants::DW_EH_PE_pcrel => {
+ // TODO: better handling of sign
+ let offset = self.len() as u64;
+ val.wrapping_sub(offset)
+ }
+ _ => {
+ return Err(Error::UnsupportedPointerEncoding(eh_pe));
+ }
+ };
+ self.write_eh_pointer_data(val, eh_pe.format(), size)
+ }
+ Address::Symbol { .. } => Err(Error::InvalidAddress),
+ }
+ }
+
+ /// Write a value with a `.eh_frame` pointer format.
+ ///
+ /// The given size is only used for `DW_EH_PE_absptr` formats.
+ ///
+ /// This must not be used directly for values that may require relocation.
+ fn write_eh_pointer_data(
+ &mut self,
+ val: u64,
+ format: constants::DwEhPe,
+ size: u8,
+ ) -> Result<()> {
+ match format {
+ constants::DW_EH_PE_absptr => self.write_udata(val, size),
+ constants::DW_EH_PE_uleb128 => self.write_uleb128(val),
+ constants::DW_EH_PE_udata2 => self.write_udata(val, 2),
+ constants::DW_EH_PE_udata4 => self.write_udata(val, 4),
+ constants::DW_EH_PE_udata8 => self.write_udata(val, 8),
+ constants::DW_EH_PE_sleb128 => self.write_sleb128(val as i64),
+ constants::DW_EH_PE_sdata2 => self.write_sdata(val as i64, 2),
+ constants::DW_EH_PE_sdata4 => self.write_sdata(val as i64, 4),
+ constants::DW_EH_PE_sdata8 => self.write_sdata(val as i64, 8),
+ _ => {
+ return Err(Error::UnsupportedPointerEncoding(format));
+ }
+ }
+ }
+
+ /// Write an offset that is relative to the start of the given section.
+ ///
+ /// If the writer supports relocations, then it must provide its own implementation
+ /// of this method.
+ fn write_offset(&mut self, val: usize, _section: SectionId, size: u8) -> Result<()> {
+ self.write_udata(val as u64, size)
+ }
+
+ /// Write an offset that is relative to the start of the given section.
+ ///
+ /// If the writer supports relocations, then it must provide its own implementation
+ /// of this method.
+ fn write_offset_at(
+ &mut self,
+ offset: usize,
+ val: usize,
+ _section: SectionId,
+ size: u8,
+ ) -> Result<()> {
+ self.write_udata_at(offset, val as u64, size)
+ }
+
+ /// Write a reference to a symbol.
+ ///
+ /// If the writer supports symbols, then it must provide its own implementation
+ /// of this method.
+ fn write_reference(&mut self, _symbol: usize, _size: u8) -> Result<()> {
+ Err(Error::InvalidReference)
+ }
+
+ /// Write a u8.
+ fn write_u8(&mut self, val: u8) -> Result<()> {
+ let bytes = [val];
+ self.write(&bytes)
+ }
+
+ /// Write a u16.
+ fn write_u16(&mut self, val: u16) -> Result<()> {
+ let mut bytes = [0; 2];
+ self.endian().write_u16(&mut bytes, val);
+ self.write(&bytes)
+ }
+
+ /// Write a u32.
+ fn write_u32(&mut self, val: u32) -> Result<()> {
+ let mut bytes = [0; 4];
+ self.endian().write_u32(&mut bytes, val);
+ self.write(&bytes)
+ }
+
+ /// Write a u64.
+ fn write_u64(&mut self, val: u64) -> Result<()> {
+ let mut bytes = [0; 8];
+ self.endian().write_u64(&mut bytes, val);
+ self.write(&bytes)
+ }
+
+ /// Write a u8 at the given offset.
+ fn write_u8_at(&mut self, offset: usize, val: u8) -> Result<()> {
+ let bytes = [val];
+ self.write_at(offset, &bytes)
+ }
+
+ /// Write a u16 at the given offset.
+ fn write_u16_at(&mut self, offset: usize, val: u16) -> Result<()> {
+ let mut bytes = [0; 2];
+ self.endian().write_u16(&mut bytes, val);
+ self.write_at(offset, &bytes)
+ }
+
+ /// Write a u32 at the given offset.
+ fn write_u32_at(&mut self, offset: usize, val: u32) -> Result<()> {
+ let mut bytes = [0; 4];
+ self.endian().write_u32(&mut bytes, val);
+ self.write_at(offset, &bytes)
+ }
+
+ /// Write a u64 at the given offset.
+ fn write_u64_at(&mut self, offset: usize, val: u64) -> Result<()> {
+ let mut bytes = [0; 8];
+ self.endian().write_u64(&mut bytes, val);
+ self.write_at(offset, &bytes)
+ }
+
+ /// Write unsigned data of the given size.
+ ///
+ /// Returns an error if the value is too large for the size.
+ /// This must not be used directly for values that may require relocation.
+ fn write_udata(&mut self, val: u64, size: u8) -> Result<()> {
+ match size {
+ 1 => {
+ let write_val = val as u8;
+ if val != u64::from(write_val) {
+ return Err(Error::ValueTooLarge);
+ }
+ self.write_u8(write_val)
+ }
+ 2 => {
+ let write_val = val as u16;
+ if val != u64::from(write_val) {
+ return Err(Error::ValueTooLarge);
+ }
+ self.write_u16(write_val)
+ }
+ 4 => {
+ let write_val = val as u32;
+ if val != u64::from(write_val) {
+ return Err(Error::ValueTooLarge);
+ }
+ self.write_u32(write_val)
+ }
+ 8 => self.write_u64(val),
+ otherwise => Err(Error::UnsupportedWordSize(otherwise)),
+ }
+ }
+
+ /// Write signed data of the given size.
+ ///
+ /// Returns an error if the value is too large for the size.
+ /// This must not be used directly for values that may require relocation.
+ fn write_sdata(&mut self, val: i64, size: u8) -> Result<()> {
+ match size {
+ 1 => {
+ let write_val = val as i8;
+ if val != i64::from(write_val) {
+ return Err(Error::ValueTooLarge);
+ }
+ self.write_u8(write_val as u8)
+ }
+ 2 => {
+ let write_val = val as i16;
+ if val != i64::from(write_val) {
+ return Err(Error::ValueTooLarge);
+ }
+ self.write_u16(write_val as u16)
+ }
+ 4 => {
+ let write_val = val as i32;
+ if val != i64::from(write_val) {
+ return Err(Error::ValueTooLarge);
+ }
+ self.write_u32(write_val as u32)
+ }
+ 8 => self.write_u64(val as u64),
+ otherwise => Err(Error::UnsupportedWordSize(otherwise)),
+ }
+ }
+
+ /// Write unsigned data of the given size at the given offset.
+ ///
+ /// Returns an error if the value is too large for the size.
+ /// This must not be used directly for values that may require relocation.
+ fn write_udata_at(&mut self, offset: usize, val: u64, size: u8) -> Result<()> {
+ match size {
+ 1 => {
+ let write_val = val as u8;
+ if val != u64::from(write_val) {
+ return Err(Error::ValueTooLarge);
+ }
+ self.write_u8_at(offset, write_val)
+ }
+ 2 => {
+ let write_val = val as u16;
+ if val != u64::from(write_val) {
+ return Err(Error::ValueTooLarge);
+ }
+ self.write_u16_at(offset, write_val)
+ }
+ 4 => {
+ let write_val = val as u32;
+ if val != u64::from(write_val) {
+ return Err(Error::ValueTooLarge);
+ }
+ self.write_u32_at(offset, write_val)
+ }
+ 8 => self.write_u64_at(offset, val),
+ otherwise => Err(Error::UnsupportedWordSize(otherwise)),
+ }
+ }
+
+ /// Write an unsigned LEB128 encoded integer.
+ fn write_uleb128(&mut self, val: u64) -> Result<()> {
+ let mut bytes = [0u8; 10];
+ // bytes is long enough so this will never fail.
+ let len = leb128::write::unsigned(&mut { &mut bytes[..] }, val).unwrap();
+ self.write(&bytes[..len])
+ }
+
+ /// Write a signed LEB128 encoded integer.
+ fn write_sleb128(&mut self, val: i64) -> Result<()> {
+ let mut bytes = [0u8; 10];
+ // bytes is long enough so this will never fail.
+ let len = leb128::write::signed(&mut { &mut bytes[..] }, val).unwrap();
+ self.write(&bytes[..len])
+ }
+
+ /// Write an initial length according to the given DWARF format.
+ ///
+ /// This will only write a length of zero, since the length isn't
+ /// known yet, and a subsequent call to `write_initial_length_at`
+ /// will write the actual length.
+ fn write_initial_length(&mut self, format: Format) -> Result<InitialLengthOffset> {
+ if format == Format::Dwarf64 {
+ self.write_u32(0xffff_ffff)?;
+ }
+ let offset = InitialLengthOffset(self.len());
+ self.write_udata(0, format.word_size())?;
+ Ok(offset)
+ }
+
+ /// Write an initial length at the given offset according to the given DWARF format.
+ ///
+ /// `write_initial_length` must have previously returned the offset.
+ fn write_initial_length_at(
+ &mut self,
+ offset: InitialLengthOffset,
+ length: u64,
+ format: Format,
+ ) -> Result<()> {
+ self.write_udata_at(offset.0, length, format.word_size())
+ }
+}
+
+/// The offset at which an initial length should be written.
+#[derive(Debug, Clone, Copy)]
+pub struct InitialLengthOffset(usize);
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::write;
+ use crate::{BigEndian, LittleEndian};
+ use std::{i64, u64};
+
+ #[test]
+ #[allow(clippy::cyclomatic_complexity)]
+ fn test_writer() {
+ let mut w = write::EndianVec::new(LittleEndian);
+ w.write_address(Address::Constant(0x1122_3344), 4).unwrap();
+ assert_eq!(w.slice(), &[0x44, 0x33, 0x22, 0x11]);
+ assert_eq!(
+ w.write_address(
+ Address::Symbol {
+ symbol: 0,
+ addend: 0
+ },
+ 4
+ ),
+ Err(Error::InvalidAddress)
+ );
+
+ let mut w = write::EndianVec::new(LittleEndian);
+ w.write_offset(0x1122_3344, SectionId::DebugInfo, 4)
+ .unwrap();
+ assert_eq!(w.slice(), &[0x44, 0x33, 0x22, 0x11]);
+ w.write_offset_at(1, 0x5566, SectionId::DebugInfo, 2)
+ .unwrap();
+ assert_eq!(w.slice(), &[0x44, 0x66, 0x55, 0x11]);
+
+ let mut w = write::EndianVec::new(LittleEndian);
+ w.write_u8(0x11).unwrap();
+ w.write_u16(0x2233).unwrap();
+ w.write_u32(0x4455_6677).unwrap();
+ w.write_u64(0x8081_8283_8485_8687).unwrap();
+ #[rustfmt::skip]
+ assert_eq!(w.slice(), &[
+ 0x11,
+ 0x33, 0x22,
+ 0x77, 0x66, 0x55, 0x44,
+ 0x87, 0x86, 0x85, 0x84, 0x83, 0x82, 0x81, 0x80,
+ ]);
+ w.write_u8_at(14, 0x11).unwrap();
+ w.write_u16_at(12, 0x2233).unwrap();
+ w.write_u32_at(8, 0x4455_6677).unwrap();
+ w.write_u64_at(0, 0x8081_8283_8485_8687).unwrap();
+ #[rustfmt::skip]
+ assert_eq!(w.slice(), &[
+ 0x87, 0x86, 0x85, 0x84, 0x83, 0x82, 0x81, 0x80,
+ 0x77, 0x66, 0x55, 0x44,
+ 0x33, 0x22,
+ 0x11,
+ ]);
+
+ let mut w = write::EndianVec::new(BigEndian);
+ w.write_u8(0x11).unwrap();
+ w.write_u16(0x2233).unwrap();
+ w.write_u32(0x4455_6677).unwrap();
+ w.write_u64(0x8081_8283_8485_8687).unwrap();
+ #[rustfmt::skip]
+ assert_eq!(w.slice(), &[
+ 0x11,
+ 0x22, 0x33,
+ 0x44, 0x55, 0x66, 0x77,
+ 0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
+ ]);
+ w.write_u8_at(14, 0x11).unwrap();
+ w.write_u16_at(12, 0x2233).unwrap();
+ w.write_u32_at(8, 0x4455_6677).unwrap();
+ w.write_u64_at(0, 0x8081_8283_8485_8687).unwrap();
+ #[rustfmt::skip]
+ assert_eq!(w.slice(), &[
+ 0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
+ 0x44, 0x55, 0x66, 0x77,
+ 0x22, 0x33,
+ 0x11,
+ ]);
+
+ let mut w = write::EndianVec::new(LittleEndian);
+ w.write_udata(0x11, 1).unwrap();
+ w.write_udata(0x2233, 2).unwrap();
+ w.write_udata(0x4455_6677, 4).unwrap();
+ w.write_udata(0x8081_8283_8485_8687, 8).unwrap();
+ #[rustfmt::skip]
+ assert_eq!(w.slice(), &[
+ 0x11,
+ 0x33, 0x22,
+ 0x77, 0x66, 0x55, 0x44,
+ 0x87, 0x86, 0x85, 0x84, 0x83, 0x82, 0x81, 0x80,
+ ]);
+ assert_eq!(w.write_udata(0x100, 1), Err(Error::ValueTooLarge));
+ assert_eq!(w.write_udata(0x1_0000, 2), Err(Error::ValueTooLarge));
+ assert_eq!(w.write_udata(0x1_0000_0000, 4), Err(Error::ValueTooLarge));
+ assert_eq!(w.write_udata(0x00, 3), Err(Error::UnsupportedWordSize(3)));
+ w.write_udata_at(14, 0x11, 1).unwrap();
+ w.write_udata_at(12, 0x2233, 2).unwrap();
+ w.write_udata_at(8, 0x4455_6677, 4).unwrap();
+ w.write_udata_at(0, 0x8081_8283_8485_8687, 8).unwrap();
+ #[rustfmt::skip]
+ assert_eq!(w.slice(), &[
+ 0x87, 0x86, 0x85, 0x84, 0x83, 0x82, 0x81, 0x80,
+ 0x77, 0x66, 0x55, 0x44,
+ 0x33, 0x22,
+ 0x11,
+ ]);
+ assert_eq!(w.write_udata_at(0, 0x100, 1), Err(Error::ValueTooLarge));
+ assert_eq!(w.write_udata_at(0, 0x1_0000, 2), Err(Error::ValueTooLarge));
+ assert_eq!(
+ w.write_udata_at(0, 0x1_0000_0000, 4),
+ Err(Error::ValueTooLarge)
+ );
+ assert_eq!(
+ w.write_udata_at(0, 0x00, 3),
+ Err(Error::UnsupportedWordSize(3))
+ );
+
+ let mut w = write::EndianVec::new(LittleEndian);
+ w.write_uleb128(0).unwrap();
+ assert_eq!(w.slice(), &[0]);
+
+ let mut w = write::EndianVec::new(LittleEndian);
+ w.write_uleb128(u64::MAX).unwrap();
+ assert_eq!(
+ w.slice(),
+ &[0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 1]
+ );
+
+ let mut w = write::EndianVec::new(LittleEndian);
+ w.write_sleb128(0).unwrap();
+ assert_eq!(w.slice(), &[0]);
+
+ let mut w = write::EndianVec::new(LittleEndian);
+ w.write_sleb128(i64::MAX).unwrap();
+ assert_eq!(
+ w.slice(),
+ &[0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0]
+ );
+
+ let mut w = write::EndianVec::new(LittleEndian);
+ w.write_sleb128(i64::MIN).unwrap();
+ assert_eq!(
+ w.slice(),
+ &[0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x7f]
+ );
+
+ let mut w = write::EndianVec::new(LittleEndian);
+ let offset = w.write_initial_length(Format::Dwarf32).unwrap();
+ assert_eq!(w.slice(), &[0, 0, 0, 0]);
+ w.write_initial_length_at(offset, 0x1122_3344, Format::Dwarf32)
+ .unwrap();
+ assert_eq!(w.slice(), &[0x44, 0x33, 0x22, 0x11]);
+ assert_eq!(
+ w.write_initial_length_at(offset, 0x1_0000_0000, Format::Dwarf32),
+ Err(Error::ValueTooLarge)
+ );
+
+ let mut w = write::EndianVec::new(LittleEndian);
+ let offset = w.write_initial_length(Format::Dwarf64).unwrap();
+ assert_eq!(w.slice(), &[0xff, 0xff, 0xff, 0xff, 0, 0, 0, 0, 0, 0, 0, 0]);
+ w.write_initial_length_at(offset, 0x1122_3344_5566_7788, Format::Dwarf64)
+ .unwrap();
+ assert_eq!(
+ w.slice(),
+ &[0xff, 0xff, 0xff, 0xff, 0x88, 0x77, 0x66, 0x55, 0x44, 0x33, 0x22, 0x11]
+ );
+ }
+}
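The trait documentation above notes that a relocation-aware writer must provide its own `write_address`. A minimal sketch of such an override, wrapping `write::EndianVec`; the `Relocation` record and its fields are illustrative assumptions, not gimli API.

use gimli::write::{Address, EndianVec, Result, Writer};
use gimli::LittleEndian;

// Hypothetical relocation record; not part of gimli.
struct Relocation {
    offset: usize,
    symbol: usize,
    addend: i64,
    size: u8,
}

struct RelocWriter {
    inner: EndianVec<LittleEndian>,
    relocations: Vec<Relocation>,
}

impl RelocWriter {
    fn new() -> Self {
        RelocWriter {
            inner: EndianVec::new(LittleEndian),
            relocations: Vec::new(),
        }
    }
}

impl Writer for RelocWriter {
    type Endian = LittleEndian;

    fn endian(&self) -> LittleEndian {
        LittleEndian
    }

    fn len(&self) -> usize {
        self.inner.len()
    }

    fn write(&mut self, bytes: &[u8]) -> Result<()> {
        self.inner.write(bytes)
    }

    fn write_at(&mut self, offset: usize, bytes: &[u8]) -> Result<()> {
        self.inner.write_at(offset, bytes)
    }

    fn write_address(&mut self, address: Address, size: u8) -> Result<()> {
        match address {
            Address::Constant(val) => self.write_udata(val, size),
            Address::Symbol { symbol, addend } => {
                // Record a relocation against `symbol` and emit the addend as a
                // placeholder for the linker (or caller) to patch later.
                self.relocations.push(Relocation {
                    offset: self.len(),
                    symbol,
                    addend,
                    size,
                });
                self.write_udata(addend as u64, size)
            }
        }
    }
}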
diff --git a/vendor/gimli-0.26.2/tests/convert_self.rs b/vendor/gimli-0.26.2/tests/convert_self.rs
new file mode 100644
index 000000000..7c069ebd6
--- /dev/null
+++ b/vendor/gimli-0.26.2/tests/convert_self.rs
@@ -0,0 +1,158 @@
+#![cfg(all(feature = "read", feature = "write"))]
+
+use std::env;
+use std::fs::File;
+use std::io::Read;
+use std::path::PathBuf;
+
+use gimli::read;
+use gimli::write::{self, Address, EndianVec};
+use gimli::LittleEndian;
+
+fn read_section(section: &str) -> Vec<u8> {
+ let mut path = PathBuf::new();
+ if let Ok(dir) = env::var("CARGO_MANIFEST_DIR") {
+ path.push(dir);
+ }
+ path.push("fixtures/self");
+ path.push(section);
+
+ println!("Reading section \"{}\" at path {:?}", section, path);
+ assert!(path.is_file());
+ let mut file = File::open(path).unwrap();
+
+ let mut buf = Vec::new();
+ file.read_to_end(&mut buf).unwrap();
+ buf
+}
+
+#[test]
+fn test_convert_debug_info() {
+ // Convert existing sections
+ let debug_abbrev = read_section("debug_abbrev");
+ let debug_abbrev = read::DebugAbbrev::new(&debug_abbrev, LittleEndian);
+
+ let debug_info = read_section("debug_info");
+ let debug_info = read::DebugInfo::new(&debug_info, LittleEndian);
+
+ let debug_line = read_section("debug_line");
+ let debug_line = read::DebugLine::new(&debug_line, LittleEndian);
+
+ let debug_str = read_section("debug_str");
+ let debug_str = read::DebugStr::new(&debug_str, LittleEndian);
+
+ let debug_ranges = read_section("debug_ranges");
+ let debug_ranges = read::DebugRanges::new(&debug_ranges, LittleEndian);
+
+ let debug_rnglists = read::DebugRngLists::new(&[], LittleEndian);
+
+ let ranges = gimli::RangeLists::new(debug_ranges, debug_rnglists);
+
+ let debug_loc = read_section("debug_loc");
+ let debug_loc = read::DebugLoc::new(&debug_loc, LittleEndian);
+
+ let debug_loclists = read::DebugLocLists::new(&[], LittleEndian);
+
+ let locations = gimli::LocationLists::new(debug_loc, debug_loclists);
+
+ let dwarf = read::Dwarf {
+ debug_abbrev,
+ debug_info,
+ debug_line,
+ debug_str,
+ ranges,
+ locations,
+ ..Default::default()
+ };
+
+ let mut dwarf = write::Dwarf::from(&dwarf, &|address| Some(Address::Constant(address)))
+ .expect("Should convert DWARF information");
+
+ assert_eq!(dwarf.units.count(), 23);
+ let entries: usize = (0..dwarf.units.count())
+ .map(|i| dwarf.units.get(dwarf.units.id(i)).count())
+ .sum();
+ assert_eq!(entries, 29_560);
+ assert_eq!(dwarf.line_strings.count(), 0);
+ assert_eq!(dwarf.strings.count(), 3921);
+
+ // Write to new sections
+ let mut write_sections = write::Sections::new(EndianVec::new(LittleEndian));
+ dwarf
+ .write(&mut write_sections)
+ .expect("Should write DWARF information");
+ let debug_info_data = write_sections.debug_info.slice();
+ let debug_abbrev_data = write_sections.debug_abbrev.slice();
+ let debug_line_data = write_sections.debug_line.slice();
+ let debug_ranges_data = write_sections.debug_ranges.slice();
+ let debug_loc_data = write_sections.debug_loc.slice();
+ let debug_str_data = write_sections.debug_str.slice();
+ assert_eq!(debug_info_data.len(), 394_930);
+ assert_eq!(debug_abbrev_data.len(), 9701);
+ assert_eq!(debug_line_data.len(), 105_797);
+ assert_eq!(debug_ranges_data.len(), 155_712);
+ assert_eq!(debug_loc_data.len(), 245_168);
+ assert_eq!(debug_str_data.len(), 144_731);
+
+ // Convert new sections
+ let debug_abbrev = read::DebugAbbrev::new(debug_abbrev_data, LittleEndian);
+ let debug_info = read::DebugInfo::new(debug_info_data, LittleEndian);
+ let debug_line = read::DebugLine::new(debug_line_data, LittleEndian);
+ let debug_str = read::DebugStr::new(debug_str_data, LittleEndian);
+ let debug_ranges = read::DebugRanges::new(debug_ranges_data, LittleEndian);
+ let debug_rnglists = read::DebugRngLists::new(&[], LittleEndian);
+ let debug_loc = read::DebugLoc::new(debug_loc_data, LittleEndian);
+ let debug_loclists = read::DebugLocLists::new(&[], LittleEndian);
+
+ let ranges = gimli::RangeLists::new(debug_ranges, debug_rnglists);
+ let locations = gimli::LocationLists::new(debug_loc, debug_loclists);
+
+ let dwarf = read::Dwarf {
+ debug_abbrev,
+ debug_info,
+ debug_line,
+ debug_str,
+ ranges,
+ locations,
+ ..Default::default()
+ };
+
+ let dwarf = write::Dwarf::from(&dwarf, &|address| Some(Address::Constant(address)))
+ .expect("Should convert DWARF information");
+
+ assert_eq!(dwarf.units.count(), 23);
+ let entries: usize = (0..dwarf.units.count())
+ .map(|i| dwarf.units.get(dwarf.units.id(i)).count())
+ .sum();
+ assert_eq!(entries, 29_560);
+ assert_eq!(dwarf.strings.count(), 3921);
+}
+
+#[test]
+fn test_convert_eh_frame() {
+ // Convert existing section
+ let eh_frame = read_section("eh_frame");
+ let mut eh_frame = read::EhFrame::new(&eh_frame, LittleEndian);
+ // The `.eh_frame` fixture data was created on a 64-bit machine.
+ eh_frame.set_address_size(8);
+ let frames = write::FrameTable::from(&eh_frame, &|address| Some(Address::Constant(address)))
+ .expect("Should convert eh_frame information");
+ assert_eq!(frames.cie_count(), 2);
+ assert_eq!(frames.fde_count(), 3482);
+
+ // Write to new section
+ let mut write_eh_frame = write::EhFrame(EndianVec::new(LittleEndian));
+ frames
+ .write_eh_frame(&mut write_eh_frame)
+ .expect("Should write eh_frame information");
+ let eh_frame = write_eh_frame.slice();
+ assert_eq!(eh_frame.len(), 147144);
+
+ // Convert new section
+ let mut eh_frame = read::EhFrame::new(&eh_frame, LittleEndian);
+ eh_frame.set_address_size(8);
+ let frames = write::FrameTable::from(&eh_frame, &|address| Some(Address::Constant(address)))
+ .expect("Should convert eh_frame information");
+ assert_eq!(frames.cie_count(), 2);
+ assert_eq!(frames.fde_count(), 3482);
+}
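The identity callback `&|address| Some(Address::Constant(address))` used above leaves addresses unchanged. A sketch of a conversion callback that applies a load bias instead; `load_bias` and the function name are illustrative.

use gimli::{read, write};
use gimli::write::{Address, ConvertResult};

// `read_dwarf` is assumed to be a populated read::Dwarf, built as in the test
// above; `load_bias` is an illustrative parameter.
fn convert_with_bias<R: read::Reader<Offset = usize>>(
    read_dwarf: &read::Dwarf<R>,
    load_bias: u64,
) -> ConvertResult<write::Dwarf> {
    write::Dwarf::from(read_dwarf, &|address| {
        Some(Address::Constant(address.wrapping_add(load_bias)))
    })
}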
diff --git a/vendor/gimli-0.26.2/tests/parse_self.rs b/vendor/gimli-0.26.2/tests/parse_self.rs
new file mode 100755
index 000000000..fb316314e
--- /dev/null
+++ b/vendor/gimli-0.26.2/tests/parse_self.rs
@@ -0,0 +1,431 @@
+#![cfg(all(feature = "read", feature = "std", feature = "endian-reader"))]
+
+use gimli::{
+ AttributeValue, DebugAbbrev, DebugAddr, DebugAddrBase, DebugAranges, DebugInfo, DebugLine,
+ DebugLoc, DebugLocLists, DebugPubNames, DebugPubTypes, DebugRanges, DebugRngLists, DebugStr,
+ Encoding, EndianSlice, Expression, LittleEndian, LocationLists, Operation, RangeLists,
+ RangeListsOffset, Reader,
+};
+use std::collections::hash_map::HashMap;
+use std::env;
+use std::fs::File;
+use std::io::Read;
+use std::path::PathBuf;
+use std::rc::Rc;
+
+fn read_section(section: &str) -> Vec<u8> {
+ let mut path = PathBuf::new();
+ if let Ok(dir) = env::var("CARGO_MANIFEST_DIR") {
+ path.push(dir);
+ }
+ path.push("fixtures/self");
+ path.push(section);
+
+ println!("Reading section \"{}\" at path {:?}", section, path);
+ assert!(path.is_file());
+ let mut file = File::open(path).unwrap();
+
+ let mut buf = Vec::new();
+ file.read_to_end(&mut buf).unwrap();
+ buf
+}
+
+fn parse_expression<R: Reader>(expr: Expression<R>, encoding: Encoding) {
+ let mut pc = expr.0.clone();
+ while !pc.is_empty() {
+ Operation::parse(&mut pc, encoding).expect("Should parse operation");
+ }
+
+ // Also attempt to evaluate some of it.
+ let mut eval = expr.evaluation(encoding);
+ eval.set_initial_value(0);
+ eval.evaluate().expect("Should evaluate expression");
+}
+
+fn impl_parse_self_debug_info<R: gimli::Reader>(
+ debug_info: &DebugInfo<R>,
+ debug_abbrev: &DebugAbbrev<R>,
+) {
+ let mut iter = debug_info.units();
+ while let Some(unit) = iter.next().expect("Should parse compilation unit") {
+ let abbrevs = unit
+ .abbreviations(&debug_abbrev)
+ .expect("Should parse abbreviations");
+
+ let mut cursor = unit.entries(&abbrevs);
+
+ while cursor.next_dfs().expect("Should parse next dfs").is_some() {
+ let entry = cursor.current().expect("Should have a current entry");
+
+ let mut attrs = entry.attrs();
+ while let Some(attr) = attrs.next().expect("Should parse entry's attribute") {
+ if let AttributeValue::Exprloc(expression) = attr.value() {
+ parse_expression(expression, unit.encoding());
+ }
+ }
+ }
+ }
+}
+
+#[test]
+fn test_parse_self_debug_info() {
+ let debug_info = read_section("debug_info");
+ let debug_info = DebugInfo::new(&debug_info, LittleEndian);
+
+ let debug_abbrev = read_section("debug_abbrev");
+ let debug_abbrev = DebugAbbrev::new(&debug_abbrev, LittleEndian);
+
+ impl_parse_self_debug_info(&debug_info, &debug_abbrev);
+}
+
+#[test]
+fn test_parse_self_debug_info_with_endian_rc_slice() {
+ let debug_info = read_section("debug_info");
+ let debug_info = Rc::from(&debug_info[..]);
+ let debug_info = gimli::EndianRcSlice::new(debug_info, LittleEndian);
+ let debug_info = DebugInfo::from(debug_info);
+
+ let debug_abbrev = read_section("debug_abbrev");
+ let debug_abbrev = Rc::from(&debug_abbrev[..]);
+ let debug_abbrev = gimli::EndianRcSlice::new(debug_abbrev, LittleEndian);
+ let debug_abbrev = DebugAbbrev::from(debug_abbrev);
+
+ impl_parse_self_debug_info(&debug_info, &debug_abbrev);
+}
+
+#[test]
+fn test_parse_self_debug_line() {
+ let debug_info = read_section("debug_info");
+ let debug_info = DebugInfo::new(&debug_info, LittleEndian);
+
+ let debug_abbrev = read_section("debug_abbrev");
+ let debug_abbrev = DebugAbbrev::new(&debug_abbrev, LittleEndian);
+
+ let debug_line = read_section("debug_line");
+ let debug_line = DebugLine::new(&debug_line, LittleEndian);
+
+ let debug_str = read_section("debug_str");
+ let debug_str = DebugStr::new(&debug_str, LittleEndian);
+
+ let mut iter = debug_info.units();
+ while let Some(unit) = iter.next().expect("Should parse compilation unit") {
+ let abbrevs = unit
+ .abbreviations(&debug_abbrev)
+ .expect("Should parse abbreviations");
+
+ let mut cursor = unit.entries(&abbrevs);
+ cursor.next_dfs().expect("Should parse next dfs");
+
+ let unit_entry = cursor.current().expect("Should have a root entry");
+
+ let comp_dir = unit_entry
+ .attr_value(gimli::DW_AT_comp_dir)
+ .expect("Should parse comp_dir attribute")
+ .and_then(|val| val.string_value(&debug_str));
+ let comp_name = unit_entry
+ .attr_value(gimli::DW_AT_name)
+ .expect("Should parse name attribute")
+ .and_then(|val| val.string_value(&debug_str));
+
+ if let Some(AttributeValue::DebugLineRef(offset)) = unit_entry
+ .attr_value(gimli::DW_AT_stmt_list)
+ .expect("Should parse stmt_list")
+ {
+ let program = debug_line
+ .program(offset, unit.address_size(), comp_dir, comp_name)
+ .expect("should parse line number program header");
+
+ let mut results = Vec::new();
+ let mut rows = program.rows();
+ while let Some((_, row)) = rows
+ .next_row()
+ .expect("Should parse and execute all rows in the line number program")
+ {
+ results.push(*row);
+ }
+ results.reverse();
+
+ let program = debug_line
+ .program(offset, unit.address_size(), comp_dir, comp_name)
+ .expect("should parse line number program header");
+ let (program, sequences) = program
+ .sequences()
+ .expect("should parse and execute the entire line number program");
+ assert!(!sequences.is_empty()); // Should be at least one sequence.
+ for sequence in sequences {
+ let mut rows = program.resume_from(&sequence);
+ while let Some((_, row)) = rows
+ .next_row()
+ .expect("Should parse and execute all rows after resuming")
+ {
+ let other_row = results.pop().unwrap();
+ assert!(row.address() >= sequence.start);
+ assert!(row.address() <= sequence.end);
+ assert_eq!(row.address(), other_row.address());
+ assert_eq!(row.line(), other_row.line());
+ }
+ }
+ assert!(results.is_empty());
+ }
+ }
+}
+
+#[test]
+fn test_parse_self_debug_loc() {
+ let debug_info = read_section("debug_info");
+ let debug_info = DebugInfo::new(&debug_info, LittleEndian);
+
+ let debug_abbrev = read_section("debug_abbrev");
+ let debug_abbrev = DebugAbbrev::new(&debug_abbrev, LittleEndian);
+
+ let debug_addr = DebugAddr::from(EndianSlice::new(&[], LittleEndian));
+ let debug_addr_base = DebugAddrBase(0);
+
+ let debug_loc = read_section("debug_loc");
+ let debug_loc = DebugLoc::new(&debug_loc, LittleEndian);
+ let debug_loclists = DebugLocLists::new(&[], LittleEndian);
+ let loclists = LocationLists::new(debug_loc, debug_loclists);
+
+ let mut iter = debug_info.units();
+ while let Some(unit) = iter.next().expect("Should parse compilation unit") {
+ let abbrevs = unit
+ .abbreviations(&debug_abbrev)
+ .expect("Should parse abbreviations");
+
+ let mut cursor = unit.entries(&abbrevs);
+ cursor.next_dfs().expect("Should parse next dfs");
+
+ let mut low_pc = 0;
+
+ {
+ let unit_entry = cursor.current().expect("Should have a root entry");
+ let low_pc_attr = unit_entry
+ .attr_value(gimli::DW_AT_low_pc)
+ .expect("Should parse low_pc");
+ if let Some(gimli::AttributeValue::Addr(address)) = low_pc_attr {
+ low_pc = address;
+ }
+ }
+
+ while cursor.next_dfs().expect("Should parse next dfs").is_some() {
+ let entry = cursor.current().expect("Should have a current entry");
+ let mut attrs = entry.attrs();
+ while let Some(attr) = attrs.next().expect("Should parse entry's attribute") {
+ if let AttributeValue::LocationListsRef(offset) = attr.value() {
+ let mut locs = loclists
+ .locations(
+ offset,
+ unit.encoding(),
+ low_pc,
+ &debug_addr,
+ debug_addr_base,
+ )
+ .expect("Should parse locations OK");
+ while let Some(loc) = locs.next().expect("Should parse next location") {
+ assert!(loc.range.begin <= loc.range.end);
+ parse_expression(loc.data, unit.encoding());
+ }
+ }
+ }
+ }
+ }
+}
+
+#[test]
+fn test_parse_self_debug_ranges() {
+ let debug_info = read_section("debug_info");
+ let debug_info = DebugInfo::new(&debug_info, LittleEndian);
+
+ let debug_abbrev = read_section("debug_abbrev");
+ let debug_abbrev = DebugAbbrev::new(&debug_abbrev, LittleEndian);
+
+ let debug_addr = DebugAddr::from(EndianSlice::new(&[], LittleEndian));
+ let debug_addr_base = DebugAddrBase(0);
+
+ let debug_ranges = read_section("debug_ranges");
+ let debug_ranges = DebugRanges::new(&debug_ranges, LittleEndian);
+ let debug_rnglists = DebugRngLists::new(&[], LittleEndian);
+ let rnglists = RangeLists::new(debug_ranges, debug_rnglists);
+
+ let mut iter = debug_info.units();
+ while let Some(unit) = iter.next().expect("Should parse compilation unit") {
+ let abbrevs = unit
+ .abbreviations(&debug_abbrev)
+ .expect("Should parse abbreviations");
+
+ let mut cursor = unit.entries(&abbrevs);
+ cursor.next_dfs().expect("Should parse next dfs");
+
+ let mut low_pc = 0;
+
+ {
+ let unit_entry = cursor.current().expect("Should have a root entry");
+ let low_pc_attr = unit_entry
+ .attr_value(gimli::DW_AT_low_pc)
+ .expect("Should parse low_pc");
+ if let Some(gimli::AttributeValue::Addr(address)) = low_pc_attr {
+ low_pc = address;
+ }
+ }
+
+ while cursor.next_dfs().expect("Should parse next dfs").is_some() {
+ let entry = cursor.current().expect("Should have a current entry");
+ let mut attrs = entry.attrs();
+ while let Some(attr) = attrs.next().expect("Should parse entry's attribute") {
+ if let AttributeValue::RangeListsRef(offset) = attr.value() {
+ let mut ranges = rnglists
+ .ranges(
+ RangeListsOffset(offset.0),
+ unit.encoding(),
+ low_pc,
+ &debug_addr,
+ debug_addr_base,
+ )
+ .expect("Should parse ranges OK");
+ while let Some(range) = ranges.next().expect("Should parse next range") {
+ assert!(range.begin <= range.end);
+ }
+ }
+ }
+ }
+ }
+}
+
+#[test]
+fn test_parse_self_debug_aranges() {
+ let debug_aranges = read_section("debug_aranges");
+ let debug_aranges = DebugAranges::new(&debug_aranges, LittleEndian);
+
+ let mut headers = debug_aranges.headers();
+ while let Some(header) = headers.next().expect("Should parse arange header OK") {
+ let mut entries = header.entries();
+ while let Some(_) = entries.next().expect("Should parse arange entry OK") {
+ // Not really anything else we can check right now.
+ }
+ }
+}
+
+#[test]
+fn test_parse_self_debug_pubnames() {
+ let debug_info = read_section("debug_info");
+ let debug_info = DebugInfo::new(&debug_info, LittleEndian);
+
+ let debug_abbrev = read_section("debug_abbrev");
+ let debug_abbrev = DebugAbbrev::new(&debug_abbrev, LittleEndian);
+
+ let debug_pubnames = read_section("debug_pubnames");
+ let debug_pubnames = DebugPubNames::new(&debug_pubnames, LittleEndian);
+
+ let mut units = HashMap::new();
+ let mut abbrevs = HashMap::new();
+ let mut pubnames = debug_pubnames.items();
+ while let Some(entry) = pubnames.next().expect("Should parse pubname OK") {
+ let unit_offset = entry.unit_header_offset();
+ let unit = units.entry(unit_offset).or_insert_with(|| {
+ debug_info
+ .header_from_offset(unit_offset)
+ .expect("Should parse unit header OK")
+ });
+ let abbrev_offset = unit.debug_abbrev_offset();
+ let abbrevs = abbrevs.entry(abbrev_offset).or_insert_with(|| {
+ debug_abbrev
+ .abbreviations(abbrev_offset)
+ .expect("Should parse abbreviations OK")
+ });
+ let mut cursor = unit
+ .entries_at_offset(abbrevs, entry.die_offset())
+ .expect("DIE offset should be valid");
+ assert!(cursor.next_dfs().expect("Should parse DIE").is_some());
+ }
+}
+
+#[test]
+fn test_parse_self_debug_pubtypes() {
+ let debug_info = read_section("debug_info");
+ let debug_info = DebugInfo::new(&debug_info, LittleEndian);
+
+ let debug_abbrev = read_section("debug_abbrev");
+ let debug_abbrev = DebugAbbrev::new(&debug_abbrev, LittleEndian);
+
+ let debug_pubtypes = read_section("debug_pubtypes");
+ let debug_pubtypes = DebugPubTypes::new(&debug_pubtypes, LittleEndian);
+
+ let mut units = HashMap::new();
+ let mut abbrevs = HashMap::new();
+ let mut pubtypes = debug_pubtypes.items();
+ while let Some(entry) = pubtypes.next().expect("Should parse pubtype OK") {
+ let unit_offset = entry.unit_header_offset();
+ let unit = units.entry(unit_offset).or_insert_with(|| {
+ debug_info
+ .header_from_offset(unit_offset)
+ .expect("Should parse unit header OK")
+ });
+ let abbrev_offset = unit.debug_abbrev_offset();
+ let abbrevs = abbrevs.entry(abbrev_offset).or_insert_with(|| {
+ debug_abbrev
+ .abbreviations(abbrev_offset)
+ .expect("Should parse abbreviations OK")
+ });
+ let mut cursor = unit
+ .entries_at_offset(abbrevs, entry.die_offset())
+ .expect("DIE offset should be valid");
+ assert!(cursor.next_dfs().expect("Should parse DIE").is_some());
+ }
+}
+
+#[test]
+fn test_parse_self_eh_frame() {
+ use gimli::{BaseAddresses, CieOrFde, EhFrame, UnwindSection};
+
+ let eh_frame = read_section("eh_frame");
+ let mut eh_frame = EhFrame::new(&eh_frame, LittleEndian);
+ // The `.eh_frame` fixture data was created on a 64-bit machine.
+ eh_frame.set_address_size(8);
+
+ let bases = BaseAddresses::default()
+ .set_eh_frame(0)
+ .set_text(0)
+ .set_got(0);
+ let mut entries = eh_frame.entries(&bases);
+ while let Some(entry) = entries.next().expect("Should parse CFI entry OK") {
+ match entry {
+ CieOrFde::Cie(cie) => {
+ let mut instrs = cie.instructions(&eh_frame, &bases);
+ while let Some(_) = instrs.next().expect("Can parse next CFI instruction OK") {
+ // TODO FITZGEN
+ }
+ }
+ CieOrFde::Fde(partial) => {
+ let fde = partial
+ .parse(UnwindSection::cie_from_offset)
+ .expect("Should be able to get CIE for FDE");
+
+ let mut instrs = fde.instructions(&eh_frame, &bases);
+ while let Some(_) = instrs.next().expect("Can parse next CFI instruction OK") {
+ // TODO FITZGEN
+ }
+ }
+ }
+ }
+}
+
+#[test]
+fn test_parse_self_eh_frame_hdr() {
+ use gimli::{BaseAddresses, EhFrameHdr};
+
+ let eh_frame_hdr = read_section("eh_frame_hdr");
+ let eh_frame_hdr = EhFrameHdr::new(&eh_frame_hdr, LittleEndian);
+
+ let bases = BaseAddresses::default()
+ .set_eh_frame(0)
+ .set_eh_frame_hdr(0)
+ .set_text(0)
+ .set_got(0);
+
+ // `.eh_frame_hdr` was generated on a 64-bit machine.
+ let address_size = 8;
+
+ let _parsed_header = eh_frame_hdr
+ .parse(&bases, address_size)
+ .expect("we can parse the `.eh_frame_hdr` section OK");
+}
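
Not part of the patch: a sketch of how the parsed header could be put to work, assuming gimli's `EhHdrTable::fde_for_address` helper behaves as in the released API; the `find_fde` wrapper is hypothetical, and the address size of 8 matches the 64-bit fixture used in the test above.

```rust
use gimli::{BaseAddresses, EhFrame, EhFrameHdr, EndianSlice, LittleEndian, UnwindSection};

// Parse `.eh_frame_hdr` and, if it carries a binary-search table, look up the
// FDE covering `address` in the already-loaded `.eh_frame` section.
fn find_fde<'a>(
    eh_frame: &EhFrame<EndianSlice<'a, LittleEndian>>,
    eh_frame_hdr: &'a [u8],
    bases: &BaseAddresses,
    address: u64,
) -> gimli::Result<()> {
    // Address size 8 matches the 64-bit fixture data.
    let hdr = EhFrameHdr::new(eh_frame_hdr, LittleEndian).parse(bases, 8)?;
    if let Some(table) = hdr.table() {
        let fde =
            table.fde_for_address(eh_frame, bases, address, UnwindSection::cie_from_offset)?;
        // The located FDE should begin at or before the queried address.
        assert!(fde.initial_address() <= address);
    }
    Ok(())
}
```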
diff --git a/vendor/gimli/.cargo-checksum.json b/vendor/gimli/.cargo-checksum.json
index 759ec93de..944895d6b 100644
--- a/vendor/gimli/.cargo-checksum.json
+++ b/vendor/gimli/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"CHANGELOG.md":"789a696803d3f1bed3ff3566cac8e7cf15c4bf9428242d637d0ce7f3a0ad57a3","CONTRIBUTING.md":"5f513ec06013e4f6f097e9c9492da5a47b9f25c94c6ecadfb655a77405fe912c","Cargo.lock":"284bff6b09ef0fd214c34492417778d6d5b9f75dc54557015af01a95696c752a","Cargo.toml":"92dccbeaa61bc8c65da53917fbf32900b3cb2549f90b67b67e1c67672bac205e","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"7b63ecd5f1902af1b63729947373683c32745c16a10e8e6292e2e2dcd7e90ae0","README.md":"57e36d344dabe1c52a9c81eafb28787c309b86c47437abf8589ef17bf383fc5f","benches/bench.rs":"e0045b989683794951563aa91b37069b2f6ae55f95e288d23f5c984b46e3a7eb","examples/dwarf-validate.rs":"4aac1045e3c08bf00878eeff75c0cfc30c06171c5eab2e71d757505786729687","examples/dwarfdump.rs":"d74323c037689b32825efa9bf69614ee26a444513b266e819ecf486956ee3299","examples/simple.rs":"4c3425e8bd1880d9522f5ed2581fb5ccd452d4be678eebc0e147c48722a7be1d","examples/simple_line.rs":"ac795f859a17650dde466b5b23b8c161b2e3b8eb57e32f5b6718a3072f6bfad0","fixtures/self/README.md":"7cfd76031ec5a4b38cc4eb56ccbfe1bb590fb54c333d037550bdeaaeacfc20cb","fixtures/self/debug_abbrev":"7c0faa940d9c68d196d03ad55a20e5c746040fa428ff323277fa381deff82bba","fixtures/self/debug_aranges":"8c2aeb2335f61d04ecb7b747070d24f83a6517cbee79dc5c96d97fb6c53d6b6d","fixtures/self/debug_info":"42028a5983006e0703f9ca9515cd27d891ae4af70279fae5011d547f581e2661","fixtures/self/debug_inlined":"89d9516f06ff835621936037f5884fc56712bf304c1dcde52251ddd510fe8710","fixtures/self/debug_line":"b29aebcca3b38bb2bb8aa708cbe74a0dce5a3b0c18916b63d6d17282c017bec7","fixtures/self/debug_loc":"8906ccb9c204f233eb74c1d069dee97a19d18c2051f9147795d7b5364a9266aa","fixtures/self/debug_pubnames":"cf58e237f89c68afba724597fa7e260448636b45f2e69dc6f1bfe34006e27c48","fixtures/self/debug_pubtypes":"d43c1bed71c9d14d1683294cdc1833f069cf131d6e95ee808547919b4f352d81","fixtures/self/debug_ranges":"6d765ac18d33accd89186d077eeb505cbdf97d990c9201d63d9463cd7787ce7a","fixtures/self/debug_str":"9ed904b68eee77b8558b80b3b7ca03e8527f6c64483e9d6d845f40270eb21183","fixtures/self/eh_frame":"6dc3d84351cac42cf73d03452fbb532470dd94d08715154c48417e3f62095f17","fixtures/self/eh_frame_hdr":"afba7a0aa233c9a8c81c986495bd2505164844adb93272d6bc0c9e592e684716","rustfmt.toml":"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855","src/arch.rs":"1c4cb3e2a322f3f42fe0b82875c9d0ce060d9af2388990139bdce9a4487c32da","src/common.rs":"392f52a58db6101187ca5525bbeafca9bda2342debd058cabca37350cd9db619","src/constants.rs":"358cf7924c79bc72de59d23d1fa02b2047d6c763c8fbd8be263ab8cd3e3ba7ec","src/endianity.rs":"1f7e62ae34f540c06bedf1e7948739211556eea7dd83731a5ca52c7d687ed0fc","src/leb128.rs":"996d5c79d027f97c010ca487bc4ff5f8265f4b9e63d62b4e4fa291383c259ee9","src/lib.rs":"6863b9a9d1eddf34b4095dfe60318aae56914fbf515ba5601b29024cc963f27c","src/read/abbrev.rs":"a3f550c32f1eb880d82bdb5257d35e10d32cfd039050e8131cbeedac346cc1d9","src/read/addr.rs":"f63f289edf889e87107bb2090fb1c50b48af7015f31b7c39c3d6ea09630a38e9","src/read/aranges.rs":"ba3302f87cffb7ee15f48b0530ebd707f45ad056934223078d25ae2a1b034f1c","src/read/cfi.rs":"b1064ed9b4b87169a148cc86adc7443c5a771dc2d1799129f7883f1ef6adc165","src/read/dwarf.rs":"a39c24429b437ae3a1cd17bae2f01c973c9ce39f7b5f2b3435982d6860944e0e","src/read/endian_reader.rs":"320983a859c2bb0dd44a3e6fae55ff0a84dba3fa80c2edbc64aa8135c44eddf0","src/read/endian_slice.rs":"ae1c52499728f6a85648f1bf87c02dcf43bebecb5ad4e835a1246938ba4338bf","src/read/index.rs":"e79b8d591b8e2007a37f5ea85a6d71b69d56ca3739a85cf7bf361724c5b829fa","src/read/line.rs":"af7a1520777e56632970fc5fe7377fdcd12d078eb88eeb2b0f2cc95b73ff68a7","src/read/lists.rs":"67ca9e1a36a91feb4996d035211de845205212bfda02163685d217818567ff93","src/read/loclists.rs":"1b4ea85c0dd8c6eae492a60cb70810185d56ba579df7986cb8a36385031b10fd","src/read/lookup.rs":"0cf89ba12b9d48b1fe035dd3a497730323acb9427a9457abbc2f7c58c4c71165","src/read/mod.rs":"3bafc747c31a575bcc92d3e7d5ea5a15f5acc01918a4377cec1dced0f85b5d2b","src/read/op.rs":"e5dce6520dfc90ec74c3b070ca374b89fcf55ff23101471591458175a72c79e6","src/read/pubnames.rs":"ed752ee1a7017e6d3be42d81e4ddaaac960ef08081463a19106c9f041526d4a3","src/read/pubtypes.rs":"5e75b32c0923e827aff0bb2db456797a0e8d38ba46be992558a7990b3196bcf5","src/read/reader.rs":"b10ff3e77b54347e96b1f3cff30da104dfdd0c4d7a55b672950788f1f1ae3478","src/read/rnglists.rs":"af637d283d76514382ee0556463cccab4e6f0ea4d061db9a44a594b5d57d1fd7","src/read/str.rs":"4c2f50014451621fea45969cd313f6840fcd3a99d7a2d081bfa1f8e0e434133a","src/read/unit.rs":"6ed00ba004c329008bf295d9c7d724afe961750f0c7b08430fc213fd5d998003","src/read/util.rs":"0b7d0d2225a98618070dc472ccba49a5411aa8beed5ff6696da079d06156d363","src/read/value.rs":"5a91e03ad3d41f679b264753498434b91948c6b89955e4beb4522498386d9b1d","src/test_util.rs":"291eefa6b51c6d934ba2f4a4c9bc7c403046fc1cccf4d43487820f0154bb89e2","src/write/abbrev.rs":"fa02163389e92e804d139cf84f833ab6af932083f0eb2d74464b4a70bd3237ff","src/write/cfi.rs":"3b04b0ebd82363738199cc673f64e0ceb60506a67c4f18b435a109caa62840f3","src/write/dwarf.rs":"8a1a0893e31134ad68993994594f3024ad0c8af7c1188b29e0ffc26b42edef21","src/write/endian_vec.rs":"1d5811986648816a677580b22630f5059757a381487d73e9adbb3008c9ae0c58","src/write/line.rs":"df7d2082c71b5e523cd52745700aae3dcfa5800f0b280e831ef5d8eb8035d6a7","src/write/loc.rs":"bb5b750c04f6603e18225db72652ea00239234ba674a8a8627c99d4ab07b47a9","src/write/mod.rs":"d8aa1da854cdee629d470d00d87e00dc6998e4bec1ca951f8d2f277730ab9d69","src/write/op.rs":"7b1d49b10c8c92b2d5b259e83119ff7dc95bc552535bb7b1a82ca9556a35c589","src/write/range.rs":"5bac01e372c08e3cc19e1e07e40492d8214cdfa8881737920cb792f4aa2ba80b","src/write/section.rs":"3ce781d5e82ba365ff54fdd36e0ef58c58a2215b09a8861eb0b038efac82b77f","src/write/str.rs":"4850cc2fee55980f9cbb6b4169f9861ab9d05c2b28a85c2b790480b83a66f514","src/write/unit.rs":"213c881736f8c87fcb2f921e379791eaba2915e8d077139965a9c6211001fe44","src/write/writer.rs":"304181287f90445bbfb33349c26b34bd87002d6844fc5686bfc0756fd0a1ecd8","tests/convert_self.rs":"180909b562969e1691b64628ded8654e6e0b10b3357f39917bd8ac288c5826dd","tests/parse_self.rs":"f2da1c7daef7139545c9367c2f26199e8b4623b31d4ec6480ddd851e6980f2dc"},"package":"22030e2c5a68ec659fde1e949a745124b48e6fa8b045b7ed5bd1fe4ccc5c4e5d"} \ No newline at end of file
+{"files":{"CHANGELOG.md":"26652aa86933dfc6d8368788a6c8b3f44bc5787cb3a9773778dbd8beb376cab5","CONTRIBUTING.md":"5f513ec06013e4f6f097e9c9492da5a47b9f25c94c6ecadfb655a77405fe912c","Cargo.lock":"526ec45724cfe5dba367ac5a6aaac87223dcc0a76393cf87582be63e349b437b","Cargo.toml":"dccff39f1b698a28646c5af7bbe35fc08606b4dbf848f9e02bbf78b0daf967e6","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"7b63ecd5f1902af1b63729947373683c32745c16a10e8e6292e2e2dcd7e90ae0","README.md":"8b35e78ea98d87b6b818feea83863cbf16fe5b024ba71641c6ce9fe9c68f32c7","benches/bench.rs":"e0045b989683794951563aa91b37069b2f6ae55f95e288d23f5c984b46e3a7eb","clippy.toml":"50fd0cdaae995561b1d688c9410fe99335fc7ac3916a400dafd25ff63f8215f7","examples/dwarf-validate.rs":"4aac1045e3c08bf00878eeff75c0cfc30c06171c5eab2e71d757505786729687","examples/dwarfdump.rs":"5192233994233aabc1f359f9b50746a5703f528c4e22f2573d8e39e0c35cd32a","examples/simple.rs":"4c3425e8bd1880d9522f5ed2581fb5ccd452d4be678eebc0e147c48722a7be1d","examples/simple_line.rs":"ac795f859a17650dde466b5b23b8c161b2e3b8eb57e32f5b6718a3072f6bfad0","fixtures/self/README.md":"7cfd76031ec5a4b38cc4eb56ccbfe1bb590fb54c333d037550bdeaaeacfc20cb","fixtures/self/debug_abbrev":"7c0faa940d9c68d196d03ad55a20e5c746040fa428ff323277fa381deff82bba","fixtures/self/debug_aranges":"8c2aeb2335f61d04ecb7b747070d24f83a6517cbee79dc5c96d97fb6c53d6b6d","fixtures/self/debug_info":"42028a5983006e0703f9ca9515cd27d891ae4af70279fae5011d547f581e2661","fixtures/self/debug_inlined":"89d9516f06ff835621936037f5884fc56712bf304c1dcde52251ddd510fe8710","fixtures/self/debug_line":"b29aebcca3b38bb2bb8aa708cbe74a0dce5a3b0c18916b63d6d17282c017bec7","fixtures/self/debug_loc":"8906ccb9c204f233eb74c1d069dee97a19d18c2051f9147795d7b5364a9266aa","fixtures/self/debug_pubnames":"cf58e237f89c68afba724597fa7e260448636b45f2e69dc6f1bfe34006e27c48","fixtures/self/debug_pubtypes":"d43c1bed71c9d14d1683294cdc1833f069cf131d6e95ee808547919b4f352d81","fixtures/self/debug_ranges":"6d765ac18d33accd89186d077eeb505cbdf97d990c9201d63d9463cd7787ce7a","fixtures/self/debug_str":"9ed904b68eee77b8558b80b3b7ca03e8527f6c64483e9d6d845f40270eb21183","fixtures/self/eh_frame":"6dc3d84351cac42cf73d03452fbb532470dd94d08715154c48417e3f62095f17","fixtures/self/eh_frame_hdr":"afba7a0aa233c9a8c81c986495bd2505164844adb93272d6bc0c9e592e684716","rustfmt.toml":"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855","src/arch.rs":"3a9b44d51a770b83e19c5420c7a6c090cf895fe668cbecea4e3c20acdfef871c","src/common.rs":"392f52a58db6101187ca5525bbeafca9bda2342debd058cabca37350cd9db619","src/constants.rs":"358cf7924c79bc72de59d23d1fa02b2047d6c763c8fbd8be263ab8cd3e3ba7ec","src/endianity.rs":"1f7e62ae34f540c06bedf1e7948739211556eea7dd83731a5ca52c7d687ed0fc","src/leb128.rs":"996d5c79d027f97c010ca487bc4ff5f8265f4b9e63d62b4e4fa291383c259ee9","src/lib.rs":"9a307a6d1fbebeae581df014b4f2b8bd16399fa883209fbdcc4d85eb9ec1dfd5","src/read/abbrev.rs":"31f7e9544a30e35f49dca355981730c2eb145e55ddbf281161d842a566d0a88d","src/read/addr.rs":"f63f289edf889e87107bb2090fb1c50b48af7015f31b7c39c3d6ea09630a38e9","src/read/aranges.rs":"ba3302f87cffb7ee15f48b0530ebd707f45ad056934223078d25ae2a1b034f1c","src/read/cfi.rs":"85cb294cf6a932d31769a747c58d1767b83e64831d8c633ab0b517014fe1cdec","src/read/dwarf.rs":"50cd674f17e81bca57f6ba69600a7a24bd7d13632c165f2a40f2a23fd328a2c9","src/read/endian_reader.rs":"320983a859c2bb0dd44a3e6fae55ff0a84dba3fa80c2edbc64aa8135c44eddf0","src/read/endian_slice.rs":"476055ef571d53fbb890545fe5893b9083b10b90ec9e538bd82216bbe8bc45fd","src/read/index.rs":"e79b8d591b8e2007a37f5ea85a6d71b69d56ca3739a85cf7bf361724c5b829fa","src/read/lazy.rs":"85642e886ab3a94cea53534d1e133d1c4c17d2deaf291facdc316507e499ce22","src/read/line.rs":"ff3aeb1c719a3440ece2a8b2394d790f0987640d77fc4b30bee73f291f027803","src/read/lists.rs":"67ca9e1a36a91feb4996d035211de845205212bfda02163685d217818567ff93","src/read/loclists.rs":"857701a9e86aee809bfca3fd661e283b4f05038764dfc9c3cb1a349acc00bc47","src/read/lookup.rs":"0cf89ba12b9d48b1fe035dd3a497730323acb9427a9457abbc2f7c58c4c71165","src/read/mod.rs":"e3e831fab7570c97f58fb5e3931e6965f4a361b66292944f79f85bf3fe1b1d00","src/read/op.rs":"57aba989cc3d49772a51de7d487cbd10458f688487d3ae4e5efae4c84adb4e39","src/read/pubnames.rs":"ed752ee1a7017e6d3be42d81e4ddaaac960ef08081463a19106c9f041526d4a3","src/read/pubtypes.rs":"5e75b32c0923e827aff0bb2db456797a0e8d38ba46be992558a7990b3196bcf5","src/read/reader.rs":"b10ff3e77b54347e96b1f3cff30da104dfdd0c4d7a55b672950788f1f1ae3478","src/read/rnglists.rs":"4ec166e73fdfc85efa97b3b005b514bb64d454edb1ba0f201c45df4f2127e745","src/read/str.rs":"4c2f50014451621fea45969cd313f6840fcd3a99d7a2d081bfa1f8e0e434133a","src/read/unit.rs":"4e8af3c654faf8dc42b8bc62edf2f2402c6b42b31889662b0b48753c08b9893a","src/read/util.rs":"40f07a7b6623f29d03e15e41cda625a613ab1973969a4ddbb42365a8550b7e79","src/read/value.rs":"5a91e03ad3d41f679b264753498434b91948c6b89955e4beb4522498386d9b1d","src/test_util.rs":"291eefa6b51c6d934ba2f4a4c9bc7c403046fc1cccf4d43487820f0154bb89e2","src/write/abbrev.rs":"fa02163389e92e804d139cf84f833ab6af932083f0eb2d74464b4a70bd3237ff","src/write/cfi.rs":"3b04b0ebd82363738199cc673f64e0ceb60506a67c4f18b435a109caa62840f3","src/write/dwarf.rs":"8a1a0893e31134ad68993994594f3024ad0c8af7c1188b29e0ffc26b42edef21","src/write/endian_vec.rs":"1d5811986648816a677580b22630f5059757a381487d73e9adbb3008c9ae0c58","src/write/line.rs":"73bf3bab57433fe1dc891c48303cbc4e482306a1b9425f3483ad2985a9676ee9","src/write/loc.rs":"5c1f8d97d8e871a6663ad704f5e15694bddd54b85f2d801b52a520522f1258fd","src/write/mod.rs":"d8aa1da854cdee629d470d00d87e00dc6998e4bec1ca951f8d2f277730ab9d69","src/write/op.rs":"08fec7613aaa9061aae6e31d8b49933c812a6b7609f69e611a2a953af09aa18a","src/write/range.rs":"259e21e32bebbf7cdd8027d401862dee95cb5111e45bc4ff30bf54e3306d0262","src/write/section.rs":"effefef0d5e4557cb099431a20a7304392e6bf4ce04941d72b8bd2df9100e297","src/write/str.rs":"4850cc2fee55980f9cbb6b4169f9861ab9d05c2b28a85c2b790480b83a66f514","src/write/unit.rs":"8876c88dc3529d32e9894acc3194ed99fe437bb7520821c18f9f9f638db08d81","src/write/writer.rs":"7d5dd07b82ec3becebb060c106d4ea697cbd8b9b64a5de78403511a5244e08b1","tests/convert_self.rs":"180909b562969e1691b64628ded8654e6e0b10b3357f39917bd8ac288c5826dd","tests/parse_self.rs":"f2da1c7daef7139545c9367c2f26199e8b4623b31d4ec6480ddd851e6980f2dc"},"package":"dec7af912d60cdbd3677c1af9352ebae6fb8394d165568a2234df0fa00f87793"} \ No newline at end of file
diff --git a/vendor/gimli/CHANGELOG.md b/vendor/gimli/CHANGELOG.md
index 9ca6d70a8..9c3817750 100644
--- a/vendor/gimli/CHANGELOG.md
+++ b/vendor/gimli/CHANGELOG.md
@@ -2,6 +2,27 @@
--------------------------------------------------------------------------------
+## 0.27.0
+
+Released 2022/11/23.
+
+### Breaking changes
+
+* Added `read::Dwarf::abbreviations_cache` to cache abbreviations at offset 0.
+ Changed `read::Dwarf::abbreviations` to return `Result<Arc<Abbreviations>>`,
+ and changed `read::Unit::abbreviations` to `Arc<Abbreviations>`.
+ [#628](https://github.com/gimli-rs/gimli/pull/628)
+
+### Added
+
+* Added LoongArch register definitions.
+ [#624](https://github.com/gimli-rs/gimli/pull/624)
+
+* Added support for tombstones in `read::LocListIter` and `read::RngListIter`.
+ [#631](https://github.com/gimli-rs/gimli/pull/631)
+
+--------------------------------------------------------------------------------
+
## 0.26.2
Released 2022/07/16.
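
Not part of the changelog itself: a minimal sketch of what the `Arc<Abbreviations>` breaking change above looks like at a downstream call site. The `shared_abbrev_units` helper is hypothetical and assumes a `Dwarf` has already been loaded; it only exercises the APIs shown elsewhere in this patch.

```rust
use std::sync::Arc;

use gimli::{Abbreviations, Dwarf, Reader};

// With 0.27, `Dwarf::abbreviations` returns `Result<Arc<Abbreviations>>` and goes
// through the new `abbreviations_cache`, so units that share the offset-0
// abbreviation table also share one allocation.
fn shared_abbrev_units<R: Reader>(dwarf: &Dwarf<R>) -> gimli::Result<usize> {
    let mut first: Option<Arc<Abbreviations>> = None;
    let mut shared = 0;
    let mut units = dwarf.units();
    while let Some(header) = units.next()? {
        let abbrevs = dwarf.abbreviations(&header)?;
        match &first {
            None => first = Some(abbrevs),
            // Count units whose abbreviations are literally the same cached `Arc`.
            Some(f) if Arc::ptr_eq(f, &abbrevs) => shared += 1,
            Some(_) => {}
        }
    }
    Ok(shared)
}
```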
diff --git a/vendor/gimli/Cargo.lock b/vendor/gimli/Cargo.lock
index b4a719a0c..1d8bdf9f4 100644
--- a/vendor/gimli/Cargo.lock
+++ b/vendor/gimli/Cargo.lock
@@ -10,18 +10,18 @@ checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
[[package]]
name = "aho-corasick"
-version = "0.7.18"
+version = "0.7.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f"
+checksum = "cc936419f96fa211c1b9166887b38e5e40b19958e5b895be7c1f93adec7071ac"
dependencies = [
"memchr",
]
[[package]]
name = "autocfg"
-version = "1.0.1"
+version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a"
+checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
[[package]]
name = "byteorder"
@@ -37,24 +37,24 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "compiler_builtins"
-version = "0.1.51"
+version = "0.1.84"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3587b3669d6f2c1cfd34c475272dabcfef29d52703933f6f72ebb36d6bd81a97"
+checksum = "989b2c1ca6e90ad06fdc69d1d1862fa28d27a977be6d92ae2fa762cf61fe0b10"
[[package]]
name = "crc32fast"
-version = "1.2.1"
+version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "81156fece84ab6a9f2afdb109ce3ae577e42b1228441eded99bd77f627953b1a"
+checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d"
dependencies = [
"cfg-if",
]
[[package]]
name = "crossbeam"
-version = "0.8.1"
+version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4ae5588f6b3c3cb05239e90bd110f257254aecd01e4635400391aeae07497845"
+checksum = "2801af0d36612ae591caa9568261fddce32ce6e08a7275ea334a06a4ad021a2c"
dependencies = [
"cfg-if",
"crossbeam-channel",
@@ -66,9 +66,9 @@ dependencies = [
[[package]]
name = "crossbeam-channel"
-version = "0.5.1"
+version = "0.5.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "06ed27e177f16d65f0f0c22a213e17c696ace5dd64b14258b52f9417ccb52db4"
+checksum = "c2dd04ddaf88237dc3b8d8f9a3c1004b506b54b3313403944054d23c0870c521"
dependencies = [
"cfg-if",
"crossbeam-utils",
@@ -76,9 +76,9 @@ dependencies = [
[[package]]
name = "crossbeam-deque"
-version = "0.8.1"
+version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6455c0ca19f0d2fbf751b908d5c55c1f5cbc65e03c4225427254b46890bdde1e"
+checksum = "715e8152b692bba2d374b53d4875445368fdf21a94751410af607a5ac677d1fc"
dependencies = [
"cfg-if",
"crossbeam-epoch",
@@ -87,22 +87,22 @@ dependencies = [
[[package]]
name = "crossbeam-epoch"
-version = "0.9.5"
+version = "0.9.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4ec02e091aa634e2c3ada4a392989e7c3116673ef0ac5b72232439094d73b7fd"
+checksum = "01a9af1f4c2ef74bb8aa1f7e19706bc72d03598c8a570bb5de72243c7a9d9d5a"
dependencies = [
+ "autocfg",
"cfg-if",
"crossbeam-utils",
- "lazy_static",
"memoffset",
"scopeguard",
]
[[package]]
name = "crossbeam-queue"
-version = "0.3.2"
+version = "0.3.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9b10ddc024425c88c2ad148c1b0fd53f4c6d38db9697c9f1588381212fa657c9"
+checksum = "d1cfb3ea8a53f37c40dea2c7bedcbd88bdfae54f5e2175d6ecaff1c988353add"
dependencies = [
"cfg-if",
"crossbeam-utils",
@@ -110,19 +110,18 @@ dependencies = [
[[package]]
name = "crossbeam-utils"
-version = "0.8.5"
+version = "0.8.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d82cfc11ce7f2c3faef78d8a684447b40d503d9681acebed6cb728d45940c4db"
+checksum = "4fb766fa798726286dbbb842f174001dab8abc7b627a1dd86e0b7222a95d929f"
dependencies = [
"cfg-if",
- "lazy_static",
]
[[package]]
name = "either"
-version = "1.6.1"
+version = "1.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457"
+checksum = "90e5c1c8368803113bf0c9584fc495a58b86dc8a29edbf8fe877d21d9507e797"
[[package]]
name = "fallible-iterator"
@@ -132,13 +131,11 @@ checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7"
[[package]]
name = "flate2"
-version = "1.0.22"
+version = "1.0.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1e6988e897c1c9c485f43b47a529cef42fde0547f9d8d41a7062518f1d8fc53f"
+checksum = "f82b0f4c27ad9f8bfd1f3208d882da2b09c301bc1c828fd3a00d0216d2fbbff6"
dependencies = [
- "cfg-if",
"crc32fast",
- "libc",
"miniz_oxide",
]
@@ -153,7 +150,7 @@ dependencies = [
[[package]]
name = "gimli"
-version = "0.26.2"
+version = "0.27.0"
dependencies = [
"compiler_builtins",
"crossbeam",
@@ -174,9 +171,9 @@ dependencies = [
[[package]]
name = "hashbrown"
-version = "0.11.2"
+version = "0.12.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e"
+checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
[[package]]
name = "hermit-abi"
@@ -189,65 +186,58 @@ dependencies = [
[[package]]
name = "indexmap"
-version = "1.7.0"
+version = "1.9.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bc633605454125dec4b66843673f01c7df2b89479b32e0ed634e43a91cff62a5"
+checksum = "1885e79c1fc4b10f0e172c475f458b7f7b93061064d98c3293e98c5ba0c8b399"
dependencies = [
"autocfg",
"hashbrown",
]
[[package]]
-name = "lazy_static"
-version = "1.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
-
-[[package]]
name = "libc"
-version = "0.2.105"
+version = "0.2.137"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "869d572136620d55835903746bcb5cdc54cb2851fd0aeec53220b4bb65ef3013"
+checksum = "fc7fcc620a3bff7cdd7a365be3376c97191aeaccc2a603e600951e452615bf89"
[[package]]
name = "memchr"
-version = "2.4.1"
+version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a"
+checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
[[package]]
name = "memmap2"
-version = "0.5.5"
+version = "0.5.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3a79b39c93a7a5a27eeaf9a23b5ff43f1b9e0ad6b1cdd441140ae53c35613fc7"
+checksum = "4b182332558b18d807c4ce1ca8ca983b34c3ee32765e47b3f0f69b90355cc1dc"
dependencies = [
"libc",
]
[[package]]
name = "memoffset"
-version = "0.6.4"
+version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "59accc507f1338036a0477ef61afdae33cde60840f4dfe481319ce3ad116ddf9"
+checksum = "5de893c32cde5f383baa4c04c5d6dbdd735cfd4a794b0debdb2bb1b421da5ff4"
dependencies = [
"autocfg",
]
[[package]]
name = "miniz_oxide"
-version = "0.4.4"
+version = "0.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a92518e98c078586bc6c934028adcca4c92a53d6a958196de835170a01d84e4b"
+checksum = "96590ba8f175222643a85693f33d26e9c8a015f599c216509b1a6894af675d34"
dependencies = [
"adler",
- "autocfg",
]
[[package]]
name = "num_cpus"
-version = "1.13.0"
+version = "1.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "05499f3756671c15885fee9034446956fff3f243d6077b91e5767df161f766b3"
+checksum = "f6058e64324c71e02bc2b150e4f3bc8286db6c83092132ffa3f6b1eab0f9def5"
dependencies = [
"hermit-abi",
"libc",
@@ -255,9 +245,9 @@ dependencies = [
[[package]]
name = "object"
-version = "0.29.0"
+version = "0.30.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "21158b2c33aa6d4561f1c0a6ea283ca92bc54802a93b263e910746d679a7eb53"
+checksum = "239da7f290cfa979f43f85a8efeee9a8a76d0827c356d37f9d3d7254d6b537fb"
dependencies = [
"flate2",
"memchr",
@@ -266,11 +256,10 @@ dependencies = [
[[package]]
name = "rayon"
-version = "1.5.1"
+version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c06aca804d41dbc8ba42dfd964f0d01334eceb64314b9ecf7c5fad5188a06d90"
+checksum = "1e060280438193c554f654141c9ea9417886713b7acd75974c85b18a69a88e0b"
dependencies = [
- "autocfg",
"crossbeam-deque",
"either",
"rayon-core",
@@ -278,22 +267,21 @@ dependencies = [
[[package]]
name = "rayon-core"
-version = "1.9.1"
+version = "1.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d78120e2c850279833f1dd3582f730c4ab53ed95aeaaaa862a2a5c71b1656d8e"
+checksum = "cac410af5d00ab6884528b4ab69d1e8e146e8d471201800fa1b4524126de6ad3"
dependencies = [
"crossbeam-channel",
"crossbeam-deque",
"crossbeam-utils",
- "lazy_static",
"num_cpus",
]
[[package]]
name = "regex"
-version = "1.5.4"
+version = "1.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d07a8629359eb56f1e2fb1652bb04212c072a87ba68546a04065d525673ac461"
+checksum = "e076559ef8e241f2ae3479e36f97bd5741c0330689e217ad51ce2c76808b868a"
dependencies = [
"aho-corasick",
"memchr",
@@ -302,9 +290,9 @@ dependencies = [
[[package]]
name = "regex-syntax"
-version = "0.6.25"
+version = "0.6.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b"
+checksum = "456c603be3e8d448b072f410900c09faf164fbce2d480456f50eea6e25f9c848"
[[package]]
name = "rustc-std-workspace-alloc"
@@ -347,9 +335,9 @@ checksum = "0685c84d5d54d1c26f7d3eb96cd41550adb97baed141a761cf335d3d33bcd0ae"
[[package]]
name = "unicode-width"
-version = "0.1.9"
+version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3ed742d4ea2bd1176e236172c8429aaf54486e7ac098db29ffe6529e0ce50973"
+checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b"
[[package]]
name = "wasmparser"
diff --git a/vendor/gimli/Cargo.toml b/vendor/gimli/Cargo.toml
index f36ccd936..2c6004eb9 100644
--- a/vendor/gimli/Cargo.toml
+++ b/vendor/gimli/Cargo.toml
@@ -12,7 +12,7 @@
[package]
edition = "2018"
name = "gimli"
-version = "0.26.2"
+version = "0.27.0"
exclude = [
"/releases/*",
"/.github",
@@ -105,7 +105,7 @@ version = "0.5.5"
version = "1"
[dev-dependencies.object]
-version = "0.29.0"
+version = "0.30.0"
features = ["wasm"]
[dev-dependencies.rayon]
diff --git a/vendor/gimli/README.md b/vendor/gimli/README.md
index 19e7bbd0e..f0d31fbe1 100644
--- a/vendor/gimli/README.md
+++ b/vendor/gimli/README.md
@@ -5,7 +5,7 @@
[![Build Status](https://github.com/gimli-rs/gimli/workflows/Rust/badge.svg)](https://github.com/gimli-rs/gimli/actions)
[![Coverage Status](https://coveralls.io/repos/github/gimli-rs/gimli/badge.svg?branch=master)](https://coveralls.io/github/gimli-rs/gimli?branch=master)
-`gimli` is a blazing fast library for consuming the
+`gimli` is a library for reading and writing the
[DWARF debugging format](https://dwarfstd.org/).
* **Zero copy:** everything is just a reference to the original input buffer. No
@@ -30,7 +30,7 @@ Add this to your `Cargo.toml`:
```toml
[dependencies]
-gimli = "0.26.2"
+gimli = "0.27.0"
```
The minimum supported Rust version is 1.42.0.
diff --git a/vendor/gimli/clippy.toml b/vendor/gimli/clippy.toml
new file mode 100644
index 000000000..f97e544b6
--- /dev/null
+++ b/vendor/gimli/clippy.toml
@@ -0,0 +1 @@
+msrv = "1.42.0"
diff --git a/vendor/gimli/examples/dwarfdump.rs b/vendor/gimli/examples/dwarfdump.rs
index 4b61fd572..22458fa8f 100644
--- a/vendor/gimli/examples/dwarfdump.rs
+++ b/vendor/gimli/examples/dwarfdump.rs
@@ -1859,35 +1859,46 @@ fn dump_op<R: Reader, W: Write>(
Ok(())
}
+fn dump_range<W: Write>(w: &mut W, range: Option<gimli::Range>) -> Result<()> {
+ if let Some(range) = range {
+ write!(w, " [0x{:08x}, 0x{:08x}]", range.begin, range.end)?;
+ } else {
+ write!(w, " [ignored]")?;
+ }
+ Ok(())
+}
+
fn dump_loc_list<R: Reader, W: Write>(
w: &mut W,
offset: gimli::LocationListsOffset<R::Offset>,
unit: &gimli::Unit<R>,
dwarf: &gimli::Dwarf<R>,
) -> Result<()> {
- let raw_locations = dwarf.raw_locations(unit, offset)?;
- let raw_locations: Vec<_> = raw_locations.collect()?;
let mut locations = dwarf.locations(unit, offset)?;
writeln!(
w,
- "<loclist at {}+0x{:08x} with {} entries>",
+ "<loclist at {}+0x{:08x}>",
if unit.encoding().version < 5 {
".debug_loc"
} else {
".debug_loclists"
},
offset.0,
- raw_locations.len()
)?;
- for (i, raw) in raw_locations.iter().enumerate() {
+ let mut i = 0;
+ while let Some(raw) = locations.next_raw()? {
write!(w, "\t\t\t[{:2}]", i)?;
- match *raw {
+ i += 1;
+ let range = locations
+ .convert_raw(raw.clone())?
+ .map(|location| location.range);
+ match raw {
gimli::RawLocListEntry::BaseAddress { addr } => {
- writeln!(w, "<new base address 0x{:08x}>", addr)?;
+ writeln!(w, "<base-address 0x{:08x}>", addr)?;
}
gimli::RawLocListEntry::BaseAddressx { addr } => {
let addr_val = dwarf.address(unit, addr)?;
- writeln!(w, "<new base addressx [{}]0x{:08x}>", addr.0, addr_val)?;
+ writeln!(w, "<base-addressx [{}]0x{:08x}>", addr.0, addr_val)?;
}
gimli::RawLocListEntry::StartxEndx {
begin,
@@ -1896,14 +1907,12 @@ fn dump_loc_list<R: Reader, W: Write>(
} => {
let begin_val = dwarf.address(unit, begin)?;
let end_val = dwarf.address(unit, end)?;
- let location = locations.next()?.unwrap();
write!(
w,
- "<startx-endx \
- low-off: [{}]0x{:08x} addr 0x{:08x} \
- high-off: [{}]0x{:08x} addr 0x{:08x}>",
- begin.0, begin_val, location.range.begin, end.0, end_val, location.range.end
+ "<startx-endx [{}]0x{:08x}, [{}]0x{:08x}>",
+ begin.0, begin_val, end.0, end_val,
)?;
+ dump_range(w, range)?;
dump_exprloc(w, unit.encoding(), data)?;
writeln!(w)?;
}
@@ -1913,14 +1922,12 @@ fn dump_loc_list<R: Reader, W: Write>(
ref data,
} => {
let begin_val = dwarf.address(unit, begin)?;
- let location = locations.next()?.unwrap();
write!(
w,
- "<start-length \
- low-off: [{}]0x{:08x} addr 0x{:08x} \
- high-off: 0x{:08x} addr 0x{:08x}>",
- begin.0, begin_val, location.range.begin, length, location.range.end
+ "<startx-length [{}]0x{:08x}, 0x{:08x}>",
+ begin.0, begin_val, length,
)?;
+ dump_range(w, range)?;
dump_exprloc(w, unit.encoding(), data)?;
writeln!(w)?;
}
@@ -1934,14 +1941,8 @@ fn dump_loc_list<R: Reader, W: Write>(
end,
ref data,
} => {
- let location = locations.next()?.unwrap();
- write!(
- w,
- "<offset pair \
- low-off: 0x{:08x} addr 0x{:08x} \
- high-off: 0x{:08x} addr 0x{:08x}>",
- begin, location.range.begin, end, location.range.end
- )?;
+ write!(w, "<offset-pair 0x{:08x}, 0x{:08x}>", begin, end)?;
+ dump_range(w, range)?;
dump_exprloc(w, unit.encoding(), data)?;
writeln!(w)?;
}
@@ -1955,14 +1956,8 @@ fn dump_loc_list<R: Reader, W: Write>(
end,
ref data,
} => {
- let location = locations.next()?.unwrap();
- write!(
- w,
- "<start-end \
- low-off: 0x{:08x} addr 0x{:08x} \
- high-off: 0x{:08x} addr 0x{:08x}>",
- begin, location.range.begin, end, location.range.end
- )?;
+ write!(w, "<start-end 0x{:08x}, 0x{:08x}>", begin, end)?;
+ dump_range(w, range)?;
dump_exprloc(w, unit.encoding(), data)?;
writeln!(w)?;
}
@@ -1971,14 +1966,8 @@ fn dump_loc_list<R: Reader, W: Write>(
length,
ref data,
} => {
- let location = locations.next()?.unwrap();
- write!(
- w,
- "<start-length \
- low-off: 0x{:08x} addr 0x{:08x} \
- high-off: 0x{:08x} addr 0x{:08x}>",
- begin, location.range.begin, length, location.range.end
- )?;
+ write!(w, "<start-length 0x{:08x}, 0x{:08x}>", begin, length)?;
+ dump_range(w, range)?;
dump_exprloc(w, unit.encoding(), data)?;
writeln!(w)?;
}
@@ -1993,33 +1982,23 @@ fn dump_range_list<R: Reader, W: Write>(
unit: &gimli::Unit<R>,
dwarf: &gimli::Dwarf<R>,
) -> Result<()> {
- let raw_ranges = dwarf.raw_ranges(unit, offset)?;
- let raw_ranges: Vec<_> = raw_ranges.collect()?;
let mut ranges = dwarf.ranges(unit, offset)?;
writeln!(
w,
- "<rnglist at {}+0x{:08x} with {} entries>",
+ "<rnglist at {}+0x{:08x}>",
if unit.encoding().version < 5 {
".debug_ranges"
} else {
".debug_rnglists"
},
offset.0,
- raw_ranges.len()
)?;
- for (i, raw) in raw_ranges.iter().enumerate() {
+ let mut i = 0;
+ while let Some(raw) = ranges.next_raw()? {
write!(w, "\t\t\t[{:2}] ", i)?;
- match *raw {
- gimli::RawRngListEntry::AddressOrOffsetPair { begin, end } => {
- let range = ranges.next()?.unwrap();
- writeln!(
- w,
- "<address pair \
- low-off: 0x{:08x} addr 0x{:08x} \
- high-off: 0x{:08x} addr 0x{:08x}>",
- begin, range.begin, end, range.end
- )?;
- }
+ i += 1;
+ let range = ranges.convert_raw(raw.clone())?;
+ match raw {
gimli::RawRngListEntry::BaseAddress { addr } => {
writeln!(w, "<new base address 0x{:08x}>", addr)?;
}
@@ -2030,66 +2009,39 @@ fn dump_range_list<R: Reader, W: Write>(
gimli::RawRngListEntry::StartxEndx { begin, end } => {
let begin_val = dwarf.address(unit, begin)?;
let end_val = dwarf.address(unit, end)?;
- let range = if begin_val == end_val {
- gimli::Range {
- begin: begin_val,
- end: end_val,
- }
- } else {
- ranges.next()?.unwrap()
- };
- writeln!(
+ write!(
w,
- "<startx-endx \
- low-off: [{}]0x{:08x} addr 0x{:08x} \
- high-off: [{}]0x{:08x} addr 0x{:08x}>",
- begin.0, begin_val, range.begin, end.0, end_val, range.end
+ "<startx-endx [{}]0x{:08x}, [{}]0x{:08x}>",
+ begin.0, begin_val, end.0, end_val,
)?;
+ dump_range(w, range)?;
+ writeln!(w)?;
}
gimli::RawRngListEntry::StartxLength { begin, length } => {
let begin_val = dwarf.address(unit, begin)?;
- let range = ranges.next()?.unwrap();
- writeln!(
+ write!(
w,
- "<startx-length \
- low-off: [{}]0x{:08x} addr 0x{:08x} \
- high-off: 0x{:08x} addr 0x{:08x}>",
- begin.0, begin_val, range.begin, length, range.end
+ "<startx-length [{}]0x{:08x}, 0x{:08x}>",
+ begin.0, begin_val, length,
)?;
+ dump_range(w, range)?;
+ writeln!(w)?;
}
- gimli::RawRngListEntry::OffsetPair { begin, end } => {
- let range = ranges.next()?.unwrap();
- writeln!(
- w,
- "<offset pair \
- low-off: 0x{:08x} addr 0x{:08x} \
- high-off: 0x{:08x} addr 0x{:08x}>",
- begin, range.begin, end, range.end
- )?;
+ gimli::RawRngListEntry::AddressOrOffsetPair { begin, end }
+ | gimli::RawRngListEntry::OffsetPair { begin, end } => {
+ write!(w, "<offset-pair 0x{:08x}, 0x{:08x}>", begin, end)?;
+ dump_range(w, range)?;
+ writeln!(w)?;
}
gimli::RawRngListEntry::StartEnd { begin, end } => {
- let range = if begin == end {
- gimli::Range { begin, end }
- } else {
- ranges.next()?.unwrap()
- };
- writeln!(
- w,
- "<start-end \
- low-off: 0x{:08x} addr 0x{:08x} \
- high-off: 0x{:08x} addr 0x{:08x}>",
- begin, range.begin, end, range.end
- )?;
+ write!(w, "<start-end 0x{:08x}, 0x{:08x}>", begin, end)?;
+ dump_range(w, range)?;
+ writeln!(w)?;
}
gimli::RawRngListEntry::StartLength { begin, length } => {
- let range = ranges.next()?.unwrap();
- writeln!(
- w,
- "<start-length \
- low-off: 0x{:08x} addr 0x{:08x} \
- high-off: 0x{:08x} addr 0x{:08x}>",
- begin, range.begin, length, range.end
- )?;
+ write!(w, "<start-length 0x{:08x}, 0x{:08x}>", begin, length)?;
+ dump_range(w, range)?;
+ writeln!(w)?;
}
};
}
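
Not part of the patch: the dwarfdump rework above replaces the up-front `collect()` of converted entries with raw iteration plus per-entry conversion. A condensed sketch of the same pattern against the public API, using a hypothetical `resolve_ranges` helper (`next_raw` and `convert_raw` are the iterator methods the new dwarfdump code relies on):

```rust
use gimli::{Dwarf, Range, RangeListsOffset, Reader, Unit};

// Resolve each raw range-list entry individually. `convert_raw` returns `None`
// for base-address selection entries and for tombstoned entries, which the
// dwarfdump output above prints as "[ignored]".
fn resolve_ranges<R: Reader>(
    dwarf: &Dwarf<R>,
    unit: &Unit<R>,
    offset: RangeListsOffset<R::Offset>,
) -> gimli::Result<Vec<Option<Range>>> {
    let mut ranges = dwarf.ranges(unit, offset)?;
    let mut resolved = Vec::new();
    while let Some(raw) = ranges.next_raw()? {
        resolved.push(ranges.convert_raw(raw)?);
    }
    Ok(resolved)
}
```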
diff --git a/vendor/gimli/src/arch.rs b/vendor/gimli/src/arch.rs
index f5b2e5ed8..abc872d83 100644
--- a/vendor/gimli/src/arch.rs
+++ b/vendor/gimli/src/arch.rs
@@ -291,6 +291,154 @@ registers!(AArch64, {
V31 = (95, "V31"),
});
+/// LoongArch architecture specific definitions.
+///
+/// See [LoongArch ELF psABI specification](https://loongson.github.io/LoongArch-Documentation/LoongArch-ELF-ABI-EN.html).
+#[derive(Debug, Clone, Copy)]
+pub struct LoongArch;
+
+registers!(LoongArch, {
+ R0 = (0, "$r0"),
+ R1 = (1, "$r1"),
+ R2 = (2, "$r2"),
+ R3 = (3, "$r3"),
+ R4 = (4, "$r4"),
+ R5 = (5, "$r5"),
+ R6 = (6, "$r6"),
+ R7 = (7, "$r7"),
+ R8 = (8, "$r8"),
+ R9 = (9, "$r9"),
+ R10 = (10, "$r10"),
+ R11 = (11, "$r11"),
+ R12 = (12, "$r12"),
+ R13 = (13, "$r13"),
+ R14 = (14, "$r14"),
+ R15 = (15, "$r15"),
+ R16 = (16, "$r16"),
+ R17 = (17, "$r17"),
+ R18 = (18, "$r18"),
+ R19 = (19, "$r19"),
+ R20 = (20, "$r20"),
+ R21 = (21, "$r21"),
+ R22 = (22, "$r22"),
+ R23 = (23, "$r23"),
+ R24 = (24, "$r24"),
+ R25 = (25, "$r25"),
+ R26 = (26, "$r26"),
+ R27 = (27, "$r27"),
+ R28 = (28, "$r28"),
+ R29 = (29, "$r29"),
+ R30 = (30, "$r30"),
+ R31 = (31, "$r31"),
+
+ F0 = (32, "$f0"),
+ F1 = (33, "$f1"),
+ F2 = (34, "$f2"),
+ F3 = (35, "$f3"),
+ F4 = (36, "$f4"),
+ F5 = (37, "$f5"),
+ F6 = (38, "$f6"),
+ F7 = (39, "$f7"),
+ F8 = (40, "$f8"),
+ F9 = (41, "$f9"),
+ F10 = (42, "$f10"),
+ F11 = (43, "$f11"),
+ F12 = (44, "$f12"),
+ F13 = (45, "$f13"),
+ F14 = (46, "$f14"),
+ F15 = (47, "$f15"),
+ F16 = (48, "$f16"),
+ F17 = (49, "$f17"),
+ F18 = (50, "$f18"),
+ F19 = (51, "$f19"),
+ F20 = (52, "$f20"),
+ F21 = (53, "$f21"),
+ F22 = (54, "$f22"),
+ F23 = (55, "$f23"),
+ F24 = (56, "$f24"),
+ F25 = (57, "$f25"),
+ F26 = (58, "$f26"),
+ F27 = (59, "$f27"),
+ F28 = (60, "$f28"),
+ F29 = (61, "$f29"),
+ F30 = (62, "$f30"),
+ F31 = (63, "$f31"),
+ FCC0 = (64, "$fcc0"),
+ FCC1 = (65, "$fcc1"),
+ FCC2 = (66, "$fcc2"),
+ FCC3 = (67, "$fcc3"),
+ FCC4 = (68, "$fcc4"),
+ FCC5 = (69, "$fcc5"),
+ FCC6 = (70, "$fcc6"),
+ FCC7 = (71, "$fcc7"),
+},
+aliases {
+ ZERO = (0, "$zero"),
+ RA = (1, "$ra"),
+ TP = (2, "$tp"),
+ SP = (3, "$sp"),
+ A0 = (4, "$a0"),
+ A1 = (5, "$a1"),
+ A2 = (6, "$a2"),
+ A3 = (7, "$a3"),
+ A4 = (8, "$a4"),
+ A5 = (9, "$a5"),
+ A6 = (10, "$a6"),
+ A7 = (11, "$a7"),
+ T0 = (12, "$t0"),
+ T1 = (13, "$t1"),
+ T2 = (14, "$t2"),
+ T3 = (15, "$t3"),
+ T4 = (16, "$t4"),
+ T5 = (17, "$t5"),
+ T6 = (18, "$t6"),
+ T7 = (19, "$t7"),
+ T8 = (20, "$t8"),
+ FP = (22, "$fp"),
+ S0 = (23, "$s0"),
+ S1 = (24, "$s1"),
+ S2 = (25, "$s2"),
+ S3 = (26, "$s3"),
+ S4 = (27, "$s4"),
+ S5 = (28, "$s5"),
+ S6 = (29, "$s6"),
+ S7 = (30, "$s7"),
+ S8 = (31, "$s8"),
+
+ FA0 = (32, "$fa0"),
+ FA1 = (33, "$fa1"),
+ FA2 = (34, "$fa2"),
+ FA3 = (35, "$fa3"),
+ FA4 = (36, "$fa4"),
+ FA5 = (37, "$fa5"),
+ FA6 = (38, "$fa6"),
+ FA7 = (39, "$fa7"),
+ FT0 = (40, "$ft0"),
+ FT1 = (41, "$ft1"),
+ FT2 = (42, "$ft2"),
+ FT3 = (43, "$ft3"),
+ FT4 = (44, "$ft4"),
+ FT5 = (45, "$ft5"),
+ FT6 = (46, "$ft6"),
+ FT7 = (47, "$ft7"),
+ FT8 = (48, "$ft8"),
+ FT9 = (49, "$ft9"),
+ FT10 = (50, "$ft10"),
+ FT11 = (51, "$ft11"),
+ FT12 = (52, "$ft12"),
+ FT13 = (53, "$ft13"),
+ FT14 = (54, "$ft14"),
+ FT15 = (55, "$ft15"),
+ FS0 = (56, "$fs0"),
+ FS1 = (57, "$fs1"),
+ FS2 = (58, "$fs2"),
+ FS3 = (59, "$fs3"),
+ FS4 = (60, "$fs4"),
+ FS5 = (61, "$fs5"),
+ FS6 = (62, "$fs6"),
+ FS7 = (63, "$fs7"),
+});
+
/// RISC-V architecture specific definitions.
///
/// See [RISC-V ELF psABI specification](https://github.com/riscv/riscv-elf-psabi-doc).
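
A quick illustration of the new LoongArch definitions (not from the patch; it assumes the `registers!` macro exposes the same `register_name` and `name_to_register` helpers for LoongArch that it generates for the other architectures):

```rust
use gimli::{LoongArch, Register};

fn main() {
    // DWARF register number 1 is `$r1`, aliased to the return-address register `$ra`.
    assert_eq!(LoongArch::RA, Register(1));
    assert_eq!(LoongArch::RA, LoongArch::R1);

    // Aliases such as `$fp` are accepted by the name lookup.
    let fp = LoongArch::name_to_register("$fp").expect("alias should resolve");
    assert_eq!(fp, Register(22));
    println!("register 22 is named {:?}", LoongArch::register_name(fp));
}
```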
diff --git a/vendor/gimli/src/lib.rs b/vendor/gimli/src/lib.rs
index ed1af9cbd..db30375aa 100644
--- a/vendor/gimli/src/lib.rs
+++ b/vendor/gimli/src/lib.rs
@@ -26,14 +26,17 @@
#![warn(ellipsis_inclusive_range_patterns)]
//#![warn(elided_lifetimes_in_paths)]
#![warn(explicit_outlives_requirements)]
-// Allow clippy warnings when we aren't building with clippy.
-#![allow(unknown_lints)]
+// Style.
+#![allow(clippy::bool_to_int_with_if)]
+#![allow(clippy::collapsible_else_if)]
+#![allow(clippy::comparison_chain)]
+#![allow(clippy::manual_range_contains)]
+#![allow(clippy::needless_late_init)]
+#![allow(clippy::too_many_arguments)]
// False positives with `fallible_iterator`.
#![allow(clippy::should_implement_trait)]
-// Many false positives involving `continue`.
-#![allow(clippy::never_loop)]
-// False positives when block expressions are used inside an assertion.
-#![allow(clippy::panic_params)]
+// False positives.
+#![allow(clippy::derive_partial_eq_without_eq)]
#![no_std]
#[allow(unused_imports)]
diff --git a/vendor/gimli/src/read/abbrev.rs b/vendor/gimli/src/read/abbrev.rs
index 1a24835a7..54f5cf8e5 100644
--- a/vendor/gimli/src/read/abbrev.rs
+++ b/vendor/gimli/src/read/abbrev.rs
@@ -1,6 +1,7 @@
//! Functions for parsing DWARF debugging abbreviations.
use alloc::collections::btree_map;
+use alloc::sync::Arc;
use alloc::vec::Vec;
use core::convert::TryFrom;
use core::fmt::{self, Debug};
@@ -10,7 +11,8 @@ use core::ops::Deref;
use crate::common::{DebugAbbrevOffset, Encoding, SectionId};
use crate::constants;
use crate::endianity::Endianity;
-use crate::read::{EndianSlice, Error, Reader, Result, Section, UnitHeader};
+use crate::read::lazy::LazyArc;
+use crate::read::{EndianSlice, Error, Reader, ReaderOffset, Result, Section, UnitHeader};
/// The `DebugAbbrev` struct represents the abbreviations describing
/// `DebuggingInformationEntry`s' attribute names and forms found in the
@@ -100,6 +102,38 @@ impl<R> From<R> for DebugAbbrev<R> {
}
}
+/// A cache of previously parsed `Abbreviations`.
+///
+/// Currently this only caches the abbreviations for offset 0,
+/// since this is a common case in which abbreviations are reused.
+/// This strategy may change in future if there is sufficient need.
+#[derive(Debug, Default)]
+pub struct AbbreviationsCache {
+ abbreviations: LazyArc<Abbreviations>,
+}
+
+impl AbbreviationsCache {
+ /// Create an empty abbreviations cache.
+ pub fn new() -> Self {
+ Self::default()
+ }
+
+ /// Parse the abbreviations at the given offset.
+ ///
+ /// This uses or updates the cache as required.
+ pub fn get<R: Reader>(
+ &self,
+ debug_abbrev: &DebugAbbrev<R>,
+ offset: DebugAbbrevOffset<R::Offset>,
+ ) -> Result<Arc<Abbreviations>> {
+ if offset.0 != R::Offset::from_u8(0) {
+ return debug_abbrev.abbreviations(offset).map(Arc::new);
+ }
+ self.abbreviations
+ .get(|| debug_abbrev.abbreviations(offset))
+ }
+}
+
/// A set of type abbreviations.
///
/// Construct an `Abbreviations` instance with the
@@ -310,7 +344,7 @@ impl Attributes {
/// Pushes a new value onto this list of attributes.
fn push(&mut self, attr: AttributeSpecification) {
match self {
- Attributes::Heap(list) => return list.push(attr),
+ Attributes::Heap(list) => list.push(attr),
Attributes::Inline {
buf,
len: MAX_ATTRIBUTES_INLINE,
@@ -329,13 +363,13 @@ impl Attributes {
impl Debug for Attributes {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- (&**self).fmt(f)
+ (**self).fmt(f)
}
}
impl PartialEq for Attributes {
fn eq(&self, other: &Attributes) -> bool {
- &**self == &**other
+ **self == **other
}
}
@@ -360,7 +394,7 @@ impl FromIterator<AttributeSpecification> for Attributes {
for item in iter {
list.push(item);
}
- return list;
+ list
}
}
@@ -470,8 +504,7 @@ pub(crate) fn get_attribute_size(form: constants::DwForm, encoding: Encoding) ->
match form {
constants::DW_FORM_addr => Some(encoding.address_size),
- constants::DW_FORM_implicit_const |
- constants::DW_FORM_flag_present => Some(0),
+ constants::DW_FORM_implicit_const | constants::DW_FORM_flag_present => Some(0),
constants::DW_FORM_data1
| constants::DW_FORM_flag
@@ -497,7 +530,7 @@ pub(crate) fn get_attribute_size(form: constants::DwForm, encoding: Encoding) ->
| constants::DW_FORM_ref_sig8
| constants::DW_FORM_ref_sup8 => Some(8),
- constants::DW_FORM_data16 => Some(16),
+ constants::DW_FORM_data16 => Some(16),
constants::DW_FORM_sec_offset
| constants::DW_FORM_GNU_ref_alt
@@ -518,16 +551,16 @@ pub(crate) fn get_attribute_size(form: constants::DwForm, encoding: Encoding) ->
}
// Variably sized forms.
- constants::DW_FORM_block |
- constants::DW_FORM_block1 |
- constants::DW_FORM_block2 |
- constants::DW_FORM_block4 |
- constants::DW_FORM_exprloc |
- constants::DW_FORM_ref_udata |
- constants::DW_FORM_string |
- constants::DW_FORM_sdata |
- constants::DW_FORM_udata |
- constants::DW_FORM_indirect |
+ constants::DW_FORM_block
+ | constants::DW_FORM_block1
+ | constants::DW_FORM_block2
+ | constants::DW_FORM_block4
+ | constants::DW_FORM_exprloc
+ | constants::DW_FORM_ref_udata
+ | constants::DW_FORM_string
+ | constants::DW_FORM_sdata
+ | constants::DW_FORM_udata
+ | constants::DW_FORM_indirect => None,
// We don't know the size of unknown forms.
_ => None,
@@ -993,4 +1026,64 @@ pub mod tests {
.unwrap();
assert!(abbrevs.get(0).is_none());
}
+
+ #[test]
+ fn abbreviations_cache() {
+ #[rustfmt::skip]
+ let buf = Section::new()
+ .abbrev(1, constants::DW_TAG_subprogram, constants::DW_CHILDREN_no)
+ .abbrev_attr(constants::DW_AT_name, constants::DW_FORM_string)
+ .abbrev_attr_null()
+ .abbrev_null()
+ .abbrev(1, constants::DW_TAG_compile_unit, constants::DW_CHILDREN_yes)
+ .abbrev_attr(constants::DW_AT_producer, constants::DW_FORM_strp)
+ .abbrev_attr(constants::DW_AT_language, constants::DW_FORM_data2)
+ .abbrev_attr_null()
+ .abbrev_null()
+ .get_contents()
+ .unwrap();
+
+ let abbrev1 = Abbreviation::new(
+ 1,
+ constants::DW_TAG_subprogram,
+ constants::DW_CHILDREN_no,
+ vec![AttributeSpecification::new(
+ constants::DW_AT_name,
+ constants::DW_FORM_string,
+ None,
+ )]
+ .into(),
+ );
+
+ let abbrev2 = Abbreviation::new(
+ 1,
+ constants::DW_TAG_compile_unit,
+ constants::DW_CHILDREN_yes,
+ vec![
+ AttributeSpecification::new(
+ constants::DW_AT_producer,
+ constants::DW_FORM_strp,
+ None,
+ ),
+ AttributeSpecification::new(
+ constants::DW_AT_language,
+ constants::DW_FORM_data2,
+ None,
+ ),
+ ]
+ .into(),
+ );
+
+ let debug_abbrev = DebugAbbrev::new(&buf, LittleEndian);
+ let cache = AbbreviationsCache::new();
+ let abbrevs1 = cache.get(&debug_abbrev, DebugAbbrevOffset(0)).unwrap();
+ assert_eq!(abbrevs1.get(1), Some(&abbrev1));
+ let abbrevs2 = cache.get(&debug_abbrev, DebugAbbrevOffset(8)).unwrap();
+ assert_eq!(abbrevs2.get(1), Some(&abbrev2));
+ let abbrevs3 = cache.get(&debug_abbrev, DebugAbbrevOffset(0)).unwrap();
+ assert_eq!(abbrevs3.get(1), Some(&abbrev1));
+
+ assert!(!Arc::ptr_eq(&abbrevs1, &abbrevs2));
+ assert!(Arc::ptr_eq(&abbrevs1, &abbrevs3));
+ }
}
diff --git a/vendor/gimli/src/read/cfi.rs b/vendor/gimli/src/read/cfi.rs
index 2e5167349..5e9befac1 100644
--- a/vendor/gimli/src/read/cfi.rs
+++ b/vendor/gimli/src/read/cfi.rs
@@ -1058,7 +1058,6 @@ where
Fde(PartialFrameDescriptionEntry<'bases, Section, R>),
}
-#[allow(clippy::type_complexity)]
fn parse_cfi_entry<'bases, Section, R>(
bases: &'bases BaseAddresses,
section: &Section,
@@ -1617,7 +1616,6 @@ where
}
impl<R: Reader> FrameDescriptionEntry<R> {
- #[allow(clippy::too_many_arguments)]
fn parse_rest<Section, F>(
offset: R::Offset,
length: R::Offset,
@@ -1982,7 +1980,7 @@ impl<R: Reader, A: UnwindContextStorage<R>> UnwindContext<R, A> {
}
let mut table = UnwindTable::new_for_cie(section, bases, self, cie);
- while let Some(_) = table.next_row()? {}
+ while table.next_row()?.is_some() {}
self.save_initial_rules()?;
Ok(())
@@ -2005,7 +2003,7 @@ impl<R: Reader, A: UnwindContextStorage<R>> UnwindContext<R, A> {
}
fn save_initial_rules(&mut self) -> Result<()> {
- assert_eq!(self.is_initialized, false);
+ debug_assert!(!self.is_initialized);
self.initial_rule = match *self.stack.last().unwrap().registers.rules {
// All rules are default (undefined). In this case just synthesize
// an undefined rule.
@@ -2821,10 +2819,7 @@ pub enum RegisterRule<R: Reader> {
impl<R: Reader> RegisterRule<R> {
fn is_defined(&self) -> bool {
- match *self {
- RegisterRule::Undefined => false,
- _ => true,
- }
+ !matches!(*self, RegisterRule::Undefined)
}
}
@@ -3394,10 +3389,10 @@ impl Default for Pointer {
}
}
-impl Into<u64> for Pointer {
+impl From<Pointer> for u64 {
#[inline]
- fn into(self) -> u64 {
- match self {
+ fn from(p: Pointer) -> u64 {
+ match p {
Pointer::Direct(p) | Pointer::Indirect(p) => p,
}
}
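
Replacing the hand-written `Into<u64>` impl with `From<Pointer> for u64` (the direction clippy prefers) keeps existing `.into()` call sites working, because the standard library's blanket `impl<T, U: From<T>> Into<U> for T` supplies the reverse conversion. A quick illustration, not part of the patch:

```rust
use gimli::Pointer;

fn main() {
    // Both conversion styles resolve to the same new `From` impl.
    assert_eq!(u64::from(Pointer::Direct(0x1000)), 0x1000);
    let addr: u64 = Pointer::Indirect(0x2000).into();
    assert_eq!(addr, 0x2000);
}
```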
@@ -3762,8 +3757,6 @@ mod tests {
}
}
- #[allow(clippy::type_complexity)]
- #[allow(clippy::needless_pass_by_value)]
fn assert_parse_cie<'input, E>(
kind: SectionKind<DebugFrame<EndianSlice<'input, E>>>,
section: Section,
@@ -5118,7 +5111,6 @@ mod tests {
assert_eq!(iter.next(), Ok(None));
}
- #[allow(clippy::needless_pass_by_value)]
fn assert_eval<'a, I>(
mut initial_ctx: UnwindContext<EndianSlice<'a, LittleEndian>>,
expected_ctx: UnwindContext<EndianSlice<'a, LittleEndian>>,
@@ -5598,7 +5590,6 @@ mod tests {
#[test]
fn test_unwind_table_cie_no_rule() {
- #[allow(clippy::identity_op)]
let initial_instructions = Section::with_endian(Endian::Little)
// The CFA is -12 from register 4.
.D8(constants::DW_CFA_def_cfa_sf.0)
@@ -5671,7 +5662,6 @@ mod tests {
#[test]
fn test_unwind_table_cie_single_rule() {
- #[allow(clippy::identity_op)]
let initial_instructions = Section::with_endian(Endian::Little)
// The CFA is -12 from register 4.
.D8(constants::DW_CFA_def_cfa_sf.0)
@@ -5747,7 +5737,6 @@ mod tests {
#[test]
fn test_unwind_table_next_row() {
- #[allow(clippy::identity_op)]
let initial_instructions = Section::with_endian(Endian::Little)
// The CFA is -12 from register 4.
.D8(constants::DW_CFA_def_cfa_sf.0)
diff --git a/vendor/gimli/src/read/dwarf.rs b/vendor/gimli/src/read/dwarf.rs
index b63526941..cce364c2b 100644
--- a/vendor/gimli/src/read/dwarf.rs
+++ b/vendor/gimli/src/read/dwarf.rs
@@ -9,11 +9,11 @@ use crate::common::{
};
use crate::constants;
use crate::read::{
- Abbreviations, AttributeValue, DebugAbbrev, DebugAddr, DebugAranges, DebugCuIndex, DebugInfo,
- DebugInfoUnitHeadersIter, DebugLine, DebugLineStr, DebugLoc, DebugLocLists, DebugRngLists,
- DebugStr, DebugStrOffsets, DebugTuIndex, DebugTypes, DebugTypesUnitHeadersIter,
- DebuggingInformationEntry, EntriesCursor, EntriesRaw, EntriesTree, Error,
- IncompleteLineProgram, LocListIter, LocationLists, Range, RangeLists, RawLocListIter,
+ Abbreviations, AbbreviationsCache, AttributeValue, DebugAbbrev, DebugAddr, DebugAranges,
+ DebugCuIndex, DebugInfo, DebugInfoUnitHeadersIter, DebugLine, DebugLineStr, DebugLoc,
+ DebugLocLists, DebugRngLists, DebugStr, DebugStrOffsets, DebugTuIndex, DebugTypes,
+ DebugTypesUnitHeadersIter, DebuggingInformationEntry, EntriesCursor, EntriesRaw, EntriesTree,
+ Error, IncompleteLineProgram, LocListIter, LocationLists, Range, RangeLists, RawLocListIter,
RawRngListIter, Reader, ReaderOffset, ReaderOffsetId, Result, RngListIter, Section, UnitHeader,
UnitIndex, UnitIndexSectionIterator, UnitOffset, UnitType,
};
@@ -59,6 +59,9 @@ pub struct Dwarf<R> {
/// The DWARF sections for a supplementary object file.
pub sup: Option<Arc<Dwarf<R>>>,
+
+ /// A cache of previously parsed abbreviations for units in this file.
+ pub abbreviations_cache: AbbreviationsCache,
}
impl<T> Dwarf<T> {
@@ -96,6 +99,7 @@ impl<T> Dwarf<T> {
ranges: RangeLists::new(debug_ranges, debug_rnglists),
file_type: DwarfFileType::Main,
sup: None,
+ abbreviations_cache: AbbreviationsCache::new(),
})
}
@@ -157,6 +161,7 @@ impl<T> Dwarf<T> {
ranges: self.ranges.borrow(&mut borrow),
file_type: self.file_type,
sup: self.sup().map(|sup| Arc::new(sup.borrow(borrow))),
+ abbreviations_cache: AbbreviationsCache::new(),
}
}
@@ -192,10 +197,10 @@ impl<R: Reader> Dwarf<R> {
}
/// Parse the abbreviations for a compilation unit.
- // TODO: provide caching of abbreviations
#[inline]
- pub fn abbreviations(&self, unit: &UnitHeader<R>) -> Result<Abbreviations> {
- unit.abbreviations(&self.debug_abbrev)
+ pub fn abbreviations(&self, unit: &UnitHeader<R>) -> Result<Arc<Abbreviations>> {
+ self.abbreviations_cache
+ .get(&self.debug_abbrev, unit.debug_abbrev_offset())
}
/// Return the string offset at the given index.
@@ -783,6 +788,7 @@ impl<R: Reader> DwarfPackage<R> {
ranges: RangeLists::new(debug_ranges, debug_rnglists),
file_type: DwarfFileType::Dwo,
sup: None,
+ abbreviations_cache: AbbreviationsCache::new(),
})
}
}
@@ -799,7 +805,7 @@ where
pub header: UnitHeader<R, Offset>,
/// The parsed abbreviations for the unit.
- pub abbreviations: Abbreviations,
+ pub abbreviations: Arc<Abbreviations>,
/// The `DW_AT_name` attribute of the unit.
pub name: Option<R>,
@@ -833,7 +839,7 @@ impl<R: Reader> Unit<R> {
/// Construct a new `Unit` from the given unit header.
#[inline]
pub fn new(dwarf: &Dwarf<R>, header: UnitHeader<R>) -> Result<Self> {
- let abbreviations = header.abbreviations(&dwarf.debug_abbrev)?;
+ let abbreviations = dwarf.abbreviations(&header)?;
let mut unit = Unit {
abbreviations,
name: None,
diff --git a/vendor/gimli/src/read/endian_slice.rs b/vendor/gimli/src/read/endian_slice.rs
index 05262cdec..d0fd67c0b 100644
--- a/vendor/gimli/src/read/endian_slice.rs
+++ b/vendor/gimli/src/read/endian_slice.rs
@@ -197,12 +197,12 @@ where
}
}
-impl<'input, Endian> Into<&'input [u8]> for EndianSlice<'input, Endian>
+impl<'input, Endian> From<EndianSlice<'input, Endian>> for &'input [u8]
where
Endian: Endianity,
{
- fn into(self) -> &'input [u8] {
- self.slice
+ fn from(endian_slice: EndianSlice<'input, Endian>) -> &'input [u8] {
+ endian_slice.slice
}
}
diff --git a/vendor/gimli/src/read/lazy.rs b/vendor/gimli/src/read/lazy.rs
new file mode 100644
index 000000000..6138735c8
--- /dev/null
+++ b/vendor/gimli/src/read/lazy.rs
@@ -0,0 +1,116 @@
+pub(crate) use imp::*;
+
+#[cfg(not(feature = "std"))]
+mod imp {
+ use alloc::sync::Arc;
+ use core::sync::atomic::{AtomicPtr, Ordering};
+ use core::{mem, ptr};
+
+ #[derive(Debug, Default)]
+ pub(crate) struct LazyArc<T> {
+ // Only written once with a value obtained from `Arc<T>::into_raw`.
+ // This holds a ref count for the `Arc`, so it is always safe to
+ // clone the `Arc` given a reference to the `LazyArc`.
+ value: AtomicPtr<T>,
+ }
+
+ impl<T> Drop for LazyArc<T> {
+ fn drop(&mut self) {
+ let value_ptr = self.value.load(Ordering::Acquire);
+ if !value_ptr.is_null() {
+ // SAFETY: all writes to `self.value` are pointers obtained from `Arc::into_raw`.
+ drop(unsafe { Arc::from_raw(value_ptr) });
+ }
+ }
+ }
+
+ impl<T> LazyArc<T> {
+ pub(crate) fn get<E, F: FnOnce() -> Result<T, E>>(&self, f: F) -> Result<Arc<T>, E> {
+ // Clone an `Arc` given a pointer obtained from `Arc::into_raw`.
+ // SAFETY: `value_ptr` must be a valid pointer obtained from `Arc<T>::into_raw`.
+ unsafe fn clone_arc_ptr<T>(value_ptr: *const T) -> Arc<T> {
+ let value = Arc::from_raw(value_ptr);
+ let clone = Arc::clone(&value);
+ mem::forget(value);
+ clone
+ }
+
+ // Return the existing value if already computed.
+ // `Ordering::Acquire` is needed so that the content of the loaded `Arc` is
+ // visible to this thread.
+ let value_ptr = self.value.load(Ordering::Acquire);
+ if !value_ptr.is_null() {
+ // SAFETY: all writes to `self.value` are pointers obtained from `Arc::into_raw`.
+ return Ok(unsafe { clone_arc_ptr(value_ptr) });
+ }
+
+ // Race to compute and set the value.
+ let value = f().map(Arc::new)?;
+ let value_ptr = Arc::into_raw(value);
+ match self.value.compare_exchange(
+ ptr::null_mut(),
+ value_ptr as *mut T,
+ // Success: `Ordering::Release` is needed so that the content of the stored `Arc`
+ // is visible to other threads. No ordering is required for the null ptr that is
+ // loaded, but older rust versions (< 1.64) require that its ordering must not
+ // be weaker than the failure ordering, so we use `Ordering::AcqRel`.
+ Ordering::AcqRel,
+ // Failure: `Ordering::Acquire` is needed so that the content of the loaded `Arc`
+ // is visible to this thread.
+ Ordering::Acquire,
+ ) {
+ Ok(_) => {
+ // Return the value we computed.
+ // SAFETY: `value_ptr` was obtained from `Arc::into_raw`.
+ Ok(unsafe { clone_arc_ptr(value_ptr) })
+ }
+ Err(existing_value_ptr) => {
+ // We lost the race, drop unneeded `value_ptr`.
+ // SAFETY: `value_ptr` was obtained from `Arc::into_raw`.
+ drop(unsafe { Arc::from_raw(value_ptr) });
+ // Return the existing value.
+ // SAFETY: all writes to `self.value` are pointers obtained from `Arc::into_raw`.
+ Ok(unsafe { clone_arc_ptr(existing_value_ptr) })
+ }
+ }
+ }
+ }
+}
+
+#[cfg(feature = "std")]
+mod imp {
+ use std::sync::{Arc, Mutex};
+
+ #[derive(Debug, Default)]
+ pub(crate) struct LazyArc<T> {
+ value: Mutex<Option<Arc<T>>>,
+ }
+
+ impl<T> LazyArc<T> {
+ pub(crate) fn get<E, F: FnOnce() -> Result<T, E>>(&self, f: F) -> Result<Arc<T>, E> {
+ let mut lock = self.value.lock().unwrap();
+ if let Some(value) = &*lock {
+ return Ok(value.clone());
+ }
+ let value = f().map(Arc::new)?;
+ *lock = Some(value.clone());
+ Ok(value)
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn lazy_arc() {
+ let lazy = LazyArc::default();
+ let value = lazy.get(|| Err(()));
+ assert_eq!(value, Err(()));
+ let value = lazy.get(|| Ok::<i32, ()>(3)).unwrap();
+ assert_eq!(*value, 3);
+ let value = lazy.get(|| Err(())).unwrap();
+ assert_eq!(*value, 3);
+ }
+}
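The no_std `LazyArc` above relies on the `Arc::into_raw`/`Arc::from_raw` refcount trick to clone an `Arc` out of a stored raw pointer without consuming the count that the pointer itself holds. A minimal standalone sketch of that trick, with illustrative names that are not part of gimli's API:

    use std::mem;
    use std::sync::Arc;

    // Clone an `Arc` from a pointer previously produced by `Arc::into_raw`,
    // leaving the refcount owned by that pointer untouched.
    unsafe fn clone_arc_ptr<T>(ptr: *const T) -> Arc<T> {
        let value = Arc::from_raw(ptr); // temporarily reconstitute the Arc
        let clone = Arc::clone(&value); // bump the strong count
        mem::forget(value);             // hand the original count back to `ptr`
        clone
    }

    fn main() {
        let raw = Arc::into_raw(Arc::new(42u32)); // strong count = 1, owned by `raw`
        let cloned = unsafe { clone_arc_ptr(raw) }; // strong count = 2
        assert_eq!(*cloned, 42);
        // Release the count still held by `raw`, as `LazyArc::drop` does.
        drop(unsafe { Arc::from_raw(raw) });
        assert_eq!(Arc::strong_count(&cloned), 1);
    }

The `compare_exchange` in `LazyArc::get` decides which thread's `Arc::into_raw` pointer wins the race; the loser simply reconstitutes and drops its own `Arc`, exactly as in the final `drop` above.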
diff --git a/vendor/gimli/src/read/line.rs b/vendor/gimli/src/read/line.rs
index 0e7380bb9..f7f44b2b5 100644
--- a/vendor/gimli/src/read/line.rs
+++ b/vendor/gimli/src/read/line.rs
@@ -198,7 +198,6 @@ where
R: Reader<Offset = Offset>,
Offset: ReaderOffset,
{
- #[allow(clippy::new_ret_no_self)]
fn new(program: IncompleteLineProgram<R, Offset>) -> OneShotLineRows<R, Offset> {
let row = LineRow::new(program.header());
let instructions = LineInstructions {
@@ -606,7 +605,6 @@ impl<R: Reader> LineInstructions<R> {
///
/// Unfortunately, the `header` parameter means that this cannot be a
/// `FallibleIterator`.
- #[allow(clippy::inline_always)]
#[inline(always)]
pub fn next_instruction(
&mut self,
diff --git a/vendor/gimli/src/read/loclists.rs b/vendor/gimli/src/read/loclists.rs
index 3902c181b..5cba675d2 100644
--- a/vendor/gimli/src/read/loclists.rs
+++ b/vendor/gimli/src/read/loclists.rs
@@ -233,7 +233,7 @@ impl<R: Reader> LocationLists<R> {
let (mut input, format) = if unit_encoding.version <= 4 {
(self.debug_loc.section.clone(), LocListsFormat::Bare)
} else {
- (self.debug_loclists.section.clone(), LocListsFormat::LLE)
+ (self.debug_loclists.section.clone(), LocListsFormat::Lle)
};
input.skip(offset.0)?;
Ok(RawLocListIter::new(input, unit_encoding, format))
@@ -259,7 +259,7 @@ impl<R: Reader> LocationLists<R> {
Ok(RawLocListIter::new(
input,
unit_encoding,
- LocListsFormat::LLE,
+ LocListsFormat::Lle,
))
}
@@ -300,7 +300,7 @@ enum LocListsFormat {
Bare,
/// The DW_LLE encoded range list format used in DWARF 5 and the non-standard GNU
/// split dwarf extension.
- LLE,
+ Lle,
}
/// A raw iterator over a location list.
@@ -402,10 +402,10 @@ fn parse_data<R: Reader>(input: &mut R, encoding: Encoding) -> Result<Expression
impl<R: Reader> RawLocListEntry<R> {
/// Parse a location list entry from `.debug_loclists`
fn parse(input: &mut R, encoding: Encoding, format: LocListsFormat) -> Result<Option<Self>> {
- match format {
+ Ok(match format {
LocListsFormat::Bare => {
let range = RawRange::parse(input, encoding.address_size)?;
- return Ok(if range.is_end() {
+ if range.is_end() {
None
} else if range.is_base_address(encoding.address_size) {
Some(RawLocListEntry::BaseAddress { addr: range.end })
@@ -417,9 +417,9 @@ impl<R: Reader> RawLocListEntry<R> {
end: range.end,
data,
})
- });
+ }
}
- LocListsFormat::LLE => Ok(match constants::DwLle(input.read_u8()?) {
+ LocListsFormat::Lle => match constants::DwLle(input.read_u8()?) {
constants::DW_LLE_end_of_list => None,
constants::DW_LLE_base_addressx => Some(RawLocListEntry::BaseAddressx {
addr: DebugAddrIndex(input.read_uleb128().and_then(R::Offset::from_u64)?),
@@ -463,8 +463,8 @@ impl<R: Reader> RawLocListEntry<R> {
_ => {
return Err(Error::InvalidAddressRange);
}
- }),
- }
+ },
+ })
}
}
@@ -552,63 +552,96 @@ impl<R: Reader> LocListIter<R> {
None => return Ok(None),
};
- let (range, data) = match raw_loc {
- RawLocListEntry::BaseAddress { addr } => {
- self.base_address = addr;
- continue;
- }
- RawLocListEntry::BaseAddressx { addr } => {
- self.base_address = self.get_address(addr)?;
- continue;
- }
- RawLocListEntry::StartxEndx { begin, end, data } => {
- let begin = self.get_address(begin)?;
- let end = self.get_address(end)?;
- (Range { begin, end }, data)
- }
- RawLocListEntry::StartxLength {
- begin,
- length,
- data,
- } => {
- let begin = self.get_address(begin)?;
- let end = begin + length;
- (Range { begin, end }, data)
- }
- RawLocListEntry::DefaultLocation { data } => (
- Range {
- begin: 0,
- end: u64::max_value(),
- },
- data,
- ),
- RawLocListEntry::AddressOrOffsetPair { begin, end, data }
- | RawLocListEntry::OffsetPair { begin, end, data } => {
- let mut range = Range { begin, end };
- range.add_base_address(self.base_address, self.raw.encoding.address_size);
- (range, data)
- }
- RawLocListEntry::StartEnd { begin, end, data } => (Range { begin, end }, data),
- RawLocListEntry::StartLength {
- begin,
- length,
- data,
- } => (
- Range {
- begin,
- end: begin + length,
- },
- data,
- ),
- };
+ let loc = self.convert_raw(raw_loc)?;
+ if loc.is_some() {
+ return Ok(loc);
+ }
+ }
+ }
- if range.begin > range.end {
- self.raw.input.empty();
- return Err(Error::InvalidLocationAddressRange);
+ /// Return the next raw location.
+ ///
+ /// The raw location should be passed to `convert_raw`.
+ #[doc(hidden)]
+ pub fn next_raw(&mut self) -> Result<Option<RawLocListEntry<R>>> {
+ self.raw.next()
+ }
+
+ /// Convert a raw location into a location, and update the state of the iterator.
+ ///
+ /// The raw location should have been obtained from `next_raw`.
+ #[doc(hidden)]
+ pub fn convert_raw(
+ &mut self,
+ raw_loc: RawLocListEntry<R>,
+ ) -> Result<Option<LocationListEntry<R>>> {
+ let mask = !0 >> (64 - self.raw.encoding.address_size * 8);
+ let tombstone = if self.raw.encoding.version <= 4 {
+ mask - 1
+ } else {
+ mask
+ };
+
+ let (range, data) = match raw_loc {
+ RawLocListEntry::BaseAddress { addr } => {
+ self.base_address = addr;
+ return Ok(None);
+ }
+ RawLocListEntry::BaseAddressx { addr } => {
+ self.base_address = self.get_address(addr)?;
+ return Ok(None);
+ }
+ RawLocListEntry::StartxEndx { begin, end, data } => {
+ let begin = self.get_address(begin)?;
+ let end = self.get_address(end)?;
+ (Range { begin, end }, data)
+ }
+ RawLocListEntry::StartxLength {
+ begin,
+ length,
+ data,
+ } => {
+ let begin = self.get_address(begin)?;
+ let end = begin.wrapping_add(length) & mask;
+ (Range { begin, end }, data)
+ }
+ RawLocListEntry::DefaultLocation { data } => (
+ Range {
+ begin: 0,
+ end: u64::max_value(),
+ },
+ data,
+ ),
+ RawLocListEntry::AddressOrOffsetPair { begin, end, data }
+ | RawLocListEntry::OffsetPair { begin, end, data } => {
+ if self.base_address == tombstone {
+ return Ok(None);
+ }
+ let mut range = Range { begin, end };
+ range.add_base_address(self.base_address, self.raw.encoding.address_size);
+ (range, data)
+ }
+ RawLocListEntry::StartEnd { begin, end, data } => (Range { begin, end }, data),
+ RawLocListEntry::StartLength {
+ begin,
+ length,
+ data,
+ } => {
+ let end = begin.wrapping_add(length) & mask;
+ (Range { begin, end }, data)
}
+ };
- return Ok(Some(LocationListEntry { range, data }));
+ if range.begin == tombstone {
+ return Ok(None);
}
+
+ if range.begin > range.end {
+ self.raw.input.empty();
+ return Err(Error::InvalidLocationAddressRange);
+ }
+
+ Ok(Some(LocationListEntry { range, data }))
}
}
@@ -643,6 +676,7 @@ mod tests {
#[test]
fn test_loclists_32() {
+ let tombstone = !0u32;
let encoding = Encoding {
format: Format::Dwarf32,
version: 5,
@@ -653,7 +687,9 @@ mod tests {
.L32(0x0300_0000)
.L32(0x0301_0300)
.L32(0x0301_0400)
- .L32(0x0301_0500);
+ .L32(0x0301_0500)
+ .L32(tombstone)
+ .L32(0x0301_0600);
let buf = section.get_contents().unwrap();
let debug_addr = &DebugAddr::from(EndianSlice::new(&buf, LittleEndian));
let debug_addr_base = DebugAddrBase(0);
@@ -697,6 +733,25 @@ mod tests {
.L8(2).uleb(1).uleb(2).uleb(4).L32(12)
// A StartxLength
.L8(3).uleb(3).uleb(0x100).uleb(4).L32(13)
+
+ // Tombstone entries, all of which should be ignored.
+ // A BaseAddressx that is a tombstone.
+ .L8(1).uleb(4)
+ .L8(4).uleb(0x11100).uleb(0x11200).uleb(4).L32(20)
+ // A BaseAddress that is a tombstone.
+ .L8(6).L32(tombstone)
+ .L8(4).uleb(0x11300).uleb(0x11400).uleb(4).L32(21)
+ // A StartxEndx that is a tombstone.
+ .L8(2).uleb(4).uleb(5).uleb(4).L32(22)
+ // A StartxLength that is a tombstone.
+ .L8(3).uleb(4).uleb(0x100).uleb(4).L32(23)
+ // A StartEnd that is a tombstone.
+ .L8(7).L32(tombstone).L32(0x201_1500).uleb(4).L32(24)
+ // A StartLength that is a tombstone.
+ .L8(8).L32(tombstone).uleb(0x100).uleb(4).L32(25)
+ // A StartEnd (not ignored)
+ .L8(7).L32(0x201_1600).L32(0x201_1700).uleb(4).L32(26)
+
// A range end.
.L8(0)
// Some extra data.
@@ -854,6 +909,18 @@ mod tests {
}))
);
+ // A StartEnd location following the tombstones
+ assert_eq!(
+ locations.next(),
+ Ok(Some(LocationListEntry {
+ range: Range {
+ begin: 0x0201_1600,
+ end: 0x0201_1700,
+ },
+ data: Expression(EndianSlice::new(&[26, 0, 0, 0], LittleEndian)),
+ }))
+ );
+
// A location list end.
assert_eq!(locations.next(), Ok(None));
@@ -872,6 +939,7 @@ mod tests {
#[test]
fn test_loclists_64() {
+ let tombstone = !0u64;
let encoding = Encoding {
format: Format::Dwarf64,
version: 5,
@@ -882,7 +950,9 @@ mod tests {
.L64(0x0300_0000)
.L64(0x0301_0300)
.L64(0x0301_0400)
- .L64(0x0301_0500);
+ .L64(0x0301_0500)
+ .L64(tombstone)
+ .L64(0x0301_0600);
let buf = section.get_contents().unwrap();
let debug_addr = &DebugAddr::from(EndianSlice::new(&buf, LittleEndian));
let debug_addr_base = DebugAddrBase(0);
@@ -927,6 +997,25 @@ mod tests {
.L8(2).uleb(1).uleb(2).uleb(4).L32(12)
// A StartxLength
.L8(3).uleb(3).uleb(0x100).uleb(4).L32(13)
+
+ // Tombstone entries, all of which should be ignored.
+ // A BaseAddressx that is a tombstone.
+ .L8(1).uleb(4)
+ .L8(4).uleb(0x11100).uleb(0x11200).uleb(4).L32(20)
+ // A BaseAddress that is a tombstone.
+ .L8(6).L64(tombstone)
+ .L8(4).uleb(0x11300).uleb(0x11400).uleb(4).L32(21)
+ // A StartxEndx that is a tombstone.
+ .L8(2).uleb(4).uleb(5).uleb(4).L32(22)
+ // A StartxLength that is a tombstone.
+ .L8(3).uleb(4).uleb(0x100).uleb(4).L32(23)
+ // A StartEnd that is a tombstone.
+ .L8(7).L64(tombstone).L64(0x201_1500).uleb(4).L32(24)
+ // A StartLength that is a tombstone.
+ .L8(8).L64(tombstone).uleb(0x100).uleb(4).L32(25)
+ // A StartEnd (not ignored)
+ .L8(7).L64(0x201_1600).L64(0x201_1700).uleb(4).L32(26)
+
// A range end.
.L8(0)
// Some extra data.
@@ -1084,6 +1173,18 @@ mod tests {
}))
);
+ // A StartEnd location following the tombstones
+ assert_eq!(
+ locations.next(),
+ Ok(Some(LocationListEntry {
+ range: Range {
+ begin: 0x0201_1600,
+ end: 0x0201_1700,
+ },
+ data: Expression(EndianSlice::new(&[26, 0, 0, 0], LittleEndian)),
+ }))
+ );
+
// A location list end.
assert_eq!(locations.next(), Ok(None));
@@ -1102,6 +1203,7 @@ mod tests {
#[test]
fn test_location_list_32() {
+ let tombstone = !0u32 - 1;
let start = Label::new();
let first = Label::new();
#[rustfmt::skip]
@@ -1123,6 +1225,11 @@ mod tests {
// A location range that ends at -1.
.L32(0xffff_ffff).L32(0x0000_0000)
.L32(0).L32(0xffff_ffff).L16(4).L32(7)
+ // A normal location with tombstone.
+ .L32(tombstone).L32(tombstone).L16(4).L32(8)
+ // A base address selection with tombstone followed by a normal location.
+ .L32(0xffff_ffff).L32(tombstone)
+ .L32(0x10a00).L32(0x10b00).L16(4).L32(9)
// A location list end.
.L32(0).L32(0)
// Some extra data.
@@ -1232,6 +1339,7 @@ mod tests {
#[test]
fn test_location_list_64() {
+ let tombstone = !0u64 - 1;
let start = Label::new();
let first = Label::new();
#[rustfmt::skip]
@@ -1253,6 +1361,11 @@ mod tests {
// A location range that ends at -1.
.L64(0xffff_ffff_ffff_ffff).L64(0x0000_0000)
.L64(0).L64(0xffff_ffff_ffff_ffff).L16(4).L32(7)
+ // A normal location with tombstone.
+ .L64(tombstone).L64(tombstone).L16(4).L32(8)
+ // A base address selection with tombstone followed by a normal location.
+ .L64(0xffff_ffff_ffff_ffff).L64(tombstone)
+ .L64(0x10a00).L64(0x10b00).L16(4).L32(9)
// A location list end.
.L64(0).L64(0)
// Some extra data.
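The new `convert_raw` above filters tombstoned entries: it masks addresses to the unit's address size and treats an all-ones address (DWARF 5) or all-ones-minus-one (DWARF 4 and earlier) as a deleted entry. A small sketch of how those values come out for common address sizes; the helper name is ours, not gimli's:

    // Reproduces the mask/tombstone computation from `convert_raw` above.
    fn tombstone(address_size: u32, version: u16) -> u64 {
        let mask = !0u64 >> (64 - address_size * 8);
        if version <= 4 { mask - 1 } else { mask }
    }

    fn main() {
        assert_eq!(tombstone(4, 5), 0xffff_ffff);           // DWARF 5, 32-bit addresses
        assert_eq!(tombstone(4, 4), 0xffff_fffe);           // DWARF <= 4, 32-bit addresses
        assert_eq!(tombstone(8, 5), 0xffff_ffff_ffff_ffff); // DWARF 5, 64-bit addresses
    }

These are the same constants the updated tests use (`!0u32`/`!0u64` for the DWARF 5 `.debug_loclists` cases, `!0u32 - 1`/`!0u64 - 1` for the legacy `.debug_loc` cases).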
diff --git a/vendor/gimli/src/read/mod.rs b/vendor/gimli/src/read/mod.rs
index 3110957c2..b2828d5f9 100644
--- a/vendor/gimli/src/read/mod.rs
+++ b/vendor/gimli/src/read/mod.rs
@@ -214,6 +214,9 @@ mod index;
pub use self::index::*;
#[cfg(feature = "read")]
+mod lazy;
+
+#[cfg(feature = "read")]
mod line;
#[cfg(feature = "read")]
pub use self::line::*;
diff --git a/vendor/gimli/src/read/op.rs b/vendor/gimli/src/read/op.rs
index 88ea20297..670d1ad21 100644
--- a/vendor/gimli/src/read/op.rs
+++ b/vendor/gimli/src/read/op.rs
@@ -346,10 +346,7 @@ where
{
/// Return true if the piece is empty.
pub fn is_empty(&self) -> bool {
- match *self {
- Location::Empty => true,
- _ => false,
- }
+ matches!(*self, Location::Empty)
}
}
@@ -1225,7 +1222,6 @@ impl<R: Reader, S: EvaluationStorage<R>> Evaluation<R, S> {
self.stack.try_push(value).map_err(|_| Error::StackFull)
}
- #[allow(clippy::cyclomatic_complexity)]
fn evaluate_one_operation(&mut self) -> Result<OperationEvaluationResult<R>> {
let operation = Operation::parse(&mut self.pc, self.encoding)?;
@@ -2889,7 +2885,6 @@ mod tests {
result
}
- #[allow(clippy::too_many_arguments)]
fn check_eval_with_args<F>(
program: &[AssemblerEntry],
expect: Result<&[Piece<EndianSlice<LittleEndian>>]>,
diff --git a/vendor/gimli/src/read/rnglists.rs b/vendor/gimli/src/read/rnglists.rs
index d8d49042f..12e3e04ee 100644
--- a/vendor/gimli/src/read/rnglists.rs
+++ b/vendor/gimli/src/read/rnglists.rs
@@ -232,7 +232,7 @@ impl<R: Reader> RangeLists<R> {
let (mut input, format) = if unit_encoding.version <= 4 {
(self.debug_ranges.section.clone(), RangeListsFormat::Bare)
} else {
- (self.debug_rnglists.section.clone(), RangeListsFormat::RLE)
+ (self.debug_rnglists.section.clone(), RangeListsFormat::Rle)
};
input.skip(offset.0)?;
Ok(RawRngListIter::new(input, unit_encoding, format))
@@ -277,7 +277,7 @@ enum RangeListsFormat {
/// The bare range list format used before DWARF 5.
Bare,
/// The DW_RLE encoded range list format used in DWARF 5.
- RLE,
+ Rle,
}
/// A raw iterator over an address range list.
@@ -355,10 +355,10 @@ impl<T: ReaderOffset> RawRngListEntry<T> {
encoding: Encoding,
format: RangeListsFormat,
) -> Result<Option<Self>> {
- match format {
+ Ok(match format {
RangeListsFormat::Bare => {
let range = RawRange::parse(input, encoding.address_size)?;
- return Ok(if range.is_end() {
+ if range.is_end() {
None
} else if range.is_base_address(encoding.address_size) {
Some(RawRngListEntry::BaseAddress { addr: range.end })
@@ -367,9 +367,9 @@ impl<T: ReaderOffset> RawRngListEntry<T> {
begin: range.begin,
end: range.end,
})
- });
+ }
}
- RangeListsFormat::RLE => Ok(match constants::DwRle(input.read_u8()?) {
+ RangeListsFormat::Rle => match constants::DwRle(input.read_u8()?) {
constants::DW_RLE_end_of_list => None,
constants::DW_RLE_base_addressx => Some(RawRngListEntry::BaseAddressx {
addr: DebugAddrIndex(input.read_uleb128().and_then(R::Offset::from_u64)?),
@@ -400,8 +400,8 @@ impl<T: ReaderOffset> RawRngListEntry<T> {
_ => {
return Err(Error::InvalidAddressRange);
}
- }),
- }
+ },
+ })
}
}
@@ -489,45 +489,78 @@ impl<R: Reader> RngListIter<R> {
None => return Ok(None),
};
- let range = match raw_range {
- RawRngListEntry::BaseAddress { addr } => {
- self.base_address = addr;
- continue;
- }
- RawRngListEntry::BaseAddressx { addr } => {
- self.base_address = self.get_address(addr)?;
- continue;
- }
- RawRngListEntry::StartxEndx { begin, end } => {
- let begin = self.get_address(begin)?;
- let end = self.get_address(end)?;
- Range { begin, end }
- }
- RawRngListEntry::StartxLength { begin, length } => {
- let begin = self.get_address(begin)?;
- let end = begin + length;
- Range { begin, end }
- }
- RawRngListEntry::AddressOrOffsetPair { begin, end }
- | RawRngListEntry::OffsetPair { begin, end } => {
- let mut range = Range { begin, end };
- range.add_base_address(self.base_address, self.raw.encoding.address_size);
- range
- }
- RawRngListEntry::StartEnd { begin, end } => Range { begin, end },
- RawRngListEntry::StartLength { begin, length } => Range {
- begin,
- end: begin + length,
- },
- };
+ let range = self.convert_raw(raw_range)?;
+ if range.is_some() {
+ return Ok(range);
+ }
+ }
+ }
- if range.begin > range.end {
- self.raw.input.empty();
- return Err(Error::InvalidAddressRange);
+ /// Return the next raw range.
+ ///
+ /// The raw range should be passed to `convert_raw`.

+ #[doc(hidden)]
+ pub fn next_raw(&mut self) -> Result<Option<RawRngListEntry<R::Offset>>> {
+ self.raw.next()
+ }
+
+ /// Convert a raw range into a range, and update the state of the iterator.
+ ///
+ /// The raw range should have been obtained from `next_raw`.
+ #[doc(hidden)]
+ pub fn convert_raw(&mut self, raw_range: RawRngListEntry<R::Offset>) -> Result<Option<Range>> {
+ let mask = !0 >> (64 - self.raw.encoding.address_size * 8);
+ let tombstone = if self.raw.encoding.version <= 4 {
+ mask - 1
+ } else {
+ mask
+ };
+
+ let range = match raw_range {
+ RawRngListEntry::BaseAddress { addr } => {
+ self.base_address = addr;
+ return Ok(None);
+ }
+ RawRngListEntry::BaseAddressx { addr } => {
+ self.base_address = self.get_address(addr)?;
+ return Ok(None);
+ }
+ RawRngListEntry::StartxEndx { begin, end } => {
+ let begin = self.get_address(begin)?;
+ let end = self.get_address(end)?;
+ Range { begin, end }
+ }
+ RawRngListEntry::StartxLength { begin, length } => {
+ let begin = self.get_address(begin)?;
+ let end = begin.wrapping_add(length) & mask;
+ Range { begin, end }
+ }
+ RawRngListEntry::AddressOrOffsetPair { begin, end }
+ | RawRngListEntry::OffsetPair { begin, end } => {
+ if self.base_address == tombstone {
+ return Ok(None);
+ }
+ let mut range = Range { begin, end };
+ range.add_base_address(self.base_address, self.raw.encoding.address_size);
+ range
}
+ RawRngListEntry::StartEnd { begin, end } => Range { begin, end },
+ RawRngListEntry::StartLength { begin, length } => {
+ let end = begin.wrapping_add(length) & mask;
+ Range { begin, end }
+ }
+ };
+
+ if range.begin == tombstone {
+ return Ok(None);
+ }
- return Ok(Some(range));
+ if range.begin > range.end {
+ self.raw.input.empty();
+ return Err(Error::InvalidAddressRange);
}
+
+ Ok(Some(range))
}
}
@@ -553,8 +586,6 @@ pub(crate) struct RawRange {
impl RawRange {
/// Check if this is a range end entry.
- ///
- /// This will only occur for raw ranges.
#[inline]
pub fn is_end(&self) -> bool {
self.begin == 0 && self.end == 0
@@ -563,14 +594,13 @@ impl RawRange {
/// Check if this is a base address selection entry.
///
/// A base address selection entry changes the base address that subsequent
- /// range entries are relative to. This will only occur for raw ranges.
+ /// range entries are relative to.
#[inline]
pub fn is_base_address(&self, address_size: u8) -> bool {
self.begin == !0 >> (64 - address_size * 8)
}
/// Parse an address range entry from `.debug_ranges` or `.debug_loc`.
- #[doc(hidden)]
#[inline]
pub fn parse<R: Reader>(input: &mut R, address_size: u8) -> Result<RawRange> {
let begin = input.read_address(address_size)?;
@@ -610,6 +640,7 @@ mod tests {
#[test]
fn test_rnglists_32() {
+ let tombstone = !0u32;
let encoding = Encoding {
format: Format::Dwarf32,
version: 5,
@@ -619,7 +650,9 @@ mod tests {
.L32(0x0300_0000)
.L32(0x0301_0300)
.L32(0x0301_0400)
- .L32(0x0301_0500);
+ .L32(0x0301_0500)
+ .L32(tombstone)
+ .L32(0x0301_0600);
let buf = section.get_contents().unwrap();
let debug_addr = &DebugAddr::from(EndianSlice::new(&buf, LittleEndian));
let debug_addr_base = DebugAddrBase(0);
@@ -637,7 +670,7 @@ mod tests {
.L8(0)
.L32(0)
.mark(&first)
- // OffsetPair
+ // An OffsetPair using the unit base address.
.L8(4).uleb(0x10200).uleb(0x10300)
// A base address selection followed by an OffsetPair.
.L8(5).L32(0x0200_0000)
@@ -663,6 +696,25 @@ mod tests {
.L8(2).uleb(1).uleb(2)
// A StartxLength
.L8(3).uleb(3).uleb(0x100)
+
+ // Tombstone entries, all of which should be ignored.
+ // A BaseAddressx that is a tombstone.
+ .L8(1).uleb(4)
+ .L8(4).uleb(0x11100).uleb(0x11200)
+ // A BaseAddress that is a tombstone.
+ .L8(5).L32(tombstone)
+ .L8(4).uleb(0x11300).uleb(0x11400)
+ // A StartxEndx that is a tombstone.
+ .L8(2).uleb(4).uleb(5)
+ // A StartxLength that is a tombstone.
+ .L8(3).uleb(4).uleb(0x100)
+ // A StartEnd that is a tombstone.
+ .L8(6).L32(tombstone).L32(0x201_1500)
+ // A StartLength that is a tombstone.
+ .L8(7).L32(tombstone).uleb(0x100)
+ // A StartEnd (not ignored)
+ .L8(6).L32(0x201_1600).L32(0x201_1700)
+
// A range end.
.L8(0)
// Some extra data.
@@ -784,6 +836,15 @@ mod tests {
}))
);
+ // A StartEnd range following the tombstones
+ assert_eq!(
+ ranges.next(),
+ Ok(Some(Range {
+ begin: 0x0201_1600,
+ end: 0x0201_1700,
+ }))
+ );
+
// A range end.
assert_eq!(ranges.next(), Ok(None));
@@ -802,6 +863,7 @@ mod tests {
#[test]
fn test_rnglists_64() {
+ let tombstone = !0u64;
let encoding = Encoding {
format: Format::Dwarf64,
version: 5,
@@ -811,7 +873,9 @@ mod tests {
.L64(0x0300_0000)
.L64(0x0301_0300)
.L64(0x0301_0400)
- .L64(0x0301_0500);
+ .L64(0x0301_0500)
+ .L64(tombstone)
+ .L64(0x0301_0600);
let buf = section.get_contents().unwrap();
let debug_addr = &DebugAddr::from(EndianSlice::new(&buf, LittleEndian));
let debug_addr_base = DebugAddrBase(0);
@@ -830,7 +894,7 @@ mod tests {
.L8(0)
.L32(0)
.mark(&first)
- // OffsetPair
+ // An OffsetPair using the unit base address.
.L8(4).uleb(0x10200).uleb(0x10300)
// A base address selection followed by an OffsetPair.
.L8(5).L64(0x0200_0000)
@@ -856,6 +920,25 @@ mod tests {
.L8(2).uleb(1).uleb(2)
// A StartxLength
.L8(3).uleb(3).uleb(0x100)
+
+ // Tombstone entries, all of which should be ignored.
+ // A BaseAddressx that is a tombstone.
+ .L8(1).uleb(4)
+ .L8(4).uleb(0x11100).uleb(0x11200)
+ // A BaseAddress that is a tombstone.
+ .L8(5).L64(tombstone)
+ .L8(4).uleb(0x11300).uleb(0x11400)
+ // A StartxEndx that is a tombstone.
+ .L8(2).uleb(4).uleb(5)
+ // A StartxLength that is a tombstone.
+ .L8(3).uleb(4).uleb(0x100)
+ // A StartEnd that is a tombstone.
+ .L8(6).L64(tombstone).L64(0x201_1500)
+ // A StartLength that is a tombstone.
+ .L8(7).L64(tombstone).uleb(0x100)
+ // A StartEnd (not ignored)
+ .L8(6).L64(0x201_1600).L64(0x201_1700)
+
// A range end.
.L8(0)
// Some extra data.
@@ -977,6 +1060,15 @@ mod tests {
}))
);
+ // A StartEnd range following the tombstones
+ assert_eq!(
+ ranges.next(),
+ Ok(Some(Range {
+ begin: 0x0201_1600,
+ end: 0x0201_1700,
+ }))
+ );
+
// A range end.
assert_eq!(ranges.next(), Ok(None));
@@ -1027,6 +1119,7 @@ mod tests {
#[test]
fn test_ranges_32() {
+ let tombstone = !0u32 - 1;
let start = Label::new();
let first = Label::new();
#[rustfmt::skip]
@@ -1048,6 +1141,11 @@ mod tests {
// A range that ends at -1.
.L32(0xffff_ffff).L32(0x0000_0000)
.L32(0).L32(0xffff_ffff)
+ // A normal range with tombstone.
+ .L32(tombstone).L32(tombstone)
+ // A base address selection with tombstone followed by a normal range.
+ .L32(0xffff_ffff).L32(tombstone)
+ .L32(0x10a00).L32(0x10b00)
// A range end.
.L32(0).L32(0)
// Some extra data.
@@ -1139,6 +1237,7 @@ mod tests {
#[test]
fn test_ranges_64() {
+ let tombstone = !0u64 - 1;
let start = Label::new();
let first = Label::new();
#[rustfmt::skip]
@@ -1160,6 +1259,11 @@ mod tests {
// A range that ends at -1.
.L64(0xffff_ffff_ffff_ffff).L64(0x0000_0000)
.L64(0).L64(0xffff_ffff_ffff_ffff)
+ // A normal range with tombstone.
+ .L64(tombstone).L64(tombstone)
+ // A base address selection with tombstone followed by a normal range.
+ .L64(0xffff_ffff_ffff_ffff).L64(tombstone)
+ .L64(0x10a00).L64(0x10b00)
// A range end.
.L64(0).L64(0)
// Some extra data.
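The `StartLength`/`StartxLength` arms above now compute the end address as `begin.wrapping_add(length) & mask` rather than `begin + length`, so a tombstoned `begin` of all-ones wraps instead of overflowing in debug builds; the entry is then discarded by the `range.begin == tombstone` check. A minimal sketch of that arithmetic for 32-bit addresses:

    fn main() {
        let address_size = 4u32;
        let mask = !0u64 >> (64 - address_size * 8);
        let tombstone = mask; // DWARF 5

        // Tombstoned entry: wraps instead of panicking, then gets filtered out.
        let begin = tombstone;
        let end = begin.wrapping_add(0x100) & mask;
        assert_eq!(end, 0xff);
        assert_eq!(begin, tombstone); // `convert_raw` returns Ok(None) for this entry

        // Normal entry: the mask is a no-op.
        let end = 0x0201_1600u64.wrapping_add(0x100) & mask;
        assert_eq!(end, 0x0201_1700);
    }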
diff --git a/vendor/gimli/src/read/unit.rs b/vendor/gimli/src/read/unit.rs
index 670e55efd..672435330 100644
--- a/vendor/gimli/src/read/unit.rs
+++ b/vendor/gimli/src/read/unit.rs
@@ -883,7 +883,6 @@ where
}
/// Return the input buffer after the last attribute.
- #[allow(clippy::inline_always)]
#[inline(always)]
fn after_attrs(&self) -> Result<R> {
if let Some(attrs_len) = self.attrs_len.get() {
@@ -892,7 +891,7 @@ where
Ok(input)
} else {
let mut attrs = self.attrs();
- while let Some(_) = attrs.next()? {}
+ while attrs.next()?.is_some() {}
Ok(attrs.input)
}
}
@@ -912,7 +911,6 @@ where
}
/// Parse an entry. Returns `Ok(None)` for null entries.
- #[allow(clippy::inline_always)]
#[inline(always)]
fn parse(
input: &mut R,
@@ -1143,8 +1141,6 @@ impl<R: Reader> Attribute<R> {
/// name.
///
/// See "Table 7.5: Attribute encodings" and "Table 7.6: Attribute form encodings".
- #[allow(clippy::cyclomatic_complexity)]
- #[allow(clippy::match_same_arms)]
pub fn value(&self) -> AttributeValue<R> {
// Table 7.5 shows the possible attribute classes for each name.
// Table 7.6 shows the possible attribute classes for each form.
@@ -1980,7 +1976,7 @@ fn allow_section_offset(name: constants::DwAt, version: u16) -> bool {
}
}
-pub(crate) fn parse_attribute<'unit, R: Reader>(
+pub(crate) fn parse_attribute<R: Reader>(
input: &mut R,
encoding: Encoding,
spec: AttributeSpecification,
@@ -2205,7 +2201,7 @@ pub(crate) fn parse_attribute<'unit, R: Reader>(
}
}
-pub(crate) fn skip_attributes<'unit, R: Reader>(
+pub(crate) fn skip_attributes<R: Reader>(
input: &mut R,
encoding: Encoding,
specs: &[AttributeSpecification],
@@ -2294,7 +2290,6 @@ impl<'abbrev, 'entry, 'unit, R: Reader> AttrsIter<'abbrev, 'entry, 'unit, R> {
/// Returns `None` when iteration is finished. If an error
/// occurs while parsing the next attribute, then this error
/// is returned, and all subsequent calls return `None`.
- #[allow(clippy::inline_always)]
#[inline(always)]
pub fn next(&mut self) -> Result<Option<Attribute<R>>> {
if self.attributes.is_empty() {
@@ -2647,7 +2642,6 @@ impl<'abbrev, 'unit, R: Reader> EntriesCursor<'abbrev, 'unit, R> {
/// println!("The first entry with no children is {:?}",
/// first_entry_with_no_children.unwrap());
/// ```
- #[allow(clippy::type_complexity)]
pub fn next_dfs(
&mut self,
) -> Result<Option<(isize, &DebuggingInformationEntry<'abbrev, 'unit, R>)>> {
@@ -4213,7 +4207,6 @@ mod tests {
#[test]
fn test_attribute_udata_sdata_value() {
- #[allow(clippy::type_complexity)]
let tests: &[(
AttributeValue<EndianSlice<LittleEndian>>,
Option<u64>,
diff --git a/vendor/gimli/src/read/util.rs b/vendor/gimli/src/read/util.rs
index 16eafdde4..747418bab 100644
--- a/vendor/gimli/src/read/util.rs
+++ b/vendor/gimli/src/read/util.rs
@@ -9,7 +9,8 @@ use core::ptr;
use core::slice;
mod sealed {
- // SAFETY: Implementer must not modify the content in storage.
+ /// # Safety
+ /// Implementer must not modify the content in storage.
pub unsafe trait Sealed {
type Storage;
@@ -161,7 +162,7 @@ impl<A: ArrayLike> ArrayVec<A> {
} else {
self.len -= 1;
// SAFETY: this element is valid and we "forget" it by setting the length.
- Some(unsafe { A::as_slice(&mut self.storage)[self.len].as_ptr().read() })
+ Some(unsafe { A::as_slice(&self.storage)[self.len].as_ptr().read() })
}
}
diff --git a/vendor/gimli/src/write/line.rs b/vendor/gimli/src/write/line.rs
index 310170d9a..c88b735bc 100644
--- a/vendor/gimli/src/write/line.rs
+++ b/vendor/gimli/src/write/line.rs
@@ -94,8 +94,6 @@ impl LineProgram {
/// Panics if `comp_dir` is empty or contains a null byte.
///
/// Panics if `comp_file` is empty or contains a null byte.
- #[allow(clippy::too_many_arguments)]
- #[allow(clippy::new_ret_no_self)]
pub fn new(
encoding: Encoding,
line_encoding: LineEncoding,
@@ -261,7 +259,7 @@ impl LineProgram {
} else {
let entry = self.files.entry(key);
let index = entry.index();
- entry.or_insert(FileInfo::default());
+ entry.or_default();
index
};
FileId::new(index)
@@ -1723,7 +1721,6 @@ mod tests {
// Test that the address/line advance is correct. We don't test for optimality.
#[test]
- #[allow(clippy::useless_vec)]
fn test_advance() {
let encoding = Encoding {
format: Format::Dwarf32,
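The `entry.or_insert(FileInfo::default())` to `entry.or_default()` change above is the usual entry-API shorthand for inserting a default value only when the key is absent. The same idiom shown with std's `HashMap` (the key and value types here are ours, purely illustrative):

    use std::collections::HashMap;

    fn main() {
        let mut line_counts: HashMap<&str, u32> = HashMap::new();
        // Insert the default (0) only if the key is missing, then update it.
        *line_counts.entry("file.rs").or_default() += 1;
        *line_counts.entry("file.rs").or_default() += 1;
        assert_eq!(line_counts["file.rs"], 2);
    }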
diff --git a/vendor/gimli/src/write/loc.rs b/vendor/gimli/src/write/loc.rs
index ea0ecb1cf..6dfe45a6c 100644
--- a/vendor/gimli/src/write/loc.rs
+++ b/vendor/gimli/src/write/loc.rs
@@ -436,6 +436,7 @@ mod tests {
};
use crate::LittleEndian;
use std::collections::HashMap;
+ use std::sync::Arc;
#[test]
fn test_loc_list() {
@@ -508,7 +509,7 @@ mod tests {
DebugInfoOffset(0).into(),
read::EndianSlice::default(),
),
- abbreviations: read::Abbreviations::default(),
+ abbreviations: Arc::new(read::Abbreviations::default()),
name: None,
comp_dir: None,
low_pc: 0,
diff --git a/vendor/gimli/src/write/op.rs b/vendor/gimli/src/write/op.rs
index c70eec2dd..287083b3e 100644
--- a/vendor/gimli/src/write/op.rs
+++ b/vendor/gimli/src/write/op.rs
@@ -279,12 +279,8 @@ impl Expression {
}
offsets.push(offset);
for (operation, offset) in self.operations.iter().zip(offsets.iter().copied()) {
- let refs = match refs {
- Some(ref mut refs) => Some(&mut **refs),
- None => None,
- };
debug_assert_eq!(w.len(), offset);
- operation.write(w, refs, encoding, unit_offsets, &offsets)?;
+ operation.write(w, refs.as_deref_mut(), encoding, unit_offsets, &offsets)?;
}
Ok(())
}
@@ -630,7 +626,7 @@ impl Operation {
}
w.write_uleb128(entry_offset(base)?)?;
w.write_udata(value.len() as u64, 1)?;
- w.write(&value)?;
+ w.write(value)?;
}
Operation::FrameOffset(offset) => {
w.write_u8(constants::DW_OP_fbreg.0)?;
@@ -770,7 +766,7 @@ impl Operation {
Operation::ImplicitValue(ref data) => {
w.write_u8(constants::DW_OP_implicit_value.0)?;
w.write_uleb128(data.len() as u64)?;
- w.write(&data)?;
+ w.write(data)?;
}
Operation::ImplicitPointer { entry, byte_offset } => {
if encoding.version >= 5 {
@@ -872,7 +868,7 @@ pub(crate) mod convert {
let mut offsets = Vec::new();
let mut offset = 0;
let mut from_operations = from_expression.clone().operations(encoding);
- while let Some(_) = from_operations.next()? {
+ while from_operations.next()?.is_some() {
offsets.push(offset);
offset = from_operations.offset_from(&from_expression);
}
@@ -1071,6 +1067,7 @@ mod tests {
};
use crate::LittleEndian;
use std::collections::HashMap;
+ use std::sync::Arc;
#[test]
fn test_operation() {
@@ -1578,7 +1575,7 @@ mod tests {
DebugInfoOffset(0).into(),
read::EndianSlice::new(&[], LittleEndian),
),
- abbreviations: read::Abbreviations::default(),
+ abbreviations: Arc::new(read::Abbreviations::default()),
name: None,
comp_dir: None,
low_pc: 0,
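The `refs.as_deref_mut()` change above replaces a hand-written reborrow of an `Option<&mut Vec<_>>` so the option can be handed to `Operation::write` on every loop iteration without being moved. The same pattern in isolation, with function names of our own choosing:

    fn record(refs: Option<&mut Vec<u32>>, value: u32) {
        if let Some(refs) = refs {
            refs.push(value);
        }
    }

    fn record_all(mut refs: Option<&mut Vec<u32>>) {
        for value in 0..3 {
            // Without the reborrow, the first iteration would move `refs`.
            record(refs.as_deref_mut(), value);
        }
    }

    fn main() {
        let mut out = Vec::new();
        record_all(Some(&mut out));
        assert_eq!(out, [0, 1, 2]);
    }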
diff --git a/vendor/gimli/src/write/range.rs b/vendor/gimli/src/write/range.rs
index b44ce1b7b..c707e1eab 100644
--- a/vendor/gimli/src/write/range.rs
+++ b/vendor/gimli/src/write/range.rs
@@ -315,6 +315,7 @@ mod tests {
};
use crate::LittleEndian;
use std::collections::HashMap;
+ use std::sync::Arc;
#[test]
fn test_range() {
@@ -375,7 +376,7 @@ mod tests {
DebugInfoOffset(0).into(),
read::EndianSlice::default(),
),
- abbreviations: read::Abbreviations::default(),
+ abbreviations: Arc::new(read::Abbreviations::default()),
name: None,
comp_dir: None,
low_pc: 0,
diff --git a/vendor/gimli/src/write/section.rs b/vendor/gimli/src/write/section.rs
index e8f3378cd..db5eb9a28 100644
--- a/vendor/gimli/src/write/section.rs
+++ b/vendor/gimli/src/write/section.rs
@@ -111,7 +111,7 @@ impl<W: Writer + Clone> Sections<W> {
debug_loclists: DebugLocLists(section.clone()),
debug_str: DebugStr(section.clone()),
debug_frame: DebugFrame(section.clone()),
- eh_frame: EhFrame(section.clone()),
+ eh_frame: EhFrame(section),
debug_info_refs: Vec::new(),
debug_loc_refs: Vec::new(),
debug_loclists_refs: Vec::new(),
diff --git a/vendor/gimli/src/write/unit.rs b/vendor/gimli/src/write/unit.rs
index bf85ff421..23027bc2c 100644
--- a/vendor/gimli/src/write/unit.rs
+++ b/vendor/gimli/src/write/unit.rs
@@ -365,7 +365,6 @@ impl Unit {
&mut unit_refs,
self,
&mut offsets,
- abbrevs,
line_program,
line_strings,
strings,
@@ -605,7 +604,6 @@ impl DebuggingInformationEntry {
}
/// Write the entry to the given sections.
- #[allow(clippy::too_many_arguments)]
fn write<W: Writer>(
&self,
w: &mut DebugInfo<W>,
@@ -613,7 +611,6 @@ impl DebuggingInformationEntry {
unit_refs: &mut Vec<(DebugInfoOffset, UnitEntryId)>,
unit: &Unit,
offsets: &mut UnitOffsets,
- abbrevs: &mut AbbreviationTable,
line_program: Option<DebugLineOffset>,
line_strings: &DebugLineStrOffsets,
strings: &DebugStrOffsets,
@@ -654,7 +651,6 @@ impl DebuggingInformationEntry {
unit_refs,
unit,
offsets,
- abbrevs,
line_program,
line_strings,
strings,
@@ -1128,7 +1124,6 @@ impl AttributeValue {
}
/// Write the attribute value to the given sections.
- #[allow(clippy::cyclomatic_complexity, clippy::too_many_arguments)]
fn write<W: Writer>(
&self,
w: &mut DebugInfo<W>,
@@ -1155,7 +1150,7 @@ impl AttributeValue {
AttributeValue::Block(ref val) => {
debug_assert_form!(constants::DW_FORM_block);
w.write_uleb128(val.len() as u64)?;
- w.write(&val)?;
+ w.write(val)?;
}
AttributeValue::Data1(val) => {
debug_assert_form!(constants::DW_FORM_data1);
@@ -1308,7 +1303,7 @@ impl AttributeValue {
}
AttributeValue::String(ref val) => {
debug_assert_form!(constants::DW_FORM_string);
- w.write(&val)?;
+ w.write(val)?;
w.write_u8(0)?;
}
AttributeValue::Encoding(val) => {
@@ -1558,7 +1553,6 @@ pub(crate) mod convert {
/// Create a unit by reading the data in the input sections.
///
/// Does not add entry attributes.
- #[allow(clippy::too_many_arguments)]
pub(crate) fn convert_entries<R: Reader<Offset = usize>>(
from_header: read::UnitHeader<R>,
unit_id: UnitId,
@@ -1931,9 +1925,9 @@ mod tests {
use crate::LittleEndian;
use std::collections::HashMap;
use std::mem;
+ use std::sync::Arc;
#[test]
- #[allow(clippy::cyclomatic_complexity)]
fn test_unit_table() {
let mut strings = StringTable::default();
@@ -2542,7 +2536,7 @@ mod tests {
let unit = read::Unit {
header: from_unit,
- abbreviations: read::Abbreviations::default(),
+ abbreviations: Arc::new(read::Abbreviations::default()),
name: None,
comp_dir: None,
low_pc: 0,
@@ -2578,7 +2572,6 @@ mod tests {
}
#[test]
- #[allow(clippy::cyclomatic_complexity)]
fn test_unit_ref() {
let mut units = UnitTable::default();
let unit_id1 = units.add(Unit::new(
@@ -3015,7 +3008,7 @@ mod tests {
let unit = read::Unit {
header: from_unit,
- abbreviations: read::Abbreviations::default(),
+ abbreviations: Arc::new(read::Abbreviations::default()),
name: None,
comp_dir: None,
low_pc: 0,
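The repeated `read::Abbreviations::default()` to `Arc::new(read::Abbreviations::default())` edits in these write-side tests reflect that `read::Unit` now stores its abbreviation table behind an `Arc`, presumably so the cache added in `read/lazy.rs` above can hand one parsed table to many units. A toy sketch of that sharing, with struct names that are ours rather than gimli's:

    use std::sync::Arc;

    struct Abbreviations { entries: Vec<u64> }
    struct Unit { abbreviations: Arc<Abbreviations> }

    fn main() {
        let abbrevs = Arc::new(Abbreviations { entries: vec![1, 2, 3] });
        // Both units refer to the same parsed table; cloning the Arc is just
        // a refcount bump, not a deep copy.
        let a = Unit { abbreviations: Arc::clone(&abbrevs) };
        let b = Unit { abbreviations: Arc::clone(&abbrevs) };
        assert!(Arc::ptr_eq(&a.abbreviations, &b.abbreviations));
        assert_eq!(b.abbreviations.entries.len(), 3);
    }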
diff --git a/vendor/gimli/src/write/writer.rs b/vendor/gimli/src/write/writer.rs
index 0785d1686..1ce3641fc 100644
--- a/vendor/gimli/src/write/writer.rs
+++ b/vendor/gimli/src/write/writer.rs
@@ -93,9 +93,7 @@ pub trait Writer {
constants::DW_EH_PE_sdata2 => self.write_sdata(val as i64, 2),
constants::DW_EH_PE_sdata4 => self.write_sdata(val as i64, 4),
constants::DW_EH_PE_sdata8 => self.write_sdata(val as i64, 8),
- _ => {
- return Err(Error::UnsupportedPointerEncoding(format));
- }
+ _ => Err(Error::UnsupportedPointerEncoding(format)),
}
}
@@ -334,7 +332,6 @@ mod tests {
use std::{i64, u64};
#[test]
- #[allow(clippy::cyclomatic_complexity)]
fn test_writer() {
let mut w = write::EndianVec::new(LittleEndian);
w.write_address(Address::Constant(0x1122_3344), 4).unwrap();
diff --git a/vendor/hermit-abi/.cargo-checksum.json b/vendor/hermit-abi/.cargo-checksum.json
index 3ccd7931e..cf3c5dd1a 100644
--- a/vendor/hermit-abi/.cargo-checksum.json
+++ b/vendor/hermit-abi/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.toml":"6fdf8fefd46c33cc6492ca69c19c9f49f0704401f3b1aaf0b3fbdbb828d2ddcf","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"322fadd63e558e5a10caf980cbedf83ac1546ba40fd992f54492e21ce54205af","src/errno.rs":"1c0680ead2ddf26b12d34bd7fa3e1dab386df761d6ac1901889ece26682dc465","src/lib.rs":"f4a52715b97d947e3768368c3d8882d0d049a89e01600c4de396f3ffcc7911b9","src/tcplistener.rs":"1fb1c0c232d4f24afb6cff63a7541d00029b7159da8d25b2eb257dff078940a0","src/tcpstream.rs":"fce8a598c6331b82e40982eda079d758be324b8941bf76f1031cea8d01632823"},"package":"fed44880c466736ef9a5c5b5facefb5ed0785676d0c02d612db14e54f0d84286"} \ No newline at end of file
+{"files":{"Cargo.toml":"4ffc8dbf9b604af97a6da408705aab39a38ad95c89f617b7edba629f4e759456","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"322fadd63e558e5a10caf980cbedf83ac1546ba40fd992f54492e21ce54205af","src/errno.rs":"1c0680ead2ddf26b12d34bd7fa3e1dab386df761d6ac1901889ece26682dc465","src/lib.rs":"f4a52715b97d947e3768368c3d8882d0d049a89e01600c4de396f3ffcc7911b9","src/net.rs":"b0c15011336f9469859ff537a1803c004b0f41c81a967d97dbaff5bb75c71f14","src/net_old.rs":"6cec0b9b50a0602848df05ea64e3fa7cf63be4ea40a634f64211e4a5463068f3","src/tcplistener.rs":"1fb1c0c232d4f24afb6cff63a7541d00029b7159da8d25b2eb257dff078940a0","src/tcpstream.rs":"fce8a598c6331b82e40982eda079d758be324b8941bf76f1031cea8d01632823"},"package":"856b5cb0902c2b6d65d5fd97dfa30f9b70c7538e770b98eab5ed52d8db923e01"} \ No newline at end of file
diff --git a/vendor/hermit-abi/Cargo.toml b/vendor/hermit-abi/Cargo.toml
index 6ec8e2aa7..30bb616ed 100644
--- a/vendor/hermit-abi/Cargo.toml
+++ b/vendor/hermit-abi/Cargo.toml
@@ -12,9 +12,13 @@
[package]
edition = "2021"
name = "hermit-abi"
-version = "0.3.1"
+version = "0.3.0"
authors = ["Stefan Lankes"]
-description = "Hermit system calls definitions."
+description = """
+hermit-abi is a small interface to call functions from the unikernel RustyHermit.
+It is used to build the target `x86_64-unknown-hermit`.
+"""
+documentation = "https://hermitcore.github.io/rusty-hermit/hermit_abi"
readme = "README.md"
keywords = [
"unikernel",
@@ -25,6 +29,10 @@ license = "MIT OR Apache-2.0"
repository = "https://github.com/hermitcore/rusty-hermit"
resolver = "1"
+[package.metadata.docs.rs]
+features = ["docs"]
+default-target = "x86_64-unknown-hermit"
+
[dependencies.alloc]
version = "1.0.0"
optional = true
@@ -41,6 +49,7 @@ package = "rustc-std-workspace-core"
[features]
default = []
+docs = []
rustc-dep-of-std = [
"core",
"alloc",
diff --git a/vendor/hermit-abi/src/net.rs b/vendor/hermit-abi/src/net.rs
new file mode 100644
index 000000000..05ee2ca58
--- /dev/null
+++ b/vendor/hermit-abi/src/net.rs
@@ -0,0 +1,232 @@
+#![allow(nonstandard_style)]
+
+pub const AF_INET: i32 = 0;
+pub const AF_INET6: i32 = 1;
+pub const IPPROTO_IP: i32 = 0;
+pub const IPPROTO_IPV6: i32 = 41;
+pub const IPPROTO_TCP: i32 = 6;
+pub const IPV6_ADD_MEMBERSHIP: i32 = 12;
+pub const IPV6_DROP_MEMBERSHIP: i32 = 13;
+pub const IPV6_MULTICAST_LOOP: i32 = 19;
+pub const IPV6_V6ONLY: i32 = 27;
+pub const IP_TTL: i32 = 2;
+pub const IP_MULTICAST_TTL: i32 = 5;
+pub const IP_MULTICAST_LOOP: i32 = 7;
+pub const IP_ADD_MEMBERSHIP: i32 = 3;
+pub const IP_DROP_MEMBERSHIP: i32 = 4;
+pub const SHUT_RD: i32 = 0;
+pub const SHUT_RDWR: i32 = 2;
+pub const SHUT_WR: i32 = 1;
+pub const SOCK_DGRAM: i32 = 2;
+pub const SOCK_STREAM: i32 = 1;
+pub const SOL_SOCKET: i32 = 4095;
+pub const SO_BROADCAST: i32 = 32;
+pub const SO_ERROR: i32 = 4103;
+pub const SO_RCVTIMEO: i32 = 4102;
+pub const SO_REUSEADDR: i32 = 4;
+pub const SO_SNDTIMEO: i32 = 4101;
+pub const SO_LINGER: i32 = 128;
+pub const TCP_NODELAY: i32 = 1;
+pub const MSG_PEEK: i32 = 1;
+pub const FIONBIO: i32 = 0x8008667eu32 as i32;
+pub const EAI_NONAME: i32 = -2200;
+pub const EAI_SERVICE: i32 = -2201;
+pub const EAI_FAIL: i32 = -2202;
+pub const EAI_MEMORY: i32 = -2203;
+pub const EAI_FAMILY: i32 = -2204;
+pub type sa_family_t = u8;
+pub type socklen_t = u32;
+pub type in_addr_t = u32;
+pub type in_port_t = u16;
+pub type nfds_t = usize;
+
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct in_addr {
+ pub s_addr: u32,
+}
+
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct in6_addr {
+ pub s6_addr: [u8; 16],
+}
+
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct sockaddr {
+ pub sa_len: u8,
+ pub sa_family: sa_family_t,
+ pub sa_data: [u8; 14],
+}
+
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct sockaddr_in {
+ pub sin_len: u8,
+ pub sin_family: sa_family_t,
+ pub sin_port: in_port_t,
+ pub sin_addr: in_addr,
+ pub sin_zero: [u8; 8],
+}
+
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct sockaddr_in6 {
+ pub sin6_family: sa_family_t,
+ pub sin6_port: in_port_t,
+ pub sin6_addr: in6_addr,
+ pub sin6_flowinfo: u32,
+ pub sin6_scope_id: u32,
+}
+
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct ip_mreq {
+ pub imr_multiaddr: in_addr,
+ pub imr_interface: in_addr,
+}
+
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct ipv6_mreq {
+ pub ipv6mr_multiaddr: in6_addr,
+ pub ipv6mr_interface: u32,
+}
+
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct addrinfo {
+ pub ai_flags: i32,
+ pub ai_family: i32,
+ pub ai_socktype: i32,
+ pub ai_protocol: i32,
+ pub ai_addrlen: socklen_t,
+ pub ai_addr: *mut sockaddr,
+ pub ai_canonname: *mut u8,
+ pub ai_next: *mut addrinfo,
+}
+
+extern "C" {
+ #[link_name = "sys_accept"]
+ pub fn accept(s: i32, addr: *mut sockaddr, addrlen: *mut socklen_t) -> i32;
+
+ #[link_name = "sys_bind"]
+ pub fn bind(s: i32, name: *const sockaddr, namelen: socklen_t) -> i32;
+
+ #[link_name = "sys_connect"]
+ pub fn connect(s: i32, name: *const sockaddr, namelen: socklen_t) -> i32;
+
+ #[link_name = "sys_close"]
+ pub fn close(s: i32) -> i32;
+
+ #[link_name = "sys_dup"]
+ pub fn dup(s: i32) -> i32;
+
+ #[link_name = "sys:getpeername"]
+ pub fn getpeername(s: i32, name: *mut sockaddr, namelen: *mut socklen_t) -> i32;
+
+ #[link_name = "sys_getsockname"]
+ pub fn getsockname(s: i32, name: *mut sockaddr, namelen: *mut socklen_t) -> i32;
+
+ #[link_name = "sys_getsockopt"]
+ pub fn getsockopt(
+ s: i32,
+ level: i32,
+ optname: i32,
+ optval: *mut c_void,
+ optlen: *mut socklen_t,
+ ) -> i32;
+
+ #[link_name = "sys_setsockopt"]
+ pub fn setsockopt(
+ s: i32,
+ level: i32,
+ optname: i32,
+ optval: *const c_void,
+ optlen: socklen_t,
+ ) -> i32;
+
+ #[link_name = "sys_ioctl"]
+ pub fn ioctl(s: i32, cmd: c_long, argp: *mut c_void) -> i32;
+
+ #[link_name = "sys_listen"]
+ pub fn listen(s: i32, backlog: c_int) -> i32;
+
+ #[link_name = "sys_poll"]
+ pub fn pollfd(fds: *mut pollfd, nfds: nfds_t, timeout: i32);
+
+ #[link_name = "sys_recv"]
+ pub fn recv(s: c_int, mem: *mut c_void, len: size_t, flags: c_int) -> isize;
+
+ #[link_name = "sys_read"]
+ pub fn read(s: c_int, mem: *mut c_void, len: size_t) -> isize;
+
+ #[link_name = "sys_readv"]
+ pub fn readv(s: c_int, bufs: *const iovec, bufcnt: c_int) -> isize;
+
+ #[link_name = "sys_recvfrom"]
+ pub fn recvfrom(
+ s: c_int,
+ mem: *mut c_void,
+ len: size_t,
+ flags: c_int,
+ from: *mut sockaddr,
+ fromlen: *mut socklen_t,
+ ) -> isize;
+
+ #[link_name = "sys_send"]
+ pub fn send(s: i32, mem: *const c_void, len: usize, flags: i32) -> isize;
+
+ #[link_name = "sys_sendmsg"]
+ pub fn sendmsg(s: c_int, message: *const msghdr, flags: c_int) -> isize;
+
+ #[link_name = "sys_sendto"]
+ pub fn sendto(
+ s: c_int,
+ mem: *const c_void,
+ len: size_t,
+ flags: c_int,
+ to: *const sockaddr,
+ tolen: socklen_t,
+ ) -> ssize_t;
+
+ #[link_name = "sys_shutdown"]
+ pub fn shutdown(s: i32, how: i32) -> i32;
+
+ #[link_name = "sys_socket"]
+ pub fn socket(domain: i32, type_: i32, protocol: i32) -> i32;
+
+ #[link_name = "sys_write"]
+ pub fn write(s: c_int, mem: *const c_void, len: size_t) -> ssize_t;
+
+ #[link_name = "sys_writev"]
+ pub fn writev(s: c_int, bufs: *const iovec, bufcnt: c_int) -> ssize_t;
+
+ #[link_name = "sys_freeaddrinfo"]
+ pub fn freeaddrinfo(ai: *mut addrinfo);
+
+ #[link_name = "sys_getaddrinfo"]
+ pub fn getaddrinfo(
+ nodename: *const u8,
+ servname: *const u8,
+ hints: *const addrinfo,
+ res: *mut *mut addrinfo,
+ ) -> i32;
+
+ #[link_name = "sys_select"]
+ pub fn select(
+ maxfdp1: i32,
+ readset: *mut fd_set,
+ writeset: *mut fd_set,
+ exceptset: *mut fd_set,
+ timeout: *mut timeval,
+ ) -> i32;
+
+ #[link_name = "sys_pool"]
+ pub fn poll(
+ fds: *mut pollfd,
+ nfds: nfds_t,
+ timeout: i32
+ ) -> i32;
+}
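A hedged usage sketch for the bindings in the new `net.rs` above, assuming the crate exposes the module as `hermit_abi::net` (the updated `lib.rs` is not shown in this diff) and that the code is built for a Hermit target; error handling is elided:

    use hermit_abi::net::{close, socket, AF_INET, IPPROTO_TCP, SOCK_STREAM};

    fn main() {
        // These are raw unikernel system calls, so the whole block is unsafe.
        unsafe {
            let fd = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP);
            if fd >= 0 {
                close(fd);
            }
        }
    }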
diff --git a/vendor/hermit-abi/src/net_old.rs b/vendor/hermit-abi/src/net_old.rs
new file mode 100644
index 000000000..93bfd55f0
--- /dev/null
+++ b/vendor/hermit-abi/src/net_old.rs
@@ -0,0 +1,302 @@
+#![allow(nonstandard_style)]
+use crate::errno::EINVAL;
+use crate::FileDescriptor;
+use libc::{c_char, c_int, c_uint, c_void, size_t, ssize_t};
+
+extern "C" {
+ fn sys_hermit_socket(domain: i32, type_: i32, protocol: i32) -> FileDescriptor;
+ fn sys_hermit_accept(
+ s: FileDescriptor,
+ addr: *mut sockaddr,
+ addrlen: *mut socklen_t,
+ ) -> FileDescriptor;
+ fn sys_hermit_bind(s: FileDescriptor, name: *const sockaddr, namelen: socklen_t) -> i32;
+ fn sys_hermit_listen(s: FileDescriptor, backlog: i32) -> i32;
+ fn sys_hermit_shutdown(s: FileDescriptor, how: i32) -> i32;
+ fn sys_hermit_getpeername(
+ s: FileDescriptor,
+ name: *mut sockaddr,
+ namelen: *mut socklen_t,
+ ) -> i32;
+ fn sys_hermit_getsockname(
+ s: FileDescriptor,
+ name: *mut sockaddr,
+ namelen: *mut socklen_t,
+ ) -> i32;
+ fn sys_hermit_getsockopt(
+ s: FileDescriptor,
+ level: i32,
+ optname: i32,
+ optval: *mut c_void,
+ optlen: *const socklen_t,
+ ) -> i32;
+ fn sys_hermit_setsockopt(
+ s: FileDescriptor,
+ level: i32,
+ optname: i32,
+ optval: *const c_void,
+ optlen: socklen_t,
+ ) -> i32;
+ fn sys_hermit_connect(s: FileDescriptor, name: *const sockaddr, namelen: socklen_t) -> i32;
+ fn sys_hermit_getaddrinfo(
+ nodename: *const c_char,
+ servname: *const i8,
+ hints: *const addrinfo,
+ res: *mut *mut addrinfo,
+ ) -> i32;
+}
+
+pub const AF_INET: i32 = 10;
+pub const AF_INET6: i32 = 2;
+pub const IPPROTO_IP: i32 = 0;
+pub const IPPROTO_IPV6: i32 = 41;
+pub const IPPROTO_TCP: i32 = 6;
+pub const IPV6_ADD_MEMBERSHIP: i32 = 12;
+pub const IPV6_DROP_MEMBERSHIP: i32 = 13;
+pub const IPV6_MULTICAST_LOOP: i32 = 19;
+pub const IPV6_V6ONLY: i32 = 27;
+pub const IP_TTL: i32 = 2;
+pub const IP_MULTICAST_TTL: i32 = 5;
+pub const IP_MULTICAST_LOOP: i32 = 7;
+pub const IP_ADD_MEMBERSHIP: i32 = 3;
+pub const IP_DROP_MEMBERSHIP: i32 = 4;
+pub const SHUT_READ: i32 = 0;
+pub const SHUT_WRITE: i32 = 1;
+pub const SHUT_BOTH: i32 = 2;
+pub const SOCK_DGRAM: i32 = 2;
+pub const SOCK_STREAM: i32 = 1;
+pub const SOL_SOCKET: i32 = 4095;
+pub const SO_BROADCAST: i32 = 32;
+pub const SO_ERROR: i32 = 4103;
+pub const SO_RCVTIMEO: i32 = 4102;
+pub const SO_REUSEADDR: i32 = 4;
+pub const SO_SNDTIMEO: i32 = 4101;
+pub const SO_LINGER: i32 = 128;
+pub const TCP_NODELAY: i32 = 1;
+pub const MSG_PEEK: i32 = 1;
+
+pub type sa_family_t = u8;
+pub type socklen_t = usize;
+pub type in_addr_t = u32;
+pub type in_port_t = u16;
+
+#[derive(Debug, Copy, Clone)]
+#[repr(C)]
+pub struct in_addr {
+ pub s_addr: u32,
+}
+
+#[derive(Debug, Copy, Clone)]
+#[repr(C)]
+pub struct in6_addr {
+ pub s6_addr: [u8; 16],
+}
+
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct sockaddr {
+ pub sa_len: u8,
+ pub sa_family: sa_family_t,
+ pub sa_data: [u8; 14usize],
+}
+
+#[derive(Copy, Clone)]
+#[repr(C)]
+pub struct sockaddr_in6 {
+ pub sin6_len: u8,
+ pub sin6_family: sa_family_t,
+ pub sin6_port: in_port_t,
+ pub sin6_flowinfo: u32,
+ pub sin6_addr: in6_addr,
+ pub sin6_scope_id: u32,
+}
+
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct sockaddr_in {
+ pub sin_len: u8,
+ pub sin_family: sa_family_t,
+ pub sin_port: in_port_t,
+ pub sin_addr: in_addr,
+ pub sin_zero: [u8; 8usize],
+}
+
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct iovec {
+ pub iov_base: *mut c_void,
+ pub iov_len: usize,
+}
+
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct ip_mreq {
+ pub imr_multiaddr: in_addr,
+ pub imr_interface: in_addr,
+}
+
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct ipv6_mreq {
+ pub ipv6mr_multiaddr: in6_addr,
+ pub ipv6mr_interface: c_uint,
+}
+
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct msghdr {
+ pub msg_name: *mut c_void,
+ pub msg_namelen: socklen_t,
+ pub msg_iov: *mut iovec,
+ pub msg_iovlen: c_int,
+ pub msg_control: *mut c_void,
+ pub msg_controllen: socklen_t,
+ pub msg_flags: c_int,
+}
+
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct sockaddr_storage {
+ pub s2_len: u8,
+ pub ss_family: sa_family_t,
+ pub s2_data1: [c_char; 2usize],
+ pub s2_data2: [u32; 3usize],
+}
+
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct addrinfo {
+ pub ai_flags: c_int,
+ pub ai_family: c_int,
+ pub ai_socktype: c_int,
+ pub ai_protocol: c_int,
+ pub ai_addrlen: socklen_t,
+ pub ai_addr: *mut sockaddr,
+ pub ai_canonname: *mut c_char,
+ pub ai_next: *mut addrinfo,
+}
+
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct linger {
+ pub l_onoff: i32,
+ pub l_linger: i32,
+}
+
+#[inline]
+pub unsafe fn socket(domain: c_int, type_: c_int, protocol: c_int) -> FileDescriptor {
+ sys_hermit_socket(domain, type_, protocol)
+}
+
+#[inline]
+pub unsafe fn accept(
+ s: FileDescriptor,
+ addr: *mut sockaddr,
+ addrlen: *mut socklen_t,
+) -> FileDescriptor {
+ sys_hermit_accept(s, addr, addrlen)
+}
+
+#[inline]
+pub unsafe fn bind(s: FileDescriptor, name: *const sockaddr, namelen: socklen_t) -> i32 {
+ sys_hermit_bind(s, name, namelen)
+}
+
+#[inline]
+pub unsafe fn shutdown(s: FileDescriptor, how: c_int) -> i32 {
+ sys_hermit_shutdown(s, how)
+}
+
+#[inline]
+pub unsafe fn getpeername(s: FileDescriptor, name: *mut sockaddr, namelen: *mut socklen_t) -> i32 {
+ sys_hermit_getpeername(s, name, namelen)
+}
+
+#[inline]
+pub unsafe fn getsockname(s: FileDescriptor, name: *mut sockaddr, namelen: *mut socklen_t) -> i32 {
+ sys_hermit_getsockname(s, name, namelen)
+}
+
+#[inline]
+pub unsafe fn getsockopt(
+ s: FileDescriptor,
+ level: c_int,
+ optname: c_int,
+ optval: *mut c_void,
+ optlen: *const socklen_t,
+) -> i32 {
+ sys_hermit_getsockopt(s, level, optname, optval, optlen)
+}
+
+#[inline]
+pub unsafe fn setsockopt(
+ s: FileDescriptor,
+ level: c_int,
+ optname: c_int,
+ optval: *const c_void,
+ optlen: socklen_t,
+) -> i32 {
+ sys_hermit_setsockopt(s, level, optname, optval, optlen)
+}
+
+#[inline]
+pub unsafe fn connect(s: FileDescriptor, name: *const sockaddr, namelen: socklen_t) -> i32 {
+ sys_hermit_connect(s, name, namelen)
+}
+
+#[inline]
+pub unsafe fn listen(s: FileDescriptor, backlog: c_int) -> i32 {
+ sys_hermit_listen(s, backlog)
+}
+
+#[inline]
+pub unsafe fn recv(s: FileDescriptor, mem: *mut c_void, len: size_t, flags: c_int) -> ssize_t {
+ (-EINVAL).try_into().unwrap()
+}
+
+#[inline]
+pub unsafe fn recvfrom(
+ s: FileDescriptor,
+ mem: *mut c_void,
+ len: size_t,
+ flags: c_int,
+ from: *mut sockaddr,
+ fromlen: *mut socklen_t,
+) -> ssize_t {
+ (-EINVAL).try_into().unwrap()
+}
+
+#[inline]
+pub unsafe fn send(s: FileDescriptor, mem: *const c_void, len: size_t, flags: c_int) -> ssize_t {
+ (-EINVAL).try_into().unwrap()
+}
+
+#[inline]
+pub unsafe fn sendmsg(s: FileDescriptor, message: *const msghdr, flags: c_int) -> ssize_t {
+ (-EINVAL).try_into().unwrap()
+}
+
+#[inline]
+pub unsafe fn sendto(
+ s: FileDescriptor,
+ mem: *const c_void,
+ len: size_t,
+ flags: c_int,
+ to: *const sockaddr,
+ tolen: socklen_t,
+) -> ssize_t {
+ (-EINVAL).try_into().unwrap()
+}
+
+#[inline]
+pub unsafe fn freeaddrinfo(ai: *mut addrinfo) {}
+
+#[inline]
+pub unsafe fn getaddrinfo(
+ nodename: *const c_char,
+ servname: *const c_char,
+ hints: *const addrinfo,
+ res: *mut *mut addrinfo,
+) -> i32 {
+ sys_hermit_getaddrinfo(nodename, servname, hints, res)
+}
diff --git a/vendor/icu_list/.cargo-checksum.json b/vendor/icu_list/.cargo-checksum.json
index 31667c731..0a33455b4 100644
--- a/vendor/icu_list/.cargo-checksum.json
+++ b/vendor/icu_list/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.lock":"f650478383a2f1a7aed551b2d4650d83a2e8cc3d44c749a0e4fcfc649130b9da","Cargo.toml":"ac843eda0523eadb2d7c3bb39ea34ca5c45648f2e00c1aca87607a427f583e73","LICENSE":"4ad7541d66a407234e2c84902124cef325c29f3e966353efdb800bedb8b8da21","README.md":"33b424bdceea5edc4d3638592b007bf825e25a96e7fe9101a3ad04ea63637150","examples/and_list.rs":"2cf046f8af9892949db8c8b259ac060b9de5781a5941bec5d346cf82e9371774","src/error.rs":"3cf8a7962af6b43a91d9227e4d97824e4b2852d3d50fc00fb29c779768c1b915","src/lib.rs":"c0bd00a3d83d4285d33cef2af12bd9cae380ad9c81702ba7552ce14283e9651d","src/list_formatter.rs":"a5f6f9c8bf35c6e1d4cb712a73b31454c0d081572bbc0efd9997bf6013346825","src/provider.rs":"1ca4026dbc00c901763a41a4b4ad0083c32888b95d11f464a9e3389c7d976fec","src/string_matcher.rs":"6ce2a72cd61e3d87715dafb396d8e6ea4189a3e7dcb84d4188657a477b67b1e6"},"package":"c40218275f081c4493f190357c5395647b06734c2dc3dcb41cc099a0f60168b1"} \ No newline at end of file
+{"files":{"Cargo.lock":"11c80ad227499af3696bc71b715a91d202e3b178d24fd37776ca4221438d36ce","Cargo.toml":"2a69f60d480d23b4cfcba46039d83efe1eba24bd7ecf9cb6e7b08e7448a57b65","LICENSE":"4ad7541d66a407234e2c84902124cef325c29f3e966353efdb800bedb8b8da21","README.md":"33b424bdceea5edc4d3638592b007bf825e25a96e7fe9101a3ad04ea63637150","examples/and_list.rs":"76af2fbe31c6641a360726b201bfb043a35ee8dcf37b626640221fe889b194e4","src/error.rs":"3cf8a7962af6b43a91d9227e4d97824e4b2852d3d50fc00fb29c779768c1b915","src/lazy_automaton.rs":"eff2e95ae5c889908ba71480e73fc31d9cb2e793e9dc6dee8683c686e8ed8019","src/lib.rs":"24090cea0e42067e06fc9d5248c1ae6a03f12bd62ec0e9e2c237d2fc1251dcdb","src/list_formatter.rs":"a21de479a5ec86ed46ce0ad508e8059efb9ecd95ce869523be30118ed6a86660","src/patterns.rs":"3013aca1dd51d1b9a98e8573abac9116650635b2931157e9fb16ee10c8d3ab9c","src/provider/mod.rs":"0dbd37b8b83d40a2f5d7945cc55b4aa8477cec98442472c4b7a6875b226c8195","src/provider/serde_dfa.rs":"4f0489d176e87ad463efd36b4ead93b0b6bb0a39d26b1f96413bfb00576f7bac"},"package":"01a65ff0cab77c33c7e165c858eaa6e84a09f1e485dd495d9d0ae61083c6f786"} \ No newline at end of file
diff --git a/vendor/icu_list/Cargo.lock b/vendor/icu_list/Cargo.lock
index 8d049ce78..759405aa8 100644
--- a/vendor/icu_list/Cargo.lock
+++ b/vendor/icu_list/Cargo.lock
@@ -3,258 +3,43 @@
version = 3
[[package]]
-name = "aho-corasick"
-version = "0.7.19"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b4f55bd91a0978cbfd91c457a164bab8b4001c833b7f323132c0a4e1922dd44e"
-dependencies = [
- "memchr",
-]
-
-[[package]]
name = "atomic-polyfill"
-version = "0.1.10"
+version = "0.1.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9c041a8d9751a520ee19656232a18971f18946a7900f1520ee4400002244dd89"
+checksum = "e3ff7eb3f316534d83a8a2c3d1674ace8a5a71198eba31e2e2b597833f699b28"
dependencies = [
"critical-section",
]
[[package]]
-name = "atty"
-version = "0.2.14"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
-dependencies = [
- "hermit-abi",
- "libc",
- "winapi",
-]
-
-[[package]]
name = "autocfg"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
[[package]]
-name = "bare-metal"
-version = "0.2.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5deb64efa5bd81e31fcd1938615a6d98c82eafcbcd787162b6f63b91d6bac5b3"
-dependencies = [
- "rustc_version 0.2.3",
-]
-
-[[package]]
-name = "bare-metal"
-version = "1.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f8fe8f5a8a398345e52358e18ff07cc17a568fbca5c6f73873d3a62056309603"
-
-[[package]]
-name = "bit_field"
-version = "0.10.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dcb6dd1c2376d2e096796e234a70e17e94cc2d5d54ff8ce42b28cef1d0d359a4"
-
-[[package]]
-name = "bitfield"
-version = "0.13.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "46afbd2983a5d5a7bd740ccb198caf5b82f45c40c09c0eed36052d91cb92e719"
-
-[[package]]
-name = "bitflags"
-version = "1.3.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
-
-[[package]]
-name = "bstr"
-version = "0.2.17"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ba3569f383e8f1598449f1a423e72e99569137b47740b1da11ef19af3d5c3223"
-dependencies = [
- "lazy_static",
- "memchr",
- "regex-automata 0.1.10",
- "serde",
-]
-
-[[package]]
-name = "bumpalo"
-version = "3.11.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c1ad822118d20d2c234f427000d5acc36eabe1e29a348c89b63dd60b13f28e5d"
-
-[[package]]
name = "byteorder"
version = "1.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610"
[[package]]
-name = "cast"
-version = "0.3.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5"
-
-[[package]]
-name = "cfg-if"
-version = "1.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
-
-[[package]]
-name = "clap"
-version = "2.34.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c"
-dependencies = [
- "bitflags",
- "textwrap",
- "unicode-width",
-]
-
-[[package]]
name = "cobs"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "67ba02a97a2bd10f4b59b25c7973101c79642302776489e030cd13cdab09ed15"
[[package]]
-name = "cortex-m"
-version = "0.7.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "70858629a458fdfd39f9675c4dc309411f2a3f83bede76988d81bf1a0ecee9e0"
-dependencies = [
- "bare-metal 0.2.5",
- "bitfield",
- "embedded-hal",
- "volatile-register",
-]
-
-[[package]]
-name = "criterion"
-version = "0.3.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b01d6de93b2b6c65e17c634a26653a29d107b3c98c607c765bf38d041531cd8f"
-dependencies = [
- "atty",
- "cast",
- "clap",
- "criterion-plot",
- "csv",
- "itertools",
- "lazy_static",
- "num-traits",
- "oorandom",
- "plotters",
- "rayon",
- "regex",
- "serde",
- "serde_cbor",
- "serde_derive",
- "serde_json",
- "tinytemplate",
- "walkdir",
-]
-
-[[package]]
-name = "criterion-plot"
-version = "0.4.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2673cc8207403546f45f5fd319a974b1e6983ad1a3ee7e6041650013be041876"
-dependencies = [
- "cast",
- "itertools",
-]
-
-[[package]]
name = "critical-section"
-version = "0.2.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "95da181745b56d4bd339530ec393508910c909c784e8962d15d722bacf0bcbcd"
-dependencies = [
- "bare-metal 1.0.0",
- "cfg-if",
- "cortex-m",
- "riscv",
-]
-
-[[package]]
-name = "crossbeam-channel"
-version = "0.5.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c2dd04ddaf88237dc3b8d8f9a3c1004b506b54b3313403944054d23c0870c521"
-dependencies = [
- "cfg-if",
- "crossbeam-utils",
-]
-
-[[package]]
-name = "crossbeam-deque"
-version = "0.8.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "715e8152b692bba2d374b53d4875445368fdf21a94751410af607a5ac677d1fc"
-dependencies = [
- "cfg-if",
- "crossbeam-epoch",
- "crossbeam-utils",
-]
-
-[[package]]
-name = "crossbeam-epoch"
-version = "0.9.10"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "045ebe27666471bb549370b4b0b3e51b07f56325befa4284db65fc89c02511b1"
-dependencies = [
- "autocfg",
- "cfg-if",
- "crossbeam-utils",
- "memoffset",
- "once_cell",
- "scopeguard",
-]
-
-[[package]]
-name = "crossbeam-utils"
-version = "0.8.11"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "51887d4adc7b564537b15adcfb307936f8075dfcd5f00dde9a9f1d29383682bc"
-dependencies = [
- "cfg-if",
- "once_cell",
-]
-
-[[package]]
-name = "csv"
-version = "1.1.6"
+version = "1.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "22813a6dc45b335f9bade10bf7271dc477e81113e89eb251a0bc2a8a81c536e1"
-dependencies = [
- "bstr",
- "csv-core",
- "itoa 0.4.8",
- "ryu",
- "serde",
-]
-
-[[package]]
-name = "csv-core"
-version = "0.1.10"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2b2466559f260f48ad25fe6317b3c8dac77b5bdb5763ac7d9d6103530663bc90"
-dependencies = [
- "memchr",
-]
+checksum = "6548a0ad5d2549e111e1f6a11a6c2e2d00ce6a3dafe22948d67c2b443f775e52"
[[package]]
name = "databake"
-version = "0.1.2"
+version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c87777d6d7bde863ba217aa87521dc857239de1f36d66aac46fd173fb0495858"
+checksum = "df626c4717e455cd7a70a82c4358630554a07e4341f86dd095c625f1474a2857"
dependencies = [
"databake-derive",
"proc-macro2",
@@ -264,9 +49,9 @@ dependencies = [
[[package]]
name = "databake-derive"
-version = "0.1.1"
+version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "905c7a060fc0c84c0452d97473b1177dd7a5cbc7670cfbae4a7fe22e42f6432e"
+checksum = "be51a53c468489ae1ef0efa9f6b10706f426c0dde06d66122ffef1f0c51e87dc"
dependencies = [
"proc-macro2",
"quote",
@@ -276,9 +61,9 @@ dependencies = [
[[package]]
name = "deduplicating_array"
-version = "0.1.2"
+version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1e7f0807b2feeeda87369e8b4cf467f250f39841c8f9427bf3a972b878588937"
+checksum = "135a278b07263e55438c15a3021b4947288f981ae387666f5015add8fbc76f5b"
dependencies = [
"serde",
]
@@ -295,28 +80,6 @@ dependencies = [
]
[[package]]
-name = "either"
-version = "1.8.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "90e5c1c8368803113bf0c9584fc495a58b86dc8a29edbf8fe877d21d9507e797"
-
-[[package]]
-name = "embedded-hal"
-version = "0.2.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "35949884794ad573cf46071e41c9b60efb0cb311e3ca01f7af807af1debc66ff"
-dependencies = [
- "nb 0.1.3",
- "void",
-]
-
-[[package]]
-name = "half"
-version = "1.8.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7"
-
-[[package]]
name = "hash32"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -333,51 +96,32 @@ checksum = "db04bc24a18b9ea980628ecf00e6c0264f3c1426dac36c00cb49b6fbad8b0743"
dependencies = [
"atomic-polyfill",
"hash32",
- "rustc_version 0.4.0",
+ "rustc_version",
"serde",
"spin",
"stable_deref_trait",
]
[[package]]
-name = "hermit-abi"
-version = "0.1.19"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33"
-dependencies = [
- "libc",
-]
-
-[[package]]
-name = "icu_benchmark_macros"
-version = "0.7.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6c867656f2d9c90b13709ac88e710a9d6afe33998c1dfa22384bab8804e8b3d4"
-
-[[package]]
name = "icu_list"
-version = "1.0.0"
+version = "1.1.0"
dependencies = [
- "criterion",
"databake",
"deduplicating_array",
"displaydoc",
- "icu_benchmark_macros",
- "icu_locid",
"icu_provider",
"postcard",
- "regex-automata 0.2.0",
+ "regex-automata",
"serde",
"serde_json",
"writeable",
- "zerovec",
]
[[package]]
name = "icu_locid"
-version = "1.0.0"
+version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "34b3de5d99a0e275fe6193b9586dbf37364daebc0d39c89b5cf8376a53b789e8"
+checksum = "71d7a98ecb812760b5f077e55a4763edeefa7ccc30d6eb5680a70841ede81928"
dependencies = [
"displaydoc",
"litemap",
@@ -387,9 +131,9 @@ dependencies = [
[[package]]
name = "icu_provider"
-version = "1.0.0"
+version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e629bc2b6591ed9e4467d8a0fa2a597b70cff64ff8170e54a3f0f3257b99873f"
+checksum = "a86816c97bc4e613086497f9479f63e120315e056763e8c4435604f98d21d82d"
dependencies = [
"displaydoc",
"icu_locid",
@@ -404,9 +148,9 @@ dependencies = [
[[package]]
name = "icu_provider_macros"
-version = "1.0.0"
+version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "38cf6f5b65cf81f0b4298da647101acbfe6ae0e25263f92bd7a22597e9d6d606"
+checksum = "9ddb07844c2ffc4c28840e799e9e54ff054393cf090740decf25624e9d94b93a"
dependencies = [
"proc-macro2",
"quote",
@@ -414,52 +158,16 @@ dependencies = [
]
[[package]]
-name = "itertools"
-version = "0.10.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473"
-dependencies = [
- "either",
-]
-
-[[package]]
-name = "itoa"
-version = "0.4.8"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4"
-
-[[package]]
name = "itoa"
-version = "1.0.3"
+version = "1.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6c8af84674fe1f223a982c933a0ee1086ac4d4052aa0fb8060c12c6ad838e754"
-
-[[package]]
-name = "js-sys"
-version = "0.3.60"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "49409df3e3bf0856b916e2ceaca09ee28e6871cf7d9ce97a692cacfdb2a25a47"
-dependencies = [
- "wasm-bindgen",
-]
-
-[[package]]
-name = "lazy_static"
-version = "1.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
-
-[[package]]
-name = "libc"
-version = "0.2.133"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c0f80d65747a3e43d1596c7c5492d95d5edddaabd45a7fcdb02b95f644164966"
+checksum = "fad582f4b9e86b6caa621cabeb0963332d92eea04729ab12892c2533951e6440"
[[package]]
name = "litemap"
-version = "0.6.0"
+version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f34a3f4798fac63fb48cf277eefa38f94d3443baff555bb98e4f56bc9092368e"
+checksum = "575d8a551c59104b4df91269921e5eab561aa1b77c618dac0414b5d44a4617de"
[[package]]
name = "lock_api"
@@ -472,104 +180,12 @@ dependencies = [
]
[[package]]
-name = "log"
-version = "0.4.17"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e"
-dependencies = [
- "cfg-if",
-]
-
-[[package]]
name = "memchr"
version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
[[package]]
-name = "memoffset"
-version = "0.6.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce"
-dependencies = [
- "autocfg",
-]
-
-[[package]]
-name = "nb"
-version = "0.1.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "801d31da0513b6ec5214e9bf433a77966320625a37860f910be265be6e18d06f"
-dependencies = [
- "nb 1.0.0",
-]
-
-[[package]]
-name = "nb"
-version = "1.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "546c37ac5d9e56f55e73b677106873d9d9f5190605e41a856503623648488cae"
-
-[[package]]
-name = "num-traits"
-version = "0.2.15"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd"
-dependencies = [
- "autocfg",
-]
-
-[[package]]
-name = "num_cpus"
-version = "1.13.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "19e64526ebdee182341572e50e9ad03965aa510cd94427a4549448f285e957a1"
-dependencies = [
- "hermit-abi",
- "libc",
-]
-
-[[package]]
-name = "once_cell"
-version = "1.15.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e82dad04139b71a90c080c8463fe0dc7902db5192d939bd0950f074d014339e1"
-
-[[package]]
-name = "oorandom"
-version = "11.1.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575"
-
-[[package]]
-name = "plotters"
-version = "0.3.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2538b639e642295546c50fcd545198c9d64ee2a38620a628724a3b266d5fbf97"
-dependencies = [
- "num-traits",
- "plotters-backend",
- "plotters-svg",
- "wasm-bindgen",
- "web-sys",
-]
-
-[[package]]
-name = "plotters-backend"
-version = "0.3.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "193228616381fecdc1224c62e96946dfbc73ff4384fba576e052ff8c1bea8142"
-
-[[package]]
-name = "plotters-svg"
-version = "0.3.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f9a81d2759aae1dae668f783c308bc5c8ebd191ff4184aaa1b37f65a6ae5a56f"
-dependencies = [
- "plotters-backend",
-]
-
-[[package]]
name = "postcard"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -582,64 +198,23 @@ dependencies = [
[[package]]
name = "proc-macro2"
-version = "1.0.44"
+version = "1.0.50"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7bd7356a8122b6c4a24a82b278680c73357984ca2fc79a0f9fa6dea7dced7c58"
+checksum = "6ef7d57beacfaf2d8aee5937dab7b7f28de3cb8b1828479bb5de2a7106f2bae2"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
-version = "1.0.21"
+version = "1.0.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179"
+checksum = "8856d8364d252a14d474036ea1358d63c9e6965c8e5c1885c18f73d70bff9c7b"
dependencies = [
"proc-macro2",
]
[[package]]
-name = "rayon"
-version = "1.5.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bd99e5772ead8baa5215278c9b15bf92087709e9c1b2d1f97cdb5a183c933a7d"
-dependencies = [
- "autocfg",
- "crossbeam-deque",
- "either",
- "rayon-core",
-]
-
-[[package]]
-name = "rayon-core"
-version = "1.9.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "258bcdb5ac6dad48491bb2992db6b7cf74878b0384908af124823d118c99683f"
-dependencies = [
- "crossbeam-channel",
- "crossbeam-deque",
- "crossbeam-utils",
- "num_cpus",
-]
-
-[[package]]
-name = "regex"
-version = "1.6.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4c4eb3267174b8c6c2f654116623910a0fef09c4753f8dd83db29c48a0df988b"
-dependencies = [
- "aho-corasick",
- "memchr",
- "regex-syntax",
-]
-
-[[package]]
-name = "regex-automata"
-version = "0.1.10"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
-
-[[package]]
name = "regex-automata"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -651,39 +226,9 @@ dependencies = [
[[package]]
name = "regex-syntax"
-version = "0.6.27"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a3f87b73ce11b1619a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244"
-
-[[package]]
-name = "riscv"
-version = "0.7.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6907ccdd7a31012b70faf2af85cd9e5ba97657cc3987c4f13f8e4d2c2a088aba"
-dependencies = [
- "bare-metal 1.0.0",
- "bit_field",
- "riscv-target",
-]
-
-[[package]]
-name = "riscv-target"
-version = "0.1.2"
+version = "0.6.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "88aa938cda42a0cf62a20cfe8d139ff1af20c2e681212b5b34adb5a58333f222"
-dependencies = [
- "lazy_static",
- "regex",
-]
-
-[[package]]
-name = "rustc_version"
-version = "0.2.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a"
-dependencies = [
- "semver 0.9.0",
-]
+checksum = "456c603be3e8d448b072f410900c09faf164fbce2d480456f50eea6e25f9c848"
[[package]]
name = "rustc_version"
@@ -691,23 +236,14 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366"
dependencies = [
- "semver 1.0.14",
+ "semver",
]
[[package]]
name = "ryu"
-version = "1.0.11"
+version = "1.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4501abdff3ae82a1c1b477a17252eb69cee9e66eb915c1abaa4f44d873df9f09"
-
-[[package]]
-name = "same-file"
-version = "1.0.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
-dependencies = [
- "winapi-util",
-]
+checksum = "7b4b9743ed687d4b4bcedf9ff5eaa7398495ae14e61cba0a295704edbc7decde"
[[package]]
name = "scopeguard"
@@ -717,49 +253,24 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
[[package]]
name = "semver"
-version = "0.9.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403"
-dependencies = [
- "semver-parser",
-]
-
-[[package]]
-name = "semver"
-version = "1.0.14"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e25dfac463d778e353db5be2449d1cce89bd6fd23c9f1ea21310ce6e5a1b29c4"
-
-[[package]]
-name = "semver-parser"
-version = "0.7.0"
+version = "1.0.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"
+checksum = "58bc9567378fc7690d6b2addae4e60ac2eeea07becb2c64b9f218b53865cba2a"
[[package]]
name = "serde"
-version = "1.0.145"
+version = "1.0.152"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "728eb6351430bccb993660dfffc5a72f91ccc1295abaa8ce19b27ebe4f75568b"
+checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb"
dependencies = [
"serde_derive",
]
[[package]]
-name = "serde_cbor"
-version = "0.11.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2bef2ebfde456fb76bbcf9f59315333decc4fda0b2b44b420243c11e0f5ec1f5"
-dependencies = [
- "half",
- "serde",
-]
-
-[[package]]
name = "serde_derive"
-version = "1.0.145"
+version = "1.0.152"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "81fa1584d3d1bcacd84c277a0dfe21f5b0f6accf4a23d04d4c6d61f1af522b4c"
+checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e"
dependencies = [
"proc-macro2",
"quote",
@@ -768,11 +279,11 @@ dependencies = [
[[package]]
name = "serde_json"
-version = "1.0.85"
+version = "1.0.91"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e55a28e3aaef9d5ce0506d0a14dbba8054ddc7e499ef522dd8b26859ec9d4a44"
+checksum = "877c235533714907a8c2464236f5c4b2a17262ef1bd71f38f35ea592c8da6883"
dependencies = [
- "itoa 1.0.3",
+ "itoa",
"ryu",
"serde",
]
@@ -794,9 +305,9 @@ checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
[[package]]
name = "syn"
-version = "1.0.101"
+version = "1.0.107"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e90cde112c4b9690b8cbe810cba9ddd8bc1d7472e2cae317b69e9438c1cba7d2"
+checksum = "1f4064b5b16e03ae50984a5a8ed5d4f8803e6bc1fd170a3cda91a1be4b18e3f5"
dependencies = [
"proc-macro2",
"quote",
@@ -816,44 +327,19 @@ dependencies = [
]
[[package]]
-name = "textwrap"
-version = "0.11.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060"
-dependencies = [
- "unicode-width",
-]
-
-[[package]]
name = "tinystr"
-version = "0.7.0"
+version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f8aeafdfd935e4a7fe16a91ab711fa52d54df84f9c8f7ca5837a9d1d902ef4c2"
+checksum = "7ac3f5b6856e931e15e07b478e98c8045239829a65f9156d4fa7e7788197a5ef"
dependencies = [
"displaydoc",
]
[[package]]
-name = "tinytemplate"
-version = "1.2.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "be4d6b5f19ff7664e8c98d03e2139cb510db9b0a60b55f8e8709b689d939b6bc"
-dependencies = [
- "serde",
- "serde_json",
-]
-
-[[package]]
name = "unicode-ident"
-version = "1.0.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dcc811dc4066ac62f84f11307873c4850cb653bfa9b1719cee2bd2204a4bc5dd"
-
-[[package]]
-name = "unicode-width"
-version = "0.1.10"
+version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b"
+checksum = "84a22b9f218b40614adcb3f4ff08b703773ad44fa9423e4e0d346d5db86e4ebc"
[[package]]
name = "unicode-xid"
@@ -862,143 +348,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c"
[[package]]
-name = "vcell"
-version = "0.1.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "77439c1b53d2303b20d9459b1ade71a83c716e3f9c34f3228c00e6f185d6c002"
-
-[[package]]
-name = "void"
-version = "1.0.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d"
-
-[[package]]
-name = "volatile-register"
-version = "0.2.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9ee8f19f9d74293faf70901bc20ad067dc1ad390d2cbf1e3f75f721ffee908b6"
-dependencies = [
- "vcell",
-]
-
-[[package]]
-name = "walkdir"
-version = "2.3.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "808cf2735cd4b6866113f648b791c6adc5714537bc222d9347bb203386ffda56"
-dependencies = [
- "same-file",
- "winapi",
- "winapi-util",
-]
-
-[[package]]
-name = "wasm-bindgen"
-version = "0.2.83"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eaf9f5aceeec8be17c128b2e93e031fb8a4d469bb9c4ae2d7dc1888b26887268"
-dependencies = [
- "cfg-if",
- "wasm-bindgen-macro",
-]
-
-[[package]]
-name = "wasm-bindgen-backend"
-version = "0.2.83"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4c8ffb332579b0557b52d268b91feab8df3615f265d5270fec2a8c95b17c1142"
-dependencies = [
- "bumpalo",
- "log",
- "once_cell",
- "proc-macro2",
- "quote",
- "syn",
- "wasm-bindgen-shared",
-]
-
-[[package]]
-name = "wasm-bindgen-macro"
-version = "0.2.83"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "052be0f94026e6cbc75cdefc9bae13fd6052cdcaf532fa6c45e7ae33a1e6c810"
-dependencies = [
- "quote",
- "wasm-bindgen-macro-support",
-]
-
-[[package]]
-name = "wasm-bindgen-macro-support"
-version = "0.2.83"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "07bc0c051dc5f23e307b13285f9d75df86bfdf816c5721e573dec1f9b8aa193c"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn",
- "wasm-bindgen-backend",
- "wasm-bindgen-shared",
-]
-
-[[package]]
-name = "wasm-bindgen-shared"
-version = "0.2.83"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1c38c045535d93ec4f0b4defec448e4291638ee608530863b1e2ba115d4fff7f"
-
-[[package]]
-name = "web-sys"
-version = "0.3.60"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bcda906d8be16e728fd5adc5b729afad4e444e106ab28cd1c7256e54fa61510f"
-dependencies = [
- "js-sys",
- "wasm-bindgen",
-]
-
-[[package]]
-name = "winapi"
-version = "0.3.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
-dependencies = [
- "winapi-i686-pc-windows-gnu",
- "winapi-x86_64-pc-windows-gnu",
-]
-
-[[package]]
-name = "winapi-i686-pc-windows-gnu"
-version = "0.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
-
-[[package]]
-name = "winapi-util"
-version = "0.1.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178"
-dependencies = [
- "winapi",
-]
-
-[[package]]
-name = "winapi-x86_64-pc-windows-gnu"
-version = "0.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
-
-[[package]]
name = "writeable"
-version = "0.5.0"
+version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f8e6ab4f5da1b24daf2c590cfac801bacb27b15b4f050e84eb60149ea726f06b"
+checksum = "92d74a687e3b9a7a129db0a8c82b4d464eb9c36f5a66ca68572a7e5f1cfdb5bc"
[[package]]
name = "yoke"
-version = "0.6.2"
+version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1fe1d55ca72c32d573bfbd5cb2f0ca65a497854c44762957a6d3da96041a5184"
+checksum = "222180af14a6b54ef2c33493c1eff77ae95a3687a21b243e752624006fb8f26e"
dependencies = [
"serde",
"stable_deref_trait",
@@ -1008,9 +367,9 @@ dependencies = [
[[package]]
name = "yoke-derive"
-version = "0.6.0"
+version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "58c2c5bb7c929b85c1b9ec69091b0d835f0878b4fd9eb67973b25936e06c4374"
+checksum = "ca800d73d6b7a7ee54f2608205c98b549fca71c9500c1abcb3abdc7708b4a8cb"
dependencies = [
"proc-macro2",
"quote",
@@ -1029,9 +388,9 @@ dependencies = [
[[package]]
name = "zerofrom-derive"
-version = "0.1.0"
+version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8785f47d6062c1932866147f91297286a9f350b3070e9d9f0b6078e37d623c1a"
+checksum = "2e8aa86add9ddbd2409c1ed01e033cd457d79b1b1229b64922c25095c595e829"
dependencies = [
"proc-macro2",
"quote",
@@ -1041,21 +400,19 @@ dependencies = [
[[package]]
name = "zerovec"
-version = "0.9.0"
+version = "0.9.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b9d919a74c17749ccb17beaf6405562e413cd94e98ba52ca1e64bbe7eefbd8b8"
+checksum = "154df60c74c4a844bc04a53cef4fc18a909d3ea07e19f5225eaba86209da3aa6"
dependencies = [
- "serde",
- "yoke",
"zerofrom",
"zerovec-derive",
]
[[package]]
name = "zerovec-derive"
-version = "0.9.0"
+version = "0.9.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "490e5f878c2856225e884c35927e7ea6db3c24cdb7229b72542c7526ad7ed49e"
+checksum = "c630983d26a5f0c061dad3bf22df69a7329b4939a9752bc5f19f1cbd8e2263db"
dependencies = [
"proc-macro2",
"quote",
diff --git a/vendor/icu_list/Cargo.toml b/vendor/icu_list/Cargo.toml
index 805f2aea4..2d3f3feb1 100644
--- a/vendor/icu_list/Cargo.toml
+++ b/vendor/icu_list/Cargo.toml
@@ -12,7 +12,7 @@
[package]
edition = "2018"
name = "icu_list"
-version = "1.0.0"
+version = "1.1.0"
authors = ["The ICU4X Project Developers"]
include = [
"src/**/*",
@@ -30,30 +30,30 @@ license = "Unicode-DFS-2016"
repository = "https://github.com/unicode-org/icu4x"
resolver = "2"
-[lib]
-path = "src/lib.rs"
+[package.metadata.docs.rs]
+all-features = true
+
+[package.metadata.cargo-all-features]
+denylist = ["bench"]
[[example]]
name = "and_list"
[dependencies.databake]
-version = "0.1.0"
+version = "0.1.3"
features = ["derive"]
optional = true
[dependencies.deduplicating_array]
-version = "0.1"
+version = "0.1.3"
optional = true
[dependencies.displaydoc]
version = "0.2.3"
default-features = false
-[dependencies.icu_locid]
-version = "1.0.0"
-
[dependencies.icu_provider]
-version = "1.0.0"
+version = "1.1.0"
features = ["macros"]
[dependencies.regex-automata]
@@ -70,17 +70,7 @@ optional = true
default-features = false
[dependencies.writeable]
-version = "0.5"
-
-[dependencies.zerovec]
-version = "0.9"
-features = ["yoke"]
-
-[dev-dependencies.criterion]
-version = "0.3.3"
-
-[dev-dependencies.icu_benchmark_macros]
-version = "0.7"
+version = "0.5.1"
[dev-dependencies.postcard]
version = "1.0.0"
@@ -94,21 +84,16 @@ bench = []
datagen = [
"serde",
"std",
- "databake",
+ "dep:databake",
+ "regex-automata/alloc",
]
serde = [
+ "dep:deduplicating_array",
"dep:serde",
"icu_provider/serde",
- "zerovec/serde",
- "deduplicating_array",
]
serde_human = [
"serde",
"regex-automata/alloc",
]
-std = [
- "icu_provider/std",
- "icu_locid/std",
- "regex-automata/std",
- "regex-automata/alloc",
-]
+std = ["icu_provider/std"]
diff --git a/vendor/icu_list/examples/and_list.rs b/vendor/icu_list/examples/and_list.rs
index 9d869d9fb..08cfa3bda 100644
--- a/vendor/icu_list/examples/and_list.rs
+++ b/vendor/icu_list/examples/and_list.rs
@@ -6,8 +6,8 @@
icu_benchmark_macros::static_setup!();
-use icu_list::{ListFormatter, ListLength};
-use icu_locid::locale;
+use icu::list::{ListFormatter, ListLength};
+use icu::locid::locale;
#[no_mangle]
fn main(_argc: isize, _argv: *const *const u8) -> isize {
diff --git a/vendor/icu_list/src/lazy_automaton.rs b/vendor/icu_list/src/lazy_automaton.rs
new file mode 100644
index 000000000..3431b3c9d
--- /dev/null
+++ b/vendor/icu_list/src/lazy_automaton.rs
@@ -0,0 +1,79 @@
+// This file is part of ICU4X. For terms of use, please see the file
+// called LICENSE at the top level of the ICU4X source tree
+// (online at: https://github.com/unicode-org/icu4x/blob/main/LICENSE ).
+
+use regex_automata::dfa::sparse::DFA;
+use regex_automata::dfa::Automaton;
+use regex_automata::util::id::StateID;
+use writeable::Writeable;
+
+pub trait LazyAutomaton: Automaton {
+ // Like Automaton::find_earliest_fwd, but doesn't require a materialized string.
+ fn matches_earliest_fwd_lazy<S: Writeable + ?Sized>(&self, haystack: &S) -> bool;
+}
+
+impl<T: AsRef<[u8]>> LazyAutomaton for DFA<T> {
+ fn matches_earliest_fwd_lazy<S: Writeable + ?Sized>(&self, haystack: &S) -> bool {
+ struct DFAStepper<'a> {
+ dfa: &'a DFA<&'a [u8]>,
+ state: StateID,
+ }
+
+ impl core::fmt::Write for DFAStepper<'_> {
+ fn write_str(&mut self, s: &str) -> core::fmt::Result {
+ for &byte in s.as_bytes() {
+ self.state = self.dfa.next_state(self.state, byte);
+ if self.dfa.is_match_state(self.state) || self.dfa.is_dead_state(self.state) {
+ // We matched or are in a no-match-cycle, return early
+ return Err(core::fmt::Error);
+ }
+ }
+ Ok(())
+ }
+ }
+
+ let mut stepper = DFAStepper {
+ // If start == 0 the start state does not depend on the actual string, so
+ // we can just pass an empty slice.
+ state: self.start_state_forward(None, &[], 0, 0),
+ dfa: &self.as_ref(),
+ };
+
+ if haystack.write_to(&mut stepper).is_ok() {
+ stepper.state = self.next_eoi_state(stepper.state);
+ }
+
+ self.is_match_state(stepper.state)
+ }
+}
+
+#[cfg(test)]
+#[test]
+fn test() {
+ use crate::provider::SerdeDFA;
+ use alloc::borrow::Cow;
+
+ let matcher = SerdeDFA::new(Cow::Borrowed("11(000)*$")).unwrap();
+
+ for writeable in [1i32, 11, 110, 11000, 211000] {
+ assert_eq!(
+ matcher
+ .deref()
+ .find_earliest_fwd(writeable.write_to_string().as_bytes())
+ .unwrap()
+ .is_some(),
+ matcher.deref().matches_earliest_fwd_lazy(&writeable)
+ );
+ }
+
+ struct ExitEarlyTest;
+
+ impl writeable::Writeable for ExitEarlyTest {
+ fn write_to<W: core::fmt::Write + ?Sized>(&self, sink: &mut W) -> core::fmt::Result {
+ sink.write_str("12")?;
+ unreachable!()
+ }
+ }
+
+ assert!(!matcher.deref().matches_earliest_fwd_lazy(&ExitEarlyTest));
+}
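
The new `lazy_automaton` module above exists so the conditional list patterns can run their DFA directly over a `Writeable` (for instance an integer being formatted) instead of first materializing it into a `String`. A minimal crate-internal usage sketch, closely following the test above and assuming the `datagen` feature (which provides `SerdeDFA::new`); the helper function and its name are illustrative only:

```rust
// Hypothetical helper inside icu_list itself; neither `LazyAutomaton` nor
// `SerdeDFA::new` is public API outside the crate.
use alloc::borrow::Cow;

use crate::lazy_automaton::LazyAutomaton;
use crate::provider::SerdeDFA;

fn next_element_matches<W: writeable::Writeable>(value: &W) -> bool {
    // Anchored pattern compiled into a sparse DFA, as in the test above.
    let matcher = SerdeDFA::new(Cow::Borrowed("11(000)*$")).expect("valid pattern");
    // The value is streamed into the DFA byte by byte through `fmt::Write`,
    // and the walk stops early on a match or dead state.
    matcher.deref().matches_earliest_fwd_lazy(value)
}
```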
diff --git a/vendor/icu_list/src/lib.rs b/vendor/icu_list/src/lib.rs
index 18f2156a6..61aec0fa3 100644
--- a/vendor/icu_list/src/lib.rs
+++ b/vendor/icu_list/src/lib.rs
@@ -93,8 +93,9 @@
extern crate alloc;
mod error;
+mod lazy_automaton;
mod list_formatter;
-mod string_matcher;
+mod patterns;
pub mod provider;
diff --git a/vendor/icu_list/src/list_formatter.rs b/vendor/icu_list/src/list_formatter.rs
index 36f5fbb7b..93f035eab 100644
--- a/vendor/icu_list/src/list_formatter.rs
+++ b/vendor/icu_list/src/list_formatter.rs
@@ -72,8 +72,39 @@ impl ListFormatter {
);
/// Returns a [`Writeable`] composed of the input [`Writeable`]s and the language-dependent
- /// formatting. The first layer of parts contains [`parts::ELEMENT`] for input
- /// elements, and [`parts::LITERAL`] for list literals.
+ /// formatting.
+ ///
+ /// The [`Writeable`] is annotated with [`parts::ELEMENT`] for input elements,
+ /// and [`parts::LITERAL`] for list literals.
+ ///
+ /// # Example
+ ///
+ /// ```
+ /// use icu::list::*;
+ /// # use icu::locid::locale;
+ /// # use writeable::*;
+ /// let formatteur = ListFormatter::try_new_and_with_length_unstable(
+ /// &icu_testdata::unstable(),
+ /// &locale!("fr").into(),
+ /// ListLength::Wide,
+ /// )
+ /// .unwrap();
+ /// let pays = ["Italie", "France", "Espagne", "Allemagne"];
+ ///
+ /// assert_writeable_parts_eq!(
+ /// formatteur.format(pays.iter()),
+ /// "Italie, France, Espagne et Allemagne",
+ /// [
+ /// (0, 6, parts::ELEMENT),
+ /// (6, 8, parts::LITERAL),
+ /// (8, 14, parts::ELEMENT),
+ /// (14, 16, parts::LITERAL),
+ /// (16, 23, parts::ELEMENT),
+ /// (23, 27, parts::LITERAL),
+ /// (27, 36, parts::ELEMENT),
+ /// ]
+ /// );
+ /// ```
pub fn format<'a, W: Writeable + 'a, I: Iterator<Item = W> + Clone + 'a>(
&'a self,
values: I,
@@ -99,6 +130,9 @@ pub mod parts {
use writeable::Part;
/// The [`Part`] used by [`FormattedList`](super::FormattedList) to mark the part of the string that is an element.
+ ///
+ /// * `category`: `"list"`
+ /// * `value`: `"element"`
pub const ELEMENT: Part = Part {
category: "list",
value: "element",
@@ -106,6 +140,9 @@ pub mod parts {
/// The [`Part`] used by [`FormattedList`](super::FormattedList) to mark the part of the string that is a list literal,
/// such as ", " or " and ".
+ ///
+ /// * `category`: `"list"`
+ /// * `value`: `"literal"`
pub const LITERAL: Part = Part {
category: "list",
value: "literal",
@@ -234,7 +271,7 @@ mod tests {
fn formatter(length: ListLength) -> ListFormatter {
ListFormatter {
- data: DataPayload::from_owned(crate::provider::test::test_patterns()),
+ data: DataPayload::from_owned(crate::patterns::test::test_patterns()),
length,
}
}
diff --git a/vendor/icu_list/src/patterns.rs b/vendor/icu_list/src/patterns.rs
new file mode 100644
index 000000000..8cfcb98c1
--- /dev/null
+++ b/vendor/icu_list/src/patterns.rs
@@ -0,0 +1,283 @@
+// This file is part of ICU4X. For terms of use, please see the file
+// called LICENSE at the top level of the ICU4X source tree
+// (online at: https://github.com/unicode-org/icu4x/blob/main/LICENSE ).
+
+use crate::lazy_automaton::LazyAutomaton;
+use crate::provider::*;
+use crate::ListLength;
+#[cfg(feature = "datagen")]
+use alloc::borrow::Cow;
+#[cfg(feature = "datagen")]
+use icu_provider::DataError;
+use writeable::{LengthHint, Writeable};
+
+impl<'data> ListFormatterPatternsV1<'data> {
+ /// Creates a new [`ListFormatterPatternsV1`] from the given patterns. Fails if any pattern is invalid.
+ ///
+ /// See [`ListJoinerPattern::from_str`]. `allow_prefix` will be true for `pair` and `end` patterns,
+ /// `allow_suffix` for `start` and `pair` patterns.
+ #[cfg(feature = "datagen")]
+ pub fn try_new(
+ [start, middle, end, pair, short_start, short_middle, short_end, short_pair, narrow_start, narrow_middle, narrow_end, narrow_pair]: [&str; 12],
+ ) -> Result<Self, DataError> {
+ Ok(Self([
+ ListJoinerPattern::from_str(start, true, false)?.into(),
+ ListJoinerPattern::from_str(middle, false, false)?.into(),
+ ListJoinerPattern::from_str(end, false, true)?.into(),
+ ListJoinerPattern::from_str(pair, true, true)?.into(),
+ ListJoinerPattern::from_str(short_start, true, false)?.into(),
+ ListJoinerPattern::from_str(short_middle, false, false)?.into(),
+ ListJoinerPattern::from_str(short_end, false, true)?.into(),
+ ListJoinerPattern::from_str(short_pair, true, true)?.into(),
+ ListJoinerPattern::from_str(narrow_start, true, false)?.into(),
+ ListJoinerPattern::from_str(narrow_middle, false, false)?.into(),
+ ListJoinerPattern::from_str(narrow_end, false, true)?.into(),
+ ListJoinerPattern::from_str(narrow_pair, true, true)?.into(),
+ ]))
+ }
+
+ /// Adds a special case to all `pattern`s that will evaluate to
+ /// `alternative_pattern` when `regex` matches the following element.
+ /// The regex is interpreted case-insensitive and anchored to the beginning, but
+ /// to improve efficiency does not search for full matches. If a full match is
+ /// required, use `$`.
+ #[cfg(feature = "datagen")]
+ pub fn make_conditional(
+ &mut self,
+ pattern: &str,
+ regex: &SerdeDFA<'static>,
+ alternative_pattern: &str,
+ ) -> Result<(), DataError> {
+ let old = ListJoinerPattern::from_str(pattern, true, true)?;
+ for i in 0..12 {
+ #[allow(clippy::indexing_slicing)] // self.0 is &[_; 12]
+ if self.0[i].default == old {
+ self.0[i].special_case = Some(SpecialCasePattern {
+ condition: regex.clone(),
+ pattern: ListJoinerPattern::from_str(
+ alternative_pattern,
+ i % 4 == 0 || i % 4 == 3, // allow_prefix = start or pair
+ i % 4 == 2 || i % 4 == 3, // allow_suffix = end or pair
+ )?,
+ });
+ }
+ }
+ Ok(())
+ }
+
+ /// The range of the number of bytes required by the list literals to join a
+ /// list of length `len`. If none of the patterns are conditional, this is exact.
+ pub(crate) fn size_hint(&self, style: ListLength, len: usize) -> LengthHint {
+ match len {
+ 0 | 1 => LengthHint::exact(0),
+ 2 => self.pair(style).size_hint(),
+ n => {
+ self.start(style).size_hint()
+ + self.middle(style).size_hint() * (n - 3)
+ + self.end(style).size_hint()
+ }
+ }
+ }
+}
+
+type PatternParts<'a> = (&'a str, &'a str, &'a str);
+
+impl<'a> ConditionalListJoinerPattern<'a> {
+ pub(crate) fn parts<'b, W: Writeable + ?Sized>(
+ &'a self,
+ following_value: &'b W,
+ ) -> PatternParts<'a> {
+ match &self.special_case {
+ Some(SpecialCasePattern { condition, pattern })
+ if condition.deref().matches_earliest_fwd_lazy(following_value) =>
+ {
+ pattern.borrow_tuple()
+ }
+ _ => self.default.borrow_tuple(),
+ }
+ }
+
+ /// The expected length of this pattern
+ fn size_hint(&'a self) -> LengthHint {
+ let mut hint = self.default.size_hint();
+ if let Some(special_case) = &self.special_case {
+ hint |= special_case.pattern.size_hint()
+ }
+ hint
+ }
+}
+
+impl<'data> ListJoinerPattern<'data> {
+ /// Construct the pattern from a CLDR pattern string
+ #[cfg(feature = "datagen")]
+ pub fn from_str(
+ pattern: &str,
+ allow_prefix: bool,
+ allow_suffix: bool,
+ ) -> Result<Self, DataError> {
+ match (pattern.find("{0}"), pattern.find("{1}")) {
+ (Some(index_0), Some(index_1))
+ if index_0 < index_1
+ && (allow_prefix || index_0 == 0)
+ && (allow_suffix || index_1 == pattern.len() - 3) =>
+ {
+ if (index_0 > 0 && !cfg!(test)) || index_1 - 3 >= 256 {
+ return Err(DataError::custom(
+ "Found valid pattern that cannot be stored in ListFormatterPatternsV1",
+ )
+ .with_debug_context(pattern));
+ }
+ #[allow(clippy::indexing_slicing)] // find
+ Ok(ListJoinerPattern {
+ string: Cow::Owned(alloc::format!(
+ "{}{}{}",
+ &pattern[0..index_0],
+ &pattern[index_0 + 3..index_1],
+ &pattern[index_1 + 3..]
+ )),
+ index_0: index_0 as u8,
+ index_1: (index_1 - 3) as u8,
+ })
+ }
+ _ => Err(DataError::custom("Invalid list pattern").with_debug_context(pattern)),
+ }
+ }
+
+ fn borrow_tuple(&'data self) -> PatternParts<'data> {
+ #![allow(clippy::indexing_slicing)] // by invariant
+ let index_0 = self.index_0 as usize;
+ let index_1 = self.index_1 as usize;
+ (
+ &self.string[0..index_0],
+ &self.string[index_0..index_1],
+ &self.string[index_1..],
+ )
+ }
+
+ fn size_hint(&self) -> LengthHint {
+ LengthHint::exact(self.string.len())
+ }
+}
+
+#[cfg(feature = "datagen")]
+impl<'data> From<ListJoinerPattern<'data>> for ConditionalListJoinerPattern<'data> {
+ fn from(default: ListJoinerPattern<'data>) -> Self {
+ Self {
+ default,
+ special_case: None,
+ }
+ }
+}
+
+#[cfg(all(test, feature = "datagen"))]
+pub mod test {
+ use super::*;
+
+ pub fn test_patterns() -> ListFormatterPatternsV1<'static> {
+ let mut patterns = ListFormatterPatternsV1::try_new([
+ // Wide: general
+ "@{0}:{1}",
+ "{0},{1}",
+ "{0}.{1}!",
+ "${0};{1}+",
+ // Short: different pattern lengths
+ "{0}1{1}",
+ "{0}12{1}",
+ "{0}12{1}34",
+ "{0}123{1}456",
+ // Narrow: conditionals
+ "{0}: {1}",
+ "{0}, {1}",
+ "{0}. {1}",
+ "{0}. {1}",
+ ])
+ .unwrap();
+ patterns
+ .make_conditional(
+ "{0}. {1}",
+ &SerdeDFA::new(Cow::Borrowed("A")).unwrap(),
+ "{0} :o {1}",
+ )
+ .unwrap();
+ patterns
+ }
+
+ #[test]
+ fn rejects_bad_patterns() {
+ assert!(ListJoinerPattern::from_str("{0} and", true, true).is_err());
+ assert!(ListJoinerPattern::from_str("and {1}", true, true).is_err());
+ assert!(ListJoinerPattern::from_str("{1} and {0}", true, true).is_err());
+ assert!(ListJoinerPattern::from_str("{1{0}}", true, true).is_err());
+ assert!(ListJoinerPattern::from_str("{0\u{202e}} and {1}", true, true).is_err());
+ assert!(ListJoinerPattern::from_str("{{0}} {{1}}", true, true).is_ok());
+
+ assert!(ListJoinerPattern::from_str("{0} and {1} ", true, true).is_ok());
+ assert!(ListJoinerPattern::from_str("{0} and {1} ", true, false).is_err());
+ assert!(ListJoinerPattern::from_str(" {0} and {1}", true, true).is_ok());
+ assert!(ListJoinerPattern::from_str(" {0} and {1}", false, true).is_err());
+ }
+
+ #[test]
+ fn produces_correct_parts() {
+ assert_eq!(
+ test_patterns().pair(ListLength::Wide).parts(""),
+ ("$", ";", "+")
+ );
+ }
+
+ #[test]
+ fn produces_correct_parts_conditionally() {
+ assert_eq!(
+ test_patterns().end(ListLength::Narrow).parts("A"),
+ ("", " :o ", "")
+ );
+ assert_eq!(
+ test_patterns().end(ListLength::Narrow).parts("a"),
+ ("", " :o ", "")
+ );
+ assert_eq!(
+ test_patterns().end(ListLength::Narrow).parts("ab"),
+ ("", " :o ", "")
+ );
+ assert_eq!(
+ test_patterns().end(ListLength::Narrow).parts("B"),
+ ("", ". ", "")
+ );
+ assert_eq!(
+ test_patterns().end(ListLength::Narrow).parts("BA"),
+ ("", ". ", "")
+ );
+ }
+
+ #[test]
+ fn size_hint_works() {
+ let pattern = test_patterns();
+
+ assert_eq!(
+ pattern.size_hint(ListLength::Short, 0),
+ LengthHint::exact(0)
+ );
+ assert_eq!(
+ pattern.size_hint(ListLength::Short, 1),
+ LengthHint::exact(0)
+ );
+
+ // pair pattern "{0}123{1}456"
+ assert_eq!(
+ pattern.size_hint(ListLength::Short, 2),
+ LengthHint::exact(6)
+ );
+
+ // patterns "{0}1{1}", "{0}12{1}" (x197), and "{0}12{1}34"
+ assert_eq!(
+ pattern.size_hint(ListLength::Short, 200),
+ LengthHint::exact(1 + 2 * 197 + 4)
+ );
+
+ // patterns "{0}: {1}", "{0}, {1}" (x197), and "{0} :o {1}" or "{0}. {1}"
+ assert_eq!(
+ pattern.size_hint(ListLength::Narrow, 200),
+ LengthHint::exact(2 + 197 * 2) + LengthHint::between(2, 4)
+ );
+ }
+}
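
To make the compact pattern storage in the new `patterns` module above concrete, here is a small worked sketch. It uses the `datagen`-gated `ListJoinerPattern::from_str` constructor shown above; the demo function and its name are illustrative, and the stated field values follow from the `from_str` and `borrow_tuple` code in this patch:

```rust
// Hypothetical crate-internal demo; `from_str` is only available with the
// `datagen` feature, as in the code above.
#[cfg(feature = "datagen")]
fn pattern_storage_demo() -> Result<(), icu_provider::DataError> {
    use crate::provider::ListJoinerPattern;

    // "{0}, and {1}" drops its placeholders when stored: the backing string
    // becomes ", and " with index_0 = 0 and index_1 = 6, so the three parts
    // handed to the formatter are ("", ", and ", ""), i.e. prefix, joiner, suffix.
    let _end_pattern = ListJoinerPattern::from_str("{0}, and {1}", false, true)?;
    Ok(())
}
```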
diff --git a/vendor/icu_list/src/provider.rs b/vendor/icu_list/src/provider.rs
deleted file mode 100644
index 27f3e4fec..000000000
--- a/vendor/icu_list/src/provider.rs
+++ /dev/null
@@ -1,465 +0,0 @@
-// This file is part of ICU4X. For terms of use, please see the file
-// called LICENSE at the top level of the ICU4X source tree
-// (online at: https://github.com/unicode-org/icu4x/blob/main/LICENSE ).
-
-// Provider structs must be stable
-#![allow(clippy::exhaustive_structs, clippy::exhaustive_enums)]
-
-//! Data provider struct definitions for this ICU4X component.
-//!
-//! Read more about data providers: [`icu_provider`]
-
-use crate::ListLength;
-use alloc::borrow::Cow;
-use icu_provider::DataMarker;
-use icu_provider::{yoke, zerofrom};
-use writeable::{LengthHint, Writeable};
-
-pub use crate::string_matcher::StringMatcher;
-
-/// Symbols and metadata required for [`ListFormatter`](crate::ListFormatter).
-#[icu_provider::data_struct(
- AndListV1Marker = "list/and@1",
- OrListV1Marker = "list/or@1",
- UnitListV1Marker = "list/unit@1"
-)]
-#[derive(Clone, Debug)]
-#[cfg_attr(
- feature = "datagen",
- derive(serde::Serialize, databake::Bake),
- databake(path = icu_list::provider),
-)]
-pub struct ListFormatterPatternsV1<'data>(
- #[cfg_attr(feature = "datagen", serde(with = "deduplicating_array"))]
- /// The patterns in the order start, middle, end, pair, short_start, short_middle,
- /// short_end, short_pair, narrow_start, narrow_middle, narrow_end, narrow_pair,
- pub [ConditionalListJoinerPattern<'data>; 12],
-);
-
-#[cfg(feature = "serde")]
-impl<'de> serde::Deserialize<'de> for ListFormatterPatternsV1<'de> {
- fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
- where
- D: serde::de::Deserializer<'de>,
- {
- #[cfg(not(feature = "serde_human"))]
- if deserializer.is_human_readable() {
- use serde::de::Error;
- return Err(D::Error::custom(
- "Deserializing human-readable ListFormatter data requires the 'serde_human' feature",
- ));
- }
-
- Ok(ListFormatterPatternsV1(deduplicating_array::deserialize(
- deserializer,
- )?))
- }
-}
-
-pub(crate) struct ErasedListV1Marker;
-
-impl DataMarker for ErasedListV1Marker {
- type Yokeable = ListFormatterPatternsV1<'static>;
-}
-
-impl<'data> ListFormatterPatternsV1<'data> {
- pub(crate) fn start(&self, style: ListLength) -> &ConditionalListJoinerPattern<'data> {
- #![allow(clippy::indexing_slicing)] // style as usize < 3
- &self.0[4 * (style as usize)]
- }
-
- pub(crate) fn middle(&self, style: ListLength) -> &ConditionalListJoinerPattern<'data> {
- #![allow(clippy::indexing_slicing)] // style as usize < 3
- &self.0[4 * (style as usize) + 1]
- }
-
- pub(crate) fn end(&self, style: ListLength) -> &ConditionalListJoinerPattern<'data> {
- #![allow(clippy::indexing_slicing)] // style as usize < 3
- &self.0[4 * (style as usize) + 2]
- }
-
- pub(crate) fn pair(&self, style: ListLength) -> &ConditionalListJoinerPattern<'data> {
- #![allow(clippy::indexing_slicing)] // style as usize < 3
- &self.0[4 * (style as usize) + 3]
- }
-
- /// The range of the number of bytes required by the list literals to join a
- /// list of length `len`. If none of the patterns are conditional, this is exact.
- pub(crate) fn size_hint(&self, style: ListLength, len: usize) -> LengthHint {
- match len {
- 0 | 1 => LengthHint::exact(0),
- 2 => self.pair(style).size_hint(),
- n => {
- self.start(style).size_hint()
- + self.middle(style).size_hint() * (n - 3)
- + self.end(style).size_hint()
- }
- }
- }
-}
-
-/// A pattern that can behave conditionally on the next element.
-#[derive(Clone, Debug, PartialEq, yoke::Yokeable, zerofrom::ZeroFrom)]
-#[cfg_attr(
- feature = "datagen",
- derive(serde::Serialize, databake::Bake),
- databake(path = icu_list::provider),
-)]
-#[cfg_attr(feature = "serde", derive(serde::Deserialize))]
-pub struct ConditionalListJoinerPattern<'data> {
- /// The default pattern
- #[cfg_attr(feature = "serde", serde(borrow))]
- pub default: ListJoinerPattern<'data>,
- /// An optional special case
- #[cfg_attr(feature = "serde", serde(borrow))]
- pub special_case: Option<SpecialCasePattern<'data>>,
-}
-
-/// The special case of a [`ConditionalListJoinerPattern`]
-#[derive(Clone, Debug, PartialEq, yoke::Yokeable, zerofrom::ZeroFrom)]
-#[cfg_attr(
- feature = "datagen",
- derive(serde::Serialize, databake::Bake),
- databake(path = icu_list::provider),
-)]
-#[cfg_attr(feature = "serde", derive(serde::Deserialize))]
-pub struct SpecialCasePattern<'data> {
- /// The condition on the following element
- #[cfg_attr(feature = "serde", serde(borrow))]
- pub condition: StringMatcher<'data>,
- /// The pattern if the condition matches
- #[cfg_attr(feature = "serde", serde(borrow))]
- pub pattern: ListJoinerPattern<'data>,
-}
-
-/// A pattern containing two numeric placeholders ("{0}, and {1}.")
-#[derive(Clone, Debug, PartialEq, yoke::Yokeable, zerofrom::ZeroFrom)]
-#[cfg_attr(feature = "datagen", derive(serde::Serialize))]
-pub struct ListJoinerPattern<'data> {
- /// The pattern string without the placeholders
- string: Cow<'data, str>,
- /// The index of the first placeholder. Always <= index_1.
- // Always 0 for CLDR data, so we don't need to serialize it.
- // In-memory we have free space for it as index_1 doesn't
- // fill a word.
- #[cfg_attr(feature = "datagen", serde(skip))]
- index_0: u8,
- /// The index of the second placeholder. Always < string.len().
- index_1: u8,
-}
-
-#[cfg(feature = "serde")]
-impl<'de: 'data, 'data> serde::Deserialize<'de> for ListJoinerPattern<'data> {
- fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
- where
- D: serde::Deserializer<'de>,
- {
- #[derive(serde::Deserialize)]
- struct Dummy<'data> {
- #[cfg_attr(feature = "serde", serde(borrow))]
- string: Cow<'data, str>,
- index_1: u8,
- }
- let Dummy { string, index_1 } = Dummy::deserialize(deserializer)?;
-
- if index_1 as usize > string.len() {
- use serde::de::Error;
- Err(D::Error::custom("invalid index_1"))
- } else {
- Ok(ListJoinerPattern {
- string,
- index_0: 0,
- index_1,
- })
- }
- }
-}
-
-impl<'a> ListJoinerPattern<'a> {
- /// Constructs a [`ListJoinerPattern`] from raw parts. Used by databake.
- ///
- /// # Safety
- /// index_1 may be at most string.len()
- pub const unsafe fn from_parts_unchecked(string: &'a str, index_1: u8) -> Self {
- Self {
- string: Cow::Borrowed(string),
- index_0: 0,
- index_1,
- }
- }
-}
-
-pub(crate) type PatternParts<'a> = (&'a str, &'a str, &'a str);
-
-impl<'a> ConditionalListJoinerPattern<'a> {
- pub(crate) fn parts<'b, W: Writeable + ?Sized>(
- &'a self,
- following_value: &'b W,
- ) -> PatternParts<'a> {
- match &self.special_case {
- Some(SpecialCasePattern { condition, pattern })
- // TODO: Implement lookahead instead of materializing here.
- if condition.test(&*following_value.write_to_string()) =>
- {
- pattern.borrow_tuple()
- }
- _ => self.default.borrow_tuple(),
- }
- }
-
- /// The expected length of this pattern
- pub fn size_hint(&'a self) -> LengthHint {
- let mut hint = self.default.size_hint();
- if let Some(special_case) = &self.special_case {
- hint |= special_case.pattern.size_hint()
- }
- hint
- }
-}
-
-impl<'data> ListJoinerPattern<'data> {
- fn borrow_tuple(&'data self) -> PatternParts<'data> {
- #![allow(clippy::indexing_slicing)] // by invariant
- let index_0 = self.index_0 as usize;
- let index_1 = self.index_1 as usize;
- (
- &self.string[0..index_0],
- &self.string[index_0..index_1],
- &self.string[index_1..],
- )
- }
-
- fn size_hint(&self) -> LengthHint {
- LengthHint::exact(self.string.len())
- }
-}
-
-#[cfg(feature = "datagen")]
-mod datagen {
- #![allow(clippy::indexing_slicing)] // datagen
-
- use super::*;
- use icu_provider::DataError;
-
- impl<'data> ListFormatterPatternsV1<'data> {
- /// The patterns in the order start, middle, end, pair, short_start, short_middle,
- /// short_end, short_pair, narrow_start, narrow_middle, narrow_end, narrow_pair,
- pub fn try_new(patterns: [&str; 12]) -> Result<Self, DataError> {
- Ok(Self([
- ListJoinerPattern::from_str(patterns[0], true, false)?.into(),
- ListJoinerPattern::from_str(patterns[1], false, false)?.into(),
- ListJoinerPattern::from_str(patterns[2], false, true)?.into(),
- ListJoinerPattern::from_str(patterns[3], true, true)?.into(),
- ListJoinerPattern::from_str(patterns[4], true, false)?.into(),
- ListJoinerPattern::from_str(patterns[5], false, false)?.into(),
- ListJoinerPattern::from_str(patterns[6], false, true)?.into(),
- ListJoinerPattern::from_str(patterns[7], true, true)?.into(),
- ListJoinerPattern::from_str(patterns[8], true, false)?.into(),
- ListJoinerPattern::from_str(patterns[9], false, false)?.into(),
- ListJoinerPattern::from_str(patterns[10], false, true)?.into(),
- ListJoinerPattern::from_str(patterns[11], true, true)?.into(),
- ]))
- }
-
- /// Adds a special case to all `pattern`s that will evaluate to
- /// `alternative_pattern` when `regex` matches the following element.
- /// The regex is interpreted case-insensitive and anchored to the beginning, but
- /// to improve efficiency does not search for full matches. If a full match is
- /// required, use `$`.
- pub fn make_conditional(
- &mut self,
- pattern: &str,
- regex: &StringMatcher<'static>,
- alternative_pattern: &str,
- ) -> Result<(), DataError> {
- let old = ListJoinerPattern::from_str(pattern, true, true)?;
- for i in 0..12 {
- if self.0[i].default == old {
- self.0[i].special_case = Some(SpecialCasePattern {
- condition: regex.clone(),
- pattern: ListJoinerPattern::from_str(
- alternative_pattern,
- i % 4 == 0 || i % 4 == 3, // allow_prefix = start or pair
- i % 4 == 2 || i % 4 == 3, // allow_suffix = end or pair
- )?,
- });
- }
- }
- Ok(())
- }
- }
-
- impl<'data> ListJoinerPattern<'data> {
- /// Construct the pattern from a CLDR pattern string
- pub fn from_str(
- pattern: &str,
- allow_prefix: bool,
- allow_suffix: bool,
- ) -> Result<Self, DataError> {
- match (pattern.find("{0}"), pattern.find("{1}")) {
- (Some(index_0), Some(index_1))
- if index_0 < index_1
- && (allow_prefix || index_0 == 0)
- && (allow_suffix || index_1 == pattern.len() - 3) =>
- {
- if (index_0 > 0 && !cfg!(test)) || index_1 - 3 >= 256 {
- return Err(DataError::custom(
- "Found valid pattern that cannot be stored in ListFormatterPatternsV1",
- )
- .with_debug_context(pattern));
- }
- Ok(ListJoinerPattern {
- string: Cow::Owned(alloc::format!(
- "{}{}{}",
- &pattern[0..index_0],
- &pattern[index_0 + 3..index_1],
- &pattern[index_1 + 3..]
- )),
- index_0: index_0 as u8,
- index_1: (index_1 - 3) as u8,
- })
- }
- _ => Err(DataError::custom("Invalid list pattern").with_debug_context(pattern)),
- }
- }
- }
-
- impl<'data> From<ListJoinerPattern<'data>> for ConditionalListJoinerPattern<'data> {
- fn from(default: ListJoinerPattern<'data>) -> Self {
- Self {
- default,
- special_case: None,
- }
- }
- }
-
- impl databake::Bake for ListJoinerPattern<'_> {
- fn bake(&self, env: &databake::CrateEnv) -> databake::TokenStream {
- env.insert("icu_list");
- let string = (&*self.string).bake(env);
- let index_1 = self.index_1.bake(env);
- // Safe because our own data is safe
- databake::quote! { unsafe {
- ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(#string, #index_1)
- }}
- }
- }
-}
-
-#[cfg(all(test, feature = "datagen"))]
-pub(crate) mod test {
- use super::*;
-
- pub fn test_patterns() -> ListFormatterPatternsV1<'static> {
- let mut patterns = ListFormatterPatternsV1::try_new([
- // Wide: general
- "@{0}:{1}",
- "{0},{1}",
- "{0}.{1}!",
- "${0};{1}+",
- // Short: different pattern lengths
- "{0}1{1}",
- "{0}12{1}",
- "{0}12{1}34",
- "{0}123{1}456",
- // Narrow: conditionals
- "{0}: {1}",
- "{0}, {1}",
- "{0}. {1}",
- "{0}. {1}",
- ])
- .unwrap();
- patterns
- .make_conditional("{0}. {1}", &StringMatcher::new("A").unwrap(), "{0} :o {1}")
- .unwrap();
- patterns
- }
-
- #[test]
- fn rejects_bad_patterns() {
- assert!(ListJoinerPattern::from_str("{0} and", true, true).is_err());
- assert!(ListJoinerPattern::from_str("and {1}", true, true).is_err());
- assert!(ListJoinerPattern::from_str("{1} and {0}", true, true).is_err());
- assert!(ListJoinerPattern::from_str("{1{0}}", true, true).is_err());
- assert!(ListJoinerPattern::from_str("{0\u{202e}} and {1}", true, true).is_err());
- assert!(ListJoinerPattern::from_str("{{0}} {{1}}", true, true).is_ok());
-
- assert!(ListJoinerPattern::from_str("{0} and {1} ", true, true).is_ok());
- assert!(ListJoinerPattern::from_str("{0} and {1} ", true, false).is_err());
- assert!(ListJoinerPattern::from_str(" {0} and {1}", true, true).is_ok());
- assert!(ListJoinerPattern::from_str(" {0} and {1}", false, true).is_err());
- }
-
- #[test]
- fn produces_correct_parts() {
- assert_eq!(
- test_patterns().pair(ListLength::Wide).parts(""),
- ("$", ";", "+")
- );
- }
-
- #[test]
- fn produces_correct_parts_conditionally() {
- assert_eq!(
- test_patterns().end(ListLength::Narrow).parts("A"),
- ("", " :o ", "")
- );
- assert_eq!(
- test_patterns().end(ListLength::Narrow).parts("a"),
- ("", " :o ", "")
- );
- assert_eq!(
- test_patterns().end(ListLength::Narrow).parts("ab"),
- ("", " :o ", "")
- );
- assert_eq!(
- test_patterns().end(ListLength::Narrow).parts("B"),
- ("", ". ", "")
- );
- assert_eq!(
- test_patterns().end(ListLength::Narrow).parts("BA"),
- ("", ". ", "")
- );
- }
-
- #[test]
- fn size_hint_works() {
- let pattern = test_patterns();
-
- assert_eq!(
- pattern.size_hint(ListLength::Short, 0),
- LengthHint::exact(0)
- );
- assert_eq!(
- pattern.size_hint(ListLength::Short, 1),
- LengthHint::exact(0)
- );
-
- // pair pattern "{0}123{1}456"
- assert_eq!(
- pattern.size_hint(ListLength::Short, 2),
- LengthHint::exact(6)
- );
-
- // patterns "{0}1{1}", "{0}12{1}" (x197), and "{0}12{1}34"
- assert_eq!(
- pattern.size_hint(ListLength::Short, 200),
- LengthHint::exact(1 + 2 * 197 + 4)
- );
-
- // patterns "{0}: {1}", "{0}, {1}" (x197), and "{0} :o {1}" or "{0}. {1}"
- assert_eq!(
- pattern.size_hint(ListLength::Narrow, 200),
- LengthHint::exact(2 + 197 * 2) + LengthHint::between(2, 4)
- );
- }
-
- #[test]
- fn databake() {
- databake::test_bake!(
- ListJoinerPattern,
- const: unsafe { crate::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) },
- icu_list
- );
- }
-}
diff --git a/vendor/icu_list/src/provider/mod.rs b/vendor/icu_list/src/provider/mod.rs
new file mode 100644
index 000000000..efab7c8bc
--- /dev/null
+++ b/vendor/icu_list/src/provider/mod.rs
@@ -0,0 +1,261 @@
+// This file is part of ICU4X. For terms of use, please see the file
+// called LICENSE at the top level of the ICU4X source tree
+// (online at: https://github.com/unicode-org/icu4x/blob/main/LICENSE ).
+
+// Provider structs must be stable
+#![allow(clippy::exhaustive_structs, clippy::exhaustive_enums)]
+
+//! 🚧 \[Unstable\] Data provider struct definitions for this ICU4X component.
+//!
+//! <div class="stab unstable">
+//! 🚧 This code is considered unstable; it may change at any time, in breaking or non-breaking ways,
+//! including in SemVer minor releases. While the serde representation of data structs is guaranteed
+//! to be stable, their Rust representation might not be. Use with caution.
+//! </div>
+//!
+//! Read more about data providers: [`icu_provider`]
+
+use crate::ListLength;
+use alloc::borrow::Cow;
+use icu_provider::DataMarker;
+use icu_provider::{yoke, zerofrom};
+
+mod serde_dfa;
+pub use serde_dfa::SerdeDFA;
+
+/// Symbols and metadata required for [`ListFormatter`](crate::ListFormatter).
+///
+/// <div class="stab unstable">
+/// 🚧 This code is considered unstable; it may change at any time, in breaking or non-breaking ways,
+/// including in SemVer minor releases. While the serde representation of data structs is guaranteed
+/// to be stable, their Rust representation might not be. Use with caution.
+/// </div>
+#[icu_provider::data_struct(
+ AndListV1Marker = "list/and@1",
+ OrListV1Marker = "list/or@1",
+ UnitListV1Marker = "list/unit@1"
+)]
+#[derive(Clone, Debug)]
+#[cfg_attr(
+ feature = "datagen",
+ derive(serde::Serialize, databake::Bake),
+ databake(path = icu_list::provider),
+)]
+pub struct ListFormatterPatternsV1<'data>(
+ #[cfg_attr(feature = "datagen", serde(with = "deduplicating_array"))]
+ /// The patterns, in the order: start, middle, end, pair, short_start, short_middle,
+ /// short_end, short_pair, narrow_start, narrow_middle, narrow_end, narrow_pair.
+ pub [ConditionalListJoinerPattern<'data>; 12],
+);
+
+#[cfg(feature = "serde")]
+impl<'de> serde::Deserialize<'de> for ListFormatterPatternsV1<'de> {
+ fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+ where
+ D: serde::de::Deserializer<'de>,
+ {
+ #[cfg(not(feature = "serde_human"))]
+ if deserializer.is_human_readable() {
+ use serde::de::Error;
+ return Err(D::Error::custom(
+ "Deserializing human-readable ListFormatter data requires the 'serde_human' feature",
+ ));
+ }
+
+ Ok(ListFormatterPatternsV1(deduplicating_array::deserialize(
+ deserializer,
+ )?))
+ }
+}
+
+pub(crate) struct ErasedListV1Marker;
+
+impl DataMarker for ErasedListV1Marker {
+ type Yokeable = ListFormatterPatternsV1<'static>;
+}
+
+impl<'data> ListFormatterPatternsV1<'data> {
+ pub(crate) fn start(&self, style: ListLength) -> &ConditionalListJoinerPattern<'data> {
+ #![allow(clippy::indexing_slicing)] // style as usize < 3
+ &self.0[4 * (style as usize)]
+ }
+
+ pub(crate) fn middle(&self, style: ListLength) -> &ConditionalListJoinerPattern<'data> {
+ #![allow(clippy::indexing_slicing)] // style as usize < 3
+ &self.0[4 * (style as usize) + 1]
+ }
+
+ pub(crate) fn end(&self, style: ListLength) -> &ConditionalListJoinerPattern<'data> {
+ #![allow(clippy::indexing_slicing)] // style as usize < 3
+ &self.0[4 * (style as usize) + 2]
+ }
+
+ pub(crate) fn pair(&self, style: ListLength) -> &ConditionalListJoinerPattern<'data> {
+ #![allow(clippy::indexing_slicing)] // style as usize < 3
+ &self.0[4 * (style as usize) + 3]
+ }
+}
+
+/// A pattern that can behave conditionally on the next element.
+///
+/// <div class="stab unstable">
+/// 🚧 This code is considered unstable; it may change at any time, in breaking or non-breaking ways,
+/// including in SemVer minor releases. While the serde representation of data structs is guaranteed
+/// to be stable, their Rust representation might not be. Use with caution.
+/// </div>
+#[derive(Clone, Debug, yoke::Yokeable, zerofrom::ZeroFrom)]
+#[cfg_attr(
+ feature = "datagen",
+ derive(PartialEq, serde::Serialize, databake::Bake),
+ databake(path = icu_list::provider),
+)]
+#[cfg_attr(feature = "serde", derive(serde::Deserialize))]
+pub struct ConditionalListJoinerPattern<'data> {
+ /// The default pattern
+ #[cfg_attr(feature = "serde", serde(borrow))]
+ pub default: ListJoinerPattern<'data>,
+ /// An optional special case
+ #[cfg_attr(
+ feature = "serde",
+ serde(borrow, deserialize_with = "SpecialCasePattern::deserialize_option")
+ )]
+ pub special_case: Option<SpecialCasePattern<'data>>,
+}
+
+/// The special case of a [`ConditionalListJoinerPattern`]
+///
+/// <div class="stab unstable">
+/// 🚧 This code is considered unstable; it may change at any time, in breaking or non-breaking ways,
+/// including in SemVer minor releases. While the serde representation of data structs is guaranteed
+/// to be stable, their Rust representation might not be. Use with caution.
+/// </div>
+#[derive(Clone, Debug, yoke::Yokeable, zerofrom::ZeroFrom)]
+#[cfg_attr(
+ feature = "datagen",
+ derive(PartialEq, serde::Serialize, databake::Bake),
+ databake(path = icu_list::provider),
+)]
+pub struct SpecialCasePattern<'data> {
+ /// The condition on the following element
+ pub condition: SerdeDFA<'data>,
+ /// The pattern if the condition matches
+ pub pattern: ListJoinerPattern<'data>,
+}
+
+#[cfg(feature = "serde")]
+impl<'data> SpecialCasePattern<'data> {
+ // If the condition doesn't deserialize, the whole special case becomes `None`
+ fn deserialize_option<'de: 'data, D>(deserializer: D) -> Result<Option<Self>, D::Error>
+ where
+ D: serde::de::Deserializer<'de>,
+ {
+ use serde::Deserialize;
+
+ #[derive(Deserialize)]
+ struct SpecialCasePatternOptionalDfa<'data> {
+ #[cfg_attr(
+ feature = "serde",
+ serde(borrow, deserialize_with = "SerdeDFA::maybe_deserialize")
+ )]
+ pub condition: Option<SerdeDFA<'data>>,
+ #[cfg_attr(feature = "serde", serde(borrow))]
+ pub pattern: ListJoinerPattern<'data>,
+ }
+
+ Ok(
+ match Option::<SpecialCasePatternOptionalDfa<'data>>::deserialize(deserializer)? {
+ Some(SpecialCasePatternOptionalDfa {
+ condition: Some(condition),
+ pattern,
+ }) => Some(SpecialCasePattern { condition, pattern }),
+ _ => None,
+ },
+ )
+ }
+}
+
+/// A pattern containing two numeric placeholders ("{0}, and {1}.")
+///
+/// <div class="stab unstable">
+/// 🚧 This code is considered unstable; it may change at any time, in breaking or non-breaking ways,
+/// including in SemVer minor releases. While the serde representation of data structs is guaranteed
+/// to be stable, their Rust representation might not be. Use with caution.
+/// </div>
+#[derive(Clone, Debug, PartialEq, yoke::Yokeable, zerofrom::ZeroFrom)]
+#[cfg_attr(feature = "datagen", derive(serde::Serialize))]
+pub struct ListJoinerPattern<'data> {
+ /// The pattern string without the placeholders
+ pub(crate) string: Cow<'data, str>,
+ /// The index of the first placeholder. Always <= index_1.
+ // Always 0 for CLDR data, so we don't need to serialize it.
+ // In-memory we have free space for it as index_1 doesn't
+ // fill a word.
+ #[cfg_attr(feature = "datagen", serde(skip))]
+ pub(crate) index_0: u8,
+ /// The index of the second placeholder. Always <= string.len().
+ pub(crate) index_1: u8,
+}
+
+#[cfg(feature = "serde")]
+impl<'de: 'data, 'data> serde::Deserialize<'de> for ListJoinerPattern<'data> {
+ fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+ where
+ D: serde::Deserializer<'de>,
+ {
+ #[derive(serde::Deserialize)]
+ struct Dummy<'data> {
+ #[cfg_attr(feature = "serde", serde(borrow))]
+ string: Cow<'data, str>,
+ index_1: u8,
+ }
+ let Dummy { string, index_1 } = Dummy::deserialize(deserializer)?;
+
+ if index_1 as usize > string.len() {
+ use serde::de::Error;
+ Err(D::Error::custom("invalid index_1"))
+ } else {
+ Ok(ListJoinerPattern {
+ string,
+ index_0: 0,
+ index_1,
+ })
+ }
+ }
+}
+
+impl<'a> ListJoinerPattern<'a> {
+ /// Constructs a [`ListJoinerPattern`] from raw parts. Used by databake.
+ ///
+ /// # Safety
+ /// `index_1` must be at most `string.len()`.
+ pub const unsafe fn from_parts_unchecked(string: &'a str, index_1: u8) -> Self {
+ Self {
+ string: Cow::Borrowed(string),
+ index_0: 0,
+ index_1,
+ }
+ }
+}
+
+#[cfg(feature = "datagen")]
+impl databake::Bake for ListJoinerPattern<'_> {
+ fn bake(&self, env: &databake::CrateEnv) -> databake::TokenStream {
+ env.insert("icu_list");
+ let string = (&*self.string).bake(env);
+ let index_1 = self.index_1.bake(env);
+ // Safe because `self` already upholds the `from_parts_unchecked` invariant (index_1 <= string.len())
+ databake::quote! { unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(#string, #index_1)
+ }}
+ }
+}
+
+#[cfg(all(test, feature = "datagen"))]
+#[test]
+fn databake() {
+ databake::test_bake!(
+ ListJoinerPattern,
+ const: unsafe { crate::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) },
+ icu_list
+ );
+}
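
The new provider stores all twelve joiner patterns in one flat array, indexed as 4 * (style as usize) + offset with offset 0..=3 for start/middle/end/pair (see the accessors above). A minimal sketch of that arithmetic, assuming ListLength discriminates as Wide = 0, Short = 1, Narrow = 2 as the field ordering documented above suggests; the helper and its names are illustrative and not part of this diff:

    // Illustrative only: mirrors ListFormatterPatternsV1::{start, middle, end, pair}.
    fn pattern_index(style: usize, position: usize) -> usize {
        // position: 0 = start, 1 = middle, 2 = end, 3 = pair; style < 3 by construction.
        4 * style + position
    }

    fn main() {
        assert_eq!(pattern_index(0, 0), 0);  // wide start
        assert_eq!(pattern_index(1, 2), 6);  // short end
        assert_eq!(pattern_index(2, 3), 11); // narrow pair, the last of the 12 slots
    }
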
diff --git a/vendor/icu_list/src/provider/serde_dfa.rs b/vendor/icu_list/src/provider/serde_dfa.rs
new file mode 100644
index 000000000..e2424e1e9
--- /dev/null
+++ b/vendor/icu_list/src/provider/serde_dfa.rs
@@ -0,0 +1,244 @@
+// This file is part of ICU4X. For terms of use, please see the file
+// called LICENSE at the top level of the ICU4X source tree
+// (online at: https://github.com/unicode-org/icu4x/blob/main/LICENSE ).
+
+use alloc::borrow::Cow;
+use icu_provider::{yoke, zerofrom};
+use regex_automata::dfa::sparse::DFA;
+
+/// A serde-compatible version of [`regex_automata::dfa::sparse::DFA`]. This does not implement
+/// [`serde::Deserialize`] directly, as binary deserialization is not supported on big-endian
+/// platforms. `Self::maybe_deserialize` can be used to deserialize to `Option<SerdeDFA>`.
+///
+/// <div class="stab unstable">
+/// 🚧 This code is considered unstable; it may change at any time, in breaking or non-breaking ways,
+/// including in SemVer minor releases. While the serde representation of data structs is guaranteed
+/// to be stable, their Rust representation might not be. Use with caution.
+/// </div>
+#[derive(Clone, Debug, yoke::Yokeable, zerofrom::ZeroFrom)]
+pub struct SerdeDFA<'data> {
+ // Safety: These always represent a valid DFA (DFA::from_bytes(dfa_bytes).is_ok())
+ dfa_bytes: Cow<'data, [u8]>,
+ pattern: Option<Cow<'data, str>>,
+}
+
+#[cfg(feature = "datagen")]
+impl PartialEq for SerdeDFA<'_> {
+ fn eq(&self, other: &Self) -> bool {
+ self.dfa_bytes == other.dfa_bytes
+ }
+}
+
+#[cfg(feature = "datagen")]
+impl databake::Bake for SerdeDFA<'_> {
+ fn bake(&self, env: &databake::CrateEnv) -> databake::TokenStream {
+ env.insert("icu_list");
+ let le_bytes = self.deref().to_bytes_little_endian().as_slice().bake(env);
+ let be_bytes = self.deref().to_bytes_big_endian().as_slice().bake(env);
+ // Safe because of `to_bytes_little_endian`/`to_bytes_big_endian`'s invariant.
+ databake::quote! {
+ unsafe {
+ ::icu_list::provider::SerdeDFA::from_dfa_bytes_unchecked(
+ if cfg!(target_endian = "little") {
+ &#le_bytes
+ } else {
+ &#be_bytes
+ }
+ )
+ }
+ }
+ }
+}
+
+#[cfg(feature = "datagen")]
+impl serde::Serialize for SerdeDFA<'_> {
+ fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+ where
+ S: serde::ser::Serializer,
+ {
+ if serializer.is_human_readable() {
+ self.pattern
+ .as_ref()
+ .map(|pattern| pattern.serialize(serializer))
+ .unwrap_or_else(|| {
+ use serde::ser::Error;
+ Err(S::Error::custom(
+ "cannot serialize a deserialized bincode SerdeDFA to JSON",
+ ))
+ })
+ } else {
+ self.deref().to_bytes_little_endian().serialize(serializer)
+ }
+ }
+}
+
+#[cfg(feature = "serde")]
+impl<'data> SerdeDFA<'data> {
+ /// Deserializes to `Option<Self>`. Will return `None` for non-human-readable serialization
+ /// formats on big-endian systems, as `regex_automata` serialization is endian-sensitive.
+ pub fn maybe_deserialize<'de: 'data, D>(deserializer: D) -> Result<Option<Self>, D::Error>
+ where
+ D: serde::de::Deserializer<'de>,
+ {
+ use icu_provider::serde::borrow_de_utils::CowBytesWrap;
+ use serde::Deserialize;
+
+ #[cfg(feature = "serde_human")]
+ if deserializer.is_human_readable() {
+ #[cfg(not(feature = "std"))]
+ use alloc::string::ToString;
+ use serde::de::Error;
+ return SerdeDFA::new(Cow::<str>::deserialize(deserializer)?)
+ .map(Some)
+ .map_err(|e| D::Error::custom(e.to_string()));
+ }
+
+ let dfa_bytes = <CowBytesWrap<'de>>::deserialize(deserializer)?.0;
+
+ if cfg!(target_endian = "big") {
+ return Ok(None);
+ }
+
+ // Verify safety invariant
+ DFA::from_bytes(&dfa_bytes).map_err(|e| {
+ use serde::de::Error;
+ D::Error::custom(alloc::format!("Invalid DFA bytes: {}", e))
+ })?;
+
+ Ok(Some(SerdeDFA {
+ dfa_bytes,
+ pattern: None,
+ }))
+ }
+}
+
+impl<'data> SerdeDFA<'data> {
+ /// Creates a `SerdeDFA` from raw bytes. Used internally by databake.
+ ///
+ /// # Safety
+ ///
+ /// `dfa_bytes` must encode a valid sparse DFA, i.e. `regex_automata::dfa::sparse::DFA::from_bytes(dfa_bytes).is_ok()` must hold.
+ pub const unsafe fn from_dfa_bytes_unchecked(dfa_bytes: &'data [u8]) -> Self {
+ Self {
+ dfa_bytes: Cow::Borrowed(dfa_bytes),
+ pattern: None,
+ }
+ }
+
+ /// Creates a `SerdeDFA` from a regex.
+ #[cfg(any(feature = "datagen", feature = "serde_human",))]
+ pub fn new(pattern: Cow<'data, str>) -> Result<Self, icu_provider::DataError> {
+ use regex_automata::{
+ dfa::dense::{Builder, Config},
+ SyntaxConfig,
+ };
+
+ let mut builder = Builder::new();
+ let dfa = builder
+ .syntax(SyntaxConfig::new().case_insensitive(true))
+ .configure(Config::new().anchored(true).minimize(true))
+ .build(&pattern)
+ .map_err(|_| {
+ icu_provider::DataError::custom("Cannot build DFA").with_display_context(&pattern)
+ })?
+ .to_sparse()
+ .map_err(|_| {
+ icu_provider::DataError::custom("Cannot sparsify DFA")
+ .with_display_context(&pattern)
+ })?;
+
+ Ok(Self {
+ dfa_bytes: dfa.to_bytes_native_endian().into(),
+ pattern: Some(pattern),
+ })
+ }
+
+ /// Returns the represented [`DFA`]
+ #[allow(clippy::unwrap_used)] // by invariant
+ pub fn deref(&'data self) -> DFA<&'data [u8]> {
+ // Safe due to struct invariant.
+ unsafe { DFA::from_bytes_unchecked(&self.dfa_bytes).unwrap().0 }
+ }
+}
+
+#[cfg(all(test, feature = "datagen"))]
+mod test {
+ use super::*;
+
+ #[test]
+ fn test_serde_dfa() {
+ use regex_automata::dfa::Automaton;
+
+ let matcher = SerdeDFA::new(Cow::Borrowed("abc")).unwrap();
+
+ assert!(matcher.deref().find_earliest_fwd(b"ab").unwrap().is_none());
+ assert!(matcher.deref().find_earliest_fwd(b"abc").unwrap().is_some());
+ assert!(matcher
+ .deref()
+ .find_earliest_fwd(b"abcde")
+ .unwrap()
+ .is_some());
+ assert!(matcher
+ .deref()
+ .find_earliest_fwd(b" abcde")
+ .unwrap()
+ .is_none());
+ }
+
+ #[derive(serde::Deserialize)]
+ struct OptionSerdeDFA<'data>(
+ #[serde(borrow, deserialize_with = "SerdeDFA::maybe_deserialize")] Option<SerdeDFA<'data>>,
+ );
+
+ #[test]
+ #[cfg(target_endian = "little")]
+ fn test_postcard_serialization() {
+ let matcher = SerdeDFA::new(Cow::Borrowed("abc*")).unwrap();
+
+ let mut bytes = postcard::to_stdvec(&matcher).unwrap();
+ assert_eq!(
+ postcard::from_bytes::<OptionSerdeDFA>(&bytes).unwrap().0,
+ Some(matcher)
+ );
+
+ // A corrupted byte leads to an error
+ bytes[17] ^= 255;
+ assert!(postcard::from_bytes::<OptionSerdeDFA>(&bytes).is_err());
+ bytes[17] ^= 255;
+
+ // An extra byte leads to an error
+ bytes.insert(123, 40);
+ assert!(postcard::from_bytes::<OptionSerdeDFA>(&bytes).is_err());
+ bytes.remove(123);
+
+ // Missing bytes lead to an error
+ assert!(postcard::from_bytes::<OptionSerdeDFA>(&bytes[0..bytes.len() - 5]).is_err());
+ }
+
+ #[test]
+ #[cfg(feature = "serde_human")]
+ fn test_json_serialization() {
+ let matcher = SerdeDFA::new(Cow::Borrowed("abc*")).unwrap();
+
+ let json = serde_json::to_string(&matcher).unwrap();
+ assert_eq!(
+ serde_json::from_str::<OptionSerdeDFA>(&json).unwrap().0,
+ Some(matcher)
+ );
+ assert!(serde_json::from_str::<OptionSerdeDFA>(".*[").is_err());
+ }
+
+ #[test]
+ #[ignore] // https://github.com/rust-lang/rust/issues/98906
+ fn databake() {
+ databake::test_bake!(
+ SerdeDFA,
+ const: unsafe { crate::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") {
+ &[1] // TODO: set this when activating the test
+ } else {
+ &[2] // TODO: set this when activating the test
+ })},
+ icu_list
+ );
+ }
+}
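
As a usage sketch of the new SerdeDFA (not part of this diff): a matcher is built with `new` and queried through `deref`, exactly as the `test_serde_dfa` test above does. This assumes the `datagen` feature and regex_automata's `Automaton` trait for `find_earliest_fwd`; matching is anchored and case-insensitive per the builder configuration in `SerdeDFA::new`.

    use regex_automata::dfa::Automaton;
    use std::borrow::Cow;

    fn demo() {
        // Compile an anchored, case-insensitive sparse DFA from a pattern string.
        let matcher = icu_list::provider::SerdeDFA::new(Cow::Borrowed("abc")).unwrap();
        // `deref` reconstructs the DFA view over the stored bytes.
        assert!(matcher.deref().find_earliest_fwd(b"abcde").unwrap().is_some());
        // Anchored: a leading non-matching byte means no match.
        assert!(matcher.deref().find_earliest_fwd(b" abc").unwrap().is_none());
    }
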
diff --git a/vendor/icu_list/src/string_matcher.rs b/vendor/icu_list/src/string_matcher.rs
deleted file mode 100644
index ba4833605..000000000
--- a/vendor/icu_list/src/string_matcher.rs
+++ /dev/null
@@ -1,213 +0,0 @@
-// This file is part of ICU4X. For terms of use, please see the file
-// called LICENSE at the top level of the ICU4X source tree
-// (online at: https://github.com/unicode-org/icu4x/blob/main/LICENSE ).
-
-use alloc::borrow::Cow;
-#[cfg(any(feature = "serde_human", feature = "datagen"))]
-use alloc::string::ToString;
-use icu_provider::{yoke, zerofrom};
-use regex_automata::dfa::sparse::DFA;
-use regex_automata::dfa::Automaton;
-
-/// A precompiled regex
-#[derive(Clone, Debug, yoke::Yokeable, zerofrom::ZeroFrom)]
-#[allow(clippy::exhaustive_structs)] // not a public API
-pub struct StringMatcher<'data> {
- // Safety: These always represent a valid DFA (DFA::from_bytes(dfa_bytes).is_ok())
- dfa_bytes: Cow<'data, [u8]>,
- pattern: Option<Cow<'data, str>>,
-}
-
-impl PartialEq for StringMatcher<'_> {
- fn eq(&self, other: &Self) -> bool {
- self.dfa_bytes == other.dfa_bytes
- }
-}
-
-#[cfg(feature = "datagen")]
-impl databake::Bake for StringMatcher<'_> {
- fn bake(&self, env: &databake::CrateEnv) -> databake::TokenStream {
- env.insert("icu_list");
- let bytes = (&&*self.dfa_bytes).bake(env);
- // Safe because our own data is safe
- databake::quote! {
- unsafe { ::icu_list::provider::StringMatcher::from_dfa_bytes_unchecked(#bytes) }
- }
- }
-}
-
-#[cfg(feature = "datagen")]
-impl serde::Serialize for StringMatcher<'_> {
- fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
- where
- S: serde::ser::Serializer,
- {
- if serializer.is_human_readable() {
- self.pattern
- .as_ref()
- .map(|pattern| pattern.serialize(serializer))
- .unwrap_or_else(|| {
- use serde::ser::Error;
- Err(S::Error::custom(
- "cannot serialize a deserialized bincode StringMatcher to JSON",
- ))
- })
- } else {
- self.dfa_bytes.serialize(serializer)
- }
- }
-}
-
-#[cfg(feature = "serde")]
-impl<'de: 'data, 'data> serde::Deserialize<'de> for StringMatcher<'data> {
- fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
- where
- D: serde::de::Deserializer<'de>,
- {
- use icu_provider::serde::borrow_de_utils::CowBytesWrap;
-
- #[cfg(feature = "serde_human")]
- if deserializer.is_human_readable() {
- use serde::de::Error;
- return StringMatcher::new(<&str>::deserialize(deserializer)?)
- .map_err(|e| D::Error::custom(e.to_string()));
- }
-
- if cfg!(target_endian = "big") {
- // TODO: Convert LE to BE. For now we just behave like the
- // accept-nothing DFA on BE systems.
- return Ok(StringMatcher {
- dfa_bytes: Cow::Borrowed(&[]),
- pattern: None,
- });
- }
-
- let dfa_bytes = <CowBytesWrap<'de>>::deserialize(deserializer)?.0;
-
- // Verify safety invariant
- DFA::from_bytes(&dfa_bytes).map_err(|e| {
- use serde::de::Error;
- D::Error::custom(alloc::format!("Invalid DFA bytes: {}", e))
- })?;
-
- Ok(StringMatcher {
- dfa_bytes,
- pattern: None,
- })
- }
-}
-
-impl<'data> StringMatcher<'data> {
- /// Creates a `StringMatcher` from a serialized DFA. Used internally by databake.
- ///
- /// # Safety
- ///
- /// `dfa_bytes` has to be a valid DFA (regex_automata::dfa::sparse::DFA::from_bytes(dfa_bytes).is_ok())
- pub const unsafe fn from_dfa_bytes_unchecked(dfa_bytes: &'data [u8]) -> Self {
- Self {
- dfa_bytes: Cow::Borrowed(dfa_bytes),
- pattern: None,
- }
- }
-
- /// Creates a `StringMatcher` from regex.
- #[cfg(any(feature = "datagen", feature = "serde_human",))]
- pub fn new(pattern: &str) -> Result<Self, icu_provider::DataError> {
- use regex_automata::{
- dfa::dense::{Builder, Config},
- SyntaxConfig,
- };
-
- let mut builder = Builder::new();
- let dfa = builder
- .syntax(SyntaxConfig::new().case_insensitive(true))
- .configure(Config::new().anchored(true).minimize(true))
- .build(pattern)
- .map_err(|_| {
- icu_provider::DataError::custom("Cannot build DFA").with_display_context(&pattern)
- })?
- .to_sparse()
- .map_err(|_| {
- icu_provider::DataError::custom("Cannot sparsify DFA")
- .with_display_context(&pattern)
- })?;
-
- Ok(Self {
- dfa_bytes: dfa.to_bytes_little_endian().into(),
- pattern: Some(pattern.to_string().into()),
- })
- }
-
- #[allow(clippy::unwrap_used)] // by invariant
- pub(crate) fn test(&self, string: &str) -> bool {
- cfg!(target_endian = "little")
- && matches!(
- // Safe due to struct invariant.
- unsafe { DFA::from_bytes_unchecked(&self.dfa_bytes).unwrap().0 }
- .find_earliest_fwd(string.as_bytes()),
- Ok(Some(_))
- )
- }
-}
-
-#[cfg(all(test, feature = "datagen"))]
-mod test {
- use super::*;
-
- #[test]
- fn test_string_matcher() {
- let matcher = StringMatcher::new("abc.*").unwrap();
- assert!(!matcher.test("ab"));
- assert!(matcher.test("abc"));
- assert!(matcher.test("abcde"));
- }
-
- #[test]
- fn test_postcard_serialization() {
- let matcher = StringMatcher::new("abc*").unwrap();
-
- let mut bytes = postcard::to_stdvec(&matcher).unwrap();
- assert_eq!(
- postcard::from_bytes::<StringMatcher>(&bytes).unwrap(),
- matcher
- );
-
- // A corrupted byte leads to an error
- bytes[17] ^= 255;
- assert!(postcard::from_bytes::<StringMatcher>(&bytes).is_err());
- bytes[17] ^= 255;
-
- // An extra byte leads to an error
- bytes.insert(123, 40);
- assert!(postcard::from_bytes::<StringMatcher>(&bytes).is_err());
- bytes.remove(123);
-
- // Missing bytes lead to an error
- assert!(postcard::from_bytes::<StringMatcher>(&bytes[0..bytes.len() - 5]).is_err());
- }
-
- #[test]
- #[cfg(feature = "serde_human")]
- fn test_json_serialization() {
- let matcher = StringMatcher::new("abc*").unwrap();
-
- let json = serde_json::to_string(&matcher).unwrap();
- assert_eq!(
- serde_json::from_str::<StringMatcher>(&json).unwrap(),
- matcher
- );
- assert!(serde_json::from_str::<StringMatcher>(".*[").is_err());
- }
-
- #[test]
- #[ignore] // https://github.com/rust-lang/rust/issues/98906
- fn databake() {
- databake::test_bake!(
- StringMatcher,
- const: unsafe {
- crate::provider::StringMatcher::from_dfa_bytes_unchecked(&[49u8, 50u8, 51u8, ])
- },
- icu_list
- );
- }
-}
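
The deleted StringMatcher is effectively absorbed into provider::SerdeDFA above: the stored DFA bytes and their safety invariant are unchanged, while the boolean `test` helper is dropped and callers are expected to match through `deref()` instead. A rough equivalent of the old `test` method, written as an assumption about how call sites adapt rather than code from this diff:

    use regex_automata::dfa::Automaton;

    // Roughly what StringMatcher::test did, expressed against SerdeDFA.
    fn matches<'a>(dfa: &'a icu_list::provider::SerdeDFA<'a>, next_element: &str) -> bool {
        matches!(
            dfa.deref().find_earliest_fwd(next_element.as_bytes()),
            Ok(Some(_))
        )
    }
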
diff --git a/vendor/icu_locid/.cargo-checksum.json b/vendor/icu_locid/.cargo-checksum.json
index 1f1097bd7..1ea6414e8 100644
--- a/vendor/icu_locid/.cargo-checksum.json
+++ b/vendor/icu_locid/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.lock":"6bf9c8304a3fe9f99d7189f9a082be2c7859ea164976975069f8fd2f7f80bbbd","Cargo.toml":"44c6bcdc448226df67e425cb00bf02596c96d0a0bfcb3951d3a5d0998afaa60d","LICENSE":"4ad7541d66a407234e2c84902124cef325c29f3e966353efdb800bedb8b8da21","README.md":"d0e5ced27519cf715a66dc4fece18c8cacece8dbb81eb1e03ab82dd57f0bc7f5","benches/fixtures/langid.json":"373c11527653c63c685c9e229a8de5ae2b557c25b686a9d891c59e1f603232d8","benches/fixtures/locale.json":"669b19db933094290a45bf856559920f4e92401072e364ac82c482119dc9233a","benches/fixtures/mod.rs":"9a9671eddcf38a6faa10cb814949f8abc15d89f5e70f3ad6f684f1bc3ffe72ea","benches/fixtures/subtags.json":"28be3a639e452d713e807d5779b6819e06277e2dbbf67801ef34964fb9b074b6","benches/helpers/macros.rs":"bba0945a826bc083156bc302507c48c0c99c4d965e2a84352644d768591b0339","benches/helpers/mod.rs":"c98167d866fdb7f66c8cab41e8d57b5aab9e9707dfc66c37ef136e088dac6fef","benches/iai_langid.rs":"675ab67edc2820894e1179e97e3aad6037957084efa07e494c17c40f3c0bbe35","benches/langid.rs":"4e3d307d48fd9071308a567a0ef927b229814978abd2ba29f57c65edd51f38e4","benches/locale.rs":"b8d5b1e3f8b5578c549a5149229656fb60de26b76a1bf66b6c1abce75042d674","benches/subtags.rs":"e7e80dabaf31bf031779456614f139cafcdadb805986e71b49133ac964928432","examples/filter_langids.rs":"28bea5b7dc715d6c00694437c3f12a72cf68dc984bb13acbb7b1ce5f97c5726a","examples/syntatically_canonicalize_locales.rs":"de97579c82f1670629d077a6216ecce06761da28715387f46250f81b8172ae6b","src/extensions/mod.rs":"76efffe1c99da3ef61a93f8174267e4b0b63abc3283ec8e0c5170ebc582263fe","src/extensions/other/mod.rs":"4216cd8a4dcef13105b48e507659920feaaa3fa3aebc2ba8d7702b40bbec2881","src/extensions/other/subtag.rs":"cb52ec1acec55e4c0e1d37cc5a552d11010051d827786202702257c8fcd96c49","src/extensions/private/mod.rs":"961bfb455114ad7166beb5acb36a1b182d2e81d99cccbfd3b3bf68853cae490d","src/extensions/private/other.rs":"586fd24398e78c5fda0afdb98de28a6467afd2d702683daf5dfab2a6c45af1e9","src/extensions/transform/fields.rs":"376ae5862329709d54b262a6d91be97bb02fc5e0198f30be8a2f4b0adc420c8b","src/extensions/transform/key.rs":"53e8c9ce13f00f678c2322855cc1d90afd91cd33a2af3758d098b7bbcc7090e5","src/extensions/transform/mod.rs":"c932d7e4484ac3bf3c9fe0c63b17847d8cb29f8874d71cd17070e63b8bca5998","src/extensions/transform/value.rs":"153c4edeb987e052dafe0790bcda560da4dcfa6897e5aaf3f62ae772b0605009","src/extensions/unicode/attribute.rs":"d558a193b72f54cdb03afe8e023a145ac74832c8416ca55401cd417ebba2431c","src/extensions/unicode/attributes.rs":"f2f13714750035ff805455b43ba665710978d13b90a53358314e798662c436b6","src/extensions/unicode/key.rs":"6c8694527079c5dd5f03f8e85f23ae6b5aa4b47899d1047036960e8400dca7de","src/extensions/unicode/keywords.rs":"58a2eca7c5e6ac6ad6812538a5b8118e35274c6b5de8029d55cbe1b4cd0a4abb","src/extensions/unicode/mod.rs":"e81db13fdb2db8d6cf7cfcd7c0d926b929fceca500894e688768b3494d02d0c3","src/extensions/unicode/value.rs":"02876ed95059d21d09ff2b986776d6bf0cb14c698237a86a9be24886ffd7a1cd","src/helpers.rs":"a6b8c22ef40a57339e4051fad54e724320851f827bc6f888187f30371024d04a","src/langid.rs":"b3258b1be6566dc117295a525dcb04237f0049c59dc91f460d939cd162ef8b39","src/lib.rs":"6f6248e20709be74b9e186b45810a4963ffa91c680be4ad78be9a6af5a10da5c","src/locale.rs":"a1ff7500d17581fe06524f6d70d59f0765c5d5ca89cb64b42953b286b20727b4","src/macros.rs":"f7154fc103ea1120a55bb5898540b20df80de6eec42e70ce15f339d997f2bf52","src/ordering.rs":"c70aa4e33d5cbab8d75d2833641141b71984a93648634cfc57fc25c3f79a5f58","src/parser/errors.rs":"ccea5e49c109db3766a71ac4aab1d759e2351c4cd31816b6abdca166
699c7f3e","src/parser/langid.rs":"ef5c3dc233a5cea1953688e69152c601a3260196caa9327dd07edc7b6be7b0b8","src/parser/locale.rs":"b7d4cd4ed80b0acae9e77046a3b4943ee19e4aec395e36951750da32366b9a8e","src/parser/mod.rs":"c65268221fc67a692a2a647b08dd81b244a9186c04f5ab0837383dcaa983b740","src/serde.rs":"06e940e4f2d15f02d313b4e2b233aea3e74c93c6c43076f5ffe52d49c133608f","src/subtags/language.rs":"e9dc6de6c6aebb6d8bf6e55f1ae9fab41844a52e681b4309e625a5076c02f9f3","src/subtags/mod.rs":"0257f746ed368ea3fa675054c9e7e40d972ec31cd7cc525be655a16a83c9d17b","src/subtags/region.rs":"4f4120f4910d0a4496f29c193d00313e71be4c646867d97ebd0e9a7438693847","src/subtags/script.rs":"6b1a68783cb90409bdd39b0184dfb2cb1c302fdee7202e3b6f7c7c8941bc7dfe","src/subtags/variant.rs":"956f1ea3d98172b6ead333411f010cf4e84404584a3051cb775d148d79beb4f8","src/subtags/variants.rs":"7740d1b20f596b04f509db917e9c2fffba80a013ffc42f0046cdc2d32b088aeb","src/zerovec.rs":"9d01a235d18296fbf0c2e89d188459e9446df0e63aaedc7e150165604af885b9","tests/fixtures/canonicalize.json":"9f2b7cbef72c24944cd4dc50de368c6e3ef69949f29c9ce1aa8807de767a4d0a","tests/fixtures/invalid-extensions.json":"0af95f38e458c8f52760f76c6540993beb9ba9421a3967df0cd6abb9fe2ce21a","tests/fixtures/invalid.json":"1f1ae207f1ce886b3f57cfcdfb2525aa3e58d538f997b2bda4088062de7aa68d","tests/fixtures/langid.json":"960fd01722217ef1ea9077e2e0821d7089fe318a241bd7fb7918f50bf8f3f5c3","tests/fixtures/locale.json":"8606e0569fc6ea0e50a1fecb9295b911fbef7d8dbfde3c585476284a751baccf","tests/fixtures/mod.rs":"28dec3e5c9d766e148adbff6857dce884d9ff94f7ef8aee17fde0084cc78a7ee","tests/helpers/mod.rs":"d3bf59e7eed6230f340bef6c87a7b8de3a387ec391f60afc1b15a0d001cbfb67","tests/langid.rs":"2e21d576a6eaba000fbe88d52362384f460ba350cac1e7034a1661302000ac58","tests/locale.rs":"91af0a738ca5def89fdb4d7f8d3504ad7b757e1d7c8e4d24dc246de610b46a04"},"package":"34b3de5d99a0e275fe6193b9586dbf37364daebc0d39c89b5cf8376a53b789e8"} \ No newline at end of file
+{"files":{"Cargo.lock":"332fcd0f371d9ef54006d7525bfc6a8adae7433754a3fbc0328530f421d92d0d","Cargo.toml":"c60a23e3795ec4820118fc5dec6ffb9bdca684d95511de01ea5a04d6d8272bc8","LICENSE":"4ad7541d66a407234e2c84902124cef325c29f3e966353efdb800bedb8b8da21","README.md":"16472983782c836d9e97b4df4754baab7bb247d0a945d1a97cafb3210e951d8f","benches/fixtures/langid.json":"373c11527653c63c685c9e229a8de5ae2b557c25b686a9d891c59e1f603232d8","benches/fixtures/locale.json":"669b19db933094290a45bf856559920f4e92401072e364ac82c482119dc9233a","benches/fixtures/mod.rs":"9a9671eddcf38a6faa10cb814949f8abc15d89f5e70f3ad6f684f1bc3ffe72ea","benches/fixtures/subtags.json":"28be3a639e452d713e807d5779b6819e06277e2dbbf67801ef34964fb9b074b6","benches/helpers/macros.rs":"bba0945a826bc083156bc302507c48c0c99c4d965e2a84352644d768591b0339","benches/helpers/mod.rs":"c98167d866fdb7f66c8cab41e8d57b5aab9e9707dfc66c37ef136e088dac6fef","benches/iai_langid.rs":"7984d12b78a0e2ecfa1eac74ccf7310627285de821c13fab2fe000f0e961a136","benches/langid.rs":"4e3d307d48fd9071308a567a0ef927b229814978abd2ba29f57c65edd51f38e4","benches/locale.rs":"b8d5b1e3f8b5578c549a5149229656fb60de26b76a1bf66b6c1abce75042d674","benches/subtags.rs":"e7e80dabaf31bf031779456614f139cafcdadb805986e71b49133ac964928432","examples/filter_langids.rs":"f36f6732b08a954d41ea95dfb3f07963f7c120e80ed29f4de7c9ec562c0151d6","examples/syntatically_canonicalize_locales.rs":"de97579c82f1670629d077a6216ecce06761da28715387f46250f81b8172ae6b","src/extensions/mod.rs":"106af2b8186202aa8c654acc085619d99fdbdbc467c276ead8283b9938c75ba7","src/extensions/other/mod.rs":"ee377c2eeaa6b622a2c80807bffdd307800030fe2ec8a99a9729bdde45452635","src/extensions/other/subtag.rs":"431d27a0a5adca7d56c7ea3a6de2a0412e1e14ad2dd8a8e09a548849984b84b6","src/extensions/private/mod.rs":"5d53d32adb79386416b6eb4a9de218423f3bee4000e96e4899b78462f609531c","src/extensions/private/other.rs":"586fd24398e78c5fda0afdb98de28a6467afd2d702683daf5dfab2a6c45af1e9","src/extensions/transform/fields.rs":"9221478ce7565738bb27951a6be25b3ebc5c11d63afb2ca744fd4c587d155e9b","src/extensions/transform/key.rs":"53e8c9ce13f00f678c2322855cc1d90afd91cd33a2af3758d098b7bbcc7090e5","src/extensions/transform/mod.rs":"111ebf59ad6cd9a09a8eb84367a0053ff03fff8329f07310131784a457d07b61","src/extensions/transform/value.rs":"577b642b32f7a74e98ba5bee8e30700021c8b0e6da63538398aaf95d13edfd65","src/extensions/unicode/attribute.rs":"d558a193b72f54cdb03afe8e023a145ac74832c8416ca55401cd417ebba2431c","src/extensions/unicode/attributes.rs":"ddc0361968151e28cc1e6a3d91056a0f71f2c42f22dacecd339aaa67dfdcf899","src/extensions/unicode/key.rs":"6c8694527079c5dd5f03f8e85f23ae6b5aa4b47899d1047036960e8400dca7de","src/extensions/unicode/keywords.rs":"d98b0799c171557c9d042bcc06389ac9742ae0a4910d9ceb1612d1ac5045222c","src/extensions/unicode/mod.rs":"e066cbdabf567a40c777428d071e2e82389a043bd552bc1e83202401c86e0b2e","src/extensions/unicode/value.rs":"38b96501db9ebc3da583162d68279de30096b896209874ff052dcc10f874d98a","src/helpers.rs":"54272463a938a04fd2cf5a663128ea08f36744180f0eb49fa2ad7de105c0c19a","src/langid.rs":"77dce95dd5549c15cbfa9f34f3521f7ad1d1c1b16c3d972f28023f59283bd56f","src/lib.rs":"661efd6459894a1821861a8b7e0a7e73484c49f5d297810aed401f7a66c45985","src/locale.rs":"98c5389226e3dd2ae9378225c129d0eb264d5b1d712111f2587489d01feeb546","src/macros.rs":"f7154fc103ea1120a55bb5898540b20df80de6eec42e70ce15f339d997f2bf52","src/ordering.rs":"c70aa4e33d5cbab8d75d2833641141b71984a93648634cfc57fc25c3f79a5f58","src/parser/errors.rs":"44a25385a2dc7d537b3ce482fc02169eda1e5e727ee99b00f0fd85cb
501ee939","src/parser/langid.rs":"749ac36945e7b5e24cbc82f04900f10f770fc24f7ce007af4c3be7a325ccc631","src/parser/locale.rs":"075c74803891894ad50bbedc69366931b8e76c0992b3caa1a5632f0a6816ccfd","src/parser/mod.rs":"5182392624876a419b1469d135d175aba680bb13d14e4f6ea0cfc4e071fbc743","src/serde.rs":"06e940e4f2d15f02d313b4e2b233aea3e74c93c6c43076f5ffe52d49c133608f","src/subtags/language.rs":"2ebc98952bd4a6b4077c77da1895225faacc17020af8a47675b8b41b05b9e7eb","src/subtags/mod.rs":"0257f746ed368ea3fa675054c9e7e40d972ec31cd7cc525be655a16a83c9d17b","src/subtags/region.rs":"4f4120f4910d0a4496f29c193d00313e71be4c646867d97ebd0e9a7438693847","src/subtags/script.rs":"6b1a68783cb90409bdd39b0184dfb2cb1c302fdee7202e3b6f7c7c8941bc7dfe","src/subtags/variant.rs":"956f1ea3d98172b6ead333411f010cf4e84404584a3051cb775d148d79beb4f8","src/subtags/variants.rs":"511aca7f5b75509b6b1b095e3465ab096430cc97b38e0bcb5956e71fa01c3189","src/zerovec.rs":"9d01a235d18296fbf0c2e89d188459e9446df0e63aaedc7e150165604af885b9","tests/fixtures/canonicalize.json":"9f2b7cbef72c24944cd4dc50de368c6e3ef69949f29c9ce1aa8807de767a4d0a","tests/fixtures/invalid-extensions.json":"4b7888006360b216030597257de8c301e22877e75216818967bbd8c83b6dbb0b","tests/fixtures/invalid.json":"5247849a6eb805619b8e70254c855227f7bdaf71431b071c91c6cc378ae9766e","tests/fixtures/langid.json":"960fd01722217ef1ea9077e2e0821d7089fe318a241bd7fb7918f50bf8f3f5c3","tests/fixtures/locale.json":"8606e0569fc6ea0e50a1fecb9295b911fbef7d8dbfde3c585476284a751baccf","tests/fixtures/mod.rs":"aea619960540b92199345cbd20ff03d2cb451aa2ce9aa6cf7915223ee9f812a3","tests/helpers/mod.rs":"d3bf59e7eed6230f340bef6c87a7b8de3a387ec391f60afc1b15a0d001cbfb67","tests/langid.rs":"43a0d381bdd9a8567898c137337a1563bea6db6fb36ecb853f496366faf8ff79","tests/locale.rs":"0cd3f09e83f6c093bca9676845612343a1e179d8584735e069008248e126eccf"},"package":"71d7a98ecb812760b5f077e55a4763edeefa7ccc30d6eb5680a70841ede81928"} \ No newline at end of file
diff --git a/vendor/icu_locid/Cargo.lock b/vendor/icu_locid/Cargo.lock
index 9940858d2..401f5d44d 100644
--- a/vendor/icu_locid/Cargo.lock
+++ b/vendor/icu_locid/Cargo.lock
@@ -8,7 +8,7 @@ version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
dependencies = [
- "hermit-abi",
+ "hermit-abi 0.1.19",
"libc",
"winapi",
]
@@ -39,9 +39,9 @@ dependencies = [
[[package]]
name = "bumpalo"
-version = "3.11.0"
+version = "3.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c1ad822118d20d2c234f427000d5acc36eabe1e29a348c89b63dd60b13f28e5d"
+checksum = "0d261e256854913907f67ed06efbc3338dfe6179796deefc1ff763fc1aee5535"
[[package]]
name = "cast"
@@ -131,26 +131,24 @@ dependencies = [
[[package]]
name = "crossbeam-epoch"
-version = "0.9.10"
+version = "0.9.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "045ebe27666471bb549370b4b0b3e51b07f56325befa4284db65fc89c02511b1"
+checksum = "01a9af1f4c2ef74bb8aa1f7e19706bc72d03598c8a570bb5de72243c7a9d9d5a"
dependencies = [
"autocfg",
"cfg-if",
"crossbeam-utils",
"memoffset",
- "once_cell",
"scopeguard",
]
[[package]]
name = "crossbeam-utils"
-version = "0.8.11"
+version = "0.8.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "51887d4adc7b564537b15adcfb307936f8075dfcd5f00dde9a9f1d29383682bc"
+checksum = "4fb766fa798726286dbbb842f174001dab8abc7b627a1dd86e0b7222a95d929f"
dependencies = [
"cfg-if",
- "once_cell",
]
[[package]]
@@ -177,9 +175,9 @@ dependencies = [
[[package]]
name = "databake"
-version = "0.1.2"
+version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c87777d6d7bde863ba217aa87521dc857239de1f36d66aac46fd173fb0495858"
+checksum = "df626c4717e455cd7a70a82c4358630554a07e4341f86dd095c625f1474a2857"
dependencies = [
"databake-derive",
"proc-macro2",
@@ -189,9 +187,9 @@ dependencies = [
[[package]]
name = "databake-derive"
-version = "0.1.1"
+version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "905c7a060fc0c84c0452d97473b1177dd7a5cbc7670cfbae4a7fe22e42f6432e"
+checksum = "be51a53c468489ae1ef0efa9f6b10706f426c0dde06d66122ffef1f0c51e87dc"
dependencies = [
"proc-macro2",
"quote",
@@ -232,26 +230,28 @@ dependencies = [
]
[[package]]
-name = "iai"
-version = "0.1.1"
+name = "hermit-abi"
+version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "71a816c97c42258aa5834d07590b718b4c9a598944cd39a52dc25b351185d678"
+checksum = "ee512640fe35acbfb4bb779db6f0d80704c2cacfa2e39b601ef3e3f47d1ae4c7"
+dependencies = [
+ "libc",
+]
[[package]]
-name = "icu_benchmark_macros"
-version = "0.7.0"
+name = "iai"
+version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6c867656f2d9c90b13709ac88e710a9d6afe33998c1dfa22384bab8804e8b3d4"
+checksum = "71a816c97c42258aa5834d07590b718b4c9a598944cd39a52dc25b351185d678"
[[package]]
name = "icu_locid"
-version = "1.0.0"
+version = "1.1.0"
dependencies = [
"criterion",
"databake",
"displaydoc",
"iai",
- "icu_benchmark_macros",
"litemap",
"postcard",
"serde",
@@ -278,9 +278,9 @@ checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4"
[[package]]
name = "itoa"
-version = "1.0.3"
+version = "1.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6c8af84674fe1f223a982c933a0ee1086ac4d4052aa0fb8060c12c6ad838e754"
+checksum = "fad582f4b9e86b6caa621cabeb0963332d92eea04729ab12892c2533951e6440"
[[package]]
name = "js-sys"
@@ -299,15 +299,15 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "libc"
-version = "0.2.133"
+version = "0.2.139"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c0f80d65747a3e43d1596c7c5492d95d5edddaabd45a7fcdb02b95f644164966"
+checksum = "201de327520df007757c1f0adce6e827fe8562fbc28bfd9c15571c66ca1f5f79"
[[package]]
name = "litemap"
-version = "0.6.0"
+version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f34a3f4798fac63fb48cf277eefa38f94d3443baff555bb98e4f56bc9092368e"
+checksum = "575d8a551c59104b4df91269921e5eab561aa1b77c618dac0414b5d44a4617de"
[[package]]
name = "log"
@@ -326,9 +326,9 @@ checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
[[package]]
name = "memoffset"
-version = "0.6.5"
+version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce"
+checksum = "5de893c32cde5f383baa4c04c5d6dbdd735cfd4a794b0debdb2bb1b421da5ff4"
dependencies = [
"autocfg",
]
@@ -344,19 +344,19 @@ dependencies = [
[[package]]
name = "num_cpus"
-version = "1.13.1"
+version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "19e64526ebdee182341572e50e9ad03965aa510cd94427a4549448f285e957a1"
+checksum = "0fac9e2da13b5eb447a6ce3d392f23a29d8694bff781bf03a16cd9ac8697593b"
dependencies = [
- "hermit-abi",
+ "hermit-abi 0.2.6",
"libc",
]
[[package]]
name = "once_cell"
-version = "1.15.0"
+version = "1.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e82dad04139b71a90c080c8463fe0dc7902db5192d939bd0950f074d014339e1"
+checksum = "6f61fba1741ea2b3d6a1e3178721804bb716a68a6aeba1149b5d52e3d464ea66"
[[package]]
name = "oorandom"
@@ -404,39 +404,37 @@ dependencies = [
[[package]]
name = "proc-macro2"
-version = "1.0.44"
+version = "1.0.50"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7bd7356a8122b6c4a24a82b278680c73357984ca2fc79a0f9fa6dea7dced7c58"
+checksum = "6ef7d57beacfaf2d8aee5937dab7b7f28de3cb8b1828479bb5de2a7106f2bae2"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
-version = "1.0.21"
+version = "1.0.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179"
+checksum = "8856d8364d252a14d474036ea1358d63c9e6965c8e5c1885c18f73d70bff9c7b"
dependencies = [
"proc-macro2",
]
[[package]]
name = "rayon"
-version = "1.5.3"
+version = "1.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bd99e5772ead8baa5215278c9b15bf92087709e9c1b2d1f97cdb5a183c933a7d"
+checksum = "6db3a213adf02b3bcfd2d3846bb41cb22857d131789e01df434fb7e7bc0759b7"
dependencies = [
- "autocfg",
- "crossbeam-deque",
"either",
"rayon-core",
]
[[package]]
name = "rayon-core"
-version = "1.9.3"
+version = "1.10.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "258bcdb5ac6dad48491bb2992db6b7cf74878b0384908af124823d118c99683f"
+checksum = "356a0625f1954f730c0201cdab48611198dc6ce21f4acff55089b5a78e6e835b"
dependencies = [
"crossbeam-channel",
"crossbeam-deque",
@@ -446,9 +444,9 @@ dependencies = [
[[package]]
name = "regex"
-version = "1.6.0"
+version = "1.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4c4eb3267174b8c6c2f654116623910a0fef09c4753f8dd83db29c48a0df988b"
+checksum = "48aaa5748ba571fb95cd2c85c09f629215d3a6ece942baa100950af03a34f733"
dependencies = [
"regex-syntax",
]
@@ -461,15 +459,15 @@ checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
[[package]]
name = "regex-syntax"
-version = "0.6.27"
+version = "0.6.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a3f87b73ce11b1619a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244"
+checksum = "456c603be3e8d448b072f410900c09faf164fbce2d480456f50eea6e25f9c848"
[[package]]
name = "ryu"
-version = "1.0.11"
+version = "1.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4501abdff3ae82a1c1b477a17252eb69cee9e66eb915c1abaa4f44d873df9f09"
+checksum = "7b4b9743ed687d4b4bcedf9ff5eaa7398495ae14e61cba0a295704edbc7decde"
[[package]]
name = "same-file"
@@ -488,9 +486,9 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
[[package]]
name = "serde"
-version = "1.0.145"
+version = "1.0.152"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "728eb6351430bccb993660dfffc5a72f91ccc1295abaa8ce19b27ebe4f75568b"
+checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb"
dependencies = [
"serde_derive",
]
@@ -507,9 +505,9 @@ dependencies = [
[[package]]
name = "serde_derive"
-version = "1.0.145"
+version = "1.0.152"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "81fa1584d3d1bcacd84c277a0dfe21f5b0f6accf4a23d04d4c6d61f1af522b4c"
+checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e"
dependencies = [
"proc-macro2",
"quote",
@@ -518,20 +516,20 @@ dependencies = [
[[package]]
name = "serde_json"
-version = "1.0.85"
+version = "1.0.91"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e55a28e3aaef9d5ce0506d0a14dbba8054ddc7e499ef522dd8b26859ec9d4a44"
+checksum = "877c235533714907a8c2464236f5c4b2a17262ef1bd71f38f35ea592c8da6883"
dependencies = [
- "itoa 1.0.3",
+ "itoa 1.0.5",
"ryu",
"serde",
]
[[package]]
name = "syn"
-version = "1.0.101"
+version = "1.0.107"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e90cde112c4b9690b8cbe810cba9ddd8bc1d7472e2cae317b69e9438c1cba7d2"
+checksum = "1f4064b5b16e03ae50984a5a8ed5d4f8803e6bc1fd170a3cda91a1be4b18e3f5"
dependencies = [
"proc-macro2",
"quote",
@@ -561,9 +559,9 @@ dependencies = [
[[package]]
name = "tinystr"
-version = "0.7.0"
+version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f8aeafdfd935e4a7fe16a91ab711fa52d54df84f9c8f7ca5837a9d1d902ef4c2"
+checksum = "7ac3f5b6856e931e15e07b478e98c8045239829a65f9156d4fa7e7788197a5ef"
dependencies = [
"displaydoc",
"serde",
@@ -581,9 +579,9 @@ dependencies = [
[[package]]
name = "unicode-ident"
-version = "1.0.4"
+version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dcc811dc4066ac62f84f11307873c4850cb653bfa9b1719cee2bd2204a4bc5dd"
+checksum = "84a22b9f218b40614adcb3f4ff08b703773ad44fa9423e4e0d346d5db86e4ebc"
[[package]]
name = "unicode-width"
@@ -705,9 +703,9 @@ checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "writeable"
-version = "0.5.0"
+version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f8e6ab4f5da1b24daf2c590cfac801bacb27b15b4f050e84eb60149ea726f06b"
+checksum = "92d74a687e3b9a7a129db0a8c82b4d464eb9c36f5a66ca68572a7e5f1cfdb5bc"
[[package]]
name = "zerofrom"
@@ -717,9 +715,9 @@ checksum = "79e9355fccf72b04b7deaa99ce7a0f6630530acf34045391b74460fcd714de54"
[[package]]
name = "zerovec"
-version = "0.9.0"
+version = "0.9.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b9d919a74c17749ccb17beaf6405562e413cd94e98ba52ca1e64bbe7eefbd8b8"
+checksum = "154df60c74c4a844bc04a53cef4fc18a909d3ea07e19f5225eaba86209da3aa6"
dependencies = [
"zerofrom",
]
diff --git a/vendor/icu_locid/Cargo.toml b/vendor/icu_locid/Cargo.toml
index 3ce7066e7..dbbe6ed2b 100644
--- a/vendor/icu_locid/Cargo.toml
+++ b/vendor/icu_locid/Cargo.toml
@@ -12,7 +12,7 @@
[package]
edition = "2021"
name = "icu_locid"
-version = "1.0.0"
+version = "1.1.0"
authors = ["The ICU4X Project Developers"]
include = [
"src/**/*",
@@ -30,14 +30,12 @@ license = "Unicode-DFS-2016"
repository = "https://github.com/unicode-org/icu4x"
resolver = "2"
-[package.metadata.cargo-all-features]
-skip_optional_dependencies = true
-denylist = ["bench"]
-extra_features = ["serde"]
-
[package.metadata.docs.rs]
all-features = true
+[package.metadata.cargo-all-features]
+denylist = ["bench"]
+
[lib]
path = "src/lib.rs"
bench = false
@@ -69,7 +67,7 @@ harness = false
required-features = ["bench"]
[dependencies.databake]
-version = "0.1.0"
+version = "0.1.3"
features = ["derive"]
optional = true
@@ -78,7 +76,7 @@ version = "0.2.3"
default-features = false
[dependencies.litemap]
-version = "0.6"
+version = "0.6.1"
[dependencies.serde]
version = "1.0"
@@ -90,15 +88,15 @@ optional = true
default-features = false
[dependencies.tinystr]
-version = "0.7"
+version = "0.7.1"
features = ["alloc"]
default-features = false
[dependencies.writeable]
-version = "0.5"
+version = "0.5.1"
[dependencies.zerovec]
-version = "0.9"
+version = "0.9.2"
optional = true
[dev-dependencies.criterion]
@@ -107,13 +105,6 @@ version = "0.3.3"
[dev-dependencies.iai]
version = "0.1.1"
-[dev-dependencies.icu_benchmark_macros]
-version = "0.7"
-
-[dev-dependencies.litemap]
-version = "0.6"
-features = ["testing"]
-
[dev-dependencies.postcard]
version = "1.0.0"
features = ["use-std"]
@@ -128,9 +119,10 @@ version = "1.0"
[features]
bench = ["serde"]
-default = []
+databake = ["dep:databake"]
serde = [
"dep:serde",
"tinystr/serde",
]
std = []
+zerovec = ["dep:zerovec"]
diff --git a/vendor/icu_locid/README.md b/vendor/icu_locid/README.md
index cc2a0b023..5f49c35f5 100644
--- a/vendor/icu_locid/README.md
+++ b/vendor/icu_locid/README.md
@@ -20,36 +20,21 @@ If in doubt, use [`Locale`].
## Examples
```rust
-use icu::locid::subtags::{Language, Region};
use icu::locid::Locale;
+use icu::locid::{
+ locale, subtags_language as language, subtags_region as region,
+};
-let mut loc: Locale = "en-US".parse().expect("Parsing failed.");
-
-let lang: Language = "en".parse().expect("Parsing failed.");
-let region: Region = "US".parse().expect("Parsing failed.");
+let mut loc: Locale = locale!("en-US");
-assert_eq!(loc.id.language, lang);
+assert_eq!(loc.id.language, language!("en"));
assert_eq!(loc.id.script, None);
-assert_eq!(loc.id.region, Some(region));
+assert_eq!(loc.id.region, Some(region!("US")));
assert_eq!(loc.id.variants.len(), 0);
-let region: Region = "GB".parse().expect("Parsing failed.");
-loc.id.region = Some(region);
-
-assert_eq!(loc.to_string(), "en-GB");
-```
-
-### Macros
-
-```rust
-use icu::locid::{
- langid, subtags_language as language, subtags_region as region,
-};
-
-let lid = langid!("EN_US");
+loc.id.region = Some(region!("GB"));
-assert_eq!(lid.language, language!("en"));
-assert_eq!(lid.region, Some(region!("US")));
+assert_eq!(loc, locale!("en-GB"));
```
For more details, see [`Locale`] and [`LanguageIdentifier`].
diff --git a/vendor/icu_locid/benches/iai_langid.rs b/vendor/icu_locid/benches/iai_langid.rs
index f964d1462..bf3b911cf 100644
--- a/vendor/icu_locid/benches/iai_langid.rs
+++ b/vendor/icu_locid/benches/iai_langid.rs
@@ -5,6 +5,7 @@
use icu_locid::{
langid, subtags_language as language, subtags_region as region, LanguageIdentifier,
};
+use writeable::Writeable;
const LIDS: &[LanguageIdentifier] = &[
langid!("en"),
@@ -97,6 +98,12 @@ fn bench_langid_serialize() {
let _: Vec<String> = LIDS.iter().map(|l| l.to_string()).collect();
}
+fn bench_langid_serialize_writeable() {
+ // Tests serialization of LIDs via the Writeable trait.
+
+ let _: Vec<_> = LIDS.iter().map(|l| l.write_to_string()).collect();
+}
+
fn bench_langid_canonicalize() {
// Tests canonicalization of strings.
@@ -114,5 +121,6 @@ iai::main!(
bench_langid_matching,
bench_langid_matching_str,
bench_langid_serialize,
+ bench_langid_serialize_writeable,
bench_langid_canonicalize,
);
diff --git a/vendor/icu_locid/examples/filter_langids.rs b/vendor/icu_locid/examples/filter_langids.rs
index 9e5b54e39..215df4eb3 100644
--- a/vendor/icu_locid/examples/filter_langids.rs
+++ b/vendor/icu_locid/examples/filter_langids.rs
@@ -16,6 +16,7 @@ icu_benchmark_macros::static_setup!();
use std::env;
use icu_locid::{subtags, LanguageIdentifier};
+use writeable::Writeable;
const DEFAULT_INPUT: &str =
"de, en-us, zh-hant, sr-cyrl, fr-ca, es-cl, pl, en-latn-us, ca-valencia, und-arab";
@@ -30,7 +31,9 @@ fn filter_input(input: &str) -> String {
let en_langids = langids.filter(|langid: &LanguageIdentifier| langid.language == en_lang);
// 3. Serialize the output.
- let en_strs: Vec<String> = en_langids.map(|langid| langid.to_string()).collect();
+ let en_strs: Vec<String> = en_langids
+ .map(|langid| langid.write_to_string().into_owned())
+ .collect();
en_strs.join(", ")
}
diff --git a/vendor/icu_locid/src/extensions/mod.rs b/vendor/icu_locid/src/extensions/mod.rs
index 42bfcd3c9..a6a189b11 100644
--- a/vendor/icu_locid/src/extensions/mod.rs
+++ b/vendor/icu_locid/src/extensions/mod.rs
@@ -102,11 +102,11 @@ impl ExtensionType {
#[derive(Debug, Default, PartialEq, Eq, Clone, Hash)]
#[non_exhaustive]
pub struct Extensions {
- /// A representation of the data for a Unicode extension, when present in the locale identifer.
+ /// A representation of the data for a Unicode extension, when present in the locale identifier.
pub unicode: Unicode,
- /// A representation of the data for a transform extension, when present in the locale identifer.
+ /// A representation of the data for a transform extension, when present in the locale identifier.
pub transform: Transform,
- /// A representation of the data for a private-use extension, when present in the locale identifer.
+ /// A representation of the data for a private-use extension, when present in the locale identifier.
pub private: Private,
/// A sequence of any other extensions that are present in the locale identifier but are not formally
/// [defined](https://unicode.org/reports/tr35/) and represented explicitly as [`Unicode`], [`Transform`],
@@ -210,19 +210,33 @@ impl Extensions {
let mut private = None;
let mut other = Vec::new();
- let mut st = iter.next();
- while let Some(subtag) = st {
+ while let Some(subtag) = iter.next() {
+ if subtag.is_empty() {
+ return Err(ParserError::InvalidExtension);
+ }
match subtag.get(0).map(|b| ExtensionType::try_from_byte(*b)) {
Some(Ok(ExtensionType::Unicode)) => {
+ if unicode.is_some() {
+ return Err(ParserError::DuplicatedExtension);
+ }
unicode = Some(Unicode::try_from_iter(iter)?);
}
Some(Ok(ExtensionType::Transform)) => {
+ if transform.is_some() {
+ return Err(ParserError::DuplicatedExtension);
+ }
transform = Some(Transform::try_from_iter(iter)?);
}
Some(Ok(ExtensionType::Private)) => {
+ if private.is_some() {
+ return Err(ParserError::DuplicatedExtension);
+ }
private = Some(Private::try_from_iter(iter)?);
}
Some(Ok(ExtensionType::Other(ext))) => {
+ if other.iter().any(|o: &Other| o.get_ext_byte() == ext) {
+ return Err(ParserError::DuplicatedExtension);
+ }
let parsed = Other::try_from_iter(ext, iter)?;
if let Err(idx) = other.binary_search(&parsed) {
other.insert(idx, parsed);
@@ -230,11 +244,8 @@ impl Extensions {
return Err(ParserError::InvalidExtension);
}
}
- None => {}
_ => return Err(ParserError::InvalidExtension),
}
-
- st = iter.next();
}
Ok(Self {
@@ -283,7 +294,7 @@ impl_writeable_for_each_subtag_str_no_test!(Extensions);
fn test_writeable() {
use crate::Locale;
use writeable::assert_writeable_eq;
- assert_writeable_eq!(Extensions::new(), "",);
+ assert_writeable_eq!(Extensions::new(), "");
assert_writeable_eq!(
"my-t-my-d0-zawgyi".parse::<Locale>().unwrap().extensions,
"t-my-d0-zawgyi",
diff --git a/vendor/icu_locid/src/extensions/other/mod.rs b/vendor/icu_locid/src/extensions/other/mod.rs
index 36dbc49b6..44d5c9cf8 100644
--- a/vendor/icu_locid/src/extensions/other/mod.rs
+++ b/vendor/icu_locid/src/extensions/other/mod.rs
@@ -41,13 +41,16 @@ pub use subtag::Subtag;
/// let subtag2: Subtag = "bar".parse().expect("Failed to parse a Subtag.");
///
/// let other = Other::from_vec_unchecked(b'a', vec![subtag1, subtag2]);
-/// assert_eq!(&other.to_string(), "-a-foo-bar");
+/// assert_eq!(&other.to_string(), "a-foo-bar");
/// ```
///
/// [`Other Use Extensions`]: https://unicode.org/reports/tr35/#other_extensions
/// [`Unicode Locale Identifier`]: https://unicode.org/reports/tr35/#Unicode_locale_identifier
#[derive(Clone, PartialEq, Eq, Debug, Default, Hash, PartialOrd, Ord)]
-pub struct Other((u8, Vec<Subtag>));
+pub struct Other {
+ ext: u8,
+ keys: Vec<Subtag>,
+}
impl Other {
/// A constructor which takes a pre-sorted list of [`Subtag`].
@@ -65,11 +68,11 @@ impl Other {
/// let subtag2: Subtag = "bar".parse().expect("Failed to parse a Subtag.");
///
/// let other = Other::from_vec_unchecked(b'a', vec![subtag1, subtag2]);
- /// assert_eq!(&other.to_string(), "-a-foo-bar");
+ /// assert_eq!(&other.to_string(), "a-foo-bar");
/// ```
- pub fn from_vec_unchecked(ext: u8, input: Vec<Subtag>) -> Self {
+ pub fn from_vec_unchecked(ext: u8, keys: Vec<Subtag>) -> Self {
assert!(ext.is_ascii_alphabetic());
- Self((ext, input))
+ Self { ext, keys }
}
pub(crate) fn try_from_iter(ext: u8, iter: &mut SubtagIterator) -> Result<Self, ParserError> {
@@ -89,6 +92,22 @@ impl Other {
Ok(Self::from_vec_unchecked(ext, keys))
}
+ /// Gets the tag character for this extension as a &str.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use icu::locid::Locale;
+ ///
+ /// let loc: Locale = "und-a-hello-world".parse().unwrap();
+ /// let other_ext = &loc.extensions.other[0];
+ /// assert_eq!(other_ext.get_ext_str(), "a");
+ /// ```
+ pub fn get_ext_str(&self) -> &str {
+ debug_assert!(self.ext.is_ascii_alphabetic());
+ unsafe { core::str::from_utf8_unchecked(core::slice::from_ref(&self.ext)) }
+ }
+
/// Gets the tag character for this extension as a char.
///
/// # Examples
@@ -101,7 +120,7 @@ impl Other {
/// assert_eq!(other_ext.get_ext(), 'a');
/// ```
pub fn get_ext(&self) -> char {
- self.get_ext_byte() as char
+ self.ext as char
}
/// Gets the tag character for this extension as a byte.
@@ -116,19 +135,15 @@ impl Other {
/// assert_eq!(other_ext.get_ext_byte(), b'a');
/// ```
pub fn get_ext_byte(&self) -> u8 {
- self.0 .0
+ self.ext
}
pub(crate) fn for_each_subtag_str<E, F>(&self, f: &mut F) -> Result<(), E>
where
F: FnMut(&str) -> Result<(), E>,
{
- let (ext, keys) = &self.0;
- debug_assert!(ext.is_ascii_alphabetic());
- // Safety: ext is ascii_alphabetic, so it is valid UTF-8
- let ext_str = unsafe { core::str::from_utf8_unchecked(core::slice::from_ref(ext)) };
- f(ext_str)?;
- keys.iter().map(|t| t.as_str()).try_for_each(f)
+ f(self.get_ext_str())?;
+ self.keys.iter().map(|t| t.as_str()).try_for_each(f)
}
}
@@ -136,10 +151,8 @@ writeable::impl_display_with_writeable!(Other);
impl writeable::Writeable for Other {
fn write_to<W: core::fmt::Write + ?Sized>(&self, sink: &mut W) -> core::fmt::Result {
- let (ext, keys) = &self.0;
- sink.write_char('-')?;
- sink.write_char(*ext as char)?;
- for key in keys.iter() {
+ sink.write_str(self.get_ext_str())?;
+ for key in self.keys.iter() {
sink.write_char('-')?;
writeable::Writeable::write_to(key, sink)?;
}
@@ -148,10 +161,20 @@ impl writeable::Writeable for Other {
}
fn writeable_length_hint(&self) -> writeable::LengthHint {
- let mut result = writeable::LengthHint::exact(2);
- for key in self.0 .1.iter() {
+ let mut result = writeable::LengthHint::exact(1);
+ for key in self.keys.iter() {
result += writeable::Writeable::writeable_length_hint(key) + 1;
}
result
}
+
+ fn write_to_string(&self) -> alloc::borrow::Cow<str> {
+ if self.keys.is_empty() {
+ return alloc::borrow::Cow::Borrowed(self.get_ext_str());
+ }
+ let mut string =
+ alloc::string::String::with_capacity(self.writeable_length_hint().capacity());
+ let _ = self.write_to(&mut string);
+ alloc::borrow::Cow::Owned(string)
+ }
}
diff --git a/vendor/icu_locid/src/extensions/other/subtag.rs b/vendor/icu_locid/src/extensions/other/subtag.rs
index 60995c395..ad4d6a0f2 100644
--- a/vendor/icu_locid/src/extensions/other/subtag.rs
+++ b/vendor/icu_locid/src/extensions/other/subtag.rs
@@ -11,11 +11,9 @@ impl_tinystr_subtag!(
/// # Examples
///
/// ```
- /// use icu::locid::extensions::other::Subtag;
+ /// use icu::locid::extensions_other_subtag as subtag;
///
- /// let subtag: Subtag = "Foo".parse().expect("Failed to parse a Subtag.");
- ///
- /// assert_eq!(subtag.as_str(), "foo");
+ /// assert_eq!(subtag!("Foo").as_str(), "foo");
/// ```
Subtag,
extensions::other::Subtag,
diff --git a/vendor/icu_locid/src/extensions/private/mod.rs b/vendor/icu_locid/src/extensions/private/mod.rs
index 13090c94a..8382d166f 100644
--- a/vendor/icu_locid/src/extensions/private/mod.rs
+++ b/vendor/icu_locid/src/extensions/private/mod.rs
@@ -13,16 +13,18 @@
//! # Examples
//!
//! ```
-//! use icu::locid::extensions::private::{Private, Subtag};
-//! use icu::locid::Locale;
+//! use icu::locid::extensions_private_subtag as subtag;
+//! use icu::locid::{locale, Locale};
//!
//! let mut loc: Locale = "en-US-x-foo-faa".parse().expect("Parsing failed.");
//!
-//! let subtag: Subtag = "foo".parse().expect("Parsing subtag failed.");
-//! assert!(loc.extensions.private.contains(&subtag));
-//! assert_eq!(loc.extensions.private.iter().next(), Some(&subtag));
+//! assert!(loc.extensions.private.contains(&subtag!("foo")));
+//! assert_eq!(loc.extensions.private.iter().next(), Some(&subtag!("foo")));
+//!
//! loc.extensions.private.clear();
-//! assert_eq!(loc.to_string(), "en-US");
+//!
+//! assert!(loc.extensions.private.is_empty());
+//! assert_eq!(loc, locale!("en-US"));
//! ```
mod other;
@@ -50,7 +52,7 @@ use crate::parser::SubtagIterator;
/// let subtag2: Subtag = "bar".parse().expect("Failed to parse a Subtag.");
///
/// let private = Private::from_vec_unchecked(vec![subtag1, subtag2]);
-/// assert_eq!(&private.to_string(), "-x-foo-bar");
+/// assert_eq!(&private.to_string(), "x-foo-bar");
/// ```
///
/// [`Private Use Extensions`]: https://unicode.org/reports/tr35/#pu_extensions
@@ -84,7 +86,7 @@ impl Private {
/// let subtag2: Subtag = "bar".parse().expect("Failed to parse a Subtag.");
///
/// let private = Private::from_vec_unchecked(vec![subtag1, subtag2]);
- /// assert_eq!(&private.to_string(), "-x-foo-bar");
+ /// assert_eq!(&private.to_string(), "x-foo-bar");
/// ```
pub fn from_vec_unchecked(input: Vec<Subtag>) -> Self {
Self(input)
@@ -101,11 +103,11 @@ impl Private {
/// let subtag2: Subtag = "bar".parse().expect("Failed to parse a Subtag.");
/// let mut private = Private::from_vec_unchecked(vec![subtag1, subtag2]);
///
- /// assert_eq!(&private.to_string(), "-x-foo-bar");
+ /// assert_eq!(&private.to_string(), "x-foo-bar");
///
/// private.clear();
///
- /// assert_eq!(&private.to_string(), "");
+ /// assert_eq!(private, Private::new());
/// ```
pub fn clear(&mut self) {
self.0.clear();
@@ -138,7 +140,7 @@ impl writeable::Writeable for Private {
if self.is_empty() {
return Ok(());
}
- sink.write_str("-x")?;
+ sink.write_str("x")?;
for key in self.iter() {
sink.write_char('-')?;
writeable::Writeable::write_to(key, sink)?;
@@ -150,7 +152,7 @@ impl writeable::Writeable for Private {
if self.is_empty() {
return writeable::LengthHint::exact(0);
}
- let mut result = writeable::LengthHint::exact(2);
+ let mut result = writeable::LengthHint::exact(1);
for key in self.iter() {
result += writeable::Writeable::writeable_length_hint(key) + 1;
}
diff --git a/vendor/icu_locid/src/extensions/transform/fields.rs b/vendor/icu_locid/src/extensions/transform/fields.rs
index ca10000a7..f08581a87 100644
--- a/vendor/icu_locid/src/extensions/transform/fields.rs
+++ b/vendor/icu_locid/src/extensions/transform/fields.rs
@@ -25,10 +25,10 @@ use super::Value;
///
/// ```
/// use icu::locid::extensions::transform::{Fields, Key, Value};
+/// use icu::locid::extensions_transform_key as key;
///
-/// let key: Key = "h0".parse().expect("Failed to parse a Key.");
-/// let value: Value = "hybrid".parse().expect("Failed to parse a Value.");
-/// let fields: Fields = vec![(key, value)].into_iter().collect();
+/// let value = "hybrid".parse::<Value>().expect("Failed to parse a Value.");
+/// let fields = vec![(key!("h0"), value)].into_iter().collect::<Fields>();
///
/// assert_eq!(&fields.to_string(), "h0-hybrid");
/// ```
@@ -76,17 +76,17 @@ impl Fields {
/// # Examples
///
/// ```
- /// use icu::locid::extensions::transform::{Fields, Key, Value};
+ /// use icu::locid::extensions::transform::{Fields, Value};
+ /// use icu::locid::extensions_transform_key as key;
///
- /// let key: Key = "h0".parse().expect("Failed to parse a Key.");
- /// let value: Value = "hybrid".parse().expect("Failed to parse a Value.");
- /// let mut fields: Fields = vec![(key, value)].into_iter().collect();
+ /// let value = "hybrid".parse::<Value>().expect("Failed to parse a Value.");
+ /// let mut fields = vec![(key!("h0"), value)].into_iter().collect::<Fields>();
///
/// assert_eq!(&fields.to_string(), "h0-hybrid");
///
/// fields.clear();
///
- /// assert_eq!(&fields.to_string(), "");
+ /// assert_eq!(fields, Fields::new());
/// ```
pub fn clear(&mut self) -> Self {
core::mem::take(self)
@@ -122,16 +122,14 @@ impl Fields {
///
/// ```
/// use icu::locid::extensions::transform::{Fields, Key, Value};
+ /// use icu::locid::extensions_transform_key as key;
///
- /// let key: Key = "h0".parse().expect("Failed to parse a Key.");
- /// let value: Value = "hybrid".parse().expect("Failed to parse a Value.");
- /// let mut fields: Fields = vec![(key, value)].into_iter().collect();
+ /// let value = "hybrid".parse::<Value>().unwrap();
+ /// let fields = vec![(key!("h0"), value.clone())]
+ /// .into_iter()
+ /// .collect::<Fields>();
///
- /// let key: Key = "h0".parse().expect("Failed to parse a Key.");
- /// assert_eq!(
- /// fields.get(&key).map(|v| v.to_string()),
- /// Some("hybrid".to_string())
- /// );
+ /// assert_eq!(fields.get(&key!("h0")), Some(&value));
/// ```
pub fn get<Q>(&self, key: &Q) -> Option<&Value>
where
diff --git a/vendor/icu_locid/src/extensions/transform/mod.rs b/vendor/icu_locid/src/extensions/transform/mod.rs
index a8c605146..7b97d87f6 100644
--- a/vendor/icu_locid/src/extensions/transform/mod.rs
+++ b/vendor/icu_locid/src/extensions/transform/mod.rs
@@ -28,7 +28,7 @@
//! assert!(loc.extensions.transform.fields.contains_key(&key));
//! assert_eq!(loc.extensions.transform.fields.get(&key), Some(&value));
//!
-//! assert_eq!(&loc.extensions.transform.to_string(), "-t-es-AR-h0-hybrid");
+//! assert_eq!(&loc.extensions.transform.to_string(), "t-es-AR-h0-hybrid");
//! ```
mod fields;
mod key;
@@ -208,7 +208,7 @@ impl writeable::Writeable for Transform {
if self.is_empty() {
return Ok(());
}
- sink.write_str("-t")?;
+ sink.write_str("t")?;
if let Some(lang) = &self.lang {
sink.write_char('-')?;
writeable::Writeable::write_to(lang, sink)?;
@@ -224,7 +224,7 @@ impl writeable::Writeable for Transform {
if self.is_empty() {
return writeable::LengthHint::exact(0);
}
- let mut result = writeable::LengthHint::exact(2);
+ let mut result = writeable::LengthHint::exact(1);
if let Some(lang) = &self.lang {
result += writeable::Writeable::writeable_length_hint(lang) + 1;
}
diff --git a/vendor/icu_locid/src/extensions/transform/value.rs b/vendor/icu_locid/src/extensions/transform/value.rs
index 84468361a..f908b0208 100644
--- a/vendor/icu_locid/src/extensions/transform/value.rs
+++ b/vendor/icu_locid/src/extensions/transform/value.rs
@@ -2,7 +2,7 @@
// called LICENSE at the top level of the ICU4X source tree
// (online at: https://github.com/unicode-org/icu4x/blob/main/LICENSE ).
-use crate::parser::{get_subtag_iterator, ParserError};
+use crate::parser::{ParserError, SubtagIterator};
use alloc::vec;
use alloc::vec::Vec;
use core::ops::RangeInclusive;
@@ -16,20 +16,18 @@ use tinystr::TinyAsciiStr;
/// Each part of the sequence has to be no shorter than three characters and no
/// longer than 8.
///
-///
/// # Examples
///
/// ```
/// use icu::locid::extensions::transform::Value;
///
-/// let value1: Value = "hybrid".parse().expect("Failed to parse a Value.");
-/// let value2: Value =
-/// "hybrid-foobar".parse().expect("Failed to parse a Value.");
+/// "hybrid".parse::<Value>().expect("Valid Value.");
+///
+/// "hybrid-foobar".parse::<Value>().expect("Valid Value.");
///
-/// assert_eq!(&value1.to_string(), "hybrid");
-/// assert_eq!(&value2.to_string(), "hybrid-foobar");
+/// "no".parse::<Value>().expect_err("Invalid Value.");
/// ```
-#[derive(Debug, PartialEq, Eq, Clone, Hash, PartialOrd, Ord)]
+#[derive(Debug, PartialEq, Eq, Clone, Hash, PartialOrd, Ord, Default)]
pub struct Value(Vec<TinyAsciiStr<{ *TYPE_LENGTH.end() }>>);
const TYPE_LENGTH: RangeInclusive<usize> = 3..=8;
@@ -45,14 +43,12 @@ impl Value {
/// use icu::locid::extensions::transform::Value;
///
/// let value = Value::try_from_bytes(b"hybrid").expect("Parsing failed.");
- ///
- /// assert_eq!(&value.to_string(), "hybrid");
/// ```
pub fn try_from_bytes(input: &[u8]) -> Result<Self, ParserError> {
let mut v = vec![];
let mut has_value = false;
- for subtag in get_subtag_iterator(input) {
+ for subtag in SubtagIterator::new(input) {
if !Self::is_type_subtag(subtag) {
return Err(ParserError::InvalidExtension);
}
@@ -116,4 +112,19 @@ impl FromStr for Value {
}
}
-impl_writeable_for_tinystr_list!(Value, "true", "hybrid", "foobar");
+impl_writeable_for_each_subtag_str_no_test!(Value, selff, selff.0.is_empty() => alloc::borrow::Cow::Borrowed("true"));
+
+#[test]
+fn test_writeable() {
+ use writeable::assert_writeable_eq;
+
+ let hybrid = "hybrid".parse().unwrap();
+ let foobar = "foobar".parse().unwrap();
+
+ assert_writeable_eq!(Value::default(), "true");
+ assert_writeable_eq!(Value::from_vec_unchecked(vec![hybrid]), "hybrid");
+ assert_writeable_eq!(
+ Value::from_vec_unchecked(vec![hybrid, foobar]),
+ "hybrid-foobar"
+ );
+}
diff --git a/vendor/icu_locid/src/extensions/unicode/attributes.rs b/vendor/icu_locid/src/extensions/unicode/attributes.rs
index 1f9536bfa..e58fb04da 100644
--- a/vendor/icu_locid/src/extensions/unicode/attributes.rs
+++ b/vendor/icu_locid/src/extensions/unicode/attributes.rs
@@ -79,18 +79,19 @@ impl Attributes {
///
/// ```
/// use icu::locid::extensions::unicode::{Attribute, Attributes};
+ /// use icu::locid::extensions_unicode_attribute as attribute;
+ /// use writeable::assert_writeable_eq;
///
- /// let attribute1: Attribute = "foobar".parse().expect("Parsing failed.");
- /// let attribute2: Attribute = "testing".parse().expect("Parsing failed.");
- /// let mut v = vec![attribute1, attribute2];
- ///
- /// let mut attributes: Attributes = Attributes::from_vec_unchecked(v);
+ /// let mut attributes = Attributes::from_vec_unchecked(vec![
+ /// attribute!("foobar"),
+ /// attribute!("testing"),
+ /// ]);
///
- /// assert_eq!(attributes.to_string(), "foobar-testing");
+ /// assert_writeable_eq!(attributes, "foobar-testing");
///
/// attributes.clear();
///
- /// assert_eq!(attributes.to_string(), "");
+ /// assert_writeable_eq!(attributes, "");
/// ```
pub fn clear(&mut self) -> Self {
core::mem::take(self)
diff --git a/vendor/icu_locid/src/extensions/unicode/keywords.rs b/vendor/icu_locid/src/extensions/unicode/keywords.rs
index dc9a15921..580cacaf1 100644
--- a/vendor/icu_locid/src/extensions/unicode/keywords.rs
+++ b/vendor/icu_locid/src/extensions/unicode/keywords.rs
@@ -29,11 +29,14 @@ use crate::ordering::SubtagOrderingResult;
/// Manually build up a [`Keywords`] object:
///
/// ```
-/// use icu::locid::extensions::unicode::{Key, Keywords, Value};
+/// use icu::locid::{
+/// extensions::unicode::Keywords, extensions_unicode_key as key,
+/// extensions_unicode_value as value, locale,
+/// };
///
-/// let key: Key = "hc".parse().expect("Failed to parse a Key.");
-/// let value: Value = "h23".parse().expect("Failed to parse a Value.");
-/// let keywords: Keywords = vec![(key, value)].into_iter().collect();
+/// let keywords = vec![(key!("hc"), value!("h23"))]
+/// .into_iter()
+/// .collect::<Keywords>();
///
/// assert_eq!(&keywords.to_string(), "hc-h23");
/// ```
@@ -113,15 +116,16 @@ impl Keywords {
/// # Examples
///
/// ```
- /// use icu::locid::extensions::unicode::{Key, Keywords, Value};
- /// use litemap::LiteMap;
+ /// use icu::locid::{
+ /// extensions::unicode::Keywords, extensions_unicode_key as key,
+ /// extensions_unicode_value as value,
+ /// };
///
- /// let key: Key = "ca".parse().expect("Failed to parse a Key.");
- /// let value: Value = "gregory".parse().expect("Failed to parse a Value.");
- /// let keywords: Keywords = vec![(key, value)].into_iter().collect();
+ /// let keywords = vec![(key!("ca"), value!("gregory"))]
+ /// .into_iter()
+ /// .collect::<Keywords>();
///
- /// let key: Key = "ca".parse().expect("Failed to parse a Key.");
- /// assert!(&keywords.contains_key(&key));
+ /// assert!(&keywords.contains_key(&key!("ca")));
/// ```
pub fn contains_key<Q>(&self, key: &Q) -> bool
where
@@ -137,17 +141,16 @@ impl Keywords {
/// # Examples
///
/// ```
- /// use icu::locid::extensions::unicode::{Key, Keywords, Value};
+ /// use icu::locid::{
+ /// extensions::unicode::Keywords, extensions_unicode_key as key,
+ /// extensions_unicode_value as value,
+ /// };
///
- /// let key: Key = "ca".parse().expect("Failed to parse a Key.");
- /// let value: Value = "buddhist".parse().expect("Failed to parse a Value.");
- /// let keywords: Keywords = vec![(key, value)].into_iter().collect();
+ /// let keywords = vec![(key!("ca"), value!("buddhist"))]
+ /// .into_iter()
+ /// .collect::<Keywords>();
///
- /// let key: Key = "ca".parse().expect("Failed to parse a Key.");
- /// assert_eq!(
- /// keywords.get(&key).map(|v| v.to_string()),
- /// Some("buddhist".to_string())
- /// );
+ /// assert_eq!(keywords.get(&key!("ca")), Some(&value!("buddhist")));
/// ```
pub fn get<Q>(&self, key: &Q) -> Option<&Value>
where
@@ -164,20 +167,19 @@ impl Keywords {
/// # Examples
///
/// ```
- /// use icu::locid::extensions::unicode::{Key, Keywords, Value};
+ /// use icu::locid::{
+ /// extensions::unicode::Keywords, extensions_unicode_key as key,
+ /// extensions_unicode_value as value,
+ /// };
///
- /// let key: Key = "ca".parse().expect("Failed to parse a Key.");
- /// let value: Value = "buddhist".parse().expect("Failed to parse a Value.");
- /// let mut keywords: Keywords = vec![(key, value)].into_iter().collect();
+ /// let mut keywords = vec![(key!("ca"), value!("buddhist"))]
+ /// .into_iter()
+ /// .collect::<Keywords>();
///
- /// let key: Key = "ca".parse().expect("Failed to parse a Key.");
- /// if let Some(value) = keywords.get_mut(&key) {
- /// *value = "gregory".parse().expect("Failed to parse a Value.");
+ /// if let Some(value) = keywords.get_mut(&key!("ca")) {
+ /// *value = value!("gregory");
/// }
- /// assert_eq!(
- /// keywords.get(&key).map(|v| v.to_string()),
- /// Some("gregory".to_string())
- /// );
+ /// assert_eq!(keywords.get(&key!("ca")), Some(&value!("gregory")));
/// ```
pub fn get_mut<Q>(&mut self, key: &Q) -> Option<&mut Value>
where
@@ -308,7 +310,6 @@ impl Keywords {
/// .extensions
/// .unicode
/// .keywords;
- /// assert_eq!(a, a_kwds.to_string());
/// assert!(a_kwds.strict_cmp(a.as_bytes()) == Ordering::Equal);
/// assert!(a_kwds.strict_cmp(b.as_bytes()) == Ordering::Less);
/// }
diff --git a/vendor/icu_locid/src/extensions/unicode/mod.rs b/vendor/icu_locid/src/extensions/unicode/mod.rs
index fabf1036c..687a8c383 100644
--- a/vendor/icu_locid/src/extensions/unicode/mod.rs
+++ b/vendor/icu_locid/src/extensions/unicode/mod.rs
@@ -11,21 +11,24 @@
//! # Examples
//!
//! ```
-//! use icu::locid::extensions::unicode::{Attribute, Key, Unicode, Value};
-//! use icu::locid::{LanguageIdentifier, Locale};
+//! use icu::locid::Locale;
+//! use icu::locid::{
+//! extensions::unicode::Unicode,
+//! extensions_unicode_attribute as attribute,
+//! extensions_unicode_key as key, extensions_unicode_value as value,
+//! };
//!
-//! let mut loc: Locale =
-//! "en-US-u-foobar-hc-h12".parse().expect("Parsing failed.");
+//! let loc: Locale = "en-US-u-foobar-hc-h12".parse().expect("Parsing failed.");
//!
-//! let key: Key = "hc".parse().expect("Parsing key failed.");
-//! let value: Value = "h12".parse().expect("Parsing value failed.");
-//! let attribute: Attribute =
-//! "foobar".parse().expect("Parsing attribute failed.");
-//!
-//! assert_eq!(loc.extensions.unicode.keywords.get(&key), Some(&value));
-//! assert!(loc.extensions.unicode.attributes.contains(&attribute));
-//!
-//! assert_eq!(&loc.extensions.unicode.to_string(), "-u-foobar-hc-h12");
+//! assert_eq!(
+//! loc.extensions.unicode.keywords.get(&key!("hc")),
+//! Some(&value!("h12"))
+//! );
+//! assert!(loc
+//! .extensions
+//! .unicode
+//! .attributes
+//! .contains(&attribute!("foobar")));
//! ```
mod attribute;
mod attributes;
@@ -60,15 +63,18 @@ use litemap::LiteMap;
/// # Examples
///
/// ```
-/// use icu::locid::extensions::unicode::{Key, Value};
/// use icu::locid::Locale;
+/// use icu::locid::{
+/// extensions_unicode_key as key, extensions_unicode_value as value,
+/// };
///
-/// let mut loc: Locale =
+/// let loc: Locale =
/// "de-u-hc-h12-ca-buddhist".parse().expect("Parsing failed.");
///
-/// let key: Key = "ca".parse().expect("Parsing key failed.");
-/// let value: Value = "buddhist".parse().expect("Parsing value failed.");
-/// assert_eq!(loc.extensions.unicode.keywords.get(&key), Some(&value));
+/// assert_eq!(
+/// loc.extensions.unicode.keywords.get(&key!("ca")),
+/// Some(&value!("buddhist"))
+/// );
/// ```
#[derive(Clone, PartialEq, Eq, Debug, Default, Hash, PartialOrd, Ord)]
#[allow(clippy::exhaustive_structs)] // spec-backed stable datastructure
@@ -205,7 +211,7 @@ impl writeable::Writeable for Unicode {
if self.is_empty() {
return Ok(());
}
- sink.write_str("-u")?;
+ sink.write_str("u")?;
if !self.attributes.is_empty() {
sink.write_char('-')?;
writeable::Writeable::write_to(&self.attributes, sink)?;
@@ -221,7 +227,7 @@ impl writeable::Writeable for Unicode {
if self.is_empty() {
return writeable::LengthHint::exact(0);
}
- let mut result = writeable::LengthHint::exact(2);
+ let mut result = writeable::LengthHint::exact(1);
if !self.attributes.is_empty() {
result += writeable::Writeable::writeable_length_hint(&self.attributes) + 1;
}
diff --git a/vendor/icu_locid/src/extensions/unicode/value.rs b/vendor/icu_locid/src/extensions/unicode/value.rs
index ce9982a4c..e6374372c 100644
--- a/vendor/icu_locid/src/extensions/unicode/value.rs
+++ b/vendor/icu_locid/src/extensions/unicode/value.rs
@@ -3,7 +3,7 @@
// (online at: https://github.com/unicode-org/icu4x/blob/main/LICENSE ).
use crate::helpers::ShortVec;
-use crate::parser::{get_subtag_iterator, ParserError};
+use crate::parser::{ParserError, SubtagIterator};
use alloc::vec::Vec;
use core::ops::RangeInclusive;
use core::str::FromStr;
@@ -20,20 +20,21 @@ use tinystr::TinyAsciiStr;
/// # Examples
///
/// ```
-/// use icu::locid::extensions::unicode::Value;
-///
-/// let value1: Value = "gregory".parse().expect("Failed to parse a Value.");
-/// let value2: Value =
-/// "islamic-civil".parse().expect("Failed to parse a Value.");
-/// let value3: Value = "true".parse().expect("Failed to parse a Value.");
+/// use icu::locid::{
+/// extensions::unicode::Value, extensions_unicode_value as value,
+/// };
+/// use writeable::assert_writeable_eq;
///
-/// assert_eq!(&value1.to_string(), "gregory");
-/// assert_eq!(&value2.to_string(), "islamic-civil");
+/// assert_writeable_eq!(value!("gregory"), "gregory");
+/// assert_writeable_eq!(
+/// "islamic-civil".parse::<Value>().unwrap(),
+/// "islamic-civil"
+/// );
///
-/// // The value "true" is special-cased to an empty value
-/// assert_eq!(&value3.to_string(), "");
+/// // The value "true" has the special, empty string representation
+/// assert_eq!(value!("true").to_string(), "");
/// ```
-#[derive(Debug, PartialEq, Eq, Clone, Hash, PartialOrd, Ord)]
+#[derive(Debug, PartialEq, Eq, Clone, Hash, PartialOrd, Ord, Default)]
pub struct Value(ShortVec<TinyAsciiStr<{ *VALUE_LENGTH.end() }>>);
const VALUE_LENGTH: RangeInclusive<usize> = 3..=8;
@@ -48,15 +49,13 @@ impl Value {
/// ```
/// use icu::locid::extensions::unicode::Value;
///
- /// let value = Value::try_from_bytes(b"buddhist").expect("Parsing failed.");
- ///
- /// assert_eq!(&value.to_string(), "buddhist");
+ /// Value::try_from_bytes(b"buddhist").expect("Parsing failed.");
/// ```
pub fn try_from_bytes(input: &[u8]) -> Result<Self, ParserError> {
let mut v = ShortVec::new();
if !input.is_empty() {
- for subtag in get_subtag_iterator(input) {
+ for subtag in SubtagIterator::new(input) {
let val = Self::subtag_from_bytes(subtag)?;
if let Some(val) = val {
v.push(val);
@@ -153,7 +152,7 @@ impl FromStr for Value {
}
}
-impl_writeable_for_tinystr_list!(Value, "", "islamic", "civil");
+impl_writeable_for_subtag_list!(Value, "islamic", "civil");
/// A macro allowing for compile-time construction of valid Unicode [`Value`] subtag.
///
diff --git a/vendor/icu_locid/src/helpers.rs b/vendor/icu_locid/src/helpers.rs
index e617ded5d..e5889a7b0 100644
--- a/vendor/icu_locid/src/helpers.rs
+++ b/vendor/icu_locid/src/helpers.rs
@@ -115,7 +115,7 @@ impl<T> ShortVec<T> {
#[allow(clippy::unwrap_used)]
// we know that the vec has exactly one element left
1 => (ShortVec::Single(v.pop().unwrap()), removed_item),
- // v has atleast 2 elements, create a Multi variant
+ // v has at least 2 elements, create a Multi variant
_ => (ShortVec::Multi(v), removed_item),
}
}
@@ -387,6 +387,7 @@ macro_rules! impl_tinystr_subtag {
}
impl writeable::Writeable for $name {
+ #[inline]
fn write_to<W: core::fmt::Write + ?Sized>(&self, sink: &mut W) -> core::fmt::Result {
sink.write_str(self.as_str())
}
@@ -394,6 +395,10 @@ macro_rules! impl_tinystr_subtag {
fn writeable_length_hint(&self) -> writeable::LengthHint {
writeable::LengthHint::exact(self.0.len())
}
+ #[inline]
+ fn write_to_string(&self) -> alloc::borrow::Cow<str> {
+ alloc::borrow::Cow::Borrowed(self.0.as_str())
+ }
}
writeable::impl_display_with_writeable!($name);
@@ -546,7 +551,7 @@ macro_rules! impl_tinystr_subtag {
}
macro_rules! impl_writeable_for_each_subtag_str_no_test {
- ($type:tt) => {
+ ($type:tt $(, $self:ident, $borrow_cond:expr => $borrow:expr)?) => {
impl writeable::Writeable for $type {
fn write_to<W: core::fmt::Write + ?Sized>(&self, sink: &mut W) -> core::fmt::Result {
let mut initial = true;
@@ -576,6 +581,20 @@ macro_rules! impl_writeable_for_each_subtag_str_no_test {
.expect("infallible");
result
}
+
+ $(
+ fn write_to_string(&self) -> alloc::borrow::Cow<str> {
+ #[allow(clippy::unwrap_used)] // impl_writeable_for_subtag_list's $borrow uses unwrap
+ let $self = self;
+ if $borrow_cond {
+ $borrow
+ } else {
+ let mut output = alloc::string::String::with_capacity(self.writeable_length_hint().capacity());
+ let _ = self.write_to(&mut output);
+ alloc::borrow::Cow::Owned(output)
+ }
+ }
+ )?
}
writeable::impl_display_with_writeable!($type);
@@ -584,7 +603,7 @@ macro_rules! impl_writeable_for_each_subtag_str_no_test {
macro_rules! impl_writeable_for_subtag_list {
($type:tt, $sample1:literal, $sample2:literal) => {
- impl_writeable_for_each_subtag_str_no_test!($type);
+ impl_writeable_for_each_subtag_str_no_test!($type, selff, selff.0.len() == 1 => alloc::borrow::Cow::Borrowed(selff.0.as_slice().get(0).unwrap().as_str()));
#[test]
fn test_writeable() {
@@ -594,27 +613,6 @@ macro_rules! impl_writeable_for_subtag_list {
$sample1,
);
writeable::assert_writeable_eq!(
- &$type::from_vec_unchecked(alloc::vec![
- $sample1.parse().unwrap(),
- $sample2.parse().unwrap()
- ]),
- core::concat!($sample1, "-", $sample2),
- );
- }
- };
-}
-
-macro_rules! impl_writeable_for_tinystr_list {
- ($type:tt, $if_empty:literal, $sample1:literal, $sample2:literal) => {
- impl_writeable_for_each_subtag_str_no_test!($type);
-
- #[test]
- fn test_writeable() {
- writeable::assert_writeable_eq!(
- &$type::from_vec_unchecked(vec![$sample1.parse().unwrap()]),
- $sample1,
- );
- writeable::assert_writeable_eq!(
&$type::from_vec_unchecked(vec![
$sample1.parse().unwrap(),
$sample2.parse().unwrap()
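A standalone sketch, not part of the vendored patch: it illustrates the observable effect of the optional `write_to_string` arm added to `impl_writeable_for_each_subtag_str_no_test!` above (and wired up for `LanguageIdentifier` and `Locale` further down in the patch). Simple values can now come back as a borrowed `Cow` instead of allocating a fresh `String`. This assumes the `icu` crate (with its `locid` component) and the `writeable` crate as dependencies.

use std::borrow::Cow;

use icu::locid::langid;
use writeable::Writeable; // brings `write_to_string` into scope

fn main() {
    // A bare language subtag hits the specialized arm and borrows the inner `&str`.
    assert!(matches!(langid!("en").write_to_string(), Cow::Borrowed("en")));

    // Identifiers with more subtags fall back to the generic path,
    // which builds a single owned `String`.
    let owned = langid!("en-US").write_to_string();
    assert!(matches!(owned, Cow::Owned(_)));
    assert_eq!(owned, "en-US");
}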
diff --git a/vendor/icu_locid/src/langid.rs b/vendor/icu_locid/src/langid.rs
index fc5435766..b6858c91b 100644
--- a/vendor/icu_locid/src/langid.rs
+++ b/vendor/icu_locid/src/langid.rs
@@ -7,27 +7,28 @@ use core::str::FromStr;
use crate::ordering::SubtagOrderingResult;
use crate::parser::{
- get_subtag_iterator, parse_language_identifier, parse_language_identifier_with_single_variant,
- ParserError, ParserMode,
+ parse_language_identifier, parse_language_identifier_with_single_variant, ParserError,
+ ParserMode, SubtagIterator,
};
use crate::subtags;
use alloc::string::String;
-use alloc::string::ToString;
+use writeable::Writeable;
/// A core struct representing a [`Unicode BCP47 Language Identifier`].
///
/// # Examples
///
/// ```
-/// use icu::locid::{subtags::*, LanguageIdentifier};
+/// use icu::locid::{
+/// langid, subtags_language as language, subtags_region as region,
+/// };
///
-/// let li: LanguageIdentifier = "en-US".parse().expect("Failed to parse.");
+/// let li = langid!("en-US");
///
-/// assert_eq!(li.language, "en".parse::<Language>().unwrap());
+/// assert_eq!(li.language, language!("en"));
/// assert_eq!(li.script, None);
-/// assert_eq!(li.region.unwrap(), "US".parse::<Region>().unwrap());
+/// assert_eq!(li.region, Some(region!("US")));
/// assert_eq!(li.variants.len(), 0);
-/// assert_eq!(li.to_string(), "en-US");
/// ```
///
/// # Parsing
@@ -47,18 +48,17 @@ use alloc::string::ToString;
/// # Examples
///
/// ```
-/// use icu::locid::{subtags::*, LanguageIdentifier};
+/// use icu::locid::{
+/// langid, subtags_language as language, subtags_region as region,
+/// subtags_script as script, subtags_variant as variant,
+/// };
///
-/// let li: LanguageIdentifier =
-/// "eN_latn_Us-Valencia".parse().expect("Failed to parse.");
+/// let li = langid!("eN_latn_Us-Valencia");
///
-/// assert_eq!(li.language, "en".parse::<Language>().unwrap());
-/// assert_eq!(li.script, "Latn".parse::<Script>().ok());
-/// assert_eq!(li.region, "US".parse::<Region>().ok());
-/// assert_eq!(
-/// li.variants.get(0),
-/// "valencia".parse::<Variant>().ok().as_ref()
-/// );
+/// assert_eq!(li.language, language!("en"));
+/// assert_eq!(li.script, Some(script!("Latn")));
+/// assert_eq!(li.region, Some(region!("US")));
+/// assert_eq!(li.variants.get(0), Some(&variant!("valencia")));
/// ```
///
/// [`Unicode BCP47 Language Identifier`]: https://unicode.org/reports/tr35/tr35.html#Unicode_language_identifier
@@ -84,10 +84,7 @@ impl LanguageIdentifier {
/// ```
/// use icu::locid::LanguageIdentifier;
///
- /// let li =
- /// LanguageIdentifier::try_from_bytes(b"en-US").expect("Parsing failed.");
- ///
- /// assert_eq!(li.to_string(), "en-US");
+ /// LanguageIdentifier::try_from_bytes(b"en-US").expect("Parsing failed");
/// ```
pub fn try_from_bytes(v: &[u8]) -> Result<Self, ParserError> {
parse_language_identifier(v, ParserMode::LanguageIdentifier)
@@ -117,12 +114,12 @@ impl LanguageIdentifier {
/// # Examples
///
/// ```
- /// use icu::locid::LanguageIdentifier;
+ /// use icu::locid::{langid, LanguageIdentifier};
///
/// let li = LanguageIdentifier::try_from_locale_bytes(b"en-US-x-posix")
/// .expect("Parsing failed.");
///
- /// assert_eq!(li.to_string(), "en-US");
+ /// assert_eq!(li, langid!("en-US"));
/// ```
///
/// This method should be used for input that may be a locale identifier.
@@ -139,7 +136,6 @@ impl LanguageIdentifier {
/// use icu::locid::LanguageIdentifier;
///
/// assert_eq!(LanguageIdentifier::default(), LanguageIdentifier::UND);
- /// assert_eq!("und", LanguageIdentifier::UND.to_string());
/// ```
pub const UND: Self = Self {
language: subtags::Language::UND,
@@ -159,13 +155,13 @@ impl LanguageIdentifier {
/// use icu::locid::LanguageIdentifier;
///
/// assert_eq!(
- /// LanguageIdentifier::canonicalize("pL_latn_pl"),
- /// Ok("pl-Latn-PL".to_string())
+ /// LanguageIdentifier::canonicalize("pL_latn_pl").as_deref(),
+ /// Ok("pl-Latn-PL")
/// );
/// ```
pub fn canonicalize<S: AsRef<[u8]>>(input: S) -> Result<String, ParserError> {
let lang_id = Self::try_from_bytes(input.as_ref())?;
- Ok(lang_id.to_string())
+ Ok(lang_id.write_to_string().into_owned())
}
/// Compare this [`LanguageIdentifier`] with BCP-47 bytes.
@@ -197,7 +193,6 @@ impl LanguageIdentifier {
/// let b = ab[1];
/// assert!(a.cmp(b) == Ordering::Less);
/// let a_langid = a.parse::<LanguageIdentifier>().unwrap();
- /// assert_eq!(a, a_langid.to_string());
/// assert!(a_langid.strict_cmp(a.as_bytes()) == Ordering::Equal);
/// assert!(a_langid.strict_cmp(b.as_bytes()) == Ordering::Less);
/// }
@@ -293,7 +288,7 @@ impl LanguageIdentifier {
};
}
- let mut iter = get_subtag_iterator(other.as_bytes());
+ let mut iter = SubtagIterator::new(other.as_bytes());
if !subtag_matches!(subtags::Language, iter, self.language) {
return false;
}
@@ -359,7 +354,7 @@ impl FromStr for LanguageIdentifier {
}
}
-impl_writeable_for_each_subtag_str_no_test!(LanguageIdentifier);
+impl_writeable_for_each_subtag_str_no_test!(LanguageIdentifier, selff, selff.script.is_none() && selff.region.is_none() && selff.variants.is_empty() => selff.language.write_to_string());
#[test]
fn test_writeable() {
@@ -387,14 +382,11 @@ fn test_writeable() {
/// # Examples
///
/// ```
-/// use icu::locid::subtags_language as language;
-/// use icu::locid::LanguageIdentifier;
-///
-/// let language = language!("en");
-/// let li = LanguageIdentifier::from(language);
+/// use icu::locid::{
+/// langid, subtags_language as language, LanguageIdentifier,
+/// };
///
-/// assert_eq!(li.language, language);
-/// assert_eq!(li.to_string(), "en");
+/// assert_eq!(LanguageIdentifier::from(language!("en")), langid!("en"));
/// ```
impl From<subtags::Language> for LanguageIdentifier {
fn from(language: subtags::Language) -> Self {
@@ -408,14 +400,12 @@ impl From<subtags::Language> for LanguageIdentifier {
/// # Examples
///
/// ```
-/// use icu::locid::subtags_script as script;
-/// use icu::locid::LanguageIdentifier;
+/// use icu::locid::{langid, subtags_script as script, LanguageIdentifier};
///
-/// let script = script!("latn");
-/// let li = LanguageIdentifier::from(Some(script));
-///
-/// assert_eq!(li.script.unwrap(), script);
-/// assert_eq!(li.to_string(), "und-Latn");
+/// assert_eq!(
+/// LanguageIdentifier::from(Some(script!("latn"))),
+/// langid!("und-Latn")
+/// );
/// ```
impl From<Option<subtags::Script>> for LanguageIdentifier {
fn from(script: Option<subtags::Script>) -> Self {
@@ -429,14 +419,12 @@ impl From<Option<subtags::Script>> for LanguageIdentifier {
/// # Examples
///
/// ```
-/// use icu::locid::subtags_region as region;
-/// use icu::locid::LanguageIdentifier;
+/// use icu::locid::{langid, subtags_region as region, LanguageIdentifier};
///
-/// let region = region!("US");
-/// let li = LanguageIdentifier::from(Some(region));
-///
-/// assert_eq!(li.region.unwrap(), region);
-/// assert_eq!(li.to_string(), "und-US");
+/// assert_eq!(
+/// LanguageIdentifier::from(Some(region!("US"))),
+/// langid!("und-US")
+/// );
/// ```
impl From<Option<subtags::Region>> for LanguageIdentifier {
fn from(region: Option<subtags::Region>) -> Self {
@@ -452,22 +440,18 @@ impl From<Option<subtags::Region>> for LanguageIdentifier {
/// # Examples
///
/// ```
-/// use icu::locid::LanguageIdentifier;
/// use icu::locid::{
-/// subtags_language as language, subtags_region as region,
-/// subtags_script as script,
+/// langid, subtags_language as language, subtags_region as region,
+/// subtags_script as script, LanguageIdentifier,
/// };
///
/// let lang = language!("en");
/// let script = script!("Latn");
/// let region = region!("US");
-/// let li = LanguageIdentifier::from((lang, Some(script), Some(region)));
-///
-/// assert_eq!(li.language, lang);
-/// assert_eq!(li.script.unwrap(), script);
-/// assert_eq!(li.region.unwrap(), region);
-/// assert_eq!(li.variants.len(), 0);
-/// assert_eq!(li.to_string(), "en-Latn-US");
+/// assert_eq!(
+/// LanguageIdentifier::from((lang, Some(script), Some(region))),
+/// langid!("en-Latn-US")
+/// );
/// ```
impl
From<(
@@ -497,7 +481,6 @@ impl
/// # Examples
///
/// ```
-/// use icu::locid::LanguageIdentifier;
/// use icu::locid::{
/// langid, subtags_language as language, subtags_region as region,
/// subtags_script as script,
diff --git a/vendor/icu_locid/src/lib.rs b/vendor/icu_locid/src/lib.rs
index 885c4b743..226a8e53c 100644
--- a/vendor/icu_locid/src/lib.rs
+++ b/vendor/icu_locid/src/lib.rs
@@ -22,39 +22,23 @@
//! # Examples
//!
//! ```
-//! use icu::locid::subtags::{Language, Region};
//! use icu::locid::Locale;
+//! use icu::locid::{
+//! locale, subtags_language as language, subtags_region as region,
+//! };
//!
-//! let mut loc: Locale = "en-US".parse().expect("Parsing failed.");
-//!
-//! let lang: Language = "en".parse().expect("Parsing failed.");
-//! let region: Region = "US".parse().expect("Parsing failed.");
+//! let mut loc: Locale = locale!("en-US");
//!
-//! assert_eq!(loc.id.language, lang);
+//! assert_eq!(loc.id.language, language!("en"));
//! assert_eq!(loc.id.script, None);
-//! assert_eq!(loc.id.region, Some(region));
+//! assert_eq!(loc.id.region, Some(region!("US")));
//! assert_eq!(loc.id.variants.len(), 0);
//!
-//! let region: Region = "GB".parse().expect("Parsing failed.");
-//! loc.id.region = Some(region);
+//! loc.id.region = Some(region!("GB"));
//!
-//! assert_eq!(loc.to_string(), "en-GB");
+//! assert_eq!(loc, locale!("en-GB"));
//! ```
//!
-//! ## Macros
-//!
-//! ```rust
-//! use icu::locid::{
-//! langid, subtags_language as language, subtags_region as region,
-//! };
-//!
-//! let lid = langid!("EN_US");
-//!
-//! assert_eq!(lid.language, language!("en"));
-//! assert_eq!(lid.region, Some(region!("US")));
-//! ```
-
-//!
//! For more details, see [`Locale`] and [`LanguageIdentifier`].
//!
//! [`UTS #35: Unicode LDML 3. Unicode Language and Locale Identifiers`]: https://unicode.org/reports/tr35/tr35.html#Unicode_Language_and_Locale_Identifiers
diff --git a/vendor/icu_locid/src/locale.rs b/vendor/icu_locid/src/locale.rs
index d7040d31a..5d9109fee 100644
--- a/vendor/icu_locid/src/locale.rs
+++ b/vendor/icu_locid/src/locale.rs
@@ -4,16 +4,15 @@
use crate::ordering::SubtagOrderingResult;
use crate::parser::{
- get_subtag_iterator, parse_locale,
- parse_locale_with_single_variant_single_keyword_unicode_keyword_extension, ParserError,
- ParserMode,
+ parse_locale, parse_locale_with_single_variant_single_keyword_unicode_keyword_extension,
+ ParserError, ParserMode, SubtagIterator,
};
use crate::{extensions, subtags, LanguageIdentifier};
use alloc::string::String;
-use alloc::string::ToString;
use core::cmp::Ordering;
use core::str::FromStr;
use tinystr::TinyAsciiStr;
+use writeable::Writeable;
/// A core struct representing a [`Unicode Locale Identifier`].
///
@@ -28,20 +27,21 @@ use tinystr::TinyAsciiStr;
/// # Examples
///
/// ```
-/// use icu::locid::extensions::unicode::{Key, Value};
-/// use icu::locid::{subtags::*, Locale};
+/// use icu_locid::{
+/// extensions_unicode_key as key, extensions_unicode_value as value,
+/// locale, subtags_language as language, subtags_region as region,
+/// };
///
-/// let loc: Locale = "en-US-u-ca-buddhist".parse().expect("Failed to parse.");
+/// let loc = locale!("en-US-u-ca-buddhist");
///
-/// assert_eq!(loc.id.language, "en".parse::<Language>().unwrap());
+/// assert_eq!(loc.id.language, language!("en"));
/// assert_eq!(loc.id.script, None);
-/// assert_eq!(loc.id.region, "US".parse::<Region>().ok());
+/// assert_eq!(loc.id.region, Some(region!("US")));
/// assert_eq!(loc.id.variants.len(), 0);
-/// assert_eq!(loc.to_string(), "en-US-u-ca-buddhist");
-///
-/// let key: Key = "ca".parse().expect("Parsing key failed.");
-/// let value: Value = "buddhist".parse().expect("Parsing value failed.");
-/// assert_eq!(loc.extensions.unicode.keywords.get(&key), Some(&value));
+/// assert_eq!(
+/// loc.extensions.unicode.keywords.get(&key!("ca")),
+/// Some(&value!("buddhist"))
+/// );
/// ```
///
/// # Parsing
@@ -87,6 +87,8 @@ pub struct Locale {
#[test]
fn test_sizes() {
+ // Remove when we upgrade to a compiler where the new sizes are default
+ let forced_nightly = std::env::var("ICU4X_BUILDING_WITH_FORCED_NIGHTLY").is_ok();
assert_eq!(core::mem::size_of::<subtags::Language>(), 3);
assert_eq!(core::mem::size_of::<subtags::Script>(), 4);
assert_eq!(core::mem::size_of::<subtags::Region>(), 3);
@@ -99,12 +101,21 @@ fn test_sizes() {
assert_eq!(core::mem::size_of::<extensions::transform::Fields>(), 24);
assert_eq!(core::mem::size_of::<extensions::unicode::Attributes>(), 24);
- assert_eq!(core::mem::size_of::<extensions::unicode::Keywords>(), 48);
+ assert_eq!(
+ core::mem::size_of::<extensions::unicode::Keywords>(),
+ if forced_nightly { 40 } else { 48 }
+ );
assert_eq!(core::mem::size_of::<Vec<extensions::other::Other>>(), 24);
assert_eq!(core::mem::size_of::<extensions::private::Private>(), 24);
- assert_eq!(core::mem::size_of::<extensions::Extensions>(), 192);
+ assert_eq!(
+ core::mem::size_of::<extensions::Extensions>(),
+ if forced_nightly { 184 } else { 192 }
+ );
- assert_eq!(core::mem::size_of::<Locale>(), 240);
+ assert_eq!(
+ core::mem::size_of::<Locale>(),
+ if forced_nightly { 232 } else { 240 }
+ );
}
impl Locale {
@@ -116,10 +127,7 @@ impl Locale {
/// ```
/// use icu::locid::Locale;
///
- /// let loc = Locale::try_from_bytes("en-US-u-hc-h12".as_bytes())
- /// .expect("Parsing failed.");
- ///
- /// assert_eq!(loc.to_string(), "en-US-u-hc-h12");
+ /// Locale::try_from_bytes(b"en-US-u-hc-h12").unwrap();
/// ```
pub fn try_from_bytes(v: &[u8]) -> Result<Self, ParserError> {
parse_locale(v)
@@ -133,7 +141,6 @@ impl Locale {
/// use icu::locid::Locale;
///
/// assert_eq!(Locale::default(), Locale::UND);
- /// assert_eq!("und", Locale::UND.to_string());
/// ```
pub const UND: Self = Self {
id: LanguageIdentifier::UND,
@@ -151,13 +158,13 @@ impl Locale {
/// use icu::locid::Locale;
///
/// assert_eq!(
- /// Locale::canonicalize("pL_latn_pl-U-HC-H12"),
- /// Ok("pl-Latn-PL-u-hc-h12".to_string())
+ /// Locale::canonicalize("pL_latn_pl-U-HC-H12").as_deref(),
+ /// Ok("pl-Latn-PL-u-hc-h12")
/// );
/// ```
pub fn canonicalize<S: AsRef<[u8]>>(input: S) -> Result<String, ParserError> {
let locale = Self::try_from_bytes(input.as_ref())?;
- Ok(locale.to_string())
+ Ok(locale.write_to_string().into_owned())
}
/// Compare this [`Locale`] with BCP-47 bytes.
@@ -189,7 +196,6 @@ impl Locale {
/// let b = ab[1];
/// assert!(a.cmp(b) == Ordering::Less);
/// let a_loc = a.parse::<Locale>().unwrap();
- /// assert_eq!(a, a_loc.to_string());
/// assert!(a_loc.strict_cmp(a.as_bytes()) == Ordering::Equal);
/// assert!(a_loc.strict_cmp(b.as_bytes()) == Ordering::Less);
/// }
@@ -286,7 +292,7 @@ impl Locale {
};
}
- let mut iter = get_subtag_iterator(other.as_bytes());
+ let mut iter = SubtagIterator::new(other.as_bytes());
if !subtag_matches!(subtags::Language, iter, self.id.language) {
return false;
}
@@ -391,7 +397,7 @@ impl core::fmt::Debug for Locale {
}
}
-impl_writeable_for_each_subtag_str_no_test!(Locale);
+impl_writeable_for_each_subtag_str_no_test!(Locale, selff, selff.extensions.is_empty() => selff.id.write_to_string());
#[test]
fn test_writeable() {
@@ -426,14 +432,10 @@ fn test_writeable() {
/// # Examples
///
/// ```
-/// use icu::locid::subtags_language as language;
/// use icu::locid::Locale;
+/// use icu::locid::{locale, subtags_language as language};
///
-/// let language = language!("en");
-/// let loc = Locale::from(language);
-///
-/// assert_eq!(loc.id.language, language);
-/// assert_eq!(loc.to_string(), "en");
+/// assert_eq!(Locale::from(language!("en")), locale!("en"));
/// ```
impl From<subtags::Language> for Locale {
fn from(language: subtags::Language) -> Self {
@@ -447,14 +449,10 @@ impl From<subtags::Language> for Locale {
/// # Examples
///
/// ```
-/// use icu::locid::subtags_script as script;
/// use icu::locid::Locale;
+/// use icu::locid::{locale, subtags_script as script};
///
-/// let script = script!("latn");
-/// let loc = Locale::from(Some(script));
-///
-/// assert_eq!(loc.id.script.unwrap(), script);
-/// assert_eq!(loc.to_string(), "und-Latn");
+/// assert_eq!(Locale::from(Some(script!("latn"))), locale!("und-Latn"));
/// ```
impl From<Option<subtags::Script>> for Locale {
fn from(script: Option<subtags::Script>) -> Self {
@@ -468,14 +466,10 @@ impl From<Option<subtags::Script>> for Locale {
/// # Examples
///
/// ```
-/// use icu::locid::subtags_region as region;
/// use icu::locid::Locale;
+/// use icu::locid::{locale, subtags_region as region};
///
-/// let region = region!("US");
-/// let loc = Locale::from(Some(region));
-///
-/// assert_eq!(loc.id.region.unwrap(), region);
-/// assert_eq!(loc.to_string(), "und-US");
+/// assert_eq!(Locale::from(Some(region!("US"))), locale!("und-US"));
/// ```
impl From<Option<subtags::Region>> for Locale {
fn from(region: Option<subtags::Region>) -> Self {
@@ -491,20 +485,18 @@ impl From<Option<subtags::Region>> for Locale {
/// ```
/// use icu::locid::Locale;
/// use icu::locid::{
-/// subtags_language as language, subtags_region as region,
+/// locale, subtags_language as language, subtags_region as region,
/// subtags_script as script,
/// };
///
-/// let lang = language!("en");
-/// let script = script!("Latn");
-/// let region = region!("US");
-/// let loc = Locale::from((lang, Some(script), Some(region)));
-///
-/// assert_eq!(loc.id.language, lang);
-/// assert_eq!(loc.id.script.unwrap(), script);
-/// assert_eq!(loc.id.region.unwrap(), region);
-/// assert_eq!(loc.id.variants.len(), 0);
-/// assert_eq!(loc.to_string(), "en-Latn-US");
+/// assert_eq!(
+/// Locale::from((
+/// language!("en"),
+/// Some(script!("Latn")),
+/// Some(region!("US"))
+/// )),
+/// locale!("en-Latn-US")
+/// );
/// ```
impl
From<(
diff --git a/vendor/icu_locid/src/parser/errors.rs b/vendor/icu_locid/src/parser/errors.rs
index a989bcc60..5cbbb2bd4 100644
--- a/vendor/icu_locid/src/parser/errors.rs
+++ b/vendor/icu_locid/src/parser/errors.rs
@@ -48,6 +48,22 @@ pub enum ParserError {
/// ```
#[displaydoc("Invalid extension")]
InvalidExtension,
+
+ /// Duplicated extension.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use icu::locid::Locale;
+ /// use icu::locid::ParserError;
+ ///
+ /// assert_eq!(
+ /// "und-u-hc-h12-u-ca-calendar".parse::<Locale>(),
+ /// Err(ParserError::DuplicatedExtension)
+ /// );
+ /// ```
+ #[displaydoc("Duplicated extension")]
+ DuplicatedExtension,
}
#[cfg(feature = "std")]
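A small sketch, not part of the vendored patch, showing how a caller might branch on the new `DuplicatedExtension` variant; the `describe` helper and its messages are illustrative only, and the duplicated-extension input mirrors the doc example above.

use icu::locid::{Locale, ParserError};

fn describe(input: &str) -> &'static str {
    match input.parse::<Locale>() {
        Ok(_) => "well-formed locale",
        Err(ParserError::DuplicatedExtension) => "the same extension appears twice",
        Err(_) => "some other parse error",
    }
}

fn main() {
    // `-u-` occurs twice, so parsing fails with the new variant.
    assert_eq!(
        describe("und-u-hc-h12-u-ca-calendar"),
        "the same extension appears twice"
    );
    assert_eq!(describe("en-US"), "well-formed locale");
}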
diff --git a/vendor/icu_locid/src/parser/langid.rs b/vendor/icu_locid/src/parser/langid.rs
index 9efa078ac..653ca7e6e 100644
--- a/vendor/icu_locid/src/parser/langid.rs
+++ b/vendor/icu_locid/src/parser/langid.rs
@@ -5,7 +5,7 @@
pub use super::errors::ParserError;
use crate::extensions::unicode::{Attribute, Key, Value};
use crate::extensions::ExtensionType;
-use crate::parser::{get_subtag_iterator, SubtagIterator};
+use crate::parser::SubtagIterator;
use crate::LanguageIdentifier;
use crate::{extensions, subtags};
use alloc::vec::Vec;
@@ -103,7 +103,7 @@ pub fn parse_language_identifier(
t: &[u8],
mode: ParserMode,
) -> Result<LanguageIdentifier, ParserError> {
- let mut iter = get_subtag_iterator(t);
+ let mut iter = SubtagIterator::new(t);
parse_language_identifier_from_iter(&mut iter, mode)
}
@@ -127,9 +127,9 @@ pub const fn parse_locale_with_single_variant_single_keyword_unicode_extension_f
let mut variant = None;
let mut keyword = None;
- if let (i, Some((t, start, end))) = iter.next_manual() {
+ if let (i, Some((start, end))) = iter.next_manual() {
iter = i;
- match subtags::Language::try_from_bytes_manual_slice(t, start, end) {
+ match subtags::Language::try_from_bytes_manual_slice(iter.slice, start, end) {
Ok(l) => language = l,
Err(e) => return Err(e),
}
@@ -139,19 +139,23 @@ pub const fn parse_locale_with_single_variant_single_keyword_unicode_extension_f
let mut position = ParserPosition::Script;
- while let Some((t, start, end)) = iter.peek_manual() {
+ while let Some((start, end)) = iter.peek_manual() {
if !matches!(mode, ParserMode::LanguageIdentifier) && end - start == 1 {
break;
}
if matches!(position, ParserPosition::Script) {
- if let Ok(s) = subtags::Script::try_from_bytes_manual_slice(t, start, end) {
+ if let Ok(s) = subtags::Script::try_from_bytes_manual_slice(iter.slice, start, end) {
script = Some(s);
position = ParserPosition::Region;
- } else if let Ok(r) = subtags::Region::try_from_bytes_manual_slice(t, start, end) {
+ } else if let Ok(r) =
+ subtags::Region::try_from_bytes_manual_slice(iter.slice, start, end)
+ {
region = Some(r);
position = ParserPosition::Variant;
- } else if let Ok(v) = subtags::Variant::try_from_bytes_manual_slice(t, start, end) {
+ } else if let Ok(v) =
+ subtags::Variant::try_from_bytes_manual_slice(iter.slice, start, end)
+ {
// We cannot handle multiple variants in a const context
debug_assert!(variant.is_none());
variant = Some(v);
@@ -162,10 +166,12 @@ pub const fn parse_locale_with_single_variant_single_keyword_unicode_extension_f
return Err(ParserError::InvalidSubtag);
}
} else if matches!(position, ParserPosition::Region) {
- if let Ok(s) = subtags::Region::try_from_bytes_manual_slice(t, start, end) {
+ if let Ok(s) = subtags::Region::try_from_bytes_manual_slice(iter.slice, start, end) {
region = Some(s);
position = ParserPosition::Variant;
- } else if let Ok(v) = subtags::Variant::try_from_bytes_manual_slice(t, start, end) {
+ } else if let Ok(v) =
+ subtags::Variant::try_from_bytes_manual_slice(iter.slice, start, end)
+ {
// We cannot handle multiple variants in a const context
debug_assert!(variant.is_none());
variant = Some(v);
@@ -175,7 +181,8 @@ pub const fn parse_locale_with_single_variant_single_keyword_unicode_extension_f
} else {
return Err(ParserError::InvalidSubtag);
}
- } else if let Ok(v) = subtags::Variant::try_from_bytes_manual_slice(t, start, end) {
+ } else if let Ok(v) = subtags::Variant::try_from_bytes_manual_slice(iter.slice, start, end)
+ {
debug_assert!(matches!(position, ParserPosition::Variant));
if variant.is_some() {
// We cannot handle multiple variants in a const context
@@ -192,12 +199,12 @@ pub const fn parse_locale_with_single_variant_single_keyword_unicode_extension_f
}
if matches!(mode, ParserMode::Locale) {
- if let Some((bytes, start, end)) = iter.peek_manual() {
- match ExtensionType::try_from_bytes_manual_slice(bytes, start, end) {
+ if let Some((start, end)) = iter.peek_manual() {
+ match ExtensionType::try_from_bytes_manual_slice(iter.slice, start, end) {
Ok(ExtensionType::Unicode) => {
iter = iter.next_manual().0;
- if let Some((bytes, start, end)) = iter.peek_manual() {
- if Attribute::try_from_bytes_manual_slice(bytes, start, end).is_ok() {
+ if let Some((start, end)) = iter.peek_manual() {
+ if Attribute::try_from_bytes_manual_slice(iter.slice, start, end).is_ok() {
// We cannot handle Attributes in a const context
return Err(ParserError::InvalidSubtag);
}
@@ -206,19 +213,21 @@ pub const fn parse_locale_with_single_variant_single_keyword_unicode_extension_f
let mut key = None;
let mut current_type = None;
- while let Some((bytes, start, end)) = iter.peek_manual() {
+ while let Some((start, end)) = iter.peek_manual() {
let slen = end - start;
if slen == 2 {
if key.is_some() {
// We cannot handle more than one Key in a const context
return Err(ParserError::InvalidSubtag);
}
- match Key::try_from_bytes_manual_slice(bytes, start, end) {
+ match Key::try_from_bytes_manual_slice(iter.slice, start, end) {
Ok(k) => key = Some(k),
Err(e) => return Err(e),
};
} else if key.is_some() {
- match Value::parse_subtag_from_bytes_manual_slice(bytes, start, end) {
+ match Value::parse_subtag_from_bytes_manual_slice(
+ iter.slice, start, end,
+ ) {
Ok(Some(t)) => {
if current_type.is_some() {
// We cannot handle more than one type in a const context
@@ -261,7 +270,7 @@ pub const fn parse_language_identifier_with_single_variant(
),
ParserError,
> {
- let iter = get_subtag_iterator(t);
+ let iter = SubtagIterator::new(t);
match parse_locale_with_single_variant_single_keyword_unicode_extension_from_iter(iter, mode) {
Ok((l, s, r, v, _)) => Ok((l, s, r, v)),
Err(e) => Err(e),
diff --git a/vendor/icu_locid/src/parser/locale.rs b/vendor/icu_locid/src/parser/locale.rs
index 805b6c290..175fd3a05 100644
--- a/vendor/icu_locid/src/parser/locale.rs
+++ b/vendor/icu_locid/src/parser/locale.rs
@@ -6,13 +6,13 @@ use tinystr::TinyAsciiStr;
use crate::extensions::{self, Extensions};
use crate::parser::errors::ParserError;
-use crate::parser::{get_subtag_iterator, parse_language_identifier_from_iter, ParserMode};
+use crate::parser::{parse_language_identifier_from_iter, ParserMode, SubtagIterator};
use crate::{subtags, Locale};
use super::parse_locale_with_single_variant_single_keyword_unicode_extension_from_iter;
pub fn parse_locale(t: &[u8]) -> Result<Locale, ParserError> {
- let mut iter = get_subtag_iterator(t);
+ let mut iter = SubtagIterator::new(t);
let id = parse_language_identifier_from_iter(&mut iter, ParserMode::Locale)?;
let extensions = if iter.peek().is_some() {
@@ -37,6 +37,6 @@ pub const fn parse_locale_with_single_variant_single_keyword_unicode_keyword_ext
),
ParserError,
> {
- let iter = get_subtag_iterator(t);
+ let iter = SubtagIterator::new(t);
parse_locale_with_single_variant_single_keyword_unicode_extension_from_iter(iter, mode)
}
diff --git a/vendor/icu_locid/src/parser/mod.rs b/vendor/icu_locid/src/parser/mod.rs
index fef10b0ab..4b02f71c9 100644
--- a/vendor/icu_locid/src/parser/mod.rs
+++ b/vendor/icu_locid/src/parser/mod.rs
@@ -17,72 +17,93 @@ pub use locale::{
parse_locale, parse_locale_with_single_variant_single_keyword_unicode_keyword_extension,
};
-pub const fn get_subtag_iterator(slice: &[u8]) -> SubtagIterator {
- let mut current_start = 0;
+#[inline]
+const fn is_separator(slice: &[u8], idx: usize) -> bool {
#[allow(clippy::indexing_slicing)]
- while current_start < slice.len()
- && (slice[current_start] == b'-' || slice[current_start] == b'_')
- {
- current_start += 1;
- }
- let mut current_end = current_start;
- #[allow(clippy::indexing_slicing)]
- while current_end < slice.len() && slice[current_end] != b'-' && slice[current_end] != b'_' {
- current_end += 1;
- }
- SubtagIterator {
- slice,
- current_start,
- current_end,
+ let b = slice[idx];
+ b == b'-' || b == b'_'
+}
+
+const fn get_current_subtag(slice: &[u8], idx: usize) -> (usize, usize) {
+ debug_assert!(idx < slice.len());
+
+ // This function is called only with idx == 0 or with idx pointing at a separator.
+ let (start, mut end) = if is_separator(slice, idx) {
+ // If it's a separator, set the start to idx+1 and advance the idx to the next char.
+ (idx + 1, idx + 1)
+ } else {
+ // If it's idx=0, start is 0 and end is set to 1
+ debug_assert!(idx == 0);
+ (0, 1)
+ };
+
+ while end < slice.len() && !is_separator(slice, end) {
+ // Advance until we reach end of slice or a separator.
+ end += 1;
}
+ // Notice: this slice may be empty (start == end) for cases like `"en-"` or `"en--US"`
+ (start, end)
}
+// `SubtagIterator` is a helper iterator for [`LanguageIdentifier`] and [`Locale`] parsing.
+//
+// It is quite extraordinary due to focus on performance and Rust limitations for `const`
+// functions.
+//
+// The iterator is eager and fallible allowing it to reject invalid slices such as `"-"`, `"-en"`,
+// `"en-"` etc.
+//
+// The iterator provides methods available for static users - `next_manual` and `peek_manual`,
+// as well as typical `Peekable` iterator APIs - `next` and `peek`.
+//
+// All methods return an `Option` of a `Result`.
#[derive(Copy, Clone, Debug)]
pub struct SubtagIterator<'a> {
- slice: &'a [u8],
- current_start: usize,
- current_end: usize,
+ pub slice: &'a [u8],
+ done: bool,
+ // done + subtag is faster than Option<(usize, usize)>
+ // at the time of writing.
+ subtag: (usize, usize),
}
-pub type ManualSlice<'a> = (&'a [u8], usize, usize);
-
impl<'a> SubtagIterator<'a> {
- pub const fn next_manual(mut self) -> (Self, Option<ManualSlice<'a>>) {
- if self.current_start == self.current_end {
- (self, None)
+ pub const fn new(slice: &'a [u8]) -> Self {
+ let subtag = if slice.is_empty() || is_separator(slice, 0) {
+ // This returns (0, 0) which returns Some(b"") for slices like `"-en"` or `"-"`
+ (0, 0)
} else {
- let r = (self.slice, self.current_start, self.current_end);
- self.current_start = self.current_end;
- #[allow(clippy::indexing_slicing)]
- while self.current_start < self.slice.len()
- && (self.slice[self.current_start] == b'-'
- || self.slice[self.current_start] == b'_')
- {
- self.current_start += 1;
- }
- self.current_end = self.current_start;
- #[allow(clippy::indexing_slicing)]
- while self.current_end < self.slice.len()
- && self.slice[self.current_end] != b'-'
- && self.slice[self.current_end] != b'_'
- {
- self.current_end += 1;
- }
- (self, Some(r))
+ get_current_subtag(slice, 0)
+ };
+ Self {
+ slice,
+ done: false,
+ subtag,
}
}
- pub const fn peek_manual(&self) -> Option<ManualSlice<'a>> {
- if self.current_start == self.current_end {
- None
+ pub const fn next_manual(mut self) -> (Self, Option<(usize, usize)>) {
+ if self.done {
+ return (self, None);
+ }
+ let result = self.subtag;
+ if result.1 < self.slice.len() {
+ self.subtag = get_current_subtag(self.slice, result.1);
} else {
- Some((self.slice, self.current_start, self.current_end))
+ self.done = true;
}
+ (self, Some(result))
+ }
+
+ pub const fn peek_manual(&self) -> Option<(usize, usize)> {
+ if self.done {
+ return None;
+ }
+ Some(self.subtag)
}
pub fn peek(&self) -> Option<&'a [u8]> {
#[allow(clippy::indexing_slicing)] // peek_manual returns valid indices
- self.peek_manual().map(|(t, s, e)| &t[s..e])
+ self.peek_manual().map(|(s, e)| &self.slice[s..e])
}
}
@@ -91,8 +112,120 @@ impl<'a> Iterator for SubtagIterator<'a> {
fn next(&mut self) -> Option<Self::Item> {
let (s, res) = self.next_manual();
- self.clone_from(&s);
+ *self = s;
#[allow(clippy::indexing_slicing)] // next_manual returns valid indices
- res.map(|(t, s, e)| &t[s..e])
+ res.map(|(s, e)| &self.slice[s..e])
+ }
+}
+
+#[cfg(test)]
+mod test {
+ use super::*;
+
+ fn slice_to_str(input: &[u8]) -> &str {
+ std::str::from_utf8(input).unwrap()
+ }
+
+ #[test]
+ fn subtag_iterator_peek_test() {
+ let slice = "de_at-u-ca-foobar";
+ let mut si = SubtagIterator::new(slice.as_bytes());
+
+ assert_eq!(si.peek().map(slice_to_str), Some("de"));
+ assert_eq!(si.peek().map(slice_to_str), Some("de"));
+ assert_eq!(si.next().map(slice_to_str), Some("de"));
+
+ assert_eq!(si.peek().map(slice_to_str), Some("at"));
+ assert_eq!(si.peek().map(slice_to_str), Some("at"));
+ assert_eq!(si.next().map(slice_to_str), Some("at"));
+ }
+
+ #[test]
+ fn subtag_iterator_test() {
+ let slice = "";
+ let mut si = SubtagIterator::new(slice.as_bytes());
+ assert_eq!(si.next().map(slice_to_str), Some(""));
+
+ let slice = "-";
+ let mut si = SubtagIterator::new(slice.as_bytes());
+ assert_eq!(si.next().map(slice_to_str), Some(""));
+
+ let slice = "-en";
+ let mut si = SubtagIterator::new(slice.as_bytes());
+ assert_eq!(si.next().map(slice_to_str), Some(""));
+ assert_eq!(si.next().map(slice_to_str), Some("en"));
+ assert_eq!(si.next(), None);
+
+ let slice = "en";
+ let si = SubtagIterator::new(slice.as_bytes());
+ assert_eq!(si.map(slice_to_str).collect::<Vec<_>>(), vec!["en",]);
+
+ let slice = "en-";
+ let si = SubtagIterator::new(slice.as_bytes());
+ assert_eq!(si.map(slice_to_str).collect::<Vec<_>>(), vec!["en", "",]);
+
+ let slice = "--";
+ let mut si = SubtagIterator::new(slice.as_bytes());
+ assert_eq!(si.next().map(slice_to_str), Some(""));
+ assert_eq!(si.next().map(slice_to_str), Some(""));
+ assert_eq!(si.next().map(slice_to_str), Some(""));
+ assert_eq!(si.next(), None);
+
+ let slice = "-en-";
+ let mut si = SubtagIterator::new(slice.as_bytes());
+ assert_eq!(si.next().map(slice_to_str), Some(""));
+ assert_eq!(si.next().map(slice_to_str), Some("en"));
+ assert_eq!(si.next().map(slice_to_str), Some(""));
+ assert_eq!(si.next(), None);
+
+ let slice = "de_at-u-ca-foobar";
+ let si = SubtagIterator::new(slice.as_bytes());
+ assert_eq!(
+ si.map(slice_to_str).collect::<Vec<_>>(),
+ vec!["de", "at", "u", "ca", "foobar",]
+ );
+ }
+
+ #[test]
+ fn get_current_subtag_test() {
+ let slice = "-";
+ let current = get_current_subtag(slice.as_bytes(), 0);
+ assert_eq!(current, (1, 1));
+
+ let slice = "-en";
+ let current = get_current_subtag(slice.as_bytes(), 0);
+ assert_eq!(current, (1, 3));
+
+ let slice = "-en-";
+ let current = get_current_subtag(slice.as_bytes(), 3);
+ assert_eq!(current, (4, 4));
+
+ let slice = "en-";
+ let current = get_current_subtag(slice.as_bytes(), 0);
+ assert_eq!(current, (0, 2));
+
+ let current = get_current_subtag(slice.as_bytes(), 2);
+ assert_eq!(current, (3, 3));
+
+ let slice = "en--US";
+ let current = get_current_subtag(slice.as_bytes(), 0);
+ assert_eq!(current, (0, 2));
+
+ let current = get_current_subtag(slice.as_bytes(), 2);
+ assert_eq!(current, (3, 3));
+
+ let current = get_current_subtag(slice.as_bytes(), 3);
+ assert_eq!(current, (4, 6));
+
+ let slice = "--";
+ let current = get_current_subtag(slice.as_bytes(), 0);
+ assert_eq!(current, (1, 1));
+
+ let current = get_current_subtag(slice.as_bytes(), 1);
+ assert_eq!(current, (2, 2));
+
+ let slice = "-";
+ let current = get_current_subtag(slice.as_bytes(), 0);
+ assert_eq!(current, (1, 1));
}
}
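A sketch, not part of the vendored patch: `SubtagIterator` is crate-internal, so something like this would have to live next to the tests above. It shows why `next_manual` takes and returns the iterator by value: that shape is what lets the parser functions elsewhere in this patch drive it from `const fn` code. The `count_subtags` helper and its test are hypothetical.

const fn count_subtags(slice: &[u8]) -> usize {
    let mut iter = SubtagIterator::new(slice);
    let mut count = 0;
    loop {
        // `next_manual` consumes the iterator and hands back the advanced copy,
        // so the loop threads it through manually instead of using `&mut self`.
        let (next, subtag) = iter.next_manual();
        iter = next;
        if subtag.is_none() {
            return count;
        }
        count += 1;
    }
}

#[test]
fn count_subtags_in_const_context() {
    const N: usize = count_subtags(b"de_at-u-ca-foobar");
    assert_eq!(N, 5);
    // The empty slice still yields a single empty subtag, matching the test above.
    assert_eq!(count_subtags(b""), 1);
}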
diff --git a/vendor/icu_locid/src/subtags/language.rs b/vendor/icu_locid/src/subtags/language.rs
index a5ec8d76e..86b51b93a 100644
--- a/vendor/icu_locid/src/subtags/language.rs
+++ b/vendor/icu_locid/src/subtags/language.rs
@@ -55,7 +55,6 @@ impl Language {
/// use icu::locid::subtags::Language;
///
/// assert_eq!(Language::default(), Language::UND);
- /// assert_eq!("und", Language::UND.to_string());
/// ```
pub const UND: Self = unsafe { Self::from_raw_unchecked(*b"und") };
@@ -64,15 +63,15 @@ impl Language {
/// # Examples
///
/// ```
- /// use icu::locid::subtags::Language;
+ /// use icu::locid::{subtags::Language, subtags_language as language};
///
- /// let mut lang: Language = "csb".parse().expect("Parsing failed.");
+ /// let mut lang = language!("csb");
///
- /// assert_eq!(lang.as_str(), "csb");
+ /// assert_ne!(lang, Language::UND);
///
/// lang.clear();
///
- /// assert_eq!(lang.as_str(), "und");
+ /// assert_eq!(lang, Language::UND);
/// ```
#[inline]
pub fn clear(&mut self) {
@@ -86,7 +85,7 @@ impl Language {
/// ```
/// use icu::locid::subtags::Language;
///
- /// let mut lang: Language = "und".parse().expect("Parsing failed.");
+ /// let mut lang = Language::UND;
///
/// assert!(lang.is_empty());
///
diff --git a/vendor/icu_locid/src/subtags/variants.rs b/vendor/icu_locid/src/subtags/variants.rs
index bbff9ebac..3bd83f149 100644
--- a/vendor/icu_locid/src/subtags/variants.rs
+++ b/vendor/icu_locid/src/subtags/variants.rs
@@ -16,14 +16,9 @@ use core::ops::Deref;
/// # Examples
///
/// ```
-/// use icu::locid::subtags::{Variant, Variants};
+/// use icu::locid::{subtags::Variants, subtags_variant as variant};
///
-/// let variant1: Variant =
-/// "posix".parse().expect("Failed to parse a variant subtag.");
-///
-/// let variant2: Variant =
-/// "macos".parse().expect("Failed to parse a variant subtag.");
-/// let mut v = vec![variant1, variant2];
+/// let mut v = vec![variant!("posix"), variant!("macos")];
/// v.sort();
/// v.dedup();
///
@@ -53,10 +48,9 @@ impl Variants {
/// # Examples
///
/// ```
- /// use icu::locid::subtags::{Variant, Variants};
+ /// use icu::locid::{subtags::Variants, subtags_variant as variant};
///
- /// let variant: Variant = "posix".parse().expect("Parsing failed.");
- /// let variants = Variants::from_variant(variant);
+ /// let variants = Variants::from_variant(variant!("posix"));
/// ```
#[inline]
pub const fn from_variant(variant: Variant) -> Self {
@@ -70,11 +64,9 @@ impl Variants {
/// # Examples
///
/// ```
- /// use icu::locid::subtags::{Variant, Variants};
+ /// use icu::locid::{subtags::Variants, subtags_variant as variant};
///
- /// let variant1: Variant = "posix".parse().expect("Parsing failed.");
- /// let variant2: Variant = "macos".parse().expect("Parsing failed.");
- /// let mut v = vec![variant1, variant2];
+ /// let mut v = vec![variant!("posix"), variant!("macos")];
/// v.sort();
/// v.dedup();
///
@@ -95,11 +87,9 @@ impl Variants {
/// # Examples
///
/// ```
- /// use icu::locid::subtags::{Variant, Variants};
+ /// use icu::locid::{subtags::Variants, subtags_variant as variant};
///
- /// let variant1: Variant = "posix".parse().expect("Parsing failed.");
- /// let variant2: Variant = "macos".parse().expect("Parsing failed.");
- /// let mut v = vec![variant1, variant2];
+ /// let mut v = vec![variant!("posix"), variant!("macos")];
/// v.sort();
/// v.dedup();
///
@@ -109,7 +99,7 @@ impl Variants {
///
/// variants.clear();
///
- /// assert_eq!(variants.to_string(), "");
+ /// assert_eq!(variants, Variants::default());
/// ```
pub fn clear(&mut self) -> Self {
core::mem::take(self)
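
The doctest updates in language.rs and variants.rs above replace runtime parsing with the compile-time `subtags_language!`/`subtags_variant!` macros. A small sketch, assuming the same `icu::locid` paths these doctests use, showing that both routes yield the same value:

```rust
use icu::locid::{subtags::Variant, subtags_variant as variant};

fn main() {
    // Runtime parsing, as the old doctests did...
    let parsed: Variant = "posix".parse().expect("valid variant subtag");
    // ...versus compile-time validation through the macro the new doctests use.
    let constant = variant!("posix");
    assert_eq!(parsed, constant);
}
```
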
diff --git a/vendor/icu_locid/tests/fixtures/invalid-extensions.json b/vendor/icu_locid/tests/fixtures/invalid-extensions.json
index a5f3a923d..3aff2636b 100644
--- a/vendor/icu_locid/tests/fixtures/invalid-extensions.json
+++ b/vendor/icu_locid/tests/fixtures/invalid-extensions.json
@@ -108,5 +108,45 @@
"error": "InvalidExtension",
"text": "Invalid subtag"
}
+ },
+ {
+ "input": {
+ "type": "Locale",
+ "identifier": "de-u-ca-"
+ },
+ "output": {
+ "error": "InvalidExtension",
+ "text": "Invalid subtag"
+ }
+ },
+ {
+ "input": {
+ "type": "Locale",
+ "identifier": "de-u-ca-gregory-"
+ },
+ "output": {
+ "error": "InvalidExtension",
+ "text": "Invalid subtag"
+ }
+ },
+ {
+ "input": {
+ "type": "Locale",
+ "identifier": "de-u-ca-gregory-u-hc-hc24"
+ },
+ "output": {
+ "error": "DuplicatedExtension",
+ "text": "Duplicated extension"
+ }
+ },
+ {
+ "input": {
+ "type": "Locale",
+ "identifier": "de-l-foo-l-bar"
+ },
+ "output": {
+ "error": "DuplicatedExtension",
+ "text": "Duplicated extension"
+ }
}
]
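
The new fixtures above exercise trailing separators inside extensions and repeated extension singletons. A short sketch of what they assert, using the public `icu_locid` parser; the `ParserError` variants are the ones the fixture harness maps in the fixtures/mod.rs change further down:

```rust
use icu_locid::{Locale, ParserError};

fn main() {
    // A trailing separator inside a unicode extension is rejected.
    assert!(matches!(
        "de-u-ca-".parse::<Locale>(),
        Err(ParserError::InvalidExtension)
    ));
    // The same extension singleton appearing twice is now a distinct error.
    assert!(matches!(
        "de-l-foo-l-bar".parse::<Locale>(),
        Err(ParserError::DuplicatedExtension)
    ));
}
```
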
diff --git a/vendor/icu_locid/tests/fixtures/invalid.json b/vendor/icu_locid/tests/fixtures/invalid.json
index d44007596..c22459e65 100644
--- a/vendor/icu_locid/tests/fixtures/invalid.json
+++ b/vendor/icu_locid/tests/fixtures/invalid.json
@@ -1,5 +1,54 @@
[
{
+ "input": "-",
+ "output": {
+ "error": "InvalidLanguage",
+ "text": "The given language subtag is invalid"
+ }
+ },
+ {
+ "input": "--",
+ "output": {
+ "error": "InvalidLanguage",
+ "text": "The given subtag is invalid"
+ }
+ },
+ {
+ "input": "en-",
+ "output": {
+ "error": "InvalidSubtag",
+ "text": "The given subtag is invalid"
+ }
+ },
+ {
+ "input": "-en",
+ "output": {
+ "error": "InvalidLanguage",
+ "text": "The given subtag is invalid"
+ }
+ },
+ {
+ "input": "en-us-",
+ "output": {
+ "error": "InvalidSubtag",
+ "text": "The given subtag is invalid"
+ }
+ },
+ {
+ "input": "en--US",
+ "output": {
+ "error": "InvalidSubtag",
+ "text": "The given subtag is invalid"
+ }
+ },
+ {
+ "input": "-e-",
+ "output": {
+ "error": "InvalidLanguage",
+ "text": "The given subtag is invalid"
+ }
+ },
+ {
"input": "a1a",
"output": {
"error": "InvalidLanguage",
diff --git a/vendor/icu_locid/tests/fixtures/mod.rs b/vendor/icu_locid/tests/fixtures/mod.rs
index b688632ba..f00fd6c3b 100644
--- a/vendor/icu_locid/tests/fixtures/mod.rs
+++ b/vendor/icu_locid/tests/fixtures/mod.rs
@@ -248,6 +248,7 @@ impl From<LocaleError> for ParserError {
"InvalidLanguage" => ParserError::InvalidLanguage,
"InvalidSubtag" => ParserError::InvalidSubtag,
"InvalidExtension" => ParserError::InvalidExtension,
+ "DuplicatedExtension" => ParserError::DuplicatedExtension,
_ => unreachable!("Unknown error name"),
}
}
diff --git a/vendor/icu_locid/tests/langid.rs b/vendor/icu_locid/tests/langid.rs
index 96d022a9b..60414e087 100644
--- a/vendor/icu_locid/tests/langid.rs
+++ b/vendor/icu_locid/tests/langid.rs
@@ -6,6 +6,7 @@ mod fixtures;
mod helpers;
use std::convert::TryInto;
+use writeable::*;
use icu_locid::{subtags, LanguageIdentifier, ParserError};
@@ -21,7 +22,7 @@ fn test_langid_fixtures(tests: Vec<fixtures::LocaleTest>) {
}
}
let input: LanguageIdentifier = test.input.try_into().expect("Parsing failed.");
- assert_eq!(input.to_string(), s);
+ assert_writeable_eq!(input, s);
}
fixtures::LocaleInfo::Error(err) => {
let err: ParserError = err.into();
@@ -83,28 +84,28 @@ fn test_langid_subtag_language() {
assert_eq!(lang, subtags::Language::UND);
assert!(lang.is_empty());
- assert_eq!(lang.to_string(), "und");
+ assert_writeable_eq!(lang, "und");
}
#[test]
fn test_langid_subtag_region() {
let region: subtags::Region = "en".parse().expect("Failed to parse a region.");
assert_eq!(region.as_str(), "EN");
- assert_eq!(region.to_string(), "EN");
+ assert_writeable_eq!(region, "EN");
}
#[test]
fn test_langid_subtag_script() {
let script: subtags::Script = "Latn".parse().expect("Failed to parse a script.");
assert_eq!(script.as_str(), "Latn");
- assert_eq!(script.to_string(), "Latn");
+ assert_writeable_eq!(script, "Latn");
}
#[test]
fn test_langid_subtag_variant() {
let variant: subtags::Variant = "macos".parse().expect("Failed to parse a variant.");
assert_eq!(variant.as_str(), "macos");
- assert_eq!(variant.to_string(), "macos");
+ assert_writeable_eq!(variant, "macos");
}
#[test]
@@ -123,7 +124,7 @@ fn test_langid_normalizing_eq_str() {
helpers::read_fixture(path).expect("Failed to read a fixture");
for test in tests {
let parsed: LanguageIdentifier = test.input.try_into().expect("Parsing failed.");
- assert!(parsed.normalizing_eq(parsed.to_string().as_str()));
+ assert!(parsed.normalizing_eq(&*parsed.write_to_string()));
}
// Check that trailing characters are not ignored
@@ -148,7 +149,7 @@ fn test_langid_strict_cmp() {
let a_langid = a
.parse::<LanguageIdentifier>()
.expect("Invalid BCP-47 in fixture");
- let a_normalized = a_langid.to_string();
+ let a_normalized = a_langid.write_to_string();
let string_cmp = a_normalized.as_bytes().cmp(b.as_bytes());
let test_cmp = a_langid.strict_cmp(b.as_bytes());
assert_eq!(string_cmp, test_cmp, "{:?}/{:?}", a, b);
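
The langid.rs test changes above move string comparisons off `Display`/`to_string()` and onto the `writeable` crate's `assert_writeable_eq!` and `write_to_string()`. A minimal sketch of the pattern, assuming the same `writeable` dev-dependency these tests import:

```rust
use icu_locid::LanguageIdentifier;
use writeable::*;

fn main() {
    let langid: LanguageIdentifier = "de-AT".parse().expect("valid BCP-47");
    // Compares the Writeable output directly instead of allocating via Display.
    assert_writeable_eq!(langid, "de-AT");
    // write_to_string() returns a Cow<str>, matching the normalizing_eq usage above.
    assert!(langid.normalizing_eq(&*langid.write_to_string()));
}
```
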
diff --git a/vendor/icu_locid/tests/locale.rs b/vendor/icu_locid/tests/locale.rs
index 37c43181e..cd3448983 100644
--- a/vendor/icu_locid/tests/locale.rs
+++ b/vendor/icu_locid/tests/locale.rs
@@ -6,6 +6,7 @@ mod fixtures;
mod helpers;
use std::convert::TryInto;
+use writeable::*;
use icu_locid::{LanguageIdentifier, Locale, ParserError};
@@ -16,7 +17,7 @@ fn test_langid_fixtures(tests: Vec<fixtures::LocaleTest>) {
match test.output {
fixtures::LocaleInfo::String(s) => {
let input: Locale = test.input.try_into().expect("Parsing failed.");
- assert_eq!(input.to_string(), s);
+ assert_writeable_eq!(input, s);
}
fixtures::LocaleInfo::Error(err) => {
let err: ParserError = err.into();
@@ -27,7 +28,7 @@ fn test_langid_fixtures(tests: Vec<fixtures::LocaleTest>) {
let input: Locale = test.input.try_into().expect("Parsing failed.");
let output: Locale = ident.clone().try_into().expect("Parsing failed.");
assert_eq!(input, output);
- assert_eq!(input.to_string(), ident.identifier);
+ assert_writeable_eq!(input, ident.identifier);
}
fixtures::LocaleInfo::Object(o) => {
let input: Locale = test.input.try_into().expect("Parsing failed.");
@@ -58,7 +59,7 @@ fn test_langid_invalid() {
fn test_locale_is_empty() {
let locale: Locale = Locale::default();
assert!(locale.extensions.is_empty());
- assert_eq!(locale.to_string(), "und".to_string());
+ assert_writeable_eq!(locale, "und");
}
#[test]
@@ -74,10 +75,7 @@ fn test_locale_canonicalize() {
let locale: Locale = "En-latn-US-MacOS"
.parse()
.expect("Failed to parse a locale.");
- assert_eq!(
- locale.to_string(),
- Locale::canonicalize("eN-latN-uS-macOS").unwrap()
- );
+ assert_writeable_eq!(locale, Locale::canonicalize("eN-latN-uS-macOS").unwrap());
}
#[test]
@@ -87,7 +85,7 @@ fn test_locale_normalizing_eq_str() {
helpers::read_fixture(path).expect("Failed to read a fixture");
for test in tests {
let parsed: Locale = test.input.try_into().expect("Parsing failed.");
- assert!(parsed.normalizing_eq(parsed.to_string().as_str()));
+ assert!(parsed.normalizing_eq(&*parsed.write_to_string()));
}
// Check that trailing characters are not ignored
@@ -113,7 +111,7 @@ fn test_locale_strict_cmp() {
for a in bcp47_strings.iter() {
for b in bcp47_strings.iter() {
let a_langid = a.parse::<Locale>().expect("Invalid BCP-47 in fixture");
- let a_normalized = a_langid.to_string();
+ let a_normalized = a_langid.write_to_string();
let string_cmp = a_normalized.as_bytes().cmp(b.as_bytes());
let test_cmp = a_langid.strict_cmp(b.as_bytes());
assert_eq!(string_cmp, test_cmp, "{:?}/{:?}", a, b);
diff --git a/vendor/icu_provider/.cargo-checksum.json b/vendor/icu_provider/.cargo-checksum.json
index fd85bb931..079ffc8f3 100644
--- a/vendor/icu_provider/.cargo-checksum.json
+++ b/vendor/icu_provider/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.toml":"2b5b03ef885db4858645deae855ec7347fda3b8b18ce326521d11dd5fa5e7f0d","LICENSE":"4ad7541d66a407234e2c84902124cef325c29f3e966353efdb800bedb8b8da21","README.md":"97887d230a6a85aa40110bf2bb9356a69b8762f64bdbf66d019f2cdf9f8fee0e","src/any.rs":"f347c91b6ad34d330d69e60f276716c595c32ca7abf591340ba659a54ba73415","src/buf.rs":"ed8d68fc8facae65294f648bca8dad2ad6e75fbea6250212a2f6fc8f1147a015","src/constructors.rs":"895162fc4f3ceeff9236a765b6caf41a343794af2fc0d9cc686487e6980ff111","src/data_provider.rs":"088fa10caeec785777366d543d8622ae4ac52fa0721de25281456d8346371e56","src/datagen/data_conversion.rs":"21c21bb300557f158f77155397a023e1eda22413d66ac0c4f8ce04184fd90b20","src/datagen/heap_measure.rs":"14f3224d071d0c9695720b53257a60da19c3355c4bff272f9a9e3c3a9711f27a","src/datagen/iter.rs":"6353ec6476596ae6dfd5aee5f3f420c3a1c1c6e71911fa7dae49e17564e1930f","src/datagen/mod.rs":"0ed8676ad23d5acf622b9b971195c44976eaa41320f86627ad2c3f506c064d4e","src/datagen/payload.rs":"d9b1e5ceb07bdadc01d200f712b556d37a325cfad8fc6c7c17ceecaab4ef57e2","src/dynutil.rs":"bff12a7000298a1f46c98bb66f94492a88f9111afb59e8accea6b83d95e83af2","src/error.rs":"d01b45212c1c3bf5e904c631a26d0c5f91802575e55675f73d3591a1d0da8f25","src/hello_world.rs":"57fd467793374d66ed05982ae04b72983b3d67744fa81cf7ddde1174d1d2e741","src/helpers.rs":"1712d389dfda2a9c2d5d973f212209ecb4a16f27998d893a2f1a4cf96cb44e10","src/key.rs":"286d271012fc2eb8b34bfa843a5783b4a3ce5a1e23a63234ca11a8f82f2c13cc","src/lib.rs":"26b72a010db444d339d3803b7e598842efe52fff97b56fb3c6ddf632fc70ecdc","src/marker.rs":"58d770fb28e696c33192b9ecc5a86e1dca01f206d62656b45787375d69168852","src/request.rs":"141b12da429c5dcc2577d0911a4b09b6d579d173510eac6d3cd997fd986020f4","src/response.rs":"71e078dabcc91c822c315ae89f499b7d9724b40addc89786320ca658979e2f6c","src/serde/borrow_de_utils.rs":"30936d0ac69a9edc1a4935dcf777f1c1161d3f8d8d3a997b80744cf6c62bc4b5","src/serde/mod.rs":"b7033686b2f6d72fbdab6388b65c29651fd3b6ac45a37981421dab03a73c5ef5"},"package":"2f911086e3c521a8a824d4f8bfd87769645ced2f07ff913b521c0d793be07100"} \ No newline at end of file
+{"files":{"Cargo.toml":"5eb9543e4ba0986f30237d8e026704143a75c1d197292f370294abd5a45bd10b","LICENSE":"4ad7541d66a407234e2c84902124cef325c29f3e966353efdb800bedb8b8da21","README.md":"bb2790cbf8d6109a07f4f0fe38aedfba0258637b1a28e67d35057eb57362fc41","src/any.rs":"7a6678e693e6090ebee04366dcf47a0449ce9006cca1f2ac429bd87eb9a0bbf2","src/buf.rs":"c1cb09ef992f6cefb65c8a0131d3f1e30332f093bd714429bba67e82fc3ee145","src/constructors.rs":"bbb8e0b82d8be54b6992d82df6738c20468e97af5910a22ceda437415ece9c13","src/data_provider.rs":"088fa10caeec785777366d543d8622ae4ac52fa0721de25281456d8346371e56","src/datagen/data_conversion.rs":"21c21bb300557f158f77155397a023e1eda22413d66ac0c4f8ce04184fd90b20","src/datagen/heap_measure.rs":"14f3224d071d0c9695720b53257a60da19c3355c4bff272f9a9e3c3a9711f27a","src/datagen/iter.rs":"6353ec6476596ae6dfd5aee5f3f420c3a1c1c6e71911fa7dae49e17564e1930f","src/datagen/mod.rs":"47e5b36432d27af3ff2662efceebe7dfb9b875f776e162dc547bb8ce2893e3a3","src/datagen/payload.rs":"d9b1e5ceb07bdadc01d200f712b556d37a325cfad8fc6c7c17ceecaab4ef57e2","src/dynutil.rs":"bff12a7000298a1f46c98bb66f94492a88f9111afb59e8accea6b83d95e83af2","src/error.rs":"c25cf6969feb7097edfc831a7d1aa6a3e3ba31c97e637f8f382bd3ec7905d5a7","src/hello_world.rs":"ff99a920ca8ef84d29129f1c8563fa96e963848532f9dbbddd02d634b8d6c0fb","src/helpers.rs":"1712d389dfda2a9c2d5d973f212209ecb4a16f27998d893a2f1a4cf96cb44e10","src/key.rs":"59d2d89107a24ee3f92f085715a1ba8c1222eced384969534fbcd43c68e2a658","src/lib.rs":"dcae8affb71e51095eab6180e08194e87a5aa6e339b8f3f18c4e1ce45a8c714a","src/marker.rs":"58d770fb28e696c33192b9ecc5a86e1dca01f206d62656b45787375d69168852","src/request.rs":"9ab23e38f941a4abc54be028888db6e787557088fc290d67b4f112db77c45521","src/response.rs":"9d93b42a1dda53f71195f1371c8a0a50755488af1fa20262fb703d60a6e1359e","src/serde/borrow_de_utils.rs":"30936d0ac69a9edc1a4935dcf777f1c1161d3f8d8d3a997b80744cf6c62bc4b5","src/serde/mod.rs":"a65937b6501a8106ae6036a7ba09977707b5268d2a6bdc3f2e43088610170da8"},"package":"a86816c97bc4e613086497f9479f63e120315e056763e8c4435604f98d21d82d"} \ No newline at end of file
diff --git a/vendor/icu_provider/Cargo.toml b/vendor/icu_provider/Cargo.toml
index 4ed67b0c0..84f561096 100644
--- a/vendor/icu_provider/Cargo.toml
+++ b/vendor/icu_provider/Cargo.toml
@@ -12,7 +12,7 @@
[package]
edition = "2021"
name = "icu_provider"
-version = "1.0.1"
+version = "1.1.0"
authors = ["The ICU4X Project Developers"]
include = [
"src/**/*",
@@ -30,23 +30,18 @@ license = "Unicode-DFS-2016"
repository = "https://github.com/unicode-org/icu4x"
resolver = "2"
-[package.metadata.cargo-all-features]
-skip_optional_dependencies = true
-denylist = [
- "bench",
- "macros",
-]
-extra_features = ["serde"]
-
[package.metadata.docs.rs]
all-features = true
+[package.metadata.cargo-all-features]
+denylist = ["macros"]
+
[dependencies.bincode]
version = "1.3"
optional = true
[dependencies.databake]
-version = "0.1.0"
+version = "0.1.3"
features = ["derive"]
optional = true
@@ -65,10 +60,10 @@ optional = true
default-features = false
[dependencies.icu_locid]
-version = "1.0.0"
+version = "1.1.0"
[dependencies.icu_provider_macros]
-version = "1.0.0"
+version = "1.1.0"
optional = true
[dependencies.log]
@@ -100,53 +95,47 @@ version = "1.2.0"
default-features = false
[dependencies.writeable]
-version = "0.5"
+version = "0.5.1"
[dependencies.yoke]
-version = "0.6.2"
+version = "0.7.0"
features = ["derive"]
[dependencies.zerofrom]
-version = "0.1.0"
+version = "0.1.1"
features = ["derive"]
[dependencies.zerovec]
-version = "0.9"
+version = "0.9.2"
features = ["derive"]
[dev-dependencies.serde_json]
version = "1.0"
-[dev-dependencies.static_assertions]
-version = "1.1"
-
[features]
datagen = [
- "dhat",
+ "dep:dhat",
"serde",
- "erased-serde",
- "databake",
+ "dep:erased-serde",
+ "dep:databake",
"std",
- "serde_json",
"sync",
- "yoke/serde",
]
-default = []
deserialize_bincode_1 = [
"serde",
- "bincode",
+ "dep:bincode",
"std",
]
deserialize_json = [
"serde",
- "serde_json",
+ "dep:serde_json",
]
deserialize_postcard_1 = [
"serde",
- "postcard",
+ "dep:postcard",
]
-log_error_context = ["log"]
-macros = ["icu_provider_macros"]
+log_error_context = ["dep:log"]
+macros = ["dep:icu_provider_macros"]
serde = [
"dep:serde",
"yoke/serde",
diff --git a/vendor/icu_provider/README.md b/vendor/icu_provider/README.md
index 19d73a1a2..5692fa6b3 100644
--- a/vendor/icu_provider/README.md
+++ b/vendor/icu_provider/README.md
@@ -92,7 +92,7 @@ structs to borrow zero-copy data.
### Data generation API
-*This functionality is enabled with the "datagen" feature*
+*This functionality is enabled with the "datagen" Cargo feature*
The [`datagen`] module contains several APIs for data generation. See [`icu_datagen`] for the reference
data generation implementation.
diff --git a/vendor/icu_provider/src/any.rs b/vendor/icu_provider/src/any.rs
index 1c7a60435..989438c6b 100644
--- a/vendor/icu_provider/src/any.rs
+++ b/vendor/icu_provider/src/any.rs
@@ -18,7 +18,7 @@ use alloc::rc::Rc as SelectedRc;
use alloc::sync::Arc as SelectedRc;
/// A trait that allows to specify `Send + Sync` bounds that are only required when
-/// the `sync` feature is enabled. Without the feature, this is an empty bound.
+/// the `sync` Cargo feature is enabled. Without the Cargo feature, this is an empty bound.
#[cfg(feature = "sync")]
pub trait MaybeSendSync: Send + Sync {}
#[cfg(feature = "sync")]
@@ -115,6 +115,19 @@ impl AnyPayload {
}
}
+ /// Clones and then transforms a type-erased `AnyPayload` into a concrete `DataPayload<M>`.
+ pub fn downcast_cloned<M>(&self) -> Result<DataPayload<M>, DataError>
+ where
+ M: DataMarker + 'static,
+ // For the StructRef case:
+ M::Yokeable: ZeroFrom<'static, M::Yokeable>,
+ // For the PayloadRc case:
+ M::Yokeable: MaybeSendSync,
+ for<'a> YokeTraitHack<<M::Yokeable as Yokeable<'a>>::Output>: Clone,
+ {
+ self.clone().downcast()
+ }
+
/// Creates an `AnyPayload` from a static reference to a data struct.
///
/// # Examples
@@ -230,7 +243,7 @@ impl From<AnyResponse> for DataResponse<AnyMarker> {
}
impl AnyResponse {
- /// Transforms a type-erased `DataResponse<AnyMarker>` into a concrete `DataResponse<M>`.
+ /// Transforms a type-erased `AnyResponse` into a concrete `DataResponse<M>`.
#[inline]
pub fn downcast<M>(self) -> Result<DataResponse<M>, DataError>
where
@@ -244,6 +257,39 @@ impl AnyResponse {
payload: self.payload.map(|p| p.downcast()).transpose()?,
})
}
+
+ /// Clones and then transforms a type-erased `AnyResponse` into a concrete `DataResponse<M>`.
+ pub fn downcast_cloned<M>(&self) -> Result<DataResponse<M>, DataError>
+ where
+ M: DataMarker + 'static,
+ M::Yokeable: ZeroFrom<'static, M::Yokeable>,
+ M::Yokeable: MaybeSendSync,
+ for<'a> YokeTraitHack<<M::Yokeable as Yokeable<'a>>::Output>: Clone,
+ {
+ Ok(DataResponse {
+ metadata: self.metadata.clone(),
+ payload: self
+ .payload
+ .as_ref()
+ .map(|p| p.downcast_cloned())
+ .transpose()?,
+ })
+ }
+}
+
+impl<M> DataResponse<M>
+where
+ M: DataMarker + 'static,
+ M::Yokeable: MaybeSendSync,
+{
+ /// Moves the inner DataPayload to the heap (requiring an allocation) and returns it as an
+ /// erased `AnyResponse`.
+ pub fn wrap_into_any_response(self) -> AnyResponse {
+ AnyResponse {
+ metadata: self.metadata,
+ payload: self.payload.map(|p| p.wrap_into_any_payload()),
+ }
+ }
}
/// An object-safe data provider that returns data structs cast to `dyn Any` trait objects.
@@ -345,7 +391,10 @@ where
{
#[inline]
fn load(&self, req: DataRequest) -> Result<DataResponse<M>, DataError> {
- self.0.load_any(M::KEY, req)?.downcast()
+ self.0
+ .load_any(M::KEY, req)?
+ .downcast()
+ .map_err(|e| e.with_req(M::KEY, req))
}
}
@@ -359,7 +408,10 @@ where
{
#[inline]
fn load_data(&self, key: DataKey, req: DataRequest) -> Result<DataResponse<M>, DataError> {
- self.0.load_any(key, req)?.downcast()
+ self.0
+ .load_any(key, req)?
+ .downcast()
+ .map_err(|e| e.with_req(key, req))
}
}
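
The any.rs changes above add borrowing counterparts (`downcast_cloned`) to the consuming `downcast` methods, plus `wrap_into_any_response`. A rough usage sketch, assuming icu_provider's built-in `hello_world` test marker and that the marker's bounds (Clone/ZeroFrom) are satisfied, as they are for `HelloWorldV1`:

```rust
use icu_provider::hello_world::HelloWorldV1Marker;
use icu_provider::prelude::*;

fn main() {
    let payload = DataPayload::<HelloWorldV1Marker>::from_static_str("Hello");
    // Erase the concrete marker type...
    let any_payload = payload.wrap_into_any_payload();
    // ...then recover it by cloning, leaving the erased payload usable afterwards.
    let concrete: DataPayload<HelloWorldV1Marker> = any_payload
        .downcast_cloned()
        .expect("marker type matches");
    assert_eq!("Hello", concrete.get().message);
}
```
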
diff --git a/vendor/icu_provider/src/buf.rs b/vendor/icu_provider/src/buf.rs
index 73bc0f165..796ad32f3 100644
--- a/vendor/icu_provider/src/buf.rs
+++ b/vendor/icu_provider/src/buf.rs
@@ -21,7 +21,11 @@ impl DataMarker for BufferMarker {
///
/// Generally, these bytes are expected to be deserializable with Serde. To get an object
/// implementing [`DataProvider`] via Serde, use [`as_deserializing()`], which requires
-/// enabling at least one of the Serde features.
+/// enabling at least one of the deserialization Cargo features:
+///
+/// - `deserialize_json`
+/// - `deserialize_postcard_1`
+/// - `deserialize_bincode_1`
///
/// Along with [`DataProvider`], this is one of the two foundational traits in this crate.
///
diff --git a/vendor/icu_provider/src/constructors.rs b/vendor/icu_provider/src/constructors.rs
index a9330c3f2..053da0320 100644
--- a/vendor/icu_provider/src/constructors.rs
+++ b/vendor/icu_provider/src/constructors.rs
@@ -54,7 +54,7 @@
//! 2. [`FsDataProvider`]
//! 3. [`ForkByKeyProvider`] between any of the above
//!
-//! Please note that you must enable the `"serde"` feature on each crate in which you use the
+//! Please note that you must enable the `"serde"` Cargo feature on each crate in which you use the
//! `*_with_buffer_provider` constructor.
//!
//! # Data Versioning Policy
@@ -78,8 +78,8 @@
//! Over FFI, there is only one data provider type: [`ICU4XDataProvider`]. Internally, it is an
//! `enum` between `dyn `[`AnyProvider`] and `dyn `[`BufferProvider`].
//!
-//! To control for code size, there are two features, `any_provider` and `buffer_provider`, that
-//! enable the corresponding items in the enum.
+//! To control for code size, there are two Cargo features, `any_provider` and `buffer_provider`,
+//! that enable the corresponding items in the enum.
//!
//! In Rust ICU4X, a similar buffer/any enum approach was not taken because:
//!
diff --git a/vendor/icu_provider/src/datagen/mod.rs b/vendor/icu_provider/src/datagen/mod.rs
index 5ede82275..e52a19c4d 100644
--- a/vendor/icu_provider/src/datagen/mod.rs
+++ b/vendor/icu_provider/src/datagen/mod.rs
@@ -6,7 +6,7 @@
//! via the `icu_datagen` reference crate. End users should not need to consume anything in
//! this module as a library unless defining new types that integrate with `icu_datagen`.
//!
-//! This module can be enabled with the `datagen` feature on `icu_provider`.
+//! This module can be enabled with the `datagen` Cargo feature on `icu_provider`.
mod data_conversion;
mod heap_measure;
diff --git a/vendor/icu_provider/src/error.rs b/vendor/icu_provider/src/error.rs
index 39bd1d0bb..05a48f9b7 100644
--- a/vendor/icu_provider/src/error.rs
+++ b/vendor/icu_provider/src/error.rs
@@ -64,8 +64,8 @@ pub enum DataErrorKind {
MissingSourceData,
/// An error indicating that the desired buffer format is not available. This usually
- /// means that a required feature was not enabled
- #[displaydoc("Unavailable buffer format: {0:?} (does icu_provider need to be compiled with an additional feature?)")]
+ /// means that a required Cargo feature was not enabled
+ #[displaydoc("Unavailable buffer format: {0:?} (does icu_provider need to be compiled with an additional Cargo feature?)")]
UnavailableBufferFormat(BufferFormat),
}
@@ -196,7 +196,7 @@ impl DataError {
/// Logs the data error with the given request, returning an error containing the resource key.
///
- /// If the "log_error_context" feature is enabled, this logs the whole request. Either way,
+ /// If the "log_error_context" Cargo feature is enabled, this logs the whole request. Either way,
/// it returns an error with the resource key portion of the request as context.
#[cfg_attr(not(feature = "log_error_context"), allow(unused_variables))]
pub fn with_req(self, key: DataKey, req: DataRequest) -> Self {
@@ -210,7 +210,7 @@ impl DataError {
/// Logs the data error with the given context, then return self.
///
- /// This does not modify the error, but if the "log_error_context" feature is enabled,
+ /// This does not modify the error, but if the "log_error_context" Cargo feature is enabled,
/// it will print out the context.
#[cfg(feature = "std")]
#[cfg_attr(not(feature = "log_error_context"), allow(unused_variables))]
@@ -222,7 +222,7 @@ impl DataError {
/// Logs the data error with the given context, then return self.
///
- /// This does not modify the error, but if the "log_error_context" feature is enabled,
+ /// This does not modify the error, but if the "log_error_context" Cargo feature is enabled,
/// it will print out the context.
#[cfg_attr(not(feature = "log_error_context"), allow(unused_variables))]
#[inline]
@@ -234,7 +234,7 @@ impl DataError {
/// Logs the data error with the given context, then return self.
///
- /// This does not modify the error, but if the "log_error_context" feature is enabled,
+ /// This does not modify the error, but if the "log_error_context" Cargo feature is enabled,
/// it will print out the context.
#[cfg_attr(not(feature = "log_error_context"), allow(unused_variables))]
#[inline]
diff --git a/vendor/icu_provider/src/hello_world.rs b/vendor/icu_provider/src/hello_world.rs
index 5fa671d84..7fd8289df 100644
--- a/vendor/icu_provider/src/hello_world.rs
+++ b/vendor/icu_provider/src/hello_world.rs
@@ -266,6 +266,8 @@ impl<'l> Writeable for FormattedHelloWorld<'l> {
}
}
+writeable::impl_display_with_writeable!(FormattedHelloWorld<'_>);
+
#[cfg(feature = "datagen")]
impl IterableDataProvider<HelloWorldV1Marker> for HelloWorldProvider {
fn supported_locales(&self) -> Result<Vec<DataLocale>, DataError> {
diff --git a/vendor/icu_provider/src/key.rs b/vendor/icu_provider/src/key.rs
index 2f55e4d46..d4d6905c9 100644
--- a/vendor/icu_provider/src/key.rs
+++ b/vendor/icu_provider/src/key.rs
@@ -140,11 +140,6 @@ impl DataKeyPath {
/// Gets the path as a static string slice.
#[inline]
pub const fn get(self) -> &'static str {
- /// core::slice::from_raw_parts(a, b) = core::mem::transmute((a, b)) hack
- /// ```compile_fail
- /// const unsafe fn canary() { core::slice::from_raw_parts(0 as *const u8, 0); }
- /// ```
- const _: () = ();
unsafe {
// Safe due to invariant that self.path is tagged correctly
core::str::from_utf8_unchecked(core::mem::transmute((
@@ -624,7 +619,6 @@ fn test_key_to_string() {
expected: "core/cardinal@65535",
},
] {
- assert_eq!(cas.expected, cas.key.to_string());
writeable::assert_writeable_eq!(&cas.key, cas.expected);
}
}
diff --git a/vendor/icu_provider/src/lib.rs b/vendor/icu_provider/src/lib.rs
index 594e872f4..7ee5b34e2 100644
--- a/vendor/icu_provider/src/lib.rs
+++ b/vendor/icu_provider/src/lib.rs
@@ -94,7 +94,7 @@
//!
//! ## Data generation API
//!
-//! *This functionality is enabled with the "datagen" feature*
+//! *This functionality is enabled with the "datagen" Cargo feature*
//!
//! The [`datagen`] module contains several APIs for data generation. See [`icu_datagen`] for the reference
//! data generation implementation.
diff --git a/vendor/icu_provider/src/request.rs b/vendor/icu_provider/src/request.rs
index 7f6bb5911..5f51f3a2c 100644
--- a/vendor/icu_provider/src/request.rs
+++ b/vendor/icu_provider/src/request.rs
@@ -53,11 +53,11 @@ pub struct DataRequestMetadata;
/// use icu_locid::locale;
/// use icu_provider::DataLocale;
///
-/// let locale1 = locale!("en-u-ca-buddhist");
-/// let data_locale = DataLocale::from(locale1);
-/// let locale2 = data_locale.into_locale();
+/// let locale = locale!("en-u-ca-buddhist");
+/// let data_locale = DataLocale::from(locale);
+/// let locale = data_locale.into_locale();
///
-/// assert_eq!(locale2.to_string(), "en-u-ca-buddhist");
+/// assert_eq!(locale, locale!("en-u-ca-buddhist"));
/// ```
///
/// You can alternatively create a [`DataLocale`] from a borrowed [`Locale`], which is more
@@ -81,18 +81,18 @@ pub struct DataRequestMetadata;
/// use icu_locid::langid;
/// use icu_provider::DataLocale;
///
-/// let langid1 = langid!("es-CA-valencia");
-/// let data_locale = DataLocale::from(langid1);
-/// let langid2 = data_locale.get_langid();
+/// let langid = langid!("es-CA-valencia");
+/// let data_locale = DataLocale::from(langid);
+/// let langid = data_locale.get_langid();
///
-/// assert_eq!(langid2.to_string(), "es-CA-valencia");
+/// assert_eq!(langid, langid!("es-CA-valencia"));
/// ```
///
/// [`DataLocale`] only supports `-u` keywords, to reflect the current state of CLDR data
/// lookup and fallback. This may change in the future.
///
/// ```
-/// use icu_locid::Locale;
+/// use icu_locid::{locale, Locale};
/// use icu_provider::DataLocale;
///
/// let locale = "hi-t-en-h0-hybrid-u-attr-ca-buddhist"
@@ -100,7 +100,7 @@ pub struct DataRequestMetadata;
/// .unwrap();
/// let data_locale = DataLocale::from(locale);
///
-/// assert_eq!(data_locale.to_string(), "hi-u-ca-buddhist");
+/// assert_eq!(data_locale.into_locale(), locale!("hi-u-ca-buddhist"));
/// ```
#[derive(PartialEq, Clone, Default, Eq, Hash)]
pub struct DataLocale {
@@ -225,7 +225,6 @@ impl DataLocale {
/// let b = ab[1];
/// assert!(a.cmp(b) == Ordering::Less);
/// let a_loc: DataLocale = a.parse::<Locale>().unwrap().into();
- /// assert_eq!(a, a_loc.to_string());
/// assert!(
/// a_loc.strict_cmp(a.as_bytes()) == Ordering::Equal,
/// "{} == {}",
@@ -239,7 +238,6 @@ impl DataLocale {
/// b
/// );
/// let b_loc: DataLocale = b.parse::<Locale>().unwrap().into();
- /// assert_eq!(b, b_loc.to_string());
/// assert!(
/// b_loc.strict_cmp(b.as_bytes()) == Ordering::Equal,
/// "{} == {}",
@@ -338,21 +336,20 @@ impl DataLocale {
///
/// ```
/// use icu_locid::{
- /// langid, subtags_language as language, subtags_region as region, Locale,
+ /// langid, locale, subtags_language as language, subtags_region as region,
+ /// Locale,
/// };
/// use icu_provider::prelude::*;
///
- /// let locale: Locale = "it-IT-u-ca-coptic".parse().expect("Valid BCP-47");
- /// let locale: DataLocale = locale.into();
+ /// let locale: DataLocale = locale!("it-IT-u-ca-coptic").into();
///
- /// assert_eq!(locale.to_string(), "it-IT-u-ca-coptic");
/// assert_eq!(locale.get_langid(), langid!("it-IT"));
/// assert_eq!(locale.language(), language!("it"));
/// assert_eq!(locale.script(), None);
/// assert_eq!(locale.region(), Some(region!("IT")));
///
/// let locale = locale.into_locale();
- /// assert_eq!(locale.to_string(), "it-IT-u-ca-coptic");
+ /// assert_eq!(locale, locale!("it-IT-u-ca-coptic"));
/// ```
pub fn into_locale(self) -> Locale {
let mut loc = Locale {
@@ -488,6 +485,8 @@ impl DataLocale {
#[test]
fn test_data_locale_to_string() {
+ use icu_locid::locale;
+
struct TestCase {
pub locale: DataLocale,
pub expected: &'static str,
@@ -499,15 +498,14 @@ fn test_data_locale_to_string() {
expected: "und",
},
TestCase {
- locale: "und-u-cu-gbp".parse::<Locale>().unwrap().into(),
+ locale: locale!("und-u-cu-gbp").into(),
expected: "und-u-cu-gbp",
},
TestCase {
- locale: "en-ZA-u-cu-gbp".parse::<Locale>().unwrap().into(),
+ locale: locale!("en-ZA-u-cu-gbp").into(),
expected: "en-ZA-u-cu-gbp",
},
] {
- assert_eq!(cas.expected, cas.locale.to_string());
- writeable::assert_writeable_eq!(&cas.locale, cas.expected);
+ writeable::assert_writeable_eq!(cas.locale, cas.expected);
}
}
diff --git a/vendor/icu_provider/src/response.rs b/vendor/icu_provider/src/response.rs
index 653d20a68..1ea6c8a76 100644
--- a/vendor/icu_provider/src/response.rs
+++ b/vendor/icu_provider/src/response.rs
@@ -51,10 +51,10 @@ pub struct DataResponseMetadata {
/// To transform a [`DataPayload`] to a different type backed by the same data store (cart), use
/// [`DataPayload::map_project()`] or one of its sister methods.
///
-/// # `sync` feature
+/// # Cargo feature: `sync`
///
/// By default, the payload uses non-concurrent reference counting internally, and hence is neither
-/// [`Sync`] nor [`Send`]; if these traits are required, the `sync` feature can be enabled.
+/// [`Sync`] nor [`Send`]; if these traits are required, the `sync` Cargo feature can be enabled.
///
/// # Examples
///
@@ -183,7 +183,7 @@ where
/// use std::borrow::Cow;
///
/// let local_struct = HelloWorldV1 {
- /// message: Cow::Owned("example".to_string()),
+ /// message: Cow::Owned("example".to_owned()),
/// };
///
/// let payload =
@@ -236,8 +236,8 @@ where
/// let mut payload =
/// DataPayload::<HelloWorldV1Marker>::from_static_str("Hello");
///
- /// let suffix = " World".to_string();
- /// payload.with_mut(move |s| s.message.to_mut().push_str(&suffix));
+ /// let suffix = " World";
+ /// payload.with_mut(move |s| s.message.to_mut().push_str(suffix));
///
/// assert_eq!("Hello World", payload.get().message);
/// ```
diff --git a/vendor/icu_provider/src/serde/mod.rs b/vendor/icu_provider/src/serde/mod.rs
index d32148f02..2e96b3cb3 100644
--- a/vendor/icu_provider/src/serde/mod.rs
+++ b/vendor/icu_provider/src/serde/mod.rs
@@ -94,7 +94,7 @@ impl DataPayload<BufferMarker> {
///
/// # Examples
///
- /// Requires the `deserialize_json` feature:
+ /// Requires the `deserialize_json` Cargo feature:
///
/// ```
/// use icu_provider::buf::BufferFormat;
@@ -136,16 +136,16 @@ where
{
fn load_data(&self, key: DataKey, req: DataRequest) -> Result<DataResponse<M>, DataError> {
let buffer_response = BufferProvider::load_buffer(self.0, key, req)?;
- let buffer_format = buffer_response
- .metadata
- .buffer_format
- .ok_or_else(|| DataError::custom("BufferProvider didn't set BufferFormat"))?;
+ let buffer_format = buffer_response.metadata.buffer_format.ok_or_else(|| {
+ DataError::custom("BufferProvider didn't set BufferFormat").with_req(key, req)
+ })?;
Ok(DataResponse {
metadata: buffer_response.metadata,
payload: buffer_response
.payload
.map(|p| p.into_deserialized(buffer_format))
- .transpose()?,
+ .transpose()
+ .map_err(|e| e.with_req(key, req))?,
})
}
}
@@ -165,21 +165,21 @@ where
}
}
-#[cfg(feature = "serde_json")]
+#[cfg(feature = "deserialize_json")]
impl From<serde_json::error::Error> for crate::DataError {
fn from(e: serde_json::error::Error) -> Self {
crate::DataError::custom("JSON deserialize").with_display_context(&e)
}
}
-#[cfg(feature = "bincode")]
+#[cfg(feature = "deserialize_bincode_1")]
impl From<bincode::Error> for crate::DataError {
fn from(e: bincode::Error) -> Self {
crate::DataError::custom("Bincode deserialize").with_display_context(&e)
}
}
-#[cfg(feature = "postcard")]
+#[cfg(feature = "deserialize_postcard_1")]
impl From<postcard::Error> for crate::DataError {
fn from(e: postcard::Error) -> Self {
crate::DataError::custom("Postcard deserialize").with_display_context(&e)
diff --git a/vendor/icu_provider_adapters/.cargo-checksum.json b/vendor/icu_provider_adapters/.cargo-checksum.json
index 82805760b..292541e8f 100644
--- a/vendor/icu_provider_adapters/.cargo-checksum.json
+++ b/vendor/icu_provider_adapters/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.toml":"5ecb7cc7b9f229262dc3694db154ab479591430d0d10e03b5aed677067c16d09","LICENSE":"4ad7541d66a407234e2c84902124cef325c29f3e966353efdb800bedb8b8da21","README.md":"e6a08c2bc307b66c93de20c2c366da896f7fd3f5e5eb402edd2eeba79476b600","src/any_payload.rs":"55336b0a861be2da125a411924a3597de5593135c8ff589420237189b77311f8","src/either.rs":"aa309c26deafcb9470bb9eb15e629fc53f2fb5d7e412d1a2317bc170f0652588","src/empty.rs":"2ef71bb0eb3b81cf8cbb9f93c37bb0b7a106b4db941ffdefc8e88b944bee90d2","src/fallback/adapter.rs":"31d851e1d2adda59c06510cdfc43846a107ee706b731b3019b7c28571f0398ff","src/fallback/algorithms.rs":"bc8d17fb957f8f9ada8e13d6f0d35af248caa621441ffb3fcd273ff97e39e4a4","src/fallback/mod.rs":"ec165267f61c722300cc4a44be7561e31a5c65007f8a4c11fa074f31d768797a","src/fallback/provider.rs":"ef8a835e3ed2b7871b6f6e78ae2e73575b26adc9e8e0b5a7170c8513e7fe3269","src/filter/impls.rs":"58fcc1f03454769a3c11a92cb4b7b9f7833a0c042ea80b4e863609e2414eed0f","src/filter/mod.rs":"960e1c22d7d2323bf73c3ff6ea2d34158ff767f2a6b4fc8c40e982bf8c2dd500","src/fork/by_error.rs":"67742b78a80ff9765fd83fc28390687ed6e07f71afc0964b20221ff1f508e380","src/fork/macros.rs":"5aebb0134923fa9fa0fe7c16d2d85a1a29cf4ea49505187db56bf0bc8e984ad0","src/fork/mod.rs":"7378895128bf1ccc9de7f5675668ef922d3df7b84b04f68af4d5b49efe60341e","src/fork/predicates.rs":"314bf33144c6827cd3df05877d7c20fb64c9b18f6230e229340b8f04f4ccf4d3","src/helpers.rs":"008af3aa36ebf43ec249dd9162ff36c150fa00fe78b3065b023d59acbe218b7b","src/lib.rs":"7499b4bcb40453bc4678d6ae3febf05f5c1e351120513ddd694983e6da56f268","tests/data/langtest/de/core/helloworld@1/de.json":"5a45b1d80567de8c4ff754f7dbe20c22477c1e00a9400b38397eb034f295b8f8","tests/data/langtest/de/manifest.json":"fa2f848cff051fd12a909389fbbc44b93ae1feb92cce466cc4381f9548443ea9","tests/data/langtest/ro/core/helloworld@1/ro.json":"b6b68292746dd6bb2d92d9c08e2753db556ec9c954a3b593b0b8999df550f298","tests/data/langtest/ro/manifest.json":"fa2f848cff051fd12a909389fbbc44b93ae1feb92cce466cc4381f9548443ea9"},"package":"980c71d8a91b246ebbb97847178a4b816eea39d1d550c70ee566384555bb6545"} \ No newline at end of file
+{"files":{"Cargo.toml":"2546ca7ab46e2bd189ae3821948d10fdd46634138784d8b3c373c90ae9b1fe27","LICENSE":"4ad7541d66a407234e2c84902124cef325c29f3e966353efdb800bedb8b8da21","README.md":"e6a08c2bc307b66c93de20c2c366da896f7fd3f5e5eb402edd2eeba79476b600","src/any_payload.rs":"55336b0a861be2da125a411924a3597de5593135c8ff589420237189b77311f8","src/either.rs":"aa309c26deafcb9470bb9eb15e629fc53f2fb5d7e412d1a2317bc170f0652588","src/empty.rs":"3b55431437af04aaed5dcfc524d42c51a35bb7213274327cb1df70cb69d5a5c5","src/fallback/adapter.rs":"2e51f50215e9020f2e84a926227054e61fc001f44170bf27e1366c66097f2c83","src/fallback/algorithms.rs":"9409ad6f6f617d4433b9f6b303d131782877a0a10470533e3864b97355b9a3f1","src/fallback/mod.rs":"b2aa936b64ac0498c815683d5f3d151154fca9f962f56b172fd2582ff4b639ae","src/fallback/provider.rs":"ef8a835e3ed2b7871b6f6e78ae2e73575b26adc9e8e0b5a7170c8513e7fe3269","src/filter/impls.rs":"58fcc1f03454769a3c11a92cb4b7b9f7833a0c042ea80b4e863609e2414eed0f","src/filter/mod.rs":"960e1c22d7d2323bf73c3ff6ea2d34158ff767f2a6b4fc8c40e982bf8c2dd500","src/fork/by_error.rs":"4f89ba3b019f6ab36784577a664fece042ad2318026038cecb23c6f62fc63aa0","src/fork/macros.rs":"5aebb0134923fa9fa0fe7c16d2d85a1a29cf4ea49505187db56bf0bc8e984ad0","src/fork/mod.rs":"7378895128bf1ccc9de7f5675668ef922d3df7b84b04f68af4d5b49efe60341e","src/fork/predicates.rs":"314bf33144c6827cd3df05877d7c20fb64c9b18f6230e229340b8f04f4ccf4d3","src/helpers.rs":"008af3aa36ebf43ec249dd9162ff36c150fa00fe78b3065b023d59acbe218b7b","src/lib.rs":"7499b4bcb40453bc4678d6ae3febf05f5c1e351120513ddd694983e6da56f268","tests/data/langtest/de/core/helloworld@1/de.json":"5a45b1d80567de8c4ff754f7dbe20c22477c1e00a9400b38397eb034f295b8f8","tests/data/langtest/de/manifest.json":"fa2f848cff051fd12a909389fbbc44b93ae1feb92cce466cc4381f9548443ea9","tests/data/langtest/ro/core/helloworld@1/ro.json":"b6b68292746dd6bb2d92d9c08e2753db556ec9c954a3b593b0b8999df550f298","tests/data/langtest/ro/manifest.json":"fa2f848cff051fd12a909389fbbc44b93ae1feb92cce466cc4381f9548443ea9"},"package":"8e89bf33962b24bb48a4a21330c20c9ff17949338ea376360dd9eda2c209dca1"} \ No newline at end of file
diff --git a/vendor/icu_provider_adapters/Cargo.toml b/vendor/icu_provider_adapters/Cargo.toml
index 365a2a69b..6de64fe8b 100644
--- a/vendor/icu_provider_adapters/Cargo.toml
+++ b/vendor/icu_provider_adapters/Cargo.toml
@@ -12,7 +12,7 @@
[package]
edition = "2018"
name = "icu_provider_adapters"
-version = "1.0.0"
+version = "1.1.0"
authors = ["The ICU4X Project Developers"]
include = [
"src/**/*",
@@ -31,16 +31,16 @@ repository = "https://github.com/unicode-org/icu4x"
resolver = "2"
[dependencies.databake]
-version = "0.1.0"
+version = "0.1.3"
features = ["derive"]
optional = true
[dependencies.icu_locid]
-version = "1.0.0"
+version = "1.1.0"
features = ["zerovec"]
[dependencies.icu_provider]
-version = "1.0.0"
+version = "1.1.0"
features = ["macros"]
[dependencies.serde]
@@ -53,28 +53,23 @@ optional = true
default-features = false
[dependencies.tinystr]
-version = "0.7"
+version = "0.7.1"
features = ["zerovec"]
[dependencies.yoke]
-version = "0.6"
+version = "0.7.0"
[dependencies.zerovec]
-version = "0.9"
+version = "0.9.2"
features = ["yoke"]
-[dev-dependencies.icu_provider]
-version = "1.0.0"
-features = [
- "macros",
- "deserialize_json",
-]
+[dev-dependencies]
[features]
datagen = [
"std",
"serde",
- "databake",
+ "dep:databake",
"icu_provider/datagen",
"icu_locid/databake",
"zerovec/databake",
@@ -85,4 +80,7 @@ serde = [
"icu_locid/serde",
"icu_provider/serde",
]
-std = ["icu_locid/std"]
+std = [
+ "icu_locid/std",
+ "icu_provider/std",
+]
diff --git a/vendor/icu_provider_adapters/src/empty.rs b/vendor/icu_provider_adapters/src/empty.rs
index 31a6c42aa..85c6b9bc1 100644
--- a/vendor/icu_provider_adapters/src/empty.rs
+++ b/vendor/icu_provider_adapters/src/empty.rs
@@ -86,3 +86,26 @@ where
Err(self.error_kind.with_req(M::KEY, base_req))
}
}
+
+#[cfg(feature = "datagen")]
+impl<M> icu_provider::datagen::IterableDataProvider<M> for EmptyDataProvider
+where
+ M: KeyedDataMarker,
+{
+ fn supported_locales(&self) -> Result<alloc::vec::Vec<DataLocale>, DataError> {
+ Ok(vec![])
+ }
+}
+
+#[cfg(feature = "datagen")]
+impl<M> icu_provider::datagen::IterableDynamicDataProvider<M> for EmptyDataProvider
+where
+ M: DataMarker,
+{
+ fn supported_locales_for_key(
+ &self,
+ _: DataKey,
+ ) -> Result<alloc::vec::Vec<DataLocale>, DataError> {
+ Ok(vec![])
+ }
+}
diff --git a/vendor/icu_provider_adapters/src/fallback/adapter.rs b/vendor/icu_provider_adapters/src/fallback/adapter.rs
index 4d1f79255..f7cc10fe6 100644
--- a/vendor/icu_provider_adapters/src/fallback/adapter.rs
+++ b/vendor/icu_provider_adapters/src/fallback/adapter.rs
@@ -35,14 +35,15 @@ use crate::helpers::result_is_err_missing_data_options;
/// DataProvider::<HelloWorldV1Marker>::load(&provider, req).expect("successful with vertical fallback");
///
/// assert_eq!(
-/// "ja",
-/// response.metadata.locale.unwrap().to_string()
+/// response.metadata.locale.unwrap(),
+/// locale!("ja").into(),
/// );
/// assert_eq!(
+/// response.payload.unwrap().get().message,
/// "こんにちは世界",
-/// response.payload.unwrap().get().message
/// );
/// ```
+#[derive(Clone)]
pub struct LocaleFallbackProvider<P> {
inner: P,
fallbacker: LocaleFallbacker,
@@ -161,6 +162,11 @@ impl<P> LocaleFallbackProvider<P> {
&self.inner
}
+ /// Returns a mutable reference to the inner provider.
+ pub fn inner_mut(&mut self) -> &mut P {
+ &mut self.inner
+ }
+
/// Returns ownership of the inner provider to the caller.
pub fn into_inner(self) -> P {
self.inner
diff --git a/vendor/icu_provider_adapters/src/fallback/algorithms.rs b/vendor/icu_provider_adapters/src/fallback/algorithms.rs
index 9af52ef6d..0edc59304 100644
--- a/vendor/icu_provider_adapters/src/fallback/algorithms.rs
+++ b/vendor/icu_provider_adapters/src/fallback/algorithms.rs
@@ -207,6 +207,7 @@ mod tests {
use super::*;
use icu_locid::Locale;
use std::str::FromStr;
+ use writeable::Writeable;
struct TestCase {
input: &'static str,
@@ -355,6 +356,25 @@ mod tests {
expected_region_chain: &["hi-Latn-IN", "und-IN"],
},
TestCase {
+ input: "zh-CN",
+ requires_data: true,
+ extension_key: None,
+ fallback_supplement: None,
+ // Note: "zh-Hans" is not reachable because it is the default script for "zh".
+ // The fallback algorithm does not visit the language-script bundle when the
+ // script is the default for the language
+ expected_language_chain: &["zh-CN", "zh"],
+ expected_region_chain: &["zh-CN", "und-CN"],
+ },
+ TestCase {
+ input: "zh-TW",
+ requires_data: true,
+ extension_key: None,
+ fallback_supplement: None,
+ expected_language_chain: &["zh-TW", "zh-Hant-TW", "zh-Hant"],
+ expected_region_chain: &["zh-TW", "und-TW"],
+ },
+ TestCase {
input: "yue-HK",
requires_data: true,
extension_key: None,
@@ -396,10 +416,10 @@ mod tests {
};
let locale = DataLocale::from(Locale::from_str(cas.input).unwrap());
let mut it = key_fallbacker.fallback_for(locale);
- for expected in expected_chain {
+ for &expected in expected_chain {
assert_eq!(
expected,
- &it.get().to_string(),
+ &*it.get().write_to_string(),
"{:?} ({:?})",
cas.input,
priority
@@ -408,7 +428,7 @@ mod tests {
}
assert_eq!(
"und",
- it.get().to_string(),
+ &*it.get().write_to_string(),
"{:?} ({:?})",
cas.input,
priority
diff --git a/vendor/icu_provider_adapters/src/fallback/mod.rs b/vendor/icu_provider_adapters/src/fallback/mod.rs
index 6ec636a09..81adad41c 100644
--- a/vendor/icu_provider_adapters/src/fallback/mod.rs
+++ b/vendor/icu_provider_adapters/src/fallback/mod.rs
@@ -15,6 +15,7 @@
//! Run the locale fallback algorithm:
//!
//! ```
+//! use icu_locid::locale;
//! use icu_provider_adapters::fallback::LocaleFallbacker;
//! use icu_provider::prelude::*;
//!
@@ -26,20 +27,20 @@
//! let key_fallbacker = fallbacker.for_config(Default::default());
//!
//! // Set up the fallback iterator.
-//! let mut fallback_iterator = key_fallbacker.fallback_for(icu_locid::locale!("hi-Latn-IN").into());
+//! let mut fallback_iterator = key_fallbacker.fallback_for(DataLocale::from(locale!("hi-Latn-IN")));
//!
//! // Run the algorithm and check the results.
-//! assert_eq!(fallback_iterator.get().to_string(), "hi-Latn-IN");
+//! assert_eq!(fallback_iterator.get(), &DataLocale::from(locale!("hi-Latn-IN")));
//! fallback_iterator.step();
-//! assert_eq!(fallback_iterator.get().to_string(), "hi-Latn");
+//! assert_eq!(fallback_iterator.get(), &DataLocale::from(locale!("hi-Latn")));
//! fallback_iterator.step();
-//! assert_eq!(fallback_iterator.get().to_string(), "en-IN");
+//! assert_eq!(fallback_iterator.get(), &DataLocale::from(locale!("en-IN")));
//! fallback_iterator.step();
-//! assert_eq!(fallback_iterator.get().to_string(), "en-001");
+//! assert_eq!(fallback_iterator.get(), &DataLocale::from(locale!("en-001")));
//! fallback_iterator.step();
-//! assert_eq!(fallback_iterator.get().to_string(), "en");
+//! assert_eq!(fallback_iterator.get(), &DataLocale::from(locale!("en")));
//! fallback_iterator.step();
-//! assert_eq!(fallback_iterator.get().to_string(), "und");
+//! assert_eq!(fallback_iterator.get(), &DataLocale::from(locale!("und")));
//! ```
use icu_locid::extensions::unicode::{Key, Value};
@@ -67,6 +68,7 @@ pub struct LocaleFallbackConfig {
/// Retain the language and script subtags until the final step:
///
/// ```
+ /// use icu_locid::locale;
/// use icu_provider::prelude::*;
/// use icu_provider::FallbackPriority;
/// use icu_provider_adapters::fallback::LocaleFallbackConfig;
@@ -80,21 +82,25 @@ pub struct LocaleFallbackConfig {
/// config.priority = FallbackPriority::Language;
/// let key_fallbacker = fallbacker.for_config(config);
/// let mut fallback_iterator = key_fallbacker
- /// .fallback_for(icu_locid::locale!("ca-ES-valencia").into());
+ /// .fallback_for(DataLocale::from(locale!("ca-ES-valencia")));
///
/// // Run the algorithm and check the results.
- /// assert_eq!(fallback_iterator.get().to_string(), "ca-ES-valencia");
+ /// assert_eq!(
+ /// fallback_iterator.get(),
+ /// &DataLocale::from(locale!("ca-ES-valencia"))
+ /// );
/// fallback_iterator.step();
- /// assert_eq!(fallback_iterator.get().to_string(), "ca-ES");
+ /// assert_eq!(fallback_iterator.get(), &DataLocale::from(locale!("ca-ES")));
/// fallback_iterator.step();
- /// assert_eq!(fallback_iterator.get().to_string(), "ca");
+ /// assert_eq!(fallback_iterator.get(), &DataLocale::from(locale!("ca")));
/// fallback_iterator.step();
- /// assert_eq!(fallback_iterator.get().to_string(), "und");
+ /// assert_eq!(fallback_iterator.get(), &DataLocale::from(locale!("und")));
/// ```
///
/// Retain the region subtag until the final step:
///
/// ```
+ /// use icu_locid::locale;
/// use icu_provider::prelude::*;
/// use icu_provider::FallbackPriority;
/// use icu_provider_adapters::fallback::LocaleFallbackConfig;
@@ -108,18 +114,27 @@ pub struct LocaleFallbackConfig {
/// config.priority = FallbackPriority::Region;
/// let key_fallbacker = fallbacker.for_config(config);
/// let mut fallback_iterator = key_fallbacker
- /// .fallback_for(icu_locid::locale!("ca-ES-valencia").into());
+ /// .fallback_for(DataLocale::from(locale!("ca-ES-valencia")));
///
/// // Run the algorithm and check the results.
- /// assert_eq!(fallback_iterator.get().to_string(), "ca-ES-valencia");
+ /// assert_eq!(
+ /// fallback_iterator.get(),
+ /// &DataLocale::from(locale!("ca-ES-valencia"))
+ /// );
/// fallback_iterator.step();
- /// assert_eq!(fallback_iterator.get().to_string(), "ca-ES");
+ /// assert_eq!(fallback_iterator.get(), &DataLocale::from(locale!("ca-ES")));
/// fallback_iterator.step();
- /// assert_eq!(fallback_iterator.get().to_string(), "und-ES-valencia");
+ /// assert_eq!(
+ /// fallback_iterator.get(),
+ /// &DataLocale::from(locale!("und-ES-valencia"))
+ /// );
/// fallback_iterator.step();
- /// assert_eq!(fallback_iterator.get().to_string(), "und-ES");
+ /// assert_eq!(
+ /// fallback_iterator.get(),
+ /// &DataLocale::from(locale!("und-ES"))
+ /// );
/// fallback_iterator.step();
- /// assert_eq!(fallback_iterator.get().to_string(), "und");
+ /// assert_eq!(fallback_iterator.get(), &DataLocale::from(locale!("und")));
/// ```
pub priority: FallbackPriority,
/// An extension keyword to retain during locale fallback.
@@ -127,6 +142,7 @@ pub struct LocaleFallbackConfig {
/// # Examples
///
/// ```
+ /// use icu_locid::locale;
/// use icu_provider::prelude::*;
/// use icu_provider_adapters::fallback::LocaleFallbackConfig;
/// use icu_provider_adapters::fallback::LocaleFallbacker;
@@ -139,16 +155,19 @@ pub struct LocaleFallbackConfig {
/// config.extension_key = Some(icu_locid::extensions_unicode_key!("nu"));
/// let key_fallbacker = fallbacker.for_config(config);
/// let mut fallback_iterator = key_fallbacker
- /// .fallback_for(icu_locid::locale!("ar-EG-u-nu-latn").into());
+ /// .fallback_for(DataLocale::from(locale!("ar-EG-u-nu-latn")));
///
/// // Run the algorithm and check the results.
- /// assert_eq!(fallback_iterator.get().to_string(), "ar-EG-u-nu-latn");
+ /// assert_eq!(
+ /// fallback_iterator.get(),
+ /// &DataLocale::from(locale!("ar-EG-u-nu-latn"))
+ /// );
/// fallback_iterator.step();
- /// assert_eq!(fallback_iterator.get().to_string(), "ar-EG");
+ /// assert_eq!(fallback_iterator.get(), &DataLocale::from(locale!("ar-EG")));
/// fallback_iterator.step();
- /// assert_eq!(fallback_iterator.get().to_string(), "ar");
+ /// assert_eq!(fallback_iterator.get(), &DataLocale::from(locale!("ar")));
/// fallback_iterator.step();
- /// assert_eq!(fallback_iterator.get().to_string(), "und");
+ /// assert_eq!(fallback_iterator.get(), &DataLocale::from(locale!("und")));
/// ```
pub extension_key: Option<Key>,
/// Fallback supplement data key to customize fallback rules.
@@ -162,6 +181,7 @@ pub struct LocaleFallbackConfig {
/// # Examples
///
/// ```
+ /// use icu_locid::locale;
/// use icu_provider::prelude::*;
/// use icu_provider::FallbackPriority;
/// use icu_provider::FallbackSupplement;
@@ -178,17 +198,23 @@ pub struct LocaleFallbackConfig {
/// config.fallback_supplement = Some(FallbackSupplement::Collation);
/// let key_fallbacker = fallbacker.for_config(config);
/// let mut fallback_iterator =
- /// key_fallbacker.fallback_for(icu_locid::locale!("yue-HK").into());
+ /// key_fallbacker.fallback_for(DataLocale::from(locale!("yue-HK")));
///
/// // Run the algorithm and check the results.
/// // TODO(#1964): add "zh" as a target.
- /// assert_eq!(fallback_iterator.get().to_string(), "yue-HK");
+ /// assert_eq!(
+ /// fallback_iterator.get(),
+ /// &DataLocale::from(locale!("yue-HK"))
+ /// );
/// fallback_iterator.step();
- /// assert_eq!(fallback_iterator.get().to_string(), "yue");
+ /// assert_eq!(fallback_iterator.get(), &DataLocale::from(locale!("yue")));
/// fallback_iterator.step();
- /// assert_eq!(fallback_iterator.get().to_string(), "zh-Hant");
+ /// assert_eq!(
+ /// fallback_iterator.get(),
+ /// &DataLocale::from(locale!("zh-Hant"))
+ /// );
/// fallback_iterator.step();
- /// assert_eq!(fallback_iterator.get().to_string(), "und");
+ /// assert_eq!(fallback_iterator.get(), &DataLocale::from(locale!("und")));
/// ```
pub fallback_supplement: Option<FallbackSupplement>,
}
@@ -303,6 +329,7 @@ impl LocaleFallbacker {
/// # Examples
///
/// ```
+ /// use icu_locid::locale;
/// use icu_provider::prelude::*;
/// use icu_provider_adapters::fallback::LocaleFallbacker;
/// use std::borrow::Cow;
@@ -323,14 +350,17 @@ impl LocaleFallbacker {
/// .expect("data");
/// let key_fallbacker = fallbacker.for_key(FooV1Marker::KEY);
/// let mut fallback_iterator =
- /// key_fallbacker.fallback_for(icu_locid::locale!("en-GB").into());
+ /// key_fallbacker.fallback_for(DataLocale::from(locale!("en-GB")));
///
/// // Run the algorithm and check the results.
- /// assert_eq!(fallback_iterator.get().to_string(), "en-GB");
+ /// assert_eq!(fallback_iterator.get(), &DataLocale::from(locale!("en-GB")));
/// fallback_iterator.step();
- /// assert_eq!(fallback_iterator.get().to_string(), "und-GB");
+ /// assert_eq!(
+ /// fallback_iterator.get(),
+ /// &DataLocale::from(locale!("und-GB"))
+ /// );
/// fallback_iterator.step();
- /// assert_eq!(fallback_iterator.get().to_string(), "und");
+ /// assert_eq!(fallback_iterator.get(), &DataLocale::from(locale!("und")));
/// ```
///
/// [`DataRequestMetadata`]: icu_provider::DataRequestMetadata
diff --git a/vendor/icu_provider_adapters/src/fork/by_error.rs b/vendor/icu_provider_adapters/src/fork/by_error.rs
index 5069229fa..b3badb147 100644
--- a/vendor/icu_provider_adapters/src/fork/by_error.rs
+++ b/vendor/icu_provider_adapters/src/fork/by_error.rs
@@ -32,6 +32,11 @@ impl<P0, P1, F> ForkByErrorProvider<P0, P1, F> {
(&self.0, &self.1)
}
+ /// Returns mutable references to the inner providers.
+ pub fn inner_mut(&mut self) -> (&mut P0, &mut P1) {
+ (&mut self.0, &mut self.1)
+ }
+
/// Returns ownership of the inner providers to the caller.
pub fn into_inner(self) -> (P0, P1) {
(self.0, self.1)
@@ -142,10 +147,20 @@ impl<P, F> MultiForkByErrorProvider<P, F> {
&self.providers
}
+ /// Exposes a mutable vector of providers to a closure so it can be mutated.
+ pub fn with_inner_mut(&mut self, f: impl FnOnce(&mut Vec<P>)) {
+ f(&mut self.providers)
+ }
+
/// Returns ownership of the inner providers to the caller.
pub fn into_inner(self) -> Vec<P> {
self.providers
}
+
+ /// Adds an additional child provider.
+ pub fn push(&mut self, provider: P) {
+ self.providers.push(provider);
+ }
}
impl<P, F> BufferProvider for MultiForkByErrorProvider<P, F>
@@ -158,15 +173,16 @@ where
key: DataKey,
req: DataRequest,
) -> Result<DataResponse<BufferMarker>, DataError> {
+ let mut last_error = DataErrorKind::MissingDataKey.with_key(key);
for provider in self.providers.iter() {
let result = provider.load_buffer(key, req);
match result {
Ok(ok) => return Ok(ok),
Err(err) if !self.predicate.test(key, Some(req), err) => return Err(err),
- _ => (),
+ Err(err) => last_error = err,
};
}
- Err(DataErrorKind::MissingDataKey.with_key(key))
+ Err(last_error)
}
}
@@ -176,15 +192,16 @@ where
F: ForkByErrorPredicate,
{
fn load_any(&self, key: DataKey, req: DataRequest) -> Result<AnyResponse, DataError> {
+ let mut last_error = DataErrorKind::MissingDataKey.with_key(key);
for provider in self.providers.iter() {
let result = provider.load_any(key, req);
match result {
Ok(ok) => return Ok(ok),
Err(err) if !self.predicate.test(key, Some(req), err) => return Err(err),
- _ => (),
+ Err(err) => last_error = err,
};
}
- Err(DataErrorKind::MissingDataKey.with_key(key))
+ Err(last_error)
}
}
@@ -195,15 +212,16 @@ where
F: ForkByErrorPredicate,
{
fn load_data(&self, key: DataKey, req: DataRequest) -> Result<DataResponse<M>, DataError> {
+ let mut last_error = DataErrorKind::MissingDataKey.with_key(key);
for provider in self.providers.iter() {
let result = provider.load_data(key, req);
match result {
Ok(ok) => return Ok(ok),
Err(err) if !self.predicate.test(key, Some(req), err) => return Err(err),
- _ => (),
+ Err(err) => last_error = err,
};
}
- Err(DataErrorKind::MissingDataKey.with_key(key))
+ Err(last_error)
}
}
@@ -215,15 +233,16 @@ where
F: ForkByErrorPredicate,
{
fn supported_locales_for_key(&self, key: DataKey) -> Result<Vec<DataLocale>, DataError> {
+ let mut last_error = DataErrorKind::MissingDataKey.with_key(key);
for provider in self.providers.iter() {
let result = provider.supported_locales_for_key(key);
match result {
Ok(ok) => return Ok(ok),
Err(err) if !self.predicate.test(key, None, err) => return Err(err),
- _ => (),
+ Err(err) => last_error = err,
};
}
- Err(DataErrorKind::MissingDataKey.with_key(key))
+ Err(last_error)
}
}
@@ -240,6 +259,7 @@ where
key: DataKey,
mut from: DataPayload<MFrom>,
) -> Result<DataPayload<MTo>, (DataPayload<MFrom>, DataError)> {
+ let mut last_error = DataErrorKind::MissingDataKey.with_key(key);
for provider in self.providers.iter() {
let result = provider.convert(key, from);
match result {
@@ -250,10 +270,11 @@ where
return Err((returned, err));
}
from = returned;
+ last_error = err;
}
};
}
- Err((from, DataErrorKind::MissingDataKey.with_key(key)))
+ Err((from, last_error))
}
}
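
The by_error.rs hunks above all apply one pattern: rather than discarding every provider error and reporting a generic `MissingDataKey`, each loop now remembers the most recent error that the predicate let through and returns that when no provider succeeds. A minimal sketch of the pattern, using hypothetical `Req`/`Resp`/`Error` stand-ins rather than the icu_provider types:

    // Simplified sketch of the "remember the last allowed error" pattern above.
    // `Req`, `Resp`, and `Error` are hypothetical stand-ins, not icu_provider types.
    struct Req;
    struct Resp;
    #[derive(Debug, Clone)]
    enum Error {
        NotFound,
        Unavailable(&'static str),
    }

    fn load_from_any(providers: &[fn(&Req) -> Result<Resp, Error>], req: &Req) -> Result<Resp, Error> {
        // Seed with the generic "nothing matched" error, like MissingDataKey above.
        let mut last_error = Error::NotFound;
        for provider in providers {
            match provider(req) {
                Ok(resp) => return Ok(resp),
                // The real code first consults a predicate and returns fatal errors
                // immediately; recoverable ones just update `last_error` and move on.
                Err(err) => last_error = err,
            }
        }
        Err(last_error)
    }

The only behavioral change is which error surfaces on total failure; the early return for errors the predicate rejects is untouched.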
diff --git a/vendor/icu_provider_macros/.cargo-checksum.json b/vendor/icu_provider_macros/.cargo-checksum.json
index 21caa79bb..67fb422f7 100644
--- a/vendor/icu_provider_macros/.cargo-checksum.json
+++ b/vendor/icu_provider_macros/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.toml":"5c2656c9d6a8256494d0d58648ba09110cb602fa81b59580e758695704299664","LICENSE":"4ad7541d66a407234e2c84902124cef325c29f3e966353efdb800bedb8b8da21","README.md":"f9f95075851c9c3b7d0559c25794ebcad5dee957c7fb27c343418a3324f15b8a","src/lib.rs":"e3f9092950ee1db12dbbd5b3fea39aead909896ee317f5b38a6442c209e37d03","src/tests.rs":"729a551b69f368100b142a3260573307b653931228cdc803eeffbb81910134f7"},"package":"38cf6f5b65cf81f0b4298da647101acbfe6ae0e25263f92bd7a22597e9d6d606"} \ No newline at end of file
+{"files":{"Cargo.toml":"8f35af4529f0a10bf10dc6ba2ad5fffcfb78d29c78d1e3d4e1d737839480f7a1","LICENSE":"4ad7541d66a407234e2c84902124cef325c29f3e966353efdb800bedb8b8da21","README.md":"f9f95075851c9c3b7d0559c25794ebcad5dee957c7fb27c343418a3324f15b8a","src/lib.rs":"c6675b847fb5a2fd0a686460598c4e76b55eff51df792da0386be9b02156db5f","src/tests.rs":"c0f93af328a3e15e93ca5914a14adacf73274eff821f2f849c5b96ce884547b1"},"package":"9ddb07844c2ffc4c28840e799e9e54ff054393cf090740decf25624e9d94b93a"} \ No newline at end of file
diff --git a/vendor/icu_provider_macros/Cargo.toml b/vendor/icu_provider_macros/Cargo.toml
index 1f2350f36..289aad854 100644
--- a/vendor/icu_provider_macros/Cargo.toml
+++ b/vendor/icu_provider_macros/Cargo.toml
@@ -12,7 +12,7 @@
[package]
edition = "2018"
name = "icu_provider_macros"
-version = "1.0.0"
+version = "1.1.0"
authors = ["The ICU4X Project Developers"]
include = [
"src/**/*",
diff --git a/vendor/icu_provider_macros/src/lib.rs b/vendor/icu_provider_macros/src/lib.rs
index e43cebfce..8d39f0f87 100644
--- a/vendor/icu_provider_macros/src/lib.rs
+++ b/vendor/icu_provider_macros/src/lib.rs
@@ -129,7 +129,7 @@ fn data_struct_impl(attr: AttributeArgs, input: DeriveInput) -> TokenStream2 {
.find(|a| a.path.is_ident("databake"))
.map(|a| {
quote! {
- #[derive(Default, databake::Bake)]
+ #[derive(databake::Bake)]
#a
}
})
diff --git a/vendor/icu_provider_macros/src/tests.rs b/vendor/icu_provider_macros/src/tests.rs
index 51411852b..c7ca63530 100644
--- a/vendor/icu_provider_macros/src/tests.rs
+++ b/vendor/icu_provider_macros/src/tests.rs
@@ -144,7 +144,7 @@ fn test_databake() {
),
quote!(
#[doc = "Marker type for [`FooV1`]: \"demo/bar@1\"\n\n- Fallback priority: language (default)\n- Extension keyword: none (default)"]
- #[derive(Default, databake::Bake)]
+ #[derive(databake::Bake)]
#[databake(path = test::path)]
pub struct BarV1Marker;
impl icu_provider::DataMarker for BarV1Marker {
diff --git a/vendor/itoa/.cargo-checksum.json b/vendor/itoa/.cargo-checksum.json
index 5c464a6c5..5ab0879bb 100644
--- a/vendor/itoa/.cargo-checksum.json
+++ b/vendor/itoa/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.toml":"895c75445c305a4d56486a2ca7312a85a486469492286e277f41601be418bcd4","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"4bc11375d5c1c2c7cf4988b94d44c0f1fe8cdf2d68755fef127cd5ba0d70b939","benches/bench.rs":"636f3093bd461210ad3063289d455f90669c4a1be3273bcd30898de39f02c641","src/lib.rs":"9438cd2cfcf95e4a6719816580e209c3873beb75074ca756118f8723d336141b","src/udiv128.rs":"d28c1872c37ee2185931babcb20a221b8706a5aa8abc4963419763888023ff17","tests/test.rs":"f7404fc5f7cd1bdaf74a3b64a70d5b30586241ddc1ce2c82bd1b564999fcce0e"},"package":"4217ad341ebadf8d8e724e264f13e593e0648f5b3e94b3896a5df283be015ecc"} \ No newline at end of file
+{"files":{"Cargo.toml":"bb96760f2d45e86313dbec93a3210e5073c4ee74116097bb5ca45ba9c5b049a6","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"48573443063fa4e0786c3b46f42b6efd1f171c6b73408a64afc1b34de89f31fe","benches/bench.rs":"636f3093bd461210ad3063289d455f90669c4a1be3273bcd30898de39f02c641","src/lib.rs":"da13f0d5dcba3bb2971f67b6856ea6f2e3cbdc31d47f7042d7f131b08bb7de85","src/udiv128.rs":"d28c1872c37ee2185931babcb20a221b8706a5aa8abc4963419763888023ff17","tests/test.rs":"f7404fc5f7cd1bdaf74a3b64a70d5b30586241ddc1ce2c82bd1b564999fcce0e"},"package":"fad582f4b9e86b6caa621cabeb0963332d92eea04729ab12892c2533951e6440"} \ No newline at end of file
diff --git a/vendor/itoa/Cargo.toml b/vendor/itoa/Cargo.toml
index bab03ecf7..86c10b644 100644
--- a/vendor/itoa/Cargo.toml
+++ b/vendor/itoa/Cargo.toml
@@ -13,7 +13,7 @@
edition = "2018"
rust-version = "1.36"
name = "itoa"
-version = "1.0.4"
+version = "1.0.5"
authors = ["David Tolnay <dtolnay@gmail.com>"]
exclude = [
"performance.png",
diff --git a/vendor/itoa/README.md b/vendor/itoa/README.md
index 914d1ff30..5728fb726 100644
--- a/vendor/itoa/README.md
+++ b/vendor/itoa/README.md
@@ -4,7 +4,7 @@ itoa
[<img alt="github" src="https://img.shields.io/badge/github-dtolnay/itoa-8da0cb?style=for-the-badge&labelColor=555555&logo=github" height="20">](https://github.com/dtolnay/itoa)
[<img alt="crates.io" src="https://img.shields.io/crates/v/itoa.svg?style=for-the-badge&color=fc8d62&logo=rust" height="20">](https://crates.io/crates/itoa)
[<img alt="docs.rs" src="https://img.shields.io/badge/docs.rs-itoa-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs" height="20">](https://docs.rs/itoa)
-[<img alt="build status" src="https://img.shields.io/github/workflow/status/dtolnay/itoa/CI/master?style=for-the-badge" height="20">](https://github.com/dtolnay/itoa/actions?query=branch%3Amaster)
+[<img alt="build status" src="https://img.shields.io/github/actions/workflow/status/dtolnay/itoa/ci.yml?branch=master&style=for-the-badge" height="20">](https://github.com/dtolnay/itoa/actions?query=branch%3Amaster)
This crate provides a fast conversion of integer primitives to decimal strings.
The implementation comes straight from [libcore] but avoids the performance
diff --git a/vendor/itoa/src/lib.rs b/vendor/itoa/src/lib.rs
index 7c3616e51..168407f06 100644
--- a/vendor/itoa/src/lib.rs
+++ b/vendor/itoa/src/lib.rs
@@ -30,7 +30,7 @@
//!
//! ![performance](https://raw.githubusercontent.com/dtolnay/itoa/master/performance.png)
-#![doc(html_root_url = "https://docs.rs/itoa/1.0.4")]
+#![doc(html_root_url = "https://docs.rs/itoa/1.0.5")]
#![no_std]
#![allow(
clippy::cast_lossless,
diff --git a/vendor/jobserver/.cargo-checksum.json b/vendor/jobserver/.cargo-checksum.json
index 09c948a8e..734fd6db9 100644
--- a/vendor/jobserver/.cargo-checksum.json
+++ b/vendor/jobserver/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.toml":"9d5e9bc3aa927124431914287d2830ff03ca5a80931a3b44b3f057b92d09e850","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"07d8d79f8f6b6a94321fe8db78d26ed409de47cee49290947bd6bbfa29d05e9c","src/lib.rs":"891d9c9cf2d75eee30c02ffa8ae7fa8fb1f7675dccddb0cc79a067e1a87a9850","src/unix.rs":"5802f031e80295c5498a2ddb95b5ee6e39b7294f25b35075302fb978b8d4c409","src/wasm.rs":"bb67f97bccd0b0c1762917de342d721e319a3a204604ab1517285c59b5e2a369","src/windows.rs":"f886175abbf75ff45ea3fc09396bbcc3048e7daf732ed78149377f7b8e9148b2","tests/client-of-myself.rs":"ca09bf398f69df4bac1730999e954dbbc3faf3c6512678c136e0938e7e9cd0ab","tests/client.rs":"64547b780edce5ebcd397db1160fd86baab030c530c6976fa013bca9f07a85ff","tests/helper.rs":"c0e6c00eaf849295d8ec23e374690b6645c0f7d993e91abf7ad53ac960f71762","tests/make-as-a-client.rs":"ec09a7cdbf78d6c3b16f26de15766c4bd62d44a913ada6b86b66e067e6c484ba","tests/server.rs":"9a260f1302ae4908479df0bd34b46edb9d2b8b9b3dbc3e2b6666296d9e1b2b84"},"package":"af25a77299a7f711a01975c35a6a424eb6862092cc2d6c72c4ed6cbc56dfc1fa"} \ No newline at end of file
+{"files":{"Cargo.toml":"0f712c94e98313fc5833521e1dc0c42a57e5603ea54d1e05a004b95b52fb39b7","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"07d8d79f8f6b6a94321fe8db78d26ed409de47cee49290947bd6bbfa29d05e9c","src/lib.rs":"be789fda33a51375fcc87e4bf9bf1256930d718f698f700bbec5e335c83e0659","src/unix.rs":"1e1efc5cd1f381fea83bdaaee474ad3530b396305a9bbdc9da08b0c36baced31","src/wasm.rs":"65d3d8ed45972b4459581505906481d32a50d2f7514cd7ff2a595fceeaa672f0","src/windows.rs":"8e0fa3ab29757d809d4fa03c8101870435ce8c4ceaebe491df3144d62fe0aaaf","tests/client-of-myself.rs":"ca09bf398f69df4bac1730999e954dbbc3faf3c6512678c136e0938e7e9cd0ab","tests/client.rs":"d4745cdd650c86d19bc81f6c9b35df498996deffb86ae6412ad040af96a19183","tests/helper.rs":"c0e6c00eaf849295d8ec23e374690b6645c0f7d993e91abf7ad53ac960f71762","tests/make-as-a-client.rs":"8be1f3fef1e9e65c7904dbaa04364bf0f44e9deab84a2a247a5a94b5cf0df9bc","tests/server.rs":"da15bf12e1df1883f660892b996c9e0d92485aace3f7b50ee70c4a8e6deae8da"},"package":"936cfd212a0155903bcbc060e316fb6cc7cbf2e1907329391ebadc1fe0ce77c2"} \ No newline at end of file
diff --git a/vendor/jobserver/Cargo.toml b/vendor/jobserver/Cargo.toml
index d9a878266..938371008 100644
--- a/vendor/jobserver/Cargo.toml
+++ b/vendor/jobserver/Cargo.toml
@@ -3,21 +3,23 @@
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
-# to registry (e.g., crates.io) dependencies
+# to registry (e.g., crates.io) dependencies.
#
-# If you believe there's an error in this file please file an
-# issue against the rust-lang/cargo repository. If you're
-# editing this file be aware that the upstream Cargo.toml
-# will likely look very different (and much more reasonable)
+# If you are reading this file be aware that the original Cargo.toml
+# will likely look very different (and much more reasonable).
+# See Cargo.toml.orig for the original contents.
[package]
edition = "2018"
name = "jobserver"
-version = "0.1.24"
+version = "0.1.26"
authors = ["Alex Crichton <alex@alexcrichton.com>"]
-description = "An implementation of the GNU make jobserver for Rust\n"
+description = """
+An implementation of the GNU make jobserver for Rust
+"""
homepage = "https://github.com/alexcrichton/jobserver-rs"
documentation = "https://docs.rs/jobserver"
+readme = "README.md"
license = "MIT/Apache-2.0"
repository = "https://github.com/alexcrichton/jobserver-rs"
@@ -43,19 +45,21 @@ harness = false
[[test]]
name = "helper"
path = "tests/helper.rs"
+
[dev-dependencies.futures]
version = "0.1"
[dev-dependencies.num_cpus]
version = "1.0"
-[dev-dependencies.tempdir]
-version = "0.3"
+[dev-dependencies.tempfile]
+version = "3"
[dev-dependencies.tokio-core]
version = "0.1"
[dev-dependencies.tokio-process]
version = "0.2"
+
[target."cfg(unix)".dependencies.libc]
version = "0.2.50"
diff --git a/vendor/jobserver/src/lib.rs b/vendor/jobserver/src/lib.rs
index 72c02c120..cd0cdd749 100644
--- a/vendor/jobserver/src/lib.rs
+++ b/vendor/jobserver/src/lib.rs
@@ -11,7 +11,10 @@
//! The jobserver implementation can be found in [detail online][docs] but
//! basically boils down to a cross-process semaphore. On Unix this is
//! implemented with the `pipe` syscall and read/write ends of a pipe and on
-//! Windows this is implemented literally with IPC semaphores.
+//! Windows this is implemented literally with IPC semaphores. Starting with
+//! GNU `make` version 4.4, a named pipe (fifo) is the default transport on
+//! Unix. This crate also supports that scheme by inheriting and forwarding the
+//! correct environment.
//!
//! The jobserver protocol in `make` also dictates when tokens are acquired to
//! run child work, and clients using this crate should take care to implement
@@ -208,7 +211,7 @@ impl Client {
/// with `CLOEXEC` so they're not automatically inherited by spawned
/// children.
///
- /// # Unsafety
+ /// # Safety
///
/// This function is `unsafe` to call on Unix specifically as it
/// transitively requires usage of the `from_raw_fd` function, which is
@@ -273,6 +276,19 @@ impl Client {
})
}
+ /// Returns the number of tokens in the read-side pipe.
+ ///
+ /// # Return value
+ ///
+ /// Number of bytes available to be read from the jobserver pipe
+ ///
+ /// # Errors
+ ///
+ /// Underlying errors from the ioctl will be passed up.
+ pub fn available(&self) -> io::Result<usize> {
+ self.inner.available()
+ }
+
/// Configures a child process to have access to this client's jobserver as
/// well.
///
@@ -290,13 +306,41 @@ impl Client {
///
/// On platforms other than Unix and Windows this panics.
pub fn configure(&self, cmd: &mut Command) {
+ cmd.env("CARGO_MAKEFLAGS", &self.mflags_env());
+ self.inner.configure(cmd);
+ }
+
+ /// Configures a child process to have access to this client's jobserver as
+ /// well.
+ ///
+ /// This function is required to be called to ensure that a jobserver is
+ /// properly inherited by a child process. If this function is *not* called
+ /// then this `Client` will not be accessible in the child process. In other
+ /// words, if not called, then `Client::from_env` will return `None` in the
+ /// child process (or the equivalent of `Child::from_env` that `make` uses).
+ ///
+ /// ## Platform-specific behavior
+ ///
+ /// On Unix and Windows this will clobber the `CARGO_MAKEFLAGS`,
+ /// `MAKEFLAGS` and `MFLAGS` environment variables for the child process,
+ /// and on Unix this will also allow the two file descriptors for
+ /// this client to be inherited to the child.
+ ///
+ /// On platforms other than Unix and Windows this panics.
+ pub fn configure_make(&self, cmd: &mut Command) {
+ let value = self.mflags_env();
+ cmd.env("CARGO_MAKEFLAGS", &value);
+ cmd.env("MAKEFLAGS", &value);
+ cmd.env("MFLAGS", &value);
+ self.inner.configure(cmd);
+ }
+
+ fn mflags_env(&self) -> String {
let arg = self.inner.string_arg();
// Older implementations of make use `--jobserver-fds` and newer
// implementations use `--jobserver-auth`, pass both to try to catch
// both implementations.
- let value = format!("-j --jobserver-fds={0} --jobserver-auth={0}", arg);
- cmd.env("CARGO_MAKEFLAGS", &value);
- self.inner.configure(cmd);
+ format!("-j --jobserver-fds={0} --jobserver-auth={0}", arg)
}
/// Converts this `Client` into a helper thread to deal with a blocking
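
The lib.rs hunk above splits the environment setup into `configure` (which sets only `CARGO_MAKEFLAGS`, plus fd inheritance on Unix) and the new `configure_make` (which also clobbers `MAKEFLAGS` and `MFLAGS`), and adds `available()`. A rough usage sketch against the 0.1.26 API introduced here, not taken from the crate's documentation:

    use std::process::Command;

    use jobserver::Client;

    fn main() -> std::io::Result<()> {
        // Create a jobserver with four tokens.
        let client = Client::new(4)?;

        // `configure` sets CARGO_MAKEFLAGS on the child (and, on Unix,
        // arranges for the pipe fds to be inherited) ...
        let mut cargo = Command::new("cargo");
        client.configure(&mut cargo);

        // ... while the new `configure_make` also clobbers MAKEFLAGS and MFLAGS,
        // so a spawned GNU make picks the jobserver up directly.
        let mut make = Command::new("make");
        client.configure_make(&mut make);

        // The new `available` reports how many tokens are currently unclaimed.
        println!("tokens available: {}", client.available()?);
        Ok(())
    }

Which of the two configure methods fits depends on whether the child understands `CARGO_MAKEFLAGS` or is a plain `make` invocation.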
diff --git a/vendor/jobserver/src/unix.rs b/vendor/jobserver/src/unix.rs
index d69ae88e3..e4b143505 100644
--- a/vendor/jobserver/src/unix.rs
+++ b/vendor/jobserver/src/unix.rs
@@ -1,8 +1,11 @@
use libc::c_int;
-use std::fs::File;
+
+use std::fs::{File, OpenOptions};
use std::io::{self, Read, Write};
use std::mem;
+use std::mem::MaybeUninit;
use std::os::unix::prelude::*;
+use std::path::{Path, PathBuf};
use std::process::Command;
use std::ptr;
use std::sync::{Arc, Once};
@@ -10,9 +13,11 @@ use std::thread::{self, Builder, JoinHandle};
use std::time::Duration;
#[derive(Debug)]
-pub struct Client {
- read: File,
- write: File,
+pub enum Client {
+ /// `--jobserver-auth=R,W`
+ Pipe { read: File, write: File },
+ /// `--jobserver-auth=fifo:PATH`
+ Fifo { file: File, path: PathBuf },
}
#[derive(Debug)]
@@ -21,13 +26,26 @@ pub struct Acquired {
}
impl Client {
- pub fn new(limit: usize) -> io::Result<Client> {
+ pub fn new(mut limit: usize) -> io::Result<Client> {
let client = unsafe { Client::mk()? };
+
// I don't think the character written here matters, but I could be
// wrong!
- for _ in 0..limit {
- (&client.write).write_all(&[b'|'])?;
+ const BUFFER: [u8; 128] = [b'|'; 128];
+
+ let mut write = client.write();
+
+ set_nonblocking(write.as_raw_fd(), true)?;
+
+ while limit > 0 {
+ let n = limit.min(BUFFER.len());
+
+ write.write_all(&BUFFER[..n])?;
+ limit -= n;
}
+
+ set_nonblocking(write.as_raw_fd(), false)?;
+
Ok(client)
}
@@ -64,6 +82,31 @@ impl Client {
}
pub unsafe fn open(s: &str) -> Option<Client> {
+ Client::from_fifo(s).or_else(|| Client::from_pipe(s))
+ }
+
+ /// `--jobserver-auth=fifo:PATH`
+ fn from_fifo(s: &str) -> Option<Client> {
+ let mut parts = s.splitn(2, ':');
+ if parts.next().unwrap() != "fifo" {
+ return None;
+ }
+ let path = match parts.next() {
+ Some(p) => Path::new(p),
+ None => return None,
+ };
+ let file = match OpenOptions::new().read(true).write(true).open(path) {
+ Ok(f) => f,
+ Err(_) => return None,
+ };
+ Some(Client::Fifo {
+ file,
+ path: path.into(),
+ })
+ }
+
+ /// `--jobserver-auth=R,W`
+ unsafe fn from_pipe(s: &str) -> Option<Client> {
let mut parts = s.splitn(2, ',');
let read = parts.next().unwrap();
let write = match parts.next() {
@@ -97,12 +140,28 @@ impl Client {
}
unsafe fn from_fds(read: c_int, write: c_int) -> Client {
- Client {
+ Client::Pipe {
read: File::from_raw_fd(read),
write: File::from_raw_fd(write),
}
}
+ /// Gets the read end of our jobserver client.
+ fn read(&self) -> &File {
+ match self {
+ Client::Pipe { read, .. } => read,
+ Client::Fifo { file, .. } => file,
+ }
+ }
+
+ /// Gets the write end of our jobserver client.
+ fn write(&self) -> &File {
+ match self {
+ Client::Pipe { write, .. } => write,
+ Client::Fifo { file, .. } => file,
+ }
+ }
+
pub fn acquire(&self) -> io::Result<Acquired> {
// Ignore interrupts and keep trying if that happens
loop {
@@ -137,11 +196,12 @@ impl Client {
// to shut us down, so we otherwise punt all errors upwards.
unsafe {
let mut fd: libc::pollfd = mem::zeroed();
- fd.fd = self.read.as_raw_fd();
+ let mut read = self.read();
+ fd.fd = read.as_raw_fd();
fd.events = libc::POLLIN;
loop {
let mut buf = [0];
- match (&self.read).read(&mut buf) {
+ match read.read(&mut buf) {
Ok(1) => return Ok(Some(Acquired { byte: buf[0] })),
Ok(_) => {
return Err(io::Error::new(
@@ -179,7 +239,7 @@ impl Client {
// always quickly release a token). If that turns out to not be the
// case we'll get an error anyway!
let byte = data.map(|d| d.byte).unwrap_or(b'+');
- match (&self.write).write(&[byte])? {
+ match self.write().write(&[byte])? {
1 => Ok(()),
_ => Err(io::Error::new(
io::ErrorKind::Other,
@@ -189,16 +249,31 @@ impl Client {
}
pub fn string_arg(&self) -> String {
- format!("{},{}", self.read.as_raw_fd(), self.write.as_raw_fd())
+ match self {
+ Client::Pipe { read, write } => format!("{},{}", read.as_raw_fd(), write.as_raw_fd()),
+ Client::Fifo { path, .. } => format!("fifo:{}", path.to_str().unwrap()),
+ }
+ }
+
+ pub fn available(&self) -> io::Result<usize> {
+ let mut len = MaybeUninit::<c_int>::uninit();
+ cvt(unsafe { libc::ioctl(self.read().as_raw_fd(), libc::FIONREAD, len.as_mut_ptr()) })?;
+ Ok(unsafe { len.assume_init() } as usize)
}
pub fn configure(&self, cmd: &mut Command) {
+ match self {
+ // We `File::open`ed it when inheriting from environment,
+ // so no need to set cloexec for fifo.
+ Client::Fifo { .. } => return,
+ Client::Pipe { .. } => {}
+ };
// Here we basically just want to say that in the child process
// we'll configure the read/write file descriptors to *not* be
// cloexec, so they're inherited across the exec and specified as
// integers through `string_arg` above.
- let read = self.read.as_raw_fd();
- let write = self.write.as_raw_fd();
+ let read = self.read().as_raw_fd();
+ let write = self.write().as_raw_fd();
unsafe {
cmd.pre_exec(move || {
set_cloexec(read, false)?;
@@ -224,7 +299,14 @@ pub(crate) fn spawn_helper(
let mut err = None;
USR1_INIT.call_once(|| unsafe {
let mut new: libc::sigaction = mem::zeroed();
- new.sa_sigaction = sigusr1_handler as usize;
+ #[cfg(target_os = "aix")]
+ {
+ new.sa_union.__su_sigaction = sigusr1_handler;
+ }
+ #[cfg(not(target_os = "aix"))]
+ {
+ new.sa_sigaction = sigusr1_handler as usize;
+ }
new.sa_flags = libc::SA_SIGINFO as _;
if libc::sigaction(libc::SIGUSR1, &new, ptr::null_mut()) != 0 {
err = Some(io::Error::last_os_error());
@@ -322,6 +404,16 @@ fn set_cloexec(fd: c_int, set: bool) -> io::Result<()> {
}
}
+fn set_nonblocking(fd: c_int, set: bool) -> io::Result<()> {
+ let status_flag = if set { libc::O_NONBLOCK } else { 0 };
+
+ unsafe {
+ cvt(libc::fcntl(fd, libc::F_SETFL, status_flag))?;
+ }
+
+ Ok(())
+}
+
fn cvt(t: c_int) -> io::Result<c_int> {
if t == -1 {
Err(io::Error::last_os_error())
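
The unix.rs hunk above teaches `Client::open` to recognize both `--jobserver-auth` styles: the `fifo:PATH` form emitted by GNU make 4.4 and the older `R,W` file-descriptor pair, now modeled as the two `Client` enum variants. A rough sketch of that classification, using a hypothetical `Auth` enum rather than the crate's internal variants:

    // Hypothetical helper (not part of the crate) mirroring the split above
    // between the two `--jobserver-auth` value styles.
    enum Auth<'a> {
        Fifo(&'a str),  // `--jobserver-auth=fifo:/path` (GNU make >= 4.4)
        Pipe(i32, i32), // `--jobserver-auth=R,W` (older pipe-based protocol)
    }

    fn classify(value: &str) -> Option<Auth<'_>> {
        if let Some(path) = value.strip_prefix("fifo:") {
            return Some(Auth::Fifo(path));
        }
        let (read, write) = value.split_once(',')?;
        Some(Auth::Pipe(read.parse().ok()?, write.parse().ok()?))
    }

In the real code the fifo path is simply opened read/write, so `configure` can skip the cloexec dance that the pipe variant still needs.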
diff --git a/vendor/jobserver/src/wasm.rs b/vendor/jobserver/src/wasm.rs
index b88a9d952..3793bd67c 100644
--- a/vendor/jobserver/src/wasm.rs
+++ b/vendor/jobserver/src/wasm.rs
@@ -59,6 +59,11 @@ impl Client {
);
}
+ pub fn available(&self) -> io::Result<usize> {
+ let lock = self.inner.count.lock().unwrap_or_else(|e| e.into_inner());
+ Ok(*lock)
+ }
+
pub fn configure(&self, _cmd: &mut Command) {
unreachable!();
}
diff --git a/vendor/jobserver/src/windows.rs b/vendor/jobserver/src/windows.rs
index d795c1cee..6791efea4 100644
--- a/vendor/jobserver/src/windows.rs
+++ b/vendor/jobserver/src/windows.rs
@@ -170,6 +170,26 @@ impl Client {
self.name.clone()
}
+ pub fn available(&self) -> io::Result<usize> {
+ // Can't read the value of a semaphore on Windows, so
+ // try to acquire without sleeping, since we can find out the
+ // old value on release. If acquisition fails, then available is 0.
+ unsafe {
+ let r = WaitForSingleObject(self.sem.0, 0);
+ if r != WAIT_OBJECT_0 {
+ Ok(0)
+ } else {
+ let mut prev: LONG = 0;
+ let r = ReleaseSemaphore(self.sem.0, 1, &mut prev);
+ if r != 0 {
+ Ok(prev as usize + 1)
+ } else {
+ Err(io::Error::last_os_error())
+ }
+ }
+ }
+ }
+
pub fn configure(&self, _cmd: &mut Command) {
// nothing to do here, we gave the name of our semaphore to the
// child above
diff --git a/vendor/jobserver/tests/client.rs b/vendor/jobserver/tests/client.rs
index 7f319c09c..2516b8ccf 100644
--- a/vendor/jobserver/tests/client.rs
+++ b/vendor/jobserver/tests/client.rs
@@ -10,7 +10,6 @@ use std::thread;
use futures::future::{self, Future};
use futures::stream::{self, Stream};
use jobserver::Client;
-use tempdir::TempDir;
use tokio_core::reactor::Core;
use tokio_process::CommandExt;
@@ -128,7 +127,7 @@ fn main() {
None => true,
})
.map(|test| {
- let td = t!(TempDir::new("foo"));
+ let td = t!(tempfile::tempdir());
let makefile = format!(
"\
all: export TEST_TO_RUN={}
diff --git a/vendor/jobserver/tests/make-as-a-client.rs b/vendor/jobserver/tests/make-as-a-client.rs
index e530211b0..4faac5b88 100644
--- a/vendor/jobserver/tests/make-as-a-client.rs
+++ b/vendor/jobserver/tests/make-as-a-client.rs
@@ -5,7 +5,6 @@ use std::net::{TcpListener, TcpStream};
use std::process::Command;
use jobserver::Client;
-use tempdir::TempDir;
macro_rules! t {
($e:expr) => {
@@ -37,7 +36,7 @@ fn main() {
}
let c = t!(Client::new(1));
- let td = TempDir::new("foo").unwrap();
+ let td = tempfile::tempdir().unwrap();
let prog = env::var("MAKE").unwrap_or_else(|_| "make".to_string());
diff --git a/vendor/jobserver/tests/server.rs b/vendor/jobserver/tests/server.rs
index fcdd12ca2..70ea218fc 100644
--- a/vendor/jobserver/tests/server.rs
+++ b/vendor/jobserver/tests/server.rs
@@ -8,7 +8,6 @@ use std::sync::Arc;
use std::thread;
use jobserver::Client;
-use tempdir::TempDir;
macro_rules! t {
($e:expr) => {
@@ -35,6 +34,30 @@ fn server_multiple() {
}
#[test]
+fn server_available() {
+ let c = t!(Client::new(10));
+ assert_eq!(c.available().unwrap(), 10);
+ let a = c.acquire().unwrap();
+ assert_eq!(c.available().unwrap(), 9);
+ drop(a);
+ assert_eq!(c.available().unwrap(), 10);
+}
+
+#[test]
+fn server_none_available() {
+ let c = t!(Client::new(2));
+ assert_eq!(c.available().unwrap(), 2);
+ let a = c.acquire().unwrap();
+ assert_eq!(c.available().unwrap(), 1);
+ let b = c.acquire().unwrap();
+ assert_eq!(c.available().unwrap(), 0);
+ drop(a);
+ assert_eq!(c.available().unwrap(), 1);
+ drop(b);
+ assert_eq!(c.available().unwrap(), 2);
+}
+
+#[test]
fn server_blocks() {
let c = t!(Client::new(1));
let a = c.acquire().unwrap();
@@ -56,7 +79,7 @@ fn server_blocks() {
#[test]
fn make_as_a_single_thread_client() {
let c = t!(Client::new(1));
- let td = TempDir::new("foo").unwrap();
+ let td = tempfile::tempdir().unwrap();
let prog = env::var("MAKE").unwrap_or_else(|_| "make".to_string());
let mut cmd = Command::new(prog);
@@ -110,7 +133,7 @@ foo
#[test]
fn make_as_a_multi_thread_client() {
let c = t!(Client::new(1));
- let td = TempDir::new("foo").unwrap();
+ let td = tempfile::tempdir().unwrap();
let prog = env::var("MAKE").unwrap_or_else(|_| "make".to_string());
let mut cmd = Command::new(prog);
diff --git a/vendor/libc/.cargo-checksum.json b/vendor/libc/.cargo-checksum.json
index 979f0dec2..e90619c72 100644
--- a/vendor/libc/.cargo-checksum.json
+++ b/vendor/libc/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"CONTRIBUTING.md":"bdc90b52cf803faac96e594069a86dd8ea150d5ba7fb3e6cadfc08dac4c7b0ce","Cargo.toml":"89e8938bf82dadf7854b0831c879ef1c66708a3917b0c48079aadbc2855b18b8","LICENSE-APACHE":"62c7a1e35f56406896d7aa7ca52d0cc0d272ac022b5d2796e7d6905db8a3636a","LICENSE-MIT":"a8d47ff51ca256f56a8932dba07660672dbfe3004257ca8de708aac1415937a1","README.md":"776affa26b66843a2b4f1a1c8f88d92f6461b74568911450fea717e9db6f877b","build.rs":"cecfa3f926ab4a9c87cd59cc0d687f98eceb6035b0dde185152e5206d090f8d0","rustfmt.toml":"eaa2ea84fc1ba0359b77680804903e07bb38d257ab11986b95b158e460f787b2","src/fixed_width_ints.rs":"7f986e5f5e68d25ef04d386fd2f640e8be8f15427a8d4a458ea01d26b8dca0ca","src/fuchsia/aarch64.rs":"378776a9e40766154a54c94c2a7b4675b5c302a38e6e42da99e67bfbaee60e56","src/fuchsia/align.rs":"ae1cf8f011a99737eabeb14ffff768e60f13b13363d7646744dbb0f443dab3d6","src/fuchsia/mod.rs":"e54e72e15cf4ab2df88da9ac3b69fa12e918b0bcfeadc81843b2a838b43d9547","src/fuchsia/no_align.rs":"303f3f1b255e0088b5715094353cf00476131d8e94e6aebb3f469557771c8b8a","src/fuchsia/x86_64.rs":"93a3632b5cf67d2a6bcb7dc0a558605252d5fe689e0f38d8aa2ec5852255ac87","src/hermit/aarch64.rs":"86048676e335944c37a63d0083d0f368ae10ceccefeed9debb3bbe08777fc682","src/hermit/mod.rs":"d3bfce41e4463d4be8020a2d063c9bfa8b665f45f1cc6cbf3163f5d01e7cb21f","src/hermit/x86_64.rs":"ab832b7524e5fb15c49ff7431165ab1a37dc4667ae0b58e8306f4c539bfa110c","src/lib.rs":"ce753ef318b300bbd441feabdd77d00322dfb6ce9eee8c78a38afe02b57aa4c0","src/macros.rs":"b457eb028b8e8ab3c24bb7292b874ad4e491edbb83594f6a3da024df5348c088","src/psp.rs":"dd31aabd46171d474ec5828372e28588935120e7355c90c105360d8fa9264c1c","src/sgx.rs":"16a95cdefc81c5ee00d8353a60db363c4cc3e0f75abcd5d0144723f2a306ed1b","src/solid/aarch64.rs":"a726e47f324adf73a4a0b67a2c183408d0cad105ae66acf36db37a42ab7f8707","src/solid/arm.rs":"e39a4f74ebbef3b97b8c95758ad741123d84ed3eb48d9cf4f1f4872097fc27fe","src/solid/mod.rs":"5f4151dca5132e4b4e4c23ab9737e12856dddbdc0ca3f7dbc004328ef3c8acde","src/switch.rs":"9da3dd39b3de45a7928789926e8572d00e1e11a39e6f7289a1349aadce90edba","src/unix/align.rs":"2cdc7c826ef7ae61f5171c5ae8c445a743d86f1a7f2d9d7e4ceeec56d6874f65","src/unix/bsd/apple/b32/align.rs":"ec833a747866fe19ca2d9b4d3c9ff0385faba5edf4bd0d15fa68884c40b0e26c","src/unix/bsd/apple/b32/mod.rs":"2546ad3eb6aecb95f916648bc63264117c92b4b4859532b34cb011e4c75a5a72","src/unix/bsd/apple/b64/aarch64/align.rs":"e8eb38d064b5fefec6f37d42873820a0483e7c758ed336cc59a7155455ca89c9","src/unix/bsd/apple/b64/aarch64/mod.rs":"44c217a4f263afe7a97435de9323d20a96c37836f899ca0925306d4b7e073c27","src/unix/bsd/apple/b64/align.rs":"ec833a747866fe19ca2d9b4d3c9ff0385faba5edf4bd0d15fa68884c40b0e26c","src/unix/bsd/apple/b64/mod.rs":"f5e278a1af7fb358891d1c9be4eb7e815aaca0c5cb738d0c3604ba2208a856f7","src/unix/bsd/apple/b64/x86_64/align.rs":"ec833a747866fe19ca2d9b4d3c9ff0385faba5edf4bd0d15fa68884c40b0e26c","src/unix/bsd/apple/b64/x86_64/mod.rs":"8c87c5855038aae5d433c8f5eb3b29b0a175879a0245342b3bfd83bdf4cfd936","src/unix/bsd/apple/mod.rs":"1cc76b056d5925aedae04ead411057dc5c6c9bd3948609fdb30164ac1fb6565e","src/unix/bsd/freebsdlike/dragonfly/errno.rs":"8295b8bb0dfd38d2cdb4d9192cdeeb534cc6c3b208170e64615fa3e0edb3e578","src/unix/bsd/freebsdlike/dragonfly/mod.rs":"8986a8c79bcadfbdb58ec1a72e1aff8ce9b341c9392d7b0b7449bddf6db59058","src/unix/bsd/freebsdlike/freebsd/aarch64.rs":"6c8e216385f53a4bf5f171749b57602fc34a4e4b160a44ca31c058cb0c8a2126","src/unix/bsd/freebsdlike/freebsd/arm.rs":"59d6a670eea562fb87686e243e0a84603d29a2028a3d4b3f99ccc01bd04d2f47","src/unix/bsd/freebsdlike/freebsd/
freebsd11/b64.rs":"9808d152c1196aa647f1b0f0cf84dac8c930da7d7f897a44975545e3d9d17681","src/unix/bsd/freebsdlike/freebsd/freebsd11/mod.rs":"badda6f0f7666f38345b1f4ca78817a47bc92bbdcdc3a1377f376f4e08c316e5","src/unix/bsd/freebsdlike/freebsd/freebsd12/b64.rs":"61cbe45f8499bedb168106b686d4f8239472f25c7553b069eec2afe197ff2df6","src/unix/bsd/freebsdlike/freebsd/freebsd12/mod.rs":"327700c5668be863f1fea205c82b0402e2936b883df63dda677716c30a50a284","src/unix/bsd/freebsdlike/freebsd/freebsd12/x86_64.rs":"2df36a7f122f6d6e5753cfb4d22e915cc80f6bc91c0161b3daae55a481bfd052","src/unix/bsd/freebsdlike/freebsd/freebsd13/b64.rs":"61cbe45f8499bedb168106b686d4f8239472f25c7553b069eec2afe197ff2df6","src/unix/bsd/freebsdlike/freebsd/freebsd13/mod.rs":"7f689e9a944c919834ec852b30c224f98e14b4b4087571adefeea5abad5d6374","src/unix/bsd/freebsdlike/freebsd/freebsd13/x86_64.rs":"2df36a7f122f6d6e5753cfb4d22e915cc80f6bc91c0161b3daae55a481bfd052","src/unix/bsd/freebsdlike/freebsd/freebsd14/b64.rs":"61cbe45f8499bedb168106b686d4f8239472f25c7553b069eec2afe197ff2df6","src/unix/bsd/freebsdlike/freebsd/freebsd14/mod.rs":"9b6fa1c3f5217f9482e0bed3bd0ea2905cdf56d628307cea48e49d8e00a6b09d","src/unix/bsd/freebsdlike/freebsd/freebsd14/x86_64.rs":"2df36a7f122f6d6e5753cfb4d22e915cc80f6bc91c0161b3daae55a481bfd052","src/unix/bsd/freebsdlike/freebsd/mod.rs":"26bda8cf730ab4868523a6ed7c513f51885cad30cdd02da905ff35b597576415","src/unix/bsd/freebsdlike/freebsd/powerpc.rs":"9ca3f82f88974e6db5569f2d76a5a3749b248a31747a6c0da5820492bdfeca42","src/unix/bsd/freebsdlike/freebsd/powerpc64.rs":"2dae3ecc87eac3b11657aa98915def55fc4b5c0de11fe26aae23329a54628a9a","src/unix/bsd/freebsdlike/freebsd/riscv64.rs":"fa4bed4c58cad24ba3395941c7fa6b11e089551a04714f9561078e400f5b2b62","src/unix/bsd/freebsdlike/freebsd/x86.rs":"c5005e3249eb7c93cfbac72a9e9272320d80ce7983da990ceb05a447f59a02c5","src/unix/bsd/freebsdlike/freebsd/x86_64/align.rs":"0e1f69a88fca1c32874b1daf5db3d446fefbe518dca497f096cc9168c39dde70","src/unix/bsd/freebsdlike/freebsd/x86_64/mod.rs":"51e4dd0c8ae247bb652feda5adad9333ea3bb30c750c3a3935e0b0e47d7803eb","src/unix/bsd/freebsdlike/mod.rs":"bd80ce2ff628ed5eaa856ebe8b6a8fb0e89d1c9728040b9ee6b967beb4bcf3a7","src/unix/bsd/mod.rs":"6f7e4f0affa04e5c26375875389a9891f69930bb56842cf20526aad3a0f64299","src/unix/bsd/netbsdlike/mod.rs":"34f60d73631f3c59936c87db1f62ddb8f693901c3cb199f7f370882e84d509fa","src/unix/bsd/netbsdlike/netbsd/aarch64.rs":"65dcb58d11e8d8028401a9d07ca3eb4cb4f053e04249cc877353449d84ccc4cb","src/unix/bsd/netbsdlike/netbsd/arm.rs":"58cdbb70b0d6f536551f0f3bb3725d2d75c4690db12c26c034e7d6ec4a924452","src/unix/bsd/netbsdlike/netbsd/mod.rs":"107a4aa396b8383c66e0ace2f941450b4b69146558cdc4d9fbe33eeab51760f1","src/unix/bsd/netbsdlike/netbsd/powerpc.rs":"ee7ff5d89d0ed22f531237b5059aa669df93a3b5c489fa641465ace8d405bf41","src/unix/bsd/netbsdlike/netbsd/sparc64.rs":"9489f4b3e4566f43bb12dfb92238960613dac7f6a45cc13068a8d152b902d7d9","src/unix/bsd/netbsdlike/netbsd/x86.rs":"20692320e36bfe028d1a34d16fe12ca77aa909cb02bda167376f98f1a09aefe7","src/unix/bsd/netbsdlike/netbsd/x86_64.rs":"1afe5ef46b14397cdd68664b5b232e4f5b035b6db1d4cf411c899d51ebca9f30","src/unix/bsd/netbsdlike/openbsd/aarch64.rs":"dd91931d373b7ecaf6e2de25adadee10d16fa9b12c2cbacdff3eb291e1ba36af","src/unix/bsd/netbsdlike/openbsd/arm.rs":"01580d261bc6447bb327a0d982181b7bdabfa066cee65a30373d3ced729ad307","src/unix/bsd/netbsdlike/openbsd/mips64.rs":"8532a189ae10c7d668d9d4065da8b05d124e09bd39442c9f74a7f231c43eca48","src/unix/bsd/netbsdlike/openbsd/mod.rs":"38b7d65a86701a75a9047a3ef3b006f09ffbb33ab2312174bce
e889efb74e100","src/unix/bsd/netbsdlike/openbsd/powerpc.rs":"01580d261bc6447bb327a0d982181b7bdabfa066cee65a30373d3ced729ad307","src/unix/bsd/netbsdlike/openbsd/powerpc64.rs":"1dd5449dd1fd3d51e30ffdeeaece91d0aaf05c710e0ac699fecc5461cfa2c28e","src/unix/bsd/netbsdlike/openbsd/riscv64.rs":"1dd5449dd1fd3d51e30ffdeeaece91d0aaf05c710e0ac699fecc5461cfa2c28e","src/unix/bsd/netbsdlike/openbsd/sparc64.rs":"d04fd287afbaa2c5df9d48c94e8374a532a3ba491b424ddf018270c7312f4085","src/unix/bsd/netbsdlike/openbsd/x86.rs":"6f7f5c4fde2a2259eb547890cbd86570cea04ef85347d7569e94e679448bec87","src/unix/bsd/netbsdlike/openbsd/x86_64.rs":"d31db31630289c85af3339dbe357998a21ca584cbae31607448fe2cf7675a4e1","src/unix/haiku/b32.rs":"a2efdbf7158a6da341e1db9176b0ab193ba88b449616239ed95dced11f54d87b","src/unix/haiku/b64.rs":"ff8115367d3d7d354f792d6176dfaaa26353f57056197b563bf4681f91ff7985","src/unix/haiku/mod.rs":"14171bbff41ad7f112198064b80f7b86a78c2c36c689d5cc04a748f2186c6bf3","src/unix/haiku/native.rs":"dbfcbf4954a79d1df2ff58e0590bbcb8c57dfc7a32392aa73ee4726b66bd6cc8","src/unix/haiku/x86_64.rs":"3ec3aeeb7ed208b8916f3e32d42bfd085ff5e16936a1a35d9a52789f043b7237","src/unix/hermit/aarch64.rs":"86048676e335944c37a63d0083d0f368ae10ceccefeed9debb3bbe08777fc682","src/unix/hermit/mod.rs":"859814f5df89e28fd4b345db399d181e11e7ed413841b6ff703a1fcbdbf013ae","src/unix/hermit/x86_64.rs":"ab832b7524e5fb15c49ff7431165ab1a37dc4667ae0b58e8306f4c539bfa110c","src/unix/linux_like/android/b32/arm.rs":"433c1530f602cc5ed26610c58055dde0c4ceea5e00150063b24ddc60768332a4","src/unix/linux_like/android/b32/mod.rs":"7c173e0375119bf06a3081652faede95e5bcd6858e7576b7533d037978737c8f","src/unix/linux_like/android/b32/x86/align.rs":"812914e4241df82e32b12375ca3374615dc3a4bdd4cf31f0423c5815320c0dab","src/unix/linux_like/android/b32/x86/mod.rs":"8388bd3a0fcb5636bf965eee6dc95ae6860b85a2b555b387c868aa4d4e01ec89","src/unix/linux_like/android/b64/aarch64/align.rs":"2179c3b1608fa4bf68840482bfc2b2fa3ee2faf6fcae3770f9e505cddca35c7b","src/unix/linux_like/android/b64/aarch64/int128.rs":"1735f6f5c56770d20dd426442f09724d9b2052b46a7cd82f23f3288a4a7276de","src/unix/linux_like/android/b64/aarch64/mod.rs":"ef230d49fd0d182adf2dae6f8e10babf18d72259d65980bf1c4c2dc8a4f84501","src/unix/linux_like/android/b64/mod.rs":"71e4fcbe952bfa4a5f9022f3972e906917b38f729b9d8ef57cd5d179104894ac","src/unix/linux_like/android/b64/riscv64/align.rs":"0bf138f84e5327d8339bcd4adf071a6832b516445e597552c82bbd881095e3a8","src/unix/linux_like/android/b64/riscv64/mod.rs":"80e9f93fed838a48b4e2e8d77b95c72cfd7c0647bcce63851555c5ad16dad143","src/unix/linux_like/android/b64/x86_64/align.rs":"7169d07a9fd4716f7512719aec9fda5d8bed306dc0720ffc1b21696c9951e3c6","src/unix/linux_like/android/b64/x86_64/mod.rs":"e10d19bea39f719723ab6666a5ddbd378b6958769441c5904629e1df173b1dc2","src/unix/linux_like/android/mod.rs":"fbfcd29a13b58a89841ea23737b45d1a3d69e8ec72bd168755297c4d0eb701f3","src/unix/linux_like/emscripten/align.rs":"86c95cbed7a7161b1f23ee06843e7b0e2340ad92b2cb86fe2a8ef3e0e8c36216","src/unix/linux_like/emscripten/mod.rs":"6ef4652dfb94e3c58aed5133ece982ad30569d46b6b1054552cd61905fa61690","src/unix/linux_like/emscripten/no_align.rs":"0128e4aa721a9902754828b61b5ec7d8a86619983ed1e0544a85d35b1051fad6","src/unix/linux_like/linux/align.rs":"d6c259942c8e843373accd180fc8f4f45f03544dfd21b93a8d02641ead3ef63e","src/unix/linux_like/linux/arch/generic/mod.rs":"46dd6634e564fb1b60c8a2d1018ef8f365d1a8ed26f162c99528922d06d14134","src/unix/linux_like/linux/arch/mips/mod.rs":"2d166054a586bb4bf6e4a4ba35f7574907b217225eff8f1a43adc4277e142
460","src/unix/linux_like/linux/arch/mod.rs":"466a29622e47c6c7f1500682b2eb17f5566dd81b322cd6348f0fdd355cec593a","src/unix/linux_like/linux/arch/powerpc/mod.rs":"3f6da7b0fa7b394c7d4eea2bb3caa7a7729ab0d6c1491fef02206a912c41b815","src/unix/linux_like/linux/arch/sparc/mod.rs":"91593ec0440f1dd8f8e612028f432c44c14089286e2aca50e10511ab942db8c3","src/unix/linux_like/linux/gnu/align.rs":"e4a3c27fe20a57b8d612c34cb05bc70646edb5cec7251957315afa53a7b9f936","src/unix/linux_like/linux/gnu/b32/arm/align.rs":"6ec0eb3ee93f7ae99fd714b4deabfb5e97fbcefd8c26f5a45fb8e7150899cdeb","src/unix/linux_like/linux/gnu/b32/arm/mod.rs":"5bd3f6b3484e049ddaac95f411b0d82cbf1cd28e6a5defbc927bd917f5f7d299","src/unix/linux_like/linux/gnu/b32/m68k/align.rs":"8faa92f77a9232c035418d45331774e64a9a841d99c91791570a203bf2b45bcb","src/unix/linux_like/linux/gnu/b32/m68k/mod.rs":"a2a0a9400dae44086ebf579e0448e0676d4a3214d1ae7d13a024857251e23b6b","src/unix/linux_like/linux/gnu/b32/mips/align.rs":"429fb5e005cb7143602d430098b6ebfb7d360685b194f333dfd587472ae954ee","src/unix/linux_like/linux/gnu/b32/mips/mod.rs":"6b9a5dac6f937ddc1453e808e3c43502c87143332df9e43ac64fb8b1eda6c116","src/unix/linux_like/linux/gnu/b32/mod.rs":"8da281da578cdee972e952b118b903b370320897a7e335342a15e1359864bef2","src/unix/linux_like/linux/gnu/b32/powerpc.rs":"5c5d90326b54b57b98eff4745fe7a3fb02f053b2dc782241a73e807b491936a3","src/unix/linux_like/linux/gnu/b32/riscv32/align.rs":"d321491612be8d5c61b6ec2dc0111beb3a22e58803f99cd37543efe86621b119","src/unix/linux_like/linux/gnu/b32/riscv32/mod.rs":"5e7c1e29aeb82fc422f45b73fb0cf3d13d0902300f9150d2755a9074f8d96999","src/unix/linux_like/linux/gnu/b32/sparc/align.rs":"21adbed27df73e2d1ed934aaf733a643003d7baf2bde9c48ea440895bcca6d41","src/unix/linux_like/linux/gnu/b32/sparc/mod.rs":"80894eece66e9348f45d1b07ad37c757ea694bbd10ed49d3f920b34e9f51a9a3","src/unix/linux_like/linux/gnu/b32/x86/align.rs":"e4bafdc4a519a7922a81b37a62bbfd1177a2f620890eef8f1fbc47162e9eb413","src/unix/linux_like/linux/gnu/b32/x86/mod.rs":"c703cc5e9de2dc31d9e5831bfb6f354d6e3518b2ae02263f68a9a70f1c0167e2","src/unix/linux_like/linux/gnu/b64/aarch64/align.rs":"ea39d5fd8ca5a71314127d1e1f542bca34ac566eac9a95662076d91ea4bee548","src/unix/linux_like/linux/gnu/b64/aarch64/ilp32.rs":"21a21503ef2e095f4371044915d4bfb07a8578011cb5c713cd9f45947b0b5730","src/unix/linux_like/linux/gnu/b64/aarch64/int128.rs":"1735f6f5c56770d20dd426442f09724d9b2052b46a7cd82f23f3288a4a7276de","src/unix/linux_like/linux/gnu/b64/aarch64/lp64.rs":"e78c3cd197f44832338b414d1a9bc0d194f44c74db77bd7bf830c1fff62b2690","src/unix/linux_like/linux/gnu/b64/aarch64/mod.rs":"666beae35371cb54a4ad091764e0e3ca4983d5205179c119a8ff97d3ae301869","src/unix/linux_like/linux/gnu/b64/loongarch64/align.rs":"6616c38bf8cab53034dce9f968adae8fb7771334445a93876d000cfd08f117a8","src/unix/linux_like/linux/gnu/b64/loongarch64/mod.rs":"17e9478b6a5830f6b8f6bea4ccab712cfd1972cdfb43b97408a068e4ea924106","src/unix/linux_like/linux/gnu/b64/mips64/align.rs":"7169d07a9fd4716f7512719aec9fda5d8bed306dc0720ffc1b21696c9951e3c6","src/unix/linux_like/linux/gnu/b64/mips64/mod.rs":"80b4b97a41564290c510e68a1fb20cfd8424206f010e71a596f12877de886a71","src/unix/linux_like/linux/gnu/b64/mod.rs":"3c6555f30a7a8852757b31a542ea73fb6a16a6e27e838397e819278ad56e57a4","src/unix/linux_like/linux/gnu/b64/powerpc64/align.rs":"e29c4868bbecfa4a6cd8a2ad06193f3bbc78a468cc1dc9df83f002f1268130d9","src/unix/linux_like/linux/gnu/b64/powerpc64/mod.rs":"a595e37c2325ceb40ef66c634bd3c255ad184a1d70ff8025e98a075f0ec67704","src/unix/linux_like/linux/gnu/b64/riscv64/align.rs":"d
321491612be8d5c61b6ec2dc0111beb3a22e58803f99cd37543efe86621b119","src/unix/linux_like/linux/gnu/b64/riscv64/mod.rs":"ef4b13477ffd8532fb6705ca3fa63a1f13e8d19ee39b083c5355dfce430c1a5b","src/unix/linux_like/linux/gnu/b64/s390x.rs":"788fde4fa1919859cc028b59da31de00449edd2b2c1530ae76134beac418b73c","src/unix/linux_like/linux/gnu/b64/sparc64/align.rs":"e29c4868bbecfa4a6cd8a2ad06193f3bbc78a468cc1dc9df83f002f1268130d9","src/unix/linux_like/linux/gnu/b64/sparc64/mod.rs":"c4fa0ede3f78b21a9982667922cccd0681bee3cb6d42208ea9958f65e93d6308","src/unix/linux_like/linux/gnu/b64/x86_64/align.rs":"62e822478356db4a73b6bbd1b36d825b893939ab4b308ec11b0578bcc4b49769","src/unix/linux_like/linux/gnu/b64/x86_64/mod.rs":"e37e0421290b152fe508883181c41225e09dd5452a6b085e8d807b3b54823028","src/unix/linux_like/linux/gnu/b64/x86_64/not_x32.rs":"c1b6345ce14f67d1b2e2f7f2c0ff9a074c07acbd348df69cb4558bda8c8fb9ae","src/unix/linux_like/linux/gnu/b64/x86_64/x32.rs":"3f4d2aeadb7d2620cad09564abdbfc5cf02eeb5a27f2bab8a4e9b4bdbdb258a5","src/unix/linux_like/linux/gnu/mod.rs":"e31aa4bd147c83d05dcd13baca4b9f676320a7713ff50de08b51d0c88f8241a3","src/unix/linux_like/linux/gnu/no_align.rs":"9cd223135de75315840ff9c3fd5441ba1cb632b96b5c85a76f8316c86653db25","src/unix/linux_like/linux/mod.rs":"ddd008ff6304dca5d6fe9b452eb7a69745098bce47834d0999abdedd5978c2b3","src/unix/linux_like/linux/musl/b32/arm/align.rs":"3e8ac052c1043764776b54c93ba4260e061df998631737a897d9d47d54f7b80c","src/unix/linux_like/linux/musl/b32/arm/mod.rs":"f5b217a93f99c2852f7fd1459f529798372fa7df84ee0cfd3d8cdd5b2021b8cf","src/unix/linux_like/linux/musl/b32/hexagon.rs":"226a8b64ce9c75abbbee6d2dceb0b44f7b6c750c4102ebd4d015194afee6666e","src/unix/linux_like/linux/musl/b32/mips/align.rs":"429fb5e005cb7143602d430098b6ebfb7d360685b194f333dfd587472ae954ee","src/unix/linux_like/linux/musl/b32/mips/mod.rs":"16a7a03d998a5db11be9ee81525c7faec4623383260e8bc125b1c53a050fde75","src/unix/linux_like/linux/musl/b32/mod.rs":"580e27c5ce3344df686f1ffc08fdfa2c282d1ceb623d778c50d210d4bd65ec7e","src/unix/linux_like/linux/musl/b32/powerpc.rs":"dc52adc264c34bce80753d6bd064e8fc4b8237fa1e5c5315ccb6c72df74c2813","src/unix/linux_like/linux/musl/b32/riscv32/align.rs":"efd2accf33b87de7c7547903359a5da896edc33cd6c719552c7474b60d4a5d48","src/unix/linux_like/linux/musl/b32/riscv32/mod.rs":"e57dc5562553aab6d0765e0ec266254aa52975f8757bfe97e0c6028fa7d5d37c","src/unix/linux_like/linux/musl/b32/x86/align.rs":"08e77fbd7435d7dec2ff56932433bece3f02e47ce810f89004a275a86d39cbe1","src/unix/linux_like/linux/musl/b32/x86/mod.rs":"7a1586f77bb693f0b319ec720c35963da056287fc42f8e2ccf1d5b2bcccf4fd6","src/unix/linux_like/linux/musl/b64/aarch64/align.rs":"6ba32725d24d7d8e6aa111f3b57aafa318f83b606abe96561329151829821133","src/unix/linux_like/linux/musl/b64/aarch64/int128.rs":"1735f6f5c56770d20dd426442f09724d9b2052b46a7cd82f23f3288a4a7276de","src/unix/linux_like/linux/musl/b64/aarch64/mod.rs":"31e75179cbb4e26425b3f5b052e358f593153da662884655e60801d852e55dc2","src/unix/linux_like/linux/musl/b64/mips64.rs":"9a5d29f666332bb056d0e2951e9de989aa1dc016075f009db3f2f628e0cdda8c","src/unix/linux_like/linux/musl/b64/mod.rs":"8c10627bd582cb272514e7350ae4743a65d489356eae039d2e7e55cd533fbbc8","src/unix/linux_like/linux/musl/b64/powerpc64.rs":"455dc0ffa55afc1db6ffaf461f6f2a7b49d31658bfebe0bb4efac5967a6f956c","src/unix/linux_like/linux/musl/b64/riscv64/align.rs":"d321491612be8d5c61b6ec2dc0111beb3a22e58803f99cd37543efe86621b119","src/unix/linux_like/linux/musl/b64/riscv64/mod.rs":"42d4b6d36807f37759094a732a321080cccdf498b174d632cebba147051de294","src/u
nix/linux_like/linux/musl/b64/s390x.rs":"d8a4fdfea0960ec284cae4facb8b0fb342e8aa41544cffacdcaf08c5a92a43f8","src/unix/linux_like/linux/musl/b64/x86_64/align.rs":"77309276ad7a42cbe59ca381f23590b7a143aded05555b34a5b307b808cbca6e","src/unix/linux_like/linux/musl/b64/x86_64/mod.rs":"7a877cd23b64be66d28e6b8dddae32d59a88d69115637539daf19381f4e39330","src/unix/linux_like/linux/musl/mod.rs":"8d8b50a0bf7ec53bd4d2ea92e8bfae14529f0beb3f22a65b55623f7086fee8ac","src/unix/linux_like/linux/no_align.rs":"da2a8721becaaaa528781f97f5d9aae6a982ae5d4f5f6d2ffc0150bed72319b3","src/unix/linux_like/linux/non_exhaustive.rs":"181a05bf94fdb911db83ce793b993bd6548a4115b306a7ef3c10f745a8fea3e9","src/unix/linux_like/linux/uclibc/align.rs":"9ed16138d8e439bd90930845a65eafa7ebd67366e6bf633936d44014f6e4c959","src/unix/linux_like/linux/uclibc/arm/align.rs":"e4a3c27fe20a57b8d612c34cb05bc70646edb5cec7251957315afa53a7b9f936","src/unix/linux_like/linux/uclibc/arm/mod.rs":"bf2dcf0a468f386899f572324f3bc14be1974f570afdfff1075ca6c5dd57710d","src/unix/linux_like/linux/uclibc/arm/no_align.rs":"9cd223135de75315840ff9c3fd5441ba1cb632b96b5c85a76f8316c86653db25","src/unix/linux_like/linux/uclibc/mips/mips32/align.rs":"e4a3c27fe20a57b8d612c34cb05bc70646edb5cec7251957315afa53a7b9f936","src/unix/linux_like/linux/uclibc/mips/mips32/mod.rs":"d2fc251754458b697ef6e599f0a670ebca18f61e4026bf681be4e2baa896b9df","src/unix/linux_like/linux/uclibc/mips/mips32/no_align.rs":"9cd223135de75315840ff9c3fd5441ba1cb632b96b5c85a76f8316c86653db25","src/unix/linux_like/linux/uclibc/mips/mips64/align.rs":"a7bdcb18a37a2d91e64d5fad83ea3edc78f5412adb28f77ab077dbb26dd08b2d","src/unix/linux_like/linux/uclibc/mips/mips64/mod.rs":"256a428290a560163ef7dc7d18b27bd3c6ce9748a0f28d5dc7f82203ee228220","src/unix/linux_like/linux/uclibc/mips/mips64/no_align.rs":"4a18e3875698c85229599225ac3401a2a40da87e77b2ad4ef47c6fcd5a24ed30","src/unix/linux_like/linux/uclibc/mips/mod.rs":"367ec5483ad317e6ccba1ac0888da6cf088a8d32689214cc8d16129aa692260c","src/unix/linux_like/linux/uclibc/mod.rs":"1c3d25cddcfefa2bd17bdc81550826be31a08eef235e13f825f169a5029c8bca","src/unix/linux_like/linux/uclibc/no_align.rs":"3f28637046524618adaa1012e26cb7ffe94b9396e6b518cccdc69d59f274d709","src/unix/linux_like/linux/uclibc/x86_64/l4re.rs":"024eba5753e852dbdd212427351affe7e83f9916c1864bce414d7aa2618f192e","src/unix/linux_like/linux/uclibc/x86_64/mod.rs":"420dbea99e99091f333641e202960fa4bed0733de2a834e610708555be6bab4c","src/unix/linux_like/linux/uclibc/x86_64/other.rs":"42c3f71e58cabba373f6a55a623f3c31b85049eb64824c09c2b082b3b2d6a0a8","src/unix/linux_like/mod.rs":"03ae2e1f8113ed650e1c9691e715254dd6b6d13cc829b639d5857336ecd11b24","src/unix/mod.rs":"4647dc713fb4208df76fe6009b9716eef1d9e09a77f6b2f1b793d63537b4c670","src/unix/newlib/aarch64/mod.rs":"bac93836a9a57b2c710f32f852e92a4d11ad6759ab0fb6ad33e71d60e53278af","src/unix/newlib/align.rs":"28aaf87fafbc6b312622719d472d8cf65f9e5467d15339df5f73e66d8502b28a","src/unix/newlib/arm/mod.rs":"cbba6b3e957eceb496806e60de8725a23ff3fa0015983b4b4fa27b233732b526","src/unix/newlib/espidf/mod.rs":"816f235f4aa4baabba7f2606b31d0fdb03988c52194c966728de8690bf17299d","src/unix/newlib/generic.rs":"eab066d9f0a0f3eb53cc1073d01496bba0110989e1f6a59838afd19f870cd599","src/unix/newlib/horizon/mod.rs":"7cc5cc120437421db139bfa6a90b18168cd3070bdd0f5be96d40fe4c996f3ca1","src/unix/newlib/mod.rs":"494e56628d4408bf66ad30ff71fbd21bc33d9037935c411dff7bf73dd3f1070b","src/unix/newlib/no_align.rs":"e0743b2179495a9514bc3a4d1781e492878c4ec834ee0085d0891dd1712e82fb","src/unix/newlib/powerpc/mod.rs":"0202ffd57
caf75b6afa2c9717750ffb96e375ac33df0ae9609a3f831be393b67","src/unix/no_align.rs":"c06e95373b9088266e0b14bba0954eef95f93fb2b01d951855e382d22de78e53","src/unix/redox/mod.rs":"c8afea39f81e73ad4e9bf8a8f25138a200e616048e189debad80a6b7e08f710f","src/unix/solarish/compat.rs":"00f1ee3faec9da69204e42f025f6735dd13d894071a154425dcc43ecbdd06e7f","src/unix/solarish/illumos.rs":"cd93c2d84722bbf9933a92842a8998eb0b2afc962f50bc2546ad127b82809fa7","src/unix/solarish/mod.rs":"eace25beaa4874bfc2451f8136efc261f9dfb7a30caab3c97c708068d5431bbf","src/unix/solarish/solaris.rs":"41b350a89ddf01cd12a10f93640f92be53be0b0d976021cdc08da17bf3e72edf","src/unix/solarish/x86.rs":"e86e806df0caed72765040eaa2f3c883198d1aa91508540adf9b7008c77f522e","src/unix/solarish/x86_64.rs":"ec2b01f194eb8a6a27133c57681da195a949e03098f3ea1e847227a9c09ef5fc","src/unix/solarish/x86_common.rs":"ac869d9c3c95645c22460468391eb1982023c3a8e02b9e06a72e3aef3d5f1eac","src/vxworks/aarch64.rs":"98f0afdc511cd02557e506c21fed6737585490a1dce7a9d4941d08c437762b99","src/vxworks/arm.rs":"acb7968ce99fe3f4abdf39d98f8133d21a4fba435b8ef7084777cb181d788e88","src/vxworks/mod.rs":"aea3da66f2140f2a82dfc9c58f6e6531d2dd9c15ea696e0f95a0d4a2a187b5b6","src/vxworks/powerpc.rs":"acb7968ce99fe3f4abdf39d98f8133d21a4fba435b8ef7084777cb181d788e88","src/vxworks/powerpc64.rs":"98f0afdc511cd02557e506c21fed6737585490a1dce7a9d4941d08c437762b99","src/vxworks/x86.rs":"552f007f38317620b23889cb7c49d1d115841252439060122f52f434fbc6e5ba","src/vxworks/x86_64.rs":"018d92be3ad628a129eff9f2f5dfbc0883d8b8e5f2fa917b900a7f98ed6b514a","src/wasi.rs":"3f6219f036ac1d917ab5fa7323e3770857f566bc4189a9d4c4ea8ca6517b8953","src/windows/gnu/align.rs":"b2c13ec1b9f3b39a75c452c80c951dff9d0215e31d77e883b4502afb31794647","src/windows/gnu/mod.rs":"3c8c7edb7cdf5d0c44af936db2a94869585c69dfabeef30571b4f4e38375767a","src/windows/mod.rs":"7fe35c1f5e6272acfba059a1ad7a78b144f5bc63456f6377d58bbc42cc23f509","src/windows/msvc/mod.rs":"c068271e00fca6b62bc4bf44bcf142cfc38caeded9b6c4e01d1ceef3ccf986f4","tests/const_fn.rs":"cb75a1f0864f926aebe79118fc34d51a0d1ade2c20a394e7774c7e545f21f1f4"},"package":"db6d7e329c562c5dfab7a46a2afabc8b987ab9a4834c9d1ca04dc54c1546cef8"} \ No newline at end of file
+{"files":{"CONTRIBUTING.md":"bdc90b52cf803faac96e594069a86dd8ea150d5ba7fb3e6cadfc08dac4c7b0ce","Cargo.toml":"cec0b42b20e776ac9165b5a5eedfbd8fc033700cacabfa4209c60d35833e42f1","LICENSE-APACHE":"62c7a1e35f56406896d7aa7ca52d0cc0d272ac022b5d2796e7d6905db8a3636a","LICENSE-MIT":"a8d47ff51ca256f56a8932dba07660672dbfe3004257ca8de708aac1415937a1","README.md":"776affa26b66843a2b4f1a1c8f88d92f6461b74568911450fea717e9db6f877b","build.rs":"cecfa3f926ab4a9c87cd59cc0d687f98eceb6035b0dde185152e5206d090f8d0","rustfmt.toml":"eaa2ea84fc1ba0359b77680804903e07bb38d257ab11986b95b158e460f787b2","src/fixed_width_ints.rs":"7f986e5f5e68d25ef04d386fd2f640e8be8f15427a8d4a458ea01d26b8dca0ca","src/fuchsia/aarch64.rs":"378776a9e40766154a54c94c2a7b4675b5c302a38e6e42da99e67bfbaee60e56","src/fuchsia/align.rs":"ae1cf8f011a99737eabeb14ffff768e60f13b13363d7646744dbb0f443dab3d6","src/fuchsia/mod.rs":"1b4e6c34b01b4c970f4d4c044d1642020e0fc2ea1b8442e8f1a9b698569ab1f6","src/fuchsia/no_align.rs":"303f3f1b255e0088b5715094353cf00476131d8e94e6aebb3f469557771c8b8a","src/fuchsia/x86_64.rs":"93a3632b5cf67d2a6bcb7dc0a558605252d5fe689e0f38d8aa2ec5852255ac87","src/hermit/aarch64.rs":"86048676e335944c37a63d0083d0f368ae10ceccefeed9debb3bbe08777fc682","src/hermit/mod.rs":"d3bfce41e4463d4be8020a2d063c9bfa8b665f45f1cc6cbf3163f5d01e7cb21f","src/hermit/x86_64.rs":"ab832b7524e5fb15c49ff7431165ab1a37dc4667ae0b58e8306f4c539bfa110c","src/lib.rs":"ce753ef318b300bbd441feabdd77d00322dfb6ce9eee8c78a38afe02b57aa4c0","src/macros.rs":"b457eb028b8e8ab3c24bb7292b874ad4e491edbb83594f6a3da024df5348c088","src/psp.rs":"dd31aabd46171d474ec5828372e28588935120e7355c90c105360d8fa9264c1c","src/sgx.rs":"16a95cdefc81c5ee00d8353a60db363c4cc3e0f75abcd5d0144723f2a306ed1b","src/solid/aarch64.rs":"a726e47f324adf73a4a0b67a2c183408d0cad105ae66acf36db37a42ab7f8707","src/solid/arm.rs":"e39a4f74ebbef3b97b8c95758ad741123d84ed3eb48d9cf4f1f4872097fc27fe","src/solid/mod.rs":"5f4151dca5132e4b4e4c23ab9737e12856dddbdc0ca3f7dbc004328ef3c8acde","src/switch.rs":"9da3dd39b3de45a7928789926e8572d00e1e11a39e6f7289a1349aadce90edba","src/unix/align.rs":"2cdc7c826ef7ae61f5171c5ae8c445a743d86f1a7f2d9d7e4ceeec56d6874f65","src/unix/bsd/apple/b32/align.rs":"ec833a747866fe19ca2d9b4d3c9ff0385faba5edf4bd0d15fa68884c40b0e26c","src/unix/bsd/apple/b32/mod.rs":"2546ad3eb6aecb95f916648bc63264117c92b4b4859532b34cb011e4c75a5a72","src/unix/bsd/apple/b64/aarch64/align.rs":"e8eb38d064b5fefec6f37d42873820a0483e7c758ed336cc59a7155455ca89c9","src/unix/bsd/apple/b64/aarch64/mod.rs":"44c217a4f263afe7a97435de9323d20a96c37836f899ca0925306d4b7e073c27","src/unix/bsd/apple/b64/align.rs":"ec833a747866fe19ca2d9b4d3c9ff0385faba5edf4bd0d15fa68884c40b0e26c","src/unix/bsd/apple/b64/mod.rs":"f5e278a1af7fb358891d1c9be4eb7e815aaca0c5cb738d0c3604ba2208a856f7","src/unix/bsd/apple/b64/x86_64/align.rs":"ec833a747866fe19ca2d9b4d3c9ff0385faba5edf4bd0d15fa68884c40b0e26c","src/unix/bsd/apple/b64/x86_64/mod.rs":"8c87c5855038aae5d433c8f5eb3b29b0a175879a0245342b3bfd83bdf4cfd936","src/unix/bsd/apple/mod.rs":"1cc76b056d5925aedae04ead411057dc5c6c9bd3948609fdb30164ac1fb6565e","src/unix/bsd/freebsdlike/dragonfly/errno.rs":"8295b8bb0dfd38d2cdb4d9192cdeeb534cc6c3b208170e64615fa3e0edb3e578","src/unix/bsd/freebsdlike/dragonfly/mod.rs":"8986a8c79bcadfbdb58ec1a72e1aff8ce9b341c9392d7b0b7449bddf6db59058","src/unix/bsd/freebsdlike/freebsd/aarch64.rs":"6c8e216385f53a4bf5f171749b57602fc34a4e4b160a44ca31c058cb0c8a2126","src/unix/bsd/freebsdlike/freebsd/arm.rs":"59d6a670eea562fb87686e243e0a84603d29a2028a3d4b3f99ccc01bd04d2f47","src/unix/bsd/freebsdlike/freebsd/
freebsd11/b64.rs":"9808d152c1196aa647f1b0f0cf84dac8c930da7d7f897a44975545e3d9d17681","src/unix/bsd/freebsdlike/freebsd/freebsd11/mod.rs":"badda6f0f7666f38345b1f4ca78817a47bc92bbdcdc3a1377f376f4e08c316e5","src/unix/bsd/freebsdlike/freebsd/freebsd12/b64.rs":"61cbe45f8499bedb168106b686d4f8239472f25c7553b069eec2afe197ff2df6","src/unix/bsd/freebsdlike/freebsd/freebsd12/mod.rs":"327700c5668be863f1fea205c82b0402e2936b883df63dda677716c30a50a284","src/unix/bsd/freebsdlike/freebsd/freebsd12/x86_64.rs":"2df36a7f122f6d6e5753cfb4d22e915cc80f6bc91c0161b3daae55a481bfd052","src/unix/bsd/freebsdlike/freebsd/freebsd13/b64.rs":"61cbe45f8499bedb168106b686d4f8239472f25c7553b069eec2afe197ff2df6","src/unix/bsd/freebsdlike/freebsd/freebsd13/mod.rs":"7f689e9a944c919834ec852b30c224f98e14b4b4087571adefeea5abad5d6374","src/unix/bsd/freebsdlike/freebsd/freebsd13/x86_64.rs":"2df36a7f122f6d6e5753cfb4d22e915cc80f6bc91c0161b3daae55a481bfd052","src/unix/bsd/freebsdlike/freebsd/freebsd14/b64.rs":"61cbe45f8499bedb168106b686d4f8239472f25c7553b069eec2afe197ff2df6","src/unix/bsd/freebsdlike/freebsd/freebsd14/mod.rs":"9b6fa1c3f5217f9482e0bed3bd0ea2905cdf56d628307cea48e49d8e00a6b09d","src/unix/bsd/freebsdlike/freebsd/freebsd14/x86_64.rs":"2df36a7f122f6d6e5753cfb4d22e915cc80f6bc91c0161b3daae55a481bfd052","src/unix/bsd/freebsdlike/freebsd/mod.rs":"26bda8cf730ab4868523a6ed7c513f51885cad30cdd02da905ff35b597576415","src/unix/bsd/freebsdlike/freebsd/powerpc.rs":"9ca3f82f88974e6db5569f2d76a5a3749b248a31747a6c0da5820492bdfeca42","src/unix/bsd/freebsdlike/freebsd/powerpc64.rs":"2dae3ecc87eac3b11657aa98915def55fc4b5c0de11fe26aae23329a54628a9a","src/unix/bsd/freebsdlike/freebsd/riscv64.rs":"fa4bed4c58cad24ba3395941c7fa6b11e089551a04714f9561078e400f5b2b62","src/unix/bsd/freebsdlike/freebsd/x86.rs":"c5005e3249eb7c93cfbac72a9e9272320d80ce7983da990ceb05a447f59a02c5","src/unix/bsd/freebsdlike/freebsd/x86_64/align.rs":"0e1f69a88fca1c32874b1daf5db3d446fefbe518dca497f096cc9168c39dde70","src/unix/bsd/freebsdlike/freebsd/x86_64/mod.rs":"51e4dd0c8ae247bb652feda5adad9333ea3bb30c750c3a3935e0b0e47d7803eb","src/unix/bsd/freebsdlike/mod.rs":"bd80ce2ff628ed5eaa856ebe8b6a8fb0e89d1c9728040b9ee6b967beb4bcf3a7","src/unix/bsd/mod.rs":"0c672b075b5616fca2cc56c00ee31c3f554dcbd2e88a7c2ba1437aa6e5604319","src/unix/bsd/netbsdlike/mod.rs":"34f60d73631f3c59936c87db1f62ddb8f693901c3cb199f7f370882e84d509fa","src/unix/bsd/netbsdlike/netbsd/aarch64.rs":"65dcb58d11e8d8028401a9d07ca3eb4cb4f053e04249cc877353449d84ccc4cb","src/unix/bsd/netbsdlike/netbsd/arm.rs":"58cdbb70b0d6f536551f0f3bb3725d2d75c4690db12c26c034e7d6ec4a924452","src/unix/bsd/netbsdlike/netbsd/mod.rs":"107a4aa396b8383c66e0ace2f941450b4b69146558cdc4d9fbe33eeab51760f1","src/unix/bsd/netbsdlike/netbsd/powerpc.rs":"ee7ff5d89d0ed22f531237b5059aa669df93a3b5c489fa641465ace8d405bf41","src/unix/bsd/netbsdlike/netbsd/sparc64.rs":"9489f4b3e4566f43bb12dfb92238960613dac7f6a45cc13068a8d152b902d7d9","src/unix/bsd/netbsdlike/netbsd/x86.rs":"20692320e36bfe028d1a34d16fe12ca77aa909cb02bda167376f98f1a09aefe7","src/unix/bsd/netbsdlike/netbsd/x86_64.rs":"1afe5ef46b14397cdd68664b5b232e4f5b035b6db1d4cf411c899d51ebca9f30","src/unix/bsd/netbsdlike/openbsd/aarch64.rs":"dd91931d373b7ecaf6e2de25adadee10d16fa9b12c2cbacdff3eb291e1ba36af","src/unix/bsd/netbsdlike/openbsd/arm.rs":"01580d261bc6447bb327a0d982181b7bdabfa066cee65a30373d3ced729ad307","src/unix/bsd/netbsdlike/openbsd/mips64.rs":"8532a189ae10c7d668d9d4065da8b05d124e09bd39442c9f74a7f231c43eca48","src/unix/bsd/netbsdlike/openbsd/mod.rs":"38b7d65a86701a75a9047a3ef3b006f09ffbb33ab2312174bce
e889efb74e100","src/unix/bsd/netbsdlike/openbsd/powerpc.rs":"01580d261bc6447bb327a0d982181b7bdabfa066cee65a30373d3ced729ad307","src/unix/bsd/netbsdlike/openbsd/powerpc64.rs":"1dd5449dd1fd3d51e30ffdeeaece91d0aaf05c710e0ac699fecc5461cfa2c28e","src/unix/bsd/netbsdlike/openbsd/riscv64.rs":"1dd5449dd1fd3d51e30ffdeeaece91d0aaf05c710e0ac699fecc5461cfa2c28e","src/unix/bsd/netbsdlike/openbsd/sparc64.rs":"d04fd287afbaa2c5df9d48c94e8374a532a3ba491b424ddf018270c7312f4085","src/unix/bsd/netbsdlike/openbsd/x86.rs":"6f7f5c4fde2a2259eb547890cbd86570cea04ef85347d7569e94e679448bec87","src/unix/bsd/netbsdlike/openbsd/x86_64.rs":"d31db31630289c85af3339dbe357998a21ca584cbae31607448fe2cf7675a4e1","src/unix/haiku/b32.rs":"a2efdbf7158a6da341e1db9176b0ab193ba88b449616239ed95dced11f54d87b","src/unix/haiku/b64.rs":"ff8115367d3d7d354f792d6176dfaaa26353f57056197b563bf4681f91ff7985","src/unix/haiku/mod.rs":"891f77891349938ffb2048db0446e28f2b7f78a0286042d0f0a9af6c2d4570af","src/unix/haiku/native.rs":"dbfcbf4954a79d1df2ff58e0590bbcb8c57dfc7a32392aa73ee4726b66bd6cc8","src/unix/haiku/x86_64.rs":"3ec3aeeb7ed208b8916f3e32d42bfd085ff5e16936a1a35d9a52789f043b7237","src/unix/hermit/aarch64.rs":"86048676e335944c37a63d0083d0f368ae10ceccefeed9debb3bbe08777fc682","src/unix/hermit/mod.rs":"a1494a0bddf301cceb0d9b8529a84b5882fe855ceae77a1c4e8d6034e705e26c","src/unix/hermit/x86_64.rs":"ab832b7524e5fb15c49ff7431165ab1a37dc4667ae0b58e8306f4c539bfa110c","src/unix/linux_like/android/b32/arm.rs":"007391f35663762d8097a8a5ae301accb82c729713da0dfd9631c4bc69d9305e","src/unix/linux_like/android/b32/mod.rs":"7c173e0375119bf06a3081652faede95e5bcd6858e7576b7533d037978737c8f","src/unix/linux_like/android/b32/x86/align.rs":"812914e4241df82e32b12375ca3374615dc3a4bdd4cf31f0423c5815320c0dab","src/unix/linux_like/android/b32/x86/mod.rs":"81b4ab3ffc347e819f2f62c09dcd923d9d12d14b3d0e66ddc06ab78fa75a4b88","src/unix/linux_like/android/b64/aarch64/align.rs":"2179c3b1608fa4bf68840482bfc2b2fa3ee2faf6fcae3770f9e505cddca35c7b","src/unix/linux_like/android/b64/aarch64/int128.rs":"1735f6f5c56770d20dd426442f09724d9b2052b46a7cd82f23f3288a4a7276de","src/unix/linux_like/android/b64/aarch64/mod.rs":"f12594f7a57654ea63794822761c1ef9029ee5cef6bbad498c928a5bff2dfade","src/unix/linux_like/android/b64/mod.rs":"71e4fcbe952bfa4a5f9022f3972e906917b38f729b9d8ef57cd5d179104894ac","src/unix/linux_like/android/b64/riscv64/align.rs":"0bf138f84e5327d8339bcd4adf071a6832b516445e597552c82bbd881095e3a8","src/unix/linux_like/android/b64/riscv64/mod.rs":"80e9f93fed838a48b4e2e8d77b95c72cfd7c0647bcce63851555c5ad16dad143","src/unix/linux_like/android/b64/x86_64/align.rs":"7169d07a9fd4716f7512719aec9fda5d8bed306dc0720ffc1b21696c9951e3c6","src/unix/linux_like/android/b64/x86_64/mod.rs":"40be3b9dac9b52f3fd341fae4593a8c8c11a79ed694bcccf82f8810253c77492","src/unix/linux_like/android/mod.rs":"bf37384b99034eb1817303d9e1b35fe895da1010b024ada128e84059dad2c73b","src/unix/linux_like/emscripten/align.rs":"86c95cbed7a7161b1f23ee06843e7b0e2340ad92b2cb86fe2a8ef3e0e8c36216","src/unix/linux_like/emscripten/mod.rs":"24a6b0eadc011ee75c5bcbd18fe9f30d64006b71ec1416f6352e6015917240fc","src/unix/linux_like/emscripten/no_align.rs":"0128e4aa721a9902754828b61b5ec7d8a86619983ed1e0544a85d35b1051fad6","src/unix/linux_like/linux/align.rs":"d6c259942c8e843373accd180fc8f4f45f03544dfd21b93a8d02641ead3ef63e","src/unix/linux_like/linux/arch/generic/mod.rs":"46dd6634e564fb1b60c8a2d1018ef8f365d1a8ed26f162c99528922d06d14134","src/unix/linux_like/linux/arch/mips/mod.rs":"2d166054a586bb4bf6e4a4ba35f7574907b217225eff8f1a43adc4277e142
460","src/unix/linux_like/linux/arch/mod.rs":"466a29622e47c6c7f1500682b2eb17f5566dd81b322cd6348f0fdd355cec593a","src/unix/linux_like/linux/arch/powerpc/mod.rs":"3f6da7b0fa7b394c7d4eea2bb3caa7a7729ab0d6c1491fef02206a912c41b815","src/unix/linux_like/linux/arch/sparc/mod.rs":"91593ec0440f1dd8f8e612028f432c44c14089286e2aca50e10511ab942db8c3","src/unix/linux_like/linux/gnu/align.rs":"e4a3c27fe20a57b8d612c34cb05bc70646edb5cec7251957315afa53a7b9f936","src/unix/linux_like/linux/gnu/b32/arm/align.rs":"6ec0eb3ee93f7ae99fd714b4deabfb5e97fbcefd8c26f5a45fb8e7150899cdeb","src/unix/linux_like/linux/gnu/b32/arm/mod.rs":"5bd3f6b3484e049ddaac95f411b0d82cbf1cd28e6a5defbc927bd917f5f7d299","src/unix/linux_like/linux/gnu/b32/m68k/align.rs":"8faa92f77a9232c035418d45331774e64a9a841d99c91791570a203bf2b45bcb","src/unix/linux_like/linux/gnu/b32/m68k/mod.rs":"a2a0a9400dae44086ebf579e0448e0676d4a3214d1ae7d13a024857251e23b6b","src/unix/linux_like/linux/gnu/b32/mips/align.rs":"429fb5e005cb7143602d430098b6ebfb7d360685b194f333dfd587472ae954ee","src/unix/linux_like/linux/gnu/b32/mips/mod.rs":"6b9a5dac6f937ddc1453e808e3c43502c87143332df9e43ac64fb8b1eda6c116","src/unix/linux_like/linux/gnu/b32/mod.rs":"8da281da578cdee972e952b118b903b370320897a7e335342a15e1359864bef2","src/unix/linux_like/linux/gnu/b32/powerpc.rs":"5c5d90326b54b57b98eff4745fe7a3fb02f053b2dc782241a73e807b491936a3","src/unix/linux_like/linux/gnu/b32/riscv32/align.rs":"d321491612be8d5c61b6ec2dc0111beb3a22e58803f99cd37543efe86621b119","src/unix/linux_like/linux/gnu/b32/riscv32/mod.rs":"5e7c1e29aeb82fc422f45b73fb0cf3d13d0902300f9150d2755a9074f8d96999","src/unix/linux_like/linux/gnu/b32/sparc/align.rs":"21adbed27df73e2d1ed934aaf733a643003d7baf2bde9c48ea440895bcca6d41","src/unix/linux_like/linux/gnu/b32/sparc/mod.rs":"80894eece66e9348f45d1b07ad37c757ea694bbd10ed49d3f920b34e9f51a9a3","src/unix/linux_like/linux/gnu/b32/x86/align.rs":"e4bafdc4a519a7922a81b37a62bbfd1177a2f620890eef8f1fbc47162e9eb413","src/unix/linux_like/linux/gnu/b32/x86/mod.rs":"c703cc5e9de2dc31d9e5831bfb6f354d6e3518b2ae02263f68a9a70f1c0167e2","src/unix/linux_like/linux/gnu/b64/aarch64/align.rs":"ea39d5fd8ca5a71314127d1e1f542bca34ac566eac9a95662076d91ea4bee548","src/unix/linux_like/linux/gnu/b64/aarch64/ilp32.rs":"21a21503ef2e095f4371044915d4bfb07a8578011cb5c713cd9f45947b0b5730","src/unix/linux_like/linux/gnu/b64/aarch64/int128.rs":"1735f6f5c56770d20dd426442f09724d9b2052b46a7cd82f23f3288a4a7276de","src/unix/linux_like/linux/gnu/b64/aarch64/lp64.rs":"e78c3cd197f44832338b414d1a9bc0d194f44c74db77bd7bf830c1fff62b2690","src/unix/linux_like/linux/gnu/b64/aarch64/mod.rs":"666beae35371cb54a4ad091764e0e3ca4983d5205179c119a8ff97d3ae301869","src/unix/linux_like/linux/gnu/b64/loongarch64/align.rs":"6616c38bf8cab53034dce9f968adae8fb7771334445a93876d000cfd08f117a8","src/unix/linux_like/linux/gnu/b64/loongarch64/mod.rs":"17e9478b6a5830f6b8f6bea4ccab712cfd1972cdfb43b97408a068e4ea924106","src/unix/linux_like/linux/gnu/b64/mips64/align.rs":"7169d07a9fd4716f7512719aec9fda5d8bed306dc0720ffc1b21696c9951e3c6","src/unix/linux_like/linux/gnu/b64/mips64/mod.rs":"80b4b97a41564290c510e68a1fb20cfd8424206f010e71a596f12877de886a71","src/unix/linux_like/linux/gnu/b64/mod.rs":"3c6555f30a7a8852757b31a542ea73fb6a16a6e27e838397e819278ad56e57a4","src/unix/linux_like/linux/gnu/b64/powerpc64/align.rs":"e29c4868bbecfa4a6cd8a2ad06193f3bbc78a468cc1dc9df83f002f1268130d9","src/unix/linux_like/linux/gnu/b64/powerpc64/mod.rs":"a595e37c2325ceb40ef66c634bd3c255ad184a1d70ff8025e98a075f0ec67704","src/unix/linux_like/linux/gnu/b64/riscv64/align.rs":"d
321491612be8d5c61b6ec2dc0111beb3a22e58803f99cd37543efe86621b119","src/unix/linux_like/linux/gnu/b64/riscv64/mod.rs":"ef4b13477ffd8532fb6705ca3fa63a1f13e8d19ee39b083c5355dfce430c1a5b","src/unix/linux_like/linux/gnu/b64/s390x.rs":"788fde4fa1919859cc028b59da31de00449edd2b2c1530ae76134beac418b73c","src/unix/linux_like/linux/gnu/b64/sparc64/align.rs":"e29c4868bbecfa4a6cd8a2ad06193f3bbc78a468cc1dc9df83f002f1268130d9","src/unix/linux_like/linux/gnu/b64/sparc64/mod.rs":"c4fa0ede3f78b21a9982667922cccd0681bee3cb6d42208ea9958f65e93d6308","src/unix/linux_like/linux/gnu/b64/x86_64/align.rs":"62e822478356db4a73b6bbd1b36d825b893939ab4b308ec11b0578bcc4b49769","src/unix/linux_like/linux/gnu/b64/x86_64/mod.rs":"e37e0421290b152fe508883181c41225e09dd5452a6b085e8d807b3b54823028","src/unix/linux_like/linux/gnu/b64/x86_64/not_x32.rs":"c1b6345ce14f67d1b2e2f7f2c0ff9a074c07acbd348df69cb4558bda8c8fb9ae","src/unix/linux_like/linux/gnu/b64/x86_64/x32.rs":"3f4d2aeadb7d2620cad09564abdbfc5cf02eeb5a27f2bab8a4e9b4bdbdb258a5","src/unix/linux_like/linux/gnu/mod.rs":"e31aa4bd147c83d05dcd13baca4b9f676320a7713ff50de08b51d0c88f8241a3","src/unix/linux_like/linux/gnu/no_align.rs":"9cd223135de75315840ff9c3fd5441ba1cb632b96b5c85a76f8316c86653db25","src/unix/linux_like/linux/mod.rs":"8862912e65ae64dd26728ced492eacbdd3753b7a19432fc8fdf5a673ff7526c9","src/unix/linux_like/linux/musl/b32/arm/align.rs":"3e8ac052c1043764776b54c93ba4260e061df998631737a897d9d47d54f7b80c","src/unix/linux_like/linux/musl/b32/arm/mod.rs":"f5b217a93f99c2852f7fd1459f529798372fa7df84ee0cfd3d8cdd5b2021b8cf","src/unix/linux_like/linux/musl/b32/hexagon.rs":"226a8b64ce9c75abbbee6d2dceb0b44f7b6c750c4102ebd4d015194afee6666e","src/unix/linux_like/linux/musl/b32/mips/align.rs":"429fb5e005cb7143602d430098b6ebfb7d360685b194f333dfd587472ae954ee","src/unix/linux_like/linux/musl/b32/mips/mod.rs":"16a7a03d998a5db11be9ee81525c7faec4623383260e8bc125b1c53a050fde75","src/unix/linux_like/linux/musl/b32/mod.rs":"580e27c5ce3344df686f1ffc08fdfa2c282d1ceb623d778c50d210d4bd65ec7e","src/unix/linux_like/linux/musl/b32/powerpc.rs":"dc52adc264c34bce80753d6bd064e8fc4b8237fa1e5c5315ccb6c72df74c2813","src/unix/linux_like/linux/musl/b32/riscv32/align.rs":"efd2accf33b87de7c7547903359a5da896edc33cd6c719552c7474b60d4a5d48","src/unix/linux_like/linux/musl/b32/riscv32/mod.rs":"e57dc5562553aab6d0765e0ec266254aa52975f8757bfe97e0c6028fa7d5d37c","src/unix/linux_like/linux/musl/b32/x86/align.rs":"08e77fbd7435d7dec2ff56932433bece3f02e47ce810f89004a275a86d39cbe1","src/unix/linux_like/linux/musl/b32/x86/mod.rs":"7a1586f77bb693f0b319ec720c35963da056287fc42f8e2ccf1d5b2bcccf4fd6","src/unix/linux_like/linux/musl/b64/aarch64/align.rs":"6ba32725d24d7d8e6aa111f3b57aafa318f83b606abe96561329151829821133","src/unix/linux_like/linux/musl/b64/aarch64/int128.rs":"1735f6f5c56770d20dd426442f09724d9b2052b46a7cd82f23f3288a4a7276de","src/unix/linux_like/linux/musl/b64/aarch64/mod.rs":"31e75179cbb4e26425b3f5b052e358f593153da662884655e60801d852e55dc2","src/unix/linux_like/linux/musl/b64/mips64.rs":"9a5d29f666332bb056d0e2951e9de989aa1dc016075f009db3f2f628e0cdda8c","src/unix/linux_like/linux/musl/b64/mod.rs":"8c10627bd582cb272514e7350ae4743a65d489356eae039d2e7e55cd533fbbc8","src/unix/linux_like/linux/musl/b64/powerpc64.rs":"455dc0ffa55afc1db6ffaf461f6f2a7b49d31658bfebe0bb4efac5967a6f956c","src/unix/linux_like/linux/musl/b64/riscv64/align.rs":"d321491612be8d5c61b6ec2dc0111beb3a22e58803f99cd37543efe86621b119","src/unix/linux_like/linux/musl/b64/riscv64/mod.rs":"42d4b6d36807f37759094a732a321080cccdf498b174d632cebba147051de294","src/u
nix/linux_like/linux/musl/b64/s390x.rs":"d8a4fdfea0960ec284cae4facb8b0fb342e8aa41544cffacdcaf08c5a92a43f8","src/unix/linux_like/linux/musl/b64/x86_64/align.rs":"77309276ad7a42cbe59ca381f23590b7a143aded05555b34a5b307b808cbca6e","src/unix/linux_like/linux/musl/b64/x86_64/mod.rs":"7a877cd23b64be66d28e6b8dddae32d59a88d69115637539daf19381f4e39330","src/unix/linux_like/linux/musl/mod.rs":"8d8b50a0bf7ec53bd4d2ea92e8bfae14529f0beb3f22a65b55623f7086fee8ac","src/unix/linux_like/linux/no_align.rs":"da2a8721becaaaa528781f97f5d9aae6a982ae5d4f5f6d2ffc0150bed72319b3","src/unix/linux_like/linux/non_exhaustive.rs":"181a05bf94fdb911db83ce793b993bd6548a4115b306a7ef3c10f745a8fea3e9","src/unix/linux_like/linux/uclibc/align.rs":"9ed16138d8e439bd90930845a65eafa7ebd67366e6bf633936d44014f6e4c959","src/unix/linux_like/linux/uclibc/arm/align.rs":"e4a3c27fe20a57b8d612c34cb05bc70646edb5cec7251957315afa53a7b9f936","src/unix/linux_like/linux/uclibc/arm/mod.rs":"bf2dcf0a468f386899f572324f3bc14be1974f570afdfff1075ca6c5dd57710d","src/unix/linux_like/linux/uclibc/arm/no_align.rs":"9cd223135de75315840ff9c3fd5441ba1cb632b96b5c85a76f8316c86653db25","src/unix/linux_like/linux/uclibc/mips/mips32/align.rs":"e4a3c27fe20a57b8d612c34cb05bc70646edb5cec7251957315afa53a7b9f936","src/unix/linux_like/linux/uclibc/mips/mips32/mod.rs":"d2fc251754458b697ef6e599f0a670ebca18f61e4026bf681be4e2baa896b9df","src/unix/linux_like/linux/uclibc/mips/mips32/no_align.rs":"9cd223135de75315840ff9c3fd5441ba1cb632b96b5c85a76f8316c86653db25","src/unix/linux_like/linux/uclibc/mips/mips64/align.rs":"a7bdcb18a37a2d91e64d5fad83ea3edc78f5412adb28f77ab077dbb26dd08b2d","src/unix/linux_like/linux/uclibc/mips/mips64/mod.rs":"256a428290a560163ef7dc7d18b27bd3c6ce9748a0f28d5dc7f82203ee228220","src/unix/linux_like/linux/uclibc/mips/mips64/no_align.rs":"4a18e3875698c85229599225ac3401a2a40da87e77b2ad4ef47c6fcd5a24ed30","src/unix/linux_like/linux/uclibc/mips/mod.rs":"367ec5483ad317e6ccba1ac0888da6cf088a8d32689214cc8d16129aa692260c","src/unix/linux_like/linux/uclibc/mod.rs":"1c3d25cddcfefa2bd17bdc81550826be31a08eef235e13f825f169a5029c8bca","src/unix/linux_like/linux/uclibc/no_align.rs":"3f28637046524618adaa1012e26cb7ffe94b9396e6b518cccdc69d59f274d709","src/unix/linux_like/linux/uclibc/x86_64/l4re.rs":"024eba5753e852dbdd212427351affe7e83f9916c1864bce414d7aa2618f192e","src/unix/linux_like/linux/uclibc/x86_64/mod.rs":"420dbea99e99091f333641e202960fa4bed0733de2a834e610708555be6bab4c","src/unix/linux_like/linux/uclibc/x86_64/other.rs":"42c3f71e58cabba373f6a55a623f3c31b85049eb64824c09c2b082b3b2d6a0a8","src/unix/linux_like/mod.rs":"759e65c13f7e49a6efd1a979c821c53c478648f7f00cb29da65d92904c7c6814","src/unix/mod.rs":"a55d80aae47b642e25810c2321626f4cc5168170230a1d73953dfca58709b774","src/unix/newlib/aarch64/mod.rs":"bac93836a9a57b2c710f32f852e92a4d11ad6759ab0fb6ad33e71d60e53278af","src/unix/newlib/align.rs":"28aaf87fafbc6b312622719d472d8cf65f9e5467d15339df5f73e66d8502b28a","src/unix/newlib/arm/mod.rs":"cbba6b3e957eceb496806e60de8725a23ff3fa0015983b4b4fa27b233732b526","src/unix/newlib/espidf/mod.rs":"816f235f4aa4baabba7f2606b31d0fdb03988c52194c966728de8690bf17299d","src/unix/newlib/generic.rs":"eab066d9f0a0f3eb53cc1073d01496bba0110989e1f6a59838afd19f870cd599","src/unix/newlib/horizon/mod.rs":"7cc5cc120437421db139bfa6a90b18168cd3070bdd0f5be96d40fe4c996f3ca1","src/unix/newlib/mod.rs":"47d853beaa42b4aa17532d7687715e00e1da6632fccbcdcb079382dbde5412ff","src/unix/newlib/no_align.rs":"e0743b2179495a9514bc3a4d1781e492878c4ec834ee0085d0891dd1712e82fb","src/unix/newlib/powerpc/mod.rs":"0202ffd57
caf75b6afa2c9717750ffb96e375ac33df0ae9609a3f831be393b67","src/unix/no_align.rs":"c06e95373b9088266e0b14bba0954eef95f93fb2b01d951855e382d22de78e53","src/unix/nto/aarch64.rs":"4709c9afdc8d583be876598e7c238499ee3e8da5bd2baa614d9c7dd414851555","src/unix/nto/mod.rs":"a4ed355f33ab18041cf642e2f4d1a50dc96b5bce778715f04352c34d020961b5","src/unix/nto/neutrino.rs":"62198d95ccc0fe7ece6f9d5c0b29fc22303ef458886efb5e09aad524eca2ab7b","src/unix/nto/x86_64.rs":"a3e18e93c2999da1cd7a6f748a4b60c07aefb73d8ea2aafec19a84cfb040bc8e","src/unix/redox/mod.rs":"c8afea39f81e73ad4e9bf8a8f25138a200e616048e189debad80a6b7e08f710f","src/unix/solarish/compat.rs":"00f1ee3faec9da69204e42f025f6735dd13d894071a154425dcc43ecbdd06e7f","src/unix/solarish/illumos.rs":"cd93c2d84722bbf9933a92842a8998eb0b2afc962f50bc2546ad127b82809fa7","src/unix/solarish/mod.rs":"b1c0a1d347b30d45c85429b7236d234f5b2c86b9eec439e897c9371d856c187a","src/unix/solarish/solaris.rs":"41b350a89ddf01cd12a10f93640f92be53be0b0d976021cdc08da17bf3e72edf","src/unix/solarish/x86.rs":"e86e806df0caed72765040eaa2f3c883198d1aa91508540adf9b7008c77f522e","src/unix/solarish/x86_64.rs":"ec2b01f194eb8a6a27133c57681da195a949e03098f3ea1e847227a9c09ef5fc","src/unix/solarish/x86_common.rs":"ac869d9c3c95645c22460468391eb1982023c3a8e02b9e06a72e3aef3d5f1eac","src/vxworks/aarch64.rs":"98f0afdc511cd02557e506c21fed6737585490a1dce7a9d4941d08c437762b99","src/vxworks/arm.rs":"acb7968ce99fe3f4abdf39d98f8133d21a4fba435b8ef7084777cb181d788e88","src/vxworks/mod.rs":"ff063860fc4fe3bca6332742764e29c1506884added21aa7722144c1fb3f4018","src/vxworks/powerpc.rs":"acb7968ce99fe3f4abdf39d98f8133d21a4fba435b8ef7084777cb181d788e88","src/vxworks/powerpc64.rs":"98f0afdc511cd02557e506c21fed6737585490a1dce7a9d4941d08c437762b99","src/vxworks/x86.rs":"552f007f38317620b23889cb7c49d1d115841252439060122f52f434fbc6e5ba","src/vxworks/x86_64.rs":"018d92be3ad628a129eff9f2f5dfbc0883d8b8e5f2fa917b900a7f98ed6b514a","src/wasi.rs":"b9a2ecc75fd2359a2484e93fdd1c539d2abc109cd9455d8f566476bcfb19df3d","src/windows/gnu/align.rs":"b2c13ec1b9f3b39a75c452c80c951dff9d0215e31d77e883b4502afb31794647","src/windows/gnu/mod.rs":"3c8c7edb7cdf5d0c44af936db2a94869585c69dfabeef30571b4f4e38375767a","src/windows/mod.rs":"5421b92f47cb69845f4323b1a14e533d8a17acc6412c58e67f4d41def749de2f","src/windows/msvc/mod.rs":"c068271e00fca6b62bc4bf44bcf142cfc38caeded9b6c4e01d1ceef3ccf986f4","tests/const_fn.rs":"cb75a1f0864f926aebe79118fc34d51a0d1ade2c20a394e7774c7e545f21f1f4"},"package":"201de327520df007757c1f0adce6e827fe8562fbc28bfd9c15571c66ca1f5f79"} \ No newline at end of file
diff --git a/vendor/libc/Cargo.toml b/vendor/libc/Cargo.toml
index 5c6208353..60e7b1674 100644
--- a/vendor/libc/Cargo.toml
+++ b/vendor/libc/Cargo.toml
@@ -11,7 +11,7 @@
[package]
name = "libc"
-version = "0.2.138"
+version = "0.2.139"
authors = ["The Rust Project Developers"]
build = "build.rs"
exclude = [
diff --git a/vendor/libc/src/fuchsia/mod.rs b/vendor/libc/src/fuchsia/mod.rs
index 7a9edada1..5c6aebde2 100644
--- a/vendor/libc/src/fuchsia/mod.rs
+++ b/vendor/libc/src/fuchsia/mod.rs
@@ -3402,11 +3402,16 @@ extern "C" {
pub fn feof(stream: *mut FILE) -> c_int;
pub fn ferror(stream: *mut FILE) -> c_int;
pub fn perror(s: *const c_char);
+ pub fn atof(s: *const c_char) -> c_double;
pub fn atoi(s: *const c_char) -> c_int;
+ pub fn atol(s: *const c_char) -> c_long;
+ pub fn atoll(s: *const c_char) -> c_longlong;
pub fn strtod(s: *const c_char, endp: *mut *mut c_char) -> c_double;
pub fn strtof(s: *const c_char, endp: *mut *mut c_char) -> c_float;
pub fn strtol(s: *const c_char, endp: *mut *mut c_char, base: c_int) -> c_long;
+ pub fn strtoll(s: *const c_char, endp: *mut *mut c_char, base: c_int) -> c_longlong;
pub fn strtoul(s: *const c_char, endp: *mut *mut c_char, base: c_int) -> c_ulong;
+ pub fn strtoull(s: *const c_char, endp: *mut *mut c_char, base: c_int) -> c_ulonglong;
pub fn calloc(nobj: size_t, size: size_t) -> *mut c_void;
pub fn malloc(size: size_t) -> *mut c_void;
pub fn realloc(p: *mut c_void, size: size_t) -> *mut c_void;
@@ -3448,7 +3453,6 @@ extern "C" {
pub fn memset(dest: *mut c_void, c: c_int, n: size_t) -> *mut c_void;
pub fn abs(i: c_int) -> c_int;
- pub fn atof(s: *const c_char) -> c_double;
pub fn labs(i: c_long) -> c_long;
pub fn rand() -> c_int;
pub fn srand(seed: c_uint);
diff --git a/vendor/libc/src/unix/bsd/mod.rs b/vendor/libc/src/unix/bsd/mod.rs
index d49e3c440..84e572eda 100644
--- a/vendor/libc/src/unix/bsd/mod.rs
+++ b/vendor/libc/src/unix/bsd/mod.rs
@@ -613,7 +613,6 @@ extern "C" {
pub fn strerror_r(errnum: ::c_int, buf: *mut c_char, buflen: ::size_t) -> ::c_int;
pub fn abs(i: ::c_int) -> ::c_int;
- pub fn atof(s: *const ::c_char) -> ::c_double;
pub fn labs(i: ::c_long) -> ::c_long;
#[cfg_attr(
all(target_os = "freebsd", any(freebsd12, freebsd11, freebsd10)),
diff --git a/vendor/libc/src/unix/haiku/mod.rs b/vendor/libc/src/unix/haiku/mod.rs
index 005b1d9df..95ddadaee 100644
--- a/vendor/libc/src/unix/haiku/mod.rs
+++ b/vendor/libc/src/unix/haiku/mod.rs
@@ -1592,7 +1592,6 @@ extern "C" {
pub fn _errnop() -> *mut ::c_int;
pub fn abs(i: ::c_int) -> ::c_int;
- pub fn atof(s: *const ::c_char) -> ::c_double;
pub fn labs(i: ::c_long) -> ::c_long;
pub fn rand() -> ::c_int;
pub fn srand(seed: ::c_uint);
diff --git a/vendor/libc/src/unix/hermit/mod.rs b/vendor/libc/src/unix/hermit/mod.rs
index eedfd28a4..6a656a859 100644
--- a/vendor/libc/src/unix/hermit/mod.rs
+++ b/vendor/libc/src/unix/hermit/mod.rs
@@ -966,7 +966,6 @@ extern "C" {
pub fn sem_init(sem: *mut sem_t, pshared: ::c_int, value: ::c_uint) -> ::c_int;
pub fn abs(i: ::c_int) -> ::c_int;
- pub fn atof(s: *const ::c_char) -> ::c_double;
pub fn labs(i: ::c_long) -> ::c_long;
pub fn rand() -> ::c_int;
pub fn srand(seed: ::c_uint);
diff --git a/vendor/libc/src/unix/linux_like/android/b32/arm.rs b/vendor/libc/src/unix/linux_like/android/b32/arm.rs
index 33d2fa070..8b8e54955 100644
--- a/vendor/libc/src/unix/linux_like/android/b32/arm.rs
+++ b/vendor/libc/src/unix/linux_like/android/b32/arm.rs
@@ -501,6 +501,9 @@ pub const SYS_pwritev2: ::c_long = 393;
pub const SYS_pkey_mprotect: ::c_long = 394;
pub const SYS_pkey_alloc: ::c_long = 395;
pub const SYS_pkey_free: ::c_long = 396;
+pub const SYS_io_uring_setup: ::c_long = 425;
+pub const SYS_io_uring_enter: ::c_long = 426;
+pub const SYS_io_uring_register: ::c_long = 427;
// offsets in mcontext_t.gregs from sys/ucontext.h
pub const REG_R0: ::c_int = 0;
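Editor's note (not part of the patch): the SYS_io_uring_* numbers added above, and mirrored in the other Android targets below, are raw syscall numbers only. A minimal sketch of how a caller might probe for kernel support through libc::syscall is shown here purely as illustration; the probe arguments (entries = 0, null params) are deliberately invalid and only the resulting errno matters.

    // Illustrative only: check whether the running kernel knows io_uring.
    // With entries = 0 and a null params pointer the call is expected to
    // fail; ENOSYS means the syscall is absent, any other errno means the
    // kernel at least recognizes it.
    fn kernel_has_io_uring() -> bool {
        let ret = unsafe {
            libc::syscall(
                libc::SYS_io_uring_setup,
                0 as libc::c_ulong,
                std::ptr::null_mut::<libc::c_void>(),
            )
        };
        if ret >= 0 {
            // Should not happen with these arguments, but close the fd if it does.
            unsafe { libc::close(ret as libc::c_int) };
            return true;
        }
        std::io::Error::last_os_error().raw_os_error() != Some(libc::ENOSYS)
    }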
diff --git a/vendor/libc/src/unix/linux_like/android/b32/x86/mod.rs b/vendor/libc/src/unix/linux_like/android/b32/x86/mod.rs
index 6507cb4e0..9545ecbfb 100644
--- a/vendor/libc/src/unix/linux_like/android/b32/x86/mod.rs
+++ b/vendor/libc/src/unix/linux_like/android/b32/x86/mod.rs
@@ -533,6 +533,9 @@ pub const SYS_pwritev2: ::c_long = 379;
pub const SYS_pkey_mprotect: ::c_long = 380;
pub const SYS_pkey_alloc: ::c_long = 381;
pub const SYS_pkey_free: ::c_long = 382;
+pub const SYS_io_uring_setup: ::c_long = 425;
+pub const SYS_io_uring_enter: ::c_long = 426;
+pub const SYS_io_uring_register: ::c_long = 427;
// offsets in user_regs_structs, from sys/reg.h
pub const EBX: ::c_int = 0;
diff --git a/vendor/libc/src/unix/linux_like/android/b64/aarch64/mod.rs b/vendor/libc/src/unix/linux_like/android/b64/aarch64/mod.rs
index c4d442060..36871b084 100644
--- a/vendor/libc/src/unix/linux_like/android/b64/aarch64/mod.rs
+++ b/vendor/libc/src/unix/linux_like/android/b64/aarch64/mod.rs
@@ -374,6 +374,9 @@ pub const SYS_pwritev2: ::c_long = 287;
pub const SYS_pkey_mprotect: ::c_long = 288;
pub const SYS_pkey_alloc: ::c_long = 289;
pub const SYS_pkey_free: ::c_long = 290;
+pub const SYS_io_uring_setup: ::c_long = 425;
+pub const SYS_io_uring_enter: ::c_long = 426;
+pub const SYS_io_uring_register: ::c_long = 427;
pub const SYS_syscalls: ::c_long = 436;
cfg_if! {
diff --git a/vendor/libc/src/unix/linux_like/android/b64/x86_64/mod.rs b/vendor/libc/src/unix/linux_like/android/b64/x86_64/mod.rs
index d25b50775..1e3ee31c5 100644
--- a/vendor/libc/src/unix/linux_like/android/b64/x86_64/mod.rs
+++ b/vendor/libc/src/unix/linux_like/android/b64/x86_64/mod.rs
@@ -728,6 +728,9 @@ pub const SYS_pwritev2: ::c_long = 328;
pub const SYS_pkey_mprotect: ::c_long = 329;
pub const SYS_pkey_alloc: ::c_long = 330;
pub const SYS_pkey_free: ::c_long = 331;
+pub const SYS_io_uring_setup: ::c_long = 425;
+pub const SYS_io_uring_enter: ::c_long = 426;
+pub const SYS_io_uring_register: ::c_long = 427;
// offsets in user_regs_structs, from sys/reg.h
pub const R15: ::c_int = 0;
diff --git a/vendor/libc/src/unix/linux_like/android/mod.rs b/vendor/libc/src/unix/linux_like/android/mod.rs
index 61885582a..adec24a0a 100644
--- a/vendor/libc/src/unix/linux_like/android/mod.rs
+++ b/vendor/libc/src/unix/linux_like/android/mod.rs
@@ -2421,9 +2421,20 @@ pub const SND_CNT: usize = SND_MAX as usize + 1;
pub const UINPUT_VERSION: ::c_uint = 5;
pub const UINPUT_MAX_NAME_SIZE: usize = 80;
+// bionic/libc/kernel/uapi/linux/if_tun.h
pub const IFF_TUN: ::c_int = 0x0001;
pub const IFF_TAP: ::c_int = 0x0002;
+pub const IFF_NAPI: ::c_int = 0x0010;
+pub const IFF_NAPI_FRAGS: ::c_int = 0x0020;
pub const IFF_NO_PI: ::c_int = 0x1000;
+pub const IFF_ONE_QUEUE: ::c_int = 0x2000;
+pub const IFF_VNET_HDR: ::c_int = 0x4000;
+pub const IFF_TUN_EXCL: ::c_int = 0x8000;
+pub const IFF_MULTI_QUEUE: ::c_int = 0x0100;
+pub const IFF_ATTACH_QUEUE: ::c_int = 0x0200;
+pub const IFF_DETACH_QUEUE: ::c_int = 0x0400;
+pub const IFF_PERSIST: ::c_int = 0x0800;
+pub const IFF_NOFILTER: ::c_int = 0x1000;
// start android/platform/bionic/libc/kernel/uapi/linux/if_ether.h
// from https://android.googlesource.com/
@@ -2615,10 +2626,10 @@ pub const IN_Q_OVERFLOW: u32 = 0x0000_4000;
pub const IN_IGNORED: u32 = 0x0000_8000;
pub const IN_ONLYDIR: u32 = 0x0100_0000;
pub const IN_DONT_FOLLOW: u32 = 0x0200_0000;
-// pub const IN_EXCL_UNLINK: u32 = 0x0400_0000;
+pub const IN_EXCL_UNLINK: u32 = 0x0400_0000;
-// pub const IN_MASK_CREATE: u32 = 0x1000_0000;
-// pub const IN_MASK_ADD: u32 = 0x2000_0000;
+pub const IN_MASK_CREATE: u32 = 0x1000_0000;
+pub const IN_MASK_ADD: u32 = 0x2000_0000;
pub const IN_ISDIR: u32 = 0x4000_0000;
pub const IN_ONESHOT: u32 = 0x8000_0000;
@@ -2739,6 +2750,12 @@ pub const PF_VSOCK: ::c_int = AF_VSOCK;
pub const SOMAXCONN: ::c_int = 128;
+// sys/prctl.h
+pub const PR_SET_PDEATHSIG: ::c_int = 1;
+pub const PR_GET_PDEATHSIG: ::c_int = 2;
+pub const PR_GET_SECUREBITS: ::c_int = 27;
+pub const PR_SET_SECUREBITS: ::c_int = 28;
+
// sys/system_properties.h
pub const PROP_VALUE_MAX: ::c_int = 92;
pub const PROP_NAME_MAX: ::c_int = 32;
@@ -3446,6 +3463,10 @@ extern "C" {
pub fn gettid() -> ::pid_t;
+ pub fn getrandom(buf: *mut ::c_void, buflen: ::size_t, flags: ::c_uint) -> ::ssize_t;
+
+ pub fn pthread_setname_np(thread: ::pthread_t, name: *const ::c_char) -> ::c_int;
+
pub fn __system_property_set(__name: *const ::c_char, __value: *const ::c_char) -> ::c_int;
pub fn __system_property_get(__name: *const ::c_char, __value: *mut ::c_char) -> ::c_int;
pub fn __system_property_find(__name: *const ::c_char) -> *const prop_info;
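Editor's note (not part of the patch): the getrandom and pthread_setname_np declarations added above are plain FFI bindings. A minimal usage sketch, assuming an Android/Bionic target, could look like the following; the buffer size and thread name are arbitrary.

    use std::ffi::CString;

    // Illustrative only: fill a buffer from the kernel CSPRNG and name the
    // calling thread, using the bindings declared in the hunk above.
    fn demo() -> std::io::Result<()> {
        let mut buf = [0u8; 16];
        let n = unsafe {
            libc::getrandom(buf.as_mut_ptr() as *mut libc::c_void, buf.len(), 0)
        };
        if n < 0 {
            return Err(std::io::Error::last_os_error());
        }

        // Bionic takes the target thread explicitly; here it is the caller.
        let name = CString::new("worker-0").unwrap();
        let rc = unsafe { libc::pthread_setname_np(libc::pthread_self(), name.as_ptr()) };
        if rc != 0 {
            return Err(std::io::Error::from_raw_os_error(rc));
        }
        Ok(())
    }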
diff --git a/vendor/libc/src/unix/linux_like/emscripten/mod.rs b/vendor/libc/src/unix/linux_like/emscripten/mod.rs
index 11fbb31c3..f2024900c 100644
--- a/vendor/libc/src/unix/linux_like/emscripten/mod.rs
+++ b/vendor/libc/src/unix/linux_like/emscripten/mod.rs
@@ -1763,7 +1763,6 @@ extern "C" {
pub fn strerror_r(errnum: ::c_int, buf: *mut c_char, buflen: ::size_t) -> ::c_int;
pub fn abs(i: ::c_int) -> ::c_int;
- pub fn atof(s: *const ::c_char) -> ::c_double;
pub fn labs(i: ::c_long) -> ::c_long;
pub fn rand() -> ::c_int;
pub fn srand(seed: ::c_uint);
diff --git a/vendor/libc/src/unix/linux_like/linux/mod.rs b/vendor/libc/src/unix/linux_like/linux/mod.rs
index f0a0820c3..9658f0744 100644
--- a/vendor/libc/src/unix/linux_like/linux/mod.rs
+++ b/vendor/libc/src/unix/linux_like/linux/mod.rs
@@ -1583,6 +1583,10 @@ pub const AT_HWCAP2: ::c_ulong = 26;
pub const AT_EXECFN: ::c_ulong = 31;
+// defined in arch/<arch>/include/uapi/asm/auxvec.h but has the same value
+// wherever it is defined.
+pub const AT_SYSINFO_EHDR: ::c_ulong = 33;
+
pub const GLOB_ERR: ::c_int = 1 << 0;
pub const GLOB_MARK: ::c_int = 1 << 1;
pub const GLOB_NOSORT: ::c_int = 1 << 2;
@@ -3061,7 +3065,7 @@ pub const IN_Q_OVERFLOW: u32 = 0x0000_4000;
pub const IN_IGNORED: u32 = 0x0000_8000;
pub const IN_ONLYDIR: u32 = 0x0100_0000;
pub const IN_DONT_FOLLOW: u32 = 0x0200_0000;
-// pub const IN_EXCL_UNLINK: u32 = 0x0400_0000;
+pub const IN_EXCL_UNLINK: u32 = 0x0400_0000;
// linux/keyctl.h
pub const KEY_SPEC_THREAD_KEYRING: i32 = -1;
@@ -3107,8 +3111,8 @@ pub const KEYCTL_INSTANTIATE_IOV: u32 = 20;
pub const KEYCTL_INVALIDATE: u32 = 21;
pub const KEYCTL_GET_PERSISTENT: u32 = 22;
-// pub const IN_MASK_CREATE: u32 = 0x1000_0000;
-// pub const IN_MASK_ADD: u32 = 0x2000_0000;
+pub const IN_MASK_CREATE: u32 = 0x1000_0000;
+pub const IN_MASK_ADD: u32 = 0x2000_0000;
pub const IN_ISDIR: u32 = 0x4000_0000;
pub const IN_ONESHOT: u32 = 0x8000_0000;
@@ -3800,7 +3804,6 @@ extern "C" {
pub fn strerror_r(errnum: ::c_int, buf: *mut c_char, buflen: ::size_t) -> ::c_int;
pub fn abs(i: ::c_int) -> ::c_int;
- pub fn atof(s: *const ::c_char) -> ::c_double;
pub fn labs(i: ::c_long) -> ::c_long;
pub fn rand() -> ::c_int;
pub fn srand(seed: ::c_uint);
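Editor's note (not part of the patch): AT_SYSINFO_EHDR, added above, is the auxiliary-vector entry that carries the vDSO base address. A minimal sketch of reading it, assuming a glibc or musl Linux target where libc::getauxval is available:

    // Illustrative only: locate the vDSO ELF header via the auxiliary vector.
    fn vdso_base() -> Option<usize> {
        // getauxval returns 0 when the requested entry is absent.
        let addr = unsafe { libc::getauxval(libc::AT_SYSINFO_EHDR) };
        if addr == 0 {
            None
        } else {
            Some(addr as usize)
        }
    }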
diff --git a/vendor/libc/src/unix/linux_like/mod.rs b/vendor/libc/src/unix/linux_like/mod.rs
index 8e738d87b..e2e73b330 100644
--- a/vendor/libc/src/unix/linux_like/mod.rs
+++ b/vendor/libc/src/unix/linux_like/mod.rs
@@ -1615,6 +1615,14 @@ safe_f! {
pub {const} fn IPTOS_ECN(x: u8) -> u8 {
x & ::IPTOS_ECN_MASK
}
+
+ #[allow(ellipsis_inclusive_range_patterns)]
+ pub {const} fn KERNEL_VERSION(a: u32, b: u32, c: u32) -> u32 {
+ ((a << 16) + (b << 8)) + match c {
+ 0 ... 255 => c,
+ _ => 255,
+ }
+ }
}
extern "C" {
diff --git a/vendor/libc/src/unix/mod.rs b/vendor/libc/src/unix/mod.rs
index fb9ebf792..826b83518 100644
--- a/vendor/libc/src/unix/mod.rs
+++ b/vendor/libc/src/unix/mod.rs
@@ -32,6 +32,9 @@ cfg_if! {
if #[cfg(any(target_os = "espidf", target_os = "horizon"))] {
pub type uid_t = ::c_ushort;
pub type gid_t = ::c_ushort;
+ } else if #[cfg(target_os = "nto")] {
+ pub type uid_t = i32;
+ pub type gid_t = i32;
} else {
pub type uid_t = u32;
pub type gid_t = u32;
@@ -209,25 +212,31 @@ pub const INT_MAX: c_int = 2147483647;
pub const SIG_DFL: sighandler_t = 0 as sighandler_t;
pub const SIG_IGN: sighandler_t = 1 as sighandler_t;
pub const SIG_ERR: sighandler_t = !0 as sighandler_t;
-
-pub const DT_UNKNOWN: u8 = 0;
-pub const DT_FIFO: u8 = 1;
-pub const DT_CHR: u8 = 2;
-pub const DT_DIR: u8 = 4;
-pub const DT_BLK: u8 = 6;
-pub const DT_REG: u8 = 8;
-pub const DT_LNK: u8 = 10;
-pub const DT_SOCK: u8 = 12;
-
+cfg_if! {
+ if #[cfg(not(target_os = "nto"))] {
+ pub const DT_UNKNOWN: u8 = 0;
+ pub const DT_FIFO: u8 = 1;
+ pub const DT_CHR: u8 = 2;
+ pub const DT_DIR: u8 = 4;
+ pub const DT_BLK: u8 = 6;
+ pub const DT_REG: u8 = 8;
+ pub const DT_LNK: u8 = 10;
+ pub const DT_SOCK: u8 = 12;
+ }
+}
cfg_if! {
if #[cfg(not(target_os = "redox"))] {
pub const FD_CLOEXEC: ::c_int = 0x1;
}
}
-pub const USRQUOTA: ::c_int = 0;
-pub const GRPQUOTA: ::c_int = 1;
-
+cfg_if! {
+ if #[cfg(not(target_os = "nto"))]
+ {
+ pub const USRQUOTA: ::c_int = 0;
+ pub const GRPQUOTA: ::c_int = 1;
+ }
+}
pub const SIGIOT: ::c_int = 6;
pub const S_ISUID: ::mode_t = 0x800;
@@ -281,9 +290,13 @@ cfg_if! {
pub const LOG_PRIMASK: ::c_int = 7;
pub const LOG_FACMASK: ::c_int = 0x3f8;
-pub const PRIO_MIN: ::c_int = -20;
-pub const PRIO_MAX: ::c_int = 20;
-
+cfg_if! {
+ if #[cfg(not(target_os = "nto"))]
+ {
+ pub const PRIO_MIN: ::c_int = -20;
+ pub const PRIO_MAX: ::c_int = 20;
+ }
+}
pub const IPPROTO_ICMP: ::c_int = 1;
pub const IPPROTO_ICMPV6: ::c_int = 58;
pub const IPPROTO_TCP: ::c_int = 6;
@@ -361,7 +374,9 @@ cfg_if! {
target_os = "tvos",
target_os = "watchos",
target_os = "android",
- target_os = "openbsd"))] {
+ target_os = "openbsd",
+ target_os = "nto",
+ ))] {
#[link(name = "c")]
#[link(name = "m")]
extern {}
@@ -453,8 +468,6 @@ extern "C" {
link_name = "freopen$UNIX2003"
)]
pub fn freopen(filename: *const c_char, mode: *const c_char, file: *mut FILE) -> *mut FILE;
- pub fn fmemopen(buf: *mut c_void, size: size_t, mode: *const c_char) -> *mut FILE;
- pub fn open_memstream(ptr: *mut *mut c_char, sizeloc: *mut size_t) -> *mut FILE;
pub fn fflush(file: *mut FILE) -> c_int;
pub fn fclose(file: *mut FILE) -> c_int;
@@ -492,7 +505,10 @@ extern "C" {
pub fn ferror(stream: *mut FILE) -> c_int;
pub fn clearerr(stream: *mut FILE);
pub fn perror(s: *const c_char);
+ pub fn atof(s: *const c_char) -> c_double;
pub fn atoi(s: *const c_char) -> c_int;
+ pub fn atol(s: *const c_char) -> c_long;
+ pub fn atoll(s: *const c_char) -> c_longlong;
#[cfg_attr(
all(target_os = "macos", target_arch = "x86"),
link_name = "strtod$UNIX2003"
@@ -500,7 +516,9 @@ extern "C" {
pub fn strtod(s: *const c_char, endp: *mut *mut c_char) -> c_double;
pub fn strtof(s: *const c_char, endp: *mut *mut c_char) -> c_float;
pub fn strtol(s: *const c_char, endp: *mut *mut c_char, base: c_int) -> c_long;
+ pub fn strtoll(s: *const c_char, endp: *mut *mut c_char, base: c_int) -> c_longlong;
pub fn strtoul(s: *const c_char, endp: *mut *mut c_char, base: c_int) -> c_ulong;
+ pub fn strtoull(s: *const c_char, endp: *mut *mut c_char, base: c_int) -> c_ulonglong;
pub fn calloc(nobj: size_t, size: size_t) -> *mut c_void;
pub fn malloc(size: size_t) -> *mut c_void;
pub fn realloc(p: *mut c_void, size: size_t) -> *mut c_void;
@@ -508,7 +526,6 @@ extern "C" {
pub fn abort() -> !;
pub fn exit(status: c_int) -> !;
pub fn _exit(status: c_int) -> !;
- pub fn atexit(cb: extern "C" fn()) -> c_int;
#[cfg_attr(
all(target_os = "macos", target_arch = "x86"),
link_name = "system$UNIX2003"
@@ -1162,8 +1179,6 @@ extern "C" {
optlen: *mut ::socklen_t,
) -> ::c_int;
pub fn raise(signum: ::c_int) -> ::c_int;
- #[cfg_attr(target_os = "netbsd", link_name = "__sigaction14")]
- pub fn sigaction(signum: ::c_int, act: *const sigaction, oldact: *mut sigaction) -> ::c_int;
#[cfg_attr(target_os = "netbsd", link_name = "__utimes50")]
pub fn utimes(filename: *const ::c_char, times: *const ::timeval) -> ::c_int;
@@ -1325,8 +1340,6 @@ extern "C" {
pub fn statvfs(path: *const c_char, buf: *mut statvfs) -> ::c_int;
pub fn fstatvfs(fd: ::c_int, buf: *mut statvfs) -> ::c_int;
- pub fn readlink(path: *const c_char, buf: *mut c_char, bufsz: ::size_t) -> ::ssize_t;
-
#[cfg_attr(target_os = "netbsd", link_name = "__sigemptyset14")]
pub fn sigemptyset(set: *mut sigset_t) -> ::c_int;
#[cfg_attr(target_os = "netbsd", link_name = "__sigaddset14")]
@@ -1347,23 +1360,6 @@ extern "C" {
pub fn mkfifo(path: *const c_char, mode: mode_t) -> ::c_int;
- #[cfg_attr(
- all(target_os = "macos", target_arch = "x86_64"),
- link_name = "pselect$1050"
- )]
- #[cfg_attr(
- all(target_os = "macos", target_arch = "x86"),
- link_name = "pselect$UNIX2003"
- )]
- #[cfg_attr(target_os = "netbsd", link_name = "__pselect50")]
- pub fn pselect(
- nfds: ::c_int,
- readfds: *mut fd_set,
- writefds: *mut fd_set,
- errorfds: *mut fd_set,
- timeout: *const timespec,
- sigmask: *const sigset_t,
- ) -> ::c_int;
pub fn fseeko(stream: *mut ::FILE, offset: ::off_t, whence: ::c_int) -> ::c_int;
pub fn ftello(stream: *mut ::FILE) -> ::off_t;
#[cfg_attr(
@@ -1411,7 +1407,8 @@ extern "C" {
cfg_if! {
if #[cfg(not(any(target_os = "emscripten",
target_os = "android",
- target_os = "haiku")))] {
+ target_os = "haiku",
+ target_os = "nto")))] {
extern "C" {
pub fn adjtime(delta: *const timeval, olddelta: *mut timeval) -> ::c_int;
pub fn stpncpy(dst: *mut c_char, src: *const c_char, n: size_t) -> *mut c_char;
@@ -1420,7 +1417,7 @@ cfg_if! {
}
cfg_if! {
- if #[cfg(not(target_env = "uclibc"))] {
+ if #[cfg(not(any(target_env = "uclibc", target_os = "nto")))] {
extern "C" {
pub fn open_wmemstream(
ptr: *mut *mut wchar_t,
@@ -1439,12 +1436,8 @@ cfg_if! {
link_name = "pause$UNIX2003")]
pub fn pause() -> ::c_int;
- pub fn readlinkat(dirfd: ::c_int,
- pathname: *const ::c_char,
- buf: *mut ::c_char,
- bufsiz: ::size_t) -> ::ssize_t;
pub fn mkdirat(dirfd: ::c_int, pathname: *const ::c_char,
- mode: ::mode_t) -> ::c_int;
+ mode: ::mode_t) -> ::c_int;
pub fn openat(dirfd: ::c_int, pathname: *const ::c_char,
flags: ::c_int, ...) -> ::c_int;
@@ -1475,7 +1468,64 @@ cfg_if! {
}
cfg_if! {
- if #[cfg(not(any(target_os = "solaris", target_os = "illumos")))] {
+ if #[cfg(target_os = "nto")] {
+ extern {
+ pub fn readlinkat(dirfd: ::c_int,
+ pathname: *const ::c_char,
+ buf: *mut ::c_char,
+ bufsiz: ::size_t) -> ::c_int;
+ pub fn readlink(path: *const c_char, buf: *mut c_char, bufsz: ::size_t) -> ::c_int;
+ pub fn pselect(
+ nfds: ::c_int,
+ readfds: *mut fd_set,
+ writefds: *mut fd_set,
+ errorfds: *mut fd_set,
+ timeout: *mut timespec,
+ sigmask: *const sigset_t,
+ ) -> ::c_int;
+ }
+ } else {
+ extern {
+ pub fn readlinkat(dirfd: ::c_int,
+ pathname: *const ::c_char,
+ buf: *mut ::c_char,
+ bufsiz: ::size_t) -> ::ssize_t;
+ pub fn fmemopen(buf: *mut c_void, size: size_t, mode: *const c_char) -> *mut FILE;
+ pub fn open_memstream(ptr: *mut *mut c_char, sizeloc: *mut size_t) -> *mut FILE;
+ pub fn atexit(cb: extern "C" fn()) -> c_int;
+ #[cfg_attr(target_os = "netbsd", link_name = "__sigaction14")]
+ pub fn sigaction(
+ signum: ::c_int,
+ act: *const sigaction,
+ oldact: *mut sigaction
+ ) -> ::c_int;
+ pub fn readlink(path: *const c_char, buf: *mut c_char, bufsz: ::size_t) -> ::ssize_t;
+ #[cfg_attr(
+ all(target_os = "macos", target_arch = "x86_64"),
+ link_name = "pselect$1050"
+ )]
+ #[cfg_attr(
+ all(target_os = "macos", target_arch = "x86"),
+ link_name = "pselect$UNIX2003"
+ )]
+ #[cfg_attr(target_os = "netbsd", link_name = "__pselect50")]
+ pub fn pselect(
+ nfds: ::c_int,
+ readfds: *mut fd_set,
+ writefds: *mut fd_set,
+ errorfds: *mut fd_set,
+ timeout: *const timespec,
+ sigmask: *const sigset_t,
+ ) -> ::c_int;
+ }
+ }
+}
+
+cfg_if! {
+ if #[cfg(not(any(target_os = "solaris",
+ target_os = "illumos",
+ target_os = "nto",
+ )))] {
extern {
pub fn cfmakeraw(termios: *mut ::termios);
pub fn cfsetspeed(termios: *mut ::termios,
@@ -1517,6 +1567,9 @@ cfg_if! {
} else if #[cfg(target_os = "redox")] {
mod redox;
pub use self::redox::*;
+ } else if #[cfg(target_os = "nto")] {
+ mod nto;
+ pub use self::nto::*;
} else {
// Unknown target_os
}
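Editor's note (not part of the patch): the common extern block now declares the full C conversion family (atof, atol, atoll, strtoll, strtoull) rather than only atoi/strtol. A minimal, illustrative call through the new strtoll binding; the helper name and the end-pointer check are this sketch's own, not part of the change.

    use std::ffi::CString;
    use std::ptr;

    // Illustrative only: parse a signed 64-bit value with the newly declared
    // strtoll binding, checking that at least one character was consumed.
    fn parse_i64(text: &str) -> Option<i64> {
        let c_text = CString::new(text).ok()?;
        let mut end: *mut libc::c_char = ptr::null_mut();
        let value = unsafe { libc::strtoll(c_text.as_ptr(), &mut end, 10) };
        // `end` points past the last parsed character; reject empty parses.
        if end == c_text.as_ptr() as *mut libc::c_char {
            None
        } else {
            Some(value as i64)
        }
    }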
diff --git a/vendor/libc/src/unix/newlib/mod.rs b/vendor/libc/src/unix/newlib/mod.rs
index 1a694691a..3875f1cb4 100644
--- a/vendor/libc/src/unix/newlib/mod.rs
+++ b/vendor/libc/src/unix/newlib/mod.rs
@@ -621,7 +621,6 @@ extern "C" {
pub fn sem_init(sem: *mut sem_t, pshared: ::c_int, value: ::c_uint) -> ::c_int;
pub fn abs(i: ::c_int) -> ::c_int;
- pub fn atof(s: *const ::c_char) -> ::c_double;
pub fn labs(i: ::c_long) -> ::c_long;
pub fn rand() -> ::c_int;
pub fn srand(seed: ::c_uint);
diff --git a/vendor/libc/src/unix/nto/aarch64.rs b/vendor/libc/src/unix/nto/aarch64.rs
new file mode 100644
index 000000000..6faf8159c
--- /dev/null
+++ b/vendor/libc/src/unix/nto/aarch64.rs
@@ -0,0 +1,36 @@
+pub type c_char = u8;
+pub type wchar_t = u32;
+pub type c_long = i64;
+pub type c_ulong = u64;
+pub type time_t = i64;
+
+s! {
+ pub struct aarch64_qreg_t {
+ pub qlo: u64,
+ pub qhi: u64,
+ }
+
+ pub struct aarch64_fpu_registers {
+ pub reg: [::aarch64_qreg_t; 32],
+ pub fpsr: u32,
+ pub fpcr: u32,
+ }
+
+ pub struct aarch64_cpu_registers {
+ pub gpr: [u64; 32],
+ pub elr: u64,
+ pub pstate: u64,
+ }
+
+ #[repr(align(16))]
+ pub struct mcontext_t {
+ pub cpu: ::aarch64_cpu_registers,
+ pub fpu: ::aarch64_fpu_registers,
+ }
+
+ pub struct stack_t {
+ pub ss_sp: *mut ::c_void,
+ pub ss_size: ::size_t,
+ pub ss_flags: ::c_int,
+ }
+}
diff --git a/vendor/libc/src/unix/nto/mod.rs b/vendor/libc/src/unix/nto/mod.rs
new file mode 100644
index 000000000..45e97ceac
--- /dev/null
+++ b/vendor/libc/src/unix/nto/mod.rs
@@ -0,0 +1,3286 @@
+pub type clock_t = u32;
+
+pub type sa_family_t = u8;
+pub type speed_t = ::c_uint;
+pub type tcflag_t = ::c_uint;
+pub type clockid_t = ::c_int;
+pub type timer_t = ::c_int;
+pub type key_t = ::c_uint;
+pub type id_t = ::c_int;
+
+pub type useconds_t = u32;
+pub type dev_t = u32;
+pub type socklen_t = u32;
+pub type mode_t = u32;
+pub type rlim64_t = u64;
+pub type mqd_t = ::c_int;
+pub type nfds_t = ::c_uint;
+pub type idtype_t = ::c_uint;
+pub type errno_t = ::c_int;
+pub type rsize_t = c_ulong;
+
+pub type Elf32_Half = u16;
+pub type Elf32_Word = u32;
+pub type Elf32_Off = u32;
+pub type Elf32_Addr = u32;
+pub type Elf32_Lword = u64;
+pub type Elf32_Sword = i32;
+
+pub type Elf64_Half = u16;
+pub type Elf64_Word = u32;
+pub type Elf64_Off = u64;
+pub type Elf64_Addr = u64;
+pub type Elf64_Xword = u64;
+pub type Elf64_Sxword = i64;
+pub type Elf64_Lword = u64;
+pub type Elf64_Sword = i32;
+
+pub type Elf32_Section = u16;
+pub type Elf64_Section = u16;
+
+pub type _Time32t = u32;
+
+pub type pthread_t = ::c_int;
+pub type regoff_t = ::ssize_t;
+
+pub type nlink_t = u32;
+pub type blksize_t = u32;
+pub type suseconds_t = i32;
+
+pub type ino_t = u64;
+pub type off_t = i64;
+pub type blkcnt_t = u64;
+pub type msgqnum_t = u64;
+pub type msglen_t = u64;
+pub type fsblkcnt_t = u64;
+pub type fsfilcnt_t = u64;
+pub type rlim_t = u64;
+pub type posix_spawn_file_actions_t = *mut ::c_void;
+pub type posix_spawnattr_t = ::uintptr_t;
+
+pub type pthread_mutex_t = ::sync_t;
+pub type pthread_mutexattr_t = ::_sync_attr;
+pub type pthread_cond_t = ::sync_t;
+pub type pthread_condattr_t = ::_sync_attr;
+pub type pthread_rwlockattr_t = ::_sync_attr;
+pub type pthread_key_t = ::c_int;
+pub type pthread_spinlock_t = sync_t;
+pub type pthread_barrierattr_t = _sync_attr;
+pub type sem_t = sync_t;
+
+pub type nl_item = ::c_int;
+
+#[cfg_attr(feature = "extra_traits", derive(Debug))]
+pub enum timezone {}
+impl ::Copy for timezone {}
+impl ::Clone for timezone {
+ fn clone(&self) -> timezone {
+ *self
+ }
+}
+
+s! {
+ pub struct ip_mreq {
+ pub imr_multiaddr: in_addr,
+ pub imr_interface: in_addr,
+ }
+
+ #[repr(packed)]
+ pub struct in_addr {
+ pub s_addr: ::in_addr_t,
+ }
+
+ pub struct sockaddr {
+ pub sa_len: u8,
+ pub sa_family: sa_family_t,
+ pub sa_data: [::c_char; 14],
+ }
+
+ pub struct sockaddr_in {
+ pub sin_len: u8,
+ pub sin_family: sa_family_t,
+ pub sin_port: ::in_port_t,
+ pub sin_addr: ::in_addr,
+ pub sin_zero: [i8; 8],
+ }
+
+ pub struct sockaddr_in6 {
+ pub sin6_len: u8,
+ pub sin6_family: sa_family_t,
+ pub sin6_port: ::in_port_t,
+ pub sin6_flowinfo: u32,
+ pub sin6_addr: ::in6_addr,
+ pub sin6_scope_id: u32,
+ }
+
+ // The order of the `ai_addr` field in this struct is crucial
+ // for converting between the Rust and C types.
+ pub struct addrinfo {
+ pub ai_flags: ::c_int,
+ pub ai_family: ::c_int,
+ pub ai_socktype: ::c_int,
+ pub ai_protocol: ::c_int,
+ pub ai_addrlen: socklen_t,
+ pub ai_canonname: *mut c_char,
+ pub ai_addr: *mut ::sockaddr,
+ pub ai_next: *mut addrinfo,
+ }
+
+ pub struct fd_set {
+ fds_bits: [::c_uint; 2 * FD_SETSIZE / ULONG_SIZE],
+ }
+
+ pub struct tm {
+ pub tm_sec: ::c_int,
+ pub tm_min: ::c_int,
+ pub tm_hour: ::c_int,
+ pub tm_mday: ::c_int,
+ pub tm_mon: ::c_int,
+ pub tm_year: ::c_int,
+ pub tm_wday: ::c_int,
+ pub tm_yday: ::c_int,
+ pub tm_isdst: ::c_int,
+ pub tm_gmtoff: ::c_long,
+ pub tm_zone: *const ::c_char,
+ }
+
+ #[repr(align(8))]
+ pub struct sched_param {
+ pub sched_priority: ::c_int,
+ pub sched_curpriority: ::c_int,
+ pub reserved: [::c_int; 10],
+ }
+
+ #[repr(align(8))]
+ pub struct __sched_param {
+ pub __sched_priority: ::c_int,
+ pub __sched_curpriority: ::c_int,
+ pub reserved: [::c_int; 10],
+ }
+
+ pub struct Dl_info {
+ pub dli_fname: *const ::c_char,
+ pub dli_fbase: *mut ::c_void,
+ pub dli_sname: *const ::c_char,
+ pub dli_saddr: *mut ::c_void,
+ }
+
+ pub struct lconv {
+ pub currency_symbol: *mut ::c_char,
+ pub int_curr_symbol: *mut ::c_char,
+ pub mon_decimal_point: *mut ::c_char,
+ pub mon_grouping: *mut ::c_char,
+ pub mon_thousands_sep: *mut ::c_char,
+ pub negative_sign: *mut ::c_char,
+ pub positive_sign: *mut ::c_char,
+ pub frac_digits: ::c_char,
+ pub int_frac_digits: ::c_char,
+ pub n_cs_precedes: ::c_char,
+ pub n_sep_by_space: ::c_char,
+ pub n_sign_posn: ::c_char,
+ pub p_cs_precedes: ::c_char,
+ pub p_sep_by_space: ::c_char,
+ pub p_sign_posn: ::c_char,
+
+ pub int_n_cs_precedes: ::c_char,
+ pub int_n_sep_by_space: ::c_char,
+ pub int_n_sign_posn: ::c_char,
+ pub int_p_cs_precedes: ::c_char,
+ pub int_p_sep_by_space: ::c_char,
+ pub int_p_sign_posn: ::c_char,
+
+ pub decimal_point: *mut ::c_char,
+ pub grouping: *mut ::c_char,
+ pub thousands_sep: *mut ::c_char,
+
+ pub _Frac_grouping: *mut ::c_char,
+ pub _Frac_sep: *mut ::c_char,
+ pub _False: *mut ::c_char,
+ pub _True: *mut ::c_char,
+
+ pub _No: *mut ::c_char,
+ pub _Yes: *mut ::c_char,
+ pub _Nostr: *mut ::c_char,
+ pub _Yesstr: *mut ::c_char,
+ pub _Reserved: [*mut ::c_char; 8],
+ }
+
+ pub struct in_pktinfo {
+ pub ipi_addr: ::in_addr,
+ pub ipi_ifindex: ::c_uint,
+ }
+
+ pub struct ifaddrs {
+ pub ifa_next: *mut ifaddrs,
+ pub ifa_name: *mut c_char,
+ pub ifa_flags: ::c_uint,
+ pub ifa_addr: *mut ::sockaddr,
+ pub ifa_netmask: *mut ::sockaddr,
+ pub ifa_dstaddr: *mut ::sockaddr,
+ pub ifa_data: *mut ::c_void
+ }
+
+ pub struct arpreq {
+ pub arp_pa: ::sockaddr,
+ pub arp_ha: ::sockaddr,
+ pub arp_flags: ::c_int,
+ }
+
+ #[repr(packed)]
+ pub struct arphdr {
+ pub ar_hrd: u16,
+ pub ar_pro: u16,
+ pub ar_hln: u8,
+ pub ar_pln: u8,
+ pub ar_op: u16,
+ }
+
+ pub struct mmsghdr {
+ pub msg_hdr: ::msghdr,
+ pub msg_len: ::c_uint,
+ }
+
+ #[repr(align(8))]
+ pub struct siginfo_t {
+ pub si_signo: ::c_int,
+ pub si_code: ::c_int,
+ pub si_errno: ::c_int,
+ __data: [u8; 36], // union
+ }
+
+ pub struct sigaction {
+ pub sa_sigaction: ::sighandler_t,
+ pub sa_flags: ::c_int,
+ pub sa_mask: ::sigset_t,
+ }
+
+ pub struct _sync {
+ _union: ::c_uint,
+ __owner: ::c_uint,
+ }
+ pub struct rlimit64 {
+ pub rlim_cur: rlim64_t,
+ pub rlim_max: rlim64_t,
+ }
+
+ pub struct glob_t {
+ pub gl_pathc: ::size_t,
+ pub gl_matchc: ::c_int,
+ pub gl_pathv: *mut *mut c_char,
+ pub gl_offs: ::size_t,
+ pub gl_flags: ::c_int,
+ pub gl_errfunc: extern "C" fn(*const ::c_char, ::c_int) -> ::c_int,
+
+ __unused1: *mut ::c_void,
+ __unused2: *mut ::c_void,
+ __unused3: *mut ::c_void,
+ __unused4: *mut ::c_void,
+ __unused5: *mut ::c_void,
+ }
+
+ pub struct passwd {
+ pub pw_name: *mut ::c_char,
+ pub pw_passwd: *mut ::c_char,
+ pub pw_uid: ::uid_t,
+ pub pw_gid: ::gid_t,
+ pub pw_age: *mut ::c_char,
+ pub pw_comment: *mut ::c_char,
+ pub pw_gecos: *mut ::c_char,
+ pub pw_dir: *mut ::c_char,
+ pub pw_shell: *mut ::c_char,
+ }
+
+ pub struct if_nameindex {
+ pub if_index: ::c_uint,
+ pub if_name: *mut ::c_char,
+ }
+
+ pub struct sembuf {
+ pub sem_num: ::c_ushort,
+ pub sem_op: ::c_short,
+ pub sem_flg: ::c_short,
+ }
+
+ pub struct Elf32_Ehdr {
+ pub e_ident: [::c_uchar; 16],
+ pub e_type: Elf32_Half,
+ pub e_machine: Elf32_Half,
+ pub e_version: Elf32_Word,
+ pub e_entry: Elf32_Addr,
+ pub e_phoff: Elf32_Off,
+ pub e_shoff: Elf32_Off,
+ pub e_flags: Elf32_Word,
+ pub e_ehsize: Elf32_Half,
+ pub e_phentsize: Elf32_Half,
+ pub e_phnum: Elf32_Half,
+ pub e_shentsize: Elf32_Half,
+ pub e_shnum: Elf32_Half,
+ pub e_shstrndx: Elf32_Half,
+ }
+
+ pub struct Elf64_Ehdr {
+ pub e_ident: [::c_uchar; 16],
+ pub e_type: Elf64_Half,
+ pub e_machine: Elf64_Half,
+ pub e_version: Elf64_Word,
+ pub e_entry: Elf64_Addr,
+ pub e_phoff: Elf64_Off,
+ pub e_shoff: Elf64_Off,
+ pub e_flags: Elf64_Word,
+ pub e_ehsize: Elf64_Half,
+ pub e_phentsize: Elf64_Half,
+ pub e_phnum: Elf64_Half,
+ pub e_shentsize: Elf64_Half,
+ pub e_shnum: Elf64_Half,
+ pub e_shstrndx: Elf64_Half,
+ }
+
+ pub struct Elf32_Sym {
+ pub st_name: Elf32_Word,
+ pub st_value: Elf32_Addr,
+ pub st_size: Elf32_Word,
+ pub st_info: ::c_uchar,
+ pub st_other: ::c_uchar,
+ pub st_shndx: Elf32_Section,
+ }
+
+ pub struct Elf64_Sym {
+ pub st_name: Elf64_Word,
+ pub st_info: ::c_uchar,
+ pub st_other: ::c_uchar,
+ pub st_shndx: Elf64_Section,
+ pub st_value: Elf64_Addr,
+ pub st_size: Elf64_Xword,
+ }
+
+ pub struct Elf32_Phdr {
+ pub p_type: Elf32_Word,
+ pub p_offset: Elf32_Off,
+ pub p_vaddr: Elf32_Addr,
+ pub p_paddr: Elf32_Addr,
+ pub p_filesz: Elf32_Word,
+ pub p_memsz: Elf32_Word,
+ pub p_flags: Elf32_Word,
+ pub p_align: Elf32_Word,
+ }
+
+ pub struct Elf64_Phdr {
+ pub p_type: Elf64_Word,
+ pub p_flags: Elf64_Word,
+ pub p_offset: Elf64_Off,
+ pub p_vaddr: Elf64_Addr,
+ pub p_paddr: Elf64_Addr,
+ pub p_filesz: Elf64_Xword,
+ pub p_memsz: Elf64_Xword,
+ pub p_align: Elf64_Xword,
+ }
+
+ pub struct Elf32_Shdr {
+ pub sh_name: Elf32_Word,
+ pub sh_type: Elf32_Word,
+ pub sh_flags: Elf32_Word,
+ pub sh_addr: Elf32_Addr,
+ pub sh_offset: Elf32_Off,
+ pub sh_size: Elf32_Word,
+ pub sh_link: Elf32_Word,
+ pub sh_info: Elf32_Word,
+ pub sh_addralign: Elf32_Word,
+ pub sh_entsize: Elf32_Word,
+ }
+
+ pub struct Elf64_Shdr {
+ pub sh_name: Elf64_Word,
+ pub sh_type: Elf64_Word,
+ pub sh_flags: Elf64_Xword,
+ pub sh_addr: Elf64_Addr,
+ pub sh_offset: Elf64_Off,
+ pub sh_size: Elf64_Xword,
+ pub sh_link: Elf64_Word,
+ pub sh_info: Elf64_Word,
+ pub sh_addralign: Elf64_Xword,
+ pub sh_entsize: Elf64_Xword,
+ }
+
+ pub struct in6_pktinfo {
+ pub ipi6_addr: ::in6_addr,
+ pub ipi6_ifindex: ::c_uint,
+ }
+
+ pub struct inotify_event {
+ pub wd: ::c_int,
+ pub mask: u32,
+ pub cookie: u32,
+ pub len: u32
+ }
+
+ pub struct regmatch_t {
+ pub rm_so: regoff_t,
+ pub rm_eo: regoff_t,
+ }
+
+ pub struct msghdr {
+ pub msg_name: *mut ::c_void,
+ pub msg_namelen: ::socklen_t,
+ pub msg_iov: *mut ::iovec,
+ pub msg_iovlen: ::c_int,
+ pub msg_control: *mut ::c_void,
+ pub msg_controllen: ::socklen_t,
+ pub msg_flags: ::c_int,
+ }
+
+ pub struct cmsghdr {
+ pub cmsg_len: ::socklen_t,
+ pub cmsg_level: ::c_int,
+ pub cmsg_type: ::c_int,
+ }
+
+ pub struct termios {
+ pub c_iflag: ::tcflag_t,
+ pub c_oflag: ::tcflag_t,
+ pub c_cflag: ::tcflag_t,
+ pub c_lflag: ::tcflag_t,
+ pub c_cc: [::cc_t; ::NCCS],
+ __reserved: [::c_uint; 3],
+ pub c_ispeed: ::speed_t,
+ pub c_ospeed: ::speed_t,
+ }
+
+ pub struct mallinfo {
+ pub arena: ::c_int,
+ pub ordblks: ::c_int,
+ pub smblks: ::c_int,
+ pub hblks: ::c_int,
+ pub hblkhd: ::c_int,
+ pub usmblks: ::c_int,
+ pub fsmblks: ::c_int,
+ pub uordblks: ::c_int,
+ pub fordblks: ::c_int,
+ pub keepcost: ::c_int,
+ }
+
+ pub struct flock {
+ pub l_type: i16,
+ pub l_whence: i16,
+ pub l_zero1: i32,
+ pub l_start: ::off_t,
+ pub l_len: ::off_t,
+ pub l_pid: ::pid_t,
+ pub l_sysid: u32,
+ }
+
+ pub struct statvfs {
+ pub f_bsize: ::c_ulong,
+ pub f_frsize: ::c_ulong,
+ pub f_blocks: ::fsblkcnt_t,
+ pub f_bfree: ::fsblkcnt_t,
+ pub f_bavail: ::fsblkcnt_t,
+ pub f_files: ::fsfilcnt_t,
+ pub f_ffree: ::fsfilcnt_t,
+ pub f_favail: ::fsfilcnt_t,
+ pub f_fsid: ::c_ulong,
+ pub f_basetype: [::c_char; 16],
+ pub f_flag: ::c_ulong,
+ pub f_namemax: ::c_ulong,
+ f_filler: [::c_uint; 21],
+ }
+
+ pub struct aiocb {
+ pub aio_fildes: ::c_int,
+ pub aio_reqprio: ::c_int,
+ pub aio_offset: off_t,
+ pub aio_buf: *mut ::c_void,
+ pub aio_nbytes: ::size_t,
+ pub aio_sigevent: ::sigevent,
+ pub aio_lio_opcode: ::c_int,
+ pub _aio_lio_state: *mut ::c_void,
+ _aio_pad: [::c_int; 3],
+ pub _aio_next: *mut ::aiocb,
+ pub _aio_flag: ::c_uint,
+ pub _aio_iotype: ::c_uint,
+ pub _aio_result: ::ssize_t,
+ pub _aio_error: ::c_uint,
+ pub _aio_suspend: *mut ::c_void,
+ pub _aio_plist: *mut ::c_void,
+ pub _aio_policy: ::c_int,
+ pub _aio_param: ::__sched_param,
+ }
+
+ pub struct pthread_attr_t {
+ __data1: ::c_long,
+ __data2: [u8; 96]
+ }
+
+ pub struct ipc_perm {
+ pub uid: ::uid_t,
+ pub gid: ::gid_t,
+ pub cuid: ::uid_t,
+ pub cgid: ::gid_t,
+ pub mode: ::mode_t,
+ pub seq: ::c_uint,
+ pub key: ::key_t,
+ _reserved: [::c_int; 4],
+ }
+
+ pub struct regex_t {
+ re_magic: ::c_int,
+ re_nsub: ::size_t,
+ re_endp: *const ::c_char,
+ re_g: *mut ::c_void,
+ }
+
+ pub struct _thread_attr {
+ pub __flags: ::c_int,
+ pub __stacksize: ::size_t,
+ pub __stackaddr: *mut ::c_void,
+ pub __exitfunc: ::Option<unsafe extern "C" fn(_fake: *mut ::c_void)>,
+ pub __policy: ::c_int,
+ pub __param: ::__sched_param,
+ pub __guardsize: ::c_uint,
+ pub __prealloc: ::c_uint,
+ __spare: [::c_int; 2],
+ }
+
+ pub struct _sync_attr {
+ pub __protocol: ::c_int,
+ pub __flags: ::c_int,
+ pub __prioceiling: ::c_int,
+ pub __clockid: ::c_int,
+ pub __count: ::c_int,
+ __reserved: [::c_int; 3],
+ }
+
+ pub struct sockcred {
+ pub sc_uid: ::uid_t,
+ pub sc_euid: ::uid_t,
+ pub sc_gid: ::gid_t,
+ pub sc_egid: ::gid_t,
+ pub sc_ngroups: ::c_int,
+ pub sc_groups: [::gid_t; 1],
+ }
+
+ pub struct bpf_program {
+ pub bf_len: ::c_uint,
+ pub bf_insns: *mut ::bpf_insn,
+ }
+
+ pub struct bpf_stat {
+ pub bs_recv: u64,
+ pub bs_drop: u64,
+ pub bs_capt: u64,
+ bs_padding: [u64; 13],
+ }
+
+ pub struct bpf_version {
+ pub bv_major: ::c_ushort,
+ pub bv_minor: ::c_ushort,
+ }
+
+ pub struct bpf_hdr {
+ pub bh_tstamp: ::timeval,
+ pub bh_caplen: u32,
+ pub bh_datalen: u32,
+ pub bh_hdrlen: u16,
+ }
+
+ pub struct bpf_insn {
+ pub code: u16,
+ pub jt: ::c_uchar,
+ pub jf: ::c_uchar,
+ pub k: u32,
+ }
+
+ pub struct bpf_dltlist {
+ pub bfl_len: ::c_uint,
+ pub bfl_list: *mut ::c_uint,
+ }
+
+ pub struct unpcbid {
+ pub unp_pid: ::pid_t,
+ pub unp_euid: ::uid_t,
+ pub unp_egid: ::gid_t,
+ }
+
+ pub struct dl_phdr_info {
+ pub dlpi_addr: ::Elf64_Addr,
+ pub dlpi_name: *const ::c_char,
+ pub dlpi_phdr: *const ::Elf64_Phdr,
+ pub dlpi_phnum: ::Elf64_Half,
+ }
+
+ #[repr(align(8))]
+ pub struct ucontext_t {
+ pub uc_link: *mut ucontext_t,
+ pub uc_sigmask: ::sigset_t,
+ pub uc_stack: stack_t,
+ pub uc_mcontext: mcontext_t,
+ }
+}
+
+s_no_extra_traits! {
+ pub struct sockaddr_un {
+ pub sun_len: u8,
+ pub sun_family: sa_family_t,
+ pub sun_path: [::c_char; 104]
+ }
+
+ pub struct sockaddr_storage {
+ pub ss_len: u8,
+ pub ss_family: sa_family_t,
+ __ss_pad1: [::c_char; 6],
+ __ss_align: i64,
+ __ss_pad2: [::c_char; 112],
+ }
+
+ pub struct utsname {
+ pub sysname: [::c_char; _SYSNAME_SIZE],
+ pub nodename: [::c_char; _SYSNAME_SIZE],
+ pub release: [::c_char; _SYSNAME_SIZE],
+ pub version: [::c_char; _SYSNAME_SIZE],
+ pub machine: [::c_char; _SYSNAME_SIZE],
+ }
+
+ pub struct sigevent {
+ pub sigev_notify: ::c_int,
+ __sigev_un1: usize, // union
+ pub sigev_value: ::sigval,
+ __sigev_un2: usize, // union
+
+ }
+ pub struct dirent {
+ pub d_ino: ::ino_t,
+ pub d_offset: ::off_t,
+ pub d_reclen: ::c_short,
+ pub d_namelen: ::c_short,
+ pub d_name: [::c_char; 1], // flex array
+ }
+
+ pub struct dirent_extra {
+ pub d_datalen: u16,
+ pub d_type: u16,
+ pub d_reserved: u32,
+ }
+
+ pub struct stat {
+ pub st_ino: ::ino_t,
+ pub st_size: ::off_t,
+ pub st_dev: ::dev_t,
+ pub st_rdev: ::dev_t,
+ pub st_uid: ::uid_t,
+ pub st_gid: ::gid_t,
+ pub __old_st_mtime: ::_Time32t,
+ pub __old_st_atime: ::_Time32t,
+ pub __old_st_ctime: ::_Time32t,
+ pub st_mode: ::mode_t,
+ pub st_nlink: ::nlink_t,
+ pub st_blocksize: ::blksize_t,
+ pub st_nblocks: i32,
+ pub st_blksize: ::blksize_t,
+ pub st_blocks: ::blkcnt_t,
+ pub st_mtim: ::timespec,
+ pub st_atim: ::timespec,
+ pub st_ctim: ::timespec,
+ }
+
+ pub struct sigset_t {
+ __val: [u32; 2],
+ }
+
+ pub struct mq_attr {
+ pub mq_maxmsg: ::c_long,
+ pub mq_msgsize: ::c_long,
+ pub mq_flags: ::c_long,
+ pub mq_curmsgs: ::c_long,
+ pub mq_sendwait: ::c_long,
+ pub mq_recvwait: ::c_long,
+ }
+
+ pub struct msg {
+ pub msg_next: *mut ::msg,
+ pub msg_type: ::c_long,
+ pub msg_ts: ::c_ushort,
+ pub msg_spot: ::c_short,
+ _pad: [u8; 4],
+ }
+
+ pub struct msqid_ds {
+ pub msg_perm: ::ipc_perm,
+ pub msg_first: *mut ::msg,
+ pub msg_last: *mut ::msg,
+ pub msg_cbytes: ::msglen_t,
+ pub msg_qnum: ::msgqnum_t,
+ pub msg_qbytes: ::msglen_t,
+ pub msg_lspid: ::pid_t,
+ pub msg_lrpid: ::pid_t,
+ pub msg_stime: ::time_t,
+ msg_pad1: ::c_long,
+ pub msg_rtime: ::time_t,
+ msg_pad2: ::c_long,
+ pub msg_ctime: ::time_t,
+ msg_pad3: ::c_long,
+ msg_pad4: [::c_long; 4],
+ }
+
+ pub struct sockaddr_dl {
+ pub sdl_len: ::c_uchar,
+ pub sdl_family: ::sa_family_t,
+ pub sdl_index: u16,
+ pub sdl_type: ::c_uchar,
+ pub sdl_nlen: ::c_uchar,
+ pub sdl_alen: ::c_uchar,
+ pub sdl_slen: ::c_uchar,
+ pub sdl_data: [::c_char; 12],
+ }
+
+ pub struct sync_t {
+ __u: ::c_uint, // union
+ pub __owner: ::c_uint,
+ }
+
+ #[repr(align(4))]
+ pub struct pthread_barrier_t { // union
+ __pad: [u8; 28], // union
+ }
+
+ pub struct pthread_rwlock_t {
+ pub __active: ::c_int,
+ pub __blockedwriters: ::c_int,
+ pub __blockedreaders: ::c_int,
+ pub __heavy: ::c_int,
+ pub __lock: ::pthread_mutex_t, // union
+ pub __rcond: ::pthread_cond_t, // union
+ pub __wcond: ::pthread_cond_t, // union
+ pub __owner: ::c_uint,
+ pub __spare: ::c_uint,
+ }
+}
+
+cfg_if! {
+ if #[cfg(feature = "extra_traits")] {
+ impl PartialEq for sockaddr_un {
+ fn eq(&self, other: &sockaddr_un) -> bool {
+ self.sun_len == other.sun_len
+ && self.sun_family == other.sun_family
+ && self
+ .sun_path
+ .iter()
+ .zip(other.sun_path.iter())
+ .all(|(a,b)| a == b)
+ }
+ }
+
+ impl Eq for sockaddr_un {}
+
+ impl ::fmt::Debug for sockaddr_un {
+ fn fmt(&self, f: &mut ::fmt::Formatter) -> ::fmt::Result {
+ f.debug_struct("sockaddr_un")
+ .field("sun_len", &self.sun_len)
+ .field("sun_family", &self.sun_family)
+ // FIXME: .field("sun_path", &self.sun_path)
+ .finish()
+ }
+ }
+
+ impl ::hash::Hash for sockaddr_un {
+ fn hash<H: ::hash::Hasher>(&self, state: &mut H) {
+ self.sun_len.hash(state);
+ self.sun_family.hash(state);
+ self.sun_path.hash(state);
+ }
+ }
+
+ impl PartialEq for utsname {
+ fn eq(&self, other: &utsname) -> bool {
+ self.sysname
+ .iter()
+ .zip(other.sysname.iter())
+ .all(|(a,b)| a == b)
+ && self
+ .nodename
+ .iter()
+ .zip(other.nodename.iter())
+ .all(|(a,b)| a == b)
+ && self
+ .release
+ .iter()
+ .zip(other.release.iter())
+ .all(|(a,b)| a == b)
+ && self
+ .version
+ .iter()
+ .zip(other.version.iter())
+ .all(|(a,b)| a == b)
+ && self
+ .machine
+ .iter()
+ .zip(other.machine.iter())
+ .all(|(a,b)| a == b)
+ }
+ }
+
+ impl Eq for utsname {}
+
+ impl ::fmt::Debug for utsname {
+ fn fmt(&self, f: &mut ::fmt::Formatter) -> ::fmt::Result {
+ f.debug_struct("utsname")
+ // FIXME: .field("sysname", &self.sysname)
+ // FIXME: .field("nodename", &self.nodename)
+ // FIXME: .field("release", &self.release)
+ // FIXME: .field("version", &self.version)
+ // FIXME: .field("machine", &self.machine)
+ .finish()
+ }
+ }
+
+ impl ::hash::Hash for utsname {
+ fn hash<H: ::hash::Hasher>(&self, state: &mut H) {
+ self.sysname.hash(state);
+ self.nodename.hash(state);
+ self.release.hash(state);
+ self.version.hash(state);
+ self.machine.hash(state);
+ }
+ }
+
+ impl PartialEq for mq_attr {
+ fn eq(&self, other: &mq_attr) -> bool {
+ self.mq_maxmsg == other.mq_maxmsg &&
+ self.mq_msgsize == other.mq_msgsize &&
+ self.mq_flags == other.mq_flags &&
+ self.mq_curmsgs == other.mq_curmsgs &&
+ self.mq_msgsize == other.mq_msgsize &&
+ self.mq_sendwait == other.mq_sendwait &&
+ self.mq_recvwait == other.mq_recvwait
+ }
+ }
+
+ impl Eq for mq_attr {}
+
+ impl ::fmt::Debug for mq_attr {
+ fn fmt(&self, f: &mut ::fmt::Formatter) -> ::fmt::Result {
+ f.debug_struct("mq_attr")
+ .field("mq_maxmsg", &self.mq_maxmsg)
+ .field("mq_msgsize", &self.mq_msgsize)
+ .field("mq_flags", &self.mq_flags)
+ .field("mq_curmsgs", &self.mq_curmsgs)
+ .field("mq_msgsize", &self.mq_msgsize)
+ .field("mq_sendwait", &self.mq_sendwait)
+ .field("mq_recvwait", &self.mq_recvwait)
+ .finish()
+ }
+ }
+
+ impl PartialEq for sockaddr_storage {
+ fn eq(&self, other: &sockaddr_storage) -> bool {
+ self.ss_len == other.ss_len
+ && self.ss_family == other.ss_family
+ && self.__ss_pad1 == other.__ss_pad1
+ && self.__ss_align == other.__ss_align
+ && self
+ .__ss_pad2
+ .iter()
+ .zip(other.__ss_pad2.iter())
+ .all(|(a, b)| a == b)
+ }
+ }
+
+ impl Eq for sockaddr_storage {}
+
+ impl ::fmt::Debug for sockaddr_storage {
+ fn fmt(&self, f: &mut ::fmt::Formatter) -> ::fmt::Result {
+ f.debug_struct("sockaddr_storage")
+ .field("ss_len", &self.ss_len)
+ .field("ss_family", &self.ss_family)
+ .field("__ss_pad1", &self.__ss_pad1)
+ .field("__ss_align", &self.__ss_align)
+ // FIXME: .field("__ss_pad2", &self.__ss_pad2)
+ .finish()
+ }
+ }
+
+ impl ::hash::Hash for sockaddr_storage {
+ fn hash<H: ::hash::Hasher>(&self, state: &mut H) {
+ self.ss_len.hash(state);
+ self.ss_family.hash(state);
+ self.__ss_pad1.hash(state);
+ self.__ss_align.hash(state);
+ self.__ss_pad2.hash(state);
+ }
+ }
+
+ impl PartialEq for dirent {
+ fn eq(&self, other: &dirent) -> bool {
+ self.d_ino == other.d_ino
+ && self.d_offset == other.d_offset
+ && self.d_reclen == other.d_reclen
+ && self.d_namelen == other.d_namelen
+ && self
+ .d_name[..self.d_namelen as _]
+ .iter()
+ .zip(other.d_name.iter())
+ .all(|(a,b)| a == b)
+ }
+ }
+
+ impl Eq for dirent {}
+
+ impl ::fmt::Debug for dirent {
+ fn fmt(&self, f: &mut ::fmt::Formatter) -> ::fmt::Result {
+ f.debug_struct("dirent")
+ .field("d_ino", &self.d_ino)
+ .field("d_offset", &self.d_offset)
+ .field("d_reclen", &self.d_reclen)
+ .field("d_namelen", &self.d_namelen)
+ .field("d_name", &&self.d_name[..self.d_namelen as _])
+ .finish()
+ }
+ }
+
+ impl ::hash::Hash for dirent {
+ fn hash<H: ::hash::Hasher>(&self, state: &mut H) {
+ self.d_ino.hash(state);
+ self.d_offset.hash(state);
+ self.d_reclen.hash(state);
+ self.d_namelen.hash(state);
+ self.d_name[..self.d_namelen as _].hash(state);
+ }
+ }
+ }
+}
+
+pub const _SYSNAME_SIZE: usize = 256 + 1;
+pub const RLIM_INFINITY: ::rlim_t = 0xfffffffffffffffd;
+pub const O_LARGEFILE: ::c_int = 0o0100000;
+
+// intentionally not public, only used for fd_set
+cfg_if! {
+ if #[cfg(target_pointer_width = "32")] {
+ const ULONG_SIZE: usize = 32;
+ } else if #[cfg(target_pointer_width = "64")] {
+ const ULONG_SIZE: usize = 64;
+ } else {
+ // Unknown target_pointer_width
+ }
+}
+
+pub const EXIT_FAILURE: ::c_int = 1;
+pub const EXIT_SUCCESS: ::c_int = 0;
+pub const RAND_MAX: ::c_int = 32767;
+pub const EOF: ::c_int = -1;
+pub const SEEK_SET: ::c_int = 0;
+pub const SEEK_CUR: ::c_int = 1;
+pub const SEEK_END: ::c_int = 2;
+pub const _IOFBF: ::c_int = 0;
+pub const _IONBF: ::c_int = 2;
+pub const _IOLBF: ::c_int = 1;
+
+pub const F_DUPFD: ::c_int = 0;
+pub const F_GETFD: ::c_int = 1;
+pub const F_SETFD: ::c_int = 2;
+pub const F_GETFL: ::c_int = 3;
+pub const F_SETFL: ::c_int = 4;
+
+pub const F_DUPFD_CLOEXEC: ::c_int = 5;
+
+pub const SIGTRAP: ::c_int = 5;
+
+pub const CLOCK_REALTIME: ::clockid_t = 0;
+pub const CLOCK_MONOTONIC: ::clockid_t = 2;
+pub const CLOCK_PROCESS_CPUTIME_ID: ::clockid_t = 3;
+pub const CLOCK_THREAD_CPUTIME_ID: ::clockid_t = 4;
+pub const TIMER_ABSTIME: ::c_uint = 0x80000000;
+
+pub const RUSAGE_SELF: ::c_int = 0;
+
+pub const F_OK: ::c_int = 0;
+pub const X_OK: ::c_int = 1;
+pub const W_OK: ::c_int = 2;
+pub const R_OK: ::c_int = 4;
+
+pub const STDIN_FILENO: ::c_int = 0;
+pub const STDOUT_FILENO: ::c_int = 1;
+pub const STDERR_FILENO: ::c_int = 2;
+
+pub const SIGHUP: ::c_int = 1;
+pub const SIGINT: ::c_int = 2;
+pub const SIGQUIT: ::c_int = 3;
+pub const SIGILL: ::c_int = 4;
+pub const SIGABRT: ::c_int = 6;
+pub const SIGFPE: ::c_int = 8;
+pub const SIGKILL: ::c_int = 9;
+pub const SIGSEGV: ::c_int = 11;
+pub const SIGPIPE: ::c_int = 13;
+pub const SIGALRM: ::c_int = 14;
+pub const SIGTERM: ::c_int = 15;
+
+pub const PROT_NONE: ::c_int = 0x00000000;
+pub const PROT_READ: ::c_int = 0x00000100;
+pub const PROT_WRITE: ::c_int = 0x00000200;
+pub const PROT_EXEC: ::c_int = 0x00000400;
+
+pub const MAP_FILE: ::c_int = 0;
+pub const MAP_SHARED: ::c_int = 1;
+pub const MAP_PRIVATE: ::c_int = 2;
+pub const MAP_FIXED: ::c_int = 0x10;
+
+pub const MAP_FAILED: *mut ::c_void = !0 as *mut ::c_void;
+
+pub const MS_ASYNC: ::c_int = 1;
+pub const MS_INVALIDATE: ::c_int = 4;
+pub const MS_SYNC: ::c_int = 2;
+
+pub const SCM_RIGHTS: ::c_int = 0x01;
+pub const SCM_TIMESTAMP: ::c_int = 0x02;
+pub const SCM_CREDS: ::c_int = 0x04;
+
+pub const MAP_TYPE: ::c_int = 0x3;
+
+pub const IFF_UP: ::c_int = 0x00000001;
+pub const IFF_BROADCAST: ::c_int = 0x00000002;
+pub const IFF_DEBUG: ::c_int = 0x00000004;
+pub const IFF_LOOPBACK: ::c_int = 0x00000008;
+pub const IFF_POINTOPOINT: ::c_int = 0x00000010;
+pub const IFF_NOTRAILERS: ::c_int = 0x00000020;
+pub const IFF_RUNNING: ::c_int = 0x00000040;
+pub const IFF_NOARP: ::c_int = 0x00000080;
+pub const IFF_PROMISC: ::c_int = 0x00000100;
+pub const IFF_ALLMULTI: ::c_int = 0x00000200;
+pub const IFF_MULTICAST: ::c_int = 0x00008000;
+
+pub const AF_UNSPEC: ::c_int = 0;
+pub const AF_UNIX: ::c_int = AF_LOCAL;
+pub const AF_LOCAL: ::c_int = 1;
+pub const AF_INET: ::c_int = 2;
+pub const AF_IPX: ::c_int = 23;
+pub const AF_APPLETALK: ::c_int = 16;
+pub const AF_INET6: ::c_int = 24;
+pub const AF_ROUTE: ::c_int = 17;
+pub const AF_SNA: ::c_int = 11;
+pub const AF_BLUETOOTH: ::c_int = 31;
+pub const AF_ISDN: ::c_int = 26;
+
+pub const PF_UNSPEC: ::c_int = AF_UNSPEC;
+pub const PF_UNIX: ::c_int = PF_LOCAL;
+pub const PF_LOCAL: ::c_int = AF_LOCAL;
+pub const PF_INET: ::c_int = AF_INET;
+pub const PF_IPX: ::c_int = AF_IPX;
+pub const PF_APPLETALK: ::c_int = AF_APPLETALK;
+pub const PF_INET6: ::c_int = AF_INET6;
+pub const pseudo_AF_KEY: ::c_int = 29;
+pub const PF_KEY: ::c_int = pseudo_AF_KEY;
+pub const PF_ROUTE: ::c_int = AF_ROUTE;
+pub const PF_SNA: ::c_int = AF_SNA;
+
+pub const PF_BLUETOOTH: ::c_int = AF_BLUETOOTH;
+pub const PF_ISDN: ::c_int = AF_ISDN;
+
+pub const SOMAXCONN: ::c_int = 128;
+
+pub const MSG_OOB: ::c_int = 0x0001;
+pub const MSG_PEEK: ::c_int = 0x0002;
+pub const MSG_DONTROUTE: ::c_int = 0x0004;
+pub const MSG_CTRUNC: ::c_int = 0x0020;
+pub const MSG_TRUNC: ::c_int = 0x0010;
+pub const MSG_DONTWAIT: ::c_int = 0x0080;
+pub const MSG_EOR: ::c_int = 0x0008;
+pub const MSG_WAITALL: ::c_int = 0x0040;
+pub const MSG_NOSIGNAL: ::c_int = 0x0800;
+pub const MSG_WAITFORONE: ::c_int = 0x2000;
+
+pub const IP_TOS: ::c_int = 3;
+pub const IP_TTL: ::c_int = 4;
+pub const IP_HDRINCL: ::c_int = 2;
+pub const IP_OPTIONS: ::c_int = 1;
+pub const IP_RECVOPTS: ::c_int = 5;
+pub const IP_RETOPTS: ::c_int = 8;
+pub const IP_PKTINFO: ::c_int = 25;
+pub const IP_IPSEC_POLICY_COMPAT: ::c_int = 22;
+pub const IP_MULTICAST_IF: ::c_int = 9;
+pub const IP_MULTICAST_TTL: ::c_int = 10;
+pub const IP_MULTICAST_LOOP: ::c_int = 11;
+pub const IP_ADD_MEMBERSHIP: ::c_int = 12;
+pub const IP_DROP_MEMBERSHIP: ::c_int = 13;
+pub const IP_DEFAULT_MULTICAST_TTL: ::c_int = 1;
+pub const IP_DEFAULT_MULTICAST_LOOP: ::c_int = 1;
+
+pub const IPPROTO_HOPOPTS: ::c_int = 0;
+pub const IPPROTO_IGMP: ::c_int = 2;
+pub const IPPROTO_IPIP: ::c_int = 4;
+pub const IPPROTO_EGP: ::c_int = 8;
+pub const IPPROTO_PUP: ::c_int = 12;
+pub const IPPROTO_IDP: ::c_int = 22;
+pub const IPPROTO_TP: ::c_int = 29;
+pub const IPPROTO_ROUTING: ::c_int = 43;
+pub const IPPROTO_FRAGMENT: ::c_int = 44;
+pub const IPPROTO_RSVP: ::c_int = 46;
+pub const IPPROTO_GRE: ::c_int = 47;
+pub const IPPROTO_ESP: ::c_int = 50;
+pub const IPPROTO_AH: ::c_int = 51;
+pub const IPPROTO_NONE: ::c_int = 59;
+pub const IPPROTO_DSTOPTS: ::c_int = 60;
+pub const IPPROTO_ENCAP: ::c_int = 98;
+pub const IPPROTO_PIM: ::c_int = 103;
+pub const IPPROTO_SCTP: ::c_int = 132;
+pub const IPPROTO_RAW: ::c_int = 255;
+pub const IPPROTO_MAX: ::c_int = 256;
+pub const IPPROTO_CARP: ::c_int = 112;
+pub const IPPROTO_DIVERT: ::c_int = 259;
+pub const IPPROTO_DONE: ::c_int = 257;
+pub const IPPROTO_EON: ::c_int = 80;
+pub const IPPROTO_ETHERIP: ::c_int = 97;
+pub const IPPROTO_GGP: ::c_int = 3;
+pub const IPPROTO_IPCOMP: ::c_int = 108;
+pub const IPPROTO_MOBILE: ::c_int = 55;
+
+pub const IPV6_RTHDR_LOOSE: ::c_int = 0;
+pub const IPV6_RTHDR_STRICT: ::c_int = 1;
+pub const IPV6_UNICAST_HOPS: ::c_int = 4;
+pub const IPV6_MULTICAST_IF: ::c_int = 9;
+pub const IPV6_MULTICAST_HOPS: ::c_int = 10;
+pub const IPV6_MULTICAST_LOOP: ::c_int = 11;
+pub const IPV6_JOIN_GROUP: ::c_int = 12;
+pub const IPV6_LEAVE_GROUP: ::c_int = 13;
+pub const IPV6_CHECKSUM: ::c_int = 26;
+pub const IPV6_V6ONLY: ::c_int = 27;
+pub const IPV6_IPSEC_POLICY_COMPAT: ::c_int = 28;
+pub const IPV6_RTHDRDSTOPTS: ::c_int = 35;
+pub const IPV6_RECVPKTINFO: ::c_int = 36;
+pub const IPV6_RECVHOPLIMIT: ::c_int = 37;
+pub const IPV6_RECVRTHDR: ::c_int = 38;
+pub const IPV6_RECVHOPOPTS: ::c_int = 39;
+pub const IPV6_RECVDSTOPTS: ::c_int = 40;
+pub const IPV6_RECVPATHMTU: ::c_int = 43;
+pub const IPV6_PATHMTU: ::c_int = 44;
+pub const IPV6_PKTINFO: ::c_int = 46;
+pub const IPV6_HOPLIMIT: ::c_int = 47;
+pub const IPV6_NEXTHOP: ::c_int = 48;
+pub const IPV6_HOPOPTS: ::c_int = 49;
+pub const IPV6_DSTOPTS: ::c_int = 50;
+pub const IPV6_RECVTCLASS: ::c_int = 57;
+pub const IPV6_TCLASS: ::c_int = 61;
+pub const IPV6_DONTFRAG: ::c_int = 62;
+
+pub const TCP_NODELAY: ::c_int = 0x01;
+pub const TCP_MAXSEG: ::c_int = 0x02;
+pub const TCP_MD5SIG: ::c_int = 0x10;
+pub const TCP_KEEPALIVE: ::c_int = 0x04;
+
+pub const SHUT_RD: ::c_int = 0;
+pub const SHUT_WR: ::c_int = 1;
+pub const SHUT_RDWR: ::c_int = 2;
+
+pub const LOCK_SH: ::c_int = 0x1;
+pub const LOCK_EX: ::c_int = 0x2;
+pub const LOCK_NB: ::c_int = 0x4;
+pub const LOCK_UN: ::c_int = 0x8;
+
+pub const SS_ONSTACK: ::c_int = 1;
+pub const SS_DISABLE: ::c_int = 2;
+
+pub const PATH_MAX: ::c_int = 1024;
+
+pub const UIO_MAXIOV: ::c_int = 1024;
+
+pub const FD_SETSIZE: usize = 256;
+
+pub const TCIOFF: ::c_int = 0x0002;
+pub const TCION: ::c_int = 0x0003;
+pub const TCOOFF: ::c_int = 0x0000;
+pub const TCOON: ::c_int = 0x0001;
+pub const TCIFLUSH: ::c_int = 0;
+pub const TCOFLUSH: ::c_int = 1;
+pub const TCIOFLUSH: ::c_int = 2;
+pub const NL0: ::tcflag_t = 0x000;
+pub const NL1: ::tcflag_t = 0x100;
+pub const TAB0: ::tcflag_t = 0x0000;
+pub const CR0: ::tcflag_t = 0x000;
+pub const FF0: ::tcflag_t = 0x0000;
+pub const BS0: ::tcflag_t = 0x0000;
+pub const VT0: ::tcflag_t = 0x0000;
+pub const VERASE: usize = 2;
+pub const VKILL: usize = 3;
+pub const VINTR: usize = 0;
+pub const VQUIT: usize = 1;
+pub const VLNEXT: usize = 15;
+pub const IGNBRK: ::tcflag_t = 0x00000001;
+pub const BRKINT: ::tcflag_t = 0x00000002;
+pub const IGNPAR: ::tcflag_t = 0x00000004;
+pub const PARMRK: ::tcflag_t = 0x00000008;
+pub const INPCK: ::tcflag_t = 0x00000010;
+pub const ISTRIP: ::tcflag_t = 0x00000020;
+pub const INLCR: ::tcflag_t = 0x00000040;
+pub const IGNCR: ::tcflag_t = 0x00000080;
+pub const ICRNL: ::tcflag_t = 0x00000100;
+pub const IXANY: ::tcflag_t = 0x00000800;
+pub const IMAXBEL: ::tcflag_t = 0x00002000;
+pub const OPOST: ::tcflag_t = 0x00000001;
+pub const CS5: ::tcflag_t = 0x00;
+pub const ECHO: ::tcflag_t = 0x00000008;
+pub const OCRNL: ::tcflag_t = 0x00000008;
+pub const ONOCR: ::tcflag_t = 0x00000010;
+pub const ONLRET: ::tcflag_t = 0x00000020;
+pub const OFILL: ::tcflag_t = 0x00000040;
+pub const OFDEL: ::tcflag_t = 0x00000080;
+
+pub const WNOHANG: ::c_int = 0x0040;
+pub const WUNTRACED: ::c_int = 0x0004;
+pub const WSTOPPED: ::c_int = WUNTRACED;
+pub const WEXITED: ::c_int = 0x0001;
+pub const WCONTINUED: ::c_int = 0x0008;
+pub const WNOWAIT: ::c_int = 0x0080;
+pub const WTRAPPED: ::c_int = 0x0002;
+
+pub const RTLD_LOCAL: ::c_int = 0x0200;
+pub const RTLD_LAZY: ::c_int = 0x0001;
+
+pub const POSIX_FADV_NORMAL: ::c_int = 0;
+pub const POSIX_FADV_RANDOM: ::c_int = 2;
+pub const POSIX_FADV_SEQUENTIAL: ::c_int = 1;
+pub const POSIX_FADV_WILLNEED: ::c_int = 3;
+
+pub const AT_FDCWD: ::c_int = -100;
+pub const AT_EACCESS: ::c_int = 0x0001;
+pub const AT_SYMLINK_NOFOLLOW: ::c_int = 0x0002;
+pub const AT_SYMLINK_FOLLOW: ::c_int = 0x0004;
+pub const AT_REMOVEDIR: ::c_int = 0x0008;
+
+pub const LOG_CRON: ::c_int = 9 << 3;
+pub const LOG_AUTHPRIV: ::c_int = 10 << 3;
+pub const LOG_FTP: ::c_int = 11 << 3;
+pub const LOG_PERROR: ::c_int = 0x20;
+
+pub const PIPE_BUF: usize = 5120;
+
+pub const CLD_EXITED: ::c_int = 1;
+pub const CLD_KILLED: ::c_int = 2;
+pub const CLD_DUMPED: ::c_int = 3;
+pub const CLD_TRAPPED: ::c_int = 4;
+pub const CLD_STOPPED: ::c_int = 5;
+pub const CLD_CONTINUED: ::c_int = 6;
+
+pub const UTIME_OMIT: c_long = 0x40000002;
+pub const UTIME_NOW: c_long = 0x40000001;
+
+pub const POLLIN: ::c_short = POLLRDNORM | POLLRDBAND;
+pub const POLLPRI: ::c_short = 0x0008;
+pub const POLLOUT: ::c_short = 0x0002;
+pub const POLLERR: ::c_short = 0x0020;
+pub const POLLHUP: ::c_short = 0x0040;
+pub const POLLNVAL: ::c_short = 0x1000;
+pub const POLLRDNORM: ::c_short = 0x0001;
+pub const POLLRDBAND: ::c_short = 0x0004;
+
+pub const IPTOS_LOWDELAY: u8 = 0x10;
+pub const IPTOS_THROUGHPUT: u8 = 0x08;
+pub const IPTOS_RELIABILITY: u8 = 0x04;
+pub const IPTOS_MINCOST: u8 = 0x02;
+
+pub const IPTOS_PREC_NETCONTROL: u8 = 0xe0;
+pub const IPTOS_PREC_INTERNETCONTROL: u8 = 0xc0;
+pub const IPTOS_PREC_CRITIC_ECP: u8 = 0xa0;
+pub const IPTOS_PREC_FLASHOVERRIDE: u8 = 0x80;
+pub const IPTOS_PREC_FLASH: u8 = 0x60;
+pub const IPTOS_PREC_IMMEDIATE: u8 = 0x40;
+pub const IPTOS_PREC_PRIORITY: u8 = 0x20;
+pub const IPTOS_PREC_ROUTINE: u8 = 0x00;
+
+pub const IPTOS_ECN_MASK: u8 = 0x03;
+pub const IPTOS_ECN_ECT1: u8 = 0x01;
+pub const IPTOS_ECN_ECT0: u8 = 0x02;
+pub const IPTOS_ECN_CE: u8 = 0x03;
+
+pub const IPOPT_CONTROL: u8 = 0x00;
+pub const IPOPT_RESERVED1: u8 = 0x20;
+pub const IPOPT_RESERVED2: u8 = 0x60;
+pub const IPOPT_LSRR: u8 = 131;
+pub const IPOPT_RR: u8 = 7;
+pub const IPOPT_SSRR: u8 = 137;
+pub const IPDEFTTL: u8 = 64;
+pub const IPOPT_OPTVAL: u8 = 0;
+pub const IPOPT_OLEN: u8 = 1;
+pub const IPOPT_OFFSET: u8 = 2;
+pub const IPOPT_MINOFF: u8 = 4;
+pub const IPOPT_NOP: u8 = 1;
+pub const IPOPT_EOL: u8 = 0;
+pub const IPOPT_TS: u8 = 68;
+pub const IPOPT_TS_TSONLY: u8 = 0;
+pub const IPOPT_TS_TSANDADDR: u8 = 1;
+pub const IPOPT_TS_PRESPEC: u8 = 3;
+
+pub const MAX_IPOPTLEN: u8 = 40;
+pub const IPVERSION: u8 = 4;
+pub const MAXTTL: u8 = 255;
+
+pub const ARPHRD_ETHER: u16 = 1;
+pub const ARPHRD_IEEE802: u16 = 6;
+pub const ARPHRD_ARCNET: u16 = 7;
+pub const ARPHRD_IEEE1394: u16 = 24;
+
+pub const SOL_SOCKET: ::c_int = 0xffff;
+
+pub const SO_DEBUG: ::c_int = 0x0001;
+pub const SO_REUSEADDR: ::c_int = 0x0004;
+pub const SO_TYPE: ::c_int = 0x1008;
+pub const SO_ERROR: ::c_int = 0x1007;
+pub const SO_DONTROUTE: ::c_int = 0x0010;
+pub const SO_BROADCAST: ::c_int = 0x0020;
+pub const SO_SNDBUF: ::c_int = 0x1001;
+pub const SO_RCVBUF: ::c_int = 0x1002;
+pub const SO_KEEPALIVE: ::c_int = 0x0008;
+pub const SO_OOBINLINE: ::c_int = 0x0100;
+pub const SO_LINGER: ::c_int = 0x0080;
+pub const SO_REUSEPORT: ::c_int = 0x0200;
+pub const SO_RCVLOWAT: ::c_int = 0x1004;
+pub const SO_SNDLOWAT: ::c_int = 0x1003;
+pub const SO_RCVTIMEO: ::c_int = 0x1006;
+pub const SO_SNDTIMEO: ::c_int = 0x1005;
+pub const SO_BINDTODEVICE: ::c_int = 0x0800;
+pub const SO_TIMESTAMP: ::c_int = 0x0400;
+pub const SO_ACCEPTCONN: ::c_int = 0x0002;
+
+pub const TIOCM_LE: ::c_int = 0x0100;
+pub const TIOCM_DTR: ::c_int = 0x0001;
+pub const TIOCM_RTS: ::c_int = 0x0002;
+pub const TIOCM_ST: ::c_int = 0x0200;
+pub const TIOCM_SR: ::c_int = 0x0400;
+pub const TIOCM_CTS: ::c_int = 0x1000;
+pub const TIOCM_CAR: ::c_int = TIOCM_CD;
+pub const TIOCM_CD: ::c_int = 0x8000;
+pub const TIOCM_RNG: ::c_int = TIOCM_RI;
+pub const TIOCM_RI: ::c_int = 0x4000;
+pub const TIOCM_DSR: ::c_int = 0x2000;
+
+pub const SCHED_OTHER: ::c_int = 3;
+pub const SCHED_FIFO: ::c_int = 1;
+pub const SCHED_RR: ::c_int = 2;
+
+pub const IPC_PRIVATE: ::key_t = 0;
+
+pub const IPC_CREAT: ::c_int = 0o001000;
+pub const IPC_EXCL: ::c_int = 0o002000;
+pub const IPC_NOWAIT: ::c_int = 0o004000;
+
+pub const IPC_RMID: ::c_int = 0;
+pub const IPC_SET: ::c_int = 1;
+pub const IPC_STAT: ::c_int = 2;
+
+pub const MSG_NOERROR: ::c_int = 0o010000;
+
+pub const LOG_NFACILITIES: ::c_int = 24;
+
+pub const SEM_FAILED: *mut ::sem_t = 0xFFFFFFFFFFFFFFFF as *mut sem_t;
+
+pub const AI_PASSIVE: ::c_int = 0x00000001;
+pub const AI_CANONNAME: ::c_int = 0x00000002;
+pub const AI_NUMERICHOST: ::c_int = 0x00000004;
+
+pub const AI_NUMERICSERV: ::c_int = 0x00000008;
+
+pub const EAI_BADFLAGS: ::c_int = 3;
+pub const EAI_NONAME: ::c_int = 8;
+pub const EAI_AGAIN: ::c_int = 2;
+pub const EAI_FAIL: ::c_int = 4;
+pub const EAI_NODATA: ::c_int = 7;
+pub const EAI_FAMILY: ::c_int = 5;
+pub const EAI_SOCKTYPE: ::c_int = 10;
+pub const EAI_SERVICE: ::c_int = 9;
+pub const EAI_MEMORY: ::c_int = 6;
+pub const EAI_SYSTEM: ::c_int = 11;
+pub const EAI_OVERFLOW: ::c_int = 14;
+
+pub const NI_NUMERICHOST: ::c_int = 0x00000002;
+pub const NI_NUMERICSERV: ::c_int = 0x00000008;
+pub const NI_NOFQDN: ::c_int = 0x00000001;
+pub const NI_NAMEREQD: ::c_int = 0x00000004;
+pub const NI_DGRAM: ::c_int = 0x00000010;
+
+pub const AIO_CANCELED: ::c_int = 0;
+pub const AIO_NOTCANCELED: ::c_int = 2;
+pub const AIO_ALLDONE: ::c_int = 1;
+pub const LIO_READ: ::c_int = 1;
+pub const LIO_WRITE: ::c_int = 2;
+pub const LIO_NOP: ::c_int = 0;
+pub const LIO_WAIT: ::c_int = 1;
+pub const LIO_NOWAIT: ::c_int = 0;
+
+pub const ITIMER_REAL: ::c_int = 0;
+pub const ITIMER_VIRTUAL: ::c_int = 1;
+pub const ITIMER_PROF: ::c_int = 2;
+
+pub const POSIX_SPAWN_RESETIDS: ::c_int = 0x00000010;
+pub const POSIX_SPAWN_SETPGROUP: ::c_int = 0x00000001;
+pub const POSIX_SPAWN_SETSIGDEF: ::c_int = 0x00000004;
+pub const POSIX_SPAWN_SETSIGMASK: ::c_int = 0x00000002;
+pub const POSIX_SPAWN_SETSCHEDPARAM: ::c_int = 0x00000400;
+pub const POSIX_SPAWN_SETSCHEDULER: ::c_int = 0x00000040;
+
+pub const IPTOS_ECN_NOT_ECT: u8 = 0x00;
+
+pub const RTF_UP: ::c_ushort = 0x0001;
+pub const RTF_GATEWAY: ::c_ushort = 0x0002;
+
+pub const RTF_HOST: ::c_ushort = 0x0004;
+pub const RTF_DYNAMIC: ::c_ushort = 0x0010;
+pub const RTF_MODIFIED: ::c_ushort = 0x0020;
+pub const RTF_REJECT: ::c_ushort = 0x0008;
+pub const RTF_STATIC: ::c_ushort = 0x0800;
+pub const RTF_XRESOLVE: ::c_ushort = 0x0200;
+pub const RTF_BROADCAST: u32 = 0x80000;
+pub const RTM_NEWADDR: u16 = 0xc;
+pub const RTM_DELADDR: u16 = 0xd;
+pub const RTA_DST: ::c_ushort = 0x1;
+pub const RTA_GATEWAY: ::c_ushort = 0x2;
+
+pub const UDP_ENCAP: ::c_int = 100;
+
+pub const IN_ACCESS: u32 = 0x00000001;
+pub const IN_MODIFY: u32 = 0x00000002;
+pub const IN_ATTRIB: u32 = 0x00000004;
+pub const IN_CLOSE_WRITE: u32 = 0x00000008;
+pub const IN_CLOSE_NOWRITE: u32 = 0x00000010;
+pub const IN_CLOSE: u32 = IN_CLOSE_WRITE | IN_CLOSE_NOWRITE;
+pub const IN_OPEN: u32 = 0x00000020;
+pub const IN_MOVED_FROM: u32 = 0x00000040;
+pub const IN_MOVED_TO: u32 = 0x00000080;
+pub const IN_MOVE: u32 = IN_MOVED_FROM | IN_MOVED_TO;
+pub const IN_CREATE: u32 = 0x00000100;
+pub const IN_DELETE: u32 = 0x00000200;
+pub const IN_DELETE_SELF: u32 = 0x00000400;
+pub const IN_MOVE_SELF: u32 = 0x00000800;
+pub const IN_UNMOUNT: u32 = 0x00002000;
+pub const IN_Q_OVERFLOW: u32 = 0x00004000;
+pub const IN_IGNORED: u32 = 0x00008000;
+pub const IN_ONLYDIR: u32 = 0x01000000;
+pub const IN_DONT_FOLLOW: u32 = 0x02000000;
+
+pub const IN_ISDIR: u32 = 0x40000000;
+pub const IN_ONESHOT: u32 = 0x80000000;
+
+pub const REG_EXTENDED: ::c_int = 0o0001;
+pub const REG_ICASE: ::c_int = 0o0002;
+pub const REG_NEWLINE: ::c_int = 0o0010;
+pub const REG_NOSUB: ::c_int = 0o0004;
+
+pub const REG_NOTBOL: ::c_int = 0o00001;
+pub const REG_NOTEOL: ::c_int = 0o00002;
+
+pub const REG_ENOSYS: ::c_int = 17;
+pub const REG_NOMATCH: ::c_int = 1;
+pub const REG_BADPAT: ::c_int = 2;
+pub const REG_ECOLLATE: ::c_int = 3;
+pub const REG_ECTYPE: ::c_int = 4;
+pub const REG_EESCAPE: ::c_int = 5;
+pub const REG_ESUBREG: ::c_int = 6;
+pub const REG_EBRACK: ::c_int = 7;
+pub const REG_EPAREN: ::c_int = 8;
+pub const REG_EBRACE: ::c_int = 9;
+pub const REG_BADBR: ::c_int = 10;
+pub const REG_ERANGE: ::c_int = 11;
+pub const REG_ESPACE: ::c_int = 12;
+pub const REG_BADRPT: ::c_int = 13;
+
+// errno.h
+pub const EOK: ::c_int = 0;
+pub const EWOULDBLOCK: ::c_int = EAGAIN;
+pub const EPERM: ::c_int = 1;
+pub const ENOENT: ::c_int = 2;
+pub const ESRCH: ::c_int = 3;
+pub const EINTR: ::c_int = 4;
+pub const EIO: ::c_int = 5;
+pub const ENXIO: ::c_int = 6;
+pub const E2BIG: ::c_int = 7;
+pub const ENOEXEC: ::c_int = 8;
+pub const EBADF: ::c_int = 9;
+pub const ECHILD: ::c_int = 10;
+pub const EAGAIN: ::c_int = 11;
+pub const ENOMEM: ::c_int = 12;
+pub const EACCES: ::c_int = 13;
+pub const EFAULT: ::c_int = 14;
+pub const ENOTBLK: ::c_int = 15;
+pub const EBUSY: ::c_int = 16;
+pub const EEXIST: ::c_int = 17;
+pub const EXDEV: ::c_int = 18;
+pub const ENODEV: ::c_int = 19;
+pub const ENOTDIR: ::c_int = 20;
+pub const EISDIR: ::c_int = 21;
+pub const EINVAL: ::c_int = 22;
+pub const ENFILE: ::c_int = 23;
+pub const EMFILE: ::c_int = 24;
+pub const ENOTTY: ::c_int = 25;
+pub const ETXTBSY: ::c_int = 26;
+pub const EFBIG: ::c_int = 27;
+pub const ENOSPC: ::c_int = 28;
+pub const ESPIPE: ::c_int = 29;
+pub const EROFS: ::c_int = 30;
+pub const EMLINK: ::c_int = 31;
+pub const EPIPE: ::c_int = 32;
+pub const EDOM: ::c_int = 33;
+pub const ERANGE: ::c_int = 34;
+pub const ENOMSG: ::c_int = 35;
+pub const EIDRM: ::c_int = 36;
+pub const ECHRNG: ::c_int = 37;
+pub const EL2NSYNC: ::c_int = 38;
+pub const EL3HLT: ::c_int = 39;
+pub const EL3RST: ::c_int = 40;
+pub const ELNRNG: ::c_int = 41;
+pub const EUNATCH: ::c_int = 42;
+pub const ENOCSI: ::c_int = 43;
+pub const EL2HLT: ::c_int = 44;
+pub const EDEADLK: ::c_int = 45;
+pub const ENOLCK: ::c_int = 46;
+pub const ECANCELED: ::c_int = 47;
+pub const EDQUOT: ::c_int = 49;
+pub const EBADE: ::c_int = 50;
+pub const EBADR: ::c_int = 51;
+pub const EXFULL: ::c_int = 52;
+pub const ENOANO: ::c_int = 53;
+pub const EBADRQC: ::c_int = 54;
+pub const EBADSLT: ::c_int = 55;
+pub const EDEADLOCK: ::c_int = 56;
+pub const EBFONT: ::c_int = 57;
+pub const EOWNERDEAD: ::c_int = 58;
+pub const ENOSTR: ::c_int = 60;
+pub const ENODATA: ::c_int = 61;
+pub const ETIME: ::c_int = 62;
+pub const ENOSR: ::c_int = 63;
+pub const ENONET: ::c_int = 64;
+pub const ENOPKG: ::c_int = 65;
+pub const EREMOTE: ::c_int = 66;
+pub const ENOLINK: ::c_int = 67;
+pub const EADV: ::c_int = 68;
+pub const ESRMNT: ::c_int = 69;
+pub const ECOMM: ::c_int = 70;
+pub const EPROTO: ::c_int = 71;
+pub const EMULTIHOP: ::c_int = 74;
+pub const EBADMSG: ::c_int = 77;
+pub const ENAMETOOLONG: ::c_int = 78;
+pub const EOVERFLOW: ::c_int = 79;
+pub const ENOTUNIQ: ::c_int = 80;
+pub const EBADFD: ::c_int = 81;
+pub const EREMCHG: ::c_int = 82;
+pub const ELIBACC: ::c_int = 83;
+pub const ELIBBAD: ::c_int = 84;
+pub const ELIBSCN: ::c_int = 85;
+pub const ELIBMAX: ::c_int = 86;
+pub const ELIBEXEC: ::c_int = 87;
+pub const EILSEQ: ::c_int = 88;
+pub const ENOSYS: ::c_int = 89;
+pub const ELOOP: ::c_int = 90;
+pub const ERESTART: ::c_int = 91;
+pub const ESTRPIPE: ::c_int = 92;
+pub const ENOTEMPTY: ::c_int = 93;
+pub const EUSERS: ::c_int = 94;
+pub const ENOTRECOVERABLE: ::c_int = 95;
+pub const EOPNOTSUPP: ::c_int = 103;
+pub const EFPOS: ::c_int = 110;
+pub const ESTALE: ::c_int = 122;
+pub const EINPROGRESS: ::c_int = 236;
+pub const EALREADY: ::c_int = 237;
+pub const ENOTSOCK: ::c_int = 238;
+pub const EDESTADDRREQ: ::c_int = 239;
+pub const EMSGSIZE: ::c_int = 240;
+pub const EPROTOTYPE: ::c_int = 241;
+pub const ENOPROTOOPT: ::c_int = 242;
+pub const EPROTONOSUPPORT: ::c_int = 243;
+pub const ESOCKTNOSUPPORT: ::c_int = 244;
+pub const EPFNOSUPPORT: ::c_int = 246;
+pub const EAFNOSUPPORT: ::c_int = 247;
+pub const EADDRINUSE: ::c_int = 248;
+pub const EADDRNOTAVAIL: ::c_int = 249;
+pub const ENETDOWN: ::c_int = 250;
+pub const ENETUNREACH: ::c_int = 251;
+pub const ENETRESET: ::c_int = 252;
+pub const ECONNABORTED: ::c_int = 253;
+pub const ECONNRESET: ::c_int = 254;
+pub const ENOBUFS: ::c_int = 255;
+pub const EISCONN: ::c_int = 256;
+pub const ENOTCONN: ::c_int = 257;
+pub const ESHUTDOWN: ::c_int = 258;
+pub const ETOOMANYREFS: ::c_int = 259;
+pub const ETIMEDOUT: ::c_int = 260;
+pub const ECONNREFUSED: ::c_int = 261;
+pub const EHOSTDOWN: ::c_int = 264;
+pub const EHOSTUNREACH: ::c_int = 265;
+pub const EBADRPC: ::c_int = 272;
+pub const ERPCMISMATCH: ::c_int = 273;
+pub const EPROGUNAVAIL: ::c_int = 274;
+pub const EPROGMISMATCH: ::c_int = 275;
+pub const EPROCUNAVAIL: ::c_int = 276;
+pub const ENOREMOTE: ::c_int = 300;
+pub const ENONDP: ::c_int = 301;
+pub const EBADFSYS: ::c_int = 302;
+pub const EMORE: ::c_int = 309;
+pub const ECTRLTERM: ::c_int = 310;
+pub const ENOLIC: ::c_int = 311;
+pub const ESRVRFAULT: ::c_int = 312;
+pub const EENDIAN: ::c_int = 313;
+pub const ESECTYPEINVAL: ::c_int = 314;
+
+pub const RUSAGE_CHILDREN: ::c_int = -1;
+pub const L_tmpnam: ::c_uint = 255;
+
+pub const _PC_LINK_MAX: ::c_int = 1;
+pub const _PC_MAX_CANON: ::c_int = 2;
+pub const _PC_MAX_INPUT: ::c_int = 3;
+pub const _PC_NAME_MAX: ::c_int = 4;
+pub const _PC_PATH_MAX: ::c_int = 5;
+pub const _PC_PIPE_BUF: ::c_int = 6;
+pub const _PC_CHOWN_RESTRICTED: ::c_int = 9;
+pub const _PC_NO_TRUNC: ::c_int = 7;
+pub const _PC_VDISABLE: ::c_int = 8;
+pub const _PC_SYNC_IO: ::c_int = 14;
+pub const _PC_ASYNC_IO: ::c_int = 12;
+pub const _PC_PRIO_IO: ::c_int = 13;
+pub const _PC_SOCK_MAXBUF: ::c_int = 15;
+pub const _PC_FILESIZEBITS: ::c_int = 16;
+pub const _PC_REC_INCR_XFER_SIZE: ::c_int = 22;
+pub const _PC_REC_MAX_XFER_SIZE: ::c_int = 23;
+pub const _PC_REC_MIN_XFER_SIZE: ::c_int = 24;
+pub const _PC_REC_XFER_ALIGN: ::c_int = 25;
+pub const _PC_ALLOC_SIZE_MIN: ::c_int = 21;
+pub const _PC_SYMLINK_MAX: ::c_int = 17;
+pub const _PC_2_SYMLINKS: ::c_int = 20;
+
+pub const _SC_PAGE_SIZE: ::c_int = _SC_PAGESIZE;
+pub const _SC_ARG_MAX: ::c_int = 1;
+pub const _SC_CHILD_MAX: ::c_int = 2;
+pub const _SC_CLK_TCK: ::c_int = 3;
+pub const _SC_NGROUPS_MAX: ::c_int = 4;
+pub const _SC_OPEN_MAX: ::c_int = 5;
+pub const _SC_JOB_CONTROL: ::c_int = 6;
+pub const _SC_SAVED_IDS: ::c_int = 7;
+pub const _SC_VERSION: ::c_int = 8;
+pub const _SC_PASS_MAX: ::c_int = 9;
+pub const _SC_PAGESIZE: ::c_int = 11;
+pub const _SC_XOPEN_VERSION: ::c_int = 12;
+pub const _SC_STREAM_MAX: ::c_int = 13;
+pub const _SC_TZNAME_MAX: ::c_int = 14;
+pub const _SC_AIO_LISTIO_MAX: ::c_int = 15;
+pub const _SC_AIO_MAX: ::c_int = 16;
+pub const _SC_AIO_PRIO_DELTA_MAX: ::c_int = 17;
+pub const _SC_DELAYTIMER_MAX: ::c_int = 18;
+pub const _SC_MQ_OPEN_MAX: ::c_int = 19;
+pub const _SC_MQ_PRIO_MAX: ::c_int = 20;
+pub const _SC_RTSIG_MAX: ::c_int = 21;
+pub const _SC_SEM_NSEMS_MAX: ::c_int = 22;
+pub const _SC_SEM_VALUE_MAX: ::c_int = 23;
+pub const _SC_SIGQUEUE_MAX: ::c_int = 24;
+pub const _SC_TIMER_MAX: ::c_int = 25;
+pub const _SC_ASYNCHRONOUS_IO: ::c_int = 26;
+pub const _SC_FSYNC: ::c_int = 27;
+pub const _SC_MAPPED_FILES: ::c_int = 28;
+pub const _SC_MEMLOCK: ::c_int = 29;
+pub const _SC_MEMLOCK_RANGE: ::c_int = 30;
+pub const _SC_MEMORY_PROTECTION: ::c_int = 31;
+pub const _SC_MESSAGE_PASSING: ::c_int = 32;
+pub const _SC_PRIORITIZED_IO: ::c_int = 33;
+pub const _SC_PRIORITY_SCHEDULING: ::c_int = 34;
+pub const _SC_REALTIME_SIGNALS: ::c_int = 35;
+pub const _SC_SEMAPHORES: ::c_int = 36;
+pub const _SC_SHARED_MEMORY_OBJECTS: ::c_int = 37;
+pub const _SC_SYNCHRONIZED_IO: ::c_int = 38;
+pub const _SC_TIMERS: ::c_int = 39;
+pub const _SC_GETGR_R_SIZE_MAX: ::c_int = 40;
+pub const _SC_GETPW_R_SIZE_MAX: ::c_int = 41;
+pub const _SC_LOGIN_NAME_MAX: ::c_int = 42;
+pub const _SC_THREAD_DESTRUCTOR_ITERATIONS: ::c_int = 43;
+pub const _SC_THREAD_KEYS_MAX: ::c_int = 44;
+pub const _SC_THREAD_STACK_MIN: ::c_int = 45;
+pub const _SC_THREAD_THREADS_MAX: ::c_int = 46;
+pub const _SC_TTY_NAME_MAX: ::c_int = 47;
+pub const _SC_THREADS: ::c_int = 48;
+pub const _SC_THREAD_ATTR_STACKADDR: ::c_int = 49;
+pub const _SC_THREAD_ATTR_STACKSIZE: ::c_int = 50;
+pub const _SC_THREAD_PRIORITY_SCHEDULING: ::c_int = 51;
+pub const _SC_THREAD_PRIO_INHERIT: ::c_int = 52;
+pub const _SC_THREAD_PRIO_PROTECT: ::c_int = 53;
+pub const _SC_THREAD_PROCESS_SHARED: ::c_int = 54;
+pub const _SC_THREAD_SAFE_FUNCTIONS: ::c_int = 55;
+pub const _SC_2_CHAR_TERM: ::c_int = 56;
+pub const _SC_2_C_BIND: ::c_int = 57;
+pub const _SC_2_C_DEV: ::c_int = 58;
+pub const _SC_2_C_VERSION: ::c_int = 59;
+pub const _SC_2_FORT_DEV: ::c_int = 60;
+pub const _SC_2_FORT_RUN: ::c_int = 61;
+pub const _SC_2_LOCALEDEF: ::c_int = 62;
+pub const _SC_2_SW_DEV: ::c_int = 63;
+pub const _SC_2_UPE: ::c_int = 64;
+pub const _SC_2_VERSION: ::c_int = 65;
+pub const _SC_ATEXIT_MAX: ::c_int = 66;
+pub const _SC_AVPHYS_PAGES: ::c_int = 67;
+pub const _SC_BC_BASE_MAX: ::c_int = 68;
+pub const _SC_BC_DIM_MAX: ::c_int = 69;
+pub const _SC_BC_SCALE_MAX: ::c_int = 70;
+pub const _SC_BC_STRING_MAX: ::c_int = 71;
+pub const _SC_CHARCLASS_NAME_MAX: ::c_int = 72;
+pub const _SC_CHAR_BIT: ::c_int = 73;
+pub const _SC_CHAR_MAX: ::c_int = 74;
+pub const _SC_CHAR_MIN: ::c_int = 75;
+pub const _SC_COLL_WEIGHTS_MAX: ::c_int = 76;
+pub const _SC_EQUIV_CLASS_MAX: ::c_int = 77;
+pub const _SC_EXPR_NEST_MAX: ::c_int = 78;
+pub const _SC_INT_MAX: ::c_int = 79;
+pub const _SC_INT_MIN: ::c_int = 80;
+pub const _SC_LINE_MAX: ::c_int = 81;
+pub const _SC_LONG_BIT: ::c_int = 82;
+pub const _SC_MB_LEN_MAX: ::c_int = 83;
+pub const _SC_NL_ARGMAX: ::c_int = 84;
+pub const _SC_NL_LANGMAX: ::c_int = 85;
+pub const _SC_NL_MSGMAX: ::c_int = 86;
+pub const _SC_NL_NMAX: ::c_int = 87;
+pub const _SC_NL_SETMAX: ::c_int = 88;
+pub const _SC_NL_TEXTMAX: ::c_int = 89;
+pub const _SC_NPROCESSORS_CONF: ::c_int = 90;
+pub const _SC_NPROCESSORS_ONLN: ::c_int = 91;
+pub const _SC_NZERO: ::c_int = 92;
+pub const _SC_PHYS_PAGES: ::c_int = 93;
+pub const _SC_PII: ::c_int = 94;
+pub const _SC_PII_INTERNET: ::c_int = 95;
+pub const _SC_PII_INTERNET_DGRAM: ::c_int = 96;
+pub const _SC_PII_INTERNET_STREAM: ::c_int = 97;
+pub const _SC_PII_OSI: ::c_int = 98;
+pub const _SC_PII_OSI_CLTS: ::c_int = 99;
+pub const _SC_PII_OSI_COTS: ::c_int = 100;
+pub const _SC_PII_OSI_M: ::c_int = 101;
+pub const _SC_PII_SOCKET: ::c_int = 102;
+pub const _SC_PII_XTI: ::c_int = 103;
+pub const _SC_POLL: ::c_int = 104;
+pub const _SC_RE_DUP_MAX: ::c_int = 105;
+pub const _SC_SCHAR_MAX: ::c_int = 106;
+pub const _SC_SCHAR_MIN: ::c_int = 107;
+pub const _SC_SELECT: ::c_int = 108;
+pub const _SC_SHRT_MAX: ::c_int = 109;
+pub const _SC_SHRT_MIN: ::c_int = 110;
+pub const _SC_SSIZE_MAX: ::c_int = 111;
+pub const _SC_T_IOV_MAX: ::c_int = 112;
+pub const _SC_UCHAR_MAX: ::c_int = 113;
+pub const _SC_UINT_MAX: ::c_int = 114;
+pub const _SC_UIO_MAXIOV: ::c_int = 115;
+pub const _SC_ULONG_MAX: ::c_int = 116;
+pub const _SC_USHRT_MAX: ::c_int = 117;
+pub const _SC_WORD_BIT: ::c_int = 118;
+pub const _SC_XOPEN_CRYPT: ::c_int = 119;
+pub const _SC_XOPEN_ENH_I18N: ::c_int = 120;
+pub const _SC_XOPEN_SHM: ::c_int = 121;
+pub const _SC_XOPEN_UNIX: ::c_int = 122;
+pub const _SC_XOPEN_XCU_VERSION: ::c_int = 123;
+pub const _SC_XOPEN_XPG2: ::c_int = 124;
+pub const _SC_XOPEN_XPG3: ::c_int = 125;
+pub const _SC_XOPEN_XPG4: ::c_int = 126;
+pub const _SC_XBS5_ILP32_OFF32: ::c_int = 127;
+pub const _SC_XBS5_ILP32_OFFBIG: ::c_int = 128;
+pub const _SC_XBS5_LP64_OFF64: ::c_int = 129;
+pub const _SC_XBS5_LPBIG_OFFBIG: ::c_int = 130;
+pub const _SC_ADVISORY_INFO: ::c_int = 131;
+pub const _SC_CPUTIME: ::c_int = 132;
+pub const _SC_SPAWN: ::c_int = 133;
+pub const _SC_SPORADIC_SERVER: ::c_int = 134;
+pub const _SC_THREAD_CPUTIME: ::c_int = 135;
+pub const _SC_THREAD_SPORADIC_SERVER: ::c_int = 136;
+pub const _SC_TIMEOUTS: ::c_int = 137;
+pub const _SC_BARRIERS: ::c_int = 138;
+pub const _SC_CLOCK_SELECTION: ::c_int = 139;
+pub const _SC_MONOTONIC_CLOCK: ::c_int = 140;
+pub const _SC_READER_WRITER_LOCKS: ::c_int = 141;
+pub const _SC_SPIN_LOCKS: ::c_int = 142;
+pub const _SC_TYPED_MEMORY_OBJECTS: ::c_int = 143;
+pub const _SC_TRACE_EVENT_FILTER: ::c_int = 144;
+pub const _SC_TRACE: ::c_int = 145;
+pub const _SC_TRACE_INHERIT: ::c_int = 146;
+pub const _SC_TRACE_LOG: ::c_int = 147;
+pub const _SC_2_PBS: ::c_int = 148;
+pub const _SC_2_PBS_ACCOUNTING: ::c_int = 149;
+pub const _SC_2_PBS_CHECKPOINT: ::c_int = 150;
+pub const _SC_2_PBS_LOCATE: ::c_int = 151;
+pub const _SC_2_PBS_MESSAGE: ::c_int = 152;
+pub const _SC_2_PBS_TRACK: ::c_int = 153;
+pub const _SC_HOST_NAME_MAX: ::c_int = 154;
+pub const _SC_IOV_MAX: ::c_int = 155;
+pub const _SC_IPV6: ::c_int = 156;
+pub const _SC_RAW_SOCKETS: ::c_int = 157;
+pub const _SC_REGEXP: ::c_int = 158;
+pub const _SC_SHELL: ::c_int = 159;
+pub const _SC_SS_REPL_MAX: ::c_int = 160;
+pub const _SC_SYMLOOP_MAX: ::c_int = 161;
+pub const _SC_TRACE_EVENT_NAME_MAX: ::c_int = 162;
+pub const _SC_TRACE_NAME_MAX: ::c_int = 163;
+pub const _SC_TRACE_SYS_MAX: ::c_int = 164;
+pub const _SC_TRACE_USER_EVENT_MAX: ::c_int = 165;
+pub const _SC_V6_ILP32_OFF32: ::c_int = 166;
+pub const _SC_V6_ILP32_OFFBIG: ::c_int = 167;
+pub const _SC_V6_LP64_OFF64: ::c_int = 168;
+pub const _SC_V6_LPBIG_OFFBIG: ::c_int = 169;
+pub const _SC_XOPEN_REALTIME: ::c_int = 170;
+pub const _SC_XOPEN_REALTIME_THREADS: ::c_int = 171;
+pub const _SC_XOPEN_LEGACY: ::c_int = 172;
+pub const _SC_XOPEN_STREAMS: ::c_int = 173;
+pub const _SC_V7_ILP32_OFF32: ::c_int = 176;
+pub const _SC_V7_ILP32_OFFBIG: ::c_int = 177;
+pub const _SC_V7_LP64_OFF64: ::c_int = 178;
+pub const _SC_V7_LPBIG_OFFBIG: ::c_int = 179;
+
+pub const GLOB_ERR: ::c_int = 0x0001;
+pub const GLOB_MARK: ::c_int = 0x0002;
+pub const GLOB_NOSORT: ::c_int = 0x0004;
+pub const GLOB_DOOFFS: ::c_int = 0x0008;
+pub const GLOB_NOCHECK: ::c_int = 0x0010;
+pub const GLOB_APPEND: ::c_int = 0x0020;
+pub const GLOB_NOESCAPE: ::c_int = 0x0040;
+
+pub const GLOB_NOSPACE: ::c_int = 1;
+pub const GLOB_ABORTED: ::c_int = 2;
+pub const GLOB_NOMATCH: ::c_int = 3;
+
+pub const S_IEXEC: mode_t = ::S_IXUSR;
+pub const S_IWRITE: mode_t = ::S_IWUSR;
+pub const S_IREAD: mode_t = ::S_IRUSR;
+
+pub const S_IFIFO: ::mode_t = 0x1000;
+pub const S_IFCHR: ::mode_t = 0x2000;
+pub const S_IFDIR: ::mode_t = 0x4000;
+pub const S_IFBLK: ::mode_t = 0x6000;
+pub const S_IFREG: ::mode_t = 0x8000;
+pub const S_IFLNK: ::mode_t = 0xA000;
+pub const S_IFSOCK: ::mode_t = 0xC000;
+pub const S_IFMT: ::mode_t = 0xF000;
+
+pub const S_IXOTH: ::mode_t = 0o000001;
+pub const S_IWOTH: ::mode_t = 0o000002;
+pub const S_IROTH: ::mode_t = 0o000004;
+pub const S_IRWXO: ::mode_t = 0o000007;
+pub const S_IXGRP: ::mode_t = 0o000010;
+pub const S_IWGRP: ::mode_t = 0o000020;
+pub const S_IRGRP: ::mode_t = 0o000040;
+pub const S_IRWXG: ::mode_t = 0o000070;
+pub const S_IXUSR: ::mode_t = 0o000100;
+pub const S_IWUSR: ::mode_t = 0o000200;
+pub const S_IRUSR: ::mode_t = 0o000400;
+pub const S_IRWXU: ::mode_t = 0o000700;
+
+pub const F_LOCK: ::c_int = 1;
+pub const F_TEST: ::c_int = 3;
+pub const F_TLOCK: ::c_int = 2;
+pub const F_ULOCK: ::c_int = 0;
+
+pub const ST_RDONLY: ::c_ulong = 0x01;
+pub const ST_NOSUID: ::c_ulong = 0x04;
+pub const ST_NOEXEC: ::c_ulong = 0x02;
+pub const ST_NOATIME: ::c_ulong = 0x20;
+
+pub const RTLD_NEXT: *mut ::c_void = -3i64 as *mut ::c_void;
+pub const RTLD_DEFAULT: *mut ::c_void = -2i64 as *mut ::c_void;
+pub const RTLD_NODELETE: ::c_int = 0x1000;
+pub const RTLD_NOW: ::c_int = 0x0002;
+
+pub const EMPTY: ::c_short = 0;
+pub const RUN_LVL: ::c_short = 1;
+pub const BOOT_TIME: ::c_short = 2;
+pub const NEW_TIME: ::c_short = 4;
+pub const OLD_TIME: ::c_short = 3;
+pub const INIT_PROCESS: ::c_short = 5;
+pub const LOGIN_PROCESS: ::c_short = 6;
+pub const USER_PROCESS: ::c_short = 7;
+pub const DEAD_PROCESS: ::c_short = 8;
+pub const ACCOUNTING: ::c_short = 9;
+
+pub const ENOTSUP: ::c_int = 48;
+
+pub const BUFSIZ: ::c_uint = 1024;
+pub const TMP_MAX: ::c_uint = 26 * 26 * 26;
+pub const FOPEN_MAX: ::c_uint = 16;
+pub const FILENAME_MAX: ::c_uint = 255;
+
+pub const NI_MAXHOST: ::socklen_t = 1025;
+pub const M_KEEP: ::c_int = 4;
+pub const REG_STARTEND: ::c_int = 0o00004;
+pub const VEOF: usize = 4;
+
+pub const RTLD_GLOBAL: ::c_int = 0x0100;
+pub const RTLD_NOLOAD: ::c_int = 0x0004;
+
+pub const O_RDONLY: ::c_int = 0o000000;
+pub const O_WRONLY: ::c_int = 0o000001;
+pub const O_RDWR: ::c_int = 0o000002;
+
+pub const O_EXEC: ::c_int = 0o00003;
+pub const O_ASYNC: ::c_int = 0o0200000;
+pub const O_NDELAY: ::c_int = O_NONBLOCK;
+pub const O_TRUNC: ::c_int = 0o001000;
+pub const O_CLOEXEC: ::c_int = 0o020000;
+pub const O_DIRECTORY: ::c_int = 0o4000000;
+pub const O_ACCMODE: ::c_int = 0o000007;
+pub const O_APPEND: ::c_int = 0o000010;
+pub const O_CREAT: ::c_int = 0o000400;
+pub const O_EXCL: ::c_int = 0o002000;
+pub const O_NOCTTY: ::c_int = 0o004000;
+pub const O_NONBLOCK: ::c_int = 0o000200;
+pub const O_SYNC: ::c_int = 0o000040;
+pub const O_RSYNC: ::c_int = 0o000100;
+pub const O_DSYNC: ::c_int = 0o000020;
+pub const O_NOFOLLOW: ::c_int = 0o010000;
+
+pub const POSIX_FADV_DONTNEED: ::c_int = 4;
+pub const POSIX_FADV_NOREUSE: ::c_int = 5;
+
+pub const SOCK_SEQPACKET: ::c_int = 5;
+pub const SOCK_STREAM: ::c_int = 1;
+pub const SOCK_DGRAM: ::c_int = 2;
+pub const SOCK_RAW: ::c_int = 3;
+pub const SOCK_RDM: ::c_int = 4;
+pub const SOCK_CLOEXEC: ::c_int = 0x10000000;
+
+pub const SA_SIGINFO: ::c_int = 0x0002;
+pub const SA_NOCLDWAIT: ::c_int = 0x0020;
+pub const SA_NODEFER: ::c_int = 0x0010;
+pub const SA_RESETHAND: ::c_int = 0x0004;
+pub const SA_NOCLDSTOP: ::c_int = 0x0001;
+
+pub const SIGTTIN: ::c_int = 26;
+pub const SIGTTOU: ::c_int = 27;
+pub const SIGXCPU: ::c_int = 30;
+pub const SIGXFSZ: ::c_int = 31;
+pub const SIGVTALRM: ::c_int = 28;
+pub const SIGPROF: ::c_int = 29;
+pub const SIGWINCH: ::c_int = 20;
+pub const SIGCHLD: ::c_int = 18;
+pub const SIGBUS: ::c_int = 10;
+pub const SIGUSR1: ::c_int = 16;
+pub const SIGUSR2: ::c_int = 17;
+pub const SIGCONT: ::c_int = 25;
+pub const SIGSTOP: ::c_int = 23;
+pub const SIGTSTP: ::c_int = 24;
+pub const SIGURG: ::c_int = 21;
+pub const SIGIO: ::c_int = SIGPOLL;
+pub const SIGSYS: ::c_int = 12;
+pub const SIGPOLL: ::c_int = 22;
+pub const SIGPWR: ::c_int = 19;
+pub const SIG_SETMASK: ::c_int = 2;
+pub const SIG_BLOCK: ::c_int = 0;
+pub const SIG_UNBLOCK: ::c_int = 1;
+
+pub const POLLWRNORM: ::c_short = ::POLLOUT;
+pub const POLLWRBAND: ::c_short = 0x0010;
+
+pub const F_SETLK: ::c_int = 106;
+pub const F_SETLKW: ::c_int = 107;
+pub const F_ALLOCSP: ::c_int = 110;
+pub const F_FREESP: ::c_int = 111;
+pub const F_GETLK: ::c_int = 114;
+
+pub const F_RDLCK: ::c_int = 1;
+pub const F_WRLCK: ::c_int = 2;
+pub const F_UNLCK: ::c_int = 3;
+
+pub const NCCS: usize = 40;
+
+pub const MAP_ANON: ::c_int = MAP_ANONYMOUS;
+pub const MAP_ANONYMOUS: ::c_int = 0x00080000;
+
+pub const MCL_CURRENT: ::c_int = 0x000000001;
+pub const MCL_FUTURE: ::c_int = 0x000000002;
+
+pub const _TIO_CBAUD: ::tcflag_t = 15;
+pub const CBAUD: ::tcflag_t = _TIO_CBAUD;
+pub const TAB1: ::tcflag_t = 0x0800;
+pub const TAB2: ::tcflag_t = 0x1000;
+pub const TAB3: ::tcflag_t = 0x1800;
+pub const CR1: ::tcflag_t = 0x200;
+pub const CR2: ::tcflag_t = 0x400;
+pub const CR3: ::tcflag_t = 0x600;
+pub const FF1: ::tcflag_t = 0x8000;
+pub const BS1: ::tcflag_t = 0x2000;
+pub const VT1: ::tcflag_t = 0x4000;
+pub const VWERASE: usize = 14;
+pub const VREPRINT: usize = 12;
+pub const VSUSP: usize = 10;
+pub const VSTART: usize = 8;
+pub const VSTOP: usize = 9;
+pub const VDISCARD: usize = 13;
+pub const VTIME: usize = 17;
+pub const IXON: ::tcflag_t = 0x00000400;
+pub const IXOFF: ::tcflag_t = 0x00001000;
+pub const ONLCR: ::tcflag_t = 0x00000004;
+pub const CSIZE: ::tcflag_t = 0x00000030;
+pub const CS6: ::tcflag_t = 0x10;
+pub const CS7: ::tcflag_t = 0x20;
+pub const CS8: ::tcflag_t = 0x30;
+pub const CSTOPB: ::tcflag_t = 0x00000040;
+pub const CREAD: ::tcflag_t = 0x00000080;
+pub const PARENB: ::tcflag_t = 0x00000100;
+pub const PARODD: ::tcflag_t = 0x00000200;
+pub const HUPCL: ::tcflag_t = 0x00000400;
+pub const CLOCAL: ::tcflag_t = 0x00000800;
+pub const ECHOKE: ::tcflag_t = 0x00000800;
+pub const ECHOE: ::tcflag_t = 0x00000010;
+pub const ECHOK: ::tcflag_t = 0x00000020;
+pub const ECHONL: ::tcflag_t = 0x00000040;
+pub const ECHOCTL: ::tcflag_t = 0x00000200;
+pub const ISIG: ::tcflag_t = 0x00000001;
+pub const ICANON: ::tcflag_t = 0x00000002;
+pub const NOFLSH: ::tcflag_t = 0x00000080;
+pub const OLCUC: ::tcflag_t = 0x00000002;
+pub const NLDLY: ::tcflag_t = 0x00000100;
+pub const CRDLY: ::tcflag_t = 0x00000600;
+pub const TABDLY: ::tcflag_t = 0x00001800;
+pub const BSDLY: ::tcflag_t = 0x00002000;
+pub const FFDLY: ::tcflag_t = 0x00008000;
+pub const VTDLY: ::tcflag_t = 0x00004000;
+pub const XTABS: ::tcflag_t = 0x1800;
+
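+// Baud rate values (speed_t) used with cfsetispeed()/cfsetospeed().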
+pub const B0: ::speed_t = 0;
+pub const B50: ::speed_t = 1;
+pub const B75: ::speed_t = 2;
+pub const B110: ::speed_t = 3;
+pub const B134: ::speed_t = 4;
+pub const B150: ::speed_t = 5;
+pub const B200: ::speed_t = 6;
+pub const B300: ::speed_t = 7;
+pub const B600: ::speed_t = 8;
+pub const B1200: ::speed_t = 9;
+pub const B1800: ::speed_t = 10;
+pub const B2400: ::speed_t = 11;
+pub const B4800: ::speed_t = 12;
+pub const B9600: ::speed_t = 13;
+pub const B19200: ::speed_t = 14;
+pub const B38400: ::speed_t = 15;
+pub const EXTA: ::speed_t = 14;
+pub const EXTB: ::speed_t = 15;
+pub const B57600: ::speed_t = 57600;
+pub const B115200: ::speed_t = 115200;
+
+pub const VEOL: usize = 5;
+pub const VEOL2: usize = 6;
+pub const VMIN: usize = 16;
+pub const IEXTEN: ::tcflag_t = 0x00008000;
+pub const TOSTOP: ::tcflag_t = 0x00000100;
+
+pub const TCSANOW: ::c_int = 0x0001;
+pub const TCSADRAIN: ::c_int = 0x0002;
+pub const TCSAFLUSH: ::c_int = 0x0004;
+
+pub const HW_MACHINE: ::c_int = 1;
+pub const HW_MODEL: ::c_int = 2;
+pub const HW_NCPU: ::c_int = 3;
+pub const HW_BYTEORDER: ::c_int = 4;
+pub const HW_PHYSMEM: ::c_int = 5;
+pub const HW_USERMEM: ::c_int = 6;
+pub const HW_PAGESIZE: ::c_int = 7;
+pub const HW_DISKNAMES: ::c_int = 8;
+pub const HW_IOSTATS: ::c_int = 9;
+pub const HW_MACHINE_ARCH: ::c_int = 10;
+pub const HW_ALIGNBYTES: ::c_int = 11;
+pub const HW_CNMAGIC: ::c_int = 12;
+pub const HW_PHYSMEM64: ::c_int = 13;
+pub const HW_USERMEM64: ::c_int = 14;
+pub const HW_IOSTATNAMES: ::c_int = 15;
+pub const HW_MAXID: ::c_int = 15;
+
+pub const CTL_UNSPEC: ::c_int = 0;
+pub const CTL_KERN: ::c_int = 1;
+pub const CTL_VM: ::c_int = 2;
+pub const CTL_VFS: ::c_int = 3;
+pub const CTL_NET: ::c_int = 4;
+pub const CTL_DEBUG: ::c_int = 5;
+pub const CTL_HW: ::c_int = 6;
+pub const CTL_MACHDEP: ::c_int = 7;
+pub const CTL_USER: ::c_int = 8;
+pub const CTL_QNX: ::c_int = 9;
+pub const CTL_PROC: ::c_int = 10;
+pub const CTL_VENDOR: ::c_int = 11;
+pub const CTL_EMUL: ::c_int = 12;
+pub const CTL_SECURITY: ::c_int = 13;
+pub const CTL_MAXID: ::c_int = 14;
+
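+// nl_langinfo() items for day and month names.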
+pub const DAY_1: ::nl_item = 8;
+pub const DAY_2: ::nl_item = 9;
+pub const DAY_3: ::nl_item = 10;
+pub const DAY_4: ::nl_item = 11;
+pub const DAY_5: ::nl_item = 12;
+pub const DAY_6: ::nl_item = 13;
+pub const DAY_7: ::nl_item = 14;
+
+pub const MON_1: ::nl_item = 22;
+pub const MON_2: ::nl_item = 23;
+pub const MON_3: ::nl_item = 24;
+pub const MON_4: ::nl_item = 25;
+pub const MON_5: ::nl_item = 26;
+pub const MON_6: ::nl_item = 27;
+pub const MON_7: ::nl_item = 28;
+pub const MON_8: ::nl_item = 29;
+pub const MON_9: ::nl_item = 30;
+pub const MON_10: ::nl_item = 31;
+pub const MON_11: ::nl_item = 32;
+pub const MON_12: ::nl_item = 33;
+
+pub const ABDAY_1: ::nl_item = 15;
+pub const ABDAY_2: ::nl_item = 16;
+pub const ABDAY_3: ::nl_item = 17;
+pub const ABDAY_4: ::nl_item = 18;
+pub const ABDAY_5: ::nl_item = 19;
+pub const ABDAY_6: ::nl_item = 20;
+pub const ABDAY_7: ::nl_item = 21;
+
+pub const ABMON_1: ::nl_item = 34;
+pub const ABMON_2: ::nl_item = 35;
+pub const ABMON_3: ::nl_item = 36;
+pub const ABMON_4: ::nl_item = 37;
+pub const ABMON_5: ::nl_item = 38;
+pub const ABMON_6: ::nl_item = 39;
+pub const ABMON_7: ::nl_item = 40;
+pub const ABMON_8: ::nl_item = 41;
+pub const ABMON_9: ::nl_item = 42;
+pub const ABMON_10: ::nl_item = 43;
+pub const ABMON_11: ::nl_item = 44;
+pub const ABMON_12: ::nl_item = 45;
+
+pub const AF_ARP: ::c_int = 28;
+pub const AF_CCITT: ::c_int = 10;
+pub const AF_CHAOS: ::c_int = 5;
+pub const AF_CNT: ::c_int = 21;
+pub const AF_COIP: ::c_int = 20;
+pub const AF_DATAKIT: ::c_int = 9;
+pub const AF_DECnet: ::c_int = 12;
+pub const AF_DLI: ::c_int = 13;
+pub const AF_E164: ::c_int = 26;
+pub const AF_ECMA: ::c_int = 8;
+pub const AF_HYLINK: ::c_int = 15;
+pub const AF_IEEE80211: ::c_int = 32;
+pub const AF_IMPLINK: ::c_int = 3;
+pub const AF_ISO: ::c_int = 7;
+pub const AF_LAT: ::c_int = 14;
+pub const AF_LINK: ::c_int = 18;
+pub const AF_NATM: ::c_int = 27;
+pub const AF_NS: ::c_int = 6;
+pub const AF_OSI: ::c_int = 7;
+pub const AF_PUP: ::c_int = 4;
+pub const ALT_DIGITS: ::nl_item = 50;
+pub const AM_STR: ::nl_item = 6;
+pub const B76800: ::speed_t = 76800;
+
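+// Berkeley Packet Filter (BPF) ioctl request codes.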
+pub const BIOCFLUSH: ::c_int = 17000;
+pub const BIOCGBLEN: ::c_int = 1074020966;
+pub const BIOCGDLT: ::c_int = 1074020970;
+pub const BIOCGDLTLIST: ::c_int = -1072676233;
+pub const BIOCGETIF: ::c_int = 1083196011;
+pub const BIOCGHDRCMPLT: ::c_int = 1074020980;
+pub const BIOCGRTIMEOUT: ::c_int = 1074807406;
+pub const BIOCGSEESENT: ::c_int = 1074020984;
+pub const BIOCGSTATS: ::c_int = 1082147439;
+pub const BIOCIMMEDIATE: ::c_int = -2147204496;
+pub const BIOCPROMISC: ::c_int = 17001;
+pub const BIOCSBLEN: ::c_int = -1073462682;
+pub const BIOCSDLT: ::c_int = -2147204490;
+pub const BIOCSETF: ::c_int = -2146418073;
+pub const BIOCSETIF: ::c_int = -2138029460;
+pub const BIOCSHDRCMPLT: ::c_int = -2147204491;
+pub const BIOCSRTIMEOUT: ::c_int = -2146418067;
+pub const BIOCSSEESENT: ::c_int = -2147204487;
+pub const BIOCVERSION: ::c_int = 1074020977;
+
+pub const BPF_ALIGNMENT: usize = ::mem::size_of::<::c_long>();
+pub const CHAR_BIT: usize = 8;
+pub const CODESET: ::nl_item = 1;
+pub const CRNCYSTR: ::nl_item = 55;
+
+pub const D_FLAG_FILTER: ::c_int = 0x00000001;
+pub const D_FLAG_STAT: ::c_int = 0x00000002;
+pub const D_FLAG_STAT_FORM_MASK: ::c_int = 0x000000f0;
+pub const D_FLAG_STAT_FORM_T32_2001: ::c_int = 0x00000010;
+pub const D_FLAG_STAT_FORM_T32_2008: ::c_int = 0x00000020;
+pub const D_FLAG_STAT_FORM_T64_2008: ::c_int = 0x00000030;
+pub const D_FLAG_STAT_FORM_UNSET: ::c_int = 0x00000000;
+
+pub const D_FMT: ::nl_item = 3;
+pub const D_GETFLAG: ::c_int = 1;
+pub const D_SETFLAG: ::c_int = 2;
+pub const D_T_FMT: ::nl_item = 2;
+pub const ERA: ::nl_item = 46;
+pub const ERA_D_FMT: ::nl_item = 47;
+pub const ERA_D_T_FMT: ::nl_item = 48;
+pub const ERA_T_FMT: ::nl_item = 49;
+pub const RADIXCHAR: ::nl_item = 51;
+pub const THOUSEP: ::nl_item = 52;
+pub const YESEXPR: ::nl_item = 53;
+pub const NOEXPR: ::nl_item = 54;
+pub const F_GETOWN: ::c_int = 35;
+
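+// File/socket ioctl request codes (FIO*).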
+pub const FIONBIO: ::c_int = -2147195266;
+pub const FIOASYNC: ::c_int = -2147195267;
+pub const FIOCLEX: ::c_int = 26113;
+pub const FIOGETOWN: ::c_int = 1074030203;
+pub const FIONCLEX: ::c_int = 26114;
+pub const FIONREAD: ::c_int = 1074030207;
+pub const FIONSPACE: ::c_int = 1074030200;
+pub const FIONWRITE: ::c_int = 1074030201;
+pub const FIOSETOWN: ::c_int = -2147195268;
+
+pub const F_SETOWN: ::c_int = 36;
+pub const IFF_ACCEPTRTADV: ::c_int = 0x40000000;
+pub const IFF_IP6FORWARDING: ::c_int = 0x20000000;
+pub const IFF_LINK0: ::c_int = 0x00001000;
+pub const IFF_LINK1: ::c_int = 0x00002000;
+pub const IFF_LINK2: ::c_int = 0x00004000;
+pub const IFF_OACTIVE: ::c_int = 0x00000400;
+pub const IFF_SHIM: ::c_int = 0x80000000;
+pub const IFF_SIMPLEX: ::c_int = 0x00000800;
+pub const IHFLOW: tcflag_t = 0x00000001;
+pub const IIDLE: tcflag_t = 0x00000008;
+pub const IP_RECVDSTADDR: ::c_int = 7;
+pub const IP_RECVIF: ::c_int = 20;
+pub const IPTOS_ECN_NOTECT: u8 = 0x00;
+pub const IUCLC: tcflag_t = 0x00000200;
+pub const IUTF8: tcflag_t = 0x0004000;
+
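+// sysctl() second-level identifiers under CTL_KERN.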
+pub const KERN_ARGMAX: ::c_int = 8;
+pub const KERN_ARND: ::c_int = 81;
+pub const KERN_BOOTTIME: ::c_int = 21;
+pub const KERN_CLOCKRATE: ::c_int = 12;
+pub const KERN_FILE: ::c_int = 15;
+pub const KERN_HOSTID: ::c_int = 11;
+pub const KERN_HOSTNAME: ::c_int = 10;
+pub const KERN_IOV_MAX: ::c_int = 38;
+pub const KERN_JOB_CONTROL: ::c_int = 19;
+pub const KERN_LOGSIGEXIT: ::c_int = 46;
+pub const KERN_MAXFILES: ::c_int = 7;
+pub const KERN_MAXID: ::c_int = 83;
+pub const KERN_MAXPROC: ::c_int = 6;
+pub const KERN_MAXVNODES: ::c_int = 5;
+pub const KERN_NGROUPS: ::c_int = 18;
+pub const KERN_OSRELEASE: ::c_int = 2;
+pub const KERN_OSREV: ::c_int = 3;
+pub const KERN_OSTYPE: ::c_int = 1;
+pub const KERN_POSIX1: ::c_int = 17;
+pub const KERN_PROC: ::c_int = 14;
+pub const KERN_PROC_ALL: ::c_int = 0;
+pub const KERN_PROC_ARGS: ::c_int = 48;
+pub const KERN_PROC_ENV: ::c_int = 3;
+pub const KERN_PROC_GID: ::c_int = 7;
+pub const KERN_PROC_PGRP: ::c_int = 2;
+pub const KERN_PROC_PID: ::c_int = 1;
+pub const KERN_PROC_RGID: ::c_int = 8;
+pub const KERN_PROC_RUID: ::c_int = 6;
+pub const KERN_PROC_SESSION: ::c_int = 3;
+pub const KERN_PROC_TTY: ::c_int = 4;
+pub const KERN_PROC_UID: ::c_int = 5;
+pub const KERN_PROF: ::c_int = 16;
+pub const KERN_SAVED_IDS: ::c_int = 20;
+pub const KERN_SECURELVL: ::c_int = 9;
+pub const KERN_VERSION: ::c_int = 4;
+pub const KERN_VNODE: ::c_int = 13;
+
+pub const LC_ALL: ::c_int = 63;
+pub const LC_COLLATE: ::c_int = 1;
+pub const LC_CTYPE: ::c_int = 2;
+pub const LC_MESSAGES: ::c_int = 32;
+pub const LC_MONETARY: ::c_int = 4;
+pub const LC_NUMERIC: ::c_int = 8;
+pub const LC_TIME: ::c_int = 16;
+
+pub const LOCAL_CONNWAIT: ::c_int = 0x0002;
+pub const LOCAL_CREDS: ::c_int = 0x0001;
+pub const LOCAL_PEEREID: ::c_int = 0x0003;
+
+pub const MAP_STACK: ::c_int = 0x00001000;
+pub const MNT_NOEXEC: ::c_int = 0x02;
+pub const MNT_NOSUID: ::c_int = 0x04;
+pub const MNT_RDONLY: ::c_int = 0x01;
+
+pub const MSG_NOTIFICATION: ::c_int = 0x0400;
+
+pub const NET_RT_DUMP: ::c_int = 1;
+pub const NET_RT_FLAGS: ::c_int = 2;
+pub const NET_RT_IFLIST: ::c_int = 4;
+pub const NI_NUMERICSCOPE: ::c_int = 0x00000040;
+pub const OHFLOW: tcflag_t = 0x00000002;
+pub const P_ALL: idtype_t = 0;
+pub const PARSTK: tcflag_t = 0x00000004;
+pub const PF_ARP: ::c_int = 28;
+pub const PF_CCITT: ::c_int = 10;
+pub const PF_CHAOS: ::c_int = 5;
+pub const PF_CNT: ::c_int = 21;
+pub const PF_COIP: ::c_int = 20;
+pub const PF_DATAKIT: ::c_int = 9;
+pub const PF_DECnet: ::c_int = 12;
+pub const PF_DLI: ::c_int = 13;
+pub const PF_ECMA: ::c_int = 8;
+pub const PF_HYLINK: ::c_int = 15;
+pub const PF_IMPLINK: ::c_int = 3;
+pub const PF_ISO: ::c_int = 7;
+pub const PF_LAT: ::c_int = 14;
+pub const PF_LINK: ::c_int = 18;
+pub const PF_NATM: ::c_int = 27;
+pub const PF_OSI: ::c_int = 7;
+pub const PF_PIP: ::c_int = 25;
+pub const PF_PUP: ::c_int = 4;
+pub const PF_RTIP: ::c_int = 22;
+pub const PF_XTP: ::c_int = 19;
+pub const PM_STR: ::nl_item = 7;
+pub const POSIX_MADV_DONTNEED: ::c_int = 4;
+pub const POSIX_MADV_NORMAL: ::c_int = 0;
+pub const POSIX_MADV_RANDOM: ::c_int = 2;
+pub const POSIX_MADV_SEQUENTIAL: ::c_int = 1;
+pub const POSIX_MADV_WILLNEED: ::c_int = 3;
+pub const _POSIX_VDISABLE: ::c_int = 0;
+pub const P_PGID: idtype_t = 2;
+pub const P_PID: idtype_t = 1;
+pub const PRIO_PGRP: ::c_int = 1;
+pub const PRIO_PROCESS: ::c_int = 0;
+pub const PRIO_USER: ::c_int = 2;
+pub const pseudo_AF_HDRCMPLT: ::c_int = 30;
+pub const pseudo_AF_PIP: ::c_int = 25;
+pub const pseudo_AF_RTIP: ::c_int = 22;
+pub const pseudo_AF_XTP: ::c_int = 19;
+pub const REG_ASSERT: ::c_int = 15;
+pub const REG_ATOI: ::c_int = 255;
+pub const REG_BACKR: ::c_int = 0x400;
+pub const REG_BASIC: ::c_int = 0x00;
+pub const REG_DUMP: ::c_int = 0x80;
+pub const REG_EMPTY: ::c_int = 14;
+pub const REG_INVARG: ::c_int = 16;
+pub const REG_ITOA: ::c_int = 0o400;
+pub const REG_LARGE: ::c_int = 0x200;
+pub const REG_NOSPEC: ::c_int = 0x10;
+pub const REG_OK: ::c_int = 0;
+pub const REG_PEND: ::c_int = 0x20;
+pub const REG_TRACE: ::c_int = 0x100;
+
+pub const RLIMIT_AS: ::c_int = 6;
+pub const RLIMIT_CORE: ::c_int = 4;
+pub const RLIMIT_CPU: ::c_int = 0;
+pub const RLIMIT_DATA: ::c_int = 2;
+pub const RLIMIT_FSIZE: ::c_int = 1;
+pub const RLIMIT_MEMLOCK: ::c_int = 7;
+pub const RLIMIT_NOFILE: ::c_int = 5;
+pub const RLIMIT_NPROC: ::c_int = 8;
+pub const RLIMIT_RSS: ::c_int = 6;
+pub const RLIMIT_STACK: ::c_int = 3;
+pub const RLIMIT_VMEM: ::c_int = 6;
+pub const RLIM_NLIMITS: ::c_int = 14;
+
+pub const SCHED_ADJTOHEAD: ::c_int = 5;
+pub const SCHED_ADJTOTAIL: ::c_int = 6;
+pub const SCHED_MAXPOLICY: ::c_int = 7;
+pub const SCHED_SETPRIO: ::c_int = 7;
+pub const SCHED_SPORADIC: ::c_int = 4;
+
+pub const SHM_ANON: *mut ::c_char = -1isize as *mut ::c_char;
+pub const SIGCLD: ::c_int = SIGCHLD;
+pub const SIGDEADLK: ::c_int = 7;
+pub const SIGEMT: ::c_int = 7;
+pub const SIGEV_NONE: ::c_int = 0;
+pub const SIGEV_SIGNAL: ::c_int = 129;
+pub const SIGEV_THREAD: ::c_int = 135;
+pub const SIOCGIFADDR: ::c_int = -1064277727;
+pub const SO_FIB: ::c_int = 0x100a;
+pub const SO_OVERFLOWED: ::c_int = 0x1009;
+pub const SO_SETFIB: ::c_int = 0x100a;
+pub const SO_TXPRIO: ::c_int = 0x100b;
+pub const SO_USELOOPBACK: ::c_int = 0x0040;
+pub const SO_VLANPRIO: ::c_int = 0x100c;
+pub const _SS_ALIGNSIZE: usize = ::mem::size_of::<i64>();
+pub const _SS_MAXSIZE: usize = 128;
+pub const _SS_PAD1SIZE: usize = _SS_ALIGNSIZE - 2;
+pub const _SS_PAD2SIZE: usize = _SS_MAXSIZE - 2 - _SS_PAD1SIZE - _SS_ALIGNSIZE;
+pub const TC_CPOSIX: tcflag_t = CLOCAL | CREAD | CSIZE | CSTOPB | HUPCL | PARENB | PARODD;
+pub const TCGETS: ::c_int = 0x404c540d;
+pub const TC_IPOSIX: tcflag_t =
+ BRKINT | ICRNL | IGNBRK | IGNPAR | INLCR | INPCK | ISTRIP | IXOFF | IXON | PARMRK;
+pub const TC_LPOSIX: tcflag_t =
+ ECHO | ECHOE | ECHOK | ECHONL | ICANON | IEXTEN | ISIG | NOFLSH | TOSTOP;
+pub const TC_OPOSIX: tcflag_t = OPOST;
+pub const T_FMT_AMPM: ::nl_item = 5;
+
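+// Terminal ioctl request codes (TIOC*) and TIOCPKT mode bits.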
+pub const TIOCCBRK: ::c_int = 29818;
+pub const TIOCCDTR: ::c_int = 29816;
+pub const TIOCDRAIN: ::c_int = 29790;
+pub const TIOCEXCL: ::c_int = 29709;
+pub const TIOCFLUSH: ::c_int = -2147191792;
+pub const TIOCGETA: ::c_int = 1078752275;
+pub const TIOCGPGRP: ::c_int = 1074033783;
+pub const TIOCGWINSZ: ::c_int = 1074295912;
+pub const TIOCMBIC: ::c_int = -2147191701;
+pub const TIOCMBIS: ::c_int = -2147191700;
+pub const TIOCMGET: ::c_int = 1074033770;
+pub const TIOCMSET: ::c_int = -2147191699;
+pub const TIOCNOTTY: ::c_int = 29809;
+pub const TIOCNXCL: ::c_int = 29710;
+pub const TIOCOUTQ: ::c_int = 1074033779;
+pub const TIOCPKT: ::c_int = -2147191696;
+pub const TIOCPKT_DATA: ::c_int = 0x00;
+pub const TIOCPKT_DOSTOP: ::c_int = 0x20;
+pub const TIOCPKT_FLUSHREAD: ::c_int = 0x01;
+pub const TIOCPKT_FLUSHWRITE: ::c_int = 0x02;
+pub const TIOCPKT_IOCTL: ::c_int = 0x40;
+pub const TIOCPKT_NOSTOP: ::c_int = 0x10;
+pub const TIOCPKT_START: ::c_int = 0x08;
+pub const TIOCPKT_STOP: ::c_int = 0x04;
+pub const TIOCSBRK: ::c_int = 29819;
+pub const TIOCSCTTY: ::c_int = 29793;
+pub const TIOCSDTR: ::c_int = 29817;
+pub const TIOCSETA: ::c_int = -2142473196;
+pub const TIOCSETAF: ::c_int = -2142473194;
+pub const TIOCSETAW: ::c_int = -2142473195;
+pub const TIOCSPGRP: ::c_int = -2147191690;
+pub const TIOCSTART: ::c_int = 29806;
+pub const TIOCSTI: ::c_int = -2147388302;
+pub const TIOCSTOP: ::c_int = 29807;
+pub const TIOCSWINSZ: ::c_int = -2146929561;
+
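+// sysctl() second-level identifiers under CTL_USER.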
+pub const USER_CS_PATH: ::c_int = 1;
+pub const USER_BC_BASE_MAX: ::c_int = 2;
+pub const USER_BC_DIM_MAX: ::c_int = 3;
+pub const USER_BC_SCALE_MAX: ::c_int = 4;
+pub const USER_BC_STRING_MAX: ::c_int = 5;
+pub const USER_COLL_WEIGHTS_MAX: ::c_int = 6;
+pub const USER_EXPR_NEST_MAX: ::c_int = 7;
+pub const USER_LINE_MAX: ::c_int = 8;
+pub const USER_RE_DUP_MAX: ::c_int = 9;
+pub const USER_POSIX2_VERSION: ::c_int = 10;
+pub const USER_POSIX2_C_BIND: ::c_int = 11;
+pub const USER_POSIX2_C_DEV: ::c_int = 12;
+pub const USER_POSIX2_CHAR_TERM: ::c_int = 13;
+pub const USER_POSIX2_FORT_DEV: ::c_int = 14;
+pub const USER_POSIX2_FORT_RUN: ::c_int = 15;
+pub const USER_POSIX2_LOCALEDEF: ::c_int = 16;
+pub const USER_POSIX2_SW_DEV: ::c_int = 17;
+pub const USER_POSIX2_UPE: ::c_int = 18;
+pub const USER_STREAM_MAX: ::c_int = 19;
+pub const USER_TZNAME_MAX: ::c_int = 20;
+pub const USER_ATEXIT_MAX: ::c_int = 21;
+pub const USER_MAXID: ::c_int = 22;
+
+pub const VDOWN: usize = 31;
+pub const VINS: usize = 32;
+pub const VDEL: usize = 33;
+pub const VRUB: usize = 34;
+pub const VCAN: usize = 35;
+pub const VHOME: usize = 36;
+pub const VEND: usize = 37;
+pub const VSPARE3: usize = 38;
+pub const VSPARE4: usize = 39;
+pub const VSWTCH: usize = 7;
+pub const VDSUSP: usize = 11;
+pub const VFWD: usize = 18;
+pub const VLOGIN: usize = 19;
+pub const VPREFIX: usize = 20;
+pub const VSUFFIX: usize = 24;
+pub const VLEFT: usize = 28;
+pub const VRIGHT: usize = 29;
+pub const VUP: usize = 30;
+pub const XCASE: tcflag_t = 0x00000004;
+
+pub const PTHREAD_CREATE_JOINABLE: ::c_int = 0x00;
+pub const PTHREAD_CREATE_DETACHED: ::c_int = 0x01;
+
+pub const PTHREAD_MUTEX_ERRORCHECK: ::c_int = 1;
+pub const PTHREAD_MUTEX_RECURSIVE: ::c_int = 2;
+pub const PTHREAD_MUTEX_NORMAL: ::c_int = 3;
+pub const PTHREAD_STACK_MIN: ::size_t = 256;
+pub const PTHREAD_MUTEX_DEFAULT: ::c_int = 0;
+pub const PTHREAD_MUTEX_STALLED: ::c_int = 0x00;
+pub const PTHREAD_MUTEX_ROBUST: ::c_int = 0x10;
+pub const PTHREAD_PROCESS_PRIVATE: ::c_int = 0x00;
+pub const PTHREAD_PROCESS_SHARED: ::c_int = 0x01;
+
+pub const PTHREAD_KEYS_MAX: usize = 128;
+
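+// Compile-time initializers for pthread mutexes, condition variables and rwlocks.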
+pub const PTHREAD_MUTEX_INITIALIZER: pthread_mutex_t = pthread_mutex_t {
+ __u: 0x80000000,
+ __owner: 0xffffffff,
+};
+pub const PTHREAD_COND_INITIALIZER: pthread_cond_t = pthread_cond_t {
+ __u: CLOCK_REALTIME as u32,
+ __owner: 0xfffffffb,
+};
+pub const PTHREAD_RWLOCK_INITIALIZER: pthread_rwlock_t = pthread_rwlock_t {
+ __active: 0,
+ __blockedwriters: 0,
+ __blockedreaders: 0,
+ __heavy: 0,
+ __lock: PTHREAD_MUTEX_INITIALIZER,
+ __rcond: PTHREAD_COND_INITIALIZER,
+ __wcond: PTHREAD_COND_INITIALIZER,
+ __owner: -2i32 as ::c_uint,
+ __spare: 0,
+};
+
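+// Alignment helpers used by the CMSG_* and dirent accessors below: round a length
+// (or address) up to the next multiple of the word size, or of a given power-of-two boundary.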
+const_fn! {
+ {const} fn _CMSG_ALIGN(len: usize) -> usize {
+        (len + ::mem::size_of::<usize>() - 1) & !(::mem::size_of::<usize>() - 1)
+ }
+
+ {const} fn _ALIGN(p: usize, b: usize) -> usize {
+ (p + b - 1) & !(b-1)
+ }
+}
+
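+// Function versions of the C macros for ancillary data (CMSG_*), fd_set
+// manipulation (FD_*), extra dirent records (_DEXTRA_*) and SOCKCREDSIZE.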
+f! {
+ pub fn CMSG_FIRSTHDR(mhdr: *const msghdr) -> *mut cmsghdr {
+ if (*mhdr).msg_controllen as usize >= ::mem::size_of::<cmsghdr>() {
+ (*mhdr).msg_control as *mut cmsghdr
+ } else {
+ 0 as *mut cmsghdr
+ }
+ }
+
+ pub fn CMSG_NXTHDR(mhdr: *const ::msghdr, cmsg: *const ::cmsghdr)
+ -> *mut ::cmsghdr
+ {
+ let msg = _CMSG_ALIGN((*cmsg).cmsg_len as usize);
+ let next = cmsg as usize + msg + _CMSG_ALIGN(::mem::size_of::<::cmsghdr>());
+ if next > (*mhdr).msg_control as usize + (*mhdr).msg_controllen as usize {
+ 0 as *mut ::cmsghdr
+ } else {
+ (cmsg as usize + msg) as *mut ::cmsghdr
+ }
+ }
+
+ pub fn CMSG_DATA(cmsg: *const ::cmsghdr) -> *mut ::c_uchar {
+ (cmsg as *mut ::c_uchar)
+ .offset(_CMSG_ALIGN(::mem::size_of::<::cmsghdr>()) as isize)
+ }
+
+ pub fn CMSG_LEN(length: ::c_uint) -> ::c_uint {
+ _CMSG_ALIGN(::mem::size_of::<::cmsghdr>()) as ::c_uint + length
+ }
+
+ pub {const} fn CMSG_SPACE(length: ::c_uint) -> ::c_uint {
+        (_CMSG_ALIGN(::mem::size_of::<cmsghdr>()) + _CMSG_ALIGN(length as usize))
+ as ::c_uint
+ }
+
+ pub fn FD_CLR(fd: ::c_int, set: *mut fd_set) -> () {
+ let fd = fd as usize;
+ let size = ::mem::size_of_val(&(*set).fds_bits[0]) * 8;
+        (*set).fds_bits[fd / size] &= !(1 << (fd % size));
+ }
+
+ pub fn FD_ISSET(fd: ::c_int, set: *const fd_set) -> bool {
+ let fd = fd as usize;
+ let size = ::mem::size_of_val(&(*set).fds_bits[0]) * 8;
+        ((*set).fds_bits[fd / size] & (1 << (fd % size))) != 0
+ }
+
+ pub fn FD_SET(fd: ::c_int, set: *mut fd_set) -> () {
+ let fd = fd as usize;
+ let size = ::mem::size_of_val(&(*set).fds_bits[0]) * 8;
+ (*set).fds_bits[fd / size] |= 1 << (fd % size);
+ return
+ }
+
+ pub fn FD_ZERO(set: *mut fd_set) -> () {
+ for slot in (*set).fds_bits.iter_mut() {
+ *slot = 0;
+ }
+ }
+
+ pub fn _DEXTRA_FIRST(_d: *const dirent) -> *mut ::dirent_extra {
+ let _f = &((*(_d)).d_name) as *const _;
+ let _s = _d as usize;
+
+ _ALIGN(_s + _f as usize - _s + (*_d).d_namelen as usize + 1, 8) as *mut ::dirent_extra
+ }
+
+ pub fn _DEXTRA_VALID(_x: *const ::dirent_extra, _d: *const dirent) -> bool {
+ let sz = _x as usize - _d as usize + ::mem::size_of::<::dirent_extra>();
+ let rsz = (*_d).d_reclen as usize;
+
+ if sz > rsz || sz + (*_x).d_datalen as usize > rsz {
+ false
+ } else {
+ true
+ }
+ }
+
+ pub fn _DEXTRA_NEXT(_x: *const ::dirent_extra) -> *mut ::dirent_extra {
+ _ALIGN(
+ _x as usize + ::mem::size_of::<::dirent_extra>() + (*_x).d_datalen as usize, 8
+ ) as *mut ::dirent_extra
+ }
+
+ pub fn SOCKCREDSIZE(ngrps: usize) -> usize {
+ let ngrps = if ngrps > 0 {
+ ngrps - 1
+ } else {
+ 0
+ };
+ ::mem::size_of::<sockcred>() + ::mem::size_of::<::gid_t>() * ngrps
+ }
+}
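+
+// A minimal sketch of how the CMSG_* helpers above are typically combined:
+// walk the control messages attached to a `msghdr` that `recvmsg` has filled
+// in. SOL_SOCKET and SCM_RIGHTS are assumed to be provided elsewhere in this
+// crate; the loop itself relies only on the helpers defined above.
+//
+//     let mut cmsg = CMSG_FIRSTHDR(&msg);
+//     while !cmsg.is_null() {
+//         if (*cmsg).cmsg_level == SOL_SOCKET && (*cmsg).cmsg_type == SCM_RIGHTS {
+//             // Payload starts at CMSG_DATA; here it carries a file descriptor.
+//             let fd = *(CMSG_DATA(cmsg) as *const ::c_int);
+//             let _ = fd;
+//         }
+//         cmsg = CMSG_NXTHDR(&msg, cmsg);
+//     }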
+
+safe_f! {
+ pub {const} fn WIFSTOPPED(status: ::c_int) -> bool {
+ (status & 0xff) == 0x7f
+ }
+
+ pub {const} fn WSTOPSIG(status: ::c_int) -> ::c_int {
+ (status >> 8) & 0xff
+ }
+
+ pub {const} fn WIFCONTINUED(status: ::c_int) -> bool {
+ status == 0xffff
+ }
+
+ pub {const} fn WIFSIGNALED(status: ::c_int) -> bool {
+ ((status & 0x7f) + 1) as i8 >= 2
+ }
+
+ pub {const} fn WTERMSIG(status: ::c_int) -> ::c_int {
+ status & 0x7f
+ }
+
+ pub {const} fn WIFEXITED(status: ::c_int) -> bool {
+ (status & 0x7f) == 0
+ }
+
+ pub {const} fn WEXITSTATUS(status: ::c_int) -> ::c_int {
+ (status >> 8) & 0xff
+ }
+
+ pub {const} fn WCOREDUMP(status: ::c_int) -> bool {
+ (status & 0x80) != 0
+ }
+
+ pub {const} fn IPTOS_ECN(x: u8) -> u8 {
+ x & ::IPTOS_ECN_MASK
+ }
+}
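+
+// A minimal sketch of decoding a status value (for example one written by
+// `wait4` below) with the helpers in this block:
+//
+//     fn describe(status: ::c_int) {
+//         if WIFEXITED(status) {
+//             println!("exited with code {}", WEXITSTATUS(status));
+//         } else if WIFSIGNALED(status) {
+//             println!("killed by signal {}", WTERMSIG(status));
+//         } else if WIFSTOPPED(status) {
+//             println!("stopped by signal {}", WSTOPSIG(status));
+//         }
+//     }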
+
+// Network-related functions are provided by libsocket, and regex functions
+// are provided by libregex.
+#[link(name = "socket")]
+#[link(name = "regex")]
+
+extern "C" {
+ pub fn sem_destroy(sem: *mut sem_t) -> ::c_int;
+ pub fn sem_init(sem: *mut sem_t, pshared: ::c_int, value: ::c_uint) -> ::c_int;
+ pub fn fdatasync(fd: ::c_int) -> ::c_int;
+ pub fn getpriority(which: ::c_int, who: ::id_t) -> ::c_int;
+ pub fn setpriority(which: ::c_int, who: ::id_t, prio: ::c_int) -> ::c_int;
+ pub fn mkfifoat(dirfd: ::c_int, pathname: *const ::c_char, mode: ::mode_t) -> ::c_int;
+
+ pub fn clock_getres(clk_id: ::clockid_t, tp: *mut ::timespec) -> ::c_int;
+ pub fn clock_gettime(clk_id: ::clockid_t, tp: *mut ::timespec) -> ::c_int;
+ pub fn clock_settime(clk_id: ::clockid_t, tp: *const ::timespec) -> ::c_int;
+ pub fn clock_getcpuclockid(pid: ::pid_t, clk_id: *mut ::clockid_t) -> ::c_int;
+
+ pub fn pthread_attr_getstack(
+ attr: *const ::pthread_attr_t,
+ stackaddr: *mut *mut ::c_void,
+ stacksize: *mut ::size_t,
+ ) -> ::c_int;
+ pub fn memalign(align: ::size_t, size: ::size_t) -> *mut ::c_void;
+ pub fn setgroups(ngroups: ::c_int, ptr: *const ::gid_t) -> ::c_int;
+
+ pub fn posix_fadvise(fd: ::c_int, offset: ::off_t, len: ::off_t, advise: ::c_int) -> ::c_int;
+ pub fn futimens(fd: ::c_int, times: *const ::timespec) -> ::c_int;
+ pub fn nl_langinfo(item: ::nl_item) -> *mut ::c_char;
+
+ pub fn utimensat(
+ dirfd: ::c_int,
+ path: *const ::c_char,
+ times: *const ::timespec,
+ flag: ::c_int,
+ ) -> ::c_int;
+
+ pub fn pthread_condattr_getclock(
+ attr: *const pthread_condattr_t,
+ clock_id: *mut clockid_t,
+ ) -> ::c_int;
+ pub fn pthread_condattr_setclock(
+ attr: *mut pthread_condattr_t,
+ clock_id: ::clockid_t,
+ ) -> ::c_int;
+ pub fn pthread_condattr_setpshared(attr: *mut pthread_condattr_t, pshared: ::c_int) -> ::c_int;
+ pub fn pthread_mutexattr_setpshared(
+ attr: *mut pthread_mutexattr_t,
+ pshared: ::c_int,
+ ) -> ::c_int;
+ pub fn pthread_rwlockattr_getpshared(
+ attr: *const pthread_rwlockattr_t,
+ val: *mut ::c_int,
+ ) -> ::c_int;
+ pub fn pthread_rwlockattr_setpshared(attr: *mut pthread_rwlockattr_t, val: ::c_int) -> ::c_int;
+ pub fn ptsname_r(fd: ::c_int, buf: *mut ::c_char, buflen: ::size_t) -> *mut ::c_char;
+ pub fn clearenv() -> ::c_int;
+ pub fn waitid(idtype: idtype_t, id: id_t, infop: *mut ::siginfo_t, options: ::c_int)
+ -> ::c_int;
+ pub fn wait4(
+ pid: ::pid_t,
+ status: *mut ::c_int,
+ options: ::c_int,
+ rusage: *mut ::rusage,
+ ) -> ::pid_t;
+ pub fn execvpe(
+ file: *const ::c_char,
+ argv: *const *const ::c_char,
+ envp: *const *const ::c_char,
+ ) -> ::c_int;
+
+ pub fn getifaddrs(ifap: *mut *mut ::ifaddrs) -> ::c_int;
+ pub fn freeifaddrs(ifa: *mut ::ifaddrs);
+ pub fn bind(socket: ::c_int, address: *const ::sockaddr, address_len: ::socklen_t) -> ::c_int;
+
+ pub fn writev(fd: ::c_int, iov: *const ::iovec, iovcnt: ::c_int) -> ::ssize_t;
+ pub fn readv(fd: ::c_int, iov: *const ::iovec, iovcnt: ::c_int) -> ::ssize_t;
+
+ pub fn sendmsg(fd: ::c_int, msg: *const ::msghdr, flags: ::c_int) -> ::ssize_t;
+ pub fn recvmsg(fd: ::c_int, msg: *mut ::msghdr, flags: ::c_int) -> ::ssize_t;
+ pub fn openpty(
+ amaster: *mut ::c_int,
+ aslave: *mut ::c_int,
+ name: *mut ::c_char,
+ termp: *mut termios,
+ winp: *mut ::winsize,
+ ) -> ::c_int;
+ pub fn forkpty(
+ amaster: *mut ::c_int,
+ name: *mut ::c_char,
+ termp: *mut termios,
+ winp: *mut ::winsize,
+ ) -> ::pid_t;
+ pub fn login_tty(fd: ::c_int) -> ::c_int;
+
+ pub fn uname(buf: *mut ::utsname) -> ::c_int;
+
+ pub fn getpeereid(socket: ::c_int, euid: *mut ::uid_t, egid: *mut ::gid_t) -> ::c_int;
+
+ pub fn strerror_r(errnum: ::c_int, buf: *mut c_char, buflen: ::size_t) -> ::c_int;
+
+ pub fn abs(i: ::c_int) -> ::c_int;
+ pub fn atof(s: *const ::c_char) -> ::c_double;
+ pub fn labs(i: ::c_long) -> ::c_long;
+ pub fn rand() -> ::c_int;
+ pub fn srand(seed: ::c_uint);
+
+ pub fn setpwent();
+ pub fn endpwent();
+ pub fn getpwent() -> *mut passwd;
+ pub fn setgrent();
+ pub fn endgrent();
+ pub fn getgrent() -> *mut ::group;
+ pub fn setspent();
+ pub fn endspent();
+
+ pub fn shm_open(name: *const c_char, oflag: ::c_int, mode: mode_t) -> ::c_int;
+
+ pub fn ftok(pathname: *const ::c_char, proj_id: ::c_int) -> ::key_t;
+ pub fn mprotect(addr: *mut ::c_void, len: ::size_t, prot: ::c_int) -> ::c_int;
+
+ pub fn posix_fallocate(fd: ::c_int, offset: ::off_t, len: ::off_t) -> ::c_int;
+ pub fn mkostemp(template: *mut ::c_char, flags: ::c_int) -> ::c_int;
+ pub fn mkostemps(template: *mut ::c_char, suffixlen: ::c_int, flags: ::c_int) -> ::c_int;
+ pub fn sigtimedwait(
+ set: *const sigset_t,
+ info: *mut siginfo_t,
+ timeout: *const ::timespec,
+ ) -> ::c_int;
+ pub fn sigwaitinfo(set: *const sigset_t, info: *mut siginfo_t) -> ::c_int;
+ pub fn pthread_setschedprio(native: ::pthread_t, priority: ::c_int) -> ::c_int;
+
+ pub fn if_nameindex() -> *mut if_nameindex;
+ pub fn if_freenameindex(ptr: *mut if_nameindex);
+
+ pub fn glob(
+ pattern: *const c_char,
+ flags: ::c_int,
+ errfunc: ::Option<extern "C" fn(epath: *const c_char, errno: ::c_int) -> ::c_int>,
+ pglob: *mut ::glob_t,
+ ) -> ::c_int;
+ pub fn globfree(pglob: *mut ::glob_t);
+
+ pub fn posix_madvise(addr: *mut ::c_void, len: ::size_t, advice: ::c_int) -> ::c_int;
+
+ pub fn shm_unlink(name: *const ::c_char) -> ::c_int;
+
+ pub fn seekdir(dirp: *mut ::DIR, loc: ::c_long);
+
+ pub fn telldir(dirp: *mut ::DIR) -> ::c_long;
+
+ pub fn msync(addr: *mut ::c_void, len: ::size_t, flags: ::c_int) -> ::c_int;
+
+ pub fn recvfrom(
+ socket: ::c_int,
+ buf: *mut ::c_void,
+ len: ::size_t,
+ flags: ::c_int,
+ addr: *mut ::sockaddr,
+ addrlen: *mut ::socklen_t,
+ ) -> ::ssize_t;
+ pub fn mkstemps(template: *mut ::c_char, suffixlen: ::c_int) -> ::c_int;
+
+ pub fn getdomainname(name: *mut ::c_char, len: ::size_t) -> ::c_int;
+ pub fn setdomainname(name: *const ::c_char, len: ::size_t) -> ::c_int;
+ pub fn sync();
+ pub fn pthread_getschedparam(
+ native: ::pthread_t,
+ policy: *mut ::c_int,
+ param: *mut ::sched_param,
+ ) -> ::c_int;
+ pub fn umount(target: *const ::c_char, flags: ::c_int) -> ::c_int;
+ pub fn sched_get_priority_max(policy: ::c_int) -> ::c_int;
+ pub fn settimeofday(tv: *const ::timeval, tz: *const ::c_void) -> ::c_int;
+ pub fn sched_rr_get_interval(pid: ::pid_t, tp: *mut ::timespec) -> ::c_int;
+ pub fn sem_timedwait(sem: *mut sem_t, abstime: *const ::timespec) -> ::c_int;
+ pub fn sem_getvalue(sem: *mut sem_t, sval: *mut ::c_int) -> ::c_int;
+ pub fn sched_setparam(pid: ::pid_t, param: *const ::sched_param) -> ::c_int;
+ pub fn mount(
+ special_device: *const ::c_char,
+ mount_directory: *const ::c_char,
+ flags: ::c_int,
+ mount_type: *const ::c_char,
+ mount_data: *const ::c_void,
+ mount_datalen: ::c_int,
+ ) -> ::c_int;
+ pub fn sched_getparam(pid: ::pid_t, param: *mut ::sched_param) -> ::c_int;
+ pub fn pthread_mutex_consistent(mutex: *mut pthread_mutex_t) -> ::c_int;
+ pub fn pthread_mutex_timedlock(
+ lock: *mut pthread_mutex_t,
+ abstime: *const ::timespec,
+ ) -> ::c_int;
+ pub fn pthread_spin_init(lock: *mut ::pthread_spinlock_t, pshared: ::c_int) -> ::c_int;
+ pub fn pthread_spin_destroy(lock: *mut ::pthread_spinlock_t) -> ::c_int;
+ pub fn pthread_spin_lock(lock: *mut ::pthread_spinlock_t) -> ::c_int;
+ pub fn pthread_spin_trylock(lock: *mut ::pthread_spinlock_t) -> ::c_int;
+ pub fn pthread_spin_unlock(lock: *mut ::pthread_spinlock_t) -> ::c_int;
+ pub fn pthread_barrierattr_init(__attr: *mut ::pthread_barrierattr_t) -> ::c_int;
+ pub fn pthread_barrierattr_destroy(__attr: *mut ::pthread_barrierattr_t) -> ::c_int;
+ pub fn pthread_barrierattr_getpshared(
+ __attr: *const ::pthread_barrierattr_t,
+ __pshared: *mut ::c_int,
+ ) -> ::c_int;
+ pub fn pthread_barrierattr_setpshared(
+ __attr: *mut ::pthread_barrierattr_t,
+ __pshared: ::c_int,
+ ) -> ::c_int;
+ pub fn pthread_barrier_init(
+ __barrier: *mut ::pthread_barrier_t,
+ __attr: *const ::pthread_barrierattr_t,
+ __count: ::c_uint,
+ ) -> ::c_int;
+ pub fn pthread_barrier_destroy(__barrier: *mut ::pthread_barrier_t) -> ::c_int;
+ pub fn pthread_barrier_wait(__barrier: *mut ::pthread_barrier_t) -> ::c_int;
+
+ pub fn sched_getscheduler(pid: ::pid_t) -> ::c_int;
+ pub fn clock_nanosleep(
+ clk_id: ::clockid_t,
+ flags: ::c_int,
+ rqtp: *const ::timespec,
+ rmtp: *mut ::timespec,
+ ) -> ::c_int;
+ pub fn pthread_attr_getguardsize(
+ attr: *const ::pthread_attr_t,
+ guardsize: *mut ::size_t,
+ ) -> ::c_int;
+ pub fn sethostname(name: *const ::c_char, len: ::size_t) -> ::c_int;
+ pub fn sched_get_priority_min(policy: ::c_int) -> ::c_int;
+ pub fn pthread_condattr_getpshared(
+ attr: *const pthread_condattr_t,
+ pshared: *mut ::c_int,
+ ) -> ::c_int;
+ pub fn pthread_setschedparam(
+ native: ::pthread_t,
+ policy: ::c_int,
+ param: *const ::sched_param,
+ ) -> ::c_int;
+ pub fn sched_setscheduler(
+ pid: ::pid_t,
+ policy: ::c_int,
+ param: *const ::sched_param,
+ ) -> ::c_int;
+ pub fn sigsuspend(mask: *const ::sigset_t) -> ::c_int;
+ pub fn getgrgid_r(
+ gid: ::gid_t,
+ grp: *mut ::group,
+ buf: *mut ::c_char,
+ buflen: ::size_t,
+ result: *mut *mut ::group,
+ ) -> ::c_int;
+ pub fn sem_close(sem: *mut sem_t) -> ::c_int;
+ pub fn getdtablesize() -> ::c_int;
+ pub fn getgrnam_r(
+ name: *const ::c_char,
+ grp: *mut ::group,
+ buf: *mut ::c_char,
+ buflen: ::size_t,
+ result: *mut *mut ::group,
+ ) -> ::c_int;
+ pub fn initgroups(user: *const ::c_char, group: ::gid_t) -> ::c_int;
+ pub fn pthread_sigmask(how: ::c_int, set: *const sigset_t, oldset: *mut sigset_t) -> ::c_int;
+ pub fn sem_open(name: *const ::c_char, oflag: ::c_int, ...) -> *mut sem_t;
+ pub fn getgrnam(name: *const ::c_char) -> *mut ::group;
+ pub fn pthread_cancel(thread: ::pthread_t) -> ::c_int;
+ pub fn pthread_kill(thread: ::pthread_t, sig: ::c_int) -> ::c_int;
+ pub fn sem_unlink(name: *const ::c_char) -> ::c_int;
+ pub fn daemon(nochdir: ::c_int, noclose: ::c_int) -> ::c_int;
+ pub fn getpwnam_r(
+ name: *const ::c_char,
+ pwd: *mut passwd,
+ buf: *mut ::c_char,
+ buflen: ::size_t,
+ result: *mut *mut passwd,
+ ) -> ::c_int;
+ pub fn getpwuid_r(
+ uid: ::uid_t,
+ pwd: *mut passwd,
+ buf: *mut ::c_char,
+ buflen: ::size_t,
+ result: *mut *mut passwd,
+ ) -> ::c_int;
+ pub fn sigwait(set: *const sigset_t, sig: *mut ::c_int) -> ::c_int;
+ pub fn pthread_atfork(
+ prepare: ::Option<unsafe extern "C" fn()>,
+ parent: ::Option<unsafe extern "C" fn()>,
+ child: ::Option<unsafe extern "C" fn()>,
+ ) -> ::c_int;
+ pub fn getgrgid(gid: ::gid_t) -> *mut ::group;
+ pub fn getgrouplist(
+ user: *const ::c_char,
+ group: ::gid_t,
+ groups: *mut ::gid_t,
+ ngroups: *mut ::c_int,
+ ) -> ::c_int;
+ pub fn pthread_mutexattr_getpshared(
+ attr: *const pthread_mutexattr_t,
+ pshared: *mut ::c_int,
+ ) -> ::c_int;
+ pub fn pthread_mutexattr_getrobust(
+ attr: *const pthread_mutexattr_t,
+ robustness: *mut ::c_int,
+ ) -> ::c_int;
+ pub fn pthread_mutexattr_setrobust(
+ attr: *mut pthread_mutexattr_t,
+ robustness: ::c_int,
+ ) -> ::c_int;
+ pub fn pthread_create(
+ native: *mut ::pthread_t,
+ attr: *const ::pthread_attr_t,
+ f: extern "C" fn(*mut ::c_void) -> *mut ::c_void,
+ value: *mut ::c_void,
+ ) -> ::c_int;
+ pub fn getitimer(which: ::c_int, curr_value: *mut ::itimerval) -> ::c_int;
+ pub fn setitimer(
+ which: ::c_int,
+ value: *const ::itimerval,
+ ovalue: *mut ::itimerval,
+ ) -> ::c_int;
+ pub fn posix_spawn(
+ pid: *mut ::pid_t,
+ path: *const ::c_char,
+ file_actions: *const ::posix_spawn_file_actions_t,
+ attrp: *const ::posix_spawnattr_t,
+ argv: *const *mut ::c_char,
+ envp: *const *mut ::c_char,
+ ) -> ::c_int;
+ pub fn posix_spawnp(
+ pid: *mut ::pid_t,
+ file: *const ::c_char,
+ file_actions: *const ::posix_spawn_file_actions_t,
+ attrp: *const ::posix_spawnattr_t,
+ argv: *const *mut ::c_char,
+ envp: *const *mut ::c_char,
+ ) -> ::c_int;
+ pub fn posix_spawnattr_init(attr: *mut posix_spawnattr_t) -> ::c_int;
+ pub fn posix_spawnattr_destroy(attr: *mut posix_spawnattr_t) -> ::c_int;
+ pub fn posix_spawnattr_getsigdefault(
+ attr: *const posix_spawnattr_t,
+ default: *mut ::sigset_t,
+ ) -> ::c_int;
+ pub fn posix_spawnattr_setsigdefault(
+ attr: *mut posix_spawnattr_t,
+ default: *const ::sigset_t,
+ ) -> ::c_int;
+ pub fn posix_spawnattr_getsigmask(
+ attr: *const posix_spawnattr_t,
+ default: *mut ::sigset_t,
+ ) -> ::c_int;
+ pub fn posix_spawnattr_setsigmask(
+ attr: *mut posix_spawnattr_t,
+ default: *const ::sigset_t,
+ ) -> ::c_int;
+ pub fn posix_spawnattr_getflags(
+ attr: *const posix_spawnattr_t,
+ flags: *mut ::c_short,
+ ) -> ::c_int;
+ pub fn posix_spawnattr_setflags(attr: *mut posix_spawnattr_t, flags: ::c_short) -> ::c_int;
+ pub fn posix_spawnattr_getpgroup(
+ attr: *const posix_spawnattr_t,
+ flags: *mut ::pid_t,
+ ) -> ::c_int;
+ pub fn posix_spawnattr_setpgroup(attr: *mut posix_spawnattr_t, flags: ::pid_t) -> ::c_int;
+ pub fn posix_spawnattr_getschedpolicy(
+ attr: *const posix_spawnattr_t,
+ flags: *mut ::c_int,
+ ) -> ::c_int;
+ pub fn posix_spawnattr_setschedpolicy(attr: *mut posix_spawnattr_t, flags: ::c_int) -> ::c_int;
+ pub fn posix_spawnattr_getschedparam(
+ attr: *const posix_spawnattr_t,
+ param: *mut ::sched_param,
+ ) -> ::c_int;
+ pub fn posix_spawnattr_setschedparam(
+ attr: *mut posix_spawnattr_t,
+ param: *const ::sched_param,
+ ) -> ::c_int;
+
+ pub fn posix_spawn_file_actions_init(actions: *mut posix_spawn_file_actions_t) -> ::c_int;
+ pub fn posix_spawn_file_actions_destroy(actions: *mut posix_spawn_file_actions_t) -> ::c_int;
+ pub fn posix_spawn_file_actions_addopen(
+ actions: *mut posix_spawn_file_actions_t,
+ fd: ::c_int,
+ path: *const ::c_char,
+ oflag: ::c_int,
+ mode: ::mode_t,
+ ) -> ::c_int;
+ pub fn posix_spawn_file_actions_addclose(
+ actions: *mut posix_spawn_file_actions_t,
+ fd: ::c_int,
+ ) -> ::c_int;
+ pub fn posix_spawn_file_actions_adddup2(
+ actions: *mut posix_spawn_file_actions_t,
+ fd: ::c_int,
+ newfd: ::c_int,
+ ) -> ::c_int;
+ pub fn popen(command: *const c_char, mode: *const c_char) -> *mut ::FILE;
+ pub fn faccessat(
+ dirfd: ::c_int,
+ pathname: *const ::c_char,
+ mode: ::c_int,
+ flags: ::c_int,
+ ) -> ::c_int;
+ pub fn inotify_rm_watch(fd: ::c_int, wd: ::c_int) -> ::c_int;
+ pub fn inotify_init() -> ::c_int;
+ pub fn inotify_add_watch(fd: ::c_int, path: *const ::c_char, mask: u32) -> ::c_int;
+
+ pub fn gettid() -> ::pid_t;
+
+ pub fn pthread_getcpuclockid(thread: ::pthread_t, clk_id: *mut ::clockid_t) -> ::c_int;
+
+ pub fn getnameinfo(
+ sa: *const ::sockaddr,
+ salen: ::socklen_t,
+ host: *mut ::c_char,
+ hostlen: ::socklen_t,
+ serv: *mut ::c_char,
+ sevlen: ::socklen_t,
+ flags: ::c_int,
+ ) -> ::c_int;
+
+ pub fn sendmmsg(
+ sockfd: ::c_int,
+ msgvec: *mut ::mmsghdr,
+ vlen: ::c_uint,
+ flags: ::c_uint,
+ ) -> ::c_int;
+ pub fn recvmmsg(
+ sockfd: ::c_int,
+ msgvec: *mut ::mmsghdr,
+ vlen: ::c_uint,
+ flags: ::c_uint,
+ timeout: *mut ::timespec,
+ ) -> ::c_int;
+
+ pub fn mallopt(param: ::c_int, value: i64) -> ::c_int;
+ pub fn gettimeofday(tp: *mut ::timeval, tz: *mut ::c_void) -> ::c_int;
+
+ pub fn ctermid(s: *mut ::c_char) -> *mut ::c_char;
+ pub fn ioctl(fd: ::c_int, request: ::c_int, ...) -> ::c_int;
+
+ pub fn mallinfo() -> ::mallinfo;
+ pub fn getpwent_r(
+ pwd: *mut ::passwd,
+ buf: *mut ::c_char,
+ __bufsize: ::c_int,
+ __result: *mut *mut ::passwd,
+ ) -> ::c_int;
+ pub fn pthread_getname_np(thread: ::pthread_t, name: *mut ::c_char, len: ::c_int) -> ::c_int;
+ pub fn pthread_setname_np(thread: ::pthread_t, name: *const ::c_char) -> ::c_int;
+
+ pub fn sysctl(
+ _: *const ::c_int,
+ _: ::c_uint,
+ _: *mut ::c_void,
+ _: *mut ::size_t,
+ _: *const ::c_void,
+ _: ::size_t,
+ ) -> ::c_int;
+
+ pub fn getrlimit(resource: ::c_int, rlim: *mut ::rlimit) -> ::c_int;
+ pub fn setrlimit(resource: ::c_int, rlp: *const ::rlimit) -> ::c_int;
+
+ pub fn lio_listio(
+ __mode: ::c_int,
+ __list: *const *mut aiocb,
+ __nent: ::c_int,
+ __sig: *mut sigevent,
+ ) -> ::c_int;
+
+ pub fn dl_iterate_phdr(
+ callback: ::Option<
+ unsafe extern "C" fn(
+ info: *const dl_phdr_info,
+ size: ::size_t,
+ data: *mut ::c_void,
+ ) -> ::c_int,
+ >,
+ data: *mut ::c_void,
+ ) -> ::c_int;
+
+ pub fn memset_s(s: *mut ::c_void, smax: ::size_t, c: ::c_int, n: ::size_t) -> ::c_int;
+
+ pub fn regcomp(
+ __preg: *mut ::regex_t,
+ __pattern: *const ::c_char,
+ __cflags: ::c_int,
+ ) -> ::c_int;
+ pub fn regexec(
+ __preg: *const ::regex_t,
+ __str: *const ::c_char,
+ __nmatch: ::size_t,
+ __pmatch: *mut ::regmatch_t,
+ __eflags: ::c_int,
+ ) -> ::c_int;
+ pub fn regerror(
+ __errcode: ::c_int,
+ __preg: *const ::regex_t,
+ __errbuf: *mut ::c_char,
+ __errbuf_size: ::size_t,
+ ) -> ::size_t;
+ pub fn regfree(__preg: *mut ::regex_t);
+ pub fn dirfd(__dirp: *mut ::DIR) -> ::c_int;
+ pub fn dircntl(dir: *mut ::DIR, cmd: ::c_int, ...) -> ::c_int;
+
+ pub fn aio_cancel(__fd: ::c_int, __aiocbp: *mut ::aiocb) -> ::c_int;
+ pub fn aio_error(__aiocbp: *const ::aiocb) -> ::c_int;
+ pub fn aio_fsync(__operation: ::c_int, __aiocbp: *mut ::aiocb) -> ::c_int;
+ pub fn aio_read(__aiocbp: *mut ::aiocb) -> ::c_int;
+ pub fn aio_return(__aiocpb: *mut ::aiocb) -> ::ssize_t;
+ pub fn aio_suspend(
+ __list: *const *const ::aiocb,
+ __nent: ::c_int,
+ __timeout: *const ::timespec,
+ ) -> ::c_int;
+ pub fn aio_write(__aiocpb: *mut ::aiocb) -> ::c_int;
+
+ pub fn mq_close(__mqdes: ::mqd_t) -> ::c_int;
+ pub fn mq_getattr(__mqdes: ::mqd_t, __mqstat: *mut ::mq_attr) -> ::c_int;
+ pub fn mq_notify(__mqdes: ::mqd_t, __notification: *const ::sigevent) -> ::c_int;
+ pub fn mq_open(__name: *const ::c_char, __oflag: ::c_int, ...) -> ::mqd_t;
+ pub fn mq_receive(
+ __mqdes: ::mqd_t,
+ __msg_ptr: *mut ::c_char,
+ __msg_len: ::size_t,
+ __msg_prio: *mut ::c_uint,
+ ) -> ::ssize_t;
+ pub fn mq_send(
+ __mqdes: ::mqd_t,
+ __msg_ptr: *const ::c_char,
+ __msg_len: ::size_t,
+ __msg_prio: ::c_uint,
+ ) -> ::c_int;
+ pub fn mq_setattr(
+ __mqdes: ::mqd_t,
+ __mqstat: *const mq_attr,
+ __omqstat: *mut mq_attr,
+ ) -> ::c_int;
+ pub fn mq_timedreceive(
+ __mqdes: ::mqd_t,
+ __msg_ptr: *mut ::c_char,
+ __msg_len: ::size_t,
+ __msg_prio: *mut ::c_uint,
+ __abs_timeout: *const ::timespec,
+ ) -> ::ssize_t;
+ pub fn mq_timedsend(
+ __mqdes: ::mqd_t,
+ __msg_ptr: *const ::c_char,
+ __msg_len: ::size_t,
+ __msg_prio: ::c_uint,
+ __abs_timeout: *const ::timespec,
+ ) -> ::c_int;
+ pub fn mq_unlink(__name: *const ::c_char) -> ::c_int;
+ pub fn __get_errno_ptr() -> *mut ::c_int;
+
+ // System page, see https://www.qnx.com/developers/docs/7.1#com.qnx.doc.neutrino.building/topic/syspage/syspage_about.html
+ pub static mut _syspage_ptr: *mut syspage_entry;
+
+    // Function that appears on the stack after a call to pthread_create(). It is
+    // used as a sentinel to work around an infinite loop in the unwinding code.
+ pub fn __my_thread_exit(value_ptr: *mut *const ::c_void);
+}
+
+// Models the implementation in stdlib.h. Ctest will fail if we try to use the
+// default symbol from libc.
+pub unsafe fn atexit(cb: extern "C" fn()) -> ::c_int {
+ extern "C" {
+ static __dso_handle: *mut ::c_void;
+ pub fn __cxa_atexit(
+ cb: extern "C" fn(),
+ __arg: *mut ::c_void,
+ __dso: *mut ::c_void,
+ ) -> ::c_int;
+ }
+ __cxa_atexit(cb, 0 as *mut ::c_void, __dso_handle)
+}
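+
+// A minimal usage sketch for the shim above: register an `extern "C"`
+// callback that runs at normal process termination.
+//
+//     extern "C" fn flush_on_exit() {
+//         // flush buffers, close files, ...
+//     }
+//
+//     unsafe {
+//         atexit(flush_on_exit);
+//     }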
+
+impl siginfo_t {
+ pub unsafe fn si_addr(&self) -> *mut ::c_void {
+ #[repr(C)]
+ struct siginfo_si_addr {
+ _pad: [u8; 32],
+ si_addr: *mut ::c_void,
+ }
+ (*(self as *const siginfo_t as *const siginfo_si_addr)).si_addr
+ }
+
+ pub unsafe fn si_value(&self) -> ::sigval {
+ #[repr(C)]
+ struct siginfo_si_value {
+ _pad: [u8; 32],
+ si_value: ::sigval,
+ }
+ (*(self as *const siginfo_t as *const siginfo_si_value)).si_value
+ }
+
+ pub unsafe fn si_pid(&self) -> ::pid_t {
+ #[repr(C)]
+ struct siginfo_si_pid {
+ _pad: [u8; 16],
+ si_pid: ::pid_t,
+ }
+ (*(self as *const siginfo_t as *const siginfo_si_pid)).si_pid
+ }
+
+ pub unsafe fn si_uid(&self) -> ::uid_t {
+ #[repr(C)]
+ struct siginfo_si_uid {
+ _pad: [u8; 24],
+ si_uid: ::uid_t,
+ }
+ (*(self as *const siginfo_t as *const siginfo_si_uid)).si_uid
+ }
+
+ pub unsafe fn si_status(&self) -> ::c_int {
+ #[repr(C)]
+ struct siginfo_si_status {
+ _pad: [u8; 28],
+ si_status: ::c_int,
+ }
+ (*(self as *const siginfo_t as *const siginfo_si_status)).si_status
+ }
+}
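+
+// A minimal sketch of how the accessors above are meant to be used: inside a
+// handler installed with SA_SIGINFO, read the sender's pid/uid from the
+// `siginfo_t` pointer passed by the kernel.
+//
+//     unsafe extern "C" fn handler(_sig: ::c_int, info: *mut siginfo_t, _ctx: *mut ::c_void) {
+//         if !info.is_null() {
+//             let pid = (*info).si_pid();
+//             let uid = (*info).si_uid();
+//             let _ = (pid, uid); // e.g. log which process sent the signal
+//         }
+//     }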
+
+cfg_if! {
+ if #[cfg(target_arch = "x86_64")] {
+ mod x86_64;
+ pub use self::x86_64::*;
+ }
+ else if #[cfg(target_arch = "aarch64")] {
+ mod aarch64;
+ pub use self::aarch64::*;
+ }
+ else {
+ panic!("Unsupported arch");
+ }
+}
+
+mod neutrino;
+pub use self::neutrino::*;
diff --git a/vendor/libc/src/unix/nto/neutrino.rs b/vendor/libc/src/unix/nto/neutrino.rs
new file mode 100644
index 000000000..cedd21659
--- /dev/null
+++ b/vendor/libc/src/unix/nto/neutrino.rs
@@ -0,0 +1,1288 @@
+pub type nto_job_t = ::sync_t;
+
+s! {
+ pub struct intrspin {
+ pub value: ::c_uint, // volatile
+ }
+
+ pub struct iov_t {
+ pub iov_base: *mut ::c_void, // union
+ pub iov_len: ::size_t,
+ }
+
+ pub struct _itimer {
+ pub nsec: u64,
+ pub interval_nsec: u64,
+ }
+
+ pub struct _msg_info64 {
+ pub nd: u32,
+ pub srcnd: u32,
+ pub pid: ::pid_t,
+ pub tid: i32,
+ pub chid: i32,
+ pub scoid: i32,
+ pub coid: i32,
+ pub priority: i16,
+ pub flags: i16,
+ pub msglen: isize,
+ pub srcmsglen: isize,
+ pub dstmsglen: isize,
+ pub type_id: u32,
+ reserved: u32,
+ }
+
+ pub struct _cred_info {
+ pub ruid: ::uid_t,
+ pub euid: ::uid_t,
+ pub suid: ::uid_t,
+ pub rgid: ::gid_t,
+ pub egid: ::gid_t,
+ pub sgid: ::gid_t,
+ pub ngroups: u32,
+ pub grouplist: [::gid_t; 8],
+ }
+
+ pub struct _client_info {
+ pub nd: u32,
+ pub pid: ::pid_t,
+ pub sid: ::pid_t,
+ pub flags: u32,
+ pub cred: ::_cred_info,
+ }
+
+ pub struct _client_able {
+ pub ability: u32,
+ pub flags: u32,
+ pub range_lo: u64,
+ pub range_hi: u64,
+ }
+
+ pub struct nto_channel_config {
+ pub event: ::sigevent,
+ pub num_pulses: ::c_uint,
+ pub rearm_threshold: ::c_uint,
+ pub options: ::c_uint,
+ reserved: [::c_uint; 3],
+ }
+
+    // TODO: The following structures are defined in a header file that is not
+    // part of the default headers shipped with a standard installation of the
+    // Neutrino 7.1 SDP. Commented out for now.
+ //pub struct _asyncmsg_put_header {
+ // pub err: ::c_int,
+ // pub iov: *mut ::iov_t,
+ // pub parts: ::c_int,
+ // pub handle: ::c_uint,
+ // pub cb: ::Option<
+ // unsafe extern "C" fn(
+ // err: ::c_int,
+ // buf: *mut ::c_void,
+ // handle: ::c_uint,
+ // ) -> ::c_int>,
+ // pub put_hdr_flags: ::c_uint,
+ //}
+
+ //pub struct _asyncmsg_connection_attr {
+ // pub call_back: ::Option<
+ // unsafe extern "C" fn(
+ // err: ::c_int,
+ // buff: *mut ::c_void,
+ // handle: ::c_uint,
+ // ) -> ::c_int>,
+ // pub buffer_size: ::size_t,
+ // pub max_num_buffer: ::c_uint,
+ // pub trigger_num_msg: ::c_uint,
+ // pub trigger_time: ::_itimer,
+ // reserve: ::c_uint,
+ //}
+
+ //pub struct _asyncmsg_connection_descriptor {
+ // pub flags: ::c_uint,
+ // pub sendq_size: ::c_uint,
+ // pub sendq_head: ::c_uint,
+ // pub sendq_tail: ::c_uint,
+ // pub sendq_free: ::c_uint,
+ // pub err: ::c_int,
+ // pub ev: ::sigevent,
+ // pub num_curmsg: ::c_uint,
+ // pub ttimer: ::timer_t,
+ // pub block_con: ::pthread_cond_t,
+ // pub mu: ::pthread_mutex_t,
+ // reserved: ::c_uint,
+ // pub attr: ::_asyncmsg_connection_attr,
+ // pub reserves: [::c_uint; 3],
+ // pub sendq: [::_asyncmsg_put_header; 1], // flexarray
+ //}
+
+ pub struct __c_anonymous_struct_ev {
+ pub event: ::sigevent,
+ pub coid: ::c_int,
+ }
+
+ pub struct _channel_connect_attr { // union
+ pub ev: ::__c_anonymous_struct_ev,
+ }
+
+ pub struct _sighandler_info {
+ pub siginfo: ::siginfo_t,
+ pub handler: ::Option<unsafe extern "C" fn(value: ::c_int)>,
+ pub context: *mut ::c_void,
+ }
+
+ pub struct __c_anonymous_struct_time {
+ pub length: ::c_uint,
+ pub scale: ::c_uint,
+ }
+
+ pub struct _idle_hook {
+ pub hook_size: ::c_uint,
+ pub cmd: ::c_uint,
+ pub mode: ::c_uint,
+ pub latency: ::c_uint,
+ pub next_fire: u64,
+ pub curr_time: u64,
+ pub tod_adjust: u64,
+ pub resp: ::c_uint,
+ pub time: __c_anonymous_struct_time,
+ pub trigger: ::sigevent,
+ pub intrs: *mut ::c_uint,
+ pub block_stack_size: ::c_uint,
+ }
+
+ pub struct _clockadjust {
+ pub tick_count: u32,
+ pub tick_nsec_inc: i32,
+ }
+
+ pub struct qtime_entry {
+ pub cycles_per_sec: u64,
+ pub nsec_tod_adjust: u64, // volatile
+ pub nsec: u64, // volatile
+ pub nsec_inc: u32,
+ pub boot_time: u32,
+ pub adjust: _clockadjust,
+ pub timer_rate: u32,
+ pub timer_scale: i32,
+ pub timer_load: u32,
+ pub intr: i32,
+ pub epoch: u32,
+ pub flags: u32,
+ pub rr_interval_mul: u32,
+ pub timer_load_hi: u32,
+ pub nsec_stable: u64, // volatile
+ pub timer_load_max: u64,
+ pub timer_prog_time: u32,
+ spare: [u32; 7],
+ }
+
+ pub struct _sched_info {
+ pub priority_min: ::c_int,
+ pub priority_max: ::c_int,
+ pub interval: u64,
+ pub priority_priv: ::c_int,
+ reserved: [::c_int; 11],
+ }
+
+ pub struct _timer_info {
+ pub itime: ::_itimer,
+ pub otime: ::_itimer,
+ pub flags: u32,
+ pub tid: i32,
+ pub notify: i32,
+ pub clockid: ::clockid_t,
+ pub overruns: u32,
+ pub event: ::sigevent, // union
+ }
+
+ pub struct _clockperiod {
+ pub nsec: u32,
+ pub fract: i32,
+ }
+}
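+
+// A minimal sketch of how one of the structures above is used: `_clockperiod`
+// is filled in by `ClockPeriod` (declared further down in this file) to query
+// the current tick size. Passing a null "new" pointer only reads the period;
+// CLOCK_REALTIME is assumed to be available from the crate root.
+//
+//     let mut period = _clockperiod { nsec: 0, fract: 0 };
+//     unsafe {
+//         ClockPeriod(CLOCK_REALTIME as ::clockid_t, core::ptr::null(), &mut period, 0);
+//     }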
+
+s_no_extra_traits! {
+ pub struct syspage_entry_info {
+ pub entry_off: u16,
+ pub entry_size: u16,
+ }
+
+ pub struct syspage_array_info {
+ entry_off: u16,
+ entry_size: u16,
+ element_size: u16,
+ }
+
+ #[repr(align(8))]
+ pub struct syspage_entry {
+ pub size: u16,
+ pub total_size: u16,
+ pub type_: u16,
+ pub num_cpu: u16,
+ pub system_private: syspage_entry_info,
+ pub old_asinfo: syspage_entry_info,
+ pub __mangle_name_to_cause_compilation_errs_meminfo: syspage_entry_info,
+ pub hwinfo: syspage_entry_info,
+ pub old_cpuinfo: syspage_entry_info,
+ pub old_cacheattr: syspage_entry_info,
+ pub qtime: syspage_entry_info,
+ pub callout: syspage_entry_info,
+ pub callin: syspage_entry_info,
+ pub typed_strings: syspage_entry_info,
+ pub strings: syspage_entry_info,
+ pub old_intrinfo: syspage_entry_info,
+ pub smp: syspage_entry_info,
+ pub pminfo: syspage_entry_info,
+ pub old_mdriver: syspage_entry_info,
+ spare0: [u32; 1],
+ __reserved: [u8; 160], // anonymous union with architecture dependent structs
+ pub new_asinfo: syspage_array_info,
+ pub new_cpuinfo: syspage_array_info,
+ pub new_cacheattr: syspage_array_info,
+ pub new_intrinfo: syspage_array_info,
+ pub new_mdriver: syspage_array_info,
+ }
+}
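+
+// A minimal sketch of reading the system page described above, assuming the
+// `_syspage_ptr` static from the parent module is in scope:
+//
+//     let num_cpu = unsafe { (*_syspage_ptr).num_cpu };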
+
+pub const SYSMGR_PID: u32 = 1;
+pub const SYSMGR_CHID: u32 = 1;
+pub const SYSMGR_COID: u32 = _NTO_SIDE_CHANNEL;
+pub const SYSMGR_HANDLE: u32 = 0;
+
+pub const STATE_DEAD: ::c_int = 0x00;
+pub const STATE_RUNNING: ::c_int = 0x01;
+pub const STATE_READY: ::c_int = 0x02;
+pub const STATE_STOPPED: ::c_int = 0x03;
+pub const STATE_SEND: ::c_int = 0x04;
+pub const STATE_RECEIVE: ::c_int = 0x05;
+pub const STATE_REPLY: ::c_int = 0x06;
+pub const STATE_STACK: ::c_int = 0x07;
+pub const STATE_WAITTHREAD: ::c_int = 0x08;
+pub const STATE_WAITPAGE: ::c_int = 0x09;
+pub const STATE_SIGSUSPEND: ::c_int = 0x0a;
+pub const STATE_SIGWAITINFO: ::c_int = 0x0b;
+pub const STATE_NANOSLEEP: ::c_int = 0x0c;
+pub const STATE_MUTEX: ::c_int = 0x0d;
+pub const STATE_CONDVAR: ::c_int = 0x0e;
+pub const STATE_JOIN: ::c_int = 0x0f;
+pub const STATE_INTR: ::c_int = 0x10;
+pub const STATE_SEM: ::c_int = 0x11;
+pub const STATE_WAITCTX: ::c_int = 0x12;
+pub const STATE_NET_SEND: ::c_int = 0x13;
+pub const STATE_NET_REPLY: ::c_int = 0x14;
+pub const STATE_MAX: ::c_int = 0x18;
+
+pub const _NTO_TIMEOUT_RECEIVE: i32 = 1 << STATE_RECEIVE;
+pub const _NTO_TIMEOUT_SEND: i32 = 1 << STATE_SEND;
+pub const _NTO_TIMEOUT_REPLY: i32 = 1 << STATE_REPLY;
+pub const _NTO_TIMEOUT_SIGSUSPEND: i32 = 1 << STATE_SIGSUSPEND;
+pub const _NTO_TIMEOUT_SIGWAITINFO: i32 = 1 << STATE_SIGWAITINFO;
+pub const _NTO_TIMEOUT_NANOSLEEP: i32 = 1 << STATE_NANOSLEEP;
+pub const _NTO_TIMEOUT_MUTEX: i32 = 1 << STATE_MUTEX;
+pub const _NTO_TIMEOUT_CONDVAR: i32 = 1 << STATE_CONDVAR;
+pub const _NTO_TIMEOUT_JOIN: i32 = 1 << STATE_JOIN;
+pub const _NTO_TIMEOUT_INTR: i32 = 1 << STATE_INTR;
+pub const _NTO_TIMEOUT_SEM: i32 = 1 << STATE_SEM;
+
+pub const _NTO_MI_ENDIAN_BIG: u32 = 1;
+pub const _NTO_MI_ENDIAN_DIFF: u32 = 2;
+pub const _NTO_MI_UNBLOCK_REQ: u32 = 256;
+pub const _NTO_MI_NET_CRED_DIRTY: u32 = 512;
+pub const _NTO_MI_CONSTRAINED: u32 = 1024;
+pub const _NTO_MI_CHROOT: u32 = 2048;
+pub const _NTO_MI_BITS_64: u32 = 4096;
+pub const _NTO_MI_BITS_DIFF: u32 = 8192;
+pub const _NTO_MI_SANDBOX: u32 = 16384;
+
+pub const _NTO_CI_ENDIAN_BIG: u32 = 1;
+pub const _NTO_CI_BKGND_PGRP: u32 = 4;
+pub const _NTO_CI_ORPHAN_PGRP: u32 = 8;
+pub const _NTO_CI_STOPPED: u32 = 128;
+pub const _NTO_CI_UNABLE: u32 = 256;
+pub const _NTO_CI_TYPE_ID: u32 = 512;
+pub const _NTO_CI_CHROOT: u32 = 2048;
+pub const _NTO_CI_BITS_64: u32 = 4096;
+pub const _NTO_CI_SANDBOX: u32 = 16384;
+pub const _NTO_CI_LOADER: u32 = 32768;
+pub const _NTO_CI_FULL_GROUPS: u32 = 2147483648;
+
+pub const _NTO_TI_ACTIVE: u32 = 1;
+pub const _NTO_TI_ABSOLUTE: u32 = 2;
+pub const _NTO_TI_EXPIRED: u32 = 4;
+pub const _NTO_TI_TOD_BASED: u32 = 8;
+pub const _NTO_TI_TARGET_PROCESS: u32 = 16;
+pub const _NTO_TI_REPORT_TOLERANCE: u32 = 32;
+pub const _NTO_TI_PRECISE: u32 = 64;
+pub const _NTO_TI_TOLERANT: u32 = 128;
+pub const _NTO_TI_WAKEUP: u32 = 256;
+pub const _NTO_TI_PROCESS_TOLERANT: u32 = 512;
+pub const _NTO_TI_HIGH_RESOLUTION: u32 = 1024;
+
+pub const _PULSE_TYPE: u32 = 0;
+pub const _PULSE_SUBTYPE: u32 = 0;
+pub const _PULSE_CODE_UNBLOCK: i32 = -32;
+pub const _PULSE_CODE_DISCONNECT: i32 = -33;
+pub const _PULSE_CODE_THREADDEATH: i32 = -34;
+pub const _PULSE_CODE_COIDDEATH: i32 = -35;
+pub const _PULSE_CODE_NET_ACK: i32 = -36;
+pub const _PULSE_CODE_NET_UNBLOCK: i32 = -37;
+pub const _PULSE_CODE_NET_DETACH: i32 = -38;
+pub const _PULSE_CODE_RESTART: i32 = -39;
+pub const _PULSE_CODE_NORESTART: i32 = -40;
+pub const _PULSE_CODE_UNBLOCK_RESTART: i32 = -41;
+pub const _PULSE_CODE_UNBLOCK_TIMER: i32 = -42;
+pub const _PULSE_CODE_MINAVAIL: u32 = 0;
+pub const _PULSE_CODE_MAXAVAIL: u32 = 127;
+
+pub const _NTO_HARD_FLAGS_END: u32 = 1;
+
+pub const _NTO_PULSE_IF_UNIQUE: u32 = 4096;
+pub const _NTO_PULSE_REPLACE: u32 = 8192;
+
+pub const _NTO_PF_NOCLDSTOP: u32 = 1;
+pub const _NTO_PF_LOADING: u32 = 2;
+pub const _NTO_PF_TERMING: u32 = 4;
+pub const _NTO_PF_ZOMBIE: u32 = 8;
+pub const _NTO_PF_NOZOMBIE: u32 = 16;
+pub const _NTO_PF_FORKED: u32 = 32;
+pub const _NTO_PF_ORPHAN_PGRP: u32 = 64;
+pub const _NTO_PF_STOPPED: u32 = 128;
+pub const _NTO_PF_DEBUG_STOPPED: u32 = 256;
+pub const _NTO_PF_BKGND_PGRP: u32 = 512;
+pub const _NTO_PF_NOISYNC: u32 = 1024;
+pub const _NTO_PF_CONTINUED: u32 = 2048;
+pub const _NTO_PF_CHECK_INTR: u32 = 4096;
+pub const _NTO_PF_COREDUMP: u32 = 8192;
+pub const _NTO_PF_RING0: u32 = 32768;
+pub const _NTO_PF_SLEADER: u32 = 65536;
+pub const _NTO_PF_WAITINFO: u32 = 131072;
+pub const _NTO_PF_DESTROYALL: u32 = 524288;
+pub const _NTO_PF_NOCOREDUMP: u32 = 1048576;
+pub const _NTO_PF_WAITDONE: u32 = 4194304;
+pub const _NTO_PF_TERM_WAITING: u32 = 8388608;
+pub const _NTO_PF_ASLR: u32 = 16777216;
+pub const _NTO_PF_EXECED: u32 = 33554432;
+pub const _NTO_PF_APP_STOPPED: u32 = 67108864;
+pub const _NTO_PF_64BIT: u32 = 134217728;
+pub const _NTO_PF_NET: u32 = 268435456;
+pub const _NTO_PF_NOLAZYSTACK: u32 = 536870912;
+pub const _NTO_PF_NOEXEC_STACK: u32 = 1073741824;
+pub const _NTO_PF_LOADER_PERMS: u32 = 2147483648;
+
+pub const _NTO_TF_INTR_PENDING: u32 = 65536;
+pub const _NTO_TF_DETACHED: u32 = 131072;
+pub const _NTO_TF_SHR_MUTEX: u32 = 262144;
+pub const _NTO_TF_SHR_MUTEX_EUID: u32 = 524288;
+pub const _NTO_TF_THREADS_HOLD: u32 = 1048576;
+pub const _NTO_TF_UNBLOCK_REQ: u32 = 4194304;
+pub const _NTO_TF_ALIGN_FAULT: u32 = 16777216;
+pub const _NTO_TF_SSTEP: u32 = 33554432;
+pub const _NTO_TF_ALLOCED_STACK: u32 = 67108864;
+pub const _NTO_TF_NOMULTISIG: u32 = 134217728;
+pub const _NTO_TF_LOW_LATENCY: u32 = 268435456;
+pub const _NTO_TF_IOPRIV: u32 = 2147483648;
+
+pub const _NTO_TCTL_IO_PRIV: u32 = 1;
+pub const _NTO_TCTL_THREADS_HOLD: u32 = 2;
+pub const _NTO_TCTL_THREADS_CONT: u32 = 3;
+pub const _NTO_TCTL_RUNMASK: u32 = 4;
+pub const _NTO_TCTL_ALIGN_FAULT: u32 = 5;
+pub const _NTO_TCTL_RUNMASK_GET_AND_SET: u32 = 6;
+pub const _NTO_TCTL_PERFCOUNT: u32 = 7;
+pub const _NTO_TCTL_ONE_THREAD_HOLD: u32 = 8;
+pub const _NTO_TCTL_ONE_THREAD_CONT: u32 = 9;
+pub const _NTO_TCTL_RUNMASK_GET_AND_SET_INHERIT: u32 = 10;
+pub const _NTO_TCTL_NAME: u32 = 11;
+pub const _NTO_TCTL_RCM_GET_AND_SET: u32 = 12;
+pub const _NTO_TCTL_SHR_MUTEX: u32 = 13;
+pub const _NTO_TCTL_IO: u32 = 14;
+pub const _NTO_TCTL_NET_KIF_GET_AND_SET: u32 = 15;
+pub const _NTO_TCTL_LOW_LATENCY: u32 = 16;
+pub const _NTO_TCTL_ADD_EXIT_EVENT: u32 = 17;
+pub const _NTO_TCTL_DEL_EXIT_EVENT: u32 = 18;
+pub const _NTO_TCTL_IO_LEVEL: u32 = 19;
+pub const _NTO_TCTL_RESERVED: u32 = 2147483648;
+pub const _NTO_TCTL_IO_LEVEL_INHERIT: u32 = 1073741824;
+pub const _NTO_IO_LEVEL_NONE: u32 = 1;
+pub const _NTO_IO_LEVEL_1: u32 = 2;
+pub const _NTO_IO_LEVEL_2: u32 = 3;
+
+pub const _NTO_THREAD_NAME_MAX: u32 = 100;
+
+pub const _NTO_CHF_FIXED_PRIORITY: u32 = 1;
+pub const _NTO_CHF_UNBLOCK: u32 = 2;
+pub const _NTO_CHF_THREAD_DEATH: u32 = 4;
+pub const _NTO_CHF_DISCONNECT: u32 = 8;
+pub const _NTO_CHF_NET_MSG: u32 = 16;
+pub const _NTO_CHF_SENDER_LEN: u32 = 32;
+pub const _NTO_CHF_COID_DISCONNECT: u32 = 64;
+pub const _NTO_CHF_REPLY_LEN: u32 = 128;
+pub const _NTO_CHF_PULSE_POOL: u32 = 256;
+pub const _NTO_CHF_ASYNC_NONBLOCK: u32 = 512;
+pub const _NTO_CHF_ASYNC: u32 = 1024;
+pub const _NTO_CHF_GLOBAL: u32 = 2048;
+pub const _NTO_CHF_PRIVATE: u32 = 4096;
+pub const _NTO_CHF_MSG_PAUSING: u32 = 8192;
+pub const _NTO_CHF_INHERIT_RUNMASK: u32 = 16384;
+pub const _NTO_CHF_UNBLOCK_TIMER: u32 = 32768;
+
+pub const _NTO_CHO_CUSTOM_EVENT: u32 = 1;
+
+pub const _NTO_COF_CLOEXEC: u32 = 1;
+pub const _NTO_COF_DEAD: u32 = 2;
+pub const _NTO_COF_NOSHARE: u32 = 64;
+pub const _NTO_COF_NETCON: u32 = 128;
+pub const _NTO_COF_NONBLOCK: u32 = 256;
+pub const _NTO_COF_ASYNC: u32 = 512;
+pub const _NTO_COF_GLOBAL: u32 = 1024;
+pub const _NTO_COF_NOEVENT: u32 = 2048;
+pub const _NTO_COF_INSECURE: u32 = 4096;
+pub const _NTO_COF_REG_EVENTS: u32 = 8192;
+pub const _NTO_COF_UNREG_EVENTS: u32 = 16384;
+pub const _NTO_COF_MASK: u32 = 65535;
+
+pub const _NTO_SIDE_CHANNEL: u32 = 1073741824;
+
+pub const _NTO_CONNECTION_SCOID: u32 = 65536;
+pub const _NTO_GLOBAL_CHANNEL: u32 = 1073741824;
+
+pub const _NTO_TIMEOUT_MASK: u32 = (1 << STATE_MAX) - 1;
+pub const _NTO_TIMEOUT_ACTIVE: u32 = 1 << STATE_MAX;
+pub const _NTO_TIMEOUT_IMMEDIATE: u32 = 1 << (STATE_MAX + 1);
+
+pub const _NTO_IC_LATENCY: u32 = 0;
+
+pub const _NTO_INTR_FLAGS_END: u32 = 1;
+pub const _NTO_INTR_FLAGS_NO_UNMASK: u32 = 2;
+pub const _NTO_INTR_FLAGS_PROCESS: u32 = 4;
+pub const _NTO_INTR_FLAGS_TRK_MSK: u32 = 8;
+pub const _NTO_INTR_FLAGS_ARRAY: u32 = 16;
+pub const _NTO_INTR_FLAGS_EXCLUSIVE: u32 = 32;
+pub const _NTO_INTR_FLAGS_FPU: u32 = 64;
+
+pub const _NTO_INTR_CLASS_EXTERNAL: u32 = 0;
+pub const _NTO_INTR_CLASS_SYNTHETIC: u32 = 2147418112;
+
+pub const _NTO_INTR_SPARE: u32 = 2147483647;
+
+pub const _NTO_HOOK_IDLE: u32 = 2147418113;
+pub const _NTO_HOOK_OVERDRIVE: u32 = 2147418114;
+pub const _NTO_HOOK_LAST: u32 = 2147418114;
+pub const _NTO_HOOK_IDLE2_FLAG: u32 = 32768;
+
+pub const _NTO_IH_CMD_SLEEP_SETUP: u32 = 1;
+pub const _NTO_IH_CMD_SLEEP_BLOCK: u32 = 2;
+pub const _NTO_IH_CMD_SLEEP_WAKEUP: u32 = 4;
+pub const _NTO_IH_CMD_SLEEP_ONLINE: u32 = 8;
+pub const _NTO_IH_RESP_NEEDS_BLOCK: u32 = 1;
+pub const _NTO_IH_RESP_NEEDS_WAKEUP: u32 = 2;
+pub const _NTO_IH_RESP_NEEDS_ONLINE: u32 = 4;
+pub const _NTO_IH_RESP_SYNC_TIME: u32 = 16;
+pub const _NTO_IH_RESP_SYNC_TLB: u32 = 32;
+pub const _NTO_IH_RESP_SUGGEST_OFFLINE: u32 = 256;
+pub const _NTO_IH_RESP_SLEEP_MODE_REACHED: u32 = 512;
+pub const _NTO_IH_RESP_DELIVER_INTRS: u32 = 1024;
+
+pub const _NTO_READIOV_SEND: u32 = 0;
+pub const _NTO_READIOV_REPLY: u32 = 1;
+
+pub const _NTO_KEYDATA_VTID: u32 = 2147483648;
+
+pub const _NTO_KEYDATA_PATHSIGN: u32 = 32768;
+pub const _NTO_KEYDATA_OP_MASK: u32 = 255;
+pub const _NTO_KEYDATA_VERIFY: u32 = 0;
+pub const _NTO_KEYDATA_CALCULATE: u32 = 1;
+pub const _NTO_KEYDATA_CALCULATE_REUSE: u32 = 2;
+pub const _NTO_KEYDATA_PATHSIGN_VERIFY: u32 = 32768;
+pub const _NTO_KEYDATA_PATHSIGN_CALCULATE: u32 = 32769;
+pub const _NTO_KEYDATA_PATHSIGN_CALCULATE_REUSE: u32 = 32770;
+
+pub const _NTO_SCTL_SETPRIOCEILING: u32 = 1;
+pub const _NTO_SCTL_GETPRIOCEILING: u32 = 2;
+pub const _NTO_SCTL_SETEVENT: u32 = 3;
+pub const _NTO_SCTL_MUTEX_WAKEUP: u32 = 4;
+pub const _NTO_SCTL_MUTEX_CONSISTENT: u32 = 5;
+pub const _NTO_SCTL_SEM_VALUE: u32 = 6;
+
+pub const _NTO_CLIENTINFO_GETGROUPS: u32 = 1;
+pub const _NTO_CLIENTINFO_GETTYPEID: u32 = 2;
+
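+// A minimal sketch of the channel/connection/pulse primitives declared below:
+// create a channel, attach a side-channel connection to it, send a pulse to
+// ourselves, and receive it again. Error handling is omitted, and the receive
+// buffer is simply oversized for a pulse message.
+//
+//     unsafe {
+//         let chid = ChannelCreate(0);
+//         let coid = ConnectAttach(0, 0, chid, _NTO_SIDE_CHANNEL as ::c_uint, 0);
+//         MsgSendPulse(coid, -1, _PULSE_CODE_MINAVAIL as ::c_int, 42);
+//         let mut buf = [0u8; 256];
+//         MsgReceivePulse(chid, buf.as_mut_ptr() as *mut ::c_void, buf.len(), core::ptr::null_mut());
+//         ConnectDetach(coid);
+//         ChannelDestroy(chid);
+//     }
+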
+extern "C" {
+ pub fn ChannelCreate(__flags: ::c_uint) -> ::c_int;
+ pub fn ChannelCreate_r(__flags: ::c_uint) -> ::c_int;
+ pub fn ChannelCreatePulsePool(
+ __flags: ::c_uint,
+ __config: *const nto_channel_config,
+ ) -> ::c_int;
+ pub fn ChannelCreateExt(
+ __flags: ::c_uint,
+ __mode: ::mode_t,
+ __bufsize: usize,
+ __maxnumbuf: ::c_uint,
+ __ev: *const ::sigevent,
+ __cred: *mut _cred_info,
+ ) -> ::c_int;
+ pub fn ChannelDestroy(__chid: ::c_int) -> ::c_int;
+ pub fn ChannelDestroy_r(__chid: ::c_int) -> ::c_int;
+ pub fn ConnectAttach(
+ __nd: u32,
+ __pid: ::pid_t,
+ __chid: ::c_int,
+ __index: ::c_uint,
+ __flags: ::c_int,
+ ) -> ::c_int;
+ pub fn ConnectAttach_r(
+ __nd: u32,
+ __pid: ::pid_t,
+ __chid: ::c_int,
+ __index: ::c_uint,
+ __flags: ::c_int,
+ ) -> ::c_int;
+
+    // TODO: The following function uses a structure defined in a header file
+    //       that is not part of the default headers shipped with a standard
+    //       installation of the Neutrino 7.1 SDP. Commented out for now.
+ //pub fn ConnectAttachExt(
+ // __nd: u32,
+ // __pid: ::pid_t,
+ // __chid: ::c_int,
+ // __index: ::c_uint,
+ // __flags: ::c_int,
+ // __cd: *mut _asyncmsg_connection_descriptor,
+ //) -> ::c_int;
+ pub fn ConnectDetach(__coid: ::c_int) -> ::c_int;
+ pub fn ConnectDetach_r(__coid: ::c_int) -> ::c_int;
+ pub fn ConnectServerInfo(__pid: ::pid_t, __coid: ::c_int, __info: *mut _msg_info64) -> ::c_int;
+ pub fn ConnectServerInfo_r(
+ __pid: ::pid_t,
+ __coid: ::c_int,
+ __info: *mut _msg_info64,
+ ) -> ::c_int;
+ pub fn ConnectClientInfoExtraArgs(
+ __scoid: ::c_int,
+ __info_pp: *mut _client_info,
+ __ngroups: ::c_int,
+ __abilities: *mut _client_able,
+ __nable: ::c_int,
+ __type_id: *mut ::c_uint,
+ ) -> ::c_int;
+ pub fn ConnectClientInfoExtraArgs_r(
+ __scoid: ::c_int,
+ __info_pp: *mut _client_info,
+ __ngroups: ::c_int,
+ __abilities: *mut _client_able,
+ __nable: ::c_int,
+ __type_id: *mut ::c_uint,
+ ) -> ::c_int;
+ pub fn ConnectClientInfo(
+ __scoid: ::c_int,
+ __info: *mut _client_info,
+ __ngroups: ::c_int,
+ ) -> ::c_int;
+ pub fn ConnectClientInfo_r(
+ __scoid: ::c_int,
+ __info: *mut _client_info,
+ __ngroups: ::c_int,
+ ) -> ::c_int;
+ pub fn ConnectClientInfoExt(
+ __scoid: ::c_int,
+ __info_pp: *mut *mut _client_info,
+ flags: ::c_int,
+ ) -> ::c_int;
+ pub fn ClientInfoExtFree(__info_pp: *mut *mut _client_info) -> ::c_int;
+ pub fn ConnectClientInfoAble(
+ __scoid: ::c_int,
+ __info_pp: *mut *mut _client_info,
+ flags: ::c_int,
+ abilities: *mut _client_able,
+ nable: ::c_int,
+ ) -> ::c_int;
+ pub fn ConnectFlags(
+ __pid: ::pid_t,
+ __coid: ::c_int,
+ __mask: ::c_uint,
+ __bits: ::c_uint,
+ ) -> ::c_int;
+ pub fn ConnectFlags_r(
+ __pid: ::pid_t,
+ __coid: ::c_int,
+ __mask: ::c_uint,
+ __bits: ::c_uint,
+ ) -> ::c_int;
+ pub fn ChannelConnectAttr(
+ __id: ::c_uint,
+ __old_attr: *mut _channel_connect_attr,
+ __new_attr: *mut _channel_connect_attr,
+ __flags: ::c_uint,
+ ) -> ::c_int;
+ pub fn MsgSend(
+ __coid: ::c_int,
+ __smsg: *const ::c_void,
+ __sbytes: usize,
+ __rmsg: *mut ::c_void,
+ __rbytes: usize,
+ ) -> ::c_long;
+ pub fn MsgSend_r(
+ __coid: ::c_int,
+ __smsg: *const ::c_void,
+ __sbytes: usize,
+ __rmsg: *mut ::c_void,
+ __rbytes: usize,
+ ) -> ::c_long;
+ pub fn MsgSendnc(
+ __coid: ::c_int,
+ __smsg: *const ::c_void,
+ __sbytes: usize,
+ __rmsg: *mut ::c_void,
+ __rbytes: usize,
+ ) -> ::c_long;
+ pub fn MsgSendnc_r(
+ __coid: ::c_int,
+ __smsg: *const ::c_void,
+ __sbytes: usize,
+ __rmsg: *mut ::c_void,
+ __rbytes: usize,
+ ) -> ::c_long;
+ pub fn MsgSendsv(
+ __coid: ::c_int,
+ __smsg: *const ::c_void,
+ __sbytes: usize,
+ __riov: *const ::iovec,
+ __rparts: usize,
+ ) -> ::c_long;
+ pub fn MsgSendsv_r(
+ __coid: ::c_int,
+ __smsg: *const ::c_void,
+ __sbytes: usize,
+ __riov: *const ::iovec,
+ __rparts: usize,
+ ) -> ::c_long;
+ pub fn MsgSendsvnc(
+ __coid: ::c_int,
+ __smsg: *const ::c_void,
+ __sbytes: usize,
+ __riov: *const ::iovec,
+ __rparts: usize,
+ ) -> ::c_long;
+ pub fn MsgSendsvnc_r(
+ __coid: ::c_int,
+ __smsg: *const ::c_void,
+ __sbytes: usize,
+ __riov: *const ::iovec,
+ __rparts: usize,
+ ) -> ::c_long;
+ pub fn MsgSendvs(
+ __coid: ::c_int,
+ __siov: *const ::iovec,
+ __sparts: usize,
+ __rmsg: *mut ::c_void,
+ __rbytes: usize,
+ ) -> ::c_long;
+ pub fn MsgSendvs_r(
+ __coid: ::c_int,
+ __siov: *const ::iovec,
+ __sparts: usize,
+ __rmsg: *mut ::c_void,
+ __rbytes: usize,
+ ) -> ::c_long;
+ pub fn MsgSendvsnc(
+ __coid: ::c_int,
+ __siov: *const ::iovec,
+ __sparts: usize,
+ __rmsg: *mut ::c_void,
+ __rbytes: usize,
+ ) -> ::c_long;
+ pub fn MsgSendvsnc_r(
+ __coid: ::c_int,
+ __siov: *const ::iovec,
+ __sparts: usize,
+ __rmsg: *mut ::c_void,
+ __rbytes: usize,
+ ) -> ::c_long;
+ pub fn MsgSendv(
+ __coid: ::c_int,
+ __siov: *const ::iovec,
+ __sparts: usize,
+ __riov: *const ::iovec,
+ __rparts: usize,
+ ) -> ::c_long;
+ pub fn MsgSendv_r(
+ __coid: ::c_int,
+ __siov: *const ::iovec,
+ __sparts: usize,
+ __riov: *const ::iovec,
+ __rparts: usize,
+ ) -> ::c_long;
+ pub fn MsgSendvnc(
+ __coid: ::c_int,
+ __siov: *const ::iovec,
+ __sparts: usize,
+ __riov: *const ::iovec,
+ __rparts: usize,
+ ) -> ::c_long;
+ pub fn MsgSendvnc_r(
+ __coid: ::c_int,
+ __siov: *const ::iovec,
+ __sparts: usize,
+ __riov: *const ::iovec,
+ __rparts: usize,
+ ) -> ::c_long;
+ pub fn MsgReceive(
+ __chid: ::c_int,
+ __msg: *mut ::c_void,
+ __bytes: usize,
+ __info: *mut _msg_info64,
+ ) -> ::c_int;
+ pub fn MsgReceive_r(
+ __chid: ::c_int,
+ __msg: *mut ::c_void,
+ __bytes: usize,
+ __info: *mut _msg_info64,
+ ) -> ::c_int;
+ pub fn MsgReceivev(
+ __chid: ::c_int,
+ __iov: *const ::iovec,
+ __parts: usize,
+ __info: *mut _msg_info64,
+ ) -> ::c_int;
+ pub fn MsgReceivev_r(
+ __chid: ::c_int,
+ __iov: *const ::iovec,
+ __parts: usize,
+ __info: *mut _msg_info64,
+ ) -> ::c_int;
+ pub fn MsgReceivePulse(
+ __chid: ::c_int,
+ __pulse: *mut ::c_void,
+ __bytes: usize,
+ __info: *mut _msg_info64,
+ ) -> ::c_int;
+ pub fn MsgReceivePulse_r(
+ __chid: ::c_int,
+ __pulse: *mut ::c_void,
+ __bytes: usize,
+ __info: *mut _msg_info64,
+ ) -> ::c_int;
+ pub fn MsgReceivePulsev(
+ __chid: ::c_int,
+ __iov: *const ::iovec,
+ __parts: usize,
+ __info: *mut _msg_info64,
+ ) -> ::c_int;
+ pub fn MsgReceivePulsev_r(
+ __chid: ::c_int,
+ __iov: *const ::iovec,
+ __parts: usize,
+ __info: *mut _msg_info64,
+ ) -> ::c_int;
+ pub fn MsgReply(
+ __rcvid: ::c_int,
+ __status: ::c_long,
+ __msg: *const ::c_void,
+ __bytes: usize,
+ ) -> ::c_int;
+ pub fn MsgReply_r(
+ __rcvid: ::c_int,
+ __status: ::c_long,
+ __msg: *const ::c_void,
+ __bytes: usize,
+ ) -> ::c_int;
+ pub fn MsgReplyv(
+ __rcvid: ::c_int,
+ __status: ::c_long,
+ __iov: *const ::iovec,
+ __parts: usize,
+ ) -> ::c_int;
+ pub fn MsgReplyv_r(
+ __rcvid: ::c_int,
+ __status: ::c_long,
+ __iov: *const ::iovec,
+ __parts: usize,
+ ) -> ::c_int;
+ pub fn MsgReadiov(
+ __rcvid: ::c_int,
+ __iov: *const ::iovec,
+ __parts: usize,
+ __offset: usize,
+ __flags: ::c_int,
+ ) -> isize;
+ pub fn MsgReadiov_r(
+ __rcvid: ::c_int,
+ __iov: *const ::iovec,
+ __parts: usize,
+ __offset: usize,
+ __flags: ::c_int,
+ ) -> isize;
+ pub fn MsgRead(
+ __rcvid: ::c_int,
+ __msg: *mut ::c_void,
+ __bytes: usize,
+ __offset: usize,
+ ) -> isize;
+ pub fn MsgRead_r(
+ __rcvid: ::c_int,
+ __msg: *mut ::c_void,
+ __bytes: usize,
+ __offset: usize,
+ ) -> isize;
+ pub fn MsgReadv(
+ __rcvid: ::c_int,
+ __iov: *const ::iovec,
+ __parts: usize,
+ __offset: usize,
+ ) -> isize;
+ pub fn MsgReadv_r(
+ __rcvid: ::c_int,
+ __iov: *const ::iovec,
+ __parts: usize,
+ __offset: usize,
+ ) -> isize;
+ pub fn MsgWrite(
+ __rcvid: ::c_int,
+ __msg: *const ::c_void,
+ __bytes: usize,
+ __offset: usize,
+ ) -> isize;
+ pub fn MsgWrite_r(
+ __rcvid: ::c_int,
+ __msg: *const ::c_void,
+ __bytes: usize,
+ __offset: usize,
+ ) -> isize;
+ pub fn MsgWritev(
+ __rcvid: ::c_int,
+ __iov: *const ::iovec,
+ __parts: usize,
+ __offset: usize,
+ ) -> isize;
+ pub fn MsgWritev_r(
+ __rcvid: ::c_int,
+ __iov: *const ::iovec,
+ __parts: usize,
+ __offset: usize,
+ ) -> isize;
+ pub fn MsgSendPulse(
+ __coid: ::c_int,
+ __priority: ::c_int,
+ __code: ::c_int,
+ __value: ::c_int,
+ ) -> ::c_int;
+ pub fn MsgSendPulse_r(
+ __coid: ::c_int,
+ __priority: ::c_int,
+ __code: ::c_int,
+ __value: ::c_int,
+ ) -> ::c_int;
+ pub fn MsgSendPulsePtr(
+ __coid: ::c_int,
+ __priority: ::c_int,
+ __code: ::c_int,
+ __value: *mut ::c_void,
+ ) -> ::c_int;
+ pub fn MsgSendPulsePtr_r(
+ __coid: ::c_int,
+ __priority: ::c_int,
+ __code: ::c_int,
+ __value: *mut ::c_void,
+ ) -> ::c_int;
+ pub fn MsgDeliverEvent(__rcvid: ::c_int, __event: *const ::sigevent) -> ::c_int;
+ pub fn MsgDeliverEvent_r(__rcvid: ::c_int, __event: *const ::sigevent) -> ::c_int;
+ pub fn MsgVerifyEvent(__rcvid: ::c_int, __event: *const ::sigevent) -> ::c_int;
+ pub fn MsgVerifyEvent_r(__rcvid: ::c_int, __event: *const ::sigevent) -> ::c_int;
+ pub fn MsgRegisterEvent(__event: *mut ::sigevent, __coid: ::c_int) -> ::c_int;
+ pub fn MsgRegisterEvent_r(__event: *mut ::sigevent, __coid: ::c_int) -> ::c_int;
+ pub fn MsgUnregisterEvent(__event: *const ::sigevent) -> ::c_int;
+ pub fn MsgUnregisterEvent_r(__event: *const ::sigevent) -> ::c_int;
+ pub fn MsgInfo(__rcvid: ::c_int, __info: *mut _msg_info64) -> ::c_int;
+ pub fn MsgInfo_r(__rcvid: ::c_int, __info: *mut _msg_info64) -> ::c_int;
+ pub fn MsgKeyData(
+ __rcvid: ::c_int,
+ __oper: ::c_int,
+ __key: u32,
+ __newkey: *mut u32,
+ __iov: *const ::iovec,
+ __parts: ::c_int,
+ ) -> ::c_int;
+ pub fn MsgKeyData_r(
+ __rcvid: ::c_int,
+ __oper: ::c_int,
+ __key: u32,
+ __newkey: *mut u32,
+ __iov: *const ::iovec,
+ __parts: ::c_int,
+ ) -> ::c_int;
+ pub fn MsgError(__rcvid: ::c_int, __err: ::c_int) -> ::c_int;
+ pub fn MsgError_r(__rcvid: ::c_int, __err: ::c_int) -> ::c_int;
+ pub fn MsgCurrent(__rcvid: ::c_int) -> ::c_int;
+ pub fn MsgCurrent_r(__rcvid: ::c_int) -> ::c_int;
+ pub fn MsgSendAsyncGbl(
+ __coid: ::c_int,
+ __smsg: *const ::c_void,
+ __sbytes: usize,
+ __msg_prio: ::c_uint,
+ ) -> ::c_int;
+ pub fn MsgSendAsync(__coid: ::c_int) -> ::c_int;
+ pub fn MsgReceiveAsyncGbl(
+ __chid: ::c_int,
+ __rmsg: *mut ::c_void,
+ __rbytes: usize,
+ __info: *mut _msg_info64,
+ __coid: ::c_int,
+ ) -> ::c_int;
+ pub fn MsgReceiveAsync(__chid: ::c_int, __iov: *const ::iovec, __parts: ::c_uint) -> ::c_int;
+ pub fn MsgPause(__rcvid: ::c_int, __cookie: ::c_uint) -> ::c_int;
+ pub fn MsgPause_r(__rcvid: ::c_int, __cookie: ::c_uint) -> ::c_int;
+
+ pub fn SignalKill(
+ __nd: u32,
+ __pid: ::pid_t,
+ __tid: ::c_int,
+ __signo: ::c_int,
+ __code: ::c_int,
+ __value: ::c_int,
+ ) -> ::c_int;
+ pub fn SignalKill_r(
+ __nd: u32,
+ __pid: ::pid_t,
+ __tid: ::c_int,
+ __signo: ::c_int,
+ __code: ::c_int,
+ __value: ::c_int,
+ ) -> ::c_int;
+ pub fn SignalKillSigval(
+ __nd: u32,
+ __pid: ::pid_t,
+ __tid: ::c_int,
+ __signo: ::c_int,
+ __code: ::c_int,
+ __value: *const ::sigval,
+ ) -> ::c_int;
+ pub fn SignalKillSigval_r(
+ __nd: u32,
+ __pid: ::pid_t,
+ __tid: ::c_int,
+ __signo: ::c_int,
+ __code: ::c_int,
+ __value: *const ::sigval,
+ ) -> ::c_int;
+ pub fn SignalReturn(__info: *mut _sighandler_info) -> ::c_int;
+ pub fn SignalFault(__sigcode: ::c_uint, __regs: *mut ::c_void, __refaddr: usize) -> ::c_int;
+ pub fn SignalAction(
+ __pid: ::pid_t,
+ __sigstub: unsafe extern "C" fn(),
+ __signo: ::c_int,
+ __act: *const ::sigaction,
+ __oact: *mut ::sigaction,
+ ) -> ::c_int;
+ pub fn SignalAction_r(
+ __pid: ::pid_t,
+ __sigstub: unsafe extern "C" fn(),
+ __signo: ::c_int,
+ __act: *const ::sigaction,
+ __oact: *mut ::sigaction,
+ ) -> ::c_int;
+ pub fn SignalProcmask(
+ __pid: ::pid_t,
+ __tid: ::c_int,
+ __how: ::c_int,
+ __set: *const ::sigset_t,
+ __oldset: *mut ::sigset_t,
+ ) -> ::c_int;
+ pub fn SignalProcmask_r(
+ __pid: ::pid_t,
+ __tid: ::c_int,
+ __how: ::c_int,
+ __set: *const ::sigset_t,
+ __oldset: *mut ::sigset_t,
+ ) -> ::c_int;
+ pub fn SignalSuspend(__set: *const ::sigset_t) -> ::c_int;
+ pub fn SignalSuspend_r(__set: *const ::sigset_t) -> ::c_int;
+ pub fn SignalWaitinfo(__set: *const ::sigset_t, __info: *mut ::siginfo_t) -> ::c_int;
+ pub fn SignalWaitinfo_r(__set: *const ::sigset_t, __info: *mut ::siginfo_t) -> ::c_int;
+ pub fn SignalWaitinfoMask(
+ __set: *const ::sigset_t,
+ __info: *mut ::siginfo_t,
+ __mask: *const ::sigset_t,
+ ) -> ::c_int;
+ pub fn SignalWaitinfoMask_r(
+ __set: *const ::sigset_t,
+ __info: *mut ::siginfo_t,
+ __mask: *const ::sigset_t,
+ ) -> ::c_int;
+ pub fn ThreadCreate(
+ __pid: ::pid_t,
+ __func: unsafe extern "C" fn(__arg: *mut ::c_void) -> *mut ::c_void,
+ __arg: *mut ::c_void,
+ __attr: *const ::_thread_attr,
+ ) -> ::c_int;
+ pub fn ThreadCreate_r(
+ __pid: ::pid_t,
+ __func: unsafe extern "C" fn(__arg: *mut ::c_void) -> *mut ::c_void,
+ __arg: *mut ::c_void,
+ __attr: *const ::_thread_attr,
+ ) -> ::c_int;
+
+ pub fn ThreadDestroy(__tid: ::c_int, __priority: ::c_int, __status: *mut ::c_void) -> ::c_int;
+ pub fn ThreadDestroy_r(__tid: ::c_int, __priority: ::c_int, __status: *mut ::c_void)
+ -> ::c_int;
+ pub fn ThreadDetach(__tid: ::c_int) -> ::c_int;
+ pub fn ThreadDetach_r(__tid: ::c_int) -> ::c_int;
+ pub fn ThreadJoin(__tid: ::c_int, __status: *mut *mut ::c_void) -> ::c_int;
+ pub fn ThreadJoin_r(__tid: ::c_int, __status: *mut *mut ::c_void) -> ::c_int;
+ pub fn ThreadCancel(__tid: ::c_int, __canstub: unsafe extern "C" fn()) -> ::c_int;
+ pub fn ThreadCancel_r(__tid: ::c_int, __canstub: unsafe extern "C" fn()) -> ::c_int;
+ pub fn ThreadCtl(__cmd: ::c_int, __data: *mut ::c_void) -> ::c_int;
+ pub fn ThreadCtl_r(__cmd: ::c_int, __data: *mut ::c_void) -> ::c_int;
+ pub fn ThreadCtlExt(
+ __pid: ::pid_t,
+ __tid: ::c_int,
+ __cmd: ::c_int,
+ __data: *mut ::c_void,
+ ) -> ::c_int;
+ pub fn ThreadCtlExt_r(
+ __pid: ::pid_t,
+ __tid: ::c_int,
+ __cmd: ::c_int,
+ __data: *mut ::c_void,
+ ) -> ::c_int;
+
+ pub fn InterruptHookTrace(
+ __handler: ::Option<unsafe extern "C" fn(arg1: ::c_int) -> *const ::sigevent>,
+ __flags: ::c_uint,
+ ) -> ::c_int;
+ pub fn InterruptHookIdle(
+ __handler: ::Option<unsafe extern "C" fn(arg1: *mut u64, arg2: *mut qtime_entry)>,
+ __flags: ::c_uint,
+ ) -> ::c_int;
+ pub fn InterruptHookIdle2(
+ __handler: ::Option<
+ unsafe extern "C" fn(arg1: ::c_uint, arg2: *mut syspage_entry, arg3: *mut _idle_hook),
+ >,
+ __flags: ::c_uint,
+ ) -> ::c_int;
+ pub fn InterruptHookOverdriveEvent(__event: *const ::sigevent, __flags: ::c_uint) -> ::c_int;
+ pub fn InterruptAttachEvent(
+ __intr: ::c_int,
+ __event: *const ::sigevent,
+ __flags: ::c_uint,
+ ) -> ::c_int;
+ pub fn InterruptAttachEvent_r(
+ __intr: ::c_int,
+ __event: *const ::sigevent,
+ __flags: ::c_uint,
+ ) -> ::c_int;
+ pub fn InterruptAttach(
+ __intr: ::c_int,
+ __handler: ::Option<
+ unsafe extern "C" fn(__area: *mut ::c_void, __id: ::c_int) -> *const ::sigevent,
+ >,
+ __area: *const ::c_void,
+ __size: ::c_int,
+ __flags: ::c_uint,
+ ) -> ::c_int;
+ pub fn InterruptAttach_r(
+ __intr: ::c_int,
+ __handler: ::Option<
+ unsafe extern "C" fn(__area: *mut ::c_void, __id: ::c_int) -> *const ::sigevent,
+ >,
+ __area: *const ::c_void,
+ __size: ::c_int,
+ __flags: ::c_uint,
+ ) -> ::c_int;
+ pub fn InterruptAttachArray(
+ __intr: ::c_int,
+ __handler: ::Option<
+ unsafe extern "C" fn(__area: *mut ::c_void, __id: ::c_int) -> *const *const ::sigevent,
+ >,
+ __area: *const ::c_void,
+ __size: ::c_int,
+ __flags: ::c_uint,
+ ) -> ::c_int;
+ pub fn InterruptAttachArray_r(
+ __intr: ::c_int,
+ __handler: ::Option<
+ unsafe extern "C" fn(__area: *mut ::c_void, __id: ::c_int) -> *const *const ::sigevent,
+ >,
+ __area: *const ::c_void,
+ __size: ::c_int,
+ __flags: ::c_uint,
+ ) -> ::c_int;
+ pub fn InterruptDetach(__id: ::c_int) -> ::c_int;
+ pub fn InterruptDetach_r(__id: ::c_int) -> ::c_int;
+ pub fn InterruptWait(__flags: ::c_int, __timeout: *const u64) -> ::c_int;
+ pub fn InterruptWait_r(__flags: ::c_int, __timeout: *const u64) -> ::c_int;
+ pub fn InterruptCharacteristic(
+ __type: ::c_int,
+ __id: ::c_int,
+ __new: *mut ::c_uint,
+ __old: *mut ::c_uint,
+ ) -> ::c_int;
+ pub fn InterruptCharacteristic_r(
+ __type: ::c_int,
+ __id: ::c_int,
+ __new: *mut ::c_uint,
+ __old: *mut ::c_uint,
+ ) -> ::c_int;
+
+ pub fn SchedGet(__pid: ::pid_t, __tid: ::c_int, __param: *mut ::sched_param) -> ::c_int;
+ pub fn SchedGet_r(__pid: ::pid_t, __tid: ::c_int, __param: *mut ::sched_param) -> ::c_int;
+ pub fn SchedGetCpuNum() -> ::c_uint;
+ pub fn SchedSet(
+ __pid: ::pid_t,
+ __tid: ::c_int,
+ __algorithm: ::c_int,
+ __param: *const ::sched_param,
+ ) -> ::c_int;
+ pub fn SchedSet_r(
+ __pid: ::pid_t,
+ __tid: ::c_int,
+ __algorithm: ::c_int,
+ __param: *const ::sched_param,
+ ) -> ::c_int;
+ pub fn SchedInfo(__pid: ::pid_t, __algorithm: ::c_int, __info: *mut ::_sched_info) -> ::c_int;
+ pub fn SchedInfo_r(__pid: ::pid_t, __algorithm: ::c_int, __info: *mut ::_sched_info)
+ -> ::c_int;
+ pub fn SchedYield() -> ::c_int;
+ pub fn SchedYield_r() -> ::c_int;
+ pub fn SchedCtl(__cmd: ::c_int, __data: *mut ::c_void, __length: usize) -> ::c_int;
+ pub fn SchedCtl_r(__cmd: ::c_int, __data: *mut ::c_void, __length: usize) -> ::c_int;
+ pub fn SchedJobCreate(__job: *mut nto_job_t) -> ::c_int;
+ pub fn SchedJobCreate_r(__job: *mut nto_job_t) -> ::c_int;
+ pub fn SchedJobDestroy(__job: *mut nto_job_t) -> ::c_int;
+ pub fn SchedJobDestroy_r(__job: *mut nto_job_t) -> ::c_int;
+ pub fn SchedWaypoint(
+ __job: *mut nto_job_t,
+ __new: *const i64,
+ __max: *const i64,
+ __old: *mut i64,
+ ) -> ::c_int;
+ pub fn SchedWaypoint_r(
+ __job: *mut nto_job_t,
+ __new: *const i64,
+ __max: *const i64,
+ __old: *mut i64,
+ ) -> ::c_int;
+
+ pub fn TimerCreate(__id: ::clockid_t, __notify: *const ::sigevent) -> ::c_int;
+ pub fn TimerCreate_r(__id: ::clockid_t, __notify: *const ::sigevent) -> ::c_int;
+ pub fn TimerDestroy(__id: ::timer_t) -> ::c_int;
+ pub fn TimerDestroy_r(__id: ::timer_t) -> ::c_int;
+ pub fn TimerSettime(
+ __id: ::timer_t,
+ __flags: ::c_int,
+ __itime: *const ::_itimer,
+ __oitime: *mut ::_itimer,
+ ) -> ::c_int;
+ pub fn TimerSettime_r(
+ __id: ::timer_t,
+ __flags: ::c_int,
+ __itime: *const ::_itimer,
+ __oitime: *mut ::_itimer,
+ ) -> ::c_int;
+ pub fn TimerInfo(
+ __pid: ::pid_t,
+ __id: ::timer_t,
+ __flags: ::c_int,
+ __info: *mut ::_timer_info,
+ ) -> ::c_int;
+ pub fn TimerInfo_r(
+ __pid: ::pid_t,
+ __id: ::timer_t,
+ __flags: ::c_int,
+ __info: *mut ::_timer_info,
+ ) -> ::c_int;
+ pub fn TimerAlarm(
+ __id: ::clockid_t,
+ __itime: *const ::_itimer,
+ __otime: *mut ::_itimer,
+ ) -> ::c_int;
+ pub fn TimerAlarm_r(
+ __id: ::clockid_t,
+ __itime: *const ::_itimer,
+ __otime: *mut ::_itimer,
+ ) -> ::c_int;
+ pub fn TimerTimeout(
+ __id: ::clockid_t,
+ __flags: ::c_int,
+ __notify: *const ::sigevent,
+ __ntime: *const u64,
+ __otime: *mut u64,
+ ) -> ::c_int;
+ pub fn TimerTimeout_r(
+ __id: ::clockid_t,
+ __flags: ::c_int,
+ __notify: *const ::sigevent,
+ __ntime: *const u64,
+ __otime: *mut u64,
+ ) -> ::c_int;
+
+ pub fn SyncTypeCreate(
+ __type: ::c_uint,
+ __sync: *mut ::sync_t,
+ __attr: *const ::_sync_attr,
+ ) -> ::c_int;
+ pub fn SyncTypeCreate_r(
+ __type: ::c_uint,
+ __sync: *mut ::sync_t,
+ __attr: *const ::_sync_attr,
+ ) -> ::c_int;
+ pub fn SyncDestroy(__sync: *mut ::sync_t) -> ::c_int;
+ pub fn SyncDestroy_r(__sync: *mut ::sync_t) -> ::c_int;
+ pub fn SyncCtl(__cmd: ::c_int, __sync: *mut ::sync_t, __data: *mut ::c_void) -> ::c_int;
+ pub fn SyncCtl_r(__cmd: ::c_int, __sync: *mut ::sync_t, __data: *mut ::c_void) -> ::c_int;
+ pub fn SyncMutexEvent(__sync: *mut ::sync_t, event: *const ::sigevent) -> ::c_int;
+ pub fn SyncMutexEvent_r(__sync: *mut ::sync_t, event: *const ::sigevent) -> ::c_int;
+ pub fn SyncMutexLock(__sync: *mut ::sync_t) -> ::c_int;
+ pub fn SyncMutexLock_r(__sync: *mut ::sync_t) -> ::c_int;
+ pub fn SyncMutexUnlock(__sync: *mut ::sync_t) -> ::c_int;
+ pub fn SyncMutexUnlock_r(__sync: *mut ::sync_t) -> ::c_int;
+ pub fn SyncMutexRevive(__sync: *mut ::sync_t) -> ::c_int;
+ pub fn SyncMutexRevive_r(__sync: *mut ::sync_t) -> ::c_int;
+ pub fn SyncCondvarWait(__sync: *mut ::sync_t, __mutex: *mut ::sync_t) -> ::c_int;
+ pub fn SyncCondvarWait_r(__sync: *mut ::sync_t, __mutex: *mut ::sync_t) -> ::c_int;
+ pub fn SyncCondvarSignal(__sync: *mut ::sync_t, __all: ::c_int) -> ::c_int;
+ pub fn SyncCondvarSignal_r(__sync: *mut ::sync_t, __all: ::c_int) -> ::c_int;
+ pub fn SyncSemPost(__sync: *mut ::sync_t) -> ::c_int;
+ pub fn SyncSemPost_r(__sync: *mut ::sync_t) -> ::c_int;
+ pub fn SyncSemWait(__sync: *mut ::sync_t, __tryto: ::c_int) -> ::c_int;
+ pub fn SyncSemWait_r(__sync: *mut ::sync_t, __tryto: ::c_int) -> ::c_int;
+
+ pub fn ClockTime(__id: ::clockid_t, _new: *const u64, __old: *mut u64) -> ::c_int;
+ pub fn ClockTime_r(__id: ::clockid_t, _new: *const u64, __old: *mut u64) -> ::c_int;
+ pub fn ClockAdjust(
+ __id: ::clockid_t,
+ _new: *const ::_clockadjust,
+ __old: *mut ::_clockadjust,
+ ) -> ::c_int;
+ pub fn ClockAdjust_r(
+ __id: ::clockid_t,
+ _new: *const ::_clockadjust,
+ __old: *mut ::_clockadjust,
+ ) -> ::c_int;
+ pub fn ClockPeriod(
+ __id: ::clockid_t,
+ _new: *const ::_clockperiod,
+ __old: *mut ::_clockperiod,
+ __reserved: ::c_int,
+ ) -> ::c_int;
+ pub fn ClockPeriod_r(
+ __id: ::clockid_t,
+ _new: *const ::_clockperiod,
+ __old: *mut ::_clockperiod,
+ __reserved: ::c_int,
+ ) -> ::c_int;
+ pub fn ClockId(__pid: ::pid_t, __tid: ::c_int) -> ::c_int;
+ pub fn ClockId_r(__pid: ::pid_t, __tid: ::c_int) -> ::c_int;
+
+ //
+    //TODO: The following commented-out functions are implemented in assembly.
+    //      We can implement them either via a C stub or Rust's inline assembly.
+ //
+ //pub fn InterruptEnable();
+ //pub fn InterruptDisable();
+ pub fn InterruptMask(__intr: ::c_int, __id: ::c_int) -> ::c_int;
+ pub fn InterruptUnmask(__intr: ::c_int, __id: ::c_int) -> ::c_int;
+ //pub fn InterruptLock(__spin: *mut ::intrspin);
+ //pub fn InterruptUnlock(__spin: *mut ::intrspin);
+ //pub fn InterruptStatus() -> ::c_uint;
+}
diff --git a/vendor/libc/src/unix/nto/x86_64.rs b/vendor/libc/src/unix/nto/x86_64.rs
new file mode 100644
index 000000000..3a1d230bb
--- /dev/null
+++ b/vendor/libc/src/unix/nto/x86_64.rs
@@ -0,0 +1,132 @@
+pub type c_char = i8;
+pub type wchar_t = u32;
+pub type c_long = i64;
+pub type c_ulong = u64;
+pub type time_t = i64;
+
+s! {
+ #[repr(align(8))]
+ pub struct x86_64_cpu_registers {
+ pub rdi: u64,
+ pub rsi: u64,
+ pub rdx: u64,
+ pub r10: u64,
+ pub r8: u64,
+ pub r9: u64,
+ pub rax: u64,
+ pub rbx: u64,
+ pub rbp: u64,
+ pub rcx: u64,
+ pub r11: u64,
+ pub r12: u64,
+ pub r13: u64,
+ pub r14: u64,
+ pub r15: u64,
+ pub rip: u64,
+ pub cs: u32,
+ rsvd1: u32,
+ pub rflags: u64,
+ pub rsp: u64,
+ pub ss: u32,
+ rsvd2: u32,
+ }
+
+ #[repr(align(8))]
+ pub struct mcontext_t {
+ pub cpu: x86_64_cpu_registers,
+ #[cfg(libc_union)]
+ pub fpu: x86_64_fpu_registers,
+ #[cfg(not(libc_union))]
+ __reserved: [u8; 1024],
+ }
+
+ pub struct stack_t {
+ pub ss_sp: *mut ::c_void,
+ pub ss_size: ::size_t,
+ pub ss_flags: ::c_int,
+ }
+
+ pub struct fsave_area_64 {
+ pub fpu_control_word: u32,
+ pub fpu_status_word: u32,
+ pub fpu_tag_word: u32,
+ pub fpu_ip: u32,
+ pub fpu_cs: u32,
+ pub fpu_op: u32,
+ pub fpu_ds: u32,
+ pub st_regs: [u8; 80],
+ }
+
+ pub struct fxsave_area_64 {
+ pub fpu_control_word: u16,
+ pub fpu_status_word: u16,
+ pub fpu_tag_word: u16,
+ pub fpu_operand: u16,
+ pub fpu_rip: u64,
+ pub fpu_rdp: u64,
+ pub mxcsr: u32,
+ pub mxcsr_mask: u32,
+ pub st_regs: [u8; 128],
+ pub xmm_regs: [u8; 128],
+ reserved2: [u8; 224],
+ }
+
+ pub struct fpu_extention_savearea_64 {
+ pub other: [u8; 512],
+ pub xstate_bv: u64,
+ pub xstate_undef: [u64; 7],
+ pub xstate_info: [u8; 224],
+ }
+}
+
+s_no_extra_traits! {
+ #[cfg(libc_union)]
+ pub union x86_64_fpu_registers {
+ pub fsave_area: fsave_area_64,
+ pub fxsave_area: fxsave_area_64,
+ pub xsave_area: fpu_extention_savearea_64,
+ pub data: [u8; 1024],
+ }
+}
+
+cfg_if! {
+ if #[cfg(feature = "extra_traits")] {
+ #[cfg(libc_union)]
+ impl Eq for x86_64_fpu_registers {}
+
+ #[cfg(libc_union)]
+ impl PartialEq for x86_64_fpu_registers {
+ fn eq(&self, other: &x86_64_fpu_registers) -> bool {
+ unsafe {
+ self.fsave_area == other.fsave_area
+ || self.fxsave_area == other.fxsave_area
+ || self.xsave_area == other.xsave_area
+ }
+ }
+ }
+
+ #[cfg(libc_union)]
+ impl ::fmt::Debug for x86_64_fpu_registers {
+ fn fmt(&self, f: &mut ::fmt::Formatter) -> ::fmt::Result {
+ unsafe {
+ f.debug_struct("x86_64_fpu_registers")
+ .field("fsave_area", &self.fsave_area)
+ .field("fxsave_area", &self.fxsave_area)
+ .field("xsave_area", &self.xsave_area)
+ .finish()
+ }
+ }
+ }
+
+ #[cfg(libc_union)]
+ impl ::hash::Hash for x86_64_fpu_registers {
+ fn hash<H: ::hash::Hasher>(&self, state: &mut H) {
+ unsafe {
+ self.fsave_area.hash(state);
+ self.fxsave_area.hash(state);
+ self.xsave_area.hash(state);
+ }
+ }
+ }
+ }
+}
diff --git a/vendor/libc/src/unix/solarish/mod.rs b/vendor/libc/src/unix/solarish/mod.rs
index abe304e8e..99135d5f5 100644
--- a/vendor/libc/src/unix/solarish/mod.rs
+++ b/vendor/libc/src/unix/solarish/mod.rs
@@ -2710,7 +2710,6 @@ extern "C" {
pub fn abs(i: ::c_int) -> ::c_int;
pub fn acct(filename: *const ::c_char) -> ::c_int;
- pub fn atof(s: *const ::c_char) -> ::c_double;
pub fn dirfd(dirp: *mut ::DIR) -> ::c_int;
pub fn labs(i: ::c_long) -> ::c_long;
pub fn rand() -> ::c_int;
diff --git a/vendor/libc/src/vxworks/mod.rs b/vendor/libc/src/vxworks/mod.rs
index 2772d68d2..6b705e8a2 100644
--- a/vendor/libc/src/vxworks/mod.rs
+++ b/vendor/libc/src/vxworks/mod.rs
@@ -1119,11 +1119,16 @@ extern "C" {
pub fn feof(stream: *mut FILE) -> c_int;
pub fn ferror(stream: *mut FILE) -> c_int;
pub fn perror(s: *const c_char);
+ pub fn atof(s: *const c_char) -> c_double;
pub fn atoi(s: *const c_char) -> c_int;
+ pub fn atol(s: *const c_char) -> c_long;
+ pub fn atoll(s: *const c_char) -> c_longlong;
pub fn strtod(s: *const c_char, endp: *mut *mut c_char) -> c_double;
pub fn strtof(s: *const c_char, endp: *mut *mut c_char) -> c_float;
pub fn strtol(s: *const c_char, endp: *mut *mut c_char, base: c_int) -> c_long;
+ pub fn strtoll(s: *const c_char, endp: *mut *mut c_char, base: c_int) -> c_longlong;
pub fn strtoul(s: *const c_char, endp: *mut *mut c_char, base: c_int) -> c_ulong;
+ pub fn strtoull(s: *const c_char, endp: *mut *mut c_char, base: c_int) -> c_ulonglong;
pub fn calloc(nobj: size_t, size: size_t) -> *mut c_void;
pub fn malloc(size: size_t) -> *mut c_void;
pub fn realloc(p: *mut c_void, size: size_t) -> *mut c_void;
diff --git a/vendor/libc/src/wasi.rs b/vendor/libc/src/wasi.rs
index c5dd67047..abfebd643 100644
--- a/vendor/libc/src/wasi.rs
+++ b/vendor/libc/src/wasi.rs
@@ -540,12 +540,16 @@ extern "C" {
pub fn setvbuf(stream: *mut FILE, buffer: *mut c_char, mode: c_int, size: size_t) -> c_int;
pub fn setbuf(stream: *mut FILE, buf: *mut c_char);
pub fn fgets(buf: *mut c_char, n: c_int, stream: *mut FILE) -> *mut c_char;
- pub fn atoi(s: *const c_char) -> c_int;
pub fn atof(s: *const c_char) -> c_double;
+ pub fn atoi(s: *const c_char) -> c_int;
+ pub fn atol(s: *const c_char) -> c_long;
+ pub fn atoll(s: *const c_char) -> c_longlong;
pub fn strtod(s: *const c_char, endp: *mut *mut c_char) -> c_double;
pub fn strtof(s: *const c_char, endp: *mut *mut c_char) -> c_float;
pub fn strtol(s: *const c_char, endp: *mut *mut c_char, base: c_int) -> c_long;
+ pub fn strtoll(s: *const c_char, endp: *mut *mut c_char, base: c_int) -> c_longlong;
pub fn strtoul(s: *const c_char, endp: *mut *mut c_char, base: c_int) -> c_ulong;
+ pub fn strtoull(s: *const c_char, endp: *mut *mut c_char, base: c_int) -> c_ulonglong;
pub fn strcpy(dst: *mut c_char, src: *const c_char) -> *mut c_char;
pub fn strncpy(dst: *mut c_char, src: *const c_char, n: size_t) -> *mut c_char;
diff --git a/vendor/libc/src/windows/mod.rs b/vendor/libc/src/windows/mod.rs
index 916019b1f..7f2f1ded1 100644
--- a/vendor/libc/src/windows/mod.rs
+++ b/vendor/libc/src/windows/mod.rs
@@ -329,11 +329,16 @@ extern "C" {
pub fn feof(stream: *mut FILE) -> c_int;
pub fn ferror(stream: *mut FILE) -> c_int;
pub fn perror(s: *const c_char);
+ pub fn atof(s: *const c_char) -> c_double;
pub fn atoi(s: *const c_char) -> c_int;
+ pub fn atol(s: *const c_char) -> c_long;
+ pub fn atoll(s: *const c_char) -> c_longlong;
pub fn strtod(s: *const c_char, endp: *mut *mut c_char) -> c_double;
pub fn strtof(s: *const c_char, endp: *mut *mut c_char) -> c_float;
pub fn strtol(s: *const c_char, endp: *mut *mut c_char, base: c_int) -> c_long;
+ pub fn strtoll(s: *const c_char, endp: *mut *mut c_char, base: c_int) -> c_longlong;
pub fn strtoul(s: *const c_char, endp: *mut *mut c_char, base: c_int) -> c_ulong;
+ pub fn strtoull(s: *const c_char, endp: *mut *mut c_char, base: c_int) -> c_ulonglong;
pub fn calloc(nobj: size_t, size: size_t) -> *mut c_void;
pub fn malloc(size: size_t) -> *mut c_void;
pub fn realloc(p: *mut c_void, size: size_t) -> *mut c_void;
@@ -374,7 +379,6 @@ extern "C" {
pub fn memset(dest: *mut c_void, c: c_int, n: size_t) -> *mut c_void;
pub fn abs(i: c_int) -> c_int;
- pub fn atof(s: *const c_char) -> c_double;
pub fn labs(i: c_long) -> c_long;
pub fn rand() -> c_int;
pub fn srand(seed: c_uint);
diff --git a/vendor/litemap/.cargo-checksum.json b/vendor/litemap/.cargo-checksum.json
index 1ff02fee7..46a4f357e 100644
--- a/vendor/litemap/.cargo-checksum.json
+++ b/vendor/litemap/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.lock":"46b05d0a550d86bc76aecd44a1d744c6390eda4765c98ceb4924bf0bb1caa970","Cargo.toml":"3cc1ad5a8dcfe5aed44bee3fc06fd5d2a13b739bc5ca7e4f6a64887ffbd0446e","LICENSE":"4ad7541d66a407234e2c84902124cef325c29f3e966353efdb800bedb8b8da21","README.md":"021453bdd013041c191ed05243145d6fcd69d4b2e3c34685d80ee1bb96014495","benches/bin/litemap_bincode.rs":"f68c78c5438173355270560bc7fcc0b1cabc5a9a3001b13e03feb29ba8de60d1","benches/bin/litemap_postcard.rs":"2864ffc0caad3e51e67a1dd2edf23b8e924b3143b683a1101fb5113eccb4efa6","benches/litemap.rs":"56af028c07cd83d7018bcbce19855f31697ed940b508fbf5f8e49bedf52c87bc","benches/testdata/large_litemap.postcard":"7316edaf9b1b618eaa33a22cdc1f02474d53bea4fdd664ad10099235e4de5f0d","examples/language_names_hash_map.rs":"705d1049e64d6080571806f8a06330c6a3b126945bcf3bc341e1c707aad4e8f5","examples/language_names_lite_map.rs":"767246fa90005d2450f60fe1cb92c4431610a44b6e861f6146562595214996d8","src/lib.rs":"cf9c3d91961f85b96680d935f44ec0650c8963ecca4c7c80261840cc2e7b9ce5","src/map.rs":"54695e4232c2f9719586fd121aeafc342ac4c259505285c15eef2ac59f0c5198","src/serde.rs":"4308da055ada25f8fd8b1d5a9398a4bfa58936971b638148b45b2d4beb3f0c7d","src/serde_helpers.rs":"72787005972b93e49b9dc17aa47d30699364e6da9dc95aadb820ce58e4bf5c54","src/store/mod.rs":"a35aa4837c80af61cd9e0d27b7120cc26cc2fbe0e215a14fe15eed980bc4757f","src/store/slice_impl.rs":"34d46f5e3db0f81f391444b764f2fce7592efe1b758bef3f56a283dc24bf041d","src/store/vec_impl.rs":"c94cf9402fb6710500c0b193a476c705ed10cea9121e8c784bbacc88842539e4","src/testing.rs":"5ecf1642fed31150b29451987a89d0b134d94d62078c7f343fa9754a02f4bd90","tests/rkyv.rs":"6400df6740aa5a3f3831ffc4559ada8a431f33e1a48924fcd2b168cda442d835","tests/serde.rs":"0051274f8490c5837d88447bf72fc7266e970fa95c6e4ca540eeba45b7947ce6","tests/store.rs":"6f9506b301d82c9c1bdd737515d476002d06bd89e2f77fd00bc498653a67a629"},"package":"f34a3f4798fac63fb48cf277eefa38f94d3443baff555bb98e4f56bc9092368e"} \ No newline at end of file
+{"files":{"Cargo.lock":"33b6362389865144374a4952e3df4580aa90e25ccd6e141da42211158c667a34","Cargo.toml":"f8d7c3287cac0d53910dd611de6d0b1b883f43415bcfdbbd3e9396b75e9989c0","LICENSE":"4ad7541d66a407234e2c84902124cef325c29f3e966353efdb800bedb8b8da21","README.md":"021453bdd013041c191ed05243145d6fcd69d4b2e3c34685d80ee1bb96014495","benches/bin/litemap_bincode.rs":"f68c78c5438173355270560bc7fcc0b1cabc5a9a3001b13e03feb29ba8de60d1","benches/bin/litemap_postcard.rs":"2864ffc0caad3e51e67a1dd2edf23b8e924b3143b683a1101fb5113eccb4efa6","benches/litemap.rs":"dc4ca991b9f0ce618005e0e3133738459c7db580744756a0568da7a0f4f6d1be","benches/testdata/large_litemap.postcard":"7316edaf9b1b618eaa33a22cdc1f02474d53bea4fdd664ad10099235e4de5f0d","examples/language_names_hash_map.rs":"705d1049e64d6080571806f8a06330c6a3b126945bcf3bc341e1c707aad4e8f5","examples/language_names_lite_map.rs":"767246fa90005d2450f60fe1cb92c4431610a44b6e861f6146562595214996d8","src/lib.rs":"cf9c3d91961f85b96680d935f44ec0650c8963ecca4c7c80261840cc2e7b9ce5","src/map.rs":"ff8aece659756a8818be30e28a08d4196de8ed1add81e6782bfd776d158276a2","src/serde.rs":"4308da055ada25f8fd8b1d5a9398a4bfa58936971b638148b45b2d4beb3f0c7d","src/serde_helpers.rs":"72787005972b93e49b9dc17aa47d30699364e6da9dc95aadb820ce58e4bf5c54","src/store/mod.rs":"6a8463f1e93c404f2564b6127dd49e85c9587bb2ec74ae6d0c621447e7642b6e","src/store/slice_impl.rs":"34d46f5e3db0f81f391444b764f2fce7592efe1b758bef3f56a283dc24bf041d","src/store/vec_impl.rs":"c94cf9402fb6710500c0b193a476c705ed10cea9121e8c784bbacc88842539e4","src/testing.rs":"5ecf1642fed31150b29451987a89d0b134d94d62078c7f343fa9754a02f4bd90","tests/rkyv.rs":"b19d91eda9105699a4340340f0a8961de5f02673599dd8eddb43269634777cc4","tests/serde.rs":"0051274f8490c5837d88447bf72fc7266e970fa95c6e4ca540eeba45b7947ce6","tests/store.rs":"6f9506b301d82c9c1bdd737515d476002d06bd89e2f77fd00bc498653a67a629"},"package":"575d8a551c59104b4df91269921e5eab561aa1b77c618dac0414b5d44a4617de"} \ No newline at end of file
diff --git a/vendor/litemap/Cargo.lock b/vendor/litemap/Cargo.lock
index 3d0f04dca..28af8845a 100644
--- a/vendor/litemap/Cargo.lock
+++ b/vendor/litemap/Cargo.lock
@@ -14,19 +14,10 @@ dependencies = [
]
[[package]]
-name = "aho-corasick"
-version = "0.7.19"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b4f55bd91a0978cbfd91c457a164bab8b4001c833b7f323132c0a4e1922dd44e"
-dependencies = [
- "memchr",
-]
-
-[[package]]
name = "atomic-polyfill"
-version = "0.1.10"
+version = "0.1.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9c041a8d9751a520ee19656232a18971f18946a7900f1520ee4400002244dd89"
+checksum = "e3ff7eb3f316534d83a8a2c3d1674ace8a5a71198eba31e2e2b597833f699b28"
dependencies = [
"critical-section",
]
@@ -37,7 +28,7 @@ version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
dependencies = [
- "hermit-abi",
+ "hermit-abi 0.1.19",
"libc",
"winapi",
]
@@ -49,21 +40,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
[[package]]
-name = "bare-metal"
-version = "0.2.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5deb64efa5bd81e31fcd1938615a6d98c82eafcbcd787162b6f63b91d6bac5b3"
-dependencies = [
- "rustc_version 0.2.3",
-]
-
-[[package]]
-name = "bare-metal"
-version = "1.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f8fe8f5a8a398345e52358e18ff07cc17a568fbca5c6f73873d3a62056309603"
-
-[[package]]
name = "bincode"
version = "1.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -73,18 +49,6 @@ dependencies = [
]
[[package]]
-name = "bit_field"
-version = "0.10.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dcb6dd1c2376d2e096796e234a70e17e94cc2d5d54ff8ce42b28cef1d0d359a4"
-
-[[package]]
-name = "bitfield"
-version = "0.13.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "46afbd2983a5d5a7bd740ccb198caf5b82f45c40c09c0eed36052d91cb92e719"
-
-[[package]]
name = "bitflags"
version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -104,9 +68,9 @@ dependencies = [
[[package]]
name = "bumpalo"
-version = "3.11.0"
+version = "3.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c1ad822118d20d2c234f427000d5acc36eabe1e29a348c89b63dd60b13f28e5d"
+checksum = "0d261e256854913907f67ed06efbc3338dfe6179796deefc1ff763fc1aee5535"
[[package]]
name = "bytecheck"
@@ -165,18 +129,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "67ba02a97a2bd10f4b59b25c7973101c79642302776489e030cd13cdab09ed15"
[[package]]
-name = "cortex-m"
-version = "0.7.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "70858629a458fdfd39f9675c4dc309411f2a3f83bede76988d81bf1a0ecee9e0"
-dependencies = [
- "bare-metal 0.2.5",
- "bitfield",
- "embedded-hal",
- "volatile-register",
-]
-
-[[package]]
name = "criterion"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -214,15 +166,9 @@ dependencies = [
[[package]]
name = "critical-section"
-version = "0.2.7"
+version = "1.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "95da181745b56d4bd339530ec393508910c909c784e8962d15d722bacf0bcbcd"
-dependencies = [
- "bare-metal 1.0.0",
- "cfg-if",
- "cortex-m",
- "riscv",
-]
+checksum = "6548a0ad5d2549e111e1f6a11a6c2e2d00ce6a3dafe22948d67c2b443f775e52"
[[package]]
name = "crossbeam-channel"
@@ -247,26 +193,24 @@ dependencies = [
[[package]]
name = "crossbeam-epoch"
-version = "0.9.10"
+version = "0.9.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "045ebe27666471bb549370b4b0b3e51b07f56325befa4284db65fc89c02511b1"
+checksum = "01a9af1f4c2ef74bb8aa1f7e19706bc72d03598c8a570bb5de72243c7a9d9d5a"
dependencies = [
"autocfg",
"cfg-if",
"crossbeam-utils",
"memoffset",
- "once_cell",
"scopeguard",
]
[[package]]
name = "crossbeam-utils"
-version = "0.8.11"
+version = "0.8.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "51887d4adc7b564537b15adcfb307936f8075dfcd5f00dde9a9f1d29383682bc"
+checksum = "4fb766fa798726286dbbb842f174001dab8abc7b627a1dd86e0b7222a95d929f"
dependencies = [
"cfg-if",
- "once_cell",
]
[[package]]
@@ -292,37 +236,16 @@ dependencies = [
]
[[package]]
-name = "displaydoc"
-version = "0.2.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3bf95dc3f046b9da4f2d51833c0d3547d8564ef6910f5c1ed130306a75b92886"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn",
-]
-
-[[package]]
name = "either"
version = "1.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "90e5c1c8368803113bf0c9584fc495a58b86dc8a29edbf8fe877d21d9507e797"
[[package]]
-name = "embedded-hal"
-version = "0.2.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "35949884794ad573cf46071e41c9b60efb0cb311e3ca01f7af807af1debc66ff"
-dependencies = [
- "nb 0.1.3",
- "void",
-]
-
-[[package]]
name = "getrandom"
-version = "0.2.7"
+version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4eb1a864a501629691edf6c15a593b7a51eebaa1e8468e9ddc623de7c9b58ec6"
+checksum = "c05aeb6a22b8f62540c194aac980f2115af067bfe15a0734d7277a768d396b31"
dependencies = [
"cfg-if",
"libc",
@@ -361,7 +284,7 @@ checksum = "db04bc24a18b9ea980628ecf00e6c0264f3c1426dac36c00cb49b6fbad8b0743"
dependencies = [
"atomic-polyfill",
"hash32",
- "rustc_version 0.4.0",
+ "rustc_version",
"serde",
"spin",
"stable_deref_trait",
@@ -377,21 +300,12 @@ dependencies = [
]
[[package]]
-name = "icu_benchmark_macros"
-version = "0.7.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6c867656f2d9c90b13709ac88e710a9d6afe33998c1dfa22384bab8804e8b3d4"
-
-[[package]]
-name = "icu_locid"
-version = "1.0.0-beta1"
+name = "hermit-abi"
+version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "db60d0dabdf1536b34f43f05ba3a98a7fd09de82a08c58104f3f218b8f027b67"
+checksum = "ee512640fe35acbfb4bb779db6f0d80704c2cacfa2e39b601ef3e3f47d1ae4c7"
dependencies = [
- "displaydoc",
- "litemap 0.5.0",
- "tinystr",
- "writeable",
+ "libc",
]
[[package]]
@@ -411,9 +325,9 @@ checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4"
[[package]]
name = "itoa"
-version = "1.0.3"
+version = "1.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6c8af84674fe1f223a982c933a0ee1086ac4d4052aa0fb8060c12c6ad838e754"
+checksum = "fad582f4b9e86b6caa621cabeb0963332d92eea04729ab12892c2533951e6440"
[[package]]
name = "js-sys"
@@ -432,25 +346,17 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "libc"
-version = "0.2.133"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c0f80d65747a3e43d1596c7c5492d95d5edddaabd45a7fcdb02b95f644164966"
-
-[[package]]
-name = "litemap"
-version = "0.5.0"
+version = "0.2.139"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9cf53a06d08341da913058f6a23488788a8bebc4773e72a43a2243e28a282942"
+checksum = "201de327520df007757c1f0adce6e827fe8562fbc28bfd9c15571c66ca1f5f79"
[[package]]
name = "litemap"
-version = "0.6.0"
+version = "0.6.1"
dependencies = [
"bincode",
"bytecheck",
"criterion",
- "icu_benchmark_macros",
- "icu_locid",
"postcard",
"rkyv",
"serde",
@@ -485,29 +391,14 @@ checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
[[package]]
name = "memoffset"
-version = "0.6.5"
+version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce"
+checksum = "5de893c32cde5f383baa4c04c5d6dbdd735cfd4a794b0debdb2bb1b421da5ff4"
dependencies = [
"autocfg",
]
[[package]]
-name = "nb"
-version = "0.1.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "801d31da0513b6ec5214e9bf433a77966320625a37860f910be265be6e18d06f"
-dependencies = [
- "nb 1.0.0",
-]
-
-[[package]]
-name = "nb"
-version = "1.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "546c37ac5d9e56f55e73b677106873d9d9f5190605e41a856503623648488cae"
-
-[[package]]
name = "num-traits"
version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -518,19 +409,19 @@ dependencies = [
[[package]]
name = "num_cpus"
-version = "1.13.1"
+version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "19e64526ebdee182341572e50e9ad03965aa510cd94427a4549448f285e957a1"
+checksum = "0fac9e2da13b5eb447a6ce3d392f23a29d8694bff781bf03a16cd9ac8697593b"
dependencies = [
- "hermit-abi",
+ "hermit-abi 0.2.6",
"libc",
]
[[package]]
name = "once_cell"
-version = "1.15.0"
+version = "1.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e82dad04139b71a90c080c8463fe0dc7902db5192d939bd0950f074d014339e1"
+checksum = "6f61fba1741ea2b3d6a1e3178721804bb716a68a6aeba1149b5d52e3d464ea66"
[[package]]
name = "oorandom"
@@ -579,9 +470,9 @@ dependencies = [
[[package]]
name = "proc-macro2"
-version = "1.0.44"
+version = "1.0.50"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7bd7356a8122b6c4a24a82b278680c73357984ca2fc79a0f9fa6dea7dced7c58"
+checksum = "6ef7d57beacfaf2d8aee5937dab7b7f28de3cb8b1828479bb5de2a7106f2bae2"
dependencies = [
"unicode-ident",
]
@@ -608,30 +499,28 @@ dependencies = [
[[package]]
name = "quote"
-version = "1.0.21"
+version = "1.0.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179"
+checksum = "8856d8364d252a14d474036ea1358d63c9e6965c8e5c1885c18f73d70bff9c7b"
dependencies = [
"proc-macro2",
]
[[package]]
name = "rayon"
-version = "1.5.3"
+version = "1.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bd99e5772ead8baa5215278c9b15bf92087709e9c1b2d1f97cdb5a183c933a7d"
+checksum = "6db3a213adf02b3bcfd2d3846bb41cb22857d131789e01df434fb7e7bc0759b7"
dependencies = [
- "autocfg",
- "crossbeam-deque",
"either",
"rayon-core",
]
[[package]]
name = "rayon-core"
-version = "1.9.3"
+version = "1.10.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "258bcdb5ac6dad48491bb2992db6b7cf74878b0384908af124823d118c99683f"
+checksum = "356a0625f1954f730c0201cdab48611198dc6ce21f4acff55089b5a78e6e835b"
dependencies = [
"crossbeam-channel",
"crossbeam-deque",
@@ -641,12 +530,10 @@ dependencies = [
[[package]]
name = "regex"
-version = "1.6.0"
+version = "1.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4c4eb3267174b8c6c2f654116623910a0fef09c4753f8dd83db29c48a0df988b"
+checksum = "48aaa5748ba571fb95cd2c85c09f629215d3a6ece942baa100950af03a34f733"
dependencies = [
- "aho-corasick",
- "memchr",
"regex-syntax",
]
@@ -658,9 +545,9 @@ checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
[[package]]
name = "regex-syntax"
-version = "0.6.27"
+version = "0.6.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a3f87b73ce11b1619a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244"
+checksum = "456c603be3e8d448b072f410900c09faf164fbce2d480456f50eea6e25f9c848"
[[package]]
name = "rend"
@@ -672,27 +559,6 @@ dependencies = [
]
[[package]]
-name = "riscv"
-version = "0.7.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6907ccdd7a31012b70faf2af85cd9e5ba97657cc3987c4f13f8e4d2c2a088aba"
-dependencies = [
- "bare-metal 1.0.0",
- "bit_field",
- "riscv-target",
-]
-
-[[package]]
-name = "riscv-target"
-version = "0.1.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "88aa938cda42a0cf62a20cfe8d139ff1af20c2e681212b5b34adb5a58333f222"
-dependencies = [
- "lazy_static",
- "regex",
-]
-
-[[package]]
name = "rkyv"
version = "0.7.39"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -719,27 +585,18 @@ dependencies = [
[[package]]
name = "rustc_version"
-version = "0.2.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a"
-dependencies = [
- "semver 0.9.0",
-]
-
-[[package]]
-name = "rustc_version"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366"
dependencies = [
- "semver 1.0.14",
+ "semver",
]
[[package]]
name = "ryu"
-version = "1.0.11"
+version = "1.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4501abdff3ae82a1c1b477a17252eb69cee9e66eb915c1abaa4f44d873df9f09"
+checksum = "7b4b9743ed687d4b4bcedf9ff5eaa7398495ae14e61cba0a295704edbc7decde"
[[package]]
name = "same-file"
@@ -764,30 +621,15 @@ checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b"
[[package]]
name = "semver"
-version = "0.9.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403"
-dependencies = [
- "semver-parser",
-]
-
-[[package]]
-name = "semver"
-version = "1.0.14"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e25dfac463d778e353db5be2449d1cce89bd6fd23c9f1ea21310ce6e5a1b29c4"
-
-[[package]]
-name = "semver-parser"
-version = "0.7.0"
+version = "1.0.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"
+checksum = "58bc9567378fc7690d6b2addae4e60ac2eeea07becb2c64b9f218b53865cba2a"
[[package]]
name = "serde"
-version = "1.0.145"
+version = "1.0.152"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "728eb6351430bccb993660dfffc5a72f91ccc1295abaa8ce19b27ebe4f75568b"
+checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb"
dependencies = [
"serde_derive",
]
@@ -804,9 +646,9 @@ dependencies = [
[[package]]
name = "serde_derive"
-version = "1.0.145"
+version = "1.0.152"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "81fa1584d3d1bcacd84c277a0dfe21f5b0f6accf4a23d04d4c6d61f1af522b4c"
+checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e"
dependencies = [
"proc-macro2",
"quote",
@@ -815,11 +657,11 @@ dependencies = [
[[package]]
name = "serde_json"
-version = "1.0.85"
+version = "1.0.91"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e55a28e3aaef9d5ce0506d0a14dbba8054ddc7e499ef522dd8b26859ec9d4a44"
+checksum = "877c235533714907a8c2464236f5c4b2a17262ef1bd71f38f35ea592c8da6883"
dependencies = [
- "itoa 1.0.3",
+ "itoa 1.0.5",
"ryu",
"serde",
]
@@ -841,9 +683,9 @@ checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
[[package]]
name = "syn"
-version = "1.0.101"
+version = "1.0.107"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e90cde112c4b9690b8cbe810cba9ddd8bc1d7472e2cae317b69e9438c1cba7d2"
+checksum = "1f4064b5b16e03ae50984a5a8ed5d4f8803e6bc1fd170a3cda91a1be4b18e3f5"
dependencies = [
"proc-macro2",
"quote",
@@ -872,15 +714,6 @@ dependencies = [
]
[[package]]
-name = "tinystr"
-version = "0.6.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4dfb77d2490072fb5616d67686f55481b3d97701e383e208a7225843eba1aae6"
-dependencies = [
- "displaydoc",
-]
-
-[[package]]
name = "tinytemplate"
version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -892,9 +725,9 @@ dependencies = [
[[package]]
name = "unicode-ident"
-version = "1.0.4"
+version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dcc811dc4066ac62f84f11307873c4850cb653bfa9b1719cee2bd2204a4bc5dd"
+checksum = "84a22b9f218b40614adcb3f4ff08b703773ad44fa9423e4e0d346d5db86e4ebc"
[[package]]
name = "unicode-width"
@@ -909,33 +742,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c"
[[package]]
-name = "vcell"
-version = "0.1.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "77439c1b53d2303b20d9459b1ade71a83c716e3f9c34f3228c00e6f185d6c002"
-
-[[package]]
name = "version_check"
version = "0.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
[[package]]
-name = "void"
-version = "1.0.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d"
-
-[[package]]
-name = "volatile-register"
-version = "0.2.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9ee8f19f9d74293faf70901bc20ad067dc1ad390d2cbf1e3f75f721ffee908b6"
-dependencies = [
- "vcell",
-]
-
-[[package]]
name = "walkdir"
version = "2.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1048,16 +860,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
-name = "writeable"
-version = "0.4.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d8ab608ef0f68f7b5e1f17a38342cbc2725bf212f6ba9f103b0e05f675c41d83"
-
-[[package]]
name = "yoke"
-version = "0.6.1"
+version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "620cda8a59740b1e9313dad314848c6ffe15792c38cc4ac2def245ee77a6cae2"
+checksum = "222180af14a6b54ef2c33493c1eff77ae95a3687a21b243e752624006fb8f26e"
dependencies = [
"serde",
"stable_deref_trait",
@@ -1067,9 +873,9 @@ dependencies = [
[[package]]
name = "yoke-derive"
-version = "0.6.0"
+version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "58c2c5bb7c929b85c1b9ec69091b0d835f0878b4fd9eb67973b25936e06c4374"
+checksum = "ca800d73d6b7a7ee54f2608205c98b549fca71c9500c1abcb3abdc7708b4a8cb"
dependencies = [
"proc-macro2",
"quote",
@@ -1088,9 +894,9 @@ dependencies = [
[[package]]
name = "zerofrom-derive"
-version = "0.1.0"
+version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8785f47d6062c1932866147f91297286a9f350b3070e9d9f0b6078e37d623c1a"
+checksum = "2e8aa86add9ddbd2409c1ed01e033cd457d79b1b1229b64922c25095c595e829"
dependencies = [
"proc-macro2",
"quote",
diff --git a/vendor/litemap/Cargo.toml b/vendor/litemap/Cargo.toml
index 315056b05..01ee597a8 100644
--- a/vendor/litemap/Cargo.toml
+++ b/vendor/litemap/Cargo.toml
@@ -10,9 +10,9 @@
# See Cargo.toml.orig for the original contents.
[package]
-edition = "2018"
+edition = "2021"
name = "litemap"
-version = "0.6.0"
+version = "0.6.1"
authors = ["The ICU4X Project Developers"]
include = [
"src/**/*",
@@ -37,9 +37,15 @@ license = "Unicode-DFS-2016"
repository = "https://github.com/unicode-org/icu4x"
resolver = "2"
+[package.metadata.workspaces]
+independent = true
+
[package.metadata.docs.rs]
all-features = true
+[package.metadata.cargo-all-features]
+denylist = ["bench"]
+
[[example]]
name = "litemap_bincode"
path = "benches/bin/litemap_bincode.rs"
@@ -70,7 +76,7 @@ optional = true
default-features = false
[dependencies.yoke]
-version = "0.6.0"
+version = "0.7.0"
features = ["derive"]
optional = true
@@ -83,12 +89,6 @@ version = "0.6"
[dev-dependencies.criterion]
version = "0.3.4"
-[dev-dependencies.icu_benchmark_macros]
-version = "0.7"
-
-[dev-dependencies.icu_locid]
-version = "1.0.0-beta1"
-
[dev-dependencies.postcard]
version = "1.0.0"
features = ["use-std"]
diff --git a/vendor/litemap/benches/litemap.rs b/vendor/litemap/benches/litemap.rs
index bf005c4f8..353f03a86 100644
--- a/vendor/litemap/benches/litemap.rs
+++ b/vendor/litemap/benches/litemap.rs
@@ -39,7 +39,8 @@ const POSTCARD: [u8; 176] = [
105, 110, 101, 115, 101,
];
-/// Run this function to print new data to the console. Requires the optional `serde` feature.
+/// Run this function to print new data to the console.
+/// Requires the optional `serde` Cargo feature.
#[allow(dead_code)]
fn generate() {
let map = build_litemap(false);
@@ -69,13 +70,13 @@ fn overview_bench(c: &mut Criterion) {
fn build_litemap(large: bool) -> LiteMap<String, String> {
let mut map: LiteMap<String, String> = LiteMap::new();
- for (key, value) in DATA.iter() {
+ for (key, value) in DATA.into_iter() {
if large {
for n in 0..8192 {
- map.insert(format!("{}{}", key, n), value.to_string());
+ map.insert(format!("{}{}", key, n), value.to_owned());
}
} else {
- map.insert(key.to_string(), value.to_string());
+ map.insert(key.to_owned(), value.to_owned());
}
}
map
@@ -85,7 +86,7 @@ fn bench_deserialize(c: &mut Criterion) {
c.bench_function("litemap/deserialize/small", |b| {
b.iter(|| {
let map: LiteMap<String, String> = postcard::from_bytes(black_box(&POSTCARD)).unwrap();
- assert_eq!(map.get("iu"), Some(&"Inuktitut".to_string()));
+ assert_eq!(map.get("iu"), Some(&"Inuktitut".to_owned()));
})
});
}
@@ -95,7 +96,7 @@ fn bench_deserialize_large(c: &mut Criterion) {
c.bench_function("litemap/deseralize/large", |b| {
b.iter(|| {
let map: LiteMap<String, String> = postcard::from_bytes(black_box(&buf)).unwrap();
- assert_eq!(map.get("iu3333"), Some(&"Inuktitut".to_string()));
+ assert_eq!(map.get("iu3333"), Some(&"Inuktitut".to_owned()));
});
});
}
@@ -104,7 +105,7 @@ fn bench_lookup(c: &mut Criterion) {
let map: LiteMap<String, String> = postcard::from_bytes(&POSTCARD).unwrap();
c.bench_function("litemap/lookup/small", |b| {
b.iter(|| {
- assert_eq!(map.get(black_box("iu")), Some(&"Inuktitut".to_string()));
+ assert_eq!(map.get(black_box("iu")), Some(&"Inuktitut".to_owned()));
assert_eq!(map.get(black_box("zz")), None);
});
});
@@ -115,7 +116,7 @@ fn bench_lookup_large(c: &mut Criterion) {
let map: LiteMap<String, String> = postcard::from_bytes(&buf).unwrap();
c.bench_function("litemap/lookup/large", |b| {
b.iter(|| {
- assert_eq!(map.get(black_box("iu3333")), Some(&"Inuktitut".to_string()));
+ assert_eq!(map.get(black_box("iu3333")), Some(&"Inuktitut".to_owned()));
assert_eq!(map.get(black_box("zz")), None);
});
});
diff --git a/vendor/litemap/src/map.rs b/vendor/litemap/src/map.rs
index f86383337..669dc464d 100644
--- a/vendor/litemap/src/map.rs
+++ b/vendor/litemap/src/map.rs
@@ -135,8 +135,8 @@ where
/// let mut map = LiteMap::new_vec();
/// map.insert(1, "one");
/// map.insert(2, "two");
- /// assert_eq!(map.contains_key(&1), true);
- /// assert_eq!(map.contains_key(&3), false);
+ /// assert!(map.contains_key(&1));
+ /// assert!(!map.contains_key(&3));
/// ```
pub fn contains_key<Q: ?Sized>(&self, key: &Q) -> bool
where
diff --git a/vendor/litemap/src/store/mod.rs b/vendor/litemap/src/store/mod.rs
index e4ba6f7b9..7f4386783 100644
--- a/vendor/litemap/src/store/mod.rs
+++ b/vendor/litemap/src/store/mod.rs
@@ -18,7 +18,7 @@
//! - [`StoreIterable`] for methods that return iterators
//! - [`StoreFromIterator`] to enable `FromIterator` for LiteMap
//!
-//! To test your implementation, enable the `"testing"` feature and use [`check_store()`].
+//! To test your implementation, enable the `"testing"` Cargo feature and use [`check_store()`].
//!
//! [`check_store()`]: crate::testing::check_store
diff --git a/vendor/litemap/tests/rkyv.rs b/vendor/litemap/tests/rkyv.rs
index 2d6438dc7..7ab1eb3f5 100644
--- a/vendor/litemap/tests/rkyv.rs
+++ b/vendor/litemap/tests/rkyv.rs
@@ -41,17 +41,14 @@ type LiteMapOfStrings = LiteMap<String, String>;
type TupleVecOfStrings = Vec<(String, String)>;
fn generate() -> AlignedVec {
- let mut map: LiteMapOfStrings = LiteMap::new();
- for (lang, name) in DATA.iter() {
- map.try_append(lang.to_string(), name.to_string())
- .ok_or(())
- .unwrap_err();
- }
- let tuple_vec = map.into_tuple_vec();
+ let map = DATA
+ .iter()
+ .map(|&(k, v)| (k.to_owned(), v.to_owned()))
+ .collect::<LiteMapOfStrings>();
let mut serializer = AllocSerializer::<4096>::default();
serializer
- .serialize_value(&tuple_vec)
+ .serialize_value(&map.into_tuple_vec())
.expect("failed to archive test");
serializer.into_serializer().into_inner()
}
@@ -82,5 +79,5 @@ fn rkyv_deserialize() {
let deserialized = archived.deserialize(&mut Infallible).unwrap();
// Safe because we are deserializing a buffer from a trusted source
let deserialized: LiteMapOfStrings = LiteMap::from_sorted_store_unchecked(deserialized);
- assert_eq!(deserialized.get("tr"), Some(&"Turkish".to_string()));
+ assert_eq!(deserialized.get("tr").map(String::as_str), Some("Turkish"));
}
diff --git a/vendor/lsp-types/.cargo-checksum.json b/vendor/lsp-types/.cargo-checksum.json
index fe0d2bb53..a3fa12007 100644
--- a/vendor/lsp-types/.cargo-checksum.json
+++ b/vendor/lsp-types/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"CHANGELOG.md":"fb3c66afb33dcad9d43aa5cb5e34de28ab8dcecdcef4f449ec5a4a5ad9a045cf","Cargo.toml":"3fa7da96cf40b1b3ba29a479dc428f2c43ecb3fd6e78bf3987639730dc96fa8f","LICENSE":"a11232911aa0d746688b560af367728021184084eeb6328a1922d60925c5eda0","README.md":"6814882daa9018d1d99ebdb7861d9759b3c96f976683fabfc8f4b9c9a05b032a","release.sh":"3bfea9a6cd08e825f3247a868338f3844d7f72f4342671898a6326a5df3c4e42","release.toml":"d4f0fecf904b64d9fc9babd3afeff77d3c102d8555b43ef83902c143e3becf16","src/call_hierarchy.rs":"a64967af04133a9c08838db19d2de665d2062ed294f6ca7d1d97edb49dc4096f","src/code_action.rs":"78813e0095a78e89a74d897e8dcb5876e1e1480c5e9e37d6f2fb22fdaecb6045","src/code_lens.rs":"169cc96d4ee9690d0e2e119aee7949006f0123c4a6b919bf7ac484258c93fa95","src/color.rs":"72846c6cda18691f5f9eaa29cb8df87bb4a10ae4e0803916244b250c68dc7701","src/completion.rs":"96681827c185732730e6b352e6259430a0585f92e34ff446b43b8d36e366a6db","src/document_highlight.rs":"491aad4beeb72ecc986d9f4cb43202d440304df3be861c41ab4e892b129212b5","src/document_link.rs":"8aab0d78081aa624290c712bacd30215553e7f0f99b300d3fefd103179fdfad0","src/document_symbols.rs":"063c29bf761c17f6f55c6812da0aad4a6aa04c6db3efbb6ca01b646b7757aabe","src/error_codes.rs":"60d4a1d00e797fe3f3f2c5ffd11be7fbaad82f4fc247ff98285566d6b777f0f7","src/file_operations.rs":"bfc413d4bbfd311c7a178863961e6cde6e8cc7a654370b3848ffd64941fea82a","src/folding_range.rs":"3dc85496b6f5b8c11f3a57f888c2febdac71c997010b13eec34736b29fb86965","src/formatting.rs":"548df3c2da1dec8a3a578b381249b6812f92e73c22257459653980492b21a2a2","src/hover.rs":"fece13f93144dfea0b3c3204f19fc7fc366b25a614515825a20a2596c88966c8","src/inlay_hint.rs":"26671f81e045b3dcca6af3222fd3d8095f061e46b8ff7fc738b6f481a0b631d4","src/lib.rs":"0c990f697238cf37f11fcbb3bc20690a90a0aaaed427d79bdbae3ffda0d38c3f","src/linked_editing.rs":"a840ee445fbcfe32e37191197fb37b19732260462a73cc4ba350b8d9a2c4e607","src/lsif.rs":"c3966583d9b0afc64e93d301c8e20f84ba875ffb6b5d4bbacf4ca4a12cc58e99","src/moniker.rs":"8d8c37daca1d2504c74c17e7195969efcfb42641392026a8e507490adf49d043","src/notification.rs":"67e831168d9e7898965d83a503a33ac10f36561bcdb07195f99769f8ff00340c","src/progress.rs":"2b2189b094fbe7b790a0895f96c29615b9375538760d413413c47ce882af2007","src/references.rs":"e824f6861f67c66af04f8f08afc174793a9662a0a67cc23ccd5240ac0d408212","src/rename.rs":"2980e9452a27868a972fc09d52ac7c000361be9f1efc2de4e20c9b99100010c8","src/request.rs":"3abac2c2390c82bf89180b1305984db2986c5bc716ae9f5dbbd0af0a783ab0f7","src/selection_range.rs":"238b14ba6ac2e45ec66fea32d8ebe5f1d235bde9bc69777a794e10d519bcac9c","src/semantic_tokens.rs":"e6af4475567efb67df53b756bb9b0c5d469f83ea0eba863209e4c151f8c60704","src/signature_help.rs":"02d6c70a6c803badd22a8a474233d1be0b169eb39a6ae668f744dcaee71045ab","src/trace.rs":"81ca45bf188452d074cb879060a88f6d6ec087d6e31e29c37db542fdb5363f5c","src/window.rs":"b380ed57b4a700514ec043310253f3fc57c51d35df18fff8283b685ee8aa17d2","src/workspace_folders.rs":"79b75c1d75a1d57892d515efd8a5cbf7c8fddce1b49dc7dfbd94e685b30fce27","src/workspace_symbols.rs":"a1f18eecc09c352b342c5cc011805457eb836432d5b8688dd2e447ac9bda407a","tests/lsif.rs":"89c7c7fa06a8b108f7519a9860d0e2a049ac66d35db3630697f2f33038513898","tests/tsc-unix.lsif":"5a85759de361285d806c8147cfc9f9cb5235a6a2253bacd8243174713419facd"},"package":"9be6e9c7e2d18f651974370d7aff703f9513e0df6e464fd795660edc77e6ca51"} \ No newline at end of file
+{"files":{"CHANGELOG.md":"a404456dd81ff6bd6aacece55d460556fd5f923db0103d8a3d395761cb2fb2c2","Cargo.toml":"2fb86fa2dd87571fb8e625b68ada491b3c44aa7c2c3a1b739773664d50b44bf3","LICENSE":"005b90a7238f92fd39eaef0b4329a47e1cab351958d9e9bbed9b9c60b76bb2bb","README.md":"aa7b801527c8ed7d06285e86ea1ab224d4f314937631fa390fbcd5558088636e","release.sh":"a8f3c599bdb68a5fa47ec50b6d056ad8b0dcd6dda94b7fc9a7a1899ef4b86c1e","release.toml":"b5c002516befc380e4bf3712b06470fb74689cb0fb9fadb8a36478c596fe621a","src/call_hierarchy.rs":"31567e72d30c3939eb68fb492b6781e31ed0da56bbb4179e3176222f66bb1357","src/code_action.rs":"ace85a0ee6a3735d91d6c7c63ccb23fea8f1c22e0997337ea7fcdb0d235e5859","src/code_lens.rs":"6f0586b780950854d3c1ffbed414d1d9db8a111480f2e99132b4520d0b3a8f07","src/color.rs":"60549c5b20bcc87c954da32c58b00856ac4ab2b1a8b42fed0225d451d7c2f274","src/completion.rs":"3b2a72e3c8d709bd1b0c80035783502e647fd6da205376ecee7afa770e45f061","src/document_highlight.rs":"83c531ba4087538145fc7ff7d3e19bfc8e38441895f885296ea9e0925484dbd0","src/document_link.rs":"ef5ea30b8e761365cdf34666432567d9e5baf08469c4e4d9a51b5a500ffee4b7","src/document_symbols.rs":"fc1097290f9704550f75703524258c121484a9091284065c5318305f67179339","src/error_codes.rs":"06380abbd2970756a6f5c68ed4a47a9226403710b1beb014a790fffb23f6bdf1","src/file_operations.rs":"049b37d961522befc2df6df7f8172d4a122969c87873d4b9750746c46589db66","src/folding_range.rs":"f38500fbd7fa92b4b97c6092b2a2b4ca8649ad09a5a4ef7c532c551d7f70b872","src/formatting.rs":"79d8579fea1f582a4e09f1a63dacf5e504d41ce835a3a9de2ffe9d2ca9b4d686","src/hover.rs":"9feddaed11504a11f8d9cad5733ed3006c04300c2d75c8baee170ec0d68c14f5","src/inlay_hint.rs":"f5ee63e0e4c3c5877620d4c3d2942c8070bed66b6aeace6c2f7578084ff2a9ba","src/inline_value.rs":"34499bb445526b70ff370d507b098439f918b4007663761398af0b7c77d781bb","src/lib.rs":"ded25d5e58499d7e4e2f11665fde4d196fb80f85eee8939c6e8237c7e5019786","src/linked_editing.rs":"1a010996ff20012be72e2c611e5eb57d22f37a969216be4e6fc7b62c4281971e","src/lsif.rs":"747b9552ca4065fb56b887e9f40df8e8cc2502c68ffd305e0d9ca44eae377622","src/moniker.rs":"d59c8ce30f3babd86a7e3f0eac680be5111bf47e879f514afc3d894ddc874fe8","src/notification.rs":"c00b675ae519271827d15e9a8569b55e11233bd4db362110b98fccf65c00b68a","src/progress.rs":"2aaa8f5fb0cb14deb17b09ee29bc092f00ba23024fb0ec81138bb72abea91e4c","src/references.rs":"8d2d3b4622f680343da3622156df656495b3fa67ca4f3f6ecf673e77ec4d2cb8","src/rename.rs":"3f5f1106e9c926c09dadee03c1118e332c5e4fb59c0a71c7bb43800a6322c5bd","src/request.rs":"1a4703fecf7c459a6ed4f13658ecdeca44a2bb42a634e2077c183bb0f54fdc4d","src/selection_range.rs":"7e5d3cd207810f2a2f266960569d5b0adb3aff939b9b9aa0721c91f106a9f6e6","src/semantic_tokens.rs":"9cba9968aebd931de3e31d31564d0bc2c79a1bf28ac4b761b8cc59f3193326fa","src/signature_help.rs":"93440d96f04c01cd81ba13b0b7cb5859a4e8698a7007022f811d300f77090281","src/trace.rs":"338aa8faa6b8cec523d5d8bd08ad87f7a76fca7df64e2db6b2139a879f68bffd","src/type_hierarchy.rs":"d134db897e7cc8c3fb7b494ca8c36547421c46df780e63186d2bfcbac386a866","src/window.rs":"34b9e49217a44f2098708a4d256bab1ffafa13da8d7268fe9a234f427ab40525","src/workspace_folders.rs":"006216c7e0f8b3c50fab89c18231a4d08bf0994d1d8097564d6c1bec882f2975","src/workspace_symbols.rs":"48c9edfbb87b105925d1cd0956f456faa2b289fea7f7ebe85b0739170796b162","tests/lsif.rs":"075ebcdc8bbad2172fdeb72ce41656822e13153ef7b72bfed0b5cceba84fa090","tests/tsc-unix.lsif":"a6b171427afa74a648b0c633272958506ea7cf690b7ab8ba2548229fffd9c403"},"package":"0b63735a13a1f9cd4f4835223d828ed9c2e35c8c5e61837774399f558b6a1237"} \ 
No newline at end of file
diff --git a/vendor/lsp-types/CHANGELOG.md b/vendor/lsp-types/CHANGELOG.md
index 35655f7fc..584b8ec8b 100644
--- a/vendor/lsp-types/CHANGELOG.md
+++ b/vendor/lsp-types/CHANGELOG.md
@@ -1,289 +1,295 @@
-<a name="v0.93.2"></a>
-### v0.93.2 (2022-10-21)
-
-
-
-
-<a name="v0.93.1"></a>
-### v0.93.1 (2022-08-23)
-
-
-
-
-<a name="v0.93.0"></a>
-## v0.93.0 (2022-04-08)
-
-
-#### Features
-
-* **helperTypes:** implement new JSON types ([bd9bb2e7](https://github.com/gluon-lang/lsp-types/commit/bd9bb2e74171b0194be51085c822c3ccbbb68b7b))
-
-
-
-<a name="v0.92.1"></a>
-### v0.92.1 (2022-03-21)
-
-
-#### Bug Fixes
-
-* **inlayHint:** typo in prop name ([7f7ada31](https://github.com/gluon-lang/lsp-types/commit/7f7ada310fa045ef628dc36f97b8564408fcec6a))
-
-#### Features
-
-* **inlayHint:**
- * add request impls ([393aefa1](https://github.com/gluon-lang/lsp-types/commit/393aefa18b3c900c3f1ac8f6337b7098bdc4365a))
- * add inlayHint provider ([69f21e71](https://github.com/gluon-lang/lsp-types/commit/69f21e714df305643aa727cbf745b439bbd8c112))
- * implement inlayHint basic types ([29a50e71](https://github.com/gluon-lang/lsp-types/commit/29a50e71461f72d2fa2aea90fa03f61adf062074))
-
-
-
-<a name="v0.92.0"></a>
-## v0.92.0 (2022-02-07)
-
-
-
-
-<a name="v0.91.2"></a>
-### v0.91.2 (2022-02-07)
-
-
-#### Features
-
-* Add a TryFrom<&str> implementation for enumerations ([19b85cf4](https://github.com/gluon-lang/lsp-types/commit/19b85cf4a7b5396a89d364e271c3867eb6c840e4), closes [#223](https://github.com/gluon-lang/lsp-types/issues/223))
-
-#### Bug Fixes
-
-* Ensure that the pascal_case_name is evaluated at compile time ([7337535b](https://github.com/gluon-lang/lsp-types/commit/7337535b79000912f859319455cd371af77d1899))
-
-
-
-<a name="v0.91.1"></a>
-### v0.91.1 (2021-11-01)
-
-
-#### Features
-
-* Print the name of know enumerations ([2877b6a1](https://github.com/gluon-lang/lsp-types/commit/2877b6a131ef5c28310b972126f6d1b23cd97788))
-
-
-
-<a name="v0.91.0"></a>
-## v0.91.0 (2021-10-18)
-
-
-
-
-<a name="v0.90.1"></a>
-### v0.90.1 (2021-10-11)
-
-
-#### Bug Fixes
-
-* add missing rename_all serde attribute ([b403cba5](https://github.com/gluon-lang/lsp-types/commit/b403cba5fb568f7f1bda3ea2929822269797f7f3))
-
-
-
-<a name="v0.90.1"></a>
-### v0.90.1 (2021-10-11)
-
-
-#### Bug Fixes
-
-* add missing rename_all serde attribute ([b403cba5](https://github.com/gluon-lang/lsp-types/commit/b403cba5fb568f7f1bda3ea2929822269797f7f3))
-
-
-
-<a name="v0.90.1"></a>
-### v0.90.1 (2021-10-11)
-
-
-#### Bug Fixes
-
-* add missing rename_all serde attribute ([b403cba5](https://github.com/gluon-lang/lsp-types/commit/b403cba5fb568f7f1bda3ea2929822269797f7f3))
-
-
-
-<a name="v0.90.1"></a>
-### v0.90.1 (2021-10-11)
-
-
-#### Bug Fixes
-
-* add missing rename_all serde attribute ([b403cba5](https://github.com/gluon-lang/lsp-types/commit/b403cba5fb568f7f1bda3ea2929822269797f7f3))
-
-
-
-<a name="v0.90.0"></a>
-## v0.90.0 (2021-09-25)
-
-
-
-
-<a name="v0.61.1"></a>
-### v0.61.1 (2021-06-10)
-
-
-#### Features
-
-* Add DeserializeOwned + Serialize bounds on request/notifications ([fb945a93](https://github.com/gluon-lang/lsp-types/commit/fb945a9347b353dd9bc5aab99a86731bebd94c15), closes [#140](https://github.com/gluon-lang/lsp-types/issues/140))
-
-
-
-<a name="v0.89.1"></a>
-### v0.89.1 (2021-05-22)
-
-
-
-
-<a name="v0.89.0"></a>
-## v0.89.0 (2021-04-07)
-
-
-
-
-<a name="v0.88.0"></a>
-## v0.88.0 (2021-02-16)
-
-
-
-
-<a name="v0.87.0"></a>
-## v0.87.0 (2021-02-14)
-
-
-
-
-<a name="v0.86.0"></a>
-## v0.86.0 (2020-12-17)
-
-
-
-
-<a name="v0.85.0"></a>
-## v0.85.0 (2020-12-01)
-
-
-
-
-<a name="v0.84.0"></a>
-## v0.84.0 (2020-11-24)
-
-
-
-
-<a name="v0.83.1"></a>
-### v0.83.1 (2020-11-16)
-
-
-
-
-<a name="v0.82.0"></a>
-## v0.82.0 (2020-09-23)
-
-
-
-
-<a name="v0.80.0"></a>
-## v0.80.0 (2020-09-18)
-
-
-
-
-<a name="v0.80.0"></a>
-## v0.80.0 (2020-09-02)
-
-
-
-
-<a name="v0.79.0"></a>
-## v0.79.0 (2020-07-26)
-
-
-
-
-<a name="v0.77.0"></a>
-## v0.77.0 (2020-07-15)
-
-
-
-
-<a name="v0.76.1"></a>
-### v0.76.1 (2020-07-14)
-
-
-
-
-<a name="v0.74.2"></a>
-### v0.74.2 (2020-05-30)
-
-
-
-
-<a name="v0.74.1"></a>
-### v0.74.1 (2020-05-01)
-
-
-
-
-<a name="v0.74.0"></a>
-## v0.74.0 (2020-04-26)
-
-
-
-
-<a name="v0.74.0"></a>
-## v0.74.0 (2020-04-26)
-
-
-
-
-<a name="v0.72.0"></a>
-## v0.72.0 (2020-03-02)
-
-
-#### Features
-
-* Add DeserializeOwned + Serialize bounds on request/notifications ([fb945a93](https://github.com/gluon-lang/lsp-types/commit/fb945a9347b353dd9bc5aab99a86731bebd94c15), closes [#140](https://github.com/gluon-lang/lsp-types/issues/140))
-
-
-
-<a name="v0.72.0"></a>
-## v0.72.0 (2020-03-02)
-
-
-#### Features
-
-* Add DeserializeOwned + Serialize bounds on request/notifications ([fb945a93](https://github.com/gluon-lang/lsp-types/commit/fb945a9347b353dd9bc5aab99a86731bebd94c15), closes [#140](https://github.com/gluon-lang/lsp-types/issues/140))
-
-
-
-<a name="v0.72.0"></a>
-## v0.72.0 (2020-03-02)
-
-
-#### Features
-
-* Add DeserializeOwned + Serialize bounds on request/notifications ([fb945a93](https://github.com/gluon-lang/lsp-types/commit/fb945a9347b353dd9bc5aab99a86731bebd94c15), closes [#140](https://github.com/gluon-lang/lsp-types/issues/140))
-
-
-
-<a name="v0.72.0"></a>
-## v0.72.0 (2020-03-02)
-
-
-#### Features
-
-* Add DeserializeOwned + Serialize bounds on request/notifications ([fb945a93](https://github.com/gluon-lang/lsp-types/commit/fb945a9347b353dd9bc5aab99a86731bebd94c15), closes [#140](https://github.com/gluon-lang/lsp-types/issues/140))
-
-
-
-<a name="v0.72.0"></a>
-## v0.72.0 (2020-03-02)
-
-
-#### Features
-
-* Add DeserializeOwned + Serialize bounds on request/notifications ([fb945a93](https://github.com/gluon-lang/lsp-types/commit/fb945a9347b353dd9bc5aab99a86731bebd94c15), closes [#140](https://github.com/gluon-lang/lsp-types/issues/140))
-
-
-
+<a name="v0.94.0"></a>
+## v0.94.0 (2023-02-08)
+
+
+
+
+<a name="v0.93.2"></a>
+### v0.93.2 (2022-10-21)
+
+
+
+
+<a name="v0.93.1"></a>
+### v0.93.1 (2022-08-23)
+
+
+
+
+<a name="v0.93.0"></a>
+## v0.93.0 (2022-04-08)
+
+
+#### Features
+
+* **helperTypes:** implement new JSON types ([bd9bb2e7](https://github.com/gluon-lang/lsp-types/commit/bd9bb2e74171b0194be51085c822c3ccbbb68b7b))
+
+
+
+<a name="v0.92.1"></a>
+### v0.92.1 (2022-03-21)
+
+
+#### Bug Fixes
+
+* **inlayHint:** typo in prop name ([7f7ada31](https://github.com/gluon-lang/lsp-types/commit/7f7ada310fa045ef628dc36f97b8564408fcec6a))
+
+#### Features
+
+* **inlayHint:**
+ * add request impls ([393aefa1](https://github.com/gluon-lang/lsp-types/commit/393aefa18b3c900c3f1ac8f6337b7098bdc4365a))
+ * add inlayHint provider ([69f21e71](https://github.com/gluon-lang/lsp-types/commit/69f21e714df305643aa727cbf745b439bbd8c112))
+ * implement inlayHint basic types ([29a50e71](https://github.com/gluon-lang/lsp-types/commit/29a50e71461f72d2fa2aea90fa03f61adf062074))
+
+
+
+<a name="v0.92.0"></a>
+## v0.92.0 (2022-02-07)
+
+
+
+
+<a name="v0.91.2"></a>
+### v0.91.2 (2022-02-07)
+
+
+#### Features
+
+* Add a TryFrom<&str> implementation for enumerations ([19b85cf4](https://github.com/gluon-lang/lsp-types/commit/19b85cf4a7b5396a89d364e271c3867eb6c840e4), closes [#223](https://github.com/gluon-lang/lsp-types/issues/223))
+
+#### Bug Fixes
+
+* Ensure that the pascal_case_name is evaluated at compile time ([7337535b](https://github.com/gluon-lang/lsp-types/commit/7337535b79000912f859319455cd371af77d1899))
+
+
+
+<a name="v0.91.1"></a>
+### v0.91.1 (2021-11-01)
+
+
+#### Features
+
+* Print the name of known enumerations ([2877b6a1](https://github.com/gluon-lang/lsp-types/commit/2877b6a131ef5c28310b972126f6d1b23cd97788))
+
+
+
+<a name="v0.91.0"></a>
+## v0.91.0 (2021-10-18)
+
+
+
+
+<a name="v0.90.1"></a>
+### v0.90.1 (2021-10-11)
+
+
+#### Bug Fixes
+
+* add missing rename_all serde attribute ([b403cba5](https://github.com/gluon-lang/lsp-types/commit/b403cba5fb568f7f1bda3ea2929822269797f7f3))
+
+
+
+<a name="v0.90.1"></a>
+### v0.90.1 (2021-10-11)
+
+
+#### Bug Fixes
+
+* add missing rename_all serde attribute ([b403cba5](https://github.com/gluon-lang/lsp-types/commit/b403cba5fb568f7f1bda3ea2929822269797f7f3))
+
+
+
+<a name="v0.90.1"></a>
+### v0.90.1 (2021-10-11)
+
+
+#### Bug Fixes
+
+* add missing rename_all serde attribute ([b403cba5](https://github.com/gluon-lang/lsp-types/commit/b403cba5fb568f7f1bda3ea2929822269797f7f3))
+
+
+
+<a name="v0.90.1"></a>
+### v0.90.1 (2021-10-11)
+
+
+#### Bug Fixes
+
+* add missing rename_all serde attribute ([b403cba5](https://github.com/gluon-lang/lsp-types/commit/b403cba5fb568f7f1bda3ea2929822269797f7f3))
+
+
+
+<a name="v0.90.0"></a>
+## v0.90.0 (2021-09-25)
+
+
+
+
+<a name="v0.61.1"></a>
+### v0.61.1 (2021-06-10)
+
+
+#### Features
+
+* Add DeserializeOwned + Serialize bounds on request/notifications ([fb945a93](https://github.com/gluon-lang/lsp-types/commit/fb945a9347b353dd9bc5aab99a86731bebd94c15), closes [#140](https://github.com/gluon-lang/lsp-types/issues/140))
+
+
+
+<a name="v0.89.1"></a>
+### v0.89.1 (2021-05-22)
+
+
+
+
+<a name="v0.89.0"></a>
+## v0.89.0 (2021-04-07)
+
+
+
+
+<a name="v0.88.0"></a>
+## v0.88.0 (2021-02-16)
+
+
+
+
+<a name="v0.87.0"></a>
+## v0.87.0 (2021-02-14)
+
+
+
+
+<a name="v0.86.0"></a>
+## v0.86.0 (2020-12-17)
+
+
+
+
+<a name="v0.85.0"></a>
+## v0.85.0 (2020-12-01)
+
+
+
+
+<a name="v0.84.0"></a>
+## v0.84.0 (2020-11-24)
+
+
+
+
+<a name="v0.83.1"></a>
+### v0.83.1 (2020-11-16)
+
+
+
+
+<a name="v0.82.0"></a>
+## v0.82.0 (2020-09-23)
+
+
+
+
+<a name="v0.80.0"></a>
+## v0.80.0 (2020-09-18)
+
+
+
+
+<a name="v0.80.0"></a>
+## v0.80.0 (2020-09-02)
+
+
+
+
+<a name="v0.79.0"></a>
+## v0.79.0 (2020-07-26)
+
+
+
+
+<a name="v0.77.0"></a>
+## v0.77.0 (2020-07-15)
+
+
+
+
+<a name="v0.76.1"></a>
+### v0.76.1 (2020-07-14)
+
+
+
+
+<a name="v0.74.2"></a>
+### v0.74.2 (2020-05-30)
+
+
+
+
+<a name="v0.74.1"></a>
+### v0.74.1 (2020-05-01)
+
+
+
+
+<a name="v0.74.0"></a>
+## v0.74.0 (2020-04-26)
+
+
+
+
+<a name="v0.74.0"></a>
+## v0.74.0 (2020-04-26)
+
+
+
+
+<a name="v0.72.0"></a>
+## v0.72.0 (2020-03-02)
+
+
+#### Features
+
+* Add DeserializeOwned + Serialize bounds on request/notifications ([fb945a93](https://github.com/gluon-lang/lsp-types/commit/fb945a9347b353dd9bc5aab99a86731bebd94c15), closes [#140](https://github.com/gluon-lang/lsp-types/issues/140))
+
+
+
+<a name="v0.72.0"></a>
+## v0.72.0 (2020-03-02)
+
+
+#### Features
+
+* Add DeserializeOwned + Serialize bounds on request/notifications ([fb945a93](https://github.com/gluon-lang/lsp-types/commit/fb945a9347b353dd9bc5aab99a86731bebd94c15), closes [#140](https://github.com/gluon-lang/lsp-types/issues/140))
+
+
+
+<a name="v0.72.0"></a>
+## v0.72.0 (2020-03-02)
+
+
+#### Features
+
+* Add DeserializeOwned + Serialize bounds on request/notifications ([fb945a93](https://github.com/gluon-lang/lsp-types/commit/fb945a9347b353dd9bc5aab99a86731bebd94c15), closes [#140](https://github.com/gluon-lang/lsp-types/issues/140))
+
+
+
+<a name="v0.72.0"></a>
+## v0.72.0 (2020-03-02)
+
+
+#### Features
+
+* Add DeserializeOwned + Serialize bounds on request/notifications ([fb945a93](https://github.com/gluon-lang/lsp-types/commit/fb945a9347b353dd9bc5aab99a86731bebd94c15), closes [#140](https://github.com/gluon-lang/lsp-types/issues/140))
+
+
+
+<a name="v0.72.0"></a>
+## v0.72.0 (2020-03-02)
+
+
+#### Features
+
+* Add DeserializeOwned + Serialize bounds on request/notifications ([fb945a93](https://github.com/gluon-lang/lsp-types/commit/fb945a9347b353dd9bc5aab99a86731bebd94c15), closes [#140](https://github.com/gluon-lang/lsp-types/issues/140))
+
+
+
diff --git a/vendor/lsp-types/Cargo.toml b/vendor/lsp-types/Cargo.toml
index fa39192e8..46a883b36 100644
--- a/vendor/lsp-types/Cargo.toml
+++ b/vendor/lsp-types/Cargo.toml
@@ -12,7 +12,7 @@
[package]
edition = "2018"
name = "lsp-types"
-version = "0.93.2"
+version = "0.94.0"
authors = [
"Markus Westerlind <marwes91@gmail.com>",
"Bruno Medeiros <bruno.do.medeiros@gmail.com>",
@@ -29,7 +29,6 @@ keywords = [
]
license = "MIT"
repository = "https://github.com/gluon-lang/lsp-types"
-resolver = "1"
[dependencies.bitflags]
version = "1.0.1"
diff --git a/vendor/lsp-types/LICENSE b/vendor/lsp-types/LICENSE
index 32781d976..968950bbb 100644
--- a/vendor/lsp-types/LICENSE
+++ b/vendor/lsp-types/LICENSE
@@ -1,22 +1,22 @@
-The MIT License (MIT)
-
-Copyright (c) 2016 Markus Westerlind
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-
+The MIT License (MIT)
+
+Copyright (c) 2016 Markus Westerlind
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+
diff --git a/vendor/lsp-types/README.md b/vendor/lsp-types/README.md
index d46701166..86203e609 100644
--- a/vendor/lsp-types/README.md
+++ b/vendor/lsp-types/README.md
@@ -1,14 +1,14 @@
-# lsp-types [![Build Status](https://travis-ci.org/gluon-lang/lsp-types.svg?branch=master)](https://travis-ci.org/gluon-lang/lsp-types) [![Documentation](https://docs.rs/lsp-types/badge.svg)](https://docs.rs/crate/lsp-types)
-
-Types useful for interacting with a [language server](https://code.visualstudio.com/blogs/2016/06/27/common-language-protocol).
-
-Supports Language Server Protocol (LSP) version 3.16.0.
-
-Proposed 3.17 features can be activated using the `proposed` feature flag.
-- **NOTE** that these are unstable and may change between releases.
-
-## Links
-
-[Stable Protocol reference](https://github.com/microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-16.md)
-
-[Proposed Protocol reference](https://github.com/microsoft/language-server-protocol/blob/gh-pages/_specifications/lsp/3.17/specification.md)
+# lsp-types [![Build Status](https://travis-ci.org/gluon-lang/lsp-types.svg?branch=master)](https://travis-ci.org/gluon-lang/lsp-types) [![Documentation](https://docs.rs/lsp-types/badge.svg)](https://docs.rs/crate/lsp-types)
+
+Types useful for interacting with a [language server](https://code.visualstudio.com/blogs/2016/06/27/common-language-protocol).
+
+Supports Language Server Protocol (LSP) version 3.16.0.
+
+Proposed 3.17 features can be activated using the `proposed` feature flag.
+- **NOTE** that these are unstable and may change between releases.
+
+## Links
+
+[Stable Protocol reference](https://github.com/microsoft/language-server-protocol/tree/gh-pages/_specifications/lsp/3.17/specification.md)
+
+[Proposed Protocol reference](https://github.com/microsoft/language-server-protocol/blob/gh-pages/_specifications/lsp/3.18/specification.md)
diff --git a/vendor/lsp-types/release.sh b/vendor/lsp-types/release.sh
index 68e484455..642c5ec0d 100755..100644
--- a/vendor/lsp-types/release.sh
+++ b/vendor/lsp-types/release.sh
@@ -1,15 +1,15 @@
-#!/bin/sh
-set -ex
-
-LEVEL=$1
-if [ -z "$LEVEL" ]; then
- echo "Expected patch, minor or major"
- exit 1
-fi
-
-clog --$LEVEL
-
-git add CHANGELOG.md
-git commit -m "Update changelog"
-
-cargo release $LEVEL --execute
+#!/bin/sh
+set -ex
+
+LEVEL=$1
+if [ -z "$LEVEL" ]; then
+ echo "Expected patch, minor or major"
+ exit 1
+fi
+
+clog --$LEVEL
+
+git add CHANGELOG.md
+git commit -m "Update changelog"
+
+cargo release $LEVEL --execute
diff --git a/vendor/lsp-types/release.toml b/vendor/lsp-types/release.toml
index b4a3d205e..a1d785e19 100644
--- a/vendor/lsp-types/release.toml
+++ b/vendor/lsp-types/release.toml
@@ -1,2 +1 @@
-dev-version = false
-tag-message = "Version {{version}}"
+tag-message = "Version {{version}}"
diff --git a/vendor/lsp-types/src/call_hierarchy.rs b/vendor/lsp-types/src/call_hierarchy.rs
index 21d3ec462..e59f0d00b 100644
--- a/vendor/lsp-types/src/call_hierarchy.rs
+++ b/vendor/lsp-types/src/call_hierarchy.rs
@@ -1,127 +1,127 @@
-use serde::{Deserialize, Serialize};
-use serde_json::Value;
-use url::Url;
-
-use crate::{
- DynamicRegistrationClientCapabilities, PartialResultParams, Range, SymbolKind, SymbolTag,
- TextDocumentPositionParams, WorkDoneProgressOptions, WorkDoneProgressParams,
-};
-
-pub type CallHierarchyClientCapabilities = DynamicRegistrationClientCapabilities;
-
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct CallHierarchyOptions {
- #[serde(flatten)]
- pub work_done_progress_options: WorkDoneProgressOptions,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(untagged)]
-pub enum CallHierarchyServerCapability {
- Simple(bool),
- Options(CallHierarchyOptions),
-}
-
-impl From<CallHierarchyOptions> for CallHierarchyServerCapability {
- fn from(from: CallHierarchyOptions) -> Self {
- Self::Options(from)
- }
-}
-
-impl From<bool> for CallHierarchyServerCapability {
- fn from(from: bool) -> Self {
- Self::Simple(from)
- }
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct CallHierarchyPrepareParams {
- #[serde(flatten)]
- pub text_document_position_params: TextDocumentPositionParams,
-
- #[serde(flatten)]
- pub work_done_progress_params: WorkDoneProgressParams,
-}
-
-#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)]
-#[serde(rename_all = "camelCase")]
-pub struct CallHierarchyItem {
- /// The name of this item.
- pub name: String,
-
- /// The kind of this item.
- pub kind: SymbolKind,
-
- /// Tags for this item.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub tags: Option<Vec<SymbolTag>>,
-
- /// More detail for this item, e.g. the signature of a function.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub detail: Option<String>,
-
- /// The resource identifier of this item.
- pub uri: Url,
-
- /// The range enclosing this symbol not including leading/trailing whitespace but everything else, e.g. comments and code.
- pub range: Range,
-
- /// The range that should be selected and revealed when this symbol is being picked, e.g. the name of a function.
- /// Must be contained by the [`range`](#CallHierarchyItem.range).
- pub selection_range: Range,
-
- /// A data entry field that is preserved between a call hierarchy prepare and incloming calls or outgoing calls requests.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub data: Option<Value>,
-}
-
-#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct CallHierarchyIncomingCallsParams {
- pub item: CallHierarchyItem,
-
- #[serde(flatten)]
- pub work_done_progress_params: WorkDoneProgressParams,
-
- #[serde(flatten)]
- pub partial_result_params: PartialResultParams,
-}
-
-/// Represents an incoming call, e.g. a caller of a method or constructor.
-#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)]
-#[serde(rename_all = "camelCase")]
-pub struct CallHierarchyIncomingCall {
- /// The item that makes the call.
- pub from: CallHierarchyItem,
-
- /// The range at which at which the calls appears. This is relative to the caller
- /// denoted by [`this.from`](#CallHierarchyIncomingCall.from).
- pub from_ranges: Vec<Range>,
-}
-
-#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct CallHierarchyOutgoingCallsParams {
- pub item: CallHierarchyItem,
-
- #[serde(flatten)]
- pub work_done_progress_params: WorkDoneProgressParams,
-
- #[serde(flatten)]
- pub partial_result_params: PartialResultParams,
-}
-
-/// Represents an outgoing call, e.g. calling a getter from a method or a method from a constructor etc.
-#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)]
-#[serde(rename_all = "camelCase")]
-pub struct CallHierarchyOutgoingCall {
- /// The item that is called.
- pub to: CallHierarchyItem,
-
- /// The range at which this item is called. This is the range relative to the caller, e.g the item
- /// passed to [`provideCallHierarchyOutgoingCalls`](#CallHierarchyItemProvider.provideCallHierarchyOutgoingCalls)
- /// and not [`this.to`](#CallHierarchyOutgoingCall.to).
- pub from_ranges: Vec<Range>,
-}
+use serde::{Deserialize, Serialize};
+use serde_json::Value;
+use url::Url;
+
+use crate::{
+ DynamicRegistrationClientCapabilities, PartialResultParams, Range, SymbolKind, SymbolTag,
+ TextDocumentPositionParams, WorkDoneProgressOptions, WorkDoneProgressParams,
+};
+
+pub type CallHierarchyClientCapabilities = DynamicRegistrationClientCapabilities;
+
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct CallHierarchyOptions {
+ #[serde(flatten)]
+ pub work_done_progress_options: WorkDoneProgressOptions,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(untagged)]
+pub enum CallHierarchyServerCapability {
+ Simple(bool),
+ Options(CallHierarchyOptions),
+}
+
+impl From<CallHierarchyOptions> for CallHierarchyServerCapability {
+ fn from(from: CallHierarchyOptions) -> Self {
+ Self::Options(from)
+ }
+}
+
+impl From<bool> for CallHierarchyServerCapability {
+ fn from(from: bool) -> Self {
+ Self::Simple(from)
+ }
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct CallHierarchyPrepareParams {
+ #[serde(flatten)]
+ pub text_document_position_params: TextDocumentPositionParams,
+
+ #[serde(flatten)]
+ pub work_done_progress_params: WorkDoneProgressParams,
+}
+
+#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct CallHierarchyItem {
+ /// The name of this item.
+ pub name: String,
+
+ /// The kind of this item.
+ pub kind: SymbolKind,
+
+ /// Tags for this item.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub tags: Option<Vec<SymbolTag>>,
+
+ /// More detail for this item, e.g. the signature of a function.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub detail: Option<String>,
+
+ /// The resource identifier of this item.
+ pub uri: Url,
+
+ /// The range enclosing this symbol not including leading/trailing whitespace but everything else, e.g. comments and code.
+ pub range: Range,
+
+ /// The range that should be selected and revealed when this symbol is being picked, e.g. the name of a function.
+ /// Must be contained by the [`range`](#CallHierarchyItem.range).
+ pub selection_range: Range,
+
+    /// A data entry field that is preserved between a call hierarchy prepare and incoming calls or outgoing calls requests.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub data: Option<Value>,
+}
+
+#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct CallHierarchyIncomingCallsParams {
+ pub item: CallHierarchyItem,
+
+ #[serde(flatten)]
+ pub work_done_progress_params: WorkDoneProgressParams,
+
+ #[serde(flatten)]
+ pub partial_result_params: PartialResultParams,
+}
+
+/// Represents an incoming call, e.g. a caller of a method or constructor.
+#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct CallHierarchyIncomingCall {
+ /// The item that makes the call.
+ pub from: CallHierarchyItem,
+
+    /// The ranges at which the calls appear. This is relative to the caller
+ /// denoted by [`this.from`](#CallHierarchyIncomingCall.from).
+ pub from_ranges: Vec<Range>,
+}
+
+#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct CallHierarchyOutgoingCallsParams {
+ pub item: CallHierarchyItem,
+
+ #[serde(flatten)]
+ pub work_done_progress_params: WorkDoneProgressParams,
+
+ #[serde(flatten)]
+ pub partial_result_params: PartialResultParams,
+}
+
+/// Represents an outgoing call, e.g. calling a getter from a method or a method from a constructor etc.
+#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct CallHierarchyOutgoingCall {
+ /// The item that is called.
+ pub to: CallHierarchyItem,
+
+    /// The range at which this item is called. This is the range relative to the caller, e.g. the item
+ /// passed to [`provideCallHierarchyOutgoingCalls`](#CallHierarchyItemProvider.provideCallHierarchyOutgoingCalls)
+ /// and not [`this.to`](#CallHierarchyOutgoingCall.to).
+ pub from_ranges: Vec<Range>,
+}
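
As a rough illustration of how these call-hierarchy types are meant to be used from the crate, the sketch below builds a CallHierarchyItem and serializes it with serde_json. The symbol name, URI and positions are made-up example values, and `lsp-types`, `serde_json` and `url` are assumed to be available as dependencies (they are not part of this diff).

    use lsp_types::{CallHierarchyItem, Position, Range, SymbolKind};
    use url::Url;

    fn main() {
        // Hypothetical item describing a function named `compute` in /tmp/main.rs.
        let item = CallHierarchyItem {
            name: "compute".to_string(),
            kind: SymbolKind::FUNCTION,
            tags: None,
            detail: Some("fn compute(x: i32) -> i32".to_string()),
            uri: Url::parse("file:///tmp/main.rs").unwrap(),
            range: Range::new(Position::new(10, 0), Position::new(20, 1)),
            selection_range: Range::new(Position::new(10, 3), Position::new(10, 10)),
            data: None,
        };
        // The camelCase wire names and the skipped `None` fields come from the
        // serde attributes on the struct definitions above.
        println!("{}", serde_json::to_string_pretty(&item).unwrap());
    }
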
diff --git a/vendor/lsp-types/src/code_action.rs b/vendor/lsp-types/src/code_action.rs
index 8859743d2..859fc1c02 100644
--- a/vendor/lsp-types/src/code_action.rs
+++ b/vendor/lsp-types/src/code_action.rs
@@ -1,360 +1,395 @@
-use crate::{
- Command, Diagnostic, PartialResultParams, Range, TextDocumentIdentifier,
- WorkDoneProgressOptions, WorkDoneProgressParams, WorkspaceEdit,
-};
-use serde::{Deserialize, Serialize};
-
-use serde_json::Value;
-
-use std::borrow::Cow;
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(untagged)]
-pub enum CodeActionProviderCapability {
- Simple(bool),
- Options(CodeActionOptions),
-}
-
-impl From<CodeActionOptions> for CodeActionProviderCapability {
- fn from(from: CodeActionOptions) -> Self {
- Self::Options(from)
- }
-}
-
-impl From<bool> for CodeActionProviderCapability {
- fn from(from: bool) -> Self {
- Self::Simple(from)
- }
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct CodeActionClientCapabilities {
- ///
- /// This capability supports dynamic registration.
- ///
- #[serde(skip_serializing_if = "Option::is_none")]
- pub dynamic_registration: Option<bool>,
-
- /// The client support code action literals as a valid
- /// response of the `textDocument/codeAction` request.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub code_action_literal_support: Option<CodeActionLiteralSupport>,
-
- /// Whether code action supports the `isPreferred` property.
- ///
- /// since 3.15.0
- #[serde(skip_serializing_if = "Option::is_none")]
- pub is_preferred_support: Option<bool>,
-
- /// Whether code action supports the `disabled` property.
- ///
- /// since 3.16.0
- #[serde(skip_serializing_if = "Option::is_none")]
- pub disabled_support: Option<bool>,
-
- /// Whether code action supports the `data` property which is
- /// preserved between a `textDocument/codeAction` and a
- /// `codeAction/resolve` request.
- ///
- /// since 3.16.0
- #[serde(skip_serializing_if = "Option::is_none")]
- pub data_support: Option<bool>,
-
- /// Whether the client supports resolving additional code action
- /// properties via a separate `codeAction/resolve` request.
- ///
- /// since 3.16.0
- #[serde(skip_serializing_if = "Option::is_none")]
- pub resolve_support: Option<CodeActionCapabilityResolveSupport>,
-
- /// Whether the client honors the change annotations in
- /// text edits and resource operations returned via the
- /// `CodeAction#edit` property by for example presenting
- /// the workspace edit in the user interface and asking
- /// for confirmation.
- ///
- /// @since 3.16.0
- #[serde(skip_serializing_if = "Option::is_none")]
- pub honors_change_annotations: Option<bool>,
-}
-
-/// Whether the client supports resolving additional code action
-/// properties via a separate `codeAction/resolve` request.
-///
-/// since 3.16.0
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct CodeActionCapabilityResolveSupport {
- /// The properties that a client can resolve lazily.
- pub properties: Vec<String>,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct CodeActionLiteralSupport {
- /// The code action kind is support with the following value set.
- pub code_action_kind: CodeActionKindLiteralSupport,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct CodeActionKindLiteralSupport {
- /// The code action kind values the client supports. When this
- /// property exists the client also guarantees that it will
- /// handle values outside its set gracefully and falls back
- /// to a default value when unknown.
- pub value_set: Vec<String>,
-}
-
-/// Params for the CodeActionRequest
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct CodeActionParams {
- /// The document in which the command was invoked.
- pub text_document: TextDocumentIdentifier,
-
- /// The range for which the command was invoked.
- pub range: Range,
-
- /// Context carrying additional information.
- pub context: CodeActionContext,
-
- #[serde(flatten)]
- pub work_done_progress_params: WorkDoneProgressParams,
-
- #[serde(flatten)]
- pub partial_result_params: PartialResultParams,
-}
-
-/// response for CodeActionRequest
-pub type CodeActionResponse = Vec<CodeActionOrCommand>;
-
-#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)]
-#[serde(untagged)]
-pub enum CodeActionOrCommand {
- Command(Command),
- CodeAction(CodeAction),
-}
-
-impl From<Command> for CodeActionOrCommand {
- fn from(comand: Command) -> Self {
- CodeActionOrCommand::Command(comand)
- }
-}
-
-impl From<CodeAction> for CodeActionOrCommand {
- fn from(action: CodeAction) -> Self {
- CodeActionOrCommand::CodeAction(action)
- }
-}
-
-#[derive(Debug, Eq, PartialEq, Hash, PartialOrd, Clone, Deserialize, Serialize)]
-pub struct CodeActionKind(Cow<'static, str>);
-
-impl CodeActionKind {
- /// Empty kind.
- pub const EMPTY: CodeActionKind = CodeActionKind::new("");
-
- /// Base kind for quickfix actions: 'quickfix'
- pub const QUICKFIX: CodeActionKind = CodeActionKind::new("quickfix");
-
- /// Base kind for refactoring actions: 'refactor'
- pub const REFACTOR: CodeActionKind = CodeActionKind::new("refactor");
-
- /// Base kind for refactoring extraction actions: 'refactor.extract'
- ///
- /// Example extract actions:
- ///
- /// - Extract method
- /// - Extract function
- /// - Extract variable
- /// - Extract interface from class
- /// - ...
- pub const REFACTOR_EXTRACT: CodeActionKind = CodeActionKind::new("refactor.extract");
-
- /// Base kind for refactoring inline actions: 'refactor.inline'
- ///
- /// Example inline actions:
- ///
- /// - Inline function
- /// - Inline variable
- /// - Inline constant
- /// - ...
- pub const REFACTOR_INLINE: CodeActionKind = CodeActionKind::new("refactor.inline");
-
- /// Base kind for refactoring rewrite actions: 'refactor.rewrite'
- ///
- /// Example rewrite actions:
- ///
- /// - Convert JavaScript function to class
- /// - Add or remove parameter
- /// - Encapsulate field
- /// - Make method static
- /// - Move method to base class
- /// - ...
- pub const REFACTOR_REWRITE: CodeActionKind = CodeActionKind::new("refactor.rewrite");
-
- /// Base kind for source actions: `source`
- ///
- /// Source code actions apply to the entire file.
- pub const SOURCE: CodeActionKind = CodeActionKind::new("source");
-
- /// Base kind for an organize imports source action: `source.organizeImports`
- pub const SOURCE_ORGANIZE_IMPORTS: CodeActionKind =
- CodeActionKind::new("source.organizeImports");
-
- pub const fn new(tag: &'static str) -> Self {
- CodeActionKind(Cow::Borrowed(tag))
- }
-
- pub fn as_str(&self) -> &str {
- &self.0
- }
-}
-
-impl From<String> for CodeActionKind {
- fn from(from: String) -> Self {
- CodeActionKind(Cow::from(from))
- }
-}
-
-impl From<&'static str> for CodeActionKind {
- fn from(from: &'static str) -> Self {
- CodeActionKind::new(from)
- }
-}
-
-#[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct CodeAction {
- /// A short, human-readable, title for this code action.
- pub title: String,
-
- /// The kind of the code action.
- /// Used to filter code actions.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub kind: Option<CodeActionKind>,
-
- /// The diagnostics that this code action resolves.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub diagnostics: Option<Vec<Diagnostic>>,
-
- /// The workspace edit this code action performs.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub edit: Option<WorkspaceEdit>,
-
- /// A command this code action executes. If a code action
- /// provides an edit and a command, first the edit is
- /// executed and then the command.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub command: Option<Command>,
-
- /// Marks this as a preferred action. Preferred actions are used by the `auto fix` command and can be targeted
- /// by keybindings.
- /// A quick fix should be marked preferred if it properly addresses the underlying error.
- /// A refactoring should be marked preferred if it is the most reasonable choice of actions to take.
- ///
- /// since 3.15.0
- #[serde(skip_serializing_if = "Option::is_none")]
- pub is_preferred: Option<bool>,
-
- /// Marks that the code action cannot currently be applied.
- ///
- /// Clients should follow the following guidelines regarding disabled code actions:
- ///
- /// - Disabled code actions are not shown in automatic
- /// [lightbulb](https://code.visualstudio.com/docs/editor/editingevolved#_code-action)
- /// code action menu.
- ///
- /// - Disabled actions are shown as faded out in the code action menu when the user request
- /// a more specific type of code action, such as refactorings.
- ///
- /// - If the user has a keybinding that auto applies a code action and only a disabled code
- /// actions are returned, the client should show the user an error message with `reason`
- /// in the editor.
- ///
- /// since 3.16.0
- #[serde(skip_serializing_if = "Option::is_none")]
- pub disabled: Option<CodeActionDisabled>,
-
- /// A data entry field that is preserved on a code action between
- /// a `textDocument/codeAction` and a `codeAction/resolve` request.
- ///
- /// since 3.16.0
- #[serde(skip_serializing_if = "Option::is_none")]
- pub data: Option<Value>,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct CodeActionDisabled {
- /// Human readable description of why the code action is currently disabled.
- ///
- /// This is displayed in the code actions UI.
- pub reason: String,
-}
-
-/// Contains additional diagnostic information about the context in which
-/// a code action is run.
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-pub struct CodeActionContext {
- /// An array of diagnostics.
- pub diagnostics: Vec<Diagnostic>,
-
- /// Requested kind of actions to return.
- ///
- /// Actions not of this kind are filtered out by the client before being shown. So servers
- /// can omit computing them.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub only: Option<Vec<CodeActionKind>>,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct CodeActionOptions {
- /// CodeActionKinds that this server may return.
- ///
- /// The list of kinds may be generic, such as `CodeActionKind.Refactor`, or the server
- /// may list out every specific kind they provide.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub code_action_kinds: Option<Vec<CodeActionKind>>,
-
- #[serde(flatten)]
- pub work_done_progress_options: WorkDoneProgressOptions,
-
- /// The server provides support to resolve additional
- /// information for a code action.
- ///
- /// since 3.16.0
- #[serde(skip_serializing_if = "Option::is_none")]
- pub resolve_provider: Option<bool>,
-}
-
-#[cfg(test)]
-mod tests {
- use super::*;
- use crate::tests::test_serialization;
-
- #[test]
- fn test_code_action_response() {
- test_serialization(
- &vec![
- CodeActionOrCommand::Command(Command {
- title: "title".to_string(),
- command: "command".to_string(),
- arguments: None,
- }),
- CodeActionOrCommand::CodeAction(CodeAction {
- title: "title".to_string(),
- kind: Some(CodeActionKind::QUICKFIX),
- command: None,
- diagnostics: None,
- edit: None,
- is_preferred: None,
- ..CodeAction::default()
- }),
- ],
- r#"[{"title":"title","command":"command"},{"title":"title","kind":"quickfix"}]"#,
- )
- }
-}
+use crate::{
+ Command, Diagnostic, PartialResultParams, Range, TextDocumentIdentifier,
+ WorkDoneProgressOptions, WorkDoneProgressParams, WorkspaceEdit,
+};
+use serde::{Deserialize, Serialize};
+
+use serde_json::Value;
+
+use std::borrow::Cow;
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(untagged)]
+pub enum CodeActionProviderCapability {
+ Simple(bool),
+ Options(CodeActionOptions),
+}
+
+impl From<CodeActionOptions> for CodeActionProviderCapability {
+ fn from(from: CodeActionOptions) -> Self {
+ Self::Options(from)
+ }
+}
+
+impl From<bool> for CodeActionProviderCapability {
+ fn from(from: bool) -> Self {
+ Self::Simple(from)
+ }
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct CodeActionClientCapabilities {
+ ///
+ /// This capability supports dynamic registration.
+ ///
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub dynamic_registration: Option<bool>,
+
+    /// The client supports code action literals as a valid
+ /// response of the `textDocument/codeAction` request.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub code_action_literal_support: Option<CodeActionLiteralSupport>,
+
+ /// Whether code action supports the `isPreferred` property.
+ ///
+ /// since 3.15.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub is_preferred_support: Option<bool>,
+
+ /// Whether code action supports the `disabled` property.
+ ///
+ /// since 3.16.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub disabled_support: Option<bool>,
+
+ /// Whether code action supports the `data` property which is
+ /// preserved between a `textDocument/codeAction` and a
+ /// `codeAction/resolve` request.
+ ///
+ /// since 3.16.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub data_support: Option<bool>,
+
+ /// Whether the client supports resolving additional code action
+ /// properties via a separate `codeAction/resolve` request.
+ ///
+ /// since 3.16.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub resolve_support: Option<CodeActionCapabilityResolveSupport>,
+
+ /// Whether the client honors the change annotations in
+ /// text edits and resource operations returned via the
+ /// `CodeAction#edit` property by for example presenting
+ /// the workspace edit in the user interface and asking
+ /// for confirmation.
+ ///
+ /// @since 3.16.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub honors_change_annotations: Option<bool>,
+}
+
+/// Whether the client supports resolving additional code action
+/// properties via a separate `codeAction/resolve` request.
+///
+/// since 3.16.0
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct CodeActionCapabilityResolveSupport {
+ /// The properties that a client can resolve lazily.
+ pub properties: Vec<String>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct CodeActionLiteralSupport {
+    /// The code action kind is supported with the following value set.
+ pub code_action_kind: CodeActionKindLiteralSupport,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct CodeActionKindLiteralSupport {
+ /// The code action kind values the client supports. When this
+ /// property exists the client also guarantees that it will
+    /// handle values outside its set gracefully and fall back
+ /// to a default value when unknown.
+ pub value_set: Vec<String>,
+}
+
+/// Params for the CodeActionRequest
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct CodeActionParams {
+ /// The document in which the command was invoked.
+ pub text_document: TextDocumentIdentifier,
+
+ /// The range for which the command was invoked.
+ pub range: Range,
+
+ /// Context carrying additional information.
+ pub context: CodeActionContext,
+
+ #[serde(flatten)]
+ pub work_done_progress_params: WorkDoneProgressParams,
+
+ #[serde(flatten)]
+ pub partial_result_params: PartialResultParams,
+}
+
+/// response for CodeActionRequest
+pub type CodeActionResponse = Vec<CodeActionOrCommand>;
+
+#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)]
+#[serde(untagged)]
+pub enum CodeActionOrCommand {
+ Command(Command),
+ CodeAction(CodeAction),
+}
+
+impl From<Command> for CodeActionOrCommand {
+    fn from(command: Command) -> Self {
+        CodeActionOrCommand::Command(command)
+ }
+}
+
+impl From<CodeAction> for CodeActionOrCommand {
+ fn from(action: CodeAction) -> Self {
+ CodeActionOrCommand::CodeAction(action)
+ }
+}
+
+#[derive(Debug, Eq, PartialEq, Hash, PartialOrd, Clone, Deserialize, Serialize)]
+pub struct CodeActionKind(Cow<'static, str>);
+
+impl CodeActionKind {
+ /// Empty kind.
+ pub const EMPTY: CodeActionKind = CodeActionKind::new("");
+
+ /// Base kind for quickfix actions: 'quickfix'
+ pub const QUICKFIX: CodeActionKind = CodeActionKind::new("quickfix");
+
+ /// Base kind for refactoring actions: 'refactor'
+ pub const REFACTOR: CodeActionKind = CodeActionKind::new("refactor");
+
+ /// Base kind for refactoring extraction actions: 'refactor.extract'
+ ///
+ /// Example extract actions:
+ ///
+ /// - Extract method
+ /// - Extract function
+ /// - Extract variable
+ /// - Extract interface from class
+ /// - ...
+ pub const REFACTOR_EXTRACT: CodeActionKind = CodeActionKind::new("refactor.extract");
+
+ /// Base kind for refactoring inline actions: 'refactor.inline'
+ ///
+ /// Example inline actions:
+ ///
+ /// - Inline function
+ /// - Inline variable
+ /// - Inline constant
+ /// - ...
+ pub const REFACTOR_INLINE: CodeActionKind = CodeActionKind::new("refactor.inline");
+
+ /// Base kind for refactoring rewrite actions: 'refactor.rewrite'
+ ///
+ /// Example rewrite actions:
+ ///
+ /// - Convert JavaScript function to class
+ /// - Add or remove parameter
+ /// - Encapsulate field
+ /// - Make method static
+ /// - Move method to base class
+ /// - ...
+ pub const REFACTOR_REWRITE: CodeActionKind = CodeActionKind::new("refactor.rewrite");
+
+ /// Base kind for source actions: `source`
+ ///
+ /// Source code actions apply to the entire file.
+ pub const SOURCE: CodeActionKind = CodeActionKind::new("source");
+
+ /// Base kind for an organize imports source action: `source.organizeImports`
+ pub const SOURCE_ORGANIZE_IMPORTS: CodeActionKind =
+ CodeActionKind::new("source.organizeImports");
+
+ /// Base kind for a 'fix all' source action: `source.fixAll`.
+ ///
+ /// 'Fix all' actions automatically fix errors that have a clear fix that
+ /// do not require user input. They should not suppress errors or perform
+ /// unsafe fixes such as generating new types or classes.
+ ///
+ /// @since 3.17.0
+ pub const SOURCE_FIX_ALL: CodeActionKind = CodeActionKind::new("source.fixAll");
+
+ pub const fn new(tag: &'static str) -> Self {
+ CodeActionKind(Cow::Borrowed(tag))
+ }
+
+ pub fn as_str(&self) -> &str {
+ &self.0
+ }
+}
+
+impl From<String> for CodeActionKind {
+ fn from(from: String) -> Self {
+ CodeActionKind(Cow::from(from))
+ }
+}
+
+impl From<&'static str> for CodeActionKind {
+ fn from(from: &'static str) -> Self {
+ CodeActionKind::new(from)
+ }
+}
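
A small sketch of how the kind constants and the From impls above combine in practice; the custom kind string and the prefix check are illustrative assumptions, not something defined by this file.

    use lsp_types::CodeActionKind;

    fn main() {
        // Predefined base kind and a hypothetical, more specific custom kind.
        let base = CodeActionKind::REFACTOR;
        let custom: CodeActionKind = "refactor.extract.function".into();

        // Kinds are hierarchical dotted strings, so filtering (e.g. against a
        // client's `only` list) is typically a prefix match on the raw value.
        assert!(custom.as_str().starts_with(base.as_str()));
        assert_eq!(CodeActionKind::SOURCE_FIX_ALL.as_str(), "source.fixAll");
    }
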
+
+#[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct CodeAction {
+ /// A short, human-readable, title for this code action.
+ pub title: String,
+
+ /// The kind of the code action.
+ /// Used to filter code actions.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub kind: Option<CodeActionKind>,
+
+ /// The diagnostics that this code action resolves.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub diagnostics: Option<Vec<Diagnostic>>,
+
+ /// The workspace edit this code action performs.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub edit: Option<WorkspaceEdit>,
+
+ /// A command this code action executes. If a code action
+ /// provides an edit and a command, first the edit is
+ /// executed and then the command.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub command: Option<Command>,
+
+ /// Marks this as a preferred action. Preferred actions are used by the `auto fix` command and can be targeted
+ /// by keybindings.
+ /// A quick fix should be marked preferred if it properly addresses the underlying error.
+ /// A refactoring should be marked preferred if it is the most reasonable choice of actions to take.
+ ///
+ /// since 3.15.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub is_preferred: Option<bool>,
+
+ /// Marks that the code action cannot currently be applied.
+ ///
+ /// Clients should follow the following guidelines regarding disabled code actions:
+ ///
+    /// - Disabled code actions are not shown in the automatic
+ /// [lightbulb](https://code.visualstudio.com/docs/editor/editingevolved#_code-action)
+ /// code action menu.
+ ///
+    /// - Disabled actions are shown as faded out in the code action menu when the user requests
+ /// a more specific type of code action, such as refactorings.
+ ///
+    /// - If the user has a keybinding that auto-applies a code action and only disabled code
+ /// actions are returned, the client should show the user an error message with `reason`
+ /// in the editor.
+ ///
+ /// since 3.16.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub disabled: Option<CodeActionDisabled>,
+
+ /// A data entry field that is preserved on a code action between
+ /// a `textDocument/codeAction` and a `codeAction/resolve` request.
+ ///
+ /// since 3.16.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub data: Option<Value>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct CodeActionDisabled {
+ /// Human readable description of why the code action is currently disabled.
+ ///
+ /// This is displayed in the code actions UI.
+ pub reason: String,
+}
+
+/// The reason why code actions were requested.
+///
+/// @since 3.17.0
+#[derive(Eq, PartialEq, Clone, Copy, Deserialize, Serialize)]
+#[serde(transparent)]
+pub struct CodeActionTriggerKind(i32);
+lsp_enum! {
+impl CodeActionTriggerKind {
+ /// Code actions were explicitly requested by the user or by an extension.
+ pub const INVOKED: CodeActionTriggerKind = CodeActionTriggerKind(1);
+
+ /// Code actions were requested automatically.
+ ///
+ /// This typically happens when current selection in a file changes, but can
+ /// also be triggered when file content changes.
+ pub const AUTOMATIC: CodeActionTriggerKind = CodeActionTriggerKind(2);
+}
+}
+
+/// Contains additional diagnostic information about the context in which
+/// a code action is run.
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct CodeActionContext {
+ /// An array of diagnostics.
+ pub diagnostics: Vec<Diagnostic>,
+
+ /// Requested kind of actions to return.
+ ///
+ /// Actions not of this kind are filtered out by the client before being shown. So servers
+ /// can omit computing them.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub only: Option<Vec<CodeActionKind>>,
+
+ /// The reason why code actions were requested.
+ ///
+ /// @since 3.17.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub trigger_kind: Option<CodeActionTriggerKind>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct CodeActionOptions {
+ /// CodeActionKinds that this server may return.
+ ///
+ /// The list of kinds may be generic, such as `CodeActionKind.Refactor`, or the server
+ /// may list out every specific kind they provide.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub code_action_kinds: Option<Vec<CodeActionKind>>,
+
+ #[serde(flatten)]
+ pub work_done_progress_options: WorkDoneProgressOptions,
+
+ /// The server provides support to resolve additional
+ /// information for a code action.
+ ///
+ /// since 3.16.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub resolve_provider: Option<bool>,
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::tests::test_serialization;
+
+ #[test]
+ fn test_code_action_response() {
+ test_serialization(
+ &vec![
+ CodeActionOrCommand::Command(Command {
+ title: "title".to_string(),
+ command: "command".to_string(),
+ arguments: None,
+ }),
+ CodeActionOrCommand::CodeAction(CodeAction {
+ title: "title".to_string(),
+ kind: Some(CodeActionKind::QUICKFIX),
+ command: None,
+ diagnostics: None,
+ edit: None,
+ is_preferred: None,
+ ..CodeAction::default()
+ }),
+ ],
+ r#"[{"title":"title","command":"command"},{"title":"title","kind":"quickfix"}]"#,
+ )
+ }
+}
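
To make the shape of these types concrete, here is a minimal sketch that builds a quick-fix CodeAction together with the CodeActionContext a client might send alongside a request. The title, the empty diagnostics list and the restriction to quick fixes are placeholder choices, and serde_json is assumed as a dependency.

    use lsp_types::{CodeAction, CodeActionContext, CodeActionKind, CodeActionTriggerKind};

    fn main() {
        // A quick fix with only a title and a kind; every other field stays
        // `None` thanks to the derived `Default`.
        let action = CodeAction {
            title: "Remove unused import".to_string(),
            kind: Some(CodeActionKind::QUICKFIX),
            ..CodeAction::default()
        };

        // Context a client could send: no diagnostics, restricted to quick
        // fixes, triggered explicitly by the user.
        let context = CodeActionContext {
            diagnostics: Vec::new(),
            only: Some(vec![CodeActionKind::QUICKFIX]),
            trigger_kind: Some(CodeActionTriggerKind::INVOKED),
        };

        // `skip_serializing_if` keeps the JSON small:
        // {"title":"Remove unused import","kind":"quickfix"}
        println!("{}", serde_json::to_string(&action).unwrap());
        println!("{}", serde_json::to_string(&context).unwrap());
    }
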
diff --git a/vendor/lsp-types/src/code_lens.rs b/vendor/lsp-types/src/code_lens.rs
index 2e4fa790f..e4f35c722 100644
--- a/vendor/lsp-types/src/code_lens.rs
+++ b/vendor/lsp-types/src/code_lens.rs
@@ -1,66 +1,66 @@
-use serde::{Deserialize, Serialize};
-use serde_json::Value;
-
-use crate::{
- Command, DynamicRegistrationClientCapabilities, PartialResultParams, Range,
- TextDocumentIdentifier, WorkDoneProgressParams,
-};
-
-pub type CodeLensClientCapabilities = DynamicRegistrationClientCapabilities;
-
-/// Code Lens options.
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct CodeLensOptions {
- /// Code lens has a resolve provider as well.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub resolve_provider: Option<bool>,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct CodeLensParams {
- /// The document to request code lens for.
- pub text_document: TextDocumentIdentifier,
-
- #[serde(flatten)]
- pub work_done_progress_params: WorkDoneProgressParams,
-
- #[serde(flatten)]
- pub partial_result_params: PartialResultParams,
-}
-
-/// A code lens represents a command that should be shown along with
-/// source text, like the number of references, a way to run tests, etc.
-///
-/// A code lens is _unresolved_ when no command is associated to it. For performance
-/// reasons the creation of a code lens and resolving should be done in two stages.
-#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct CodeLens {
- /// The range in which this code lens is valid. Should only span a single line.
- pub range: Range,
-
- /// The command this code lens represents.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub command: Option<Command>,
-
- /// A data entry field that is preserved on a code lens item between
- /// a code lens and a code lens resolve request.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub data: Option<Value>,
-}
-
-#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct CodeLensWorkspaceClientCapabilities {
- /// Whether the client implementation supports a refresh request sent from the
- /// server to the client.
- ///
- /// Note that this event is global and will force the client to refresh all
- /// code lenses currently shown. It should be used with absolute care and is
- /// useful for situation where a server for example detect a project wide
- /// change that requires such a calculation.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub refresh_support: Option<bool>,
-}
+use serde::{Deserialize, Serialize};
+use serde_json::Value;
+
+use crate::{
+ Command, DynamicRegistrationClientCapabilities, PartialResultParams, Range,
+ TextDocumentIdentifier, WorkDoneProgressParams,
+};
+
+pub type CodeLensClientCapabilities = DynamicRegistrationClientCapabilities;
+
+/// Code Lens options.
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct CodeLensOptions {
+ /// Code lens has a resolve provider as well.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub resolve_provider: Option<bool>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct CodeLensParams {
+ /// The document to request code lens for.
+ pub text_document: TextDocumentIdentifier,
+
+ #[serde(flatten)]
+ pub work_done_progress_params: WorkDoneProgressParams,
+
+ #[serde(flatten)]
+ pub partial_result_params: PartialResultParams,
+}
+
+/// A code lens represents a command that should be shown along with
+/// source text, like the number of references, a way to run tests, etc.
+///
+/// A code lens is _unresolved_ when no command is associated to it. For performance
+/// reasons the creation of a code lens and resolving should be done in two stages.
+#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct CodeLens {
+ /// The range in which this code lens is valid. Should only span a single line.
+ pub range: Range,
+
+ /// The command this code lens represents.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub command: Option<Command>,
+
+ /// A data entry field that is preserved on a code lens item between
+ /// a code lens and a code lens resolve request.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub data: Option<Value>,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct CodeLensWorkspaceClientCapabilities {
+ /// Whether the client implementation supports a refresh request sent from the
+ /// server to the client.
+ ///
+ /// Note that this event is global and will force the client to refresh all
+ /// code lenses currently shown. It should be used with absolute care and is
+    /// useful for situations where a server, for example, detects a project-wide
+ /// change that requires such a calculation.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub refresh_support: Option<bool>,
+}
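
A minimal sketch of the two-stage flow the comments above describe: a lens is first produced without a command, and the command is attached later when the client asks for it to be resolved. The `data` payload, the range and the command name are illustrative assumptions, with `lsp-types` and `serde_json` assumed as dependencies.

    use lsp_types::{CodeLens, Command, Position, Range};

    fn main() {
        // Stage 1: an unresolved lens, cheap to compute for the whole document.
        let mut lens = CodeLens {
            range: Range::new(Position::new(3, 0), Position::new(3, 12)),
            command: None,
            data: Some(serde_json::json!({ "symbol": "main" })),
        };

        // Stage 2: a later resolve request fills in the command shown to the user.
        lens.command = Some(Command {
            title: "3 references".to_string(),
            command: "editor.showReferences".to_string(),
            arguments: None,
        });

        println!("{}", serde_json::to_string(&lens).unwrap());
    }
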
diff --git a/vendor/lsp-types/src/color.rs b/vendor/lsp-types/src/color.rs
index ef9e858af..a3999683a 100644
--- a/vendor/lsp-types/src/color.rs
+++ b/vendor/lsp-types/src/color.rs
@@ -1,122 +1,122 @@
-use crate::{
- DocumentSelector, DynamicRegistrationClientCapabilities, PartialResultParams, Range,
- TextDocumentIdentifier, TextEdit, WorkDoneProgressParams,
-};
-use serde::{Deserialize, Serialize};
-
-pub type DocumentColorClientCapabilities = DynamicRegistrationClientCapabilities;
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct ColorProviderOptions {}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct StaticTextDocumentColorProviderOptions {
- /// A document selector to identify the scope of the registration. If set to null
- /// the document selector provided on the client side will be used.
- pub document_selector: Option<DocumentSelector>,
-
- #[serde(skip_serializing_if = "Option::is_none")]
- pub id: Option<String>,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(untagged)]
-pub enum ColorProviderCapability {
- Simple(bool),
- ColorProvider(ColorProviderOptions),
- Options(StaticTextDocumentColorProviderOptions),
-}
-
-impl From<ColorProviderOptions> for ColorProviderCapability {
- fn from(from: ColorProviderOptions) -> Self {
- Self::ColorProvider(from)
- }
-}
-
-impl From<StaticTextDocumentColorProviderOptions> for ColorProviderCapability {
- fn from(from: StaticTextDocumentColorProviderOptions) -> Self {
- Self::Options(from)
- }
-}
-
-impl From<bool> for ColorProviderCapability {
- fn from(from: bool) -> Self {
- Self::Simple(from)
- }
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct DocumentColorParams {
- /// The text document
- pub text_document: TextDocumentIdentifier,
-
- #[serde(flatten)]
- pub work_done_progress_params: WorkDoneProgressParams,
-
- #[serde(flatten)]
- pub partial_result_params: PartialResultParams,
-}
-
-#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct ColorInformation {
- /// The range in the document where this color appears.
- pub range: Range,
- /// The actual color value for this color range.
- pub color: Color,
-}
-
-#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct Color {
- /// The red component of this color in the range [0-1].
- pub red: f32,
- /// The green component of this color in the range [0-1].
- pub green: f32,
- /// The blue component of this color in the range [0-1].
- pub blue: f32,
- /// The alpha component of this color in the range [0-1].
- pub alpha: f32,
-}
-
-#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct ColorPresentationParams {
- /// The text document.
- pub text_document: TextDocumentIdentifier,
-
- /// The color information to request presentations for.
- pub color: Color,
-
- /// The range where the color would be inserted. Serves as a context.
- pub range: Range,
-
- #[serde(flatten)]
- pub work_done_progress_params: WorkDoneProgressParams,
-
- #[serde(flatten)]
- pub partial_result_params: PartialResultParams,
-}
-
-#[derive(Debug, PartialEq, Eq, Deserialize, Serialize, Default, Clone)]
-#[serde(rename_all = "camelCase")]
-pub struct ColorPresentation {
- /// The label of this color presentation. It will be shown on the color
- /// picker header. By default this is also the text that is inserted when selecting
- /// this color presentation.
- pub label: String,
-
- /// An [edit](#TextEdit) which is applied to a document when selecting
- /// this presentation for the color. When `falsy` the [label](#ColorPresentation.label)
- /// is used.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub text_edit: Option<TextEdit>,
-
- /// An optional array of additional [text edits](#TextEdit) that are applied when
- /// selecting this color presentation. Edits must not overlap with the main [edit](#ColorPresentation.textEdit) nor with themselves.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub additional_text_edits: Option<Vec<TextEdit>>,
-}
+use crate::{
+ DocumentSelector, DynamicRegistrationClientCapabilities, PartialResultParams, Range,
+ TextDocumentIdentifier, TextEdit, WorkDoneProgressParams,
+};
+use serde::{Deserialize, Serialize};
+
+pub type DocumentColorClientCapabilities = DynamicRegistrationClientCapabilities;
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct ColorProviderOptions {}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct StaticTextDocumentColorProviderOptions {
+ /// A document selector to identify the scope of the registration. If set to null
+ /// the document selector provided on the client side will be used.
+ pub document_selector: Option<DocumentSelector>,
+
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub id: Option<String>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(untagged)]
+pub enum ColorProviderCapability {
+ Simple(bool),
+ ColorProvider(ColorProviderOptions),
+ Options(StaticTextDocumentColorProviderOptions),
+}
+
+impl From<ColorProviderOptions> for ColorProviderCapability {
+ fn from(from: ColorProviderOptions) -> Self {
+ Self::ColorProvider(from)
+ }
+}
+
+impl From<StaticTextDocumentColorProviderOptions> for ColorProviderCapability {
+ fn from(from: StaticTextDocumentColorProviderOptions) -> Self {
+ Self::Options(from)
+ }
+}
+
+impl From<bool> for ColorProviderCapability {
+ fn from(from: bool) -> Self {
+ Self::Simple(from)
+ }
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct DocumentColorParams {
+ /// The text document
+ pub text_document: TextDocumentIdentifier,
+
+ #[serde(flatten)]
+ pub work_done_progress_params: WorkDoneProgressParams,
+
+ #[serde(flatten)]
+ pub partial_result_params: PartialResultParams,
+}
+
+#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct ColorInformation {
+ /// The range in the document where this color appears.
+ pub range: Range,
+ /// The actual color value for this color range.
+ pub color: Color,
+}
+
+#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct Color {
+ /// The red component of this color in the range [0-1].
+ pub red: f32,
+ /// The green component of this color in the range [0-1].
+ pub green: f32,
+ /// The blue component of this color in the range [0-1].
+ pub blue: f32,
+ /// The alpha component of this color in the range [0-1].
+ pub alpha: f32,
+}
+
+#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct ColorPresentationParams {
+ /// The text document.
+ pub text_document: TextDocumentIdentifier,
+
+ /// The color information to request presentations for.
+ pub color: Color,
+
+ /// The range where the color would be inserted. Serves as a context.
+ pub range: Range,
+
+ #[serde(flatten)]
+ pub work_done_progress_params: WorkDoneProgressParams,
+
+ #[serde(flatten)]
+ pub partial_result_params: PartialResultParams,
+}
+
+#[derive(Debug, PartialEq, Eq, Deserialize, Serialize, Default, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct ColorPresentation {
+ /// The label of this color presentation. It will be shown on the color
+ /// picker header. By default this is also the text that is inserted when selecting
+ /// this color presentation.
+ pub label: String,
+
+ /// An [edit](#TextEdit) which is applied to a document when selecting
+ /// this presentation for the color. When `falsy` the [label](#ColorPresentation.label)
+ /// is used.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub text_edit: Option<TextEdit>,
+
+ /// An optional array of additional [text edits](#TextEdit) that are applied when
+ /// selecting this color presentation. Edits must not overlap with the main [edit](#ColorPresentation.textEdit) nor with themselves.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub additional_text_edits: Option<Vec<TextEdit>>,
+}
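
As a quick sketch of how these color types fit together, the example below pairs a Color (components in the [0-1] range) with a ColorPresentation whose text edit would replace the color literal in the document. The hex label and the positions are made up for illustration, and `lsp-types` plus `serde_json` are assumed as dependencies.

    use lsp_types::{Color, ColorPresentation, Position, Range, TextEdit};

    fn main() {
        // An orange color; each component is a float in the [0-1] range.
        let color = Color { red: 1.0, green: 0.5, blue: 0.0, alpha: 1.0 };

        // Hypothetical span of the color literal in the source document.
        let range = Range::new(Position::new(7, 10), Position::new(7, 17));

        // One possible presentation: show "#FF8000" in the picker and insert it
        // through the text edit when the user selects it.
        let presentation = ColorPresentation {
            label: "#FF8000".to_string(),
            text_edit: Some(TextEdit::new(range, "#FF8000".to_string())),
            additional_text_edits: None,
        };

        println!("{}", serde_json::to_string(&color).unwrap());
        println!("{}", serde_json::to_string(&presentation).unwrap());
    }
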
diff --git a/vendor/lsp-types/src/completion.rs b/vendor/lsp-types/src/completion.rs
index 5f9277a35..4884960a0 100644
--- a/vendor/lsp-types/src/completion.rs
+++ b/vendor/lsp-types/src/completion.rs
@@ -1,600 +1,616 @@
-use serde::{Deserialize, Serialize};
-
-use crate::{
- Command, Documentation, MarkupKind, PartialResultParams, TagSupport,
- TextDocumentPositionParams, TextDocumentRegistrationOptions, TextEdit, WorkDoneProgressOptions,
- WorkDoneProgressParams,
-};
-
-use crate::Range;
-use serde_json::Value;
-use std::fmt::Debug;
-
-/// Defines how to interpret the insert text in a completion item
-#[derive(Eq, PartialEq, Clone, Copy, Serialize, Deserialize)]
-#[serde(transparent)]
-pub struct InsertTextFormat(i32);
-lsp_enum! {
-impl InsertTextFormat {
- pub const PLAIN_TEXT: InsertTextFormat = InsertTextFormat(1);
- pub const SNIPPET: InsertTextFormat = InsertTextFormat(2);
-}
-}
-
-/// The kind of a completion entry.
-#[derive(Eq, PartialEq, Clone, Copy, Serialize, Deserialize)]
-#[serde(transparent)]
-pub struct CompletionItemKind(i32);
-lsp_enum! {
-impl CompletionItemKind {
- pub const TEXT: CompletionItemKind = CompletionItemKind(1);
- pub const METHOD: CompletionItemKind = CompletionItemKind(2);
- pub const FUNCTION: CompletionItemKind = CompletionItemKind(3);
- pub const CONSTRUCTOR: CompletionItemKind = CompletionItemKind(4);
- pub const FIELD: CompletionItemKind = CompletionItemKind(5);
- pub const VARIABLE: CompletionItemKind = CompletionItemKind(6);
- pub const CLASS: CompletionItemKind = CompletionItemKind(7);
- pub const INTERFACE: CompletionItemKind = CompletionItemKind(8);
- pub const MODULE: CompletionItemKind = CompletionItemKind(9);
- pub const PROPERTY: CompletionItemKind = CompletionItemKind(10);
- pub const UNIT: CompletionItemKind = CompletionItemKind(11);
- pub const VALUE: CompletionItemKind = CompletionItemKind(12);
- pub const ENUM: CompletionItemKind = CompletionItemKind(13);
- pub const KEYWORD: CompletionItemKind = CompletionItemKind(14);
- pub const SNIPPET: CompletionItemKind = CompletionItemKind(15);
- pub const COLOR: CompletionItemKind = CompletionItemKind(16);
- pub const FILE: CompletionItemKind = CompletionItemKind(17);
- pub const REFERENCE: CompletionItemKind = CompletionItemKind(18);
- pub const FOLDER: CompletionItemKind = CompletionItemKind(19);
- pub const ENUM_MEMBER: CompletionItemKind = CompletionItemKind(20);
- pub const CONSTANT: CompletionItemKind = CompletionItemKind(21);
- pub const STRUCT: CompletionItemKind = CompletionItemKind(22);
- pub const EVENT: CompletionItemKind = CompletionItemKind(23);
- pub const OPERATOR: CompletionItemKind = CompletionItemKind(24);
- pub const TYPE_PARAMETER: CompletionItemKind = CompletionItemKind(25);
-}
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct CompletionItemCapability {
- /// Client supports snippets as insert text.
- ///
- /// A snippet can define tab stops and placeholders with `$1`, `$2`
- /// and `${3:foo}`. `$0` defines the final tab stop, it defaults to
- /// the end of the snippet. Placeholders with equal identifiers are linked,
- /// that is typing in one will update others too.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub snippet_support: Option<bool>,
-
- /// Client supports commit characters on a completion item.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub commit_characters_support: Option<bool>,
-
- /// Client supports the follow content formats for the documentation
- /// property. The order describes the preferred format of the client.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub documentation_format: Option<Vec<MarkupKind>>,
-
- /// Client supports the deprecated property on a completion item.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub deprecated_support: Option<bool>,
-
- /// Client supports the preselect property on a completion item.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub preselect_support: Option<bool>,
-
- /// Client supports the tag property on a completion item. Clients supporting
- /// tags have to handle unknown tags gracefully. Clients especially need to
- /// preserve unknown tags when sending a completion item back to the server in
- /// a resolve call.
- #[serde(
- default,
- skip_serializing_if = "Option::is_none",
- deserialize_with = "TagSupport::deserialize_compat"
- )]
- pub tag_support: Option<TagSupport<CompletionItemTag>>,
-
- /// Client support insert replace edit to control different behavior if a
- /// completion item is inserted in the text or should replace text.
- ///
- /// @since 3.16.0
- #[serde(skip_serializing_if = "Option::is_none")]
- pub insert_replace_support: Option<bool>,
-
- /// Indicates which properties a client can resolve lazily on a completion
- /// item. Before version 3.16.0 only the predefined properties `documentation`
- /// and `details` could be resolved lazily.
- ///
- /// @since 3.16.0
- #[serde(skip_serializing_if = "Option::is_none")]
- pub resolve_support: Option<CompletionItemCapabilityResolveSupport>,
-
- /// The client supports the `insertTextMode` property on
- /// a completion item to override the whitespace handling mode
- /// as defined by the client.
- ///
- /// @since 3.16.0
- #[serde(skip_serializing_if = "Option::is_none")]
- pub insert_text_mode_support: Option<InsertTextModeSupport>,
-
- /// The client has support for completion item label
- /// details (see also `CompletionItemLabelDetails`).
- ///
- /// @since 3.17.0 - proposed state
- ///
- #[cfg(feature = "proposed")]
- #[serde(skip_serializing_if = "Option::is_none")]
- pub label_details_support: Option<bool>,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct CompletionItemCapabilityResolveSupport {
- /// The properties that a client can resolve lazily.
- pub properties: Vec<String>,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct InsertTextModeSupport {
- pub value_set: Vec<InsertTextMode>,
-}
-
-/// How whitespace and indentation is handled during completion
-/// item insertion.
-///
-/// @since 3.16.0
-#[derive(Eq, PartialEq, Clone, Copy, Serialize, Deserialize)]
-#[serde(transparent)]
-pub struct InsertTextMode(i32);
-lsp_enum! {
-impl InsertTextMode {
- /// The insertion or replace strings is taken as it is. If the
- /// value is multi line the lines below the cursor will be
- /// inserted using the indentation defined in the string value.
- /// The client will not apply any kind of adjustments to the
- /// string.
- pub const AS_IS: InsertTextMode = InsertTextMode(1);
-
- /// The editor adjusts leading whitespace of new lines so that
- /// they match the indentation up to the cursor of the line for
- /// which the item is accepted.
- ///
- /// Consider a line like this: <2tabs><cursor><3tabs>foo. Accepting a
- /// multi line completion item is indented using 2 tabs all
- /// following lines inserted will be indented using 2 tabs as well.
- pub const ADJUST_INDENTATION: InsertTextMode = InsertTextMode(2);
-}
-}
-
-#[derive(Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(transparent)]
-pub struct CompletionItemTag(i32);
-lsp_enum! {
-impl CompletionItemTag {
- pub const DEPRECATED: CompletionItemTag = CompletionItemTag(1);
-}
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct CompletionItemKindCapability {
- /// The completion item kind values the client supports. When this
- /// property exists the client also guarantees that it will
- /// handle values outside its set gracefully and falls back
- /// to a default value when unknown.
- ///
- /// If this property is not present the client only supports
- /// the completion items kinds from `Text` to `Reference` as defined in
- /// the initial version of the protocol.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub value_set: Option<Vec<CompletionItemKind>>,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct CompletionClientCapabilities {
- /// Whether completion supports dynamic registration.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub dynamic_registration: Option<bool>,
-
- /// The client supports the following `CompletionItem` specific
- /// capabilities.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub completion_item: Option<CompletionItemCapability>,
-
- #[serde(skip_serializing_if = "Option::is_none")]
- pub completion_item_kind: Option<CompletionItemKindCapability>,
-
- /// The client supports to send additional context information for a
- /// `textDocument/completion` requestion.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub context_support: Option<bool>,
-
- /// The client's default when the completion item doesn't provide a
- /// `insertTextMode` property.
- ///
- /// @since 3.17.0
- #[cfg(feature = "proposed")]
- #[serde(skip_serializing_if = "Option::is_none")]
- pub insert_text_mode: Option<InsertTextMode>,
-}
-
-/// A special text edit to provide an insert and a replace operation.
-///
-/// @since 3.16.0
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct InsertReplaceEdit {
- /// The string to be inserted.
- pub new_text: String,
-
- /// The range if the insert is requested
- pub insert: Range,
-
- /// The range if the replace is requested.
- pub replace: Range,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(untagged)]
-pub enum CompletionTextEdit {
- Edit(TextEdit),
- InsertAndReplace(InsertReplaceEdit),
-}
-
-impl From<TextEdit> for CompletionTextEdit {
- fn from(edit: TextEdit) -> Self {
- CompletionTextEdit::Edit(edit)
- }
-}
-
-impl From<InsertReplaceEdit> for CompletionTextEdit {
- fn from(edit: InsertReplaceEdit) -> Self {
- CompletionTextEdit::InsertAndReplace(edit)
- }
-}
-
-/// Completion options.
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct CompletionOptions {
- /// The server provides support to resolve additional information for a completion item.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub resolve_provider: Option<bool>,
-
- /// Most tools trigger completion request automatically without explicitly
- /// requesting it using a keyboard shortcut (e.g. Ctrl+Space). Typically they
- /// do so when the user starts to type an identifier. For example if the user
- /// types `c` in a JavaScript file code complete will automatically pop up
- /// present `console` besides others as a completion item. Characters that
- /// make up identifiers don't need to be listed here.
- ///
- /// If code complete should automatically be trigger on characters not being
- /// valid inside an identifier (for example `.` in JavaScript) list them in
- /// `triggerCharacters`.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub trigger_characters: Option<Vec<String>>,
-
- /// The list of all possible characters that commit a completion. This field
- /// can be used if clients don't support individual commit characters per
- /// completion item. See client capability
- /// `completion.completionItem.commitCharactersSupport`.
- ///
- /// If a server provides both `allCommitCharacters` and commit characters on
- /// an individual completion item the ones on the completion item win.
- ///
- /// @since 3.2.0
- #[serde(skip_serializing_if = "Option::is_none")]
- pub all_commit_characters: Option<Vec<String>>,
-
- #[serde(flatten)]
- pub work_done_progress_options: WorkDoneProgressOptions,
-
- /// The server supports the following `CompletionItem` specific
- /// capabilities.
- ///
- /// @since 3.17.0 - proposed state
- #[cfg(feature = "proposed")]
- #[serde(skip_serializing_if = "Option::is_none")]
- pub completion_item: Option<CompletionOptionsCompletionItem>,
-}
-
-#[cfg(feature = "proposed")]
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct CompletionOptionsCompletionItem {
- /// The server has support for completion item label
- /// details (see also `CompletionItemLabelDetails`) when receiving
- /// a completion item in a resolve call.
- ///
- /// @since 3.17.0 - proposed state
- #[serde(skip_serializing_if = "Option::is_none")]
- pub label_details_support: Option<bool>,
-}
-
-#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
-pub struct CompletionRegistrationOptions {
- #[serde(flatten)]
- pub text_document_registration_options: TextDocumentRegistrationOptions,
-
- #[serde(flatten)]
- pub completion_options: CompletionOptions,
-}
-
-#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
-#[serde(untagged)]
-pub enum CompletionResponse {
- Array(Vec<CompletionItem>),
- List(CompletionList),
-}
-
-impl From<Vec<CompletionItem>> for CompletionResponse {
- fn from(items: Vec<CompletionItem>) -> Self {
- CompletionResponse::Array(items)
- }
-}
-
-impl From<CompletionList> for CompletionResponse {
- fn from(list: CompletionList) -> Self {
- CompletionResponse::List(list)
- }
-}
-
-#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct CompletionParams {
- // This field was "mixed-in" from TextDocumentPositionParams
- #[serde(flatten)]
- pub text_document_position: TextDocumentPositionParams,
-
- #[serde(flatten)]
- pub work_done_progress_params: WorkDoneProgressParams,
-
- #[serde(flatten)]
- pub partial_result_params: PartialResultParams,
-
- // CompletionParams properties:
- #[serde(skip_serializing_if = "Option::is_none")]
- pub context: Option<CompletionContext>,
-}
-
-#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct CompletionContext {
- /// How the completion was triggered.
- pub trigger_kind: CompletionTriggerKind,
-
- /// The trigger character (a single character) that has trigger code complete.
- /// Is undefined if `triggerKind !== CompletionTriggerKind.TriggerCharacter`
- #[serde(skip_serializing_if = "Option::is_none")]
- pub trigger_character: Option<String>,
-}
-
-/// How a completion was triggered.
-#[derive(Eq, PartialEq, Clone, Copy, Deserialize, Serialize)]
-#[serde(transparent)]
-pub struct CompletionTriggerKind(i32);
-lsp_enum! {
-impl CompletionTriggerKind {
- pub const INVOKED: CompletionTriggerKind = CompletionTriggerKind(1);
- pub const TRIGGER_CHARACTER: CompletionTriggerKind = CompletionTriggerKind(2);
- pub const TRIGGER_FOR_INCOMPLETE_COMPLETIONS: CompletionTriggerKind = CompletionTriggerKind(3);
-}
-}
-
-/// Represents a collection of [completion items](#CompletionItem) to be presented
-/// in the editor.
-#[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct CompletionList {
- /// This list it not complete. Further typing should result in recomputing
- /// this list.
- pub is_incomplete: bool,
-
- /// The completion items.
- pub items: Vec<CompletionItem>,
-}
-
-#[derive(Debug, PartialEq, Default, Deserialize, Serialize, Clone)]
-#[serde(rename_all = "camelCase")]
-pub struct CompletionItem {
- /// The label of this completion item. By default
- /// also the text that is inserted when selecting
- /// this completion.
- pub label: String,
-
- /// Additional details for the label
- ///
- /// @since 3.17.0 - proposed state
- ///
- #[cfg(feature = "proposed")]
- #[serde(skip_serializing_if = "Option::is_none")]
- pub label_details: Option<CompletionItemLabelDetails>,
-
- /// The kind of this completion item. Based of the kind
- /// an icon is chosen by the editor.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub kind: Option<CompletionItemKind>,
-
- /// A human-readable string with additional information
- /// about this item, like type or symbol information.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub detail: Option<String>,
-
- /// A human-readable string that represents a doc-comment.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub documentation: Option<Documentation>,
-
- /// Indicates if this item is deprecated.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub deprecated: Option<bool>,
-
- /// Select this item when showing.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub preselect: Option<bool>,
-
- /// A string that should be used when comparing this item
- /// with other items. When `falsy` the label is used
- /// as the sort text for this item.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub sort_text: Option<String>,
-
- /// A string that should be used when filtering a set of
- /// completion items. When `falsy` the label is used as the
- /// filter text for this item.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub filter_text: Option<String>,
-
- /// A string that should be inserted into a document when selecting
- /// this completion. When `falsy` the label is used as the insert text
- /// for this item.
- ///
- /// The `insertText` is subject to interpretation by the client side.
- /// Some tools might not take the string literally. For example
- /// VS Code when code complete is requested in this example
- /// `con<cursor position>` and a completion item with an `insertText` of
- /// `console` is provided it will only insert `sole`. Therefore it is
- /// recommended to use `textEdit` instead since it avoids additional client
- /// side interpretation.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub insert_text: Option<String>,
-
- /// The format of the insert text. The format applies to both the `insertText` property
- /// and the `newText` property of a provided `textEdit`. If omitted defaults to `InsertTextFormat.PlainText`.
- ///
- /// @since 3.16.0
- #[serde(skip_serializing_if = "Option::is_none")]
- pub insert_text_format: Option<InsertTextFormat>,
-
- /// How whitespace and indentation is handled during completion
- /// item insertion. If not provided the client's default value is used.
- ///
- /// @since 3.16.0
- #[serde(skip_serializing_if = "Option::is_none")]
- pub insert_text_mode: Option<InsertTextMode>,
-
- /// An edit which is applied to a document when selecting
- /// this completion. When an edit is provided the value of
- /// insertText is ignored.
- ///
- /// Most editors support two different operation when accepting a completion item. One is to insert a
-
- /// completion text and the other is to replace an existing text with a completion text. Since this can
- /// usually not predetermined by a server it can report both ranges. Clients need to signal support for
- /// `InsertReplaceEdits` via the `textDocument.completion.insertReplaceSupport` client capability
- /// property.
- ///
- /// *Note 1:* The text edit's range as well as both ranges from a insert replace edit must be a
- /// [single line] and they must contain the position at which completion has been requested.
- /// *Note 2:* If an `InsertReplaceEdit` is returned the edit's insert range must be a prefix of
- /// the edit's replace range, that means it must be contained and starting at the same position.
- ///
- /// @since 3.16.0 additional type `InsertReplaceEdit`
- #[serde(skip_serializing_if = "Option::is_none")]
- pub text_edit: Option<CompletionTextEdit>,
-
- /// An optional array of additional text edits that are applied when
- /// selecting this completion. Edits must not overlap with the main edit
- /// nor with themselves.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub additional_text_edits: Option<Vec<TextEdit>>,
-
- /// An optional command that is executed *after* inserting this completion. *Note* that
- /// additional modifications to the current document should be described with the
- /// additionalTextEdits-property.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub command: Option<Command>,
-
- /// An optional set of characters that when pressed while this completion is
- /// active will accept it first and then type that character. *Note* that all
- /// commit characters should have `length=1` and that superfluous characters
- /// will be ignored.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub commit_characters: Option<Vec<String>>,
-
- /// An data entry field that is preserved on a completion item between
- /// a completion and a completion resolve request.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub data: Option<Value>,
-
- /// Tags for this completion item.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub tags: Option<Vec<CompletionItemTag>>,
-}
-
-impl CompletionItem {
- /// Create a CompletionItem with the minimum possible info (label and detail).
- pub fn new_simple(label: String, detail: String) -> CompletionItem {
- CompletionItem {
- label,
- detail: Some(detail),
- ..Self::default()
- }
- }
-}
-
-/// Additional details for a completion item label.
-///
-/// @since 3.17.0 - proposed state
-#[cfg(feature = "proposed")]
-#[derive(Debug, PartialEq, Default, Deserialize, Serialize, Clone)]
-#[serde(rename_all = "camelCase")]
-pub struct CompletionItemLabelDetails {
- /// An optional string which is rendered less prominently directly after
- /// {@link CompletionItemLabel.label label}, without any spacing. Should be
- /// used for function signatures or type annotations.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub detail: Option<String>,
-
- /// An optional string which is rendered less prominently after
- /// {@link CompletionItemLabel.detail}. Should be used for fully qualified
- /// names or file path.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub description: Option<String>,
-}
-
-#[cfg(test)]
-mod tests {
- use super::*;
- use crate::tests::test_deserialization;
-
- #[test]
- fn test_tag_support_deserialization() {
- let mut empty = CompletionItemCapability::default();
- empty.tag_support = None;
-
- test_deserialization(r#"{}"#, &empty);
- test_deserialization(r#"{"tagSupport": false}"#, &empty);
-
- let mut t = CompletionItemCapability::default();
- t.tag_support = Some(TagSupport { value_set: vec![] });
- test_deserialization(r#"{"tagSupport": true}"#, &t);
-
- let mut t = CompletionItemCapability::default();
- t.tag_support = Some(TagSupport {
- value_set: vec![CompletionItemTag::DEPRECATED],
- });
- test_deserialization(r#"{"tagSupport": {"valueSet": [1]}}"#, &t);
- }
-
- #[test]
- fn test_debug_enum() {
- assert_eq!(format!("{:?}", CompletionItemKind::TEXT), "Text");
- assert_eq!(
- format!("{:?}", CompletionItemKind::TYPE_PARAMETER),
- "TypeParameter"
- );
- }
-
- #[test]
- fn test_try_from_enum() {
- use std::convert::TryInto;
- assert_eq!("Text".try_into(), Ok(CompletionItemKind::TEXT));
- assert_eq!(
- "TypeParameter".try_into(),
- Ok(CompletionItemKind::TYPE_PARAMETER)
- );
- }
-}
+use serde::{Deserialize, Serialize};
+
+use crate::{
+ Command, Documentation, MarkupKind, PartialResultParams, TagSupport,
+ TextDocumentPositionParams, TextDocumentRegistrationOptions, TextEdit, WorkDoneProgressOptions,
+ WorkDoneProgressParams,
+};
+
+use crate::Range;
+use serde_json::Value;
+use std::fmt::Debug;
+
+/// Defines how to interpret the insert text in a completion item
+#[derive(Eq, PartialEq, Clone, Copy, Serialize, Deserialize)]
+#[serde(transparent)]
+pub struct InsertTextFormat(i32);
+lsp_enum! {
+impl InsertTextFormat {
+ pub const PLAIN_TEXT: InsertTextFormat = InsertTextFormat(1);
+ pub const SNIPPET: InsertTextFormat = InsertTextFormat(2);
+}
+}
+
+/// The kind of a completion entry.
+#[derive(Eq, PartialEq, Clone, Copy, Serialize, Deserialize)]
+#[serde(transparent)]
+pub struct CompletionItemKind(i32);
+lsp_enum! {
+impl CompletionItemKind {
+ pub const TEXT: CompletionItemKind = CompletionItemKind(1);
+ pub const METHOD: CompletionItemKind = CompletionItemKind(2);
+ pub const FUNCTION: CompletionItemKind = CompletionItemKind(3);
+ pub const CONSTRUCTOR: CompletionItemKind = CompletionItemKind(4);
+ pub const FIELD: CompletionItemKind = CompletionItemKind(5);
+ pub const VARIABLE: CompletionItemKind = CompletionItemKind(6);
+ pub const CLASS: CompletionItemKind = CompletionItemKind(7);
+ pub const INTERFACE: CompletionItemKind = CompletionItemKind(8);
+ pub const MODULE: CompletionItemKind = CompletionItemKind(9);
+ pub const PROPERTY: CompletionItemKind = CompletionItemKind(10);
+ pub const UNIT: CompletionItemKind = CompletionItemKind(11);
+ pub const VALUE: CompletionItemKind = CompletionItemKind(12);
+ pub const ENUM: CompletionItemKind = CompletionItemKind(13);
+ pub const KEYWORD: CompletionItemKind = CompletionItemKind(14);
+ pub const SNIPPET: CompletionItemKind = CompletionItemKind(15);
+ pub const COLOR: CompletionItemKind = CompletionItemKind(16);
+ pub const FILE: CompletionItemKind = CompletionItemKind(17);
+ pub const REFERENCE: CompletionItemKind = CompletionItemKind(18);
+ pub const FOLDER: CompletionItemKind = CompletionItemKind(19);
+ pub const ENUM_MEMBER: CompletionItemKind = CompletionItemKind(20);
+ pub const CONSTANT: CompletionItemKind = CompletionItemKind(21);
+ pub const STRUCT: CompletionItemKind = CompletionItemKind(22);
+ pub const EVENT: CompletionItemKind = CompletionItemKind(23);
+ pub const OPERATOR: CompletionItemKind = CompletionItemKind(24);
+ pub const TYPE_PARAMETER: CompletionItemKind = CompletionItemKind(25);
+}
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct CompletionItemCapability {
+ /// Client supports snippets as insert text.
+ ///
+ /// A snippet can define tab stops and placeholders with `$1`, `$2`
+ /// and `${3:foo}`. `$0` defines the final tab stop; it defaults to
+ /// the end of the snippet. Placeholders with equal identifiers are linked,
+ /// that is, typing in one will update others too.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub snippet_support: Option<bool>,
+
+ /// Client supports commit characters on a completion item.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub commit_characters_support: Option<bool>,
+
+ /// Client supports the following content formats for the documentation
+ /// property. The order describes the preferred format of the client.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub documentation_format: Option<Vec<MarkupKind>>,
+
+ /// Client supports the deprecated property on a completion item.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub deprecated_support: Option<bool>,
+
+ /// Client supports the preselect property on a completion item.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub preselect_support: Option<bool>,
+
+ /// Client supports the tag property on a completion item. Clients supporting
+ /// tags have to handle unknown tags gracefully. Clients especially need to
+ /// preserve unknown tags when sending a completion item back to the server in
+ /// a resolve call.
+ #[serde(
+ default,
+ skip_serializing_if = "Option::is_none",
+ deserialize_with = "TagSupport::deserialize_compat"
+ )]
+ pub tag_support: Option<TagSupport<CompletionItemTag>>,
+
+ /// Client supports insert replace edits to control different behavior if a
+ /// completion item is inserted in the text or should replace text.
+ ///
+ /// @since 3.16.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub insert_replace_support: Option<bool>,
+
+ /// Indicates which properties a client can resolve lazily on a completion
+ /// item. Before version 3.16.0 only the predefined properties `documentation`
+ /// and `details` could be resolved lazily.
+ ///
+ /// @since 3.16.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub resolve_support: Option<CompletionItemCapabilityResolveSupport>,
+
+ /// The client supports the `insertTextMode` property on
+ /// a completion item to override the whitespace handling mode
+ /// as defined by the client.
+ ///
+ /// @since 3.16.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub insert_text_mode_support: Option<InsertTextModeSupport>,
+
+ /// The client has support for completion item label
+ /// details (see also `CompletionItemLabelDetails`).
+ ///
+ /// @since 3.17.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub label_details_support: Option<bool>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct CompletionItemCapabilityResolveSupport {
+ /// The properties that a client can resolve lazily.
+ pub properties: Vec<String>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct InsertTextModeSupport {
+ pub value_set: Vec<InsertTextMode>,
+}
+
+/// How whitespace and indentation is handled during completion
+/// item insertion.
+///
+/// @since 3.16.0
+#[derive(Eq, PartialEq, Clone, Copy, Serialize, Deserialize)]
+#[serde(transparent)]
+pub struct InsertTextMode(i32);
+lsp_enum! {
+impl InsertTextMode {
+ /// The insertion or replace string is taken as it is. If the
+ /// value is multi line the lines below the cursor will be
+ /// inserted using the indentation defined in the string value.
+ /// The client will not apply any kind of adjustments to the
+ /// string.
+ pub const AS_IS: InsertTextMode = InsertTextMode(1);
+
+ /// The editor adjusts leading whitespace of new lines so that
+ /// they match the indentation up to the cursor of the line for
+ /// which the item is accepted.
+ ///
+ /// Consider a line like this: <2tabs><cursor><3tabs>foo. Accepting a
+ /// multi-line completion item that is indented using 2 tabs means that all
+ /// following lines inserted will be indented using 2 tabs as well.
+ pub const ADJUST_INDENTATION: InsertTextMode = InsertTextMode(2);
+}
+}
+
+#[derive(Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(transparent)]
+pub struct CompletionItemTag(i32);
+lsp_enum! {
+impl CompletionItemTag {
+ pub const DEPRECATED: CompletionItemTag = CompletionItemTag(1);
+}
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct CompletionItemKindCapability {
+ /// The completion item kind values the client supports. When this
+ /// property exists the client also guarantees that it will
+ /// handle values outside its set gracefully and fall back
+ /// to a default value when unknown.
+ ///
+ /// If this property is not present the client only supports
+ /// the completion item kinds from `Text` to `Reference` as defined in
+ /// the initial version of the protocol.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub value_set: Option<Vec<CompletionItemKind>>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct CompletionListCapability {
+ /// The client supports the following itemDefaults on
+ /// a completion list.
+ ///
+ /// The value lists the supported property names of the
+ /// `CompletionList.itemDefaults` object. If omitted
+ /// no properties are supported.
+ ///
+ /// @since 3.17.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub item_defaults: Option<Vec<String>>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct CompletionClientCapabilities {
+ /// Whether completion supports dynamic registration.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub dynamic_registration: Option<bool>,
+
+ /// The client supports the following `CompletionItem` specific
+ /// capabilities.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub completion_item: Option<CompletionItemCapability>,
+
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub completion_item_kind: Option<CompletionItemKindCapability>,
+
+ /// The client supports sending additional context information for a
+ /// `textDocument/completion` request.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub context_support: Option<bool>,
+
+ /// The client's default when the completion item doesn't provide an
+ /// `insertTextMode` property.
+ ///
+ /// @since 3.17.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub insert_text_mode: Option<InsertTextMode>,
+
+ /// The client supports the following `CompletionList` specific
+ /// capabilities.
+ ///
+ /// @since 3.17.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub completion_list: Option<CompletionListCapability>,
+}
+
+/// A special text edit to provide an insert and a replace operation.
+///
+/// @since 3.16.0
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct InsertReplaceEdit {
+ /// The string to be inserted.
+ pub new_text: String,
+
+ /// The range if the insert is requested.
+ pub insert: Range,
+
+ /// The range if the replace is requested.
+ pub replace: Range,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(untagged)]
+pub enum CompletionTextEdit {
+ Edit(TextEdit),
+ InsertAndReplace(InsertReplaceEdit),
+}
+
+impl From<TextEdit> for CompletionTextEdit {
+ fn from(edit: TextEdit) -> Self {
+ CompletionTextEdit::Edit(edit)
+ }
+}
+
+impl From<InsertReplaceEdit> for CompletionTextEdit {
+ fn from(edit: InsertReplaceEdit) -> Self {
+ CompletionTextEdit::InsertAndReplace(edit)
+ }
+}
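
The two `From` impls above let a server hand either edit shape to a completion item's `text_edit` field via `.into()`; a minimal sketch (the ranges and text are illustrative assumptions):

use lsp_types::{CompletionTextEdit, InsertReplaceEdit, Position, Range, TextEdit};

fn example_edits() -> (CompletionTextEdit, CompletionTextEdit) {
    let range = Range::new(Position::new(0, 0), Position::new(0, 3));
    // Plain edit: replace the range with the completion text.
    let plain: CompletionTextEdit = TextEdit::new(range, "console".to_string()).into();
    // Insert/replace edit: distinct ranges for the insert and replace behaviors,
    // with the insert range a prefix of the replace range as required by the spec.
    let both: CompletionTextEdit = InsertReplaceEdit {
        new_text: "console".to_string(),
        insert: range,
        replace: range,
    }
    .into();
    (plain, both)
}
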
+
+/// Completion options.
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct CompletionOptions {
+ /// The server provides support to resolve additional information for a completion item.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub resolve_provider: Option<bool>,
+
+ /// Most tools trigger completion requests automatically without explicitly
+ /// requesting it using a keyboard shortcut (e.g. Ctrl+Space). Typically they
+ /// do so when the user starts to type an identifier. For example, if the user
+ /// types `c` in a JavaScript file, code complete will automatically pop up and
+ /// present `console`, besides others, as a completion item. Characters that
+ /// make up identifiers don't need to be listed here.
+ ///
+ /// If code complete should automatically be triggered on characters not being
+ /// valid inside an identifier (for example `.` in JavaScript) list them in
+ /// `triggerCharacters`.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub trigger_characters: Option<Vec<String>>,
+
+ /// The list of all possible characters that commit a completion. This field
+ /// can be used if clients don't support individual commit characters per
+ /// completion item. See client capability
+ /// `completion.completionItem.commitCharactersSupport`.
+ ///
+ /// If a server provides both `allCommitCharacters` and commit characters on
+ /// an individual completion item the ones on the completion item win.
+ ///
+ /// @since 3.2.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub all_commit_characters: Option<Vec<String>>,
+
+ #[serde(flatten)]
+ pub work_done_progress_options: WorkDoneProgressOptions,
+
+ /// The server supports the following `CompletionItem` specific
+ /// capabilities.
+ ///
+ /// @since 3.17.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub completion_item: Option<CompletionOptionsCompletionItem>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct CompletionOptionsCompletionItem {
+ /// The server has support for completion item label
+ /// details (see also `CompletionItemLabelDetails`) when receiving
+ /// a completion item in a resolve call.
+ ///
+ /// @since 3.17.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub label_details_support: Option<bool>,
+}
+
+#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
+pub struct CompletionRegistrationOptions {
+ #[serde(flatten)]
+ pub text_document_registration_options: TextDocumentRegistrationOptions,
+
+ #[serde(flatten)]
+ pub completion_options: CompletionOptions,
+}
+
+#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
+#[serde(untagged)]
+pub enum CompletionResponse {
+ Array(Vec<CompletionItem>),
+ List(CompletionList),
+}
+
+impl From<Vec<CompletionItem>> for CompletionResponse {
+ fn from(items: Vec<CompletionItem>) -> Self {
+ CompletionResponse::Array(items)
+ }
+}
+
+impl From<CompletionList> for CompletionResponse {
+ fn from(list: CompletionList) -> Self {
+ CompletionResponse::List(list)
+ }
+}
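
Either response shape converts into `CompletionResponse` with `.into()`; a minimal sketch using the `new_simple` constructor defined further down in this file (the labels are illustrative):

use lsp_types::{CompletionItem, CompletionList, CompletionResponse};

fn responses() -> (CompletionResponse, CompletionResponse) {
    let items = vec![CompletionItem::new_simple(
        "len".to_string(),
        "fn len(&self) -> usize".to_string(),
    )];
    // A bare array of items...
    let array: CompletionResponse = items.clone().into();
    // ...or a list that can flag itself as incomplete to request re-querying.
    let list: CompletionResponse = CompletionList {
        is_incomplete: true,
        items,
    }
    .into();
    (array, list)
}
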
+
+#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct CompletionParams {
+ // This field was "mixed-in" from TextDocumentPositionParams
+ #[serde(flatten)]
+ pub text_document_position: TextDocumentPositionParams,
+
+ #[serde(flatten)]
+ pub work_done_progress_params: WorkDoneProgressParams,
+
+ #[serde(flatten)]
+ pub partial_result_params: PartialResultParams,
+
+ // CompletionParams properties:
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub context: Option<CompletionContext>,
+}
+
+#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct CompletionContext {
+ /// How the completion was triggered.
+ pub trigger_kind: CompletionTriggerKind,
+
+ /// The trigger character (a single character) that has triggered code complete.
+ /// It is undefined if `triggerKind !== CompletionTriggerKind.TriggerCharacter`.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub trigger_character: Option<String>,
+}
+
+/// How a completion was triggered.
+#[derive(Eq, PartialEq, Clone, Copy, Deserialize, Serialize)]
+#[serde(transparent)]
+pub struct CompletionTriggerKind(i32);
+lsp_enum! {
+impl CompletionTriggerKind {
+ pub const INVOKED: CompletionTriggerKind = CompletionTriggerKind(1);
+ pub const TRIGGER_CHARACTER: CompletionTriggerKind = CompletionTriggerKind(2);
+ pub const TRIGGER_FOR_INCOMPLETE_COMPLETIONS: CompletionTriggerKind = CompletionTriggerKind(3);
+}
+}
+
+/// Represents a collection of [completion items](#CompletionItem) to be presented
+/// in the editor.
+#[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct CompletionList {
+ /// This list is not complete. Further typing should result in recomputing
+ /// this list.
+ pub is_incomplete: bool,
+
+ /// The completion items.
+ pub items: Vec<CompletionItem>,
+}
+
+#[derive(Debug, PartialEq, Default, Deserialize, Serialize, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct CompletionItem {
+ /// The label of this completion item. By default
+ /// also the text that is inserted when selecting
+ /// this completion.
+ pub label: String,
+
+ /// Additional details for the label
+ ///
+ /// @since 3.17.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub label_details: Option<CompletionItemLabelDetails>,
+
+ /// The kind of this completion item. Based on the kind
+ /// an icon is chosen by the editor.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub kind: Option<CompletionItemKind>,
+
+ /// A human-readable string with additional information
+ /// about this item, like type or symbol information.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub detail: Option<String>,
+
+ /// A human-readable string that represents a doc-comment.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub documentation: Option<Documentation>,
+
+ /// Indicates if this item is deprecated.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub deprecated: Option<bool>,
+
+ /// Select this item when showing.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub preselect: Option<bool>,
+
+ /// A string that should be used when comparing this item
+ /// with other items. When `falsy` the label is used
+ /// as the sort text for this item.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub sort_text: Option<String>,
+
+ /// A string that should be used when filtering a set of
+ /// completion items. When `falsy` the label is used as the
+ /// filter text for this item.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub filter_text: Option<String>,
+
+ /// A string that should be inserted into a document when selecting
+ /// this completion. When `falsy` the label is used as the insert text
+ /// for this item.
+ ///
+ /// The `insertText` is subject to interpretation by the client side.
+ /// Some tools might not take the string literally. For example, in VS Code,
+ /// when code complete is requested for `con<cursor position>` and a
+ /// completion item with an `insertText` of `console` is provided, it will
+ /// only insert `sole`. Therefore it is
+ /// recommended to use `textEdit` instead since it avoids additional client
+ /// side interpretation.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub insert_text: Option<String>,
+
+ /// The format of the insert text. The format applies to both the `insertText` property
+ /// and the `newText` property of a provided `textEdit`. If omitted defaults to `InsertTextFormat.PlainText`.
+ ///
+ /// @since 3.16.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub insert_text_format: Option<InsertTextFormat>,
+
+ /// How whitespace and indentation is handled during completion
+ /// item insertion. If not provided the client's default value depends on
+ /// the `textDocument.completion.insertTextMode` client capability.
+ ///
+ /// @since 3.16.0
+ /// @since 3.17.0 - support for `textDocument.completion.insertTextMode`
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub insert_text_mode: Option<InsertTextMode>,
+
+ /// An edit which is applied to a document when selecting
+ /// this completion. When an edit is provided the value of
+ /// insertText is ignored.
+ ///
+ /// Most editors support two different operations when accepting a completion item. One is to insert a
+ /// completion text and the other is to replace an existing text with a completion text. Since this can
+ /// usually not be predetermined by a server it can report both ranges. Clients need to signal support for
+ /// `InsertReplaceEdits` via the `textDocument.completion.insertReplaceSupport` client capability
+ /// property.
+ ///
+ /// *Note 1:* The text edit's range as well as both ranges from an insert replace edit must be a
+ /// [single line] and they must contain the position at which completion has been requested.
+ /// *Note 2:* If an `InsertReplaceEdit` is returned the edit's insert range must be a prefix of
+ /// the edit's replace range, that means it must be contained and starting at the same position.
+ ///
+ /// @since 3.16.0 additional type `InsertReplaceEdit`
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub text_edit: Option<CompletionTextEdit>,
+
+ /// An optional array of additional text edits that are applied when
+ /// selecting this completion. Edits must not overlap with the main edit
+ /// nor with themselves.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub additional_text_edits: Option<Vec<TextEdit>>,
+
+ /// An optional command that is executed *after* inserting this completion. *Note* that
+ /// additional modifications to the current document should be described with the
+ /// additionalTextEdits-property.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub command: Option<Command>,
+
+ /// An optional set of characters that when pressed while this completion is
+ /// active will accept it first and then type that character. *Note* that all
+ /// commit characters should have `length=1` and that superfluous characters
+ /// will be ignored.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub commit_characters: Option<Vec<String>>,
+
+ /// A data entry field that is preserved on a completion item between
+ /// a completion and a completion resolve request.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub data: Option<Value>,
+
+ /// Tags for this completion item.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub tags: Option<Vec<CompletionItemTag>>,
+}
+
+impl CompletionItem {
+ /// Create a CompletionItem with the minimum possible info (label and detail).
+ pub fn new_simple(label: String, detail: String) -> CompletionItem {
+ CompletionItem {
+ label,
+ detail: Some(detail),
+ ..Self::default()
+ }
+ }
+}
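
Since `CompletionItem` also derives `Default`, richer items can be built with struct update syntax; a minimal sketch (the kind, label, and snippet text are illustrative assumptions):

use lsp_types::{CompletionItem, CompletionItemKind, InsertTextFormat};

fn snippet_item() -> CompletionItem {
    CompletionItem {
        label: "println!".to_string(),
        kind: Some(CompletionItemKind::SNIPPET),
        insert_text: Some("println!(\"$1\")$0".to_string()),
        insert_text_format: Some(InsertTextFormat::SNIPPET),
        // Everything else stays None / default and is skipped during serialization.
        ..Default::default()
    }
}
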
+
+/// Additional details for a completion item label.
+///
+/// @since 3.17.0
+#[derive(Debug, PartialEq, Default, Deserialize, Serialize, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct CompletionItemLabelDetails {
+ /// An optional string which is rendered less prominently directly after
+ /// {@link CompletionItemLabel.label label}, without any spacing. Should be
+ /// used for function signatures or type annotations.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub detail: Option<String>,
+
+ /// An optional string which is rendered less prominently after
+ /// {@link CompletionItemLabel.detail}. Should be used for fully qualified
+ /// names or file paths.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub description: Option<String>,
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::tests::test_deserialization;
+
+ #[test]
+ fn test_tag_support_deserialization() {
+ let mut empty = CompletionItemCapability::default();
+ empty.tag_support = None;
+
+ test_deserialization(r#"{}"#, &empty);
+ test_deserialization(r#"{"tagSupport": false}"#, &empty);
+
+ let mut t = CompletionItemCapability::default();
+ t.tag_support = Some(TagSupport { value_set: vec![] });
+ test_deserialization(r#"{"tagSupport": true}"#, &t);
+
+ let mut t = CompletionItemCapability::default();
+ t.tag_support = Some(TagSupport {
+ value_set: vec![CompletionItemTag::DEPRECATED],
+ });
+ test_deserialization(r#"{"tagSupport": {"valueSet": [1]}}"#, &t);
+ }
+
+ #[test]
+ fn test_debug_enum() {
+ assert_eq!(format!("{:?}", CompletionItemKind::TEXT), "Text");
+ assert_eq!(
+ format!("{:?}", CompletionItemKind::TYPE_PARAMETER),
+ "TypeParameter"
+ );
+ }
+
+ #[test]
+ fn test_try_from_enum() {
+ use std::convert::TryInto;
+ assert_eq!("Text".try_into(), Ok(CompletionItemKind::TEXT));
+ assert_eq!(
+ "TypeParameter".try_into(),
+ Ok(CompletionItemKind::TYPE_PARAMETER)
+ );
+ }
+}
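
Because nearly every optional field above carries `skip_serializing_if = "Option::is_none"`, completion items serialize compactly over the wire; a minimal sketch assuming `serde_json` is available as it is in this crate:

use lsp_types::CompletionItem;

fn main() -> Result<(), serde_json::Error> {
    let item = CompletionItem::new_simple("foo".to_string(), "fn foo()".to_string());
    // Unset Option fields are omitted entirely, so this prints:
    // {"label":"foo","detail":"fn foo()"}
    println!("{}", serde_json::to_string(&item)?);
    Ok(())
}
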
diff --git a/vendor/lsp-types/src/document_highlight.rs b/vendor/lsp-types/src/document_highlight.rs
index 9a97c104b..d7cd1ae18 100644
--- a/vendor/lsp-types/src/document_highlight.rs
+++ b/vendor/lsp-types/src/document_highlight.rs
@@ -1,51 +1,51 @@
-use serde::{Deserialize, Serialize};
-
-use crate::{
- DynamicRegistrationClientCapabilities, PartialResultParams, Range, TextDocumentPositionParams,
- WorkDoneProgressParams,
-};
-
-pub type DocumentHighlightClientCapabilities = DynamicRegistrationClientCapabilities;
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct DocumentHighlightParams {
- #[serde(flatten)]
- pub text_document_position_params: TextDocumentPositionParams,
-
- #[serde(flatten)]
- pub work_done_progress_params: WorkDoneProgressParams,
-
- #[serde(flatten)]
- pub partial_result_params: PartialResultParams,
-}
-
-/// A document highlight is a range inside a text document which deserves
-/// special attention. Usually a document highlight is visualized by changing
-/// the background color of its range.
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-pub struct DocumentHighlight {
- /// The range this highlight applies to.
- pub range: Range,
-
- /// The highlight kind, default is DocumentHighlightKind.Text.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub kind: Option<DocumentHighlightKind>,
-}
-
-/// A document highlight kind.
-#[derive(Eq, PartialEq, Copy, Clone, Deserialize, Serialize)]
-#[serde(transparent)]
-pub struct DocumentHighlightKind(i32);
-lsp_enum! {
-impl DocumentHighlightKind {
- /// A textual occurrance.
- pub const TEXT: DocumentHighlightKind = DocumentHighlightKind(1);
-
- /// Read-access of a symbol, like reading a variable.
- pub const READ: DocumentHighlightKind = DocumentHighlightKind(2);
-
- /// Write-access of a symbol, like writing to a variable.
- pub const WRITE: DocumentHighlightKind = DocumentHighlightKind(3);
-}
-}
+use serde::{Deserialize, Serialize};
+
+use crate::{
+ DynamicRegistrationClientCapabilities, PartialResultParams, Range, TextDocumentPositionParams,
+ WorkDoneProgressParams,
+};
+
+pub type DocumentHighlightClientCapabilities = DynamicRegistrationClientCapabilities;
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct DocumentHighlightParams {
+ #[serde(flatten)]
+ pub text_document_position_params: TextDocumentPositionParams,
+
+ #[serde(flatten)]
+ pub work_done_progress_params: WorkDoneProgressParams,
+
+ #[serde(flatten)]
+ pub partial_result_params: PartialResultParams,
+}
+
+/// A document highlight is a range inside a text document which deserves
+/// special attention. Usually a document highlight is visualized by changing
+/// the background color of its range.
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+pub struct DocumentHighlight {
+ /// The range this highlight applies to.
+ pub range: Range,
+
+ /// The highlight kind, default is DocumentHighlightKind.Text.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub kind: Option<DocumentHighlightKind>,
+}
+
+/// A document highlight kind.
+#[derive(Eq, PartialEq, Copy, Clone, Deserialize, Serialize)]
+#[serde(transparent)]
+pub struct DocumentHighlightKind(i32);
+lsp_enum! {
+impl DocumentHighlightKind {
+ /// A textual occurrence.
+ pub const TEXT: DocumentHighlightKind = DocumentHighlightKind(1);
+
+ /// Read-access of a symbol, like reading a variable.
+ pub const READ: DocumentHighlightKind = DocumentHighlightKind(2);
+
+ /// Write-access of a symbol, like writing to a variable.
+ pub const WRITE: DocumentHighlightKind = DocumentHighlightKind(3);
+}
+}
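
A minimal sketch of producing a highlight from these types (the range values are illustrative assumptions):

use lsp_types::{DocumentHighlight, DocumentHighlightKind, Position, Range};

fn read_highlight(line: u32, start: u32, end: u32) -> DocumentHighlight {
    DocumentHighlight {
        range: Range::new(Position::new(line, start), Position::new(line, end)),
        // READ marks a read access of the symbol; omitting `kind` falls back to TEXT.
        kind: Some(DocumentHighlightKind::READ),
    }
}
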
diff --git a/vendor/lsp-types/src/document_link.rs b/vendor/lsp-types/src/document_link.rs
index 1400dd96b..df57df29f 100644
--- a/vendor/lsp-types/src/document_link.rs
+++ b/vendor/lsp-types/src/document_link.rs
@@ -1,67 +1,67 @@
-use crate::{
- PartialResultParams, Range, TextDocumentIdentifier, WorkDoneProgressOptions,
- WorkDoneProgressParams,
-};
-use serde::{Deserialize, Serialize};
-use serde_json::Value;
-use url::Url;
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct DocumentLinkClientCapabilities {
- /// Whether document link supports dynamic registration.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub dynamic_registration: Option<bool>,
-
- /// Whether the client support the `tooltip` property on `DocumentLink`.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub tooltip_support: Option<bool>,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct DocumentLinkOptions {
- /// Document links have a resolve provider as well.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub resolve_provider: Option<bool>,
-
- #[serde(flatten)]
- pub work_done_progress_options: WorkDoneProgressOptions,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct DocumentLinkParams {
- /// The document to provide document links for.
- pub text_document: TextDocumentIdentifier,
-
- #[serde(flatten)]
- pub work_done_progress_params: WorkDoneProgressParams,
-
- #[serde(flatten)]
- pub partial_result_params: PartialResultParams,
-}
-
-/// A document link is a range in a text document that links to an internal or external resource, like another
-/// text document or a web site.
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-pub struct DocumentLink {
- /// The range this link applies to.
- pub range: Range,
- /// The uri this link points to.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub target: Option<Url>,
-
- /// The tooltip text when you hover over this link.
- ///
- /// If a tooltip is provided, is will be displayed in a string that includes instructions on how to
- /// trigger the link, such as `{0} (ctrl + click)`. The specific instructions vary depending on OS,
- /// user settings, and localization.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub tooltip: Option<String>,
-
- /// A data entry field that is preserved on a document link between a DocumentLinkRequest
- /// and a DocumentLinkResolveRequest.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub data: Option<Value>,
-}
+use crate::{
+ PartialResultParams, Range, TextDocumentIdentifier, WorkDoneProgressOptions,
+ WorkDoneProgressParams,
+};
+use serde::{Deserialize, Serialize};
+use serde_json::Value;
+use url::Url;
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct DocumentLinkClientCapabilities {
+ /// Whether document link supports dynamic registration.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub dynamic_registration: Option<bool>,
+
+ /// Whether the client supports the `tooltip` property on `DocumentLink`.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub tooltip_support: Option<bool>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct DocumentLinkOptions {
+ /// Document links have a resolve provider as well.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub resolve_provider: Option<bool>,
+
+ #[serde(flatten)]
+ pub work_done_progress_options: WorkDoneProgressOptions,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct DocumentLinkParams {
+ /// The document to provide document links for.
+ pub text_document: TextDocumentIdentifier,
+
+ #[serde(flatten)]
+ pub work_done_progress_params: WorkDoneProgressParams,
+
+ #[serde(flatten)]
+ pub partial_result_params: PartialResultParams,
+}
+
+/// A document link is a range in a text document that links to an internal or external resource, like another
+/// text document or a web site.
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+pub struct DocumentLink {
+ /// The range this link applies to.
+ pub range: Range,
+ /// The uri this link points to.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub target: Option<Url>,
+
+ /// The tooltip text when you hover over this link.
+ ///
+ /// If a tooltip is provided, it will be displayed in a string that includes instructions on how to
+ /// trigger the link, such as `{0} (ctrl + click)`. The specific instructions vary depending on OS,
+ /// user settings, and localization.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub tooltip: Option<String>,
+
+ /// A data entry field that is preserved on a document link between a DocumentLinkRequest
+ /// and a DocumentLinkResolveRequest.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub data: Option<Value>,
+}
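
A minimal sketch of a resolved document link (the URL, tooltip, and range are illustrative assumptions; `Url` is re-exported by the crate):

use lsp_types::{DocumentLink, Position, Range, Url};

fn readme_link() -> DocumentLink {
    DocumentLink {
        range: Range::new(Position::new(0, 0), Position::new(0, 10)),
        target: Some(Url::parse("https://example.com/README.md").expect("valid URL")),
        tooltip: Some("Open the project README".to_string()),
        data: None,
    }
}
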
diff --git a/vendor/lsp-types/src/document_symbols.rs b/vendor/lsp-types/src/document_symbols.rs
index f8b166f57..61d0c2520 100644
--- a/vendor/lsp-types/src/document_symbols.rs
+++ b/vendor/lsp-types/src/document_symbols.rs
@@ -1,132 +1,132 @@
-use crate::{
- Location, PartialResultParams, Range, SymbolKind, SymbolKindCapability, TextDocumentIdentifier,
- WorkDoneProgressParams,
-};
-
-use crate::{SymbolTag, TagSupport};
-
-use serde::{Deserialize, Serialize};
-
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct DocumentSymbolClientCapabilities {
- /// This capability supports dynamic registration.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub dynamic_registration: Option<bool>,
-
- /// Specific capabilities for the `SymbolKind`.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub symbol_kind: Option<SymbolKindCapability>,
-
- /// The client support hierarchical document symbols.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub hierarchical_document_symbol_support: Option<bool>,
-
- /// The client supports tags on `SymbolInformation`. Tags are supported on
- /// `DocumentSymbol` if `hierarchicalDocumentSymbolSupport` is set to true.
- /// Clients supporting tags have to handle unknown tags gracefully.
- ///
- /// @since 3.16.0
- #[serde(
- default,
- skip_serializing_if = "Option::is_none",
- deserialize_with = "TagSupport::deserialize_compat"
- )]
- pub tag_support: Option<TagSupport<SymbolTag>>,
-}
-
-#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
-#[serde(untagged)]
-pub enum DocumentSymbolResponse {
- Flat(Vec<SymbolInformation>),
- Nested(Vec<DocumentSymbol>),
-}
-
-impl From<Vec<SymbolInformation>> for DocumentSymbolResponse {
- fn from(info: Vec<SymbolInformation>) -> Self {
- DocumentSymbolResponse::Flat(info)
- }
-}
-
-impl From<Vec<DocumentSymbol>> for DocumentSymbolResponse {
- fn from(symbols: Vec<DocumentSymbol>) -> Self {
- DocumentSymbolResponse::Nested(symbols)
- }
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct DocumentSymbolParams {
- /// The text document.
- pub text_document: TextDocumentIdentifier,
-
- #[serde(flatten)]
- pub work_done_progress_params: WorkDoneProgressParams,
-
- #[serde(flatten)]
- pub partial_result_params: PartialResultParams,
-}
-
-/// Represents programming constructs like variables, classes, interfaces etc.
-/// that appear in a document. Document symbols can be hierarchical and they have two ranges:
-/// one that encloses its definition and one that points to its most interesting range,
-/// e.g. the range of an identifier.
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct DocumentSymbol {
- /// The name of this symbol.
- pub name: String,
- /// More detail for this symbol, e.g the signature of a function. If not provided the
- /// name is used.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub detail: Option<String>,
- /// The kind of this symbol.
- pub kind: SymbolKind,
- /// Tags for this completion item.
- /// since 3.16.0
- #[serde(skip_serializing_if = "Option::is_none")]
- pub tags: Option<Vec<SymbolTag>>,
- /// Indicates if this symbol is deprecated.
- #[serde(skip_serializing_if = "Option::is_none")]
- #[deprecated(note = "Use tags instead")]
- pub deprecated: Option<bool>,
- /// The range enclosing this symbol not including leading/trailing whitespace but everything else
- /// like comments. This information is typically used to determine if the the clients cursor is
- /// inside the symbol to reveal in the symbol in the UI.
- pub range: Range,
- /// The range that should be selected and revealed when this symbol is being picked, e.g the name of a function.
- /// Must be contained by the the `range`.
- pub selection_range: Range,
- /// Children of this symbol, e.g. properties of a class.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub children: Option<Vec<DocumentSymbol>>,
-}
-
-/// Represents information about programming constructs like variables, classes,
-/// interfaces etc.
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct SymbolInformation {
- /// The name of this symbol.
- pub name: String,
-
- /// The kind of this symbol.
- pub kind: SymbolKind,
-
- /// Tags for this completion item.
- /// since 3.16.0
- #[serde(skip_serializing_if = "Option::is_none")]
- pub tags: Option<Vec<SymbolTag>>,
-
- /// Indicates if this symbol is deprecated.
- #[serde(skip_serializing_if = "Option::is_none")]
- #[deprecated(note = "Use tags instead")]
- pub deprecated: Option<bool>,
-
- /// The location of this symbol.
- pub location: Location,
-
- /// The name of the symbol containing this symbol.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub container_name: Option<String>,
-}
+use crate::{
+ Location, PartialResultParams, Range, SymbolKind, SymbolKindCapability, TextDocumentIdentifier,
+ WorkDoneProgressParams,
+};
+
+use crate::{SymbolTag, TagSupport};
+
+use serde::{Deserialize, Serialize};
+
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct DocumentSymbolClientCapabilities {
+ /// Whether document symbol supports dynamic registration.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub dynamic_registration: Option<bool>,
+
+ /// Specific capabilities for the `SymbolKind`.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub symbol_kind: Option<SymbolKindCapability>,
+
+ /// The client supports hierarchical document symbols.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub hierarchical_document_symbol_support: Option<bool>,
+
+ /// The client supports tags on `SymbolInformation`. Tags are supported on
+ /// `DocumentSymbol` if `hierarchicalDocumentSymbolSupport` is set to true.
+ /// Clients supporting tags have to handle unknown tags gracefully.
+ ///
+ /// @since 3.16.0
+ #[serde(
+ default,
+ skip_serializing_if = "Option::is_none",
+ deserialize_with = "TagSupport::deserialize_compat"
+ )]
+ pub tag_support: Option<TagSupport<SymbolTag>>,
+}
+
+#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
+#[serde(untagged)]
+pub enum DocumentSymbolResponse {
+ Flat(Vec<SymbolInformation>),
+ Nested(Vec<DocumentSymbol>),
+}
+
+impl From<Vec<SymbolInformation>> for DocumentSymbolResponse {
+ fn from(info: Vec<SymbolInformation>) -> Self {
+ DocumentSymbolResponse::Flat(info)
+ }
+}
+
+impl From<Vec<DocumentSymbol>> for DocumentSymbolResponse {
+ fn from(symbols: Vec<DocumentSymbol>) -> Self {
+ DocumentSymbolResponse::Nested(symbols)
+ }
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct DocumentSymbolParams {
+ /// The text document.
+ pub text_document: TextDocumentIdentifier,
+
+ #[serde(flatten)]
+ pub work_done_progress_params: WorkDoneProgressParams,
+
+ #[serde(flatten)]
+ pub partial_result_params: PartialResultParams,
+}
+
+/// Represents programming constructs like variables, classes, interfaces etc.
+/// that appear in a document. Document symbols can be hierarchical and they have two ranges:
+/// one that encloses its definition and one that points to its most interesting range,
+/// e.g. the range of an identifier.
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct DocumentSymbol {
+ /// The name of this symbol.
+ pub name: String,
+ /// More detail for this symbol, e.g. the signature of a function. If not provided, the
+ /// name is used.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub detail: Option<String>,
+ /// The kind of this symbol.
+ pub kind: SymbolKind,
+ /// Tags for this document symbol.
+ /// @since 3.16.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub tags: Option<Vec<SymbolTag>>,
+ /// Indicates if this symbol is deprecated.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ #[deprecated(note = "Use tags instead")]
+ pub deprecated: Option<bool>,
+ /// The range enclosing this symbol, not including leading/trailing whitespace but everything else
+ /// like comments. This information is typically used to determine if the client's cursor is
+ /// inside the symbol to reveal the symbol in the UI.
+ pub range: Range,
+ /// The range that should be selected and revealed when this symbol is being picked, e.g. the name of a function.
+ /// Must be contained by the `range`.
+ pub selection_range: Range,
+ /// Children of this symbol, e.g. properties of a class.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub children: Option<Vec<DocumentSymbol>>,
+}
+
+/// Represents information about programming constructs like variables, classes,
+/// interfaces etc.
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct SymbolInformation {
+ /// The name of this symbol.
+ pub name: String,
+
+ /// The kind of this symbol.
+ pub kind: SymbolKind,
+
+ /// Tags for this symbol.
+ /// @since 3.16.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub tags: Option<Vec<SymbolTag>>,
+
+ /// Indicates if this symbol is deprecated.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ #[deprecated(note = "Use tags instead")]
+ pub deprecated: Option<bool>,
+
+ /// The location of this symbol.
+ pub location: Location,
+
+ /// The name of the symbol containing this symbol.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub container_name: Option<String>,
+}
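The document-symbol types above compose through the `From` impls and serde attributes shown in the diff. The sketch below is illustrative only and is not part of the vendored crate: it assumes the `lsp_types` crate root re-exports these types, that `serde_json` is available, and that `SymbolKind` uses the newer constant-style API (`SymbolKind::FUNCTION`; older releases spell it `SymbolKind::Function`).

```rust
use lsp_types::{DocumentSymbol, DocumentSymbolResponse, Position, Range, SymbolKind};

fn main() {
    // `deprecated` is still a struct field, so constructing the literal needs
    // an allow to silence the deprecation lint.
    #[allow(deprecated)]
    let symbol = DocumentSymbol {
        name: "main".to_string(),
        detail: Some("fn main()".to_string()),
        kind: SymbolKind::FUNCTION, // assumption: constant-style SymbolKind API
        tags: None,
        deprecated: None,
        // Full extent of the item vs. the identifier to reveal.
        range: Range::new(Position::new(0, 0), Position::new(10, 1)),
        selection_range: Range::new(Position::new(0, 3), Position::new(0, 7)),
        children: None,
    };

    // `From<Vec<DocumentSymbol>>` selects the `Nested` variant.
    let response: DocumentSymbolResponse = vec![symbol].into();

    // Because the enum is `#[serde(untagged)]`, the JSON payload is just the
    // array, with fields renamed to camelCase (e.g. `selectionRange`).
    println!("{}", serde_json::to_string_pretty(&response).unwrap());
}
```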
diff --git a/vendor/lsp-types/src/error_codes.rs b/vendor/lsp-types/src/error_codes.rs
index 9435a7261..1d0c1d571 100644
--- a/vendor/lsp-types/src/error_codes.rs
+++ b/vendor/lsp-types/src/error_codes.rs
@@ -1,37 +1,36 @@
-//! In this module we only define constants for lsp specific error codes.
-//! There are other error codes that are defined in the
-//! [JSON RPC specification](https://www.jsonrpc.org/specification#error_object).
-
-// This is the start range of LSP reserved error codes.
-// It doesn't denote a real error code.
-//
-// @since 3.16.0
-pub const LSP_RESERVED_ERROR_RANGE_START: i64 = -32899;
-
-// The server cancelled the request. This error code should
-// only be used for requests that explicitly support being
-// server cancellable.
-//
-// @since 3.17.0
-#[cfg(feature = "proposed")]
-pub const SERVER_CANCELLED: i64 = -32802;
-
-// The server detected that the content of a document got
-// modified outside normal conditions. A server should
-// NOT send this error code if it detects a content change
-// in it unprocessed messages. The result even computed
-// on an older state might still be useful for the client.
-//
-// If a client decides that a result is not of any use anymore
-// the client should cancel the request.
-pub const CONTENT_MODIFIED: i64 = -32801;
-
-// The client has canceled a request and a server as detected
-// the cancel.
-pub const REQUEST_CANCELLED: i64 = -32800;
-
-// This is the end range of LSP reserved error codes.
-// It doesn't denote a real error code.
-//
-// @since 3.16.0
-pub const LSP_RESERVED_ERROR_RANGE_END: i64 = -32800;
+//! In this module we only define constants for LSP-specific error codes.
+//! There are other error codes that are defined in the
+//! [JSON RPC specification](https://www.jsonrpc.org/specification#error_object).
+
+// This is the start range of LSP reserved error codes.
+// It doesn't denote a real error code.
+//
+// @since 3.16.0
+pub const LSP_RESERVED_ERROR_RANGE_START: i64 = -32899;
+
+// The server cancelled the request. This error code should
+// only be used for requests that explicitly support being
+// server cancellable.
+//
+// @since 3.17.0
+pub const SERVER_CANCELLED: i64 = -32802;
+
+// The server detected that the content of a document got
+// modified outside normal conditions. A server should
+// NOT send this error code if it detects a content change
+// in its unprocessed messages. The result, even if computed
+// on an older state, might still be useful for the client.
+//
+// If a client decides that a result is not of any use anymore
+// the client should cancel the request.
+pub const CONTENT_MODIFIED: i64 = -32801;
+
+// The client has canceled a request and the server has detected
+// the cancellation.
+pub const REQUEST_CANCELLED: i64 = -32800;
+
+// This is the end range of LSP reserved error codes.
+// It doesn't denote a real error code.
+//
+// @since 3.16.0
+pub const LSP_RESERVED_ERROR_RANGE_END: i64 = -32800;
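As a usage note, the reserved codes above are plain `i64` constants; a server plugs them into whatever JSON-RPC error type its transport layer defines. A hedged sketch, assuming the crate exposes them as `lsp_types::error_codes::*`:

```rust
use lsp_types::error_codes::{CONTENT_MODIFIED, REQUEST_CANCELLED};

/// Returns the error code a server would attach to the JSON-RPC error object.
fn abort_code(cancelled_by_client: bool) -> i64 {
    if cancelled_by_client {
        // The client sent `$/cancelRequest` and the server noticed it.
        REQUEST_CANCELLED
    } else {
        // The document changed underneath the computation; the stale result
        // may still be useful, so the client decides whether to retry.
        CONTENT_MODIFIED
    }
}

fn main() {
    assert_eq!(abort_code(true), -32800);
    assert_eq!(abort_code(false), -32801);
}
```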
diff --git a/vendor/lsp-types/src/file_operations.rs b/vendor/lsp-types/src/file_operations.rs
index 4092d931b..5adcb9838 100644
--- a/vendor/lsp-types/src/file_operations.rs
+++ b/vendor/lsp-types/src/file_operations.rs
@@ -1,213 +1,213 @@
-use serde::{Deserialize, Serialize};
-
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct WorkspaceFileOperationsClientCapabilities {
- /// Whether the client supports dynamic registration for file
- /// requests/notifications.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub dynamic_registration: Option<bool>,
-
- /// The client has support for sending didCreateFiles notifications.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub did_create: Option<bool>,
-
- /// The server is interested in receiving willCreateFiles requests.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub will_create: Option<bool>,
-
- /// The server is interested in receiving didRenameFiles requests.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub did_rename: Option<bool>,
-
- /// The server is interested in receiving willRenameFiles requests.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub will_rename: Option<bool>,
-
- /// The server is interested in receiving didDeleteFiles requests.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub did_delete: Option<bool>,
-
- /// The server is interested in receiving willDeleteFiles requests.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub will_delete: Option<bool>,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct WorkspaceFileOperationsServerCapabilities {
- /// The server is interested in receiving didCreateFiles
- /// notifications.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub did_create: Option<FileOperationRegistrationOptions>,
-
- /// The server is interested in receiving willCreateFiles requests.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub will_create: Option<FileOperationRegistrationOptions>,
-
- /// The server is interested in receiving didRenameFiles
- /// notifications.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub did_rename: Option<FileOperationRegistrationOptions>,
-
- /// The server is interested in receiving willRenameFiles requests.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub will_rename: Option<FileOperationRegistrationOptions>,
-
- /// The server is interested in receiving didDeleteFiles file
- /// notifications.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub did_delete: Option<FileOperationRegistrationOptions>,
-
- /// The server is interested in receiving willDeleteFiles file
- /// requests.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub will_delete: Option<FileOperationRegistrationOptions>,
-}
-
-/// The options to register for file operations.
-///
-/// @since 3.16.0
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct FileOperationRegistrationOptions {
- /// The actual filters.
- pub filters: Vec<FileOperationFilter>,
-}
-
-/// A filter to describe in which file operation requests or notifications
-/// the server is interested in.
-///
-/// @since 3.16.0
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct FileOperationFilter {
- /// A Uri like `file` or `untitled`.
- pub scheme: Option<String>,
-
- /// The actual file operation pattern.
- pub pattern: FileOperationPattern,
-}
-
-/// A pattern kind describing if a glob pattern matches a file a folder or
-/// both.
-///
-/// @since 3.16.0
-#[derive(Debug, Eq, PartialEq, Deserialize, Serialize, Clone)]
-#[serde(rename_all = "lowercase")]
-pub enum FileOperationPatternKind {
- /// The pattern matches a file only.
- File,
-
- /// The pattern matches a folder only.
- Folder,
-}
-
-/// Matching options for the file operation pattern.
-///
-/// @since 3.16.0
-///
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct FileOperationPatternOptions {
- /// The pattern should be matched ignoring casing.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub ignore_case: Option<bool>,
-}
-
-/// A pattern to describe in which file operation requests or notifications
-/// the server is interested in.
-///
-/// @since 3.16.0
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct FileOperationPattern {
- /// The glob pattern to match. Glob patterns can have the following syntax:
- /// - `*` to match one or more characters in a path segment
- /// - `?` to match on one character in a path segment
- /// - `**` to match any number of path segments, including none
- /// - `{}` to group conditions (e.g. `**​/*.{ts,js}` matches all TypeScript
- /// and JavaScript files)
- /// - `[]` to declare a range of characters to match in a path segment
- /// (e.g., `example.[0-9]` to match on `example.0`, `example.1`, …)
- /// - `[!...]` to negate a range of characters to match in a path segment
- /// (e.g., `example.[!0-9]` to match on `example.a`, `example.b`, but
- /// not `example.0`)
- pub glob: String,
-
- /// Whether to match files or folders with this pattern.
- ///
- /// Matches both if undefined.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub matches: Option<FileOperationPatternKind>,
-
- /// Additional options used during matching.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub options: Option<FileOperationPatternOptions>,
-}
-
-/// The parameters sent in notifications/requests for user-initiated creation
-/// of files.
-///
-/// @since 3.16.0
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct CreateFilesParams {
- /// An array of all files/folders created in this operation.
- pub files: Vec<FileCreate>,
-}
-/// Represents information on a file/folder create.
-///
-/// @since 3.16.0
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct FileCreate {
- /// A file:// URI for the location of the file/folder being created.
- pub uri: String,
-}
-
-/// The parameters sent in notifications/requests for user-initiated renames
-/// of files.
-///
-/// @since 3.16.0
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct RenameFilesParams {
- /// An array of all files/folders renamed in this operation. When a folder
- /// is renamed, only the folder will be included, and not its children.
- pub files: Vec<FileRename>,
-}
-
-/// Represents information on a file/folder rename.
-///
-/// @since 3.16.0
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct FileRename {
- /// A file:// URI for the original location of the file/folder being renamed.
- pub old_uri: String,
-
- /// A file:// URI for the new location of the file/folder being renamed.
- pub new_uri: String,
-}
-
-/// The parameters sent in notifications/requests for user-initiated deletes
-/// of files.
-///
-/// @since 3.16.0
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct DeleteFilesParams {
- /// An array of all files/folders deleted in this operation.
- pub files: Vec<FileDelete>,
-}
-
-/// Represents information on a file/folder delete.
-///
-/// @since 3.16.0
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct FileDelete {
- /// A file:// URI for the location of the file/folder being deleted.
- pub uri: String,
-}
+use serde::{Deserialize, Serialize};
+
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct WorkspaceFileOperationsClientCapabilities {
+ /// Whether the client supports dynamic registration for file
+ /// requests/notifications.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub dynamic_registration: Option<bool>,
+
+ /// The client has support for sending didCreateFiles notifications.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub did_create: Option<bool>,
+
+ /// The client has support for sending willCreateFiles requests.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub will_create: Option<bool>,
+
+ /// The client has support for sending didRenameFiles notifications.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub did_rename: Option<bool>,
+
+ /// The client has support for sending willRenameFiles requests.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub will_rename: Option<bool>,
+
+ /// The client has support for sending didDeleteFiles notifications.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub did_delete: Option<bool>,
+
+ /// The client has support for sending willDeleteFiles requests.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub will_delete: Option<bool>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct WorkspaceFileOperationsServerCapabilities {
+ /// The server is interested in receiving didCreateFiles
+ /// notifications.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub did_create: Option<FileOperationRegistrationOptions>,
+
+ /// The server is interested in receiving willCreateFiles requests.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub will_create: Option<FileOperationRegistrationOptions>,
+
+ /// The server is interested in receiving didRenameFiles
+ /// notifications.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub did_rename: Option<FileOperationRegistrationOptions>,
+
+ /// The server is interested in receiving willRenameFiles requests.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub will_rename: Option<FileOperationRegistrationOptions>,
+
+ /// The server is interested in receiving didDeleteFiles file
+ /// notifications.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub did_delete: Option<FileOperationRegistrationOptions>,
+
+ /// The server is interested in receiving willDeleteFiles file
+ /// requests.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub will_delete: Option<FileOperationRegistrationOptions>,
+}
+
+/// The options to register for file operations.
+///
+/// @since 3.16.0
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct FileOperationRegistrationOptions {
+ /// The actual filters.
+ pub filters: Vec<FileOperationFilter>,
+}
+
+/// A filter to describe in which file operation requests or notifications
+/// the server is interested in.
+///
+/// @since 3.16.0
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct FileOperationFilter {
+ /// A URI scheme like `file` or `untitled`.
+ pub scheme: Option<String>,
+
+ /// The actual file operation pattern.
+ pub pattern: FileOperationPattern,
+}
+
+/// A pattern kind describing if a glob pattern matches a file, a folder, or
+/// both.
+///
+/// @since 3.16.0
+#[derive(Debug, Eq, PartialEq, Deserialize, Serialize, Clone)]
+#[serde(rename_all = "lowercase")]
+pub enum FileOperationPatternKind {
+ /// The pattern matches a file only.
+ File,
+
+ /// The pattern matches a folder only.
+ Folder,
+}
+
+/// Matching options for the file operation pattern.
+///
+/// @since 3.16.0
+///
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct FileOperationPatternOptions {
+ /// The pattern should be matched ignoring casing.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub ignore_case: Option<bool>,
+}
+
+/// A pattern to describe in which file operation requests or notifications
+/// the server is interested in.
+///
+/// @since 3.16.0
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct FileOperationPattern {
+ /// The glob pattern to match. Glob patterns can have the following syntax:
+ /// - `*` to match one or more characters in a path segment
+ /// - `?` to match on one character in a path segment
+ /// - `**` to match any number of path segments, including none
+ /// - `{}` to group conditions (e.g. `**​/*.{ts,js}` matches all TypeScript
+ /// and JavaScript files)
+ /// - `[]` to declare a range of characters to match in a path segment
+ /// (e.g., `example.[0-9]` to match on `example.0`, `example.1`, …)
+ /// - `[!...]` to negate a range of characters to match in a path segment
+ /// (e.g., `example.[!0-9]` to match on `example.a`, `example.b`, but
+ /// not `example.0`)
+ pub glob: String,
+
+ /// Whether to match files or folders with this pattern.
+ ///
+ /// Matches both if undefined.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub matches: Option<FileOperationPatternKind>,
+
+ /// Additional options used during matching.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub options: Option<FileOperationPatternOptions>,
+}
+
+/// The parameters sent in notifications/requests for user-initiated creation
+/// of files.
+///
+/// @since 3.16.0
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct CreateFilesParams {
+ /// An array of all files/folders created in this operation.
+ pub files: Vec<FileCreate>,
+}
+/// Represents information on a file/folder create.
+///
+/// @since 3.16.0
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct FileCreate {
+ /// A file:// URI for the location of the file/folder being created.
+ pub uri: String,
+}
+
+/// The parameters sent in notifications/requests for user-initiated renames
+/// of files.
+///
+/// @since 3.16.0
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct RenameFilesParams {
+ /// An array of all files/folders renamed in this operation. When a folder
+ /// is renamed, only the folder will be included, and not its children.
+ pub files: Vec<FileRename>,
+}
+
+/// Represents information on a file/folder rename.
+///
+/// @since 3.16.0
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct FileRename {
+ /// A file:// URI for the original location of the file/folder being renamed.
+ pub old_uri: String,
+
+ /// A file:// URI for the new location of the file/folder being renamed.
+ pub new_uri: String,
+}
+
+/// The parameters sent in notifications/requests for user-initiated deletes
+/// of files.
+///
+/// @since 3.16.0
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct DeleteFilesParams {
+ /// An array of all files/folders deleted in this operation.
+ pub files: Vec<FileDelete>,
+}
+
+/// Represents information on a file/folder delete.
+///
+/// @since 3.16.0
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct FileDelete {
+ /// A file:// URI for the location of the file/folder being deleted.
+ pub uri: String,
+}
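Putting the registration types above together, a server that wants `willRenameFiles` requests for Rust sources might build options like the following. This is a sketch under the assumption that the `lsp_types` crate root re-exports these types; the glob and scheme values are illustrative.

```rust
use lsp_types::{
    FileOperationFilter, FileOperationPattern, FileOperationPatternKind,
    FileOperationRegistrationOptions,
};

fn rust_rename_registration() -> FileOperationRegistrationOptions {
    FileOperationRegistrationOptions {
        filters: vec![FileOperationFilter {
            // Restrict to `file://` URIs; `None` would match any scheme.
            scheme: Some("file".to_string()),
            pattern: FileOperationPattern {
                // `**` spans directories, `*.rs` matches one path segment.
                glob: "**/*.rs".to_string(),
                matches: Some(FileOperationPatternKind::File),
                options: None,
            },
        }],
    }
}
```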
diff --git a/vendor/lsp-types/src/folding_range.rs b/vendor/lsp-types/src/folding_range.rs
index 60adaaa18..d3cd01f4f 100644
--- a/vendor/lsp-types/src/folding_range.rs
+++ b/vendor/lsp-types/src/folding_range.rs
@@ -1,102 +1,144 @@
-use crate::{
- PartialResultParams, StaticTextDocumentColorProviderOptions, TextDocumentIdentifier,
- WorkDoneProgressParams,
-};
-use serde::{Deserialize, Serialize};
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct FoldingRangeParams {
- /// The text document.
- pub text_document: TextDocumentIdentifier,
-
- #[serde(flatten)]
- pub work_done_progress_params: WorkDoneProgressParams,
-
- #[serde(flatten)]
- pub partial_result_params: PartialResultParams,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(untagged)]
-pub enum FoldingRangeProviderCapability {
- Simple(bool),
- FoldingProvider(FoldingProviderOptions),
- Options(StaticTextDocumentColorProviderOptions),
-}
-
-impl From<StaticTextDocumentColorProviderOptions> for FoldingRangeProviderCapability {
- fn from(from: StaticTextDocumentColorProviderOptions) -> Self {
- Self::Options(from)
- }
-}
-
-impl From<FoldingProviderOptions> for FoldingRangeProviderCapability {
- fn from(from: FoldingProviderOptions) -> Self {
- Self::FoldingProvider(from)
- }
-}
-
-impl From<bool> for FoldingRangeProviderCapability {
- fn from(from: bool) -> Self {
- Self::Simple(from)
- }
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-pub struct FoldingProviderOptions {}
-
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct FoldingRangeClientCapabilities {
- /// Whether implementation supports dynamic registration for folding range providers. If this is set to `true`
- /// the client supports the new `(FoldingRangeProviderOptions & TextDocumentRegistrationOptions & StaticRegistrationOptions)`
- /// return value for the corresponding server capability as well.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub dynamic_registration: Option<bool>,
-
- /// The maximum number of folding ranges that the client prefers to receive per document. The value serves as a
- /// hint, servers are free to follow the limit.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub range_limit: Option<u32>,
- /// If set, the client signals that it only supports folding complete lines. If set, client will
- /// ignore specified `startCharacter` and `endCharacter` properties in a FoldingRange.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub line_folding_only: Option<bool>,
-}
-
-/// Enum of known range kinds
-#[derive(Debug, Eq, PartialEq, Deserialize, Serialize, Clone)]
-#[serde(rename_all = "lowercase")]
-pub enum FoldingRangeKind {
- /// Folding range for a comment
- Comment,
- /// Folding range for a imports or includes
- Imports,
- /// Folding range for a region (e.g. `#region`)
- Region,
-}
-
-/// Represents a folding range.
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct FoldingRange {
- /// The zero-based line number from where the folded range starts.
- pub start_line: u32,
-
- /// The zero-based character offset from where the folded range starts. If not defined, defaults to the length of the start line.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub start_character: Option<u32>,
-
- /// The zero-based line number where the folded range ends.
- pub end_line: u32,
-
- /// The zero-based character offset before the folded range ends. If not defined, defaults to the length of the end line.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub end_character: Option<u32>,
-
- /// Describes the kind of the folding range such as `comment' or 'region'. The kind
- /// is used to categorize folding ranges and used by commands like 'Fold all comments'. See
- /// [FoldingRangeKind](#FoldingRangeKind) for an enumeration of standardized kinds.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub kind: Option<FoldingRangeKind>,
-}
+use crate::{
+ PartialResultParams, StaticTextDocumentColorProviderOptions, TextDocumentIdentifier,
+ WorkDoneProgressParams,
+};
+use serde::{Deserialize, Serialize};
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct FoldingRangeParams {
+ /// The text document.
+ pub text_document: TextDocumentIdentifier,
+
+ #[serde(flatten)]
+ pub work_done_progress_params: WorkDoneProgressParams,
+
+ #[serde(flatten)]
+ pub partial_result_params: PartialResultParams,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(untagged)]
+pub enum FoldingRangeProviderCapability {
+ Simple(bool),
+ FoldingProvider(FoldingProviderOptions),
+ Options(StaticTextDocumentColorProviderOptions),
+}
+
+impl From<StaticTextDocumentColorProviderOptions> for FoldingRangeProviderCapability {
+ fn from(from: StaticTextDocumentColorProviderOptions) -> Self {
+ Self::Options(from)
+ }
+}
+
+impl From<FoldingProviderOptions> for FoldingRangeProviderCapability {
+ fn from(from: FoldingProviderOptions) -> Self {
+ Self::FoldingProvider(from)
+ }
+}
+
+impl From<bool> for FoldingRangeProviderCapability {
+ fn from(from: bool) -> Self {
+ Self::Simple(from)
+ }
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+pub struct FoldingProviderOptions {}
+
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct FoldingRangeKindCapability {
+ /// The folding range kind values the client supports. When this
+ /// property exists the client also guarantees that it will
+ /// handle values outside its set gracefully and falls back
+ /// to a default value when unknown.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub value_set: Option<Vec<FoldingRangeKind>>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct FoldingRangeCapability {
+ /// If set, the client signals that it supports setting collapsedText on
+ /// folding ranges to display custom labels instead of the default text.
+ ///
+ /// @since 3.17.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub collapsed_text: Option<bool>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct FoldingRangeClientCapabilities {
+ /// Whether implementation supports dynamic registration for folding range providers. If this is set to `true`
+ /// the client supports the new `(FoldingRangeProviderOptions & TextDocumentRegistrationOptions & StaticRegistrationOptions)`
+ /// return value for the corresponding server capability as well.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub dynamic_registration: Option<bool>,
+
+ /// The maximum number of folding ranges that the client prefers to receive per document. The value serves as a
+ /// hint, servers are free to follow the limit.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub range_limit: Option<u32>,
+
+ /// If set, the client signals that it only supports folding complete lines. If set, the client will
+ /// ignore the specified `startCharacter` and `endCharacter` properties in a `FoldingRange`.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub line_folding_only: Option<bool>,
+
+ /// Specific options for the folding range kind.
+ ///
+ /// @since 3.17.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub folding_range_kind: Option<FoldingRangeKindCapability>,
+
+ /// Specific options for the folding range.
+ ///
+ /// @since 3.17.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub folding_range: Option<FoldingRangeCapability>,
+}
+
+/// Enum of known range kinds
+#[derive(Debug, Eq, PartialEq, Deserialize, Serialize, Clone)]
+#[serde(rename_all = "lowercase")]
+pub enum FoldingRangeKind {
+ /// Folding range for a comment
+ Comment,
+ /// Folding range for imports or includes
+ Imports,
+ /// Folding range for a region (e.g. `#region`)
+ Region,
+}
+
+/// Represents a folding range.
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct FoldingRange {
+ /// The zero-based line number from where the folded range starts.
+ pub start_line: u32,
+
+ /// The zero-based character offset from where the folded range starts. If not defined, defaults to the length of the start line.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub start_character: Option<u32>,
+
+ /// The zero-based line number where the folded range ends.
+ pub end_line: u32,
+
+ /// The zero-based character offset before the folded range ends. If not defined, defaults to the length of the end line.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub end_character: Option<u32>,
+
+ /// Describes the kind of the folding range such as `comment` or `region`. The kind
+ /// is used to categorize folding ranges and is used by commands like 'Fold all comments'. See
+ /// [FoldingRangeKind](#FoldingRangeKind) for an enumeration of standardized kinds.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub kind: Option<FoldingRangeKind>,
+
+ /// The text that the client should show when the specified range is
+ /// collapsed. If not defined or not supported by the client, a default
+ /// will be chosen by the client.
+ ///
+ /// @since 3.17.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub collapsed_text: Option<String>,
+}
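The new `FoldingRangeCapability` and the `collapsedText` field added in this hunk pair up: a server should only emit the label when the client advertised support. A minimal sketch, assuming root re-exports from `lsp_types`:

```rust
use lsp_types::{FoldingRange, FoldingRangeClientCapabilities, FoldingRangeKind};

fn comment_fold(caps: &FoldingRangeClientCapabilities, start: u32, end: u32) -> FoldingRange {
    // Only use `collapsedText` if the client said it can render custom labels.
    let supports_label = caps
        .folding_range
        .as_ref()
        .and_then(|c| c.collapsed_text)
        .unwrap_or(false);

    FoldingRange {
        start_line: start,
        end_line: end,
        // Whole-line fold: leaving the character offsets unset keeps clients
        // that only support `lineFoldingOnly` happy as well.
        start_character: None,
        end_character: None,
        kind: Some(FoldingRangeKind::Comment),
        collapsed_text: supports_label.then(|| "/* ... */".to_string()),
    }
}
```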
diff --git a/vendor/lsp-types/src/formatting.rs b/vendor/lsp-types/src/formatting.rs
index 2fb5e6b63..4c9a5b45a 100644
--- a/vendor/lsp-types/src/formatting.rs
+++ b/vendor/lsp-types/src/formatting.rs
@@ -1,153 +1,153 @@
-use serde::{Deserialize, Serialize};
-
-use crate::{
- DocumentSelector, DynamicRegistrationClientCapabilities, Range, TextDocumentIdentifier,
- TextDocumentPositionParams, WorkDoneProgressParams,
-};
-
-use std::collections::HashMap;
-
-pub type DocumentFormattingClientCapabilities = DynamicRegistrationClientCapabilities;
-pub type DocumentRangeFormattingClientCapabilities = DynamicRegistrationClientCapabilities;
-pub type DocumentOnTypeFormattingClientCapabilities = DynamicRegistrationClientCapabilities;
-
-/// Format document on type options
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct DocumentOnTypeFormattingOptions {
- /// A character on which formatting should be triggered, like `}`.
- pub first_trigger_character: String,
-
- /// More trigger characters.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub more_trigger_character: Option<Vec<String>>,
-}
-
-#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct DocumentFormattingParams {
- /// The document to format.
- pub text_document: TextDocumentIdentifier,
-
- /// The format options.
- pub options: FormattingOptions,
-
- #[serde(flatten)]
- pub work_done_progress_params: WorkDoneProgressParams,
-}
-
-/// Value-object describing what options formatting should use.
-#[derive(Debug, PartialEq, Clone, Default, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct FormattingOptions {
- /// Size of a tab in spaces.
- pub tab_size: u32,
-
- /// Prefer spaces over tabs.
- pub insert_spaces: bool,
-
- /// Signature for further properties.
- #[serde(flatten)]
- pub properties: HashMap<String, FormattingProperty>,
-
- /// Trim trailing whitespaces on a line.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub trim_trailing_whitespace: Option<bool>,
-
- /// Insert a newline character at the end of the file if one does not exist.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub insert_final_newline: Option<bool>,
-
- /// Trim all newlines after the final newline at the end of the file.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub trim_final_newlines: Option<bool>,
-}
-
-#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(untagged)]
-pub enum FormattingProperty {
- Bool(bool),
- Number(i32),
- String(String),
-}
-
-#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct DocumentRangeFormattingParams {
- /// The document to format.
- pub text_document: TextDocumentIdentifier,
-
- /// The range to format
- pub range: Range,
-
- /// The format options
- pub options: FormattingOptions,
-
- #[serde(flatten)]
- pub work_done_progress_params: WorkDoneProgressParams,
-}
-
-#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct DocumentOnTypeFormattingParams {
- /// Text Document and Position fields.
- #[serde(flatten)]
- pub text_document_position: TextDocumentPositionParams,
-
- /// The character that has been typed.
- pub ch: String,
-
- /// The format options.
- pub options: FormattingOptions,
-}
-
-/// Extends TextDocumentRegistrationOptions
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct DocumentOnTypeFormattingRegistrationOptions {
- /// A document selector to identify the scope of the registration. If set to null
- /// the document selector provided on the client side will be used.
- pub document_selector: Option<DocumentSelector>,
-
- /// A character on which formatting should be triggered, like `}`.
- pub first_trigger_character: String,
-
- /// More trigger characters.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub more_trigger_character: Option<Vec<String>>,
-}
-
-#[cfg(test)]
-mod tests {
- use super::*;
- use crate::tests::test_serialization;
-
- #[test]
- fn formatting_options() {
- test_serialization(
- &FormattingOptions {
- tab_size: 123,
- insert_spaces: true,
- properties: HashMap::new(),
- trim_trailing_whitespace: None,
- insert_final_newline: None,
- trim_final_newlines: None,
- },
- r#"{"tabSize":123,"insertSpaces":true}"#,
- );
-
- test_serialization(
- &FormattingOptions {
- tab_size: 123,
- insert_spaces: true,
- properties: vec![("prop".to_string(), FormattingProperty::Number(1))]
- .into_iter()
- .collect(),
- trim_trailing_whitespace: None,
- insert_final_newline: None,
- trim_final_newlines: None,
- },
- r#"{"tabSize":123,"insertSpaces":true,"prop":1}"#,
- );
- }
-}
+use serde::{Deserialize, Serialize};
+
+use crate::{
+ DocumentSelector, DynamicRegistrationClientCapabilities, Range, TextDocumentIdentifier,
+ TextDocumentPositionParams, WorkDoneProgressParams,
+};
+
+use std::collections::HashMap;
+
+pub type DocumentFormattingClientCapabilities = DynamicRegistrationClientCapabilities;
+pub type DocumentRangeFormattingClientCapabilities = DynamicRegistrationClientCapabilities;
+pub type DocumentOnTypeFormattingClientCapabilities = DynamicRegistrationClientCapabilities;
+
+/// Format document on type options
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct DocumentOnTypeFormattingOptions {
+ /// A character on which formatting should be triggered, like `}`.
+ pub first_trigger_character: String,
+
+ /// More trigger characters.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub more_trigger_character: Option<Vec<String>>,
+}
+
+#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct DocumentFormattingParams {
+ /// The document to format.
+ pub text_document: TextDocumentIdentifier,
+
+ /// The format options.
+ pub options: FormattingOptions,
+
+ #[serde(flatten)]
+ pub work_done_progress_params: WorkDoneProgressParams,
+}
+
+/// Value-object describing what options formatting should use.
+#[derive(Debug, PartialEq, Clone, Default, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct FormattingOptions {
+ /// Size of a tab in spaces.
+ pub tab_size: u32,
+
+ /// Prefer spaces over tabs.
+ pub insert_spaces: bool,
+
+ /// Signature for further properties.
+ #[serde(flatten)]
+ pub properties: HashMap<String, FormattingProperty>,
+
+ /// Trim trailing whitespace on a line.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub trim_trailing_whitespace: Option<bool>,
+
+ /// Insert a newline character at the end of the file if one does not exist.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub insert_final_newline: Option<bool>,
+
+ /// Trim all newlines after the final newline at the end of the file.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub trim_final_newlines: Option<bool>,
+}
+
+#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(untagged)]
+pub enum FormattingProperty {
+ Bool(bool),
+ Number(i32),
+ String(String),
+}
+
+#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct DocumentRangeFormattingParams {
+ /// The document to format.
+ pub text_document: TextDocumentIdentifier,
+
+ /// The range to format
+ pub range: Range,
+
+ /// The format options
+ pub options: FormattingOptions,
+
+ #[serde(flatten)]
+ pub work_done_progress_params: WorkDoneProgressParams,
+}
+
+#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct DocumentOnTypeFormattingParams {
+ /// Text Document and Position fields.
+ #[serde(flatten)]
+ pub text_document_position: TextDocumentPositionParams,
+
+ /// The character that has been typed.
+ pub ch: String,
+
+ /// The format options.
+ pub options: FormattingOptions,
+}
+
+/// Extends TextDocumentRegistrationOptions
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct DocumentOnTypeFormattingRegistrationOptions {
+ /// A document selector to identify the scope of the registration. If set to null
+ /// the document selector provided on the client side will be used.
+ pub document_selector: Option<DocumentSelector>,
+
+ /// A character on which formatting should be triggered, like `}`.
+ pub first_trigger_character: String,
+
+ /// More trigger characters.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub more_trigger_character: Option<Vec<String>>,
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::tests::test_serialization;
+
+ #[test]
+ fn formatting_options() {
+ test_serialization(
+ &FormattingOptions {
+ tab_size: 123,
+ insert_spaces: true,
+ properties: HashMap::new(),
+ trim_trailing_whitespace: None,
+ insert_final_newline: None,
+ trim_final_newlines: None,
+ },
+ r#"{"tabSize":123,"insertSpaces":true}"#,
+ );
+
+ test_serialization(
+ &FormattingOptions {
+ tab_size: 123,
+ insert_spaces: true,
+ properties: vec![("prop".to_string(), FormattingProperty::Number(1))]
+ .into_iter()
+ .collect(),
+ trim_trailing_whitespace: None,
+ insert_final_newline: None,
+ trim_final_newlines: None,
+ },
+ r#"{"tabSize":123,"insertSpaces":true,"prop":1}"#,
+ );
+ }
+}
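The tests above exercise serialization; the flattened `properties` map matters just as much on the way in, because clients may send editor-specific keys alongside `tabSize` and `insertSpaces`. A sketch of that, assuming `serde_json` is available (the `maxLineLength` key is invented for illustration):

```rust
use lsp_types::{FormattingOptions, FormattingProperty};

fn main() {
    // `maxLineLength` is not a named field, so the flattened `properties`
    // map picks it up instead of it being rejected.
    let json = r#"{"tabSize":4,"insertSpaces":false,"maxLineLength":100}"#;
    let opts: FormattingOptions = serde_json::from_str(json).unwrap();

    assert_eq!(opts.tab_size, 4);
    assert!(!opts.insert_spaces);
    assert_eq!(
        opts.properties.get("maxLineLength"),
        Some(&FormattingProperty::Number(100))
    );
}
```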
diff --git a/vendor/lsp-types/src/hover.rs b/vendor/lsp-types/src/hover.rs
index 01bd2f8d1..4c94055e6 100644
--- a/vendor/lsp-types/src/hover.rs
+++ b/vendor/lsp-types/src/hover.rs
@@ -1,86 +1,86 @@
-use serde::{Deserialize, Serialize};
-
-use crate::{
- MarkedString, MarkupContent, MarkupKind, Range, TextDocumentPositionParams,
- TextDocumentRegistrationOptions, WorkDoneProgressOptions, WorkDoneProgressParams,
-};
-
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct HoverClientCapabilities {
- /// Whether completion supports dynamic registration.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub dynamic_registration: Option<bool>,
-
- /// Client supports the follow content formats for the content
- /// property. The order describes the preferred format of the client.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub content_format: Option<Vec<MarkupKind>>,
-}
-
-/// Hover options.
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct HoverOptions {
- #[serde(flatten)]
- pub work_done_progress_options: WorkDoneProgressOptions,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct HoverRegistrationOptions {
- #[serde(flatten)]
- pub text_document_registration_options: TextDocumentRegistrationOptions,
-
- #[serde(flatten)]
- pub hover_options: HoverOptions,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(untagged)]
-pub enum HoverProviderCapability {
- Simple(bool),
- Options(HoverOptions),
-}
-
-impl From<HoverOptions> for HoverProviderCapability {
- fn from(from: HoverOptions) -> Self {
- Self::Options(from)
- }
-}
-
-impl From<bool> for HoverProviderCapability {
- fn from(from: bool) -> Self {
- Self::Simple(from)
- }
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct HoverParams {
- #[serde(flatten)]
- pub text_document_position_params: TextDocumentPositionParams,
-
- #[serde(flatten)]
- pub work_done_progress_params: WorkDoneProgressParams,
-}
-
-/// The result of a hover request.
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-pub struct Hover {
- /// The hover's content
- pub contents: HoverContents,
- /// An optional range is a range inside a text document
- /// that is used to visualize a hover, e.g. by changing the background color.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub range: Option<Range>,
-}
-
-/// Hover contents could be single entry or multiple entries.
-#[derive(Debug, Eq, PartialEq, Clone, Serialize, Deserialize)]
-#[serde(untagged)]
-pub enum HoverContents {
- Scalar(MarkedString),
- Array(Vec<MarkedString>),
- Markup(MarkupContent),
-}
+use serde::{Deserialize, Serialize};
+
+use crate::{
+ MarkedString, MarkupContent, MarkupKind, Range, TextDocumentPositionParams,
+ TextDocumentRegistrationOptions, WorkDoneProgressOptions, WorkDoneProgressParams,
+};
+
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct HoverClientCapabilities {
+ /// Whether hover supports dynamic registration.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub dynamic_registration: Option<bool>,
+
+ /// Client supports the following content formats for the content
+ /// property. The order describes the preferred format of the client.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub content_format: Option<Vec<MarkupKind>>,
+}
+
+/// Hover options.
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct HoverOptions {
+ #[serde(flatten)]
+ pub work_done_progress_options: WorkDoneProgressOptions,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct HoverRegistrationOptions {
+ #[serde(flatten)]
+ pub text_document_registration_options: TextDocumentRegistrationOptions,
+
+ #[serde(flatten)]
+ pub hover_options: HoverOptions,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(untagged)]
+pub enum HoverProviderCapability {
+ Simple(bool),
+ Options(HoverOptions),
+}
+
+impl From<HoverOptions> for HoverProviderCapability {
+ fn from(from: HoverOptions) -> Self {
+ Self::Options(from)
+ }
+}
+
+impl From<bool> for HoverProviderCapability {
+ fn from(from: bool) -> Self {
+ Self::Simple(from)
+ }
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct HoverParams {
+ #[serde(flatten)]
+ pub text_document_position_params: TextDocumentPositionParams,
+
+ #[serde(flatten)]
+ pub work_done_progress_params: WorkDoneProgressParams,
+}
+
+/// The result of a hover request.
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+pub struct Hover {
+ /// The hover's content
+ pub contents: HoverContents,
+ /// An optional range inside the text document
+ /// that is used to visualize the hover, e.g. by changing the background color.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub range: Option<Range>,
+}
+
+/// Hover contents could be single entry or multiple entries.
+#[derive(Debug, Eq, PartialEq, Clone, Serialize, Deserialize)]
+#[serde(untagged)]
+pub enum HoverContents {
+ Scalar(MarkedString),
+ Array(Vec<MarkedString>),
+ Markup(MarkupContent),
+}
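Tying the hover types together: a server typically consults `contentFormat` from `HoverClientCapabilities` before choosing between plain text and Markdown. The sketch below is illustrative, assumes root re-exports from `lsp_types`, and invents the signature-formatting detail:

```rust
use lsp_types::{Hover, HoverClientCapabilities, HoverContents, MarkupContent, MarkupKind};

fn hover_for_signature(caps: &HoverClientCapabilities, signature: &str) -> Hover {
    // `contentFormat` lists the formats the client prefers, in order.
    let wants_markdown = caps
        .content_format
        .as_ref()
        .map_or(false, |formats| formats.contains(&MarkupKind::Markdown));

    let (kind, value) = if wants_markdown {
        (MarkupKind::Markdown, format!("**{}**", signature))
    } else {
        (MarkupKind::PlainText, signature.to_string())
    };

    Hover {
        contents: HoverContents::Markup(MarkupContent { kind, value }),
        // No range: the client decides what to highlight.
        range: None,
    }
}
```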
diff --git a/vendor/lsp-types/src/inlay_hint.rs b/vendor/lsp-types/src/inlay_hint.rs
index c1482c907..cdc9a0b70 100644
--- a/vendor/lsp-types/src/inlay_hint.rs
+++ b/vendor/lsp-types/src/inlay_hint.rs
@@ -1,279 +1,281 @@
-#![cfg(feature = "proposed")]
-
-use crate::{
- Command, LSPAny, Location, MarkupContent, Position, Range, StaticRegistrationOptions,
- TextDocumentIdentifier, TextDocumentRegistrationOptions, TextEdit, WorkDoneProgressOptions,
- WorkDoneProgressParams,
-};
-use serde::{Deserialize, Serialize};
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-#[serde(untagged)]
-pub enum InlayHintServerCapabilities {
- Options(InlayHintOptions),
- RegistrationOptions(InlayHintRegistrationOptions),
-}
-
-/// Inlay hint client capabilities.
-///
-/// @since 3.17.0 - proposed state
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct InlayHintClientCapabilities {
- /// Whether inlay hints support dynamic registration.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub dynamic_registration: Option<bool>,
-
- /// Indicates which properties a client can resolve lazily on a inlay
- /// hint.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub resolve_support: Option<InlayHintResolveClientCapabilities>,
-}
-
-/// Inlay hint options used during static registration.
-///
-/// @since 3.17.0 - proposed state
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct InlayHintOptions {
- #[serde(flatten)]
- pub work_done_progress_options: WorkDoneProgressOptions,
-
- /// The server provides support to resolve additional
- /// information for an inlay hint item.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub resolve_provider: Option<bool>,
-}
-
-/// Inlay hint options used during static or dynamic registration.
-///
-/// @since 3.17.0 - proposed state
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct InlayHintRegistrationOptions {
- #[serde(flatten)]
- pub inlay_hint_options: InlayHintOptions,
-
- #[serde(flatten)]
- pub text_document_registration_options: TextDocumentRegistrationOptions,
-
- #[serde(flatten)]
- pub static_registration_options: StaticRegistrationOptions,
-}
-
-/// A parameter literal used in inlay hint requests.
-///
-/// @since 3.17.0 - proposed state
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct InlayHintParams {
- #[serde(flatten)]
- pub work_done_progress_params: WorkDoneProgressParams,
-
- /// The text document.
- pub text_document: TextDocumentIdentifier,
-
- /// The visible document range for which inlay hints should be computed.
- pub range: Range,
-}
-
-/// Inlay hint information.
-///
-/// @since 3.17.0 - proposed state
-#[derive(Debug, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct InlayHint {
- /// The position of this hint.
- pub position: Position,
-
- /// The label of this hint. A human readable string or an array of
- /// InlayHintLabelPart label parts.
- ///
- /// *Note* that neither the string nor the label part can be empty.
- pub label: InlayHintLabel,
-
- /// The kind of this hint. Can be omitted in which case the client
- /// should fall back to a reasonable default.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub kind: Option<InlayHintKind>,
-
- /// Optional text edits that are performed when accepting this inlay hint.
- ///
- /// *Note* that edits are expected to change the document so that the inlay
- /// hint (or its nearest variant) is now part of the document and the inlay
- /// hint itself is now obsolete.
- ///
- /// Depending on the client capability `inlayHint.resolveSupport` clients
- /// might resolve this property late using the resolve request.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub text_edits: Option<Vec<TextEdit>>,
-
- /// The tooltip text when you hover over this item.
- ///
- /// Depending on the client capability `inlayHint.resolveSupport` clients
- /// might resolve this property late using the resolve request.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub tooltip: Option<InlayHintTooltip>,
-
- /// Render padding before the hint.
- ///
- /// Note: Padding should use the editor's background color, not the
- /// background color of the hint itself. That means padding can be used
- /// to visually align/separate an inlay hint.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub padding_left: Option<bool>,
-
- /// Render padding after the hint.
- ///
- /// Note: Padding should use the editor's background color, not the
- /// background color of the hint itself. That means padding can be used
- /// to visually align/separate an inlay hint.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub padding_right: Option<bool>,
-
- /// A data entry field that is preserved on a inlay hint between
- /// a `textDocument/inlayHint` and a `inlayHint/resolve` request.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub data: Option<LSPAny>,
-}
-
-#[derive(Debug, Clone, Deserialize, Serialize)]
-#[serde(untagged)]
-pub enum InlayHintLabel {
- String(String),
- LabelParts(Vec<InlayHintLabelPart>),
-}
-
-impl From<String> for InlayHintLabel {
- #[inline]
- fn from(from: String) -> Self {
- Self::String(from)
- }
-}
-
-impl From<Vec<InlayHintLabelPart>> for InlayHintLabel {
- #[inline]
- fn from(from: Vec<InlayHintLabelPart>) -> Self {
- Self::LabelParts(from)
- }
-}
-
-#[derive(Debug, Clone, Deserialize, Serialize)]
-#[serde(untagged)]
-pub enum InlayHintTooltip {
- String(String),
- MarkupContent(MarkupContent),
-}
-
-impl From<String> for InlayHintTooltip {
- #[inline]
- fn from(from: String) -> Self {
- Self::String(from)
- }
-}
-
-impl From<MarkupContent> for InlayHintTooltip {
- #[inline]
- fn from(from: MarkupContent) -> Self {
- Self::MarkupContent(from)
- }
-}
-
-/// An inlay hint label part allows for interactive and composite labels
-/// of inlay hints.
-#[derive(Debug, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct InlayHintLabelPart {
- /// The value of this label part.
- pub value: String,
-
- /// The tooltip text when you hover over this label part. Depending on
- /// the client capability `inlayHint.resolveSupport` clients might resolve
- /// this property late using the resolve request.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub tooltip: Option<InlayHintLabelPartTooltip>,
-
- /// An optional source code location that represents this
- /// label part.
- ///
- /// The editor will use this location for the hover and for code navigation
- /// features: This part will become a clickable link that resolves to the
- /// definition of the symbol at the given location (not necessarily the
- /// location itself), it shows the hover that shows at the given location,
- /// and it shows a context menu with further code navigation commands.
- ///
- /// Depending on the client capability `inlayHint.resolveSupport` clients
- /// might resolve this property late using the resolve request.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub location: Option<Location>,
-
- /// An optional command for this label part.
- ///
- /// Depending on the client capability `inlayHint.resolveSupport` clients
- /// might resolve this property late using the resolve request.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub command: Option<Command>,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(untagged)]
-pub enum InlayHintLabelPartTooltip {
- String(String),
- MarkupContent(MarkupContent),
-}
-
-impl From<String> for InlayHintLabelPartTooltip {
- #[inline]
- fn from(from: String) -> Self {
- Self::String(from)
- }
-}
-
-impl From<MarkupContent> for InlayHintLabelPartTooltip {
- #[inline]
- fn from(from: MarkupContent) -> Self {
- Self::MarkupContent(from)
- }
-}
-
-/// Inlay hint kinds.
-///
-/// @since 3.17.0 - proposed state
-#[derive(Eq, PartialEq, Copy, Clone, Serialize, Deserialize)]
-#[serde(transparent)]
-pub struct InlayHintKind(i32);
-lsp_enum! {
-impl InlayHintKind {
- /// An inlay hint that for a type annotation.
- pub const TYPE: InlayHintKind = InlayHintKind(1);
-
- /// An inlay hint that is for a parameter.
- pub const PARAMETER: InlayHintKind = InlayHintKind(2);
-}
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct InlayHintResolveClientCapabilities {
- pub properties: Vec<String>,
-}
-
-/// Client workspace capabilities specific to inlay hints.
-///
-/// @since 3.17.0 - proposed state
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct InlayHintWorkspaceClientCapabilities {
- /// Whether the client implementation supports a refresh request sent from
- /// the server to the client.
- ///
- /// Note that this event is global and will force the client to refresh all
- /// inlay hints currently shown. It should be used with absolute care and
- /// is useful for situation where a server for example detects a project wide
- /// change that requires such a calculation.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub refresh_support: Option<bool>,
-}
-
-// TODO(sno2): add tests once stabilized
+use crate::{
+ Command, LSPAny, Location, MarkupContent, Position, Range, StaticRegistrationOptions,
+ TextDocumentIdentifier, TextDocumentRegistrationOptions, TextEdit, WorkDoneProgressOptions,
+ WorkDoneProgressParams,
+};
+use serde::{Deserialize, Serialize};
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+#[serde(untagged)]
+pub enum InlayHintServerCapabilities {
+ Options(InlayHintOptions),
+ RegistrationOptions(InlayHintRegistrationOptions),
+}
+
+/// Inlay hint client capabilities.
+///
+/// @since 3.17.0
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct InlayHintClientCapabilities {
+ /// Whether inlay hints support dynamic registration.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub dynamic_registration: Option<bool>,
+
+ /// Indicates which properties a client can resolve lazily on an inlay
+ /// hint.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub resolve_support: Option<InlayHintResolveClientCapabilities>,
+}
+
+/// Inlay hint options used during static registration.
+///
+/// @since 3.17.0
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct InlayHintOptions {
+ #[serde(flatten)]
+ pub work_done_progress_options: WorkDoneProgressOptions,
+
+ /// The server provides support to resolve additional
+ /// information for an inlay hint item.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub resolve_provider: Option<bool>,
+}
+
+/// Inlay hint options used during static or dynamic registration.
+///
+/// @since 3.17.0
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct InlayHintRegistrationOptions {
+ #[serde(flatten)]
+ pub inlay_hint_options: InlayHintOptions,
+
+ #[serde(flatten)]
+ pub text_document_registration_options: TextDocumentRegistrationOptions,
+
+ #[serde(flatten)]
+ pub static_registration_options: StaticRegistrationOptions,
+}
+
+/// A parameter literal used in inlay hint requests.
+///
+/// @since 3.17.0
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct InlayHintParams {
+ #[serde(flatten)]
+ pub work_done_progress_params: WorkDoneProgressParams,
+
+ /// The text document.
+ pub text_document: TextDocumentIdentifier,
+
+ /// The visible document range for which inlay hints should be computed.
+ pub range: Range,
+}
+
+/// Inlay hint information.
+///
+/// @since 3.17.0
+#[derive(Debug, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct InlayHint {
+ /// The position of this hint.
+ pub position: Position,
+
+ /// The label of this hint. A human readable string or an array of
+ /// InlayHintLabelPart label parts.
+ ///
+ /// *Note* that neither the string nor the label part can be empty.
+ pub label: InlayHintLabel,
+
+ /// The kind of this hint. Can be omitted in which case the client
+ /// should fall back to a reasonable default.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub kind: Option<InlayHintKind>,
+
+ /// Optional text edits that are performed when accepting this inlay hint.
+ ///
+ /// *Note* that edits are expected to change the document so that the inlay
+ /// hint (or its nearest variant) is now part of the document and the inlay
+ /// hint itself is now obsolete.
+ ///
+ /// Depending on the client capability `inlayHint.resolveSupport` clients
+ /// might resolve this property late using the resolve request.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub text_edits: Option<Vec<TextEdit>>,
+
+ /// The tooltip text when you hover over this item.
+ ///
+ /// Depending on the client capability `inlayHint.resolveSupport` clients
+ /// might resolve this property late using the resolve request.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub tooltip: Option<InlayHintTooltip>,
+
+ /// Render padding before the hint.
+ ///
+ /// Note: Padding should use the editor's background color, not the
+ /// background color of the hint itself. That means padding can be used
+ /// to visually align/separate an inlay hint.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub padding_left: Option<bool>,
+
+ /// Render padding after the hint.
+ ///
+ /// Note: Padding should use the editor's background color, not the
+ /// background color of the hint itself. That means padding can be used
+ /// to visually align/separate an inlay hint.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub padding_right: Option<bool>,
+
+ /// A data entry field that is preserved on an inlay hint between
+ /// a `textDocument/inlayHint` and an `inlayHint/resolve` request.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub data: Option<LSPAny>,
+}
+
+#[derive(Debug, Clone, Deserialize, Serialize)]
+#[serde(untagged)]
+pub enum InlayHintLabel {
+ String(String),
+ LabelParts(Vec<InlayHintLabelPart>),
+}
+
+impl From<String> for InlayHintLabel {
+ #[inline]
+ fn from(from: String) -> Self {
+ Self::String(from)
+ }
+}
+
+impl From<Vec<InlayHintLabelPart>> for InlayHintLabel {
+ #[inline]
+ fn from(from: Vec<InlayHintLabelPart>) -> Self {
+ Self::LabelParts(from)
+ }
+}
+
+#[derive(Debug, Clone, Deserialize, Serialize)]
+#[serde(untagged)]
+pub enum InlayHintTooltip {
+ String(String),
+ MarkupContent(MarkupContent),
+}
+
+impl From<String> for InlayHintTooltip {
+ #[inline]
+ fn from(from: String) -> Self {
+ Self::String(from)
+ }
+}
+
+impl From<MarkupContent> for InlayHintTooltip {
+ #[inline]
+ fn from(from: MarkupContent) -> Self {
+ Self::MarkupContent(from)
+ }
+}
+
+/// An inlay hint label part allows for interactive and composite labels
+/// of inlay hints.
+#[derive(Debug, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct InlayHintLabelPart {
+ /// The value of this label part.
+ pub value: String,
+
+ /// The tooltip text when you hover over this label part. Depending on
+ /// the client capability `inlayHint.resolveSupport` clients might resolve
+ /// this property late using the resolve request.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub tooltip: Option<InlayHintLabelPartTooltip>,
+
+ /// An optional source code location that represents this
+ /// label part.
+ ///
+ /// The editor will use this location for the hover and for code navigation
+ /// features: this part will become a clickable link that resolves to the
+ /// definition of the symbol at the given location (not necessarily the
+ /// location itself), it shows the hover that would be shown at the given
+ /// location, and it shows a context menu with further code navigation
+ /// commands.
+ ///
+ /// Depending on the client capability `inlayHint.resolveSupport` clients
+ /// might resolve this property late using the resolve request.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub location: Option<Location>,
+
+ /// An optional command for this label part.
+ ///
+ /// Depending on the client capability `inlayHint.resolveSupport` clients
+ /// might resolve this property late using the resolve request.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub command: Option<Command>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(untagged)]
+pub enum InlayHintLabelPartTooltip {
+ String(String),
+ MarkupContent(MarkupContent),
+}
+
+impl From<String> for InlayHintLabelPartTooltip {
+ #[inline]
+ fn from(from: String) -> Self {
+ Self::String(from)
+ }
+}
+
+impl From<MarkupContent> for InlayHintLabelPartTooltip {
+ #[inline]
+ fn from(from: MarkupContent) -> Self {
+ Self::MarkupContent(from)
+ }
+}
+
+/// Inlay hint kinds.
+///
+/// @since 3.17.0
+#[derive(Eq, PartialEq, Copy, Clone, Serialize, Deserialize)]
+#[serde(transparent)]
+pub struct InlayHintKind(i32);
+lsp_enum! {
+impl InlayHintKind {
+ /// An inlay hint that is for a type annotation.
+ pub const TYPE: InlayHintKind = InlayHintKind(1);
+
+ /// An inlay hint that is for a parameter.
+ pub const PARAMETER: InlayHintKind = InlayHintKind(2);
+}
+}
+
+/// Inlay hint client capabilities.
+///
+/// @since 3.17.0
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct InlayHintResolveClientCapabilities {
+ /// The properties that a client can resolve lazily.
+ pub properties: Vec<String>,
+}
+
+/// Client workspace capabilities specific to inlay hints.
+///
+/// @since 3.17.0
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct InlayHintWorkspaceClientCapabilities {
+ /// Whether the client implementation supports a refresh request sent from
+ /// the server to the client.
+ ///
+ /// Note that this event is global and will force the client to refresh all
+ /// inlay hints currently shown. It should be used with absolute care and
+ /// is useful for situations where a server, for example, detects a
+ /// project-wide change that requires such a recalculation.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub refresh_support: Option<bool>,
+}
+
+// TODO(sno2): add tests once stabilized
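Not part of the diff: a minimal usage sketch for the inlay-hint types added above. It assumes `lsp-types` is built with its `proposed` feature (so these items are re-exported at the crate root) and that `serde_json` is available for serialization; field names follow the `InlayHint` struct defined in this file.

```rust
// Hypothetical example (not from the vendored sources): build an inlay hint
// and serialize it the way a `textDocument/inlayHint` response item would look.
use lsp_types::{InlayHint, InlayHintKind, InlayHintLabel, Position};

fn main() {
    let hint = InlayHint {
        position: Position::new(0, 10),
        // A plain string label; `InlayHintLabel::LabelParts` would be used
        // for composite, clickable labels.
        label: InlayHintLabel::String(": i32".to_owned()),
        kind: Some(InlayHintKind::TYPE),
        text_edits: None,
        tooltip: None,
        padding_left: Some(false),
        padding_right: Some(true),
        data: None,
    };

    // Optional fields are marked `skip_serializing_if = "Option::is_none"`,
    // so `None` values are omitted from the JSON payload.
    println!("{}", serde_json::to_string(&hint).unwrap());
}
```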
diff --git a/vendor/lsp-types/src/inline_value.rs b/vendor/lsp-types/src/inline_value.rs
new file mode 100644
index 000000000..35dae188f
--- /dev/null
+++ b/vendor/lsp-types/src/inline_value.rs
@@ -0,0 +1,217 @@
+use crate::{
+ DynamicRegistrationClientCapabilities, Range, StaticRegistrationOptions,
+ TextDocumentIdentifier, TextDocumentRegistrationOptions, WorkDoneProgressOptions,
+ WorkDoneProgressParams,
+};
+use serde::{Deserialize, Serialize};
+
+pub type InlineValueClientCapabilities = DynamicRegistrationClientCapabilities;
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(untagged)]
+pub enum InlineValueServerCapabilities {
+ Options(InlineValueOptions),
+ RegistrationOptions(InlineValueRegistrationOptions),
+}
+
+/// Inline value options used during static registration.
+///
+/// @since 3.17.0
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+pub struct InlineValueOptions {
+ #[serde(flatten)]
+ pub work_done_progress_options: WorkDoneProgressOptions,
+}
+
+/// Inline value options used during static or dynamic registration.
+///
+/// @since 3.17.0
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+pub struct InlineValueRegistrationOptions {
+ #[serde(flatten)]
+ pub inline_value_options: InlineValueOptions,
+
+ #[serde(flatten)]
+ pub text_document_registration_options: TextDocumentRegistrationOptions,
+
+ #[serde(flatten)]
+ pub static_registration_options: StaticRegistrationOptions,
+}
+
+/// A parameter literal used in inline value requests.
+///
+/// @since 3.17.0
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct InlineValueParams {
+ #[serde(flatten)]
+ pub work_done_progress_params: WorkDoneProgressParams,
+
+ /// The text document.
+ pub text_document: TextDocumentIdentifier,
+
+ /// The document range for which inline values should be computed.
+ pub range: Range,
+
+ /// Additional information about the context in which inline values were
+ /// requested.
+ pub context: InlineValueContext,
+}
+
+/// @since 3.17.0
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct InlineValueContext {
+ /// The stack frame (as a DAP Id) where the execution has stopped.
+ pub frame_id: i32,
+
+ /// The document range where execution has stopped.
+ /// Typically the end position of the range denotes the line where the
+ /// inline values are shown.
+ pub stopped_location: Range,
+}
+
+/// Provide inline value as text.
+///
+/// @since 3.17.0
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+pub struct InlineValueText {
+ /// The document range for which the inline value applies.
+ pub range: Range,
+
+ /// The text of the inline value.
+ pub text: String,
+}
+
+/// Provide inline value through a variable lookup.
+///
+/// If only a range is specified, the variable name will be extracted from
+/// the underlying document.
+///
+/// An optional variable name can be used to override the extracted name.
+///
+/// @since 3.17.0
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct InlineValueVariableLookup {
+ /// The document range for which the inline value applies.
+ /// The range is used to extract the variable name from the underlying
+ /// document.
+ pub range: Range,
+
+ /// If specified, the name of the variable to look up.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub variable_name: Option<String>,
+
+ /// How to perform the lookup.
+ pub case_sensitive_lookup: bool,
+}
+
+/// Provide an inline value through an expression evaluation.
+///
+/// If only a range is specified, the expression will be extracted from the
+/// underlying document.
+///
+/// An optional expression can be used to override the extracted expression.
+///
+/// @since 3.17.0
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct InlineValueEvaluatableExpression {
+ /// The document range for which the inline value applies.
+ /// The range is used to extract the evaluatable expression from the
+ /// underlying document.
+ pub range: Range,
+
+ /// If specified, the expression overrides the extracted expression.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub expression: Option<String>,
+}
+
+/// Inline value information can be provided by different means:
+/// - directly as a text value (class InlineValueText)
+/// - as a name to use for a variable lookup (class InlineValueVariableLookup)
+/// - as an evaluatable expression (class InlineValueEvaluatableExpression)
+///
+/// The InlineValue type combines all of these variants into one type.
+///
+/// @since 3.17.0
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(untagged)]
+pub enum InlineValue {
+ Text(InlineValueText),
+ VariableLookup(InlineValueVariableLookup),
+ EvaluatableExpression(InlineValueEvaluatableExpression),
+}
+
+impl From<InlineValueText> for InlineValue {
+ #[inline]
+ fn from(from: InlineValueText) -> Self {
+ Self::Text(from)
+ }
+}
+
+impl From<InlineValueVariableLookup> for InlineValue {
+ #[inline]
+ fn from(from: InlineValueVariableLookup) -> Self {
+ Self::VariableLookup(from)
+ }
+}
+
+impl From<InlineValueEvaluatableExpression> for InlineValue {
+ #[inline]
+ fn from(from: InlineValueEvaluatableExpression) -> Self {
+ Self::EvaluatableExpression(from)
+ }
+}
+
+/// Client workspace capabilities specific to inline values.
+///
+/// @since 3.17.0
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct InlineValueWorkspaceClientCapabilities {
+ /// Whether the client implementation supports a refresh request sent from
+ /// the server to the client.
+ ///
+ /// Note that this event is global and will force the client to refresh all
+ /// inline values currently shown. It should be used with absolute care and
+ /// is useful for situations where a server, for example, detects a
+ /// project-wide change that requires such a recalculation.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub refresh_support: Option<bool>,
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::tests::test_serialization;
+ use crate::Position;
+
+ #[test]
+ fn inline_values() {
+ test_serialization(
+ &InlineValueText {
+ range: Range::new(Position::new(0, 0), Position::new(0, 4)),
+ text: "one".to_owned(),
+ },
+ r#"{"range":{"start":{"line":0,"character":0},"end":{"line":0,"character":4}},"text":"one"}"#,
+ );
+
+ test_serialization(
+ &InlineValue::VariableLookup(InlineValueVariableLookup {
+ range: Range::new(Position::new(1, 0), Position::new(1, 4)),
+ variable_name: None,
+ case_sensitive_lookup: false,
+ }),
+ r#"{"range":{"start":{"line":1,"character":0},"end":{"line":1,"character":4}},"caseSensitiveLookup":false}"#,
+ );
+
+ test_serialization(
+ &InlineValue::EvaluatableExpression(InlineValueEvaluatableExpression {
+ range: Range::new(Position::new(2, 0), Position::new(2, 4)),
+ expression: None,
+ }),
+ r#"{"range":{"start":{"line":2,"character":0},"end":{"line":2,"character":4}}}"#,
+ );
+ }
+}
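Not part of the diff: a short sketch of how a server might assemble a `textDocument/inlineValue` result from the variants above. It assumes the `proposed` feature of `lsp-types` is enabled and that `serde_json` is available for serialization.

```rust
// Hypothetical example (not from the vendored sources): an inline value
// response mixing a literal text value and a variable lookup.
use lsp_types::{InlineValue, InlineValueText, InlineValueVariableLookup, Position, Range};

fn main() {
    let values: Vec<InlineValue> = vec![
        // Render a precomputed string next to the given range.
        InlineValue::from(InlineValueText {
            range: Range::new(Position::new(3, 4), Position::new(3, 9)),
            text: "count = 7".to_owned(),
        }),
        // Let the client extract the variable name from the document range
        // and perform the lookup itself.
        InlineValue::from(InlineValueVariableLookup {
            range: Range::new(Position::new(5, 8), Position::new(5, 13)),
            variable_name: None,
            case_sensitive_lookup: true,
        }),
    ];

    // Each variant serializes untagged, matching the JSON shapes exercised
    // by the tests above.
    println!("{}", serde_json::to_string(&values).unwrap());
}
```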
diff --git a/vendor/lsp-types/src/lib.rs b/vendor/lsp-types/src/lib.rs
index 7469e67a6..027e9ef89 100644
--- a/vendor/lsp-types/src/lib.rs
+++ b/vendor/lsp-types/src/lib.rs
@@ -1,2692 +1,2798 @@
-/*!
-
-Language Server Protocol types for Rust.
-
-Based on: <https://microsoft.github.io/language-server-protocol/specification>
-
-This library uses the URL crate for parsing URIs. Note that there is
-some confusion on the meaning of URLs vs URIs:
-<http://stackoverflow.com/a/28865728/393898>. According to that
-information, on the classical sense of "URLs", "URLs" are a subset of
-URIs, But on the modern/new meaning of URLs, they are the same as
-URIs. The important take-away aspect is that the URL crate should be
-able to parse any URI, such as `urn:isbn:0451450523`.
-
-
-*/
-#![allow(non_upper_case_globals)]
-#[forbid(unsafe_code)]
-#[macro_use]
-extern crate bitflags;
-
-use std::{collections::HashMap, fmt::Debug};
-
-use serde::{de, de::Error as Error_, Deserialize, Serialize};
-use serde_json::Value;
-pub use url::Url;
-
-// Large enough to contain any enumeration name defined in this crate
-type PascalCaseBuf = [u8; 32];
-const fn fmt_pascal_case_const(name: &str) -> (PascalCaseBuf, usize) {
- let mut buf = [0; 32];
- let mut buf_i = 0;
- let mut name_i = 0;
- let name = name.as_bytes();
- while name_i < name.len() {
- let first = name[name_i];
- name_i += 1;
-
- buf[buf_i] = first;
- buf_i += 1;
-
- while name_i < name.len() {
- let rest = name[name_i];
- name_i += 1;
- if rest == b'_' {
- break;
- }
-
- buf[buf_i] = rest.to_ascii_lowercase();
- buf_i += 1;
- }
- }
- (buf, buf_i)
-}
-
-fn fmt_pascal_case(f: &mut std::fmt::Formatter<'_>, name: &str) -> std::fmt::Result {
- for word in name.split('_') {
- let mut chars = word.chars();
- let first = chars.next().unwrap();
- write!(f, "{}", first)?;
- for rest in chars {
- write!(f, "{}", rest.to_lowercase())?;
- }
- }
- Ok(())
-}
-
-macro_rules! lsp_enum {
- (impl $typ: ident { $( $(#[$attr:meta])* pub const $name: ident : $enum_type: ty = $value: expr; )* }) => {
- impl $typ {
- $(
- $(#[$attr])*
- pub const $name: $enum_type = $value;
- )*
- }
-
- impl std::fmt::Debug for $typ {
- fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- match *self {
- $(
- Self::$name => crate::fmt_pascal_case(f, stringify!($name)),
- )*
- _ => write!(f, "{}({})", stringify!($typ), self.0),
- }
- }
- }
-
- impl std::convert::TryFrom<&str> for $typ {
- type Error = &'static str;
- fn try_from(value: &str) -> Result<Self, Self::Error> {
- match () {
- $(
- _ if {
- const X: (crate::PascalCaseBuf, usize) = crate::fmt_pascal_case_const(stringify!($name));
- let (buf, len) = X;
- &buf[..len] == value.as_bytes()
- } => Ok(Self::$name),
- )*
- _ => Err("unknown enum variant"),
- }
- }
- }
-
- }
-}
-
-pub mod error_codes;
-pub mod notification;
-pub mod request;
-
-mod call_hierarchy;
-pub use call_hierarchy::*;
-
-mod code_action;
-pub use code_action::*;
-
-mod code_lens;
-pub use code_lens::*;
-
-mod color;
-pub use color::*;
-
-mod completion;
-pub use completion::*;
-
-mod document_highlight;
-pub use document_highlight::*;
-
-mod document_link;
-pub use document_link::*;
-
-mod document_symbols;
-pub use document_symbols::*;
-
-mod file_operations;
-pub use file_operations::*;
-
-mod folding_range;
-pub use folding_range::*;
-
-mod formatting;
-pub use formatting::*;
-
-mod hover;
-pub use hover::*;
-
-#[cfg(feature = "proposed")]
-mod inlay_hint;
-#[cfg(feature = "proposed")]
-pub use inlay_hint::*;
-
-mod moniker;
-pub use moniker::*;
-
-mod progress;
-pub use progress::*;
-
-mod references;
-pub use references::*;
-
-mod rename;
-pub use rename::*;
-
-pub mod selection_range;
-pub use selection_range::*;
-
-mod semantic_tokens;
-pub use semantic_tokens::*;
-
-mod signature_help;
-pub use signature_help::*;
-
-mod linked_editing;
-pub use linked_editing::*;
-
-mod window;
-pub use window::*;
-
-mod workspace_folders;
-pub use workspace_folders::*;
-
-mod workspace_symbols;
-pub use workspace_symbols::*;
-
-pub mod lsif;
-
-mod trace;
-pub use trace::*;
-
-/* ----------------- Auxiliary types ----------------- */
-
-#[derive(Debug, Eq, Hash, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(untagged)]
-pub enum NumberOrString {
- Number(i32),
- String(String),
-}
-
-/* ----------------- Cancel support ----------------- */
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-pub struct CancelParams {
- /// The request id to cancel.
- pub id: NumberOrString,
-}
-
-/* ----------------- Basic JSON Structures ----------------- */
-
-/// The LSP any type
-///
-/// @since 3.17.0
-#[cfg(feature = "proposed")]
-pub type LSPAny = serde_json::Value;
-
-/// LSP object definition.
-///
-/// @since 3.17.0
-#[cfg(feature = "proposed")]
-pub type LSPObject = serde_json::Map<String, serde_json::Value>;
-
-/// LSP arrays.
-///
-/// @since 3.17.0
-#[cfg(feature = "proposed")]
-pub type LSPArray = Vec<serde_json::Value>;
-
-/// Position in a text document expressed as zero-based line and character offset.
-/// A position is between two characters like an 'insert' cursor in a editor.
-#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Copy, Clone, Default, Deserialize, Serialize)]
-pub struct Position {
- /// Line position in a document (zero-based).
- pub line: u32,
- /// Character offset on a line in a document (zero-based). The meaning of this
- /// offset is determined by the negotiated `PositionEncodingKind`.
- ///
- /// If the character value is greater than the line length it defaults back
- /// to the line length.
- pub character: u32,
-}
-
-impl Position {
- pub fn new(line: u32, character: u32) -> Position {
- Position { line, character }
- }
-}
-
-/// A range in a text document expressed as (zero-based) start and end positions.
-/// A range is comparable to a selection in an editor. Therefore the end position is exclusive.
-#[derive(Debug, Eq, PartialEq, Copy, Clone, Default, Deserialize, Serialize)]
-pub struct Range {
- /// The range's start position.
- pub start: Position,
- /// The range's end position.
- pub end: Position,
-}
-
-impl Range {
- pub fn new(start: Position, end: Position) -> Range {
- Range { start, end }
- }
-}
-
-/// Represents a location inside a resource, such as a line inside a text file.
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-pub struct Location {
- pub uri: Url,
- pub range: Range,
-}
-
-impl Location {
- pub fn new(uri: Url, range: Range) -> Location {
- Location { uri, range }
- }
-}
-
-/// Represents a link between a source and a target location.
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct LocationLink {
- /// Span of the origin of this link.
- ///
- /// Used as the underlined span for mouse interaction. Defaults to the word range at
- /// the mouse position.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub origin_selection_range: Option<Range>,
-
- /// The target resource identifier of this link.
- pub target_uri: Url,
-
- /// The full target range of this link.
- pub target_range: Range,
-
- /// The span of this link.
- pub target_selection_range: Range,
-}
-
-/// A type indicating how positions are encoded,
-/// specifically what column offsets mean.
-///
-/// @since 3.17.0
-#[derive(Debug, Eq, PartialEq, Hash, PartialOrd, Clone, Deserialize, Serialize)]
-#[cfg(feature = "proposed")]
-pub struct PositionEncodingKind(std::borrow::Cow<'static, str>);
-
-#[cfg(feature = "proposed")]
-impl PositionEncodingKind {
- /// Character offsets count UTF-8 code units.
- pub const UTF8: PositionEncodingKind = PositionEncodingKind::new("utf-8");
-
- /// Character offsets count UTF-16 code units.
- ///
- /// This is the default and must always be supported
- /// by servers
- pub const UTF16: PositionEncodingKind = PositionEncodingKind::new("utf-16");
-
- /// Character offsets count UTF-32 code units.
- ///
- /// Implementation note: these are the same as Unicode code points,
- /// so this `PositionEncodingKind` may also be used for an
- /// encoding-agnostic representation of character offsets.
- pub const UTF32: PositionEncodingKind = PositionEncodingKind::new("utf-32");
-
- pub const fn new(tag: &'static str) -> Self {
- PositionEncodingKind(std::borrow::Cow::Borrowed(tag))
- }
-
- pub fn as_str(&self) -> &str {
- &self.0
- }
-}
-
-#[cfg(feature = "proposed")]
-impl From<String> for PositionEncodingKind {
- fn from(from: String) -> Self {
- PositionEncodingKind(std::borrow::Cow::from(from))
- }
-}
-
-#[cfg(feature = "proposed")]
-impl From<&'static str> for PositionEncodingKind {
- fn from(from: &'static str) -> Self {
- PositionEncodingKind::new(from)
- }
-}
-
-/// Represents a diagnostic, such as a compiler error or warning.
-/// Diagnostic objects are only valid in the scope of a resource.
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct Diagnostic {
- /// The range at which the message applies.
- pub range: Range,
-
- /// The diagnostic's severity. Can be omitted. If omitted it is up to the
- /// client to interpret diagnostics as error, warning, info or hint.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub severity: Option<DiagnosticSeverity>,
-
- /// The diagnostic's code. Can be omitted.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub code: Option<NumberOrString>,
-
- /// An optional property to describe the error code.
- ///
- /// since 3.16.0
- #[serde(skip_serializing_if = "Option::is_none")]
- pub code_description: Option<CodeDescription>,
-
- /// A human-readable string describing the source of this
- /// diagnostic, e.g. 'typescript' or 'super lint'.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub source: Option<String>,
-
- /// The diagnostic's message.
- pub message: String,
-
- /// An array of related diagnostic information, e.g. when symbol-names within
- /// a scope collide all definitions can be marked via this property.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub related_information: Option<Vec<DiagnosticRelatedInformation>>,
-
- /// Additional metadata about the diagnostic.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub tags: Option<Vec<DiagnosticTag>>,
-
- /// A data entry field that is preserved between a `textDocument/publishDiagnostics`
- /// notification and `textDocument/codeAction` request.
- ///
- /// since 3.16.0
- #[serde(skip_serializing_if = "Option::is_none")]
- pub data: Option<serde_json::Value>,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct CodeDescription {
- pub href: Url,
-}
-
-impl Diagnostic {
- pub fn new(
- range: Range,
- severity: Option<DiagnosticSeverity>,
- code: Option<NumberOrString>,
- source: Option<String>,
- message: String,
- related_information: Option<Vec<DiagnosticRelatedInformation>>,
- tags: Option<Vec<DiagnosticTag>>,
- ) -> Diagnostic {
- Diagnostic {
- range,
- severity,
- code,
- source,
- message,
- related_information,
- tags,
- ..Diagnostic::default()
- }
- }
-
- pub fn new_simple(range: Range, message: String) -> Diagnostic {
- Self::new(range, None, None, None, message, None, None)
- }
-
- pub fn new_with_code_number(
- range: Range,
- severity: DiagnosticSeverity,
- code_number: i32,
- source: Option<String>,
- message: String,
- ) -> Diagnostic {
- let code = Some(NumberOrString::Number(code_number));
- Self::new(range, Some(severity), code, source, message, None, None)
- }
-}
-
-/// The protocol currently supports the following diagnostic severities:
-#[derive(Eq, PartialEq, Ord, PartialOrd, Clone, Copy, Deserialize, Serialize)]
-#[serde(transparent)]
-pub struct DiagnosticSeverity(i32);
-lsp_enum! {
-impl DiagnosticSeverity {
- /// Reports an error.
- pub const ERROR: DiagnosticSeverity = DiagnosticSeverity(1);
- /// Reports a warning.
- pub const WARNING: DiagnosticSeverity = DiagnosticSeverity(2);
- /// Reports an information.
- pub const INFORMATION: DiagnosticSeverity = DiagnosticSeverity(3);
- /// Reports a hint.
- pub const HINT: DiagnosticSeverity = DiagnosticSeverity(4);
-}
-}
-
-/// Represents a related message and source code location for a diagnostic. This
-/// should be used to point to code locations that cause or related to a
-/// diagnostics, e.g when duplicating a symbol in a scope.
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-pub struct DiagnosticRelatedInformation {
- /// The location of this related diagnostic information.
- pub location: Location,
-
- /// The message of this related diagnostic information.
- pub message: String,
-}
-
-/// The diagnostic tags.
-#[derive(Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(transparent)]
-pub struct DiagnosticTag(i32);
-lsp_enum! {
-impl DiagnosticTag {
- /// Unused or unnecessary code.
- /// Clients are allowed to render diagnostics with this tag faded out instead of having
- /// an error squiggle.
- pub const UNNECESSARY: DiagnosticTag = DiagnosticTag(1);
-
- /// Deprecated or obsolete code.
- /// Clients are allowed to rendered diagnostics with this tag strike through.
- pub const DEPRECATED: DiagnosticTag = DiagnosticTag(2);
-}
-}
-
-/// Represents a reference to a command. Provides a title which will be used to represent a command in the UI.
-/// Commands are identitifed using a string identifier and the protocol currently doesn't specify a set of
-/// well known commands. So executing a command requires some tool extension code.
-#[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)]
-pub struct Command {
- /// Title of the command, like `save`.
- pub title: String,
- /// The identifier of the actual command handler.
- pub command: String,
- /// Arguments that the command handler should be
- /// invoked with.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub arguments: Option<Vec<Value>>,
-}
-
-impl Command {
- pub fn new(title: String, command: String, arguments: Option<Vec<Value>>) -> Command {
- Command {
- title,
- command,
- arguments,
- }
- }
-}
-
-/// A textual edit applicable to a text document.
-///
-/// If n `TextEdit`s are applied to a text document all text edits describe changes to the initial document version.
-/// Execution wise text edits should applied from the bottom to the top of the text document. Overlapping text edits
-/// are not supported.
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct TextEdit {
- /// The range of the text document to be manipulated. To insert
- /// text into a document create a range where start === end.
- pub range: Range,
- /// The string to be inserted. For delete operations use an
- /// empty string.
- pub new_text: String,
-}
-
-impl TextEdit {
- pub fn new(range: Range, new_text: String) -> TextEdit {
- TextEdit { range, new_text }
- }
-}
-
-/// An identifier referring to a change annotation managed by a workspace
-/// edit.
-///
-/// @since 3.16.0.
-pub type ChangeAnnotationIdentifier = String;
-
-/// A special text edit with an additional change annotation.
-///
-/// @since 3.16.0.
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct AnnotatedTextEdit {
- #[serde(flatten)]
- pub text_edit: TextEdit,
-
- /// The actual annotation
- pub annotation_id: ChangeAnnotationIdentifier,
-}
-
-/// Describes textual changes on a single text document. The text document is referred to as a
-/// `OptionalVersionedTextDocumentIdentifier` to allow clients to check the text document version before an
-/// edit is applied. A `TextDocumentEdit` describes all changes on a version Si and after they are
-/// applied move the document to version Si+1. So the creator of a `TextDocumentEdit` doesn't need to
-/// sort the array or do any kind of ordering. However the edits must be non overlapping.
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct TextDocumentEdit {
- /// The text document to change.
- pub text_document: OptionalVersionedTextDocumentIdentifier,
-
- /// The edits to be applied.
- ///
- /// @since 3.16.0 - support for AnnotatedTextEdit. This is guarded by the
- /// client capability `workspace.workspaceEdit.changeAnnotationSupport`
- pub edits: Vec<OneOf<TextEdit, AnnotatedTextEdit>>,
-}
-
-/// Additional information that describes document changes.
-///
-/// @since 3.16.0.
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct ChangeAnnotation {
- /// A human-readable string describing the actual change. The string
- /// is rendered prominent in the user interface.
- pub label: String,
-
- /// A flag which indicates that user confirmation is needed
- /// before applying the change.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub needs_confirmation: Option<bool>,
-
- /// A human-readable string which is rendered less prominent in
- /// the user interface.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub description: Option<String>,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct ChangeAnnotationWorkspaceEditClientCapabilities {
- /// Whether the client groups edits with equal labels into tree nodes,
- /// for instance all edits labelled with "Changes in Strings" would
- /// be a tree node.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub groups_on_label: Option<bool>,
-}
-
-/// Options to create a file.
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct CreateFileOptions {
- /// Overwrite existing file. Overwrite wins over `ignoreIfExists`
- #[serde(skip_serializing_if = "Option::is_none")]
- pub overwrite: Option<bool>,
- /// Ignore if exists.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub ignore_if_exists: Option<bool>,
-}
-
-/// Create file operation
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct CreateFile {
- /// The resource to create.
- pub uri: Url,
- /// Additional options
- #[serde(skip_serializing_if = "Option::is_none")]
- pub options: Option<CreateFileOptions>,
-
- /// An optional annotation identifer describing the operation.
- ///
- /// @since 3.16.0
- #[serde(skip_serializing_if = "Option::is_none")]
- pub annotation_id: Option<ChangeAnnotationIdentifier>,
-}
-
-/// Rename file options
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct RenameFileOptions {
- /// Overwrite target if existing. Overwrite wins over `ignoreIfExists`
- #[serde(skip_serializing_if = "Option::is_none")]
- pub overwrite: Option<bool>,
- /// Ignores if target exists.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub ignore_if_exists: Option<bool>,
-}
-
-/// Rename file operation
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct RenameFile {
- /// The old (existing) location.
- pub old_uri: Url,
- /// The new location.
- pub new_uri: Url,
- /// Rename options.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub options: Option<RenameFileOptions>,
-
- /// An optional annotation identifer describing the operation.
- ///
- /// @since 3.16.0
- #[serde(skip_serializing_if = "Option::is_none")]
- pub annotation_id: Option<ChangeAnnotationIdentifier>,
-}
-
-/// Delete file options
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct DeleteFileOptions {
- /// Delete the content recursively if a folder is denoted.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub recursive: Option<bool>,
- /// Ignore the operation if the file doesn't exist.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub ignore_if_not_exists: Option<bool>,
-
- /// An optional annotation identifer describing the operation.
- ///
- /// @since 3.16.0
- #[serde(skip_serializing_if = "Option::is_none")]
- pub annotation_id: Option<ChangeAnnotationIdentifier>,
-}
-
-/// Delete file operation
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct DeleteFile {
- /// The file to delete.
- pub uri: Url,
- /// Delete options.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub options: Option<DeleteFileOptions>,
-}
-
-/// A workspace edit represents changes to many resources managed in the workspace.
-/// The edit should either provide `changes` or `documentChanges`.
-/// If the client can handle versioned document edits and if `documentChanges` are present,
-/// the latter are preferred over `changes`.
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct WorkspaceEdit {
- /// Holds changes to existing resources.
- #[serde(with = "url_map")]
- #[serde(skip_serializing_if = "Option::is_none")]
- #[serde(default)]
- pub changes: Option<HashMap<Url, Vec<TextEdit>>>, // changes?: { [uri: string]: TextEdit[]; };
-
- /// Depending on the client capability `workspace.workspaceEdit.resourceOperations` document changes
- /// are either an array of `TextDocumentEdit`s to express changes to n different text documents
- /// where each text document edit addresses a specific version of a text document. Or it can contain
- /// above `TextDocumentEdit`s mixed with create, rename and delete file / folder operations.
- ///
- /// Whether a client supports versioned document edits is expressed via
- /// `workspace.workspaceEdit.documentChanges` client capability.
- ///
- /// If a client neither supports `documentChanges` nor `workspace.workspaceEdit.resourceOperations` then
- /// only plain `TextEdit`s using the `changes` property are supported.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub document_changes: Option<DocumentChanges>,
-
- /// A map of change annotations that can be referenced in
- /// `AnnotatedTextEdit`s or create, rename and delete file / folder
- /// operations.
- ///
- /// Whether clients honor this property depends on the client capability
- /// `workspace.changeAnnotationSupport`.
- ///
- /// @since 3.16.0
- ///
- #[serde(skip_serializing_if = "Option::is_none")]
- pub change_annotations: Option<HashMap<ChangeAnnotationIdentifier, ChangeAnnotation>>,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(untagged)]
-pub enum DocumentChanges {
- Edits(Vec<TextDocumentEdit>),
- Operations(Vec<DocumentChangeOperation>),
-}
-
-// TODO: Once https://github.com/serde-rs/serde/issues/912 is solved
-// we can remove ResourceOp and switch to the following implementation
-// of DocumentChangeOperation:
-//
-// #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-// #[serde(tag = "kind", rename_all="lowercase" )]
-// pub enum DocumentChangeOperation {
-// Create(CreateFile),
-// Rename(RenameFile),
-// Delete(DeleteFile),
-//
-// #[serde(other)]
-// Edit(TextDocumentEdit),
-// }
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(untagged, rename_all = "lowercase")]
-pub enum DocumentChangeOperation {
- Op(ResourceOp),
- Edit(TextDocumentEdit),
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(tag = "kind", rename_all = "lowercase")]
-pub enum ResourceOp {
- Create(CreateFile),
- Rename(RenameFile),
- Delete(DeleteFile),
-}
-
-pub type DidChangeConfigurationClientCapabilities = DynamicRegistrationClientCapabilities;
-
-#[derive(Debug, Default, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct ConfigurationParams {
- pub items: Vec<ConfigurationItem>,
-}
-
-#[derive(Debug, Default, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct ConfigurationItem {
- /// The scope to get the configuration section for.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub scope_uri: Option<Url>,
-
- ///The configuration section asked for.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub section: Option<String>,
-}
-
-mod url_map {
- use std::fmt;
-
- use super::*;
-
- pub fn deserialize<'de, D>(
- deserializer: D,
- ) -> Result<Option<HashMap<Url, Vec<TextEdit>>>, D::Error>
- where
- D: serde::Deserializer<'de>,
- {
- struct UrlMapVisitor;
- impl<'de> de::Visitor<'de> for UrlMapVisitor {
- type Value = HashMap<Url, Vec<TextEdit>>;
-
- fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
- formatter.write_str("map")
- }
-
- fn visit_map<M>(self, mut visitor: M) -> Result<Self::Value, M::Error>
- where
- M: de::MapAccess<'de>,
- {
- let mut values = HashMap::with_capacity(visitor.size_hint().unwrap_or(0));
-
- // While there are entries remaining in the input, add them
- // into our map.
- while let Some((key, value)) = visitor.next_entry::<Url, _>()? {
- values.insert(key, value);
- }
-
- Ok(values)
- }
- }
-
- struct OptionUrlMapVisitor;
- impl<'de> de::Visitor<'de> for OptionUrlMapVisitor {
- type Value = Option<HashMap<Url, Vec<TextEdit>>>;
-
- fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
- formatter.write_str("option")
- }
-
- #[inline]
- fn visit_unit<E>(self) -> Result<Self::Value, E>
- where
- E: serde::de::Error,
- {
- Ok(None)
- }
-
- #[inline]
- fn visit_none<E>(self) -> Result<Self::Value, E>
- where
- E: serde::de::Error,
- {
- Ok(None)
- }
-
- #[inline]
- fn visit_some<D>(self, deserializer: D) -> Result<Self::Value, D::Error>
- where
- D: serde::Deserializer<'de>,
- {
- deserializer.deserialize_map(UrlMapVisitor).map(Some)
- }
- }
-
- // Instantiate our Visitor and ask the Deserializer to drive
- // it over the input data, resulting in an instance of MyMap.
- deserializer.deserialize_option(OptionUrlMapVisitor)
- }
-
- pub fn serialize<S>(
- changes: &Option<HashMap<Url, Vec<TextEdit>>>,
- serializer: S,
- ) -> Result<S::Ok, S::Error>
- where
- S: serde::Serializer,
- {
- use serde::ser::SerializeMap;
-
- match *changes {
- Some(ref changes) => {
- let mut map = serializer.serialize_map(Some(changes.len()))?;
- for (k, v) in changes {
- map.serialize_entry(k.as_str(), v)?;
- }
- map.end()
- }
- None => serializer.serialize_none(),
- }
- }
-}
-
-impl WorkspaceEdit {
- pub fn new(changes: HashMap<Url, Vec<TextEdit>>) -> WorkspaceEdit {
- WorkspaceEdit {
- changes: Some(changes),
- document_changes: None,
- ..Default::default()
- }
- }
-}
-
-/// Text documents are identified using a URI. On the protocol level, URIs are passed as strings.
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-pub struct TextDocumentIdentifier {
- // !!!!!! Note:
- // In the spec VersionedTextDocumentIdentifier extends TextDocumentIdentifier
- // This modelled by "mixing-in" TextDocumentIdentifier in VersionedTextDocumentIdentifier,
- // so any changes to this type must be effected in the sub-type as well.
- /// The text document's URI.
- pub uri: Url,
-}
-
-impl TextDocumentIdentifier {
- pub fn new(uri: Url) -> TextDocumentIdentifier {
- TextDocumentIdentifier { uri }
- }
-}
-
-/// An item to transfer a text document from the client to the server.
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct TextDocumentItem {
- /// The text document's URI.
- pub uri: Url,
-
- /// The text document's language identifier.
- pub language_id: String,
-
- /// The version number of this document (it will strictly increase after each
- /// change, including undo/redo).
- pub version: i32,
-
- /// The content of the opened text document.
- pub text: String,
-}
-
-impl TextDocumentItem {
- pub fn new(uri: Url, language_id: String, version: i32, text: String) -> TextDocumentItem {
- TextDocumentItem {
- uri,
- language_id,
- version,
- text,
- }
- }
-}
-
-/// An identifier to denote a specific version of a text document. This information usually flows from the client to the server.
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-pub struct VersionedTextDocumentIdentifier {
- // This field was "mixed-in" from TextDocumentIdentifier
- /// The text document's URI.
- pub uri: Url,
-
- /// The version number of this document.
- ///
- /// The version number of a document will increase after each change,
- /// including undo/redo. The number doesn't need to be consecutive.
- pub version: i32,
-}
-
-impl VersionedTextDocumentIdentifier {
- pub fn new(uri: Url, version: i32) -> VersionedTextDocumentIdentifier {
- VersionedTextDocumentIdentifier { uri, version }
- }
-}
-
-/// An identifier which optionally denotes a specific version of a text document. This information usually flows from the server to the client
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-pub struct OptionalVersionedTextDocumentIdentifier {
- // This field was "mixed-in" from TextDocumentIdentifier
- /// The text document's URI.
- pub uri: Url,
-
- /// The version number of this document. If an optional versioned text document
- /// identifier is sent from the server to the client and the file is not
- /// open in the editor (the server has not received an open notification
- /// before) the server can send `null` to indicate that the version is
- /// known and the content on disk is the master (as specified with document
- /// content ownership).
- ///
- /// The version number of a document will increase after each change,
- /// including undo/redo. The number doesn't need to be consecutive.
- pub version: Option<i32>,
-}
-
-impl OptionalVersionedTextDocumentIdentifier {
- pub fn new(uri: Url, version: i32) -> OptionalVersionedTextDocumentIdentifier {
- OptionalVersionedTextDocumentIdentifier {
- uri,
- version: Some(version),
- }
- }
-}
-
-/// A parameter literal used in requests to pass a text document and a position inside that document.
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct TextDocumentPositionParams {
- // !!!!!! Note:
- // In the spec ReferenceParams extends TextDocumentPositionParams
- // This modelled by "mixing-in" TextDocumentPositionParams in ReferenceParams,
- // so any changes to this type must be effected in sub-type as well.
- /// The text document.
- pub text_document: TextDocumentIdentifier,
-
- /// The position inside the text document.
- pub position: Position,
-}
-
-impl TextDocumentPositionParams {
- pub fn new(
- text_document: TextDocumentIdentifier,
- position: Position,
- ) -> TextDocumentPositionParams {
- TextDocumentPositionParams {
- text_document,
- position,
- }
- }
-}
-
-/// A document filter denotes a document through properties like language, schema or pattern.
-/// Examples are a filter that applies to TypeScript files on disk or a filter the applies to JSON
-/// files with name package.json:
-///
-/// { language: 'typescript', scheme: 'file' }
-/// { language: 'json', pattern: '**/package.json' }
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-pub struct DocumentFilter {
- /// A language id, like `typescript`.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub language: Option<String>,
-
- /// A Uri [scheme](#Uri.scheme), like `file` or `untitled`.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub scheme: Option<String>,
-
- /// A glob pattern, like `*.{ts,js}`.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub pattern: Option<String>,
-}
-
-/// A document selector is the combination of one or many document filters.
-pub type DocumentSelector = Vec<DocumentFilter>;
-
-// ========================= Actual Protocol =========================
-
-#[derive(Debug, PartialEq, Clone, Deserialize, Serialize, Default)]
-#[serde(rename_all = "camelCase")]
-pub struct InitializeParams {
- /// The process Id of the parent process that started
- /// the server. Is null if the process has not been started by another process.
- /// If the parent process is not alive then the server should exit (see exit notification) its process.
- pub process_id: Option<u32>,
-
- /// The rootPath of the workspace. Is null
- /// if no folder is open.
- #[serde(skip_serializing_if = "Option::is_none")]
- #[deprecated(note = "Use `root_uri` instead when possible")]
- pub root_path: Option<String>,
-
- /// The rootUri of the workspace. Is null if no
- /// folder is open. If both `rootPath` and `rootUri` are set
- /// `rootUri` wins.
- ///
- /// Deprecated in favour of `workspaceFolders`
- #[serde(default)]
- pub root_uri: Option<Url>,
-
- /// User provided initialization options.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub initialization_options: Option<Value>,
-
- /// The capabilities provided by the client (editor or tool)
- pub capabilities: ClientCapabilities,
-
- /// The initial trace setting. If omitted trace is disabled ('off').
- #[serde(default)]
- #[serde(skip_serializing_if = "Option::is_none")]
- pub trace: Option<TraceValue>,
-
- /// The workspace folders configured in the client when the server starts.
- /// This property is only available if the client supports workspace folders.
- /// It can be `null` if the client supports workspace folders but none are
- /// configured.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub workspace_folders: Option<Vec<WorkspaceFolder>>,
-
- /// Information about the client.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub client_info: Option<ClientInfo>,
-
- /// The locale the client is currently showing the user interface
- /// in. This must not necessarily be the locale of the operating
- /// system.
- ///
- /// Uses IETF language tags as the value's syntax
- /// (See <https://en.wikipedia.org/wiki/IETF_language_tag>)
- ///
- /// @since 3.16.0
- #[serde(skip_serializing_if = "Option::is_none")]
- pub locale: Option<String>,
-}
-
-#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
-pub struct ClientInfo {
- /// The name of the client as defined by the client.
- pub name: String,
- /// The client's version as defined by the client.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub version: Option<String>,
-}
-
-#[derive(Debug, PartialEq, Clone, Copy, Deserialize, Serialize)]
-pub struct InitializedParams {}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-pub struct GenericRegistrationOptions {
- #[serde(flatten)]
- pub text_document_registration_options: TextDocumentRegistrationOptions,
-
- #[serde(flatten)]
- pub options: GenericOptions,
-
- #[serde(flatten)]
- pub static_registration_options: StaticRegistrationOptions,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-pub struct GenericOptions {
- #[serde(flatten)]
- pub work_done_progress_options: WorkDoneProgressOptions,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-pub struct GenericParams {
- #[serde(flatten)]
- pub text_document_position_params: TextDocumentPositionParams,
-
- #[serde(flatten)]
- pub work_done_progress_params: WorkDoneProgressParams,
-
- #[serde(flatten)]
- pub partial_result_params: PartialResultParams,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Copy, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct DynamicRegistrationClientCapabilities {
- /// This capability supports dynamic registration.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub dynamic_registration: Option<bool>,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Copy, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct GotoCapability {
- #[serde(skip_serializing_if = "Option::is_none")]
- pub dynamic_registration: Option<bool>,
-
- /// The client supports additional metadata in the form of definition links.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub link_support: Option<bool>,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct WorkspaceEditClientCapabilities {
- /// The client supports versioned document changes in `WorkspaceEdit`s
- #[serde(skip_serializing_if = "Option::is_none")]
- pub document_changes: Option<bool>,
-
- /// The resource operations the client supports. Clients should at least
- /// support 'create', 'rename' and 'delete' files and folders.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub resource_operations: Option<Vec<ResourceOperationKind>>,
-
- /// The failure handling strategy of a client if applying the workspace edit
- /// failes.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub failure_handling: Option<FailureHandlingKind>,
-
- /// Whether the client normalizes line endings to the client specific
- /// setting.
- /// If set to `true` the client will normalize line ending characters
- /// in a workspace edit containg to the client specific new line
- /// character.
- ///
- /// @since 3.16.0
- #[serde(skip_serializing_if = "Option::is_none")]
- pub normalizes_line_endings: Option<bool>,
-
- /// Whether the client in general supports change annotations on text edits,
- /// create file, rename file and delete file changes.
- ///
- /// @since 3.16.0
- #[serde(skip_serializing_if = "Option::is_none")]
- pub change_annotation_support: Option<ChangeAnnotationWorkspaceEditClientCapabilities>,
-}
-
-#[derive(Debug, Eq, PartialEq, Deserialize, Serialize, Copy, Clone)]
-#[serde(rename_all = "lowercase")]
-pub enum ResourceOperationKind {
- Create,
- Rename,
- Delete,
-}
-
-#[derive(Debug, Eq, PartialEq, Deserialize, Serialize, Copy, Clone)]
-#[serde(rename_all = "camelCase")]
-pub enum FailureHandlingKind {
- Abort,
- Transactional,
- TextOnlyTransactional,
- Undo,
-}
-
-/// A symbol kind.
-#[derive(Eq, PartialEq, Copy, Clone, Serialize, Deserialize)]
-#[serde(transparent)]
-pub struct SymbolKind(i32);
-lsp_enum! {
-impl SymbolKind {
- pub const FILE: SymbolKind = SymbolKind(1);
- pub const MODULE: SymbolKind = SymbolKind(2);
- pub const NAMESPACE: SymbolKind = SymbolKind(3);
- pub const PACKAGE: SymbolKind = SymbolKind(4);
- pub const CLASS: SymbolKind = SymbolKind(5);
- pub const METHOD: SymbolKind = SymbolKind(6);
- pub const PROPERTY: SymbolKind = SymbolKind(7);
- pub const FIELD: SymbolKind = SymbolKind(8);
- pub const CONSTRUCTOR: SymbolKind = SymbolKind(9);
- pub const ENUM: SymbolKind = SymbolKind(10);
- pub const INTERFACE: SymbolKind = SymbolKind(11);
- pub const FUNCTION: SymbolKind = SymbolKind(12);
- pub const VARIABLE: SymbolKind = SymbolKind(13);
- pub const CONSTANT: SymbolKind = SymbolKind(14);
- pub const STRING: SymbolKind = SymbolKind(15);
- pub const NUMBER: SymbolKind = SymbolKind(16);
- pub const BOOLEAN: SymbolKind = SymbolKind(17);
- pub const ARRAY: SymbolKind = SymbolKind(18);
- pub const OBJECT: SymbolKind = SymbolKind(19);
- pub const KEY: SymbolKind = SymbolKind(20);
- pub const NULL: SymbolKind = SymbolKind(21);
- pub const ENUM_MEMBER: SymbolKind = SymbolKind(22);
- pub const STRUCT: SymbolKind = SymbolKind(23);
- pub const EVENT: SymbolKind = SymbolKind(24);
- pub const OPERATOR: SymbolKind = SymbolKind(25);
- pub const TYPE_PARAMETER: SymbolKind = SymbolKind(26);
-}
-}
-
-/// Specific capabilities for the `SymbolKind` in the `workspace/symbol` request.
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct SymbolKindCapability {
- /// The symbol kind values the client supports. When this
- /// property exists the client also guarantees that it will
- /// handle values outside its set gracefully and falls back
- /// to a default value when unknown.
- ///
- /// If this property is not present the client only supports
- /// the symbol kinds from `File` to `Array` as defined in
- /// the initial version of the protocol.
- pub value_set: Option<Vec<SymbolKind>>,
-}
-
-/// Workspace specific client capabilities.
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct WorkspaceClientCapabilities {
- /// The client supports applying batch edits to the workspace by supporting
- /// the request 'workspace/applyEdit'
- #[serde(skip_serializing_if = "Option::is_none")]
- pub apply_edit: Option<bool>,
-
- /// Capabilities specific to `WorkspaceEdit`s
- #[serde(skip_serializing_if = "Option::is_none")]
- pub workspace_edit: Option<WorkspaceEditClientCapabilities>,
-
- /// Capabilities specific to the `workspace/didChangeConfiguration` notification.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub did_change_configuration: Option<DidChangeConfigurationClientCapabilities>,
-
- /// Capabilities specific to the `workspace/didChangeWatchedFiles` notification.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub did_change_watched_files: Option<DidChangeWatchedFilesClientCapabilities>,
-
- /// Capabilities specific to the `workspace/symbol` request.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub symbol: Option<WorkspaceSymbolClientCapabilities>,
-
- /// Capabilities specific to the `workspace/executeCommand` request.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub execute_command: Option<ExecuteCommandClientCapabilities>,
-
- /// The client has support for workspace folders.
- /// since 3.6.0
- #[serde(skip_serializing_if = "Option::is_none")]
- pub workspace_folders: Option<bool>,
-
- /// The client supports `workspace/configuration` requests.
- /// since 3.6.0
- #[serde(skip_serializing_if = "Option::is_none")]
- pub configuration: Option<bool>,
-
- /// Capabilities specific to the semantic token requests scoped to the workspace.
- /// since 3.16.0
- #[serde(skip_serializing_if = "Option::is_none")]
- pub semantic_tokens: Option<SemanticTokensWorkspaceClientCapabilities>,
-
- /// Capabilities specific to the code lens requests scoped to the workspace.
- /// since 3.16.0
- #[serde(skip_serializing_if = "Option::is_none")]
- pub code_lens: Option<CodeLensWorkspaceClientCapabilities>,
-
- /// The client has support for file requests/notifications.
- /// since 3.16.0
- #[serde(skip_serializing_if = "Option::is_none")]
- pub file_operations: Option<WorkspaceFileOperationsClientCapabilities>,
-
- #[serde(skip_serializing_if = "Option::is_none")]
- #[cfg(feature = "proposed")]
- pub inlay_hint: Option<InlayHintWorkspaceClientCapabilities>,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct TextDocumentSyncClientCapabilities {
- /// Whether text document synchronization supports dynamic registration.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub dynamic_registration: Option<bool>,
-
- /// The client supports sending will save notifications.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub will_save: Option<bool>,
-
- /// The client supports sending a will save request and
- /// waits for a response providing text edits which will
- /// be applied to the document before it is saved.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub will_save_wait_until: Option<bool>,
-
- /// The client supports did save notifications.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub did_save: Option<bool>,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct PublishDiagnosticsClientCapabilities {
- /// Whether the client accepts diagnostics with related information.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub related_information: Option<bool>,
-
- /// Client supports the tag property to provide metadata about a diagnostic.
- /// Clients supporting tags have to handle unknown tags gracefully.
- #[serde(
- default,
- skip_serializing_if = "Option::is_none",
- deserialize_with = "TagSupport::deserialize_compat"
- )]
- pub tag_support: Option<TagSupport<DiagnosticTag>>,
-
- /// Whether the client interprets the version property of the
- /// `textDocument/publishDiagnostics` notification's parameter.
- ///
- /// 3.15.0
- #[serde(skip_serializing_if = "Option::is_none")]
- pub version_support: Option<bool>,
-
- /// Client supports a codeDescription property
- ///
- /// 3.16.0
- #[serde(skip_serializing_if = "Option::is_none")]
- pub code_description_support: Option<bool>,
-
- /// Whether code action supports the `data` property which is
- /// preserved between a `textDocument/publishDiagnostics` and
- /// `textDocument/codeAction` request.
- ///
- /// 3.16.0
- #[serde(skip_serializing_if = "Option::is_none")]
- pub data_support: Option<bool>,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct TagSupport<T> {
- /// The tags supported by the client.
- pub value_set: Vec<T>,
-}
-
-impl<T> TagSupport<T> {
- /// Support for deserializing a boolean `TagSupport`, in case it's present.
- ///
- /// This is currently the case for vscode 1.41.1
- fn deserialize_compat<'de, S>(serializer: S) -> Result<Option<TagSupport<T>>, S::Error>
- where
- S: serde::Deserializer<'de>,
- T: serde::Deserialize<'de>,
- {
- Ok(
- match Option::<Value>::deserialize(serializer).map_err(serde::de::Error::custom)? {
- Some(Value::Bool(false)) => None,
- Some(Value::Bool(true)) => Some(TagSupport { value_set: vec![] }),
- Some(other) => {
- Some(TagSupport::<T>::deserialize(other).map_err(serde::de::Error::custom)?)
- }
- None => None,
- },
- )
- }
-}
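A sketch of the compatibility behavior implemented by `deserialize_compat` above: a bare `true` becomes an empty `value_set`, while `false` becomes `None` (field names as in this diff; the snippet assumes `serde_json` is in scope):

```rust
// Illustrative only.
fn tag_support_compat() {
    let legacy: PublishDiagnosticsClientCapabilities =
        serde_json::from_str(r#"{"tagSupport": true}"#).unwrap();
    assert!(legacy.tag_support.unwrap().value_set.is_empty());

    let disabled: PublishDiagnosticsClientCapabilities =
        serde_json::from_str(r#"{"tagSupport": false}"#).unwrap();
    assert!(disabled.tag_support.is_none());
}
```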
-
-/// Text document specific client capabilities.
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct TextDocumentClientCapabilities {
- #[serde(skip_serializing_if = "Option::is_none")]
- pub synchronization: Option<TextDocumentSyncClientCapabilities>,
- /// Capabilities specific to the `textDocument/completion`
- #[serde(skip_serializing_if = "Option::is_none")]
- pub completion: Option<CompletionClientCapabilities>,
-
- /// Capabilities specific to the `textDocument/hover`
- #[serde(skip_serializing_if = "Option::is_none")]
- pub hover: Option<HoverClientCapabilities>,
-
- /// Capabilities specific to the `textDocument/signatureHelp`
- #[serde(skip_serializing_if = "Option::is_none")]
- pub signature_help: Option<SignatureHelpClientCapabilities>,
-
- /// Capabilities specific to the `textDocument/references`
- #[serde(skip_serializing_if = "Option::is_none")]
- pub references: Option<ReferenceClientCapabilities>,
-
- /// Capabilities specific to the `textDocument/documentHighlight`
- #[serde(skip_serializing_if = "Option::is_none")]
- pub document_highlight: Option<DocumentHighlightClientCapabilities>,
-
- /// Capabilities specific to the `textDocument/documentSymbol`
- #[serde(skip_serializing_if = "Option::is_none")]
- pub document_symbol: Option<DocumentSymbolClientCapabilities>,
- /// Capabilities specific to the `textDocument/formatting`
- #[serde(skip_serializing_if = "Option::is_none")]
- pub formatting: Option<DocumentFormattingClientCapabilities>,
-
- /// Capabilities specific to the `textDocument/rangeFormatting`
- #[serde(skip_serializing_if = "Option::is_none")]
- pub range_formatting: Option<DocumentRangeFormattingClientCapabilities>,
-
- /// Capabilities specific to the `textDocument/onTypeFormatting`
- #[serde(skip_serializing_if = "Option::is_none")]
- pub on_type_formatting: Option<DocumentOnTypeFormattingClientCapabilities>,
-
- /// Capabilities specific to the `textDocument/declaration`
- #[serde(skip_serializing_if = "Option::is_none")]
- pub declaration: Option<GotoCapability>,
-
- /// Capabilities specific to the `textDocument/definition`
- #[serde(skip_serializing_if = "Option::is_none")]
- pub definition: Option<GotoCapability>,
-
- /// Capabilities specific to the `textDocument/typeDefinition`
- #[serde(skip_serializing_if = "Option::is_none")]
- pub type_definition: Option<GotoCapability>,
-
- /// Capabilities specific to the `textDocument/implementation`
- #[serde(skip_serializing_if = "Option::is_none")]
- pub implementation: Option<GotoCapability>,
-
- /// Capabilities specific to the `textDocument/codeAction`
- #[serde(skip_serializing_if = "Option::is_none")]
- pub code_action: Option<CodeActionClientCapabilities>,
-
- /// Capabilities specific to the `textDocument/codeLens`
- #[serde(skip_serializing_if = "Option::is_none")]
- pub code_lens: Option<CodeLensClientCapabilities>,
-
- /// Capabilities specific to the `textDocument/documentLink`
- #[serde(skip_serializing_if = "Option::is_none")]
- pub document_link: Option<DocumentLinkClientCapabilities>,
-
- /// Capabilities specific to the `textDocument/documentColor` and the
- /// `textDocument/colorPresentation` request.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub color_provider: Option<DocumentColorClientCapabilities>,
-
- /// Capabilities specific to the `textDocument/rename`
- #[serde(skip_serializing_if = "Option::is_none")]
- pub rename: Option<RenameClientCapabilities>,
-
- /// Capabilities specific to `textDocument/publishDiagnostics`.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub publish_diagnostics: Option<PublishDiagnosticsClientCapabilities>,
-
- /// Capabilities specific to `textDocument/foldingRange` requests.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub folding_range: Option<FoldingRangeClientCapabilities>,
-
- /// Capabilities specific to the `textDocument/selectionRange` request.
- ///
- /// @since 3.15.0
- #[serde(skip_serializing_if = "Option::is_none")]
- pub selection_range: Option<SelectionRangeClientCapabilities>,
-
- /// Capabilities specific to `textDocument/linkedEditingRange` requests.
- ///
- /// @since 3.16.0
- #[serde(skip_serializing_if = "Option::is_none")]
- pub linked_editing_range: Option<LinkedEditingRangeClientCapabilities>,
-
- /// Capabilities specific to the various call hierarchy requests.
- ///
- /// @since 3.16.0
- #[serde(skip_serializing_if = "Option::is_none")]
- pub call_hierarchy: Option<CallHierarchyClientCapabilities>,
-
- /// Capabilities specific to the `textDocument/semanticTokens/*` requests.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub semantic_tokens: Option<SemanticTokensClientCapabilities>,
-
- /// Capabilities specific to the `textDocument/moniker` request.
- ///
- /// @since 3.16.0
- #[serde(skip_serializing_if = "Option::is_none")]
- pub moniker: Option<MonikerClientCapabilities>,
-
- /// Capabilities specific to the `textDocument/inlayHint` request.
- ///
- /// @since 3.17.0 - proposed state
- #[serde(skip_serializing_if = "Option::is_none")]
- #[cfg(feature = "proposed")]
- pub inlay_hint: Option<InlayHintClientCapabilities>,
-}
-
-/// The capabilities provided by the client (editor or tool).
-#[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct ClientCapabilities {
- /// Workspace specific client capabilities.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub workspace: Option<WorkspaceClientCapabilities>,
-
- /// Text document specific client capabilities.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub text_document: Option<TextDocumentClientCapabilities>,
-
- /// Window specific client capabilities.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub window: Option<WindowClientCapabilities>,
-
- /// General client capabilities.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub general: Option<GeneralClientCapabilities>,
-
- /// Unofficial UTF-8 offsets extension.
- ///
- /// See https://clangd.llvm.org/extensions.html#utf-8-offsets.
- #[serde(skip_serializing_if = "Option::is_none")]
- #[cfg(feature = "proposed")]
- pub offset_encoding: Option<Vec<String>>,
-
- /// Experimental client capabilities.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub experimental: Option<Value>,
-}
-
-#[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct GeneralClientCapabilities {
- /// Client capabilities specific to regular expressions.
- /// @since 3.16.0
- #[serde(skip_serializing_if = "Option::is_none")]
- pub regular_expressions: Option<RegularExpressionsClientCapabilities>,
-
- /// Client capabilities specific to the client's markdown parser.
- /// @since 3.16.0
- #[serde(skip_serializing_if = "Option::is_none")]
- pub markdown: Option<MarkdownClientCapabilities>,
-
- /// @since 3.17.0
- #[cfg(feature = "proposed")]
- #[serde(skip_serializing_if = "Option::is_none")]
- pub stale_request_support: Option<StaleRequestSupportClientCapabilities>,
-
- /// The position encodings supported by the client. Client and server
- /// have to agree on the same position encoding to ensure that offsets
- /// (e.g. character position in a line) are interpreted the same on both
- /// sides.
- ///
- /// To keep the protocol backwards compatible the following applies: if
- /// the value 'utf-16' is missing from the array of position encodings
- /// servers can assume that the client supports UTF-16. UTF-16 is
- /// therefore a mandatory encoding.
- ///
- /// If omitted it defaults to ['utf-16'].
- ///
- /// Implementation considerations: since the conversion from one encoding
- /// into another requires the content of the file / line the conversion
- /// is best done where the file is read which is usually on the server
- /// side.
- ///
- /// @since 3.17.0
- #[cfg(feature = "proposed")]
- #[serde(skip_serializing_if = "Option::is_none")]
- pub position_encodings: Option<Vec<PositionEncodingKind>>,
-}
-
-/// Client capability that signals how the client
-/// handles stale requests (e.g. a request
-/// for which the client will not process the response
-/// anymore since the information is outdated).
-///
-/// @since 3.17.0
-#[cfg(feature = "proposed")]
-#[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct StaleRequestSupportClientCapabilities {
- /// The client will actively cancel the request.
- pub cancel: bool,
-
- /// The list of requests for which the client
- /// will retry the request if it receives a
- /// response with error code `ContentModified`.
- pub retry_on_content_modified: Vec<String>,
-}
-
-#[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct RegularExpressionsClientCapabilities {
- /// The engine's name.
- pub engine: String,
-
- /// The engine's version
- #[serde(skip_serializing_if = "Option::is_none")]
- pub version: Option<String>,
-}
-
-#[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct MarkdownClientCapabilities {
- /// The name of the parser.
- pub parser: String,
-
- /// The version of the parser.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub version: Option<String>,
-}
-
-#[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct InitializeResult {
- /// The capabilities the language server provides.
- pub capabilities: ServerCapabilities,
-
- /// Information about the server.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub server_info: Option<ServerInfo>,
-
- /// Unofficial UTF-8 offsets extension.
- ///
- /// See https://clangd.llvm.org/extensions.html#utf-8-offsets.
- #[serde(skip_serializing_if = "Option::is_none")]
- #[cfg(feature = "proposed")]
- pub offset_encoding: Option<String>,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-pub struct ServerInfo {
- /// The name of the server as defined by the server.
- pub name: String,
- /// The server's version as defined by the server.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub version: Option<String>,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-pub struct InitializeError {
- /// Indicates whether the client should retry to send the
- /// initialize request after showing the message provided
- /// in the ResponseError.
- pub retry: bool,
-}
-
-// The server can signal the following capabilities:
-
-/// Defines how the host (editor) should sync document changes to the language server.
-#[derive(Eq, PartialEq, Clone, Copy, Deserialize, Serialize)]
-#[serde(transparent)]
-pub struct TextDocumentSyncKind(i32);
-lsp_enum! {
-impl TextDocumentSyncKind {
- /// Documents should not be synced at all.
- pub const NONE: TextDocumentSyncKind = TextDocumentSyncKind(0);
-
- /// Documents are synced by always sending the full content of the document.
- pub const FULL: TextDocumentSyncKind = TextDocumentSyncKind(1);
-
- /// Documents are synced by sending the full content on open. After that only
- /// incremental updates to the document are sent.
- pub const INCREMENTAL: TextDocumentSyncKind = TextDocumentSyncKind(2);
-}
-}
-
-pub type ExecuteCommandClientCapabilities = DynamicRegistrationClientCapabilities;
-
-/// Execute command options.
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-pub struct ExecuteCommandOptions {
- /// The commands to be executed on the server
- pub commands: Vec<String>,
-
- #[serde(flatten)]
- pub work_done_progress_options: WorkDoneProgressOptions,
-}
-
-/// Save options.
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct SaveOptions {
- /// The client is supposed to include the content on save.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub include_text: Option<bool>,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(untagged)]
-pub enum TextDocumentSyncSaveOptions {
- Supported(bool),
- SaveOptions(SaveOptions),
-}
-
-impl From<SaveOptions> for TextDocumentSyncSaveOptions {
- fn from(from: SaveOptions) -> Self {
- Self::SaveOptions(from)
- }
-}
-
-impl From<bool> for TextDocumentSyncSaveOptions {
- fn from(from: bool) -> Self {
- Self::Supported(from)
- }
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct TextDocumentSyncOptions {
- /// Open and close notifications are sent to the server.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub open_close: Option<bool>,
-
- /// Change notifications are sent to the server. See TextDocumentSyncKind.None, TextDocumentSyncKind.Full
- /// and TextDocumentSyncKind.Incremental.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub change: Option<TextDocumentSyncKind>,
-
- /// Will save notifications are sent to the server.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub will_save: Option<bool>,
-
- /// Will save wait until requests are sent to the server.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub will_save_wait_until: Option<bool>,
-
- /// Save notifications are sent to the server.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub save: Option<TextDocumentSyncSaveOptions>,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(untagged)]
-pub enum OneOf<A, B> {
- Left(A),
- Right(B),
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(untagged)]
-pub enum TextDocumentSyncCapability {
- Kind(TextDocumentSyncKind),
- Options(TextDocumentSyncOptions),
-}
-
-impl From<TextDocumentSyncOptions> for TextDocumentSyncCapability {
- fn from(from: TextDocumentSyncOptions) -> Self {
- Self::Options(from)
- }
-}
-
-impl From<TextDocumentSyncKind> for TextDocumentSyncCapability {
- fn from(from: TextDocumentSyncKind) -> Self {
- Self::Kind(from)
- }
-}
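Thanks to the untagged enum and the `From` impls above, a server can advertise synchronization either as a bare kind or as full options; a minimal sketch:

```rust
// Illustrative only.
fn sync_capability_examples() -> (TextDocumentSyncCapability, TextDocumentSyncCapability) {
    let simple: TextDocumentSyncCapability = TextDocumentSyncKind::INCREMENTAL.into();
    let detailed: TextDocumentSyncCapability = TextDocumentSyncOptions {
        open_close: Some(true),
        change: Some(TextDocumentSyncKind::INCREMENTAL),
        will_save: None,
        will_save_wait_until: None,
        save: Some(SaveOptions { include_text: Some(false) }.into()),
    }
    .into();
    (simple, detailed)
}
```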
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(untagged)]
-pub enum ImplementationProviderCapability {
- Simple(bool),
- Options(StaticTextDocumentRegistrationOptions),
-}
-
-impl From<StaticTextDocumentRegistrationOptions> for ImplementationProviderCapability {
- fn from(from: StaticTextDocumentRegistrationOptions) -> Self {
- Self::Options(from)
- }
-}
-
-impl From<bool> for ImplementationProviderCapability {
- fn from(from: bool) -> Self {
- Self::Simple(from)
- }
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(untagged)]
-pub enum TypeDefinitionProviderCapability {
- Simple(bool),
- Options(StaticTextDocumentRegistrationOptions),
-}
-
-impl From<StaticTextDocumentRegistrationOptions> for TypeDefinitionProviderCapability {
- fn from(from: StaticTextDocumentRegistrationOptions) -> Self {
- Self::Options(from)
- }
-}
-
-impl From<bool> for TypeDefinitionProviderCapability {
- fn from(from: bool) -> Self {
- Self::Simple(from)
- }
-}
-
-#[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct ServerCapabilities {
- /// The position encoding the server picked from the encodings offered
- /// by the client via the client capability `general.positionEncodings`.
- ///
- /// If the client didn't provide any position encodings the only valid
- /// value that a server can return is 'utf-16'.
- ///
- /// If omitted it defaults to 'utf-16'.
- ///
- /// @since 3.17.0
- #[serde(skip_serializing_if = "Option::is_none")]
- #[cfg(feature = "proposed")]
- pub position_encoding: Option<PositionEncodingKind>,
-
- /// Defines how text documents are synced.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub text_document_sync: Option<TextDocumentSyncCapability>,
-
- /// Capabilities specific to `textDocument/selectionRange` requests.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub selection_range_provider: Option<SelectionRangeProviderCapability>,
-
- /// The server provides hover support.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub hover_provider: Option<HoverProviderCapability>,
-
- /// The server provides completion support.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub completion_provider: Option<CompletionOptions>,
-
- /// The server provides signature help support.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub signature_help_provider: Option<SignatureHelpOptions>,
-
- /// The server provides goto definition support.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub definition_provider: Option<OneOf<bool, DefinitionOptions>>,
-
- /// The server provides goto type definition support.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub type_definition_provider: Option<TypeDefinitionProviderCapability>,
-
- /// The server provides goto implementation support.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub implementation_provider: Option<ImplementationProviderCapability>,
-
- /// The server provides find references support.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub references_provider: Option<OneOf<bool, ReferencesOptions>>,
-
- /// The server provides document highlight support.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub document_highlight_provider: Option<OneOf<bool, DocumentHighlightOptions>>,
-
- /// The server provides document symbol support.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub document_symbol_provider: Option<OneOf<bool, DocumentSymbolOptions>>,
-
- /// The server provides workspace symbol support.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub workspace_symbol_provider: Option<OneOf<bool, WorkspaceSymbolOptions>>,
-
- /// The server provides code actions.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub code_action_provider: Option<CodeActionProviderCapability>,
-
- /// The server provides code lens.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub code_lens_provider: Option<CodeLensOptions>,
-
- /// The server provides document formatting.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub document_formatting_provider: Option<OneOf<bool, DocumentFormattingOptions>>,
-
- /// The server provides document range formatting.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub document_range_formatting_provider: Option<OneOf<bool, DocumentRangeFormattingOptions>>,
-
- /// The server provides document formatting on typing.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub document_on_type_formatting_provider: Option<DocumentOnTypeFormattingOptions>,
-
- /// The server provides rename support.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub rename_provider: Option<OneOf<bool, RenameOptions>>,
-
- /// The server provides document link support.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub document_link_provider: Option<DocumentLinkOptions>,
-
- /// The server provides color provider support.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub color_provider: Option<ColorProviderCapability>,
-
- /// The server provides folding provider support.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub folding_range_provider: Option<FoldingRangeProviderCapability>,
-
- /// The server provides go to declaration support.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub declaration_provider: Option<DeclarationCapability>,
-
- /// The server provides execute command support.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub execute_command_provider: Option<ExecuteCommandOptions>,
-
- /// Workspace specific server capabilities
- #[serde(skip_serializing_if = "Option::is_none")]
- pub workspace: Option<WorkspaceServerCapabilities>,
-
- /// Call hierarchy provider capabilities.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub call_hierarchy_provider: Option<CallHierarchyServerCapability>,
-
- /// Semantic tokens server capabilities.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub semantic_tokens_provider: Option<SemanticTokensServerCapabilities>,
-
- /// Whether server provides moniker support.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub moniker_provider: Option<OneOf<bool, MonikerServerCapabilities>>,
-
- /// The server provides inlay hints.
- ///
- /// @since 3.17.0 - proposed state
- #[serde(skip_serializing_if = "Option::is_none")]
- #[cfg(feature = "proposed")]
- pub inlay_hint_provider: Option<OneOf<bool, InlayHintServerCapabilities>>,
-
- /// The server provides linked editing range support.
- ///
- /// @since 3.16.0
- #[serde(skip_serializing_if = "Option::is_none")]
- pub linked_editing_range_provider: Option<LinkedEditingRangeServerCapabilities>,
-
- /// Experimental server capabilities.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub experimental: Option<Value>,
-}
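Since `ServerCapabilities` derives `Default`, a server only needs to fill in what it actually supports; a sketch using only types visible in this diff:

```rust
// Illustrative only.
fn minimal_server_capabilities() -> ServerCapabilities {
    ServerCapabilities {
        text_document_sync: Some(TextDocumentSyncKind::INCREMENTAL.into()),
        definition_provider: Some(OneOf::Left(true)),
        ..Default::default()
    }
}
```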
-
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct WorkspaceServerCapabilities {
- /// The server supports workspace folders.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub workspace_folders: Option<WorkspaceFoldersServerCapabilities>,
-
- #[serde(skip_serializing_if = "Option::is_none")]
- pub file_operations: Option<WorkspaceFileOperationsServerCapabilities>,
-}
-
-/// General parameters to register for a capability.
-#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct Registration {
- /// The id used to register the request. The id can be used to deregister
- /// the request again.
- pub id: String,
-
- /// The method / capability to register for.
- pub method: String,
-
- /// Options necessary for the registration.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub register_options: Option<Value>,
-}
-
-#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
-pub struct RegistrationParams {
- pub registrations: Vec<Registration>,
-}
-
-/// Since most of the registration options require a document selector, there is a base
-/// interface that can be used.
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct TextDocumentRegistrationOptions {
- /// A document selector to identify the scope of the registration. If set to null
- /// the document selector provided on the client side will be used.
- pub document_selector: Option<DocumentSelector>,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(untagged)]
-pub enum DeclarationCapability {
- Simple(bool),
- RegistrationOptions(DeclarationRegistrationOptions),
- Options(DeclarationOptions),
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct DeclarationRegistrationOptions {
- #[serde(flatten)]
- pub declaration_options: DeclarationOptions,
-
- #[serde(flatten)]
- pub text_document_registration_options: TextDocumentRegistrationOptions,
-
- #[serde(flatten)]
- pub static_registration_options: StaticRegistrationOptions,
-}
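The `#[serde(flatten)]` pattern used here (and by most option/registration structs in this file) merges the nested option structs into a single JSON object; a sketch of what that looks like on the wire (assuming `serde_json` is in scope):

```rust
// Illustrative only.
fn declaration_registration_flattening() {
    let opts = DeclarationRegistrationOptions {
        declaration_options: DeclarationOptions {
            work_done_progress_options: WorkDoneProgressOptions {
                work_done_progress: Some(true),
            },
        },
        text_document_registration_options: TextDocumentRegistrationOptions {
            document_selector: None,
        },
        static_registration_options: StaticRegistrationOptions {
            id: Some("decl-1".into()),
        },
    };
    let json = serde_json::to_value(&opts).unwrap();
    // All three nested structs end up as sibling keys of one object.
    assert_eq!(json["workDoneProgress"], true);
    assert_eq!(json["id"], "decl-1");
}
```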
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct DeclarationOptions {
- #[serde(flatten)]
- pub work_done_progress_options: WorkDoneProgressOptions,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct StaticRegistrationOptions {
- #[serde(skip_serializing_if = "Option::is_none")]
- pub id: Option<String>,
-}
-
-#[derive(Debug, Default, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct WorkDoneProgressOptions {
- #[serde(skip_serializing_if = "Option::is_none")]
- pub work_done_progress: Option<bool>,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct DocumentFormattingOptions {
- #[serde(flatten)]
- pub work_done_progress_options: WorkDoneProgressOptions,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct DocumentRangeFormattingOptions {
- #[serde(flatten)]
- pub work_done_progress_options: WorkDoneProgressOptions,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct DefinitionOptions {
- #[serde(flatten)]
- pub work_done_progress_options: WorkDoneProgressOptions,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct DocumentSymbolOptions {
- /// A human-readable string that is shown when multiple outline trees are
- /// shown for the same document.
- ///
- /// @since 3.16.0
- #[serde(skip_serializing_if = "Option::is_none")]
- pub label: Option<String>,
-
- #[serde(flatten)]
- pub work_done_progress_options: WorkDoneProgressOptions,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct ReferencesOptions {
- #[serde(flatten)]
- pub work_done_progress_options: WorkDoneProgressOptions,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct DocumentHighlightOptions {
- #[serde(flatten)]
- pub work_done_progress_options: WorkDoneProgressOptions,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct WorkspaceSymbolOptions {
- #[serde(flatten)]
- pub work_done_progress_options: WorkDoneProgressOptions,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct StaticTextDocumentRegistrationOptions {
- /// A document selector to identify the scope of the registration. If set to null
- /// the document selector provided on the client side will be used.
- pub document_selector: Option<DocumentSelector>,
-
- #[serde(skip_serializing_if = "Option::is_none")]
- pub id: Option<String>,
-}
-
-/// General parameters to unregister a capability.
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-pub struct Unregistration {
- /// The id used to unregister the request or notification. Usually an id
- /// provided during the register request.
- pub id: String,
-
- /// The method / capability to unregister for.
- pub method: String,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-pub struct UnregistrationParams {
- pub unregisterations: Vec<Unregistration>,
-}
-
-#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
-pub struct DidChangeConfigurationParams {
- /// The actual changed settings
- pub settings: Value,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct DidOpenTextDocumentParams {
- /// The document that was opened.
- pub text_document: TextDocumentItem,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct DidChangeTextDocumentParams {
- /// The document that did change. The version number points
- /// to the version after all provided content changes have
- /// been applied.
- pub text_document: VersionedTextDocumentIdentifier,
- /// The actual content changes.
- pub content_changes: Vec<TextDocumentContentChangeEvent>,
-}
-
-/// An event describing a change to a text document. If range and rangeLength are omitted
-/// the new text is considered to be the full content of the document.
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct TextDocumentContentChangeEvent {
- /// The range of the document that changed.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub range: Option<Range>,
-
- /// The length of the range that got replaced.
- ///
- /// Deprecated: Use range instead
- #[serde(skip_serializing_if = "Option::is_none")]
- pub range_length: Option<u32>,
-
- /// The new text of the document.
- pub text: String,
-}
-
-/// Describe options to be used when registered for text document change events.
-///
-/// Extends TextDocumentRegistrationOptions
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct TextDocumentChangeRegistrationOptions {
- /// A document selector to identify the scope of the registration. If set to null
- /// the document selector provided on the client side will be used.
- pub document_selector: Option<DocumentSelector>,
-
- /// How documents are synced to the server. See TextDocumentSyncKind.Full
- /// and TextDocumentSyncKind.Incremental.
- pub sync_kind: i32,
-}
-
-/// The parameters sent in a will save text document notification.
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct WillSaveTextDocumentParams {
- /// The document that will be saved.
- pub text_document: TextDocumentIdentifier,
-
- /// The 'TextDocumentSaveReason'.
- pub reason: TextDocumentSaveReason,
-}
-
-/// Represents reasons why a text document is saved.
-#[derive(Copy, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(transparent)]
-pub struct TextDocumentSaveReason(i32);
-lsp_enum! {
-impl TextDocumentSaveReason {
- /// Manually triggered, e.g. by the user pressing save, by starting debugging,
- /// or by an API call.
- pub const MANUAL: TextDocumentSaveReason = TextDocumentSaveReason(1);
-
- /// Automatic after a delay.
- pub const AFTER_DELAY: TextDocumentSaveReason = TextDocumentSaveReason(2);
-
- /// When the editor lost focus.
- pub const FOCUS_OUT: TextDocumentSaveReason = TextDocumentSaveReason(3);
-}
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct DidCloseTextDocumentParams {
- /// The document that was closed.
- pub text_document: TextDocumentIdentifier,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct DidSaveTextDocumentParams {
- /// The document that was saved.
- pub text_document: TextDocumentIdentifier,
-
- /// Optionally the content when saved. Depends on the includeText value
- /// when the save notification was requested.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub text: Option<String>,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct TextDocumentSaveRegistrationOptions {
- /// The client is supposed to include the content on save.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub include_text: Option<bool>,
-
- #[serde(flatten)]
- pub text_document_registration_options: TextDocumentRegistrationOptions,
-}
-
-pub type DidChangeWatchedFilesClientCapabilities = DynamicRegistrationClientCapabilities;
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-pub struct DidChangeWatchedFilesParams {
- /// The actual file events.
- pub changes: Vec<FileEvent>,
-}
-
-/// The file event type.
-#[derive(Eq, PartialEq, Copy, Clone, Deserialize, Serialize)]
-#[serde(transparent)]
-pub struct FileChangeType(i32);
-lsp_enum! {
-impl FileChangeType {
- /// The file got created.
- pub const CREATED: FileChangeType = FileChangeType(1);
-
- /// The file got changed.
- pub const CHANGED: FileChangeType = FileChangeType(2);
-
- /// The file got deleted.
- pub const DELETED: FileChangeType = FileChangeType(3);
-}
-}
-
-/// An event describing a file change.
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-pub struct FileEvent {
- /// The file's URI.
- pub uri: Url,
-
- /// The change type.
- #[serde(rename = "type")]
- pub typ: FileChangeType,
-}
-
-impl FileEvent {
- pub fn new(uri: Url, typ: FileChangeType) -> FileEvent {
- FileEvent { uri, typ }
- }
-}
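A sketch of how a client would report a watched-file change with the types above (the path is made up for illustration):

```rust
// Illustrative only.
fn report_changed_file() -> DidChangeWatchedFilesParams {
    DidChangeWatchedFilesParams {
        changes: vec![FileEvent::new(
            Url::parse("file:///tmp/Cargo.toml").unwrap(),
            FileChangeType::CHANGED,
        )],
    }
}
```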
-
-/// Describe options to be used when registered for text document change events.
-#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Deserialize, Serialize)]
-pub struct DidChangeWatchedFilesRegistrationOptions {
- /// The watchers to register.
- pub watchers: Vec<FileSystemWatcher>,
-}
-
-#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct FileSystemWatcher {
- /// The glob pattern to watch
- pub glob_pattern: String,
-
- /// The kind of events of interest. If omitted it defaults to WatchKind.Create |
- /// WatchKind.Change | WatchKind.Delete which is 7.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub kind: Option<WatchKind>,
-}
-
-bitflags! {
-pub struct WatchKind: u8 {
- /// Interested in create events.
- const Create = 1;
- /// Interested in change events
- const Change = 2;
- /// Interested in delete events
- const Delete = 4;
-}
-}
-
-impl<'de> serde::Deserialize<'de> for WatchKind {
- fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
- where
- D: serde::Deserializer<'de>,
- {
- let i = u8::deserialize(deserializer)?;
- WatchKind::from_bits(i).ok_or_else(|| {
- D::Error::invalid_value(de::Unexpected::Unsigned(u64::from(i)), &"Unknown flag")
- })
- }
-}
-
-impl serde::Serialize for WatchKind {
- fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
- where
- S: serde::Serializer,
- {
- serializer.serialize_u8(self.bits())
- }
-}
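Because `WatchKind` is a bitmask with the custom serde impls above, a watcher's `kind` serializes as the combined bit value; a sketch (the glob pattern is illustrative):

```rust
// Illustrative only: Create | Change == 1 | 2 == 3 on the wire.
fn watcher_serialization() {
    let watcher = FileSystemWatcher {
        glob_pattern: "**/*.rs".to_string(),
        kind: Some(WatchKind::Create | WatchKind::Change),
    };
    assert_eq!(
        serde_json::to_string(&watcher).unwrap(),
        r#"{"globPattern":"**/*.rs","kind":3}"#
    );
}
```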
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-pub struct PublishDiagnosticsParams {
- /// The URI for which diagnostic information is reported.
- pub uri: Url,
-
- /// An array of diagnostic information items.
- pub diagnostics: Vec<Diagnostic>,
-
- /// Optionally the version number of the document the diagnostics are published for.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub version: Option<i32>,
-}
-
-impl PublishDiagnosticsParams {
- pub fn new(
- uri: Url,
- diagnostics: Vec<Diagnostic>,
- version: Option<i32>,
- ) -> PublishDiagnosticsParams {
- PublishDiagnosticsParams {
- uri,
- diagnostics,
- version,
- }
- }
-}
-
-#[derive(Debug, Eq, PartialEq, Deserialize, Serialize, Clone)]
-#[serde(untagged)]
-pub enum Documentation {
- String(String),
- MarkupContent(MarkupContent),
-}
-
-/// The marked string is rendered:
-/// - as markdown if it is represented as a string
-/// - as a code block of the given language if it is represented as a pair of a language and a value
-///
-/// The pair of a language and a value is an equivalent to markdown:
-/// ```${language}
-/// ${value}
-/// ```
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(untagged)]
-pub enum MarkedString {
- String(String),
- LanguageString(LanguageString),
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-pub struct LanguageString {
- pub language: String,
- pub value: String,
-}
-
-impl MarkedString {
- pub fn from_markdown(markdown: String) -> MarkedString {
- MarkedString::String(markdown)
- }
-
- pub fn from_language_code(language: String, code_block: String) -> MarkedString {
- MarkedString::LanguageString(LanguageString {
- language,
- value: code_block,
- })
- }
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct GotoDefinitionParams {
- #[serde(flatten)]
- pub text_document_position_params: TextDocumentPositionParams,
-
- #[serde(flatten)]
- pub work_done_progress_params: WorkDoneProgressParams,
-
- #[serde(flatten)]
- pub partial_result_params: PartialResultParams,
-}
-
-/// GotoDefinition response can be single location, or multiple Locations or a link.
-#[derive(Debug, PartialEq, Serialize, Deserialize, Clone)]
-#[serde(untagged)]
-pub enum GotoDefinitionResponse {
- Scalar(Location),
- Array(Vec<Location>),
- Link(Vec<LocationLink>),
-}
-
-impl From<Location> for GotoDefinitionResponse {
- fn from(location: Location) -> Self {
- GotoDefinitionResponse::Scalar(location)
- }
-}
-
-impl From<Vec<Location>> for GotoDefinitionResponse {
- fn from(locations: Vec<Location>) -> Self {
- GotoDefinitionResponse::Array(locations)
- }
-}
-
-impl From<Vec<LocationLink>> for GotoDefinitionResponse {
- fn from(locations: Vec<LocationLink>) -> Self {
- GotoDefinitionResponse::Link(locations)
- }
-}
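A sketch of how a handler can return any of the supported shapes via the `From` impls above (URI and range are illustrative):

```rust
// Illustrative only.
fn goto_definition_shapes() -> (GotoDefinitionResponse, GotoDefinitionResponse) {
    let uri = Url::parse("file:///tmp/main.rs").unwrap();
    let single: GotoDefinitionResponse =
        Location::new(uri, Range::new(Position::new(0, 0), Position::new(0, 4))).into();
    let none_found: GotoDefinitionResponse = Vec::<Location>::new().into();
    (single, none_found)
}
```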
-
-#[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)]
-pub struct ExecuteCommandParams {
- /// The identifier of the actual command handler.
- pub command: String,
- /// Arguments that the command should be invoked with.
- #[serde(default)]
- pub arguments: Vec<Value>,
-
- #[serde(flatten)]
- pub work_done_progress_params: WorkDoneProgressParams,
-}
-
-/// Execute command registration options.
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-pub struct ExecuteCommandRegistrationOptions {
- /// The commands to be executed on the server
- pub commands: Vec<String>,
-
- #[serde(flatten)]
- pub execute_command_options: ExecuteCommandOptions,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct ApplyWorkspaceEditParams {
- /// An optional label of the workspace edit. This label is
- /// presented in the user interface for example on an undo
- /// stack to undo the workspace edit.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub label: Option<String>,
-
- /// The edits to apply.
- pub edit: WorkspaceEdit,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct ApplyWorkspaceEditResponse {
- /// Indicates whether the edit was applied or not.
- pub applied: bool,
-
- /// An optional textual description for why the edit was not applied.
- /// This may be used by the server for diagnostic
- /// logging or to provide a suitable error for a request that
- /// triggered the edit.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub failure_reason: Option<String>,
-
- /// Depending on the client's failure handling strategy `failedChange` might
- /// contain the index of the change that failed. This property is only available
- /// if the client signals a `failureHandlingStrategy` in its client capabilities.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub failed_change: Option<u32>,
-}
-
-/// Describes the content type that a client supports in various
-/// result literals like `Hover`, `ParameterInfo` or `CompletionItem`.
-///
-/// Please note that `MarkupKinds` must not start with a `$`. These kinds
-/// are reserved for internal usage.
-#[derive(Debug, Eq, PartialEq, Deserialize, Serialize, Clone)]
-#[serde(rename_all = "lowercase")]
-pub enum MarkupKind {
- /// Plain text is supported as a content format
- PlainText,
- /// Markdown is supported as a content format
- Markdown,
-}
-
-/// A `MarkupContent` literal represents a string value whose content can be represented in different formats.
-/// Currently `plaintext` and `markdown` are supported formats. A `MarkupContent` is usually used in
-/// documentation properties of result literals like `CompletionItem` or `SignatureInformation`.
-/// If the format is `markdown` the content should follow the [GitHub Flavored Markdown Specification](https://github.github.com/gfm/).
-///
-/// Here is an example how such a string can be constructed using JavaScript / TypeScript:
-/// ```ignore
-/// let markdown: MarkupContent = {
-/// kind: MarkupKind::Markdown,
-/// value: [
-/// "# Header",
-/// "Some text",
-/// "```typescript",
-/// "someCode();",
-/// "```"
-/// ]
-/// .join("\n"),
-/// };
-/// ```
-///
-/// *Please note* that clients might sanitize the returned markdown. A client could decide to
-/// remove HTML from the markdown to avoid script execution.
-#[derive(Debug, Eq, PartialEq, Deserialize, Serialize, Clone)]
-pub struct MarkupContent {
- pub kind: MarkupKind,
- pub value: String,
-}
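For reference, a Rust equivalent of the TypeScript snippet in the doc comment above (the contents are illustrative, and the nested code fence is replaced by plain lines to keep the example short):

```rust
// Illustrative only.
fn example_markup() -> MarkupContent {
    MarkupContent {
        kind: MarkupKind::Markdown,
        value: ["# Header", "Some text", "- a list item"].join("\n"),
    }
}
```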
-
-/// A parameter literal used to pass a partial result token.
-#[derive(Debug, Eq, PartialEq, Default, Deserialize, Serialize, Clone)]
-#[serde(rename_all = "camelCase")]
-pub struct PartialResultParams {
- #[serde(skip_serializing_if = "Option::is_none")]
- pub partial_result_token: Option<ProgressToken>,
-}
-
-/// Symbol tags are extra annotations that tweak the rendering of a symbol.
-/// Since 3.15
-#[derive(Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(transparent)]
-pub struct SymbolTag(i32);
-lsp_enum! {
-impl SymbolTag {
- /// Render a symbol as obsolete, usually using a strike-out.
- pub const DEPRECATED: SymbolTag = SymbolTag(1);
-}
-}
-
-#[cfg(test)]
-mod tests {
- use serde::{Deserialize, Serialize};
-
- use super::*;
-
- pub(crate) fn test_serialization<SER>(ms: &SER, expected: &str)
- where
- SER: Serialize + for<'de> Deserialize<'de> + PartialEq + std::fmt::Debug,
- {
- let json_str = serde_json::to_string(ms).unwrap();
- assert_eq!(&json_str, expected);
- let deserialized: SER = serde_json::from_str(&json_str).unwrap();
- assert_eq!(&deserialized, ms);
- }
-
- pub(crate) fn test_deserialization<T>(json: &str, expected: &T)
- where
- T: for<'de> Deserialize<'de> + PartialEq + std::fmt::Debug,
- {
- let value = serde_json::from_str::<T>(json).unwrap();
- assert_eq!(&value, expected);
- }
-
- #[test]
- fn one_of() {
- test_serialization(&OneOf::<bool, ()>::Left(true), r#"true"#);
- test_serialization(&OneOf::<String, ()>::Left("abcd".into()), r#""abcd""#);
- test_serialization(
- &OneOf::<String, WorkDoneProgressOptions>::Right(WorkDoneProgressOptions {
- work_done_progress: Some(false),
- }),
- r#"{"workDoneProgress":false}"#,
- );
- }
-
- #[test]
- fn number_or_string() {
- test_serialization(&NumberOrString::Number(123), r#"123"#);
-
- test_serialization(&NumberOrString::String("abcd".into()), r#""abcd""#);
- }
-
- #[test]
- fn marked_string() {
- test_serialization(&MarkedString::from_markdown("xxx".into()), r#""xxx""#);
-
- test_serialization(
- &MarkedString::from_language_code("lang".into(), "code".into()),
- r#"{"language":"lang","value":"code"}"#,
- );
- }
-
- #[test]
- fn language_string() {
- test_serialization(
- &LanguageString {
- language: "LL".into(),
- value: "VV".into(),
- },
- r#"{"language":"LL","value":"VV"}"#,
- );
- }
-
- #[test]
- fn workspace_edit() {
- test_serialization(
- &WorkspaceEdit {
- changes: Some(vec![].into_iter().collect()),
- document_changes: None,
- ..Default::default()
- },
- r#"{"changes":{}}"#,
- );
-
- test_serialization(
- &WorkspaceEdit {
- changes: None,
- document_changes: None,
- ..Default::default()
- },
- r#"{}"#,
- );
-
- test_serialization(
- &WorkspaceEdit {
- changes: Some(
- vec![(Url::parse("file://test").unwrap(), vec![])]
- .into_iter()
- .collect(),
- ),
- document_changes: None,
- ..Default::default()
- },
- r#"{"changes":{"file://test/":[]}}"#,
- );
- }
-
- #[test]
- fn root_uri_can_be_missing() {
- serde_json::from_str::<InitializeParams>(r#"{ "capabilities": {} }"#).unwrap();
- }
-
- #[test]
- fn test_watch_kind() {
- test_serialization(&WatchKind::Create, "1");
- test_serialization(&(WatchKind::Create | WatchKind::Change), "3");
- test_serialization(
- &(WatchKind::Create | WatchKind::Change | WatchKind::Delete),
- "7",
- );
- }
-
- #[test]
- fn test_resource_operation_kind() {
- test_serialization(
- &vec![
- ResourceOperationKind::Create,
- ResourceOperationKind::Rename,
- ResourceOperationKind::Delete,
- ],
- r#"["create","rename","delete"]"#,
- );
- }
-}
+/*!
+
+Language Server Protocol types for Rust.
+
+Based on: <https://microsoft.github.io/language-server-protocol/specification>
+
+This library uses the URL crate for parsing URIs. Note that there is
+some confusion on the meaning of URLs vs URIs:
+<http://stackoverflow.com/a/28865728/393898>. According to that
+information, in the classical sense "URLs" are a subset of URIs, but in
+the modern meaning of the term they are the same as URIs. The important
+take-away is that the URL crate should be able to parse any URI, such
+as `urn:isbn:0451450523`.
+
+
+*/
+#![allow(non_upper_case_globals)]
+#[forbid(unsafe_code)]
+#[macro_use]
+extern crate bitflags;
+
+use std::{collections::HashMap, fmt::Debug};
+
+use serde::{de, de::Error as Error_, Deserialize, Serialize};
+use serde_json::Value;
+pub use url::Url;
+
+// Large enough to contain any enumeration name defined in this crate
+type PascalCaseBuf = [u8; 32];
+const fn fmt_pascal_case_const(name: &str) -> (PascalCaseBuf, usize) {
+ let mut buf = [0; 32];
+ let mut buf_i = 0;
+ let mut name_i = 0;
+ let name = name.as_bytes();
+ while name_i < name.len() {
+ let first = name[name_i];
+ name_i += 1;
+
+ buf[buf_i] = first;
+ buf_i += 1;
+
+ while name_i < name.len() {
+ let rest = name[name_i];
+ name_i += 1;
+ if rest == b'_' {
+ break;
+ }
+
+ buf[buf_i] = rest.to_ascii_lowercase();
+ buf_i += 1;
+ }
+ }
+ (buf, buf_i)
+}
+
+fn fmt_pascal_case(f: &mut std::fmt::Formatter<'_>, name: &str) -> std::fmt::Result {
+ for word in name.split('_') {
+ let mut chars = word.chars();
+ let first = chars.next().unwrap();
+ write!(f, "{}", first)?;
+ for rest in chars {
+ write!(f, "{}", rest.to_lowercase())?;
+ }
+ }
+ Ok(())
+}
+
+macro_rules! lsp_enum {
+ (impl $typ: ident { $( $(#[$attr:meta])* pub const $name: ident : $enum_type: ty = $value: expr; )* }) => {
+ impl $typ {
+ $(
+ $(#[$attr])*
+ pub const $name: $enum_type = $value;
+ )*
+ }
+
+ impl std::fmt::Debug for $typ {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ match *self {
+ $(
+ Self::$name => crate::fmt_pascal_case(f, stringify!($name)),
+ )*
+ _ => write!(f, "{}({})", stringify!($typ), self.0),
+ }
+ }
+ }
+
+ impl std::convert::TryFrom<&str> for $typ {
+ type Error = &'static str;
+ fn try_from(value: &str) -> Result<Self, Self::Error> {
+ match () {
+ $(
+ _ if {
+ const X: (crate::PascalCaseBuf, usize) = crate::fmt_pascal_case_const(stringify!($name));
+ let (buf, len) = X;
+ &buf[..len] == value.as_bytes()
+ } => Ok(Self::$name),
+ )*
+ _ => Err("unknown enum variant"),
+ }
+ }
+ }
+
+ }
+}
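A hypothetical, crate-internal sketch of what `lsp_enum!` generates for a transparent newtype: PascalCase `Debug` output, a numeric fallback for unknown values, and `TryFrom<&str>` parsing. The `DemoKind` type exists only for illustration, and the invocation only compiles inside this crate because the macro and its helpers are private:

```rust
use std::convert::TryFrom;

// Hypothetical type, for illustration only.
#[derive(PartialEq, Eq, Clone, Copy)]
struct DemoKind(i32);
lsp_enum! {
impl DemoKind {
    pub const QUICK_FIX: DemoKind = DemoKind(1);
}
}

fn demo_kind_behavior() {
    assert_eq!(format!("{:?}", DemoKind::QUICK_FIX), "QuickFix");
    assert_eq!(format!("{:?}", DemoKind(42)), "DemoKind(42)");
    assert_eq!(DemoKind::try_from("QuickFix"), Ok(DemoKind::QUICK_FIX));
    assert!(DemoKind::try_from("Unknown").is_err());
}
```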
+
+pub mod error_codes;
+pub mod notification;
+pub mod request;
+
+mod call_hierarchy;
+pub use call_hierarchy::*;
+
+mod code_action;
+pub use code_action::*;
+
+mod code_lens;
+pub use code_lens::*;
+
+mod color;
+pub use color::*;
+
+mod completion;
+pub use completion::*;
+
+mod document_highlight;
+pub use document_highlight::*;
+
+mod document_link;
+pub use document_link::*;
+
+mod document_symbols;
+pub use document_symbols::*;
+
+mod file_operations;
+pub use file_operations::*;
+
+mod folding_range;
+pub use folding_range::*;
+
+mod formatting;
+pub use formatting::*;
+
+mod hover;
+pub use hover::*;
+
+mod inlay_hint;
+pub use inlay_hint::*;
+
+mod inline_value;
+pub use inline_value::*;
+
+mod moniker;
+pub use moniker::*;
+
+mod progress;
+pub use progress::*;
+
+mod references;
+pub use references::*;
+
+mod rename;
+pub use rename::*;
+
+pub mod selection_range;
+pub use selection_range::*;
+
+mod semantic_tokens;
+pub use semantic_tokens::*;
+
+mod signature_help;
+pub use signature_help::*;
+
+mod type_hierarchy;
+pub use type_hierarchy::*;
+
+mod linked_editing;
+pub use linked_editing::*;
+
+mod window;
+pub use window::*;
+
+mod workspace_folders;
+pub use workspace_folders::*;
+
+mod workspace_symbols;
+pub use workspace_symbols::*;
+
+pub mod lsif;
+
+mod trace;
+pub use trace::*;
+
+/* ----------------- Auxiliary types ----------------- */
+
+#[derive(Debug, Eq, Hash, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(untagged)]
+pub enum NumberOrString {
+ Number(i32),
+ String(String),
+}
+
+/* ----------------- Cancel support ----------------- */
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+pub struct CancelParams {
+ /// The request id to cancel.
+ pub id: NumberOrString,
+}
+
+/* ----------------- Basic JSON Structures ----------------- */
+
+/// The LSP any type
+///
+/// @since 3.17.0
+pub type LSPAny = serde_json::Value;
+
+/// LSP object definition.
+///
+/// @since 3.17.0
+pub type LSPObject = serde_json::Map<String, serde_json::Value>;
+
+/// LSP arrays.
+///
+/// @since 3.17.0
+pub type LSPArray = Vec<serde_json::Value>;
+
+/// Position in a text document expressed as zero-based line and character offset.
+/// A position is between two characters like an 'insert' cursor in an editor.
+#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Copy, Clone, Default, Deserialize, Serialize)]
+pub struct Position {
+ /// Line position in a document (zero-based).
+ pub line: u32,
+ /// Character offset on a line in a document (zero-based). The meaning of this
+ /// offset is determined by the negotiated `PositionEncodingKind`.
+ ///
+ /// If the character value is greater than the line length it defaults back
+ /// to the line length.
+ pub character: u32,
+}
+
+impl Position {
+ pub fn new(line: u32, character: u32) -> Position {
+ Position { line, character }
+ }
+}
+
+/// A range in a text document expressed as (zero-based) start and end positions.
+/// A range is comparable to a selection in an editor. Therefore the end position is exclusive.
+#[derive(Debug, Eq, PartialEq, Copy, Clone, Default, Deserialize, Serialize)]
+pub struct Range {
+ /// The range's start position.
+ pub start: Position,
+ /// The range's end position.
+ pub end: Position,
+}
+
+impl Range {
+ pub fn new(start: Position, end: Position) -> Range {
+ Range { start, end }
+ }
+}
+
+/// Represents a location inside a resource, such as a line inside a text file.
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+pub struct Location {
+ pub uri: Url,
+ pub range: Range,
+}
+
+impl Location {
+ pub fn new(uri: Url, range: Range) -> Location {
+ Location { uri, range }
+ }
+}
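A quick sketch of the zero-based, end-exclusive conventions above (the helper is hypothetical):

```rust
// Illustrative only: an end-exclusive range covering the first two characters
// of the first line of the given document.
fn first_two_chars(uri: Url) -> Location {
    Location::new(uri, Range::new(Position::new(0, 0), Position::new(0, 2)))
}
```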
+
+/// Represents a link between a source and a target location.
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct LocationLink {
+ /// Span of the origin of this link.
+ ///
+ /// Used as the underlined span for mouse interaction. Defaults to the word range at
+ /// the mouse position.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub origin_selection_range: Option<Range>,
+
+ /// The target resource identifier of this link.
+ pub target_uri: Url,
+
+ /// The full target range of this link.
+ pub target_range: Range,
+
+ /// The span of this link.
+ pub target_selection_range: Range,
+}
+
+/// A type indicating how positions are encoded,
+/// specifically what column offsets mean.
+///
+/// @since 3.17.0
+#[derive(Debug, Eq, PartialEq, Hash, PartialOrd, Clone, Deserialize, Serialize)]
+pub struct PositionEncodingKind(std::borrow::Cow<'static, str>);
+
+impl PositionEncodingKind {
+ /// Character offsets count UTF-8 code units.
+ pub const UTF8: PositionEncodingKind = PositionEncodingKind::new("utf-8");
+
+ /// Character offsets count UTF-16 code units.
+ ///
+ /// This is the default and must always be supported
+ /// by servers.
+ pub const UTF16: PositionEncodingKind = PositionEncodingKind::new("utf-16");
+
+ /// Character offsets count UTF-32 code units.
+ ///
+ /// Implementation note: these are the same as Unicode code points,
+ /// so this `PositionEncodingKind` may also be used for an
+ /// encoding-agnostic representation of character offsets.
+ pub const UTF32: PositionEncodingKind = PositionEncodingKind::new("utf-32");
+
+ pub const fn new(tag: &'static str) -> Self {
+ PositionEncodingKind(std::borrow::Cow::Borrowed(tag))
+ }
+
+ pub fn as_str(&self) -> &str {
+ &self.0
+ }
+}
+
+impl From<String> for PositionEncodingKind {
+ fn from(from: String) -> Self {
+ PositionEncodingKind(std::borrow::Cow::from(from))
+ }
+}
+
+impl From<&'static str> for PositionEncodingKind {
+ fn from(from: &'static str) -> Self {
+ PositionEncodingKind::new(from)
+ }
+}
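+
+// Illustrative sketch: choosing a position encoding from the kinds a client
+// offers, falling back to UTF-16, which servers must always support.
+// `example_negotiate_encoding` is a hypothetical helper, not crate API.
+#[allow(dead_code)]
+fn example_negotiate_encoding(client_offers: &[PositionEncodingKind]) -> PositionEncodingKind {
+ if client_offers.contains(&PositionEncodingKind::UTF8) {
+ PositionEncodingKind::UTF8
+ } else {
+ PositionEncodingKind::UTF16
+ }
+}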
+
+/// Represents a diagnostic, such as a compiler error or warning.
+/// Diagnostic objects are only valid in the scope of a resource.
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct Diagnostic {
+ /// The range at which the message applies.
+ pub range: Range,
+
+ /// The diagnostic's severity. Can be omitted. If omitted it is up to the
+ /// client to interpret diagnostics as error, warning, info or hint.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub severity: Option<DiagnosticSeverity>,
+
+ /// The diagnostic's code. Can be omitted.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub code: Option<NumberOrString>,
+
+ /// An optional property to describe the error code.
+ ///
+ /// since 3.16.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub code_description: Option<CodeDescription>,
+
+ /// A human-readable string describing the source of this
+ /// diagnostic, e.g. 'typescript' or 'super lint'.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub source: Option<String>,
+
+ /// The diagnostic's message.
+ pub message: String,
+
+ /// An array of related diagnostic information, e.g. when symbol-names within
+ /// a scope collide all definitions can be marked via this property.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub related_information: Option<Vec<DiagnosticRelatedInformation>>,
+
+ /// Additional metadata about the diagnostic.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub tags: Option<Vec<DiagnosticTag>>,
+
+ /// A data entry field that is preserved between a `textDocument/publishDiagnostics`
+ /// notification and `textDocument/codeAction` request.
+ ///
+ /// since 3.16.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub data: Option<serde_json::Value>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct CodeDescription {
+ pub href: Url,
+}
+
+impl Diagnostic {
+ pub fn new(
+ range: Range,
+ severity: Option<DiagnosticSeverity>,
+ code: Option<NumberOrString>,
+ source: Option<String>,
+ message: String,
+ related_information: Option<Vec<DiagnosticRelatedInformation>>,
+ tags: Option<Vec<DiagnosticTag>>,
+ ) -> Diagnostic {
+ Diagnostic {
+ range,
+ severity,
+ code,
+ source,
+ message,
+ related_information,
+ tags,
+ ..Diagnostic::default()
+ }
+ }
+
+ pub fn new_simple(range: Range, message: String) -> Diagnostic {
+ Self::new(range, None, None, None, message, None, None)
+ }
+
+ pub fn new_with_code_number(
+ range: Range,
+ severity: DiagnosticSeverity,
+ code_number: i32,
+ source: Option<String>,
+ message: String,
+ ) -> Diagnostic {
+ let code = Some(NumberOrString::Number(code_number));
+ Self::new(range, Some(severity), code, source, message, None, None)
+ }
+}
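+
+// Illustrative sketch: the two convenience constructors above in action. The
+// numeric code and source strings are made up for the example.
+#[allow(dead_code)]
+fn example_diagnostics(range: Range) -> (Diagnostic, Diagnostic) {
+ let simple = Diagnostic::new_simple(range, "unexpected token".to_string());
+ let coded = Diagnostic::new_with_code_number(
+ range,
+ DiagnosticSeverity::WARNING,
+ 1234,
+ Some("super lint".to_string()),
+ "unused variable".to_string(),
+ );
+ (simple, coded)
+}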
+
+/// The protocol currently supports the following diagnostic severities:
+#[derive(Eq, PartialEq, Ord, PartialOrd, Clone, Copy, Deserialize, Serialize)]
+#[serde(transparent)]
+pub struct DiagnosticSeverity(i32);
+lsp_enum! {
+impl DiagnosticSeverity {
+ /// Reports an error.
+ pub const ERROR: DiagnosticSeverity = DiagnosticSeverity(1);
+ /// Reports a warning.
+ pub const WARNING: DiagnosticSeverity = DiagnosticSeverity(2);
+ /// Reports information.
+ pub const INFORMATION: DiagnosticSeverity = DiagnosticSeverity(3);
+ /// Reports a hint.
+ pub const HINT: DiagnosticSeverity = DiagnosticSeverity(4);
+}
+}
+
+/// Represents a related message and source code location for a diagnostic. This
+/// should be used to point to code locations that cause or are related to a
+/// diagnostic, e.g. when duplicating a symbol in a scope.
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+pub struct DiagnosticRelatedInformation {
+ /// The location of this related diagnostic information.
+ pub location: Location,
+
+ /// The message of this related diagnostic information.
+ pub message: String,
+}
+
+/// The diagnostic tags.
+#[derive(Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(transparent)]
+pub struct DiagnosticTag(i32);
+lsp_enum! {
+impl DiagnosticTag {
+ /// Unused or unnecessary code.
+ /// Clients are allowed to render diagnostics with this tag faded out instead of having
+ /// an error squiggle.
+ pub const UNNECESSARY: DiagnosticTag = DiagnosticTag(1);
+
+ /// Deprecated or obsolete code.
+ /// Clients are allowed to render diagnostics with this tag struck through.
+ pub const DEPRECATED: DiagnosticTag = DiagnosticTag(2);
+}
+}
+
+/// Represents a reference to a command. Provides a title which will be used to represent a command in the UI.
+/// Commands are identified using a string identifier, and the protocol currently doesn't specify a set of
+/// well-known commands. So executing a command requires some tool extension code.
+#[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)]
+pub struct Command {
+ /// Title of the command, like `save`.
+ pub title: String,
+ /// The identifier of the actual command handler.
+ pub command: String,
+ /// Arguments that the command handler should be
+ /// invoked with.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub arguments: Option<Vec<Value>>,
+}
+
+impl Command {
+ pub fn new(title: String, command: String, arguments: Option<Vec<Value>>) -> Command {
+ Command {
+ title,
+ command,
+ arguments,
+ }
+ }
+}
+
+/// A textual edit applicable to a text document.
+///
+/// If n `TextEdit`s are applied to a text document, all text edits describe changes to the initial document version.
+/// Execution-wise, text edits should be applied from the bottom to the top of the text document. Overlapping text edits
+/// are not supported.
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct TextEdit {
+ /// The range of the text document to be manipulated. To insert
+ /// text into a document create a range where start === end.
+ pub range: Range,
+ /// The string to be inserted. For delete operations use an
+ /// empty string.
+ pub new_text: String,
+}
+
+impl TextEdit {
+ pub fn new(range: Range, new_text: String) -> TextEdit {
+ TextEdit { range, new_text }
+ }
+}
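+
+// Illustrative sketch (not crate API): apply single-line `TextEdit`s to an ASCII
+// string, walking the edits bottom-to-top as the doc comment above requires so
+// that earlier edits do not shift the ranges of later ones. Assumes all ranges
+// lie on line 0 and that character offsets equal byte offsets (ASCII only).
+#[allow(dead_code)]
+fn example_apply_single_line_edits(text: &str, mut edits: Vec<TextEdit>) -> String {
+ edits.sort_by_key(|edit| edit.range.start);
+ let mut result = text.to_string();
+ for edit in edits.iter().rev() {
+ let start = edit.range.start.character as usize;
+ let end = edit.range.end.character as usize;
+ result.replace_range(start..end, &edit.new_text);
+ }
+ result
+}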
+
+/// An identifier referring to a change annotation managed by a workspace
+/// edit.
+///
+/// @since 3.16.0.
+pub type ChangeAnnotationIdentifier = String;
+
+/// A special text edit with an additional change annotation.
+///
+/// @since 3.16.0.
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct AnnotatedTextEdit {
+ #[serde(flatten)]
+ pub text_edit: TextEdit,
+
+ /// The actual annotation
+ pub annotation_id: ChangeAnnotationIdentifier,
+}
+
+/// Describes textual changes on a single text document. The text document is referred to as an
+/// `OptionalVersionedTextDocumentIdentifier` to allow clients to check the text document version before an
+/// edit is applied. A `TextDocumentEdit` describes all changes on version Si; after they are applied, the
+/// document moves to version Si+1. So the creator of a `TextDocumentEdit` doesn't need to
+/// sort the array or do any kind of ordering. However, the edits must be non-overlapping.
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct TextDocumentEdit {
+ /// The text document to change.
+ pub text_document: OptionalVersionedTextDocumentIdentifier,
+
+ /// The edits to be applied.
+ ///
+ /// @since 3.16.0 - support for AnnotatedTextEdit. This is guarded by the
+ /// client capability `workspace.workspaceEdit.changeAnnotationSupport`
+ pub edits: Vec<OneOf<TextEdit, AnnotatedTextEdit>>,
+}
+
+/// Additional information that describes document changes.
+///
+/// @since 3.16.0.
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct ChangeAnnotation {
+ /// A human-readable string describing the actual change. The string
+ /// is rendered prominent in the user interface.
+ pub label: String,
+
+ /// A flag which indicates that user confirmation is needed
+ /// before applying the change.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub needs_confirmation: Option<bool>,
+
+ /// A human-readable string which is rendered less prominent in
+ /// the user interface.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub description: Option<String>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct ChangeAnnotationWorkspaceEditClientCapabilities {
+ /// Whether the client groups edits with equal labels into tree nodes,
+ /// for instance all edits labelled with "Changes in Strings" would
+ /// be a tree node.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub groups_on_label: Option<bool>,
+}
+
+/// Options to create a file.
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct CreateFileOptions {
+ /// Overwrite existing file. Overwrite wins over `ignoreIfExists`
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub overwrite: Option<bool>,
+ /// Ignore if exists.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub ignore_if_exists: Option<bool>,
+}
+
+/// Create file operation
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct CreateFile {
+ /// The resource to create.
+ pub uri: Url,
+ /// Additional options
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub options: Option<CreateFileOptions>,
+
+ /// An optional annotation identifier describing the operation.
+ ///
+ /// @since 3.16.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub annotation_id: Option<ChangeAnnotationIdentifier>,
+}
+
+/// Rename file options
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct RenameFileOptions {
+ /// Overwrite target if existing. Overwrite wins over `ignoreIfExists`
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub overwrite: Option<bool>,
+ /// Ignores if target exists.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub ignore_if_exists: Option<bool>,
+}
+
+/// Rename file operation
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct RenameFile {
+ /// The old (existing) location.
+ pub old_uri: Url,
+ /// The new location.
+ pub new_uri: Url,
+ /// Rename options.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub options: Option<RenameFileOptions>,
+
+ /// An optional annotation identifier describing the operation.
+ ///
+ /// @since 3.16.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub annotation_id: Option<ChangeAnnotationIdentifier>,
+}
+
+/// Delete file options
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct DeleteFileOptions {
+ /// Delete the content recursively if a folder is denoted.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub recursive: Option<bool>,
+ /// Ignore the operation if the file doesn't exist.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub ignore_if_not_exists: Option<bool>,
+
+ /// An optional annotation identifier describing the operation.
+ ///
+ /// @since 3.16.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub annotation_id: Option<ChangeAnnotationIdentifier>,
+}
+
+/// Delete file operation
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct DeleteFile {
+ /// The file to delete.
+ pub uri: Url,
+ /// Delete options.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub options: Option<DeleteFileOptions>,
+}
+
+/// A workspace edit represents changes to many resources managed in the workspace.
+/// The edit should either provide `changes` or `documentChanges`.
+/// If the client can handle versioned document edits and if `documentChanges` are present,
+/// the latter are preferred over `changes`.
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct WorkspaceEdit {
+ /// Holds changes to existing resources.
+ #[serde(with = "url_map")]
+ #[serde(skip_serializing_if = "Option::is_none")]
+ #[serde(default)]
+ pub changes: Option<HashMap<Url, Vec<TextEdit>>>, // changes?: { [uri: string]: TextEdit[]; };
+
+ /// Depending on the client capability `workspace.workspaceEdit.resourceOperations` document changes
+ /// are either an array of `TextDocumentEdit`s to express changes to n different text documents
+ /// where each text document edit addresses a specific version of a text document. Or it can contain
+ /// the above `TextDocumentEdit`s mixed with create, rename and delete file / folder operations.
+ ///
+ /// Whether a client supports versioned document edits is expressed via
+ /// `workspace.workspaceEdit.documentChanges` client capability.
+ ///
+ /// If a client neither supports `documentChanges` nor `workspace.workspaceEdit.resourceOperations` then
+ /// only plain `TextEdit`s using the `changes` property are supported.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub document_changes: Option<DocumentChanges>,
+
+ /// A map of change annotations that can be referenced in
+ /// `AnnotatedTextEdit`s or create, rename and delete file / folder
+ /// operations.
+ ///
+ /// Whether clients honor this property depends on the client capability
+ /// `workspace.changeAnnotationSupport`.
+ ///
+ /// @since 3.16.0
+ ///
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub change_annotations: Option<HashMap<ChangeAnnotationIdentifier, ChangeAnnotation>>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(untagged)]
+pub enum DocumentChanges {
+ Edits(Vec<TextDocumentEdit>),
+ Operations(Vec<DocumentChangeOperation>),
+}
+
+// TODO: Once https://github.com/serde-rs/serde/issues/912 is solved
+// we can remove ResourceOp and switch to the following implementation
+// of DocumentChangeOperation:
+//
+// #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+// #[serde(tag = "kind", rename_all="lowercase" )]
+// pub enum DocumentChangeOperation {
+// Create(CreateFile),
+// Rename(RenameFile),
+// Delete(DeleteFile),
+//
+// #[serde(other)]
+// Edit(TextDocumentEdit),
+// }
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(untagged, rename_all = "lowercase")]
+pub enum DocumentChangeOperation {
+ Op(ResourceOp),
+ Edit(TextDocumentEdit),
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(tag = "kind", rename_all = "lowercase")]
+pub enum ResourceOp {
+ Create(CreateFile),
+ Rename(RenameFile),
+ Delete(DeleteFile),
+}
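+
+// Illustrative sketch: a `documentChanges` array that first creates a file and
+// then edits a document. A `ResourceOp` carries a lowercase `kind` tag ("create"
+// here), while a plain `TextDocumentEdit` has none, which is how the untagged
+// `DocumentChangeOperation` is told apart on deserialization.
+#[allow(dead_code)]
+fn example_document_changes(uri: Url, edit: TextDocumentEdit) -> DocumentChanges {
+ DocumentChanges::Operations(vec![
+ DocumentChangeOperation::Op(ResourceOp::Create(CreateFile {
+ uri,
+ options: None,
+ annotation_id: None,
+ })),
+ DocumentChangeOperation::Edit(edit),
+ ])
+}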
+
+pub type DidChangeConfigurationClientCapabilities = DynamicRegistrationClientCapabilities;
+
+#[derive(Debug, Default, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct ConfigurationParams {
+ pub items: Vec<ConfigurationItem>,
+}
+
+#[derive(Debug, Default, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct ConfigurationItem {
+ /// The scope to get the configuration section for.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub scope_uri: Option<Url>,
+
+ /// The configuration section asked for.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub section: Option<String>,
+}
+
+mod url_map {
+ use std::fmt;
+
+ use super::*;
+
+ pub fn deserialize<'de, D>(
+ deserializer: D,
+ ) -> Result<Option<HashMap<Url, Vec<TextEdit>>>, D::Error>
+ where
+ D: serde::Deserializer<'de>,
+ {
+ struct UrlMapVisitor;
+ impl<'de> de::Visitor<'de> for UrlMapVisitor {
+ type Value = HashMap<Url, Vec<TextEdit>>;
+
+ fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("map")
+ }
+
+ fn visit_map<M>(self, mut visitor: M) -> Result<Self::Value, M::Error>
+ where
+ M: de::MapAccess<'de>,
+ {
+ let mut values = HashMap::with_capacity(visitor.size_hint().unwrap_or(0));
+
+ // While there are entries remaining in the input, add them
+ // into our map.
+ while let Some((key, value)) = visitor.next_entry::<Url, _>()? {
+ values.insert(key, value);
+ }
+
+ Ok(values)
+ }
+ }
+
+ struct OptionUrlMapVisitor;
+ impl<'de> de::Visitor<'de> for OptionUrlMapVisitor {
+ type Value = Option<HashMap<Url, Vec<TextEdit>>>;
+
+ fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("option")
+ }
+
+ #[inline]
+ fn visit_unit<E>(self) -> Result<Self::Value, E>
+ where
+ E: serde::de::Error,
+ {
+ Ok(None)
+ }
+
+ #[inline]
+ fn visit_none<E>(self) -> Result<Self::Value, E>
+ where
+ E: serde::de::Error,
+ {
+ Ok(None)
+ }
+
+ #[inline]
+ fn visit_some<D>(self, deserializer: D) -> Result<Self::Value, D::Error>
+ where
+ D: serde::Deserializer<'de>,
+ {
+ deserializer.deserialize_map(UrlMapVisitor).map(Some)
+ }
+ }
+
+ // Instantiate our Visitor and ask the Deserializer to drive
+ // it over the input data, resulting in an instance of the map.
+ deserializer.deserialize_option(OptionUrlMapVisitor)
+ }
+
+ pub fn serialize<S>(
+ changes: &Option<HashMap<Url, Vec<TextEdit>>>,
+ serializer: S,
+ ) -> Result<S::Ok, S::Error>
+ where
+ S: serde::Serializer,
+ {
+ use serde::ser::SerializeMap;
+
+ match *changes {
+ Some(ref changes) => {
+ let mut map = serializer.serialize_map(Some(changes.len()))?;
+ for (k, v) in changes {
+ map.serialize_entry(k.as_str(), v)?;
+ }
+ map.end()
+ }
+ None => serializer.serialize_none(),
+ }
+ }
+}
+
+impl WorkspaceEdit {
+ pub fn new(changes: HashMap<Url, Vec<TextEdit>>) -> WorkspaceEdit {
+ WorkspaceEdit {
+ changes: Some(changes),
+ document_changes: None,
+ ..Default::default()
+ }
+ }
+}
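+
+// Illustrative sketch: a `WorkspaceEdit` built from the plain `changes` map. With
+// the `url_map` (de)serializer above, the URL becomes the JSON object key, e.g.
+// {"changes":{"file:///tmp/a.rs":[{"range":...,"newText":"new"}]}}.
+#[allow(dead_code)]
+fn example_workspace_edit(uri: Url, edit: TextEdit) -> WorkspaceEdit {
+ let mut changes = HashMap::new();
+ changes.insert(uri, vec![edit]);
+ WorkspaceEdit::new(changes)
+}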
+
+/// Text documents are identified using a URI. On the protocol level, URIs are passed as strings.
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+pub struct TextDocumentIdentifier {
+ // !!!!!! Note:
+ // In the spec VersionedTextDocumentIdentifier extends TextDocumentIdentifier
+ // This is modelled by "mixing-in" TextDocumentIdentifier in VersionedTextDocumentIdentifier,
+ // so any changes to this type must be made in the sub-type as well.
+ /// The text document's URI.
+ pub uri: Url,
+}
+
+impl TextDocumentIdentifier {
+ pub fn new(uri: Url) -> TextDocumentIdentifier {
+ TextDocumentIdentifier { uri }
+ }
+}
+
+/// An item to transfer a text document from the client to the server.
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct TextDocumentItem {
+ /// The text document's URI.
+ pub uri: Url,
+
+ /// The text document's language identifier.
+ pub language_id: String,
+
+ /// The version number of this document (it will strictly increase after each
+ /// change, including undo/redo).
+ pub version: i32,
+
+ /// The content of the opened text document.
+ pub text: String,
+}
+
+impl TextDocumentItem {
+ pub fn new(uri: Url, language_id: String, version: i32, text: String) -> TextDocumentItem {
+ TextDocumentItem {
+ uri,
+ language_id,
+ version,
+ text,
+ }
+ }
+}
+
+/// An identifier to denote a specific version of a text document. This information usually flows from the client to the server.
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+pub struct VersionedTextDocumentIdentifier {
+ // This field was "mixed-in" from TextDocumentIdentifier
+ /// The text document's URI.
+ pub uri: Url,
+
+ /// The version number of this document.
+ ///
+ /// The version number of a document will increase after each change,
+ /// including undo/redo. The number doesn't need to be consecutive.
+ pub version: i32,
+}
+
+impl VersionedTextDocumentIdentifier {
+ pub fn new(uri: Url, version: i32) -> VersionedTextDocumentIdentifier {
+ VersionedTextDocumentIdentifier { uri, version }
+ }
+}
+
+/// An identifier which optionally denotes a specific version of a text document. This information usually flows from the server to the client.
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+pub struct OptionalVersionedTextDocumentIdentifier {
+ // This field was "mixed-in" from TextDocumentIdentifier
+ /// The text document's URI.
+ pub uri: Url,
+
+ /// The version number of this document. If an optional versioned text document
+ /// identifier is sent from the server to the client and the file is not
+ /// open in the editor (the server has not received an open notification
+ /// before) the server can send `null` to indicate that the version is
+ /// known and the content on disk is the master (as specified with document
+ /// content ownership).
+ ///
+ /// The version number of a document will increase after each change,
+ /// including undo/redo. The number doesn't need to be consecutive.
+ pub version: Option<i32>,
+}
+
+impl OptionalVersionedTextDocumentIdentifier {
+ pub fn new(uri: Url, version: i32) -> OptionalVersionedTextDocumentIdentifier {
+ OptionalVersionedTextDocumentIdentifier {
+ uri,
+ version: Some(version),
+ }
+ }
+}
+
+/// A parameter literal used in requests to pass a text document and a position inside that document.
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct TextDocumentPositionParams {
+ // !!!!!! Note:
+ // In the spec ReferenceParams extends TextDocumentPositionParams
+ // This is modelled by "mixing-in" TextDocumentPositionParams in ReferenceParams,
+ // so any changes to this type must be made in the sub-type as well.
+ /// The text document.
+ pub text_document: TextDocumentIdentifier,
+
+ /// The position inside the text document.
+ pub position: Position,
+}
+
+impl TextDocumentPositionParams {
+ pub fn new(
+ text_document: TextDocumentIdentifier,
+ position: Position,
+ ) -> TextDocumentPositionParams {
+ TextDocumentPositionParams {
+ text_document,
+ position,
+ }
+ }
+}
+
+/// A document filter denotes a document through properties like language, scheme or pattern.
+/// Examples are a filter that applies to TypeScript files on disk or a filter that applies to JSON
+/// files named package.json:
+///
+/// { language: 'typescript', scheme: 'file' }
+/// { language: 'json', pattern: '**/package.json' }
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+pub struct DocumentFilter {
+ /// A language id, like `typescript`.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub language: Option<String>,
+
+ /// A Uri [scheme](#Uri.scheme), like `file` or `untitled`.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub scheme: Option<String>,
+
+ /// A glob pattern, like `*.{ts,js}`.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub pattern: Option<String>,
+}
+
+/// A document selector is the combination of one or many document filters.
+pub type DocumentSelector = Vec<DocumentFilter>;
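+
+// Illustrative sketch: the two filters from the doc comment above, expressed as a
+// `DocumentSelector`.
+#[allow(dead_code)]
+fn example_document_selector() -> DocumentSelector {
+ vec![
+ DocumentFilter {
+ language: Some("typescript".to_string()),
+ scheme: Some("file".to_string()),
+ pattern: None,
+ },
+ DocumentFilter {
+ language: Some("json".to_string()),
+ scheme: None,
+ pattern: Some("**/package.json".to_string()),
+ },
+ ]
+}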
+
+// ========================= Actual Protocol =========================
+
+#[derive(Debug, PartialEq, Clone, Deserialize, Serialize, Default)]
+#[serde(rename_all = "camelCase")]
+pub struct InitializeParams {
+ /// The process Id of the parent process that started
+ /// the server. Is null if the process has not been started by another process.
+ /// If the parent process is not alive, the server should exit its process (see the exit notification).
+ pub process_id: Option<u32>,
+
+ /// The rootPath of the workspace. Is null
+ /// if no folder is open.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ #[deprecated(note = "Use `root_uri` instead when possible")]
+ pub root_path: Option<String>,
+
+ /// The rootUri of the workspace. Is null if no
+ /// folder is open. If both `rootPath` and `rootUri` are set
+ /// `rootUri` wins.
+ ///
+ /// Deprecated in favour of `workspaceFolders`
+ #[serde(default)]
+ pub root_uri: Option<Url>,
+
+ /// User provided initialization options.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub initialization_options: Option<Value>,
+
+ /// The capabilities provided by the client (editor or tool)
+ pub capabilities: ClientCapabilities,
+
+ /// The initial trace setting. If omitted trace is disabled ('off').
+ #[serde(default)]
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub trace: Option<TraceValue>,
+
+ /// The workspace folders configured in the client when the server starts.
+ /// This property is only available if the client supports workspace folders.
+ /// It can be `null` if the client supports workspace folders but none are
+ /// configured.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub workspace_folders: Option<Vec<WorkspaceFolder>>,
+
+ /// Information about the client.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub client_info: Option<ClientInfo>,
+
+ /// The locale the client is currently showing the user interface
+ /// in. This must not necessarily be the locale of the operating
+ /// system.
+ ///
+ /// Uses IETF language tags as the value's syntax
+ /// (See <https://en.wikipedia.org/wiki/IETF_language_tag>)
+ ///
+ /// @since 3.16.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub locale: Option<String>,
+}
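+
+// Illustrative sketch: the smallest useful `InitializeParams`, relying on the
+// derived `Default` for everything except the root URI and the client
+// capabilities. `example_initialize_params` is a hypothetical helper.
+#[allow(dead_code)]
+fn example_initialize_params(root: Url) -> InitializeParams {
+ InitializeParams {
+ root_uri: Some(root),
+ capabilities: ClientCapabilities::default(),
+ ..InitializeParams::default()
+ }
+}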
+
+#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
+pub struct ClientInfo {
+ /// The name of the client as defined by the client.
+ pub name: String,
+ /// The client's version as defined by the client.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub version: Option<String>,
+}
+
+#[derive(Debug, PartialEq, Clone, Copy, Deserialize, Serialize)]
+pub struct InitializedParams {}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+pub struct GenericRegistrationOptions {
+ #[serde(flatten)]
+ pub text_document_registration_options: TextDocumentRegistrationOptions,
+
+ #[serde(flatten)]
+ pub options: GenericOptions,
+
+ #[serde(flatten)]
+ pub static_registration_options: StaticRegistrationOptions,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+pub struct GenericOptions {
+ #[serde(flatten)]
+ pub work_done_progress_options: WorkDoneProgressOptions,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+pub struct GenericParams {
+ #[serde(flatten)]
+ pub text_document_position_params: TextDocumentPositionParams,
+
+ #[serde(flatten)]
+ pub work_done_progress_params: WorkDoneProgressParams,
+
+ #[serde(flatten)]
+ pub partial_result_params: PartialResultParams,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Copy, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct DynamicRegistrationClientCapabilities {
+ /// This capability supports dynamic registration.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub dynamic_registration: Option<bool>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Copy, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct GotoCapability {
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub dynamic_registration: Option<bool>,
+
+ /// The client supports additional metadata in the form of definition links.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub link_support: Option<bool>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct WorkspaceEditClientCapabilities {
+ /// The client supports versioned document changes in `WorkspaceEdit`s
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub document_changes: Option<bool>,
+
+ /// The resource operations the client supports. Clients should at least
+ /// support 'create', 'rename' and 'delete' files and folders.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub resource_operations: Option<Vec<ResourceOperationKind>>,
+
+ /// The failure handling strategy of a client if applying the workspace edit
+ /// fails.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub failure_handling: Option<FailureHandlingKind>,
+
+ /// Whether the client normalizes line endings to the client specific
+ /// setting.
+ /// If set to `true` the client will normalize line ending characters
+ /// in a workspace edit to the client-specific new line
+ /// character.
+ ///
+ /// @since 3.16.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub normalizes_line_endings: Option<bool>,
+
+ /// Whether the client in general supports change annotations on text edits,
+ /// create file, rename file and delete file changes.
+ ///
+ /// @since 3.16.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub change_annotation_support: Option<ChangeAnnotationWorkspaceEditClientCapabilities>,
+}
+
+#[derive(Debug, Eq, PartialEq, Deserialize, Serialize, Copy, Clone)]
+#[serde(rename_all = "lowercase")]
+pub enum ResourceOperationKind {
+ Create,
+ Rename,
+ Delete,
+}
+
+#[derive(Debug, Eq, PartialEq, Deserialize, Serialize, Copy, Clone)]
+#[serde(rename_all = "camelCase")]
+pub enum FailureHandlingKind {
+ Abort,
+ Transactional,
+ TextOnlyTransactional,
+ Undo,
+}
+
+/// A symbol kind.
+#[derive(Eq, PartialEq, Copy, Clone, Serialize, Deserialize)]
+#[serde(transparent)]
+pub struct SymbolKind(i32);
+lsp_enum! {
+impl SymbolKind {
+ pub const FILE: SymbolKind = SymbolKind(1);
+ pub const MODULE: SymbolKind = SymbolKind(2);
+ pub const NAMESPACE: SymbolKind = SymbolKind(3);
+ pub const PACKAGE: SymbolKind = SymbolKind(4);
+ pub const CLASS: SymbolKind = SymbolKind(5);
+ pub const METHOD: SymbolKind = SymbolKind(6);
+ pub const PROPERTY: SymbolKind = SymbolKind(7);
+ pub const FIELD: SymbolKind = SymbolKind(8);
+ pub const CONSTRUCTOR: SymbolKind = SymbolKind(9);
+ pub const ENUM: SymbolKind = SymbolKind(10);
+ pub const INTERFACE: SymbolKind = SymbolKind(11);
+ pub const FUNCTION: SymbolKind = SymbolKind(12);
+ pub const VARIABLE: SymbolKind = SymbolKind(13);
+ pub const CONSTANT: SymbolKind = SymbolKind(14);
+ pub const STRING: SymbolKind = SymbolKind(15);
+ pub const NUMBER: SymbolKind = SymbolKind(16);
+ pub const BOOLEAN: SymbolKind = SymbolKind(17);
+ pub const ARRAY: SymbolKind = SymbolKind(18);
+ pub const OBJECT: SymbolKind = SymbolKind(19);
+ pub const KEY: SymbolKind = SymbolKind(20);
+ pub const NULL: SymbolKind = SymbolKind(21);
+ pub const ENUM_MEMBER: SymbolKind = SymbolKind(22);
+ pub const STRUCT: SymbolKind = SymbolKind(23);
+ pub const EVENT: SymbolKind = SymbolKind(24);
+ pub const OPERATOR: SymbolKind = SymbolKind(25);
+ pub const TYPE_PARAMETER: SymbolKind = SymbolKind(26);
+}
+}
+
+/// Specific capabilities for the `SymbolKind` in the `workspace/symbol` request.
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct SymbolKindCapability {
+ /// The symbol kind values the client supports. When this
+ /// property exists the client also guarantees that it will
+ /// handle values outside its set gracefully and falls back
+ /// to a default value when unknown.
+ ///
+ /// If this property is not present the client only supports
+ /// the symbol kinds from `File` to `Array` as defined in
+ /// the initial version of the protocol.
+ pub value_set: Option<Vec<SymbolKind>>,
+}
+
+/// Workspace specific client capabilities.
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct WorkspaceClientCapabilities {
+ /// The client supports applying batch edits to the workspace by supporting
+ /// the request 'workspace/applyEdit'
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub apply_edit: Option<bool>,
+
+ /// Capabilities specific to `WorkspaceEdit`s
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub workspace_edit: Option<WorkspaceEditClientCapabilities>,
+
+ /// Capabilities specific to the `workspace/didChangeConfiguration` notification.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub did_change_configuration: Option<DidChangeConfigurationClientCapabilities>,
+
+ /// Capabilities specific to the `workspace/didChangeWatchedFiles` notification.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub did_change_watched_files: Option<DidChangeWatchedFilesClientCapabilities>,
+
+ /// Capabilities specific to the `workspace/symbol` request.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub symbol: Option<WorkspaceSymbolClientCapabilities>,
+
+ /// Capabilities specific to the `workspace/executeCommand` request.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub execute_command: Option<ExecuteCommandClientCapabilities>,
+
+ /// The client has support for workspace folders.
+ /// since 3.6.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub workspace_folders: Option<bool>,
+
+ /// The client supports `workspace/configuration` requests.
+ /// since 3.6.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub configuration: Option<bool>,
+
+ /// Capabilities specific to the semantic token requests scoped to the workspace.
+ /// since 3.16.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub semantic_tokens: Option<SemanticTokensWorkspaceClientCapabilities>,
+
+ /// Capabilities specific to the code lens requests scoped to the workspace.
+ /// since 3.16.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub code_lens: Option<CodeLensWorkspaceClientCapabilities>,
+
+ /// The client has support for file requests/notifications.
+ /// since 3.16.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub file_operations: Option<WorkspaceFileOperationsClientCapabilities>,
+
+ /// Client workspace capabilities specific to inline values.
+ /// since 3.17.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub inline_value: Option<InlineValueWorkspaceClientCapabilities>,
+
+ /// Client workspace capabilities specific to inlay hints.
+ /// since 3.17.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub inlay_hint: Option<InlayHintWorkspaceClientCapabilities>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct TextDocumentSyncClientCapabilities {
+ /// Whether text document synchronization supports dynamic registration.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub dynamic_registration: Option<bool>,
+
+ /// The client supports sending will save notifications.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub will_save: Option<bool>,
+
+ /// The client supports sending a will save request and
+ /// waits for a response providing text edits which will
+ /// be applied to the document before it is saved.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub will_save_wait_until: Option<bool>,
+
+ /// The client supports did save notifications.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub did_save: Option<bool>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct PublishDiagnosticsClientCapabilities {
+ /// Whether the client accepts diagnostics with related information.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub related_information: Option<bool>,
+
+ /// Client supports the tag property to provide meta data about a diagnostic.
+ /// Clients supporting tags have to handle unknown tags gracefully.
+ #[serde(
+ default,
+ skip_serializing_if = "Option::is_none",
+ deserialize_with = "TagSupport::deserialize_compat"
+ )]
+ pub tag_support: Option<TagSupport<DiagnosticTag>>,
+
+ /// Whether the client interprets the version property of the
+ /// `textDocument/publishDiagnostics` notification's parameter.
+ ///
+ /// 3.15.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub version_support: Option<bool>,
+
+ /// Client supports a codeDescription property
+ ///
+ /// 3.16.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub code_description_support: Option<bool>,
+
+ /// Whether code action supports the `data` property which is
+ /// preserved between a `textDocument/publishDiagnostics` and
+ /// `textDocument/codeAction` request.
+ ///
+ /// 3.16.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub data_support: Option<bool>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct TagSupport<T> {
+ /// The tags supported by the client.
+ pub value_set: Vec<T>,
+}
+
+impl<T> TagSupport<T> {
+ /// Support for deserializing a boolean `tagSupport` value, in case it's present.
+ ///
+ /// This is currently the case for vscode 1.41.1
+ fn deserialize_compat<'de, S>(serializer: S) -> Result<Option<TagSupport<T>>, S::Error>
+ where
+ S: serde::Deserializer<'de>,
+ T: serde::Deserialize<'de>,
+ {
+ Ok(
+ match Option::<Value>::deserialize(serializer).map_err(serde::de::Error::custom)? {
+ Some(Value::Bool(false)) => None,
+ Some(Value::Bool(true)) => Some(TagSupport { value_set: vec![] }),
+ Some(other) => {
+ Some(TagSupport::<T>::deserialize(other).map_err(serde::de::Error::custom)?)
+ }
+ None => None,
+ },
+ )
+ }
+}
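+
+// Illustrative sketch: the compat path above accepts both the spec-conformant
+// object form and the bare boolean that some older clients sent. The JSON
+// snippets are made-up inputs for illustration.
+#[allow(dead_code)]
+fn example_tag_support_compat() {
+ let object_form: PublishDiagnosticsClientCapabilities =
+ serde_json::from_str(r#"{"tagSupport":{"valueSet":[1,2]}}"#).unwrap();
+ let bool_form: PublishDiagnosticsClientCapabilities =
+ serde_json::from_str(r#"{"tagSupport":true}"#).unwrap();
+ assert!(object_form.tag_support.is_some());
+ // `true` maps to an empty value set rather than a deserialization error.
+ assert_eq!(bool_form.tag_support.map(|t| t.value_set.len()), Some(0));
+}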
+
+/// Text document specific client capabilities.
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct TextDocumentClientCapabilities {
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub synchronization: Option<TextDocumentSyncClientCapabilities>,
+ /// Capabilities specific to the `textDocument/completion`
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub completion: Option<CompletionClientCapabilities>,
+
+ /// Capabilities specific to the `textDocument/hover`
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub hover: Option<HoverClientCapabilities>,
+
+ /// Capabilities specific to the `textDocument/signatureHelp`
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub signature_help: Option<SignatureHelpClientCapabilities>,
+
+ /// Capabilities specific to the `textDocument/references`
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub references: Option<ReferenceClientCapabilities>,
+
+ /// Capabilities specific to the `textDocument/documentHighlight`
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub document_highlight: Option<DocumentHighlightClientCapabilities>,
+
+ /// Capabilities specific to the `textDocument/documentSymbol`
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub document_symbol: Option<DocumentSymbolClientCapabilities>,
+ /// Capabilities specific to the `textDocument/formatting`
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub formatting: Option<DocumentFormattingClientCapabilities>,
+
+ /// Capabilities specific to the `textDocument/rangeFormatting`
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub range_formatting: Option<DocumentRangeFormattingClientCapabilities>,
+
+ /// Capabilities specific to the `textDocument/onTypeFormatting`
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub on_type_formatting: Option<DocumentOnTypeFormattingClientCapabilities>,
+
+ /// Capabilities specific to the `textDocument/declaration`
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub declaration: Option<GotoCapability>,
+
+ /// Capabilities specific to the `textDocument/definition`
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub definition: Option<GotoCapability>,
+
+ /// Capabilities specific to the `textDocument/typeDefinition`
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub type_definition: Option<GotoCapability>,
+
+ /// Capabilities specific to the `textDocument/implementation`
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub implementation: Option<GotoCapability>,
+
+ /// Capabilities specific to the `textDocument/codeAction`
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub code_action: Option<CodeActionClientCapabilities>,
+
+ /// Capabilities specific to the `textDocument/codeLens`
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub code_lens: Option<CodeLensClientCapabilities>,
+
+ /// Capabilities specific to the `textDocument/documentLink`
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub document_link: Option<DocumentLinkClientCapabilities>,
+
+ /// Capabilities specific to the `textDocument/documentColor` and the
+ /// `textDocument/colorPresentation` request.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub color_provider: Option<DocumentColorClientCapabilities>,
+
+ /// Capabilities specific to the `textDocument/rename`
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub rename: Option<RenameClientCapabilities>,
+
+ /// Capabilities specific to `textDocument/publishDiagnostics`.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub publish_diagnostics: Option<PublishDiagnosticsClientCapabilities>,
+
+ /// Capabilities specific to `textDocument/foldingRange` requests.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub folding_range: Option<FoldingRangeClientCapabilities>,
+
+ /// Capabilities specific to the `textDocument/selectionRange` request.
+ ///
+ /// @since 3.15.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub selection_range: Option<SelectionRangeClientCapabilities>,
+
+ /// Capabilities specific to `textDocument/linkedEditingRange` requests.
+ ///
+ /// @since 3.16.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub linked_editing_range: Option<LinkedEditingRangeClientCapabilities>,
+
+ /// Capabilities specific to the various call hierarchy requests.
+ ///
+ /// @since 3.16.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub call_hierarchy: Option<CallHierarchyClientCapabilities>,
+
+ /// Capabilities specific to the `textDocument/semanticTokens/*` requests.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub semantic_tokens: Option<SemanticTokensClientCapabilities>,
+
+ /// Capabilities specific to the `textDocument/moniker` request.
+ ///
+ /// @since 3.16.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub moniker: Option<MonikerClientCapabilities>,
+
+ /// Capabilities specific to the various type hierarchy requests.
+ ///
+ /// @since 3.17.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub type_hierarchy: Option<TypeHierarchyClientCapabilities>,
+
+ /// Capabilities specific to the `textDocument/inlineValue` request.
+ ///
+ /// @since 3.17.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub inline_value: Option<InlineValueClientCapabilities>,
+
+ /// Capabilities specific to the `textDocument/inlayHint` request.
+ ///
+ /// @since 3.17.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub inlay_hint: Option<InlayHintClientCapabilities>,
+}
+
+/// Where ClientCapabilities are currently empty:
+#[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct ClientCapabilities {
+ /// Workspace specific client capabilities.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub workspace: Option<WorkspaceClientCapabilities>,
+
+ /// Text document specific client capabilities.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub text_document: Option<TextDocumentClientCapabilities>,
+
+ /// Window specific client capabilities.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub window: Option<WindowClientCapabilities>,
+
+ /// General client capabilities.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub general: Option<GeneralClientCapabilities>,
+
+ /// Unofficial UTF-8 offsets extension.
+ ///
+ /// See <https://clangd.llvm.org/extensions.html#utf-8-offsets>.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ #[cfg(feature = "proposed")]
+ pub offset_encoding: Option<Vec<String>>,
+
+ /// Experimental client capabilities.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub experimental: Option<Value>,
+}
+
+#[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct GeneralClientCapabilities {
+ /// Client capabilities specific to regular expressions.
+ /// @since 3.16.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub regular_expressions: Option<RegularExpressionsClientCapabilities>,
+
+ /// Client capabilities specific to the client's markdown parser.
+ /// @since 3.16.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub markdown: Option<MarkdownClientCapabilities>,
+
+ /// Client capability that signals how the client handles stale requests (e.g. a request for
+ /// which the client will not process the response anymore since the information is outdated).
+ ///
+ /// @since 3.17.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub stale_request_support: Option<StaleRequestSupportClientCapabilities>,
+
+ /// The position encodings supported by the client. Client and server
+ /// have to agree on the same position encoding to ensure that offsets
+ /// (e.g. character position in a line) are interpreted the same on both
+ /// sides.
+ ///
+ /// To keep the protocol backwards compatible the following applies: if
+ /// the value 'utf-16' is missing from the array of position encodings
+ /// servers can assume that the client supports UTF-16. UTF-16 is
+ /// therefore a mandatory encoding.
+ ///
+ /// If omitted it defaults to ['utf-16'].
+ ///
+ /// Implementation considerations: since the conversion from one encoding
+ /// into another requires the content of the file / line, the conversion
+ /// is best done where the file is read, which is usually on the server
+ /// side.
+ ///
+ /// @since 3.17.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub position_encodings: Option<Vec<PositionEncodingKind>>,
+}
+
+/// Client capability that signals how the client
+/// handles stale requests (e.g. a request
+/// for which the client will not process the response
+/// anymore since the information is outdated).
+///
+/// @since 3.17.0
+#[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct StaleRequestSupportClientCapabilities {
+ /// The client will actively cancel the request.
+ pub cancel: bool,
+
+ /// The list of requests for which the client
+ /// will retry the request if it receives a
+ /// response with error code `ContentModified`.
+ pub retry_on_content_modified: Vec<String>,
+}
+
+#[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct RegularExpressionsClientCapabilities {
+ /// The engine's name.
+ pub engine: String,
+
+ /// The engine's version
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub version: Option<String>,
+}
+
+#[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct MarkdownClientCapabilities {
+ /// The name of the parser.
+ pub parser: String,
+
+ /// The version of the parser.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub version: Option<String>,
+
+ /// A list of HTML tags that the client allows / supports in
+ /// Markdown.
+ ///
+ /// @since 3.17.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub allowed_tags: Option<Vec<String>>,
+}
+
+#[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct InitializeResult {
+ /// The capabilities the language server provides.
+ pub capabilities: ServerCapabilities,
+
+ /// Information about the server.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub server_info: Option<ServerInfo>,
+
+ /// Unofficial UTF-8 offsets extension.
+ ///
+ /// See <https://clangd.llvm.org/extensions.html#utf-8-offsets>.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ #[cfg(feature = "proposed")]
+ pub offset_encoding: Option<String>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+pub struct ServerInfo {
+ /// The name of the server as defined by the server.
+ pub name: String,
+ /// The server's version as defined by the server.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub version: Option<String>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+pub struct InitializeError {
+ /// Indicates whether the client should retry sending the
+ /// initialize request after showing the message provided
+ /// in the ResponseError.
+ pub retry: bool,
+}
+
+// The server can signal the following capabilities:
+
+/// Defines how the host (editor) should sync document changes to the language server.
+#[derive(Eq, PartialEq, Clone, Copy, Deserialize, Serialize)]
+#[serde(transparent)]
+pub struct TextDocumentSyncKind(i32);
+lsp_enum! {
+impl TextDocumentSyncKind {
+ /// Documents should not be synced at all.
+ pub const NONE: TextDocumentSyncKind = TextDocumentSyncKind(0);
+
+ /// Documents are synced by always sending the full content of the document.
+ pub const FULL: TextDocumentSyncKind = TextDocumentSyncKind(1);
+
+ /// Documents are synced by sending the full content on open. After that only
+ /// incremental updates to the document are sent.
+ pub const INCREMENTAL: TextDocumentSyncKind = TextDocumentSyncKind(2);
+}
+}
+
+pub type ExecuteCommandClientCapabilities = DynamicRegistrationClientCapabilities;
+
+/// Execute command options.
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+pub struct ExecuteCommandOptions {
+ /// The commands to be executed on the server
+ pub commands: Vec<String>,
+
+ #[serde(flatten)]
+ pub work_done_progress_options: WorkDoneProgressOptions,
+}
+
+/// Save options.
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct SaveOptions {
+ /// The client is supposed to include the content on save.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub include_text: Option<bool>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(untagged)]
+pub enum TextDocumentSyncSaveOptions {
+ Supported(bool),
+ SaveOptions(SaveOptions),
+}
+
+impl From<SaveOptions> for TextDocumentSyncSaveOptions {
+ fn from(from: SaveOptions) -> Self {
+ Self::SaveOptions(from)
+ }
+}
+
+impl From<bool> for TextDocumentSyncSaveOptions {
+ fn from(from: bool) -> Self {
+ Self::Supported(from)
+ }
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct TextDocumentSyncOptions {
+ /// Open and close notifications are sent to the server.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub open_close: Option<bool>,
+
+ /// Change notifications are sent to the server. See TextDocumentSyncKind.None, TextDocumentSyncKind.Full
+ /// and TextDocumentSyncKind.Incremental.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub change: Option<TextDocumentSyncKind>,
+
+ /// Will save notifications are sent to the server.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub will_save: Option<bool>,
+
+ /// Will save wait until requests are sent to the server.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub will_save_wait_until: Option<bool>,
+
+ /// Save notifications are sent to the server.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub save: Option<TextDocumentSyncSaveOptions>,
+}
+
+#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Clone, Deserialize, Serialize)]
+#[serde(untagged)]
+pub enum OneOf<A, B> {
+ Left(A),
+ Right(B),
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(untagged)]
+pub enum TextDocumentSyncCapability {
+ Kind(TextDocumentSyncKind),
+ Options(TextDocumentSyncOptions),
+}
+
+impl From<TextDocumentSyncOptions> for TextDocumentSyncCapability {
+ fn from(from: TextDocumentSyncOptions) -> Self {
+ Self::Options(from)
+ }
+}
+
+impl From<TextDocumentSyncKind> for TextDocumentSyncCapability {
+ fn from(from: TextDocumentSyncKind) -> Self {
+ Self::Kind(from)
+ }
+}
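+
+// Illustrative sketch: a typical server-side sync configuration with open/close
+// notifications, incremental change events, and the full text included on save,
+// built via the `From` conversions above.
+#[allow(dead_code)]
+fn example_sync_capability() -> TextDocumentSyncCapability {
+ TextDocumentSyncOptions {
+ open_close: Some(true),
+ change: Some(TextDocumentSyncKind::INCREMENTAL),
+ will_save: None,
+ will_save_wait_until: None,
+ save: Some(SaveOptions { include_text: Some(true) }.into()),
+ }
+ .into()
+}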
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(untagged)]
+pub enum ImplementationProviderCapability {
+ Simple(bool),
+ Options(StaticTextDocumentRegistrationOptions),
+}
+
+impl From<StaticTextDocumentRegistrationOptions> for ImplementationProviderCapability {
+ fn from(from: StaticTextDocumentRegistrationOptions) -> Self {
+ Self::Options(from)
+ }
+}
+
+impl From<bool> for ImplementationProviderCapability {
+ fn from(from: bool) -> Self {
+ Self::Simple(from)
+ }
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(untagged)]
+pub enum TypeDefinitionProviderCapability {
+ Simple(bool),
+ Options(StaticTextDocumentRegistrationOptions),
+}
+
+impl From<StaticTextDocumentRegistrationOptions> for TypeDefinitionProviderCapability {
+ fn from(from: StaticTextDocumentRegistrationOptions) -> Self {
+ Self::Options(from)
+ }
+}
+
+impl From<bool> for TypeDefinitionProviderCapability {
+ fn from(from: bool) -> Self {
+ Self::Simple(from)
+ }
+}
+
+#[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct ServerCapabilities {
+ /// The position encoding the server picked from the encodings offered
+ /// by the client via the client capability `general.positionEncodings`.
+ ///
+ /// If the client didn't provide any position encodings the only valid
+ /// value that a server can return is 'utf-16'.
+ ///
+ /// If omitted it defaults to 'utf-16'.
+ ///
+ /// @since 3.17.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub position_encoding: Option<PositionEncodingKind>,
+
+ /// Defines how text documents are synced.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub text_document_sync: Option<TextDocumentSyncCapability>,
+
+ /// Capabilities specific to `textDocument/selectionRange` requests.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub selection_range_provider: Option<SelectionRangeProviderCapability>,
+
+ /// The server provides hover support.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub hover_provider: Option<HoverProviderCapability>,
+
+ /// The server provides completion support.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub completion_provider: Option<CompletionOptions>,
+
+ /// The server provides signature help support.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub signature_help_provider: Option<SignatureHelpOptions>,
+
+ /// The server provides goto definition support.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub definition_provider: Option<OneOf<bool, DefinitionOptions>>,
+
+ /// The server provides goto type definition support.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub type_definition_provider: Option<TypeDefinitionProviderCapability>,
+
+ /// The server provides goto implementation support.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub implementation_provider: Option<ImplementationProviderCapability>,
+
+ /// The server provides find references support.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub references_provider: Option<OneOf<bool, ReferencesOptions>>,
+
+ /// The server provides document highlight support.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub document_highlight_provider: Option<OneOf<bool, DocumentHighlightOptions>>,
+
+ /// The server provides document symbol support.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub document_symbol_provider: Option<OneOf<bool, DocumentSymbolOptions>>,
+
+ /// The server provides workspace symbol support.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub workspace_symbol_provider: Option<OneOf<bool, WorkspaceSymbolOptions>>,
+
+ /// The server provides code actions.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub code_action_provider: Option<CodeActionProviderCapability>,
+
+ /// The server provides code lens.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub code_lens_provider: Option<CodeLensOptions>,
+
+ /// The server provides document formatting.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub document_formatting_provider: Option<OneOf<bool, DocumentFormattingOptions>>,
+
+ /// The server provides document range formatting.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub document_range_formatting_provider: Option<OneOf<bool, DocumentRangeFormattingOptions>>,
+
+ /// The server provides document formatting on typing.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub document_on_type_formatting_provider: Option<DocumentOnTypeFormattingOptions>,
+
+ /// The server provides rename support.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub rename_provider: Option<OneOf<bool, RenameOptions>>,
+
+ /// The server provides document link support.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub document_link_provider: Option<DocumentLinkOptions>,
+
+ /// The server provides color provider support.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub color_provider: Option<ColorProviderCapability>,
+
+ /// The server provides folding provider support.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub folding_range_provider: Option<FoldingRangeProviderCapability>,
+
+ /// The server provides go to declaration support.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub declaration_provider: Option<DeclarationCapability>,
+
+ /// The server provides execute command support.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub execute_command_provider: Option<ExecuteCommandOptions>,
+
+ /// Workspace specific server capabilities
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub workspace: Option<WorkspaceServerCapabilities>,
+
+ /// Call hierarchy provider capabilities.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub call_hierarchy_provider: Option<CallHierarchyServerCapability>,
+
+ /// Semantic tokens server capabilities.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub semantic_tokens_provider: Option<SemanticTokensServerCapabilities>,
+
+ /// Whether server provides moniker support.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub moniker_provider: Option<OneOf<bool, MonikerServerCapabilities>>,
+
+ /// The server provides inline values.
+ ///
+ /// @since 3.17.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub inline_value_provider: Option<OneOf<bool, InlineValueServerCapabilities>>,
+
+ /// The server provides inlay hints.
+ ///
+ /// @since 3.17.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub inlay_hint_provider: Option<OneOf<bool, InlayHintServerCapabilities>>,
+
+ /// The server provides linked editing range support.
+ ///
+ /// @since 3.16.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub linked_editing_range_provider: Option<LinkedEditingRangeServerCapabilities>,
+
+ /// Experimental server capabilities.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub experimental: Option<Value>,
+}
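Because every capability field is optional and skipped when `None`, a server normally sets only what it implements and leaves the rest to a default. A minimal sketch of what a downstream consumer of this crate might return from `initialize` (assuming the struct provides a derived `Default` impl, which its all-optional layout suggests; this is not taken from the patch itself):

```rust
use lsp_types::{HoverProviderCapability, OneOf, ServerCapabilities};

fn main() {
    // Advertise only hover and go-to-definition; everything else stays `None`
    // and is omitted from the serialized JSON by `skip_serializing_if`.
    let capabilities = ServerCapabilities {
        hover_provider: Some(HoverProviderCapability::Simple(true)),
        definition_provider: Some(OneOf::Left(true)),
        ..ServerCapabilities::default() // assumes a derived `Default` impl
    };

    let json = serde_json::to_string(&capabilities).unwrap();
    assert!(json.contains(r#""hoverProvider":true"#));
    assert!(json.contains(r#""definitionProvider":true"#));
}
```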
+
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct WorkspaceServerCapabilities {
+ /// The server supports workspace folder.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub workspace_folders: Option<WorkspaceFoldersServerCapabilities>,
+
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub file_operations: Option<WorkspaceFileOperationsServerCapabilities>,
+}
+
+/// General parameters to register for a capability.
+#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct Registration {
+ /// The id used to register the request. The id can be used to deregister
+ /// the request again.
+ pub id: String,
+
+ /// The method / capability to register for.
+ pub method: String,
+
+ /// Options necessary for the registration.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub register_options: Option<Value>,
+}
+
+#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
+pub struct RegistrationParams {
+ pub registrations: Vec<Registration>,
+}
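For dynamic registration the server sends these structs to the client; `register_options` is untyped JSON whose shape depends on the method being registered. A sketch with an illustrative file-watcher payload:

```rust
use lsp_types::{Registration, RegistrationParams};
use serde_json::json;

fn main() {
    let params = RegistrationParams {
        registrations: vec![Registration {
            // Any unique id works; it is echoed back when unregistering.
            id: "watched-files-1".to_string(),
            method: "workspace/didChangeWatchedFiles".to_string(),
            // Method-specific options travel as raw JSON.
            register_options: Some(json!({
                "watchers": [{ "globPattern": "**/*.rs" }]
            })),
        }],
    };

    assert_eq!(params.registrations.len(), 1);
}
```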
+
+/// Since most of the registration options require specifying a document selector, there is a base
+/// interface that can be used.
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct TextDocumentRegistrationOptions {
+ /// A document selector to identify the scope of the registration. If set to null
+ /// the document selector provided on the client side will be used.
+ pub document_selector: Option<DocumentSelector>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(untagged)]
+pub enum DeclarationCapability {
+ Simple(bool),
+ RegistrationOptions(DeclarationRegistrationOptions),
+ Options(DeclarationOptions),
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct DeclarationRegistrationOptions {
+ #[serde(flatten)]
+ pub declaration_options: DeclarationOptions,
+
+ #[serde(flatten)]
+ pub text_document_registration_options: TextDocumentRegistrationOptions,
+
+ #[serde(flatten)]
+ pub static_registration_options: StaticRegistrationOptions,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct DeclarationOptions {
+ #[serde(flatten)]
+ pub work_done_progress_options: WorkDoneProgressOptions,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct StaticRegistrationOptions {
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub id: Option<String>,
+}
+
+#[derive(Debug, Default, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct WorkDoneProgressOptions {
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub work_done_progress: Option<bool>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct DocumentFormattingOptions {
+ #[serde(flatten)]
+ pub work_done_progress_options: WorkDoneProgressOptions,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct DocumentRangeFormattingOptions {
+ #[serde(flatten)]
+ pub work_done_progress_options: WorkDoneProgressOptions,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct DefinitionOptions {
+ #[serde(flatten)]
+ pub work_done_progress_options: WorkDoneProgressOptions,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct DocumentSymbolOptions {
+    /// A human-readable string that is shown when multiple outline trees are
+ /// shown for the same document.
+ ///
+ /// @since 3.16.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub label: Option<String>,
+
+ #[serde(flatten)]
+ pub work_done_progress_options: WorkDoneProgressOptions,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct ReferencesOptions {
+ #[serde(flatten)]
+ pub work_done_progress_options: WorkDoneProgressOptions,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct DocumentHighlightOptions {
+ #[serde(flatten)]
+ pub work_done_progress_options: WorkDoneProgressOptions,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct WorkspaceSymbolOptions {
+ #[serde(flatten)]
+ pub work_done_progress_options: WorkDoneProgressOptions,
+
+ /// The server provides support to resolve additional
+ /// information for a workspace symbol.
+ ///
+ /// @since 3.17.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub resolve_provider: Option<bool>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct StaticTextDocumentRegistrationOptions {
+ /// A document selector to identify the scope of the registration. If set to null
+ /// the document selector provided on the client side will be used.
+ pub document_selector: Option<DocumentSelector>,
+
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub id: Option<String>,
+}
+
+/// General parameters to unregister a capability.
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+pub struct Unregistration {
+ /// The id used to unregister the request or notification. Usually an id
+ /// provided during the register request.
+ pub id: String,
+
+ /// The method / capability to unregister for.
+ pub method: String,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+pub struct UnregistrationParams {
+ pub unregisterations: Vec<Unregistration>,
+}
+
+#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
+pub struct DidChangeConfigurationParams {
+ /// The actual changed settings
+ pub settings: Value,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct DidOpenTextDocumentParams {
+ /// The document that was opened.
+ pub text_document: TextDocumentItem,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct DidChangeTextDocumentParams {
+ /// The document that did change. The version number points
+ /// to the version after all provided content changes have
+ /// been applied.
+ pub text_document: VersionedTextDocumentIdentifier,
+ /// The actual content changes.
+ pub content_changes: Vec<TextDocumentContentChangeEvent>,
+}
+
+/// An event describing a change to a text document. If range and rangeLength are omitted
+/// the new text is considered to be the full content of the document.
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct TextDocumentContentChangeEvent {
+ /// The range of the document that changed.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub range: Option<Range>,
+
+ /// The length of the range that got replaced.
+ ///
+ /// Deprecated: Use range instead
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub range_length: Option<u32>,
+
+ /// The new text of the document.
+ pub text: String,
+}
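The same struct therefore covers both sync modes: with `range` set it describes an incremental edit, without it the `text` replaces the whole document. A sketch using the crate's `Range::new`/`Position::new` constructors:

```rust
use lsp_types::{Position, Range, TextDocumentContentChangeEvent};

fn main() {
    // Incremental: replace characters 0..5 on the first line with "hello".
    let incremental = TextDocumentContentChangeEvent {
        range: Some(Range::new(Position::new(0, 0), Position::new(0, 5))),
        range_length: None, // deprecated; superseded by `range`
        text: "hello".to_string(),
    };

    // Full sync: no range, `text` is the entire new document content.
    let full = TextDocumentContentChangeEvent {
        range: None,
        range_length: None,
        text: "fn main() {}\n".to_string(),
    };

    assert!(incremental.range.is_some() && full.range.is_none());
}
```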
+
+/// Describe options to be used when registered for text document change events.
+///
+/// Extends TextDocumentRegistrationOptions
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct TextDocumentChangeRegistrationOptions {
+ /// A document selector to identify the scope of the registration. If set to null
+ /// the document selector provided on the client side will be used.
+ pub document_selector: Option<DocumentSelector>,
+
+ /// How documents are synced to the server. See TextDocumentSyncKind.Full
+    /// and TextDocumentSyncKind.Incremental.
+ pub sync_kind: i32,
+}
+
+/// The parameters send in a will save text document notification.
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct WillSaveTextDocumentParams {
+ /// The document that will be saved.
+ pub text_document: TextDocumentIdentifier,
+
+ /// The 'TextDocumentSaveReason'.
+ pub reason: TextDocumentSaveReason,
+}
+
+/// Represents reasons why a text document is saved.
+#[derive(Copy, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(transparent)]
+pub struct TextDocumentSaveReason(i32);
+lsp_enum! {
+impl TextDocumentSaveReason {
+ /// Manually triggered, e.g. by the user pressing save, by starting debugging,
+ /// or by an API call.
+ pub const MANUAL: TextDocumentSaveReason = TextDocumentSaveReason(1);
+
+ /// Automatic after a delay.
+ pub const AFTER_DELAY: TextDocumentSaveReason = TextDocumentSaveReason(2);
+
+ /// When the editor lost focus.
+ pub const FOCUS_OUT: TextDocumentSaveReason = TextDocumentSaveReason(3);
+}
+}
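As with the other `lsp_enum!` newtypes in this file, the `#[serde(transparent)]` wrapper serializes as the bare protocol integer, so values from future protocol versions round-trip instead of failing to parse. For example:

```rust
use lsp_types::TextDocumentSaveReason;

fn main() {
    // Known constants serialize as their protocol number...
    assert_eq!(
        serde_json::to_string(&TextDocumentSaveReason::MANUAL).unwrap(),
        "1"
    );

    // ...and a value this version doesn't know about still deserializes.
    let future: TextDocumentSaveReason = serde_json::from_str("42").unwrap();
    assert_eq!(serde_json::to_string(&future).unwrap(), "42");
}
```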
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct DidCloseTextDocumentParams {
+ /// The document that was closed.
+ pub text_document: TextDocumentIdentifier,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct DidSaveTextDocumentParams {
+ /// The document that was saved.
+ pub text_document: TextDocumentIdentifier,
+
+    /// Optionally the content when saved. Depends on the includeText value
+ /// when the save notification was requested.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub text: Option<String>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct TextDocumentSaveRegistrationOptions {
+ /// The client is supposed to include the content on save.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub include_text: Option<bool>,
+
+ #[serde(flatten)]
+ pub text_document_registration_options: TextDocumentRegistrationOptions,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Copy, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct DidChangeWatchedFilesClientCapabilities {
+ /// Did change watched files notification supports dynamic registration.
+ /// Please note that the current protocol doesn't support static
+ /// configuration for file changes from the server side.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub dynamic_registration: Option<bool>,
+
+ /// Whether the client has support for relative patterns
+ /// or not.
+ ///
+ /// @since 3.17.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub relative_pattern_support: Option<bool>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+pub struct DidChangeWatchedFilesParams {
+ /// The actual file events.
+ pub changes: Vec<FileEvent>,
+}
+
+/// The file event type.
+#[derive(Eq, PartialEq, Hash, Copy, Clone, Deserialize, Serialize)]
+#[serde(transparent)]
+pub struct FileChangeType(i32);
+lsp_enum! {
+impl FileChangeType {
+ /// The file got created.
+ pub const CREATED: FileChangeType = FileChangeType(1);
+
+ /// The file got changed.
+ pub const CHANGED: FileChangeType = FileChangeType(2);
+
+ /// The file got deleted.
+ pub const DELETED: FileChangeType = FileChangeType(3);
+}
+}
+
+/// An event describing a file change.
+#[derive(Debug, Eq, Hash, PartialEq, Clone, Deserialize, Serialize)]
+pub struct FileEvent {
+ /// The file's URI.
+ pub uri: Url,
+
+ /// The change type.
+ #[serde(rename = "type")]
+ pub typ: FileChangeType,
+}
+
+impl FileEvent {
+ pub fn new(uri: Url, typ: FileChangeType) -> FileEvent {
+ FileEvent { uri, typ }
+ }
+}
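A `workspace/didChangeWatchedFiles` notification is just a list of URI/change-type pairs, and `FileEvent::new` keeps that construction terse. A sketch with made-up paths:

```rust
use lsp_types::{DidChangeWatchedFilesParams, FileChangeType, FileEvent, Url};

fn main() {
    let params = DidChangeWatchedFilesParams {
        changes: vec![
            FileEvent::new(
                Url::parse("file:///project/src/lib.rs").unwrap(),
                FileChangeType::CHANGED,
            ),
            FileEvent::new(
                Url::parse("file:///project/src/old.rs").unwrap(),
                FileChangeType::DELETED,
            ),
        ],
    };

    // `typ` is renamed to `type` on the wire and the newtype serializes
    // as its protocol number (2 = changed, 3 = deleted).
    let json = serde_json::to_string(&params).unwrap();
    assert!(json.contains(r#""type":2"#) && json.contains(r#""type":3"#));
}
```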
+
+/// Describe options to be used when registered for text document change events.
+#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Deserialize, Serialize)]
+pub struct DidChangeWatchedFilesRegistrationOptions {
+ /// The watchers to register.
+ pub watchers: Vec<FileSystemWatcher>,
+}
+
+#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct FileSystemWatcher {
+ /// The glob pattern to watch. See {@link GlobPattern glob pattern}
+ /// for more detail.
+ ///
+ /// @since 3.17.0 support for relative patterns.
+ pub glob_pattern: GlobPattern,
+
+ /// The kind of events of interest. If omitted it defaults to WatchKind.Create |
+ /// WatchKind.Change | WatchKind.Delete which is 7.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub kind: Option<WatchKind>,
+}
+
+/// The glob pattern. Either a string pattern or a relative pattern.
+///
+/// @since 3.17.0
+#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Deserialize, Serialize)]
+#[serde(untagged)]
+pub enum GlobPattern {
+ String(Pattern),
+ Relative(RelativePattern),
+}
+
+impl From<Pattern> for GlobPattern {
+ #[inline]
+ fn from(from: Pattern) -> Self {
+ Self::String(from)
+ }
+}
+
+impl From<RelativePattern> for GlobPattern {
+ #[inline]
+ fn from(from: RelativePattern) -> Self {
+ Self::Relative(from)
+ }
+}
+
+/// A relative pattern is a helper to construct glob patterns that are matched
+/// relatively to a base URI. The common value for a `baseUri` is a workspace
+/// folder root, but it can be another absolute URI as well.
+///
+/// @since 3.17.0
+#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct RelativePattern {
+ /// A workspace folder or a base URI to which this pattern will be matched
+ /// against relatively.
+ pub base_uri: OneOf<WorkspaceFolder, Url>,
+
+ /// The actual glob pattern.
+ pub pattern: Pattern,
+}
+
+/// The glob pattern to watch relative to the base path. Glob patterns can have
+/// the following syntax:
+/// - `*` to match one or more characters in a path segment
+/// - `?` to match on one character in a path segment
+/// - `**` to match any number of path segments, including none
+/// - `{}` to group conditions (e.g. `**​/*.{ts,js}` matches all TypeScript
+/// and JavaScript files)
+/// - `[]` to declare a range of characters to match in a path segment
+/// (e.g., `example.[0-9]` to match on `example.0`, `example.1`, …)
+/// - `[!...]` to negate a range of characters to match in a path segment
+/// (e.g., `example.[!0-9]` to match on `example.a`, `example.b`,
+/// but not `example.0`)
+///
+/// @since 3.17.0
+pub type Pattern = String;
+
+bitflags! {
+pub struct WatchKind: u8 {
+ /// Interested in create events.
+ const Create = 1;
+ /// Interested in change events
+ const Change = 2;
+ /// Interested in delete events
+ const Delete = 4;
+}
+}
+
+impl<'de> serde::Deserialize<'de> for WatchKind {
+ fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+ where
+ D: serde::Deserializer<'de>,
+ {
+ let i = u8::deserialize(deserializer)?;
+ WatchKind::from_bits(i).ok_or_else(|| {
+ D::Error::invalid_value(de::Unexpected::Unsigned(u64::from(i)), &"Unknown flag")
+ })
+ }
+}
+
+impl serde::Serialize for WatchKind {
+ fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+ where
+ S: serde::Serializer,
+ {
+ serializer.serialize_u8(self.bits())
+ }
+}
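Putting the watcher types together: a registration pairs a glob pattern with an optional `WatchKind` bitmask, and the custom (de)serializers above make the mask travel as a plain number. A sketch:

```rust
use lsp_types::{
    DidChangeWatchedFilesRegistrationOptions, FileSystemWatcher, GlobPattern, WatchKind,
};

fn main() {
    let options = DidChangeWatchedFilesRegistrationOptions {
        watchers: vec![FileSystemWatcher {
            // A plain string pattern; `RelativePattern` would anchor it to a base URI.
            glob_pattern: GlobPattern::String("**/*.rs".to_string()),
            // Create | Change = 1 | 2, i.e. 3 on the wire; `None` means all events (7).
            kind: Some(WatchKind::Create | WatchKind::Change),
        }],
    };

    let json = serde_json::to_string(&options).unwrap();
    assert!(json.contains(r#""globPattern":"**/*.rs""#) && json.contains(r#""kind":3"#));
}
```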
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+pub struct PublishDiagnosticsParams {
+ /// The URI for which diagnostic information is reported.
+ pub uri: Url,
+
+ /// An array of diagnostic information items.
+ pub diagnostics: Vec<Diagnostic>,
+
+    /// Optionally the version number of the document the diagnostics are published for.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub version: Option<i32>,
+}
+
+impl PublishDiagnosticsParams {
+ pub fn new(
+ uri: Url,
+ diagnostics: Vec<Diagnostic>,
+ version: Option<i32>,
+ ) -> PublishDiagnosticsParams {
+ PublishDiagnosticsParams {
+ uri,
+ diagnostics,
+ version,
+ }
+ }
+}
+
+#[derive(Debug, Eq, PartialEq, Deserialize, Serialize, Clone)]
+#[serde(untagged)]
+pub enum Documentation {
+ String(String),
+ MarkupContent(MarkupContent),
+}
+
+/// The marked string is rendered:
+/// - as markdown if it is represented as a string
+/// - as a code block of the given language if it is represented as a pair of a language and a value
+///
+/// The pair of a language and a value is equivalent to the markdown:
+/// ```${language}
+/// ${value}
+/// ```
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(untagged)]
+pub enum MarkedString {
+ String(String),
+ LanguageString(LanguageString),
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+pub struct LanguageString {
+ pub language: String,
+ pub value: String,
+}
+
+impl MarkedString {
+ pub fn from_markdown(markdown: String) -> MarkedString {
+ MarkedString::String(markdown)
+ }
+
+ pub fn from_language_code(language: String, code_block: String) -> MarkedString {
+ MarkedString::LanguageString(LanguageString {
+ language,
+ value: code_block,
+ })
+ }
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct GotoDefinitionParams {
+ #[serde(flatten)]
+ pub text_document_position_params: TextDocumentPositionParams,
+
+ #[serde(flatten)]
+ pub work_done_progress_params: WorkDoneProgressParams,
+
+ #[serde(flatten)]
+ pub partial_result_params: PartialResultParams,
+}
+
+/// A GotoDefinition response can be a single location, multiple locations, or a list of location links.
+#[derive(Debug, PartialEq, Serialize, Deserialize, Clone)]
+#[serde(untagged)]
+pub enum GotoDefinitionResponse {
+ Scalar(Location),
+ Array(Vec<Location>),
+ Link(Vec<LocationLink>),
+}
+
+impl From<Location> for GotoDefinitionResponse {
+ fn from(location: Location) -> Self {
+ GotoDefinitionResponse::Scalar(location)
+ }
+}
+
+impl From<Vec<Location>> for GotoDefinitionResponse {
+ fn from(locations: Vec<Location>) -> Self {
+ GotoDefinitionResponse::Array(locations)
+ }
+}
+
+impl From<Vec<LocationLink>> for GotoDefinitionResponse {
+ fn from(locations: Vec<LocationLink>) -> Self {
+ GotoDefinitionResponse::Link(locations)
+ }
+}
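These `From` impls let a handler return whichever shape it naturally produces and convert it with `.into()`. A sketch (assuming the crate's `Location::new` constructor and `Copy` ranges, as in released `lsp-types` versions):

```rust
use lsp_types::{GotoDefinitionResponse, Location, Position, Range, Url};

fn main() {
    let uri = Url::parse("file:///project/src/lib.rs").unwrap();
    let range = Range::new(Position::new(10, 4), Position::new(10, 12));

    // A single location becomes the `Scalar` variant...
    let single: GotoDefinitionResponse = Location::new(uri.clone(), range).into();
    // ...and a vector becomes `Array`; both serialize untagged.
    let many: GotoDefinitionResponse = vec![Location::new(uri, range)].into();

    assert!(matches!(single, GotoDefinitionResponse::Scalar(_)));
    assert!(matches!(many, GotoDefinitionResponse::Array(_)));
}
```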
+
+#[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)]
+pub struct ExecuteCommandParams {
+ /// The identifier of the actual command handler.
+ pub command: String,
+ /// Arguments that the command should be invoked with.
+ #[serde(default)]
+ pub arguments: Vec<Value>,
+
+ #[serde(flatten)]
+ pub work_done_progress_params: WorkDoneProgressParams,
+}
+
+/// Execute command registration options.
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+pub struct ExecuteCommandRegistrationOptions {
+ /// The commands to be executed on the server
+ pub commands: Vec<String>,
+
+ #[serde(flatten)]
+ pub execute_command_options: ExecuteCommandOptions,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct ApplyWorkspaceEditParams {
+ /// An optional label of the workspace edit. This label is
+ /// presented in the user interface for example on an undo
+ /// stack to undo the workspace edit.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub label: Option<String>,
+
+ /// The edits to apply.
+ pub edit: WorkspaceEdit,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct ApplyWorkspaceEditResponse {
+ /// Indicates whether the edit was applied or not.
+ pub applied: bool,
+
+ /// An optional textual description for why the edit was not applied.
+    /// This may be used by the server for diagnostic
+    /// logging or to provide a suitable error for a request that
+    /// triggered the edit.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub failure_reason: Option<String>,
+
+ /// Depending on the client's failure handling strategy `failedChange` might
+ /// contain the index of the change that failed. This property is only available
+ /// if the client signals a `failureHandlingStrategy` in its client capabilities.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub failed_change: Option<u32>,
+}
+
+/// Describes the content type that a client supports in various
+/// result literals like `Hover`, `ParameterInfo` or `CompletionItem`.
+///
+/// Please note that `MarkupKinds` must not start with a `$`. These kinds
+/// are reserved for internal usage.
+#[derive(Debug, Eq, PartialEq, Deserialize, Serialize, Clone)]
+#[serde(rename_all = "lowercase")]
+pub enum MarkupKind {
+ /// Plain text is supported as a content format
+ PlainText,
+ /// Markdown is supported as a content format
+ Markdown,
+}
+
+/// A `MarkupContent` literal represents a string value which content can be represented in different formats.
+/// Currently `plaintext` and `markdown` are supported formats. A `MarkupContent` is usually used in
+/// documentation properties of result literals like `CompletionItem` or `SignatureInformation`.
+/// If the format is `markdown` the content should follow the [GitHub Flavored Markdown Specification](https://github.github.com/gfm/).
+///
+/// Here is an example of how such a string can be constructed using JavaScript / TypeScript:
+/// ```ignore
+/// let markdown: MarkupContent = {
+/// kind: MarkupKind::Markdown,
+/// value: [
+/// "# Header",
+/// "Some text",
+/// "```typescript",
+/// "someCode();",
+/// "```"
+/// ]
+/// .join("\n"),
+/// };
+/// ```
+///
+/// *Please note* that clients might sanitize the returned markdown. A client could decide to
+/// remove HTML from the markdown to avoid script execution.
+#[derive(Debug, Eq, PartialEq, Deserialize, Serialize, Clone)]
+pub struct MarkupContent {
+ pub kind: MarkupKind,
+ pub value: String,
+}
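The Rust equivalent of the TypeScript construction above is a plain struct literal; the two fields serialize to the `kind`/`value` pair clients expect:

```rust
use lsp_types::{MarkupContent, MarkupKind};

fn main() {
    let docs = MarkupContent {
        kind: MarkupKind::Markdown,
        value: ["# Header", "", "Some text with `inline code`."].join("\n"),
    };

    // `MarkupKind` uses lowercase names on the wire.
    assert!(serde_json::to_string(&docs)
        .unwrap()
        .starts_with(r#"{"kind":"markdown""#));
}
```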
+
+/// A parameter literal used to pass a partial result token.
+#[derive(Debug, Eq, PartialEq, Default, Deserialize, Serialize, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct PartialResultParams {
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub partial_result_token: Option<ProgressToken>,
+}
+
+/// Symbol tags are extra annotations that tweak the rendering of a symbol.
+///
+/// @since 3.15.0
+#[derive(Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(transparent)]
+pub struct SymbolTag(i32);
+lsp_enum! {
+impl SymbolTag {
+ /// Render a symbol as obsolete, usually using a strike-out.
+ pub const DEPRECATED: SymbolTag = SymbolTag(1);
+}
+}
+
+#[cfg(test)]
+mod tests {
+ use serde::{Deserialize, Serialize};
+
+ use super::*;
+
+ pub(crate) fn test_serialization<SER>(ms: &SER, expected: &str)
+ where
+ SER: Serialize + for<'de> Deserialize<'de> + PartialEq + std::fmt::Debug,
+ {
+ let json_str = serde_json::to_string(ms).unwrap();
+ assert_eq!(&json_str, expected);
+ let deserialized: SER = serde_json::from_str(&json_str).unwrap();
+ assert_eq!(&deserialized, ms);
+ }
+
+ pub(crate) fn test_deserialization<T>(json: &str, expected: &T)
+ where
+ T: for<'de> Deserialize<'de> + PartialEq + std::fmt::Debug,
+ {
+ let value = serde_json::from_str::<T>(json).unwrap();
+ assert_eq!(&value, expected);
+ }
+
+ #[test]
+ fn one_of() {
+ test_serialization(&OneOf::<bool, ()>::Left(true), r#"true"#);
+ test_serialization(&OneOf::<String, ()>::Left("abcd".into()), r#""abcd""#);
+ test_serialization(
+ &OneOf::<String, WorkDoneProgressOptions>::Right(WorkDoneProgressOptions {
+ work_done_progress: Some(false),
+ }),
+ r#"{"workDoneProgress":false}"#,
+ );
+ }
+
+ #[test]
+ fn number_or_string() {
+ test_serialization(&NumberOrString::Number(123), r#"123"#);
+
+ test_serialization(&NumberOrString::String("abcd".into()), r#""abcd""#);
+ }
+
+ #[test]
+ fn marked_string() {
+ test_serialization(&MarkedString::from_markdown("xxx".into()), r#""xxx""#);
+
+ test_serialization(
+ &MarkedString::from_language_code("lang".into(), "code".into()),
+ r#"{"language":"lang","value":"code"}"#,
+ );
+ }
+
+ #[test]
+ fn language_string() {
+ test_serialization(
+ &LanguageString {
+ language: "LL".into(),
+ value: "VV".into(),
+ },
+ r#"{"language":"LL","value":"VV"}"#,
+ );
+ }
+
+ #[test]
+ fn workspace_edit() {
+ test_serialization(
+ &WorkspaceEdit {
+ changes: Some(vec![].into_iter().collect()),
+ document_changes: None,
+ ..Default::default()
+ },
+ r#"{"changes":{}}"#,
+ );
+
+ test_serialization(
+ &WorkspaceEdit {
+ changes: None,
+ document_changes: None,
+ ..Default::default()
+ },
+ r#"{}"#,
+ );
+
+ test_serialization(
+ &WorkspaceEdit {
+ changes: Some(
+ vec![(Url::parse("file://test").unwrap(), vec![])]
+ .into_iter()
+ .collect(),
+ ),
+ document_changes: None,
+ ..Default::default()
+ },
+ r#"{"changes":{"file://test/":[]}}"#,
+ );
+ }
+
+ #[test]
+ fn root_uri_can_be_missing() {
+ serde_json::from_str::<InitializeParams>(r#"{ "capabilities": {} }"#).unwrap();
+ }
+
+ #[test]
+ fn test_watch_kind() {
+ test_serialization(&WatchKind::Create, "1");
+ test_serialization(&(WatchKind::Create | WatchKind::Change), "3");
+ test_serialization(
+ &(WatchKind::Create | WatchKind::Change | WatchKind::Delete),
+ "7",
+ );
+ }
+
+ #[test]
+ fn test_resource_operation_kind() {
+ test_serialization(
+ &vec![
+ ResourceOperationKind::Create,
+ ResourceOperationKind::Rename,
+ ResourceOperationKind::Delete,
+ ],
+ r#"["create","rename","delete"]"#,
+ );
+ }
+}
diff --git a/vendor/lsp-types/src/linked_editing.rs b/vendor/lsp-types/src/linked_editing.rs
index b23fb141f..615ca5a24 100644
--- a/vendor/lsp-types/src/linked_editing.rs
+++ b/vendor/lsp-types/src/linked_editing.rs
@@ -1,61 +1,61 @@
-use serde::{Deserialize, Serialize};
-
-use crate::{
- DynamicRegistrationClientCapabilities, Range, StaticRegistrationOptions,
- TextDocumentPositionParams, TextDocumentRegistrationOptions, WorkDoneProgressOptions,
- WorkDoneProgressParams,
-};
-
-pub type LinkedEditingRangeClientCapabilities = DynamicRegistrationClientCapabilities;
-
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct LinkedEditingRangeOptions {
- #[serde(flatten)]
- pub work_done_progress_options: WorkDoneProgressOptions,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct LinkedEditingRangeRegistrationOptions {
- #[serde(flatten)]
- pub text_document_registration_options: TextDocumentRegistrationOptions,
-
- #[serde(flatten)]
- pub linked_editing_range_options: LinkedEditingRangeOptions,
-
- #[serde(flatten)]
- pub static_registration_options: StaticRegistrationOptions,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(untagged)]
-pub enum LinkedEditingRangeServerCapabilities {
- Simple(bool),
- Options(LinkedEditingRangeOptions),
- RegistrationOptions(LinkedEditingRangeRegistrationOptions),
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct LinkedEditingRangeParams {
- #[serde(flatten)]
- pub text_document_position_params: TextDocumentPositionParams,
-
- #[serde(flatten)]
- pub work_done_progress_params: WorkDoneProgressParams,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct LinkedEditingRanges {
- /// A list of ranges that can be renamed together. The ranges must have
- /// identical length and contain identical text content. The ranges cannot overlap.
- pub ranges: Vec<Range>,
-
- /// An optional word pattern (regular expression) that describes valid contents for
- /// the given ranges. If no pattern is provided, the client configuration's word
- /// pattern will be used.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub word_pattern: Option<String>,
-}
+use serde::{Deserialize, Serialize};
+
+use crate::{
+ DynamicRegistrationClientCapabilities, Range, StaticRegistrationOptions,
+ TextDocumentPositionParams, TextDocumentRegistrationOptions, WorkDoneProgressOptions,
+ WorkDoneProgressParams,
+};
+
+pub type LinkedEditingRangeClientCapabilities = DynamicRegistrationClientCapabilities;
+
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct LinkedEditingRangeOptions {
+ #[serde(flatten)]
+ pub work_done_progress_options: WorkDoneProgressOptions,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct LinkedEditingRangeRegistrationOptions {
+ #[serde(flatten)]
+ pub text_document_registration_options: TextDocumentRegistrationOptions,
+
+ #[serde(flatten)]
+ pub linked_editing_range_options: LinkedEditingRangeOptions,
+
+ #[serde(flatten)]
+ pub static_registration_options: StaticRegistrationOptions,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(untagged)]
+pub enum LinkedEditingRangeServerCapabilities {
+ Simple(bool),
+ Options(LinkedEditingRangeOptions),
+ RegistrationOptions(LinkedEditingRangeRegistrationOptions),
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct LinkedEditingRangeParams {
+ #[serde(flatten)]
+ pub text_document_position_params: TextDocumentPositionParams,
+
+ #[serde(flatten)]
+ pub work_done_progress_params: WorkDoneProgressParams,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct LinkedEditingRanges {
+ /// A list of ranges that can be renamed together. The ranges must have
+ /// identical length and contain identical text content. The ranges cannot overlap.
+ pub ranges: Vec<Range>,
+
+ /// An optional word pattern (regular expression) that describes valid contents for
+ /// the given ranges. If no pattern is provided, the client configuration's word
+ /// pattern will be used.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub word_pattern: Option<String>,
+}
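A server answering `textDocument/linkedEditingRange` returns ranges that all cover the same text and are edited in lockstep (the classic case being paired HTML tag names). A hedged sketch:

```rust
use lsp_types::{LinkedEditingRanges, Position, Range};

fn main() {
    // Two occurrences of the same 3-character identifier, e.g. "div" in
    // an opening and a closing tag on line 0.
    let response = LinkedEditingRanges {
        ranges: vec![
            Range::new(Position::new(0, 1), Position::new(0, 4)),
            Range::new(Position::new(0, 10), Position::new(0, 13)),
        ],
        // Restrict edits to word characters; `None` falls back to the
        // client's configured word pattern.
        word_pattern: Some(r"\w+".to_string()),
    };

    assert_eq!(response.ranges.len(), 2);
}
```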
diff --git a/vendor/lsp-types/src/lsif.rs b/vendor/lsp-types/src/lsif.rs
index 164d4eb4a..a28256308 100644
--- a/vendor/lsp-types/src/lsif.rs
+++ b/vendor/lsp-types/src/lsif.rs
@@ -1,338 +1,338 @@
-//! Types of Language Server Index Format (LSIF). LSIF is a standard format
-//! for language servers or other programming tools to dump their knowledge
-//! about a workspace.
-//!
-//! Based on <https://microsoft.github.io/language-server-protocol/specifications/lsif/0.6.0/specification/>
-
-use serde::{Deserialize, Serialize};
-use crate::{Url, Range};
-
-pub type Id = crate::NumberOrString;
-
-#[derive(Debug, PartialEq, Serialize, Deserialize)]
-#[serde(untagged)]
-pub enum LocationOrRangeId {
- Location(crate::Location),
- RangeId(Id),
-}
-
-#[derive(Debug, PartialEq, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct Entry {
- pub id: Id,
- #[serde(flatten)]
- pub data: Element,
-}
-
-#[derive(Debug, PartialEq, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-#[serde(tag = "type")]
-pub enum Element {
- Vertex(Vertex),
- Edge(Edge),
-}
-
-#[derive(Debug, PartialEq, Serialize, Deserialize)]
-pub struct ToolInfo {
- pub name: String,
- #[serde(default = "Default::default")]
- #[serde(skip_serializing_if = "Vec::is_empty")]
- pub args: Vec<String>,
- #[serde(skip_serializing_if = "Option::is_none")]
- pub version: Option<String>,
-}
-
-#[derive(Debug, PartialEq, Serialize, Deserialize)]
-pub enum Encoding {
- /// Currently only 'utf-16' is supported due to the limitations in LSP.
- #[serde(rename = "utf-16")]
- Utf16,
-}
-
-#[derive(Debug, PartialEq, Serialize, Deserialize)]
-pub struct RangeBasedDocumentSymbol {
- pub id: Id,
- #[serde(default = "Default::default")]
- #[serde(skip_serializing_if = "Vec::is_empty")]
- pub children: Vec<RangeBasedDocumentSymbol>,
-}
-
-#[derive(Debug, PartialEq, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-#[serde(untagged)]
-pub enum DocumentSymbolOrRangeBasedVec {
- DocumentSymbol(Vec<crate::DocumentSymbol>),
- RangeBased(Vec<RangeBasedDocumentSymbol>),
-}
-
-#[derive(Debug, PartialEq, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct DefinitionTag {
- /// The text covered by the range
- text: String,
- /// The symbol kind.
- kind: crate::SymbolKind,
- /// Indicates if this symbol is deprecated.
- #[serde(default)]
- #[serde(skip_serializing_if = "std::ops::Not::not")]
- deprecated: bool,
- /// The full range of the definition not including leading/trailing whitespace but everything else, e.g comments and code.
- /// The range must be included in fullRange.
- full_range: Range,
- /// Optional detail information for the definition.
- #[serde(skip_serializing_if = "Option::is_none")]
- detail: Option<String>,
-}
-
-
-#[derive(Debug, PartialEq, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct DeclarationTag {
- /// The text covered by the range
- text: String,
- /// The symbol kind.
- kind: crate::SymbolKind,
- /// Indicates if this symbol is deprecated.
- #[serde(default)]
- deprecated: bool,
- /// The full range of the definition not including leading/trailing whitespace but everything else, e.g comments and code.
- /// The range must be included in fullRange.
- full_range: Range,
- /// Optional detail information for the definition.
- #[serde(skip_serializing_if = "Option::is_none")]
- detail: Option<String>,
-}
-
-#[derive(Debug, PartialEq, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct ReferenceTag {
- text: String,
-}
-
-#[derive(Debug, PartialEq, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct UnknownTag {
- text: String,
-}
-
-#[derive(Debug, PartialEq, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-#[serde(tag = "type")]
-pub enum RangeTag {
- Definition(DefinitionTag),
- Declaration(DeclarationTag),
- Reference(ReferenceTag),
- Unknown(UnknownTag),
-}
-
-#[derive(Debug, PartialEq, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-#[serde(tag = "label")]
-pub enum Vertex {
- MetaData(MetaData),
- /// <https://github.com/Microsoft/language-server-protocol/blob/master/indexFormat/specification.md#the-project-vertex>
- Project(Project),
- Document(Document),
- /// <https://github.com/Microsoft/language-server-protocol/blob/master/indexFormat/specification.md#ranges>
- Range {
- #[serde(flatten)]
- range: Range,
- #[serde(skip_serializing_if = "Option::is_none")]
- tag: Option<RangeTag>,
- },
- /// <https://github.com/Microsoft/language-server-protocol/blob/master/indexFormat/specification.md#result-set>
- ResultSet(ResultSet),
- Moniker(crate::Moniker),
- PackageInformation(PackageInformation),
-
- #[serde(rename = "$event")]
- Event(Event),
-
- DefinitionResult,
- DeclarationResult,
- TypeDefinitionResult,
- ReferenceResult,
- ImplementationResult,
- FoldingRangeResult {
- result: Vec<crate::FoldingRange>,
- },
- HoverResult {
- result: crate::Hover,
- },
- DocumentSymbolResult {
- result: DocumentSymbolOrRangeBasedVec,
- },
- DocumentLinkResult {
- result: Vec<crate::DocumentLink>,
- },
- DiagnosticResult {
- result: Vec<crate::Diagnostic>,
- },
-}
-
-#[derive(Debug, PartialEq, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub enum EventKind {
- Begin,
- End,
-}
-
-#[derive(Debug, PartialEq, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub enum EventScope {
- Document,
- Project,
-}
-
-
-#[derive(Debug, PartialEq, Serialize, Deserialize)]
-pub struct Event {
- pub kind: EventKind,
- pub scope: EventScope,
- pub data: Id,
-}
-
-#[derive(Debug, PartialEq, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-#[serde(tag = "label")]
-pub enum Edge {
- Contains(EdgeDataMultiIn),
- Moniker(EdgeData),
- NextMoniker(EdgeData),
- Next(EdgeData),
- PackageInformation(EdgeData),
- Item(Item),
-
- // Methods
- #[serde(rename = "textDocument/definition")]
- Definition(EdgeData),
- #[serde(rename = "textDocument/declaration")]
- Declaration(EdgeData),
- #[serde(rename = "textDocument/hover")]
- Hover(EdgeData),
- #[serde(rename = "textDocument/references")]
- References(EdgeData),
- #[serde(rename = "textDocument/implementation")]
- Implementation(EdgeData),
- #[serde(rename = "textDocument/typeDefinition")]
- TypeDefinition(EdgeData),
- #[serde(rename = "textDocument/foldingRange")]
- FoldingRange(EdgeData),
- #[serde(rename = "textDocument/documentLink")]
- DocumentLink(EdgeData),
- #[serde(rename = "textDocument/documentSymbol")]
- DocumentSymbol(EdgeData),
- #[serde(rename = "textDocument/diagnostic")]
- Diagnostic(EdgeData),
-}
-
-#[derive(Debug, PartialEq, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct EdgeData {
- pub in_v: Id,
- pub out_v: Id,
-}
-
-#[derive(Debug, PartialEq, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct EdgeDataMultiIn {
- pub in_vs: Vec<Id>,
- pub out_v: Id,
-}
-
-#[derive(Debug, PartialEq, Serialize, Deserialize)]
-#[serde(untagged)]
-pub enum DefinitionResultType {
- Scalar(LocationOrRangeId),
- Array(LocationOrRangeId),
-}
-
-#[derive(Debug, PartialEq, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub enum ItemKind {
- Declarations,
- Definitions,
- References,
- ReferenceResults,
- ImplementationResults,
-}
-
-#[derive(Debug, PartialEq, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct Item {
- pub document: Id,
- #[serde(skip_serializing_if = "Option::is_none")]
- pub property: Option<ItemKind>,
- #[serde(flatten)]
- pub edge_data: EdgeDataMultiIn,
-}
-
-#[derive(Debug, PartialEq, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct Document {
- pub uri: Url,
- pub language_id: String,
-}
-
-/// <https://github.com/Microsoft/language-server-protocol/blob/master/indexFormat/specification.md#result-set>
-#[derive(Debug, PartialEq, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct ResultSet {
- #[serde(skip_serializing_if = "Option::is_none")]
- pub key: Option<String>,
-}
-
-/// <https://github.com/Microsoft/language-server-protocol/blob/master/indexFormat/specification.md#the-project-vertex>
-#[derive(Debug, PartialEq, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct Project {
- #[serde(skip_serializing_if = "Option::is_none")]
- pub resource: Option<Url>,
- #[serde(skip_serializing_if = "Option::is_none")]
- pub content: Option<String>,
- pub kind: String,
-}
-
-#[derive(Debug, PartialEq, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct MetaData {
- /// The version of the LSIF format using semver notation. See <https://semver.org/>. Please note
- /// the version numbers starting with 0 don't adhere to semver and adopters have to assume
- /// that each new version is breaking.
- pub version: String,
-
- /// The project root (in form of an URI) used to compute this dump.
- pub project_root: Url,
-
- /// The string encoding used to compute line and character values in
- /// positions and ranges.
- pub position_encoding: Encoding,
-
- /// Information about the tool that created the dump
- #[serde(skip_serializing_if = "Option::is_none")]
- pub tool_info: Option<ToolInfo>,
-}
-
-#[derive(Debug, PartialEq, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct Repository {
- pub r#type: String,
- pub url: String,
- #[serde(skip_serializing_if = "Option::is_none")]
- pub commit_id: Option<String>,
-}
-
-#[derive(Debug, PartialEq, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct PackageInformation {
- pub name: String,
- pub manager: String,
- #[serde(skip_serializing_if = "Option::is_none")]
- pub uri: Option<Url>,
- #[serde(skip_serializing_if = "Option::is_none")]
- pub content: Option<String>,
- #[serde(skip_serializing_if = "Option::is_none")]
- pub repository: Option<Repository>,
- #[serde(skip_serializing_if = "Option::is_none")]
- pub version: Option<String>,
-}
+//! Types of Language Server Index Format (LSIF). LSIF is a standard format
+//! for language servers or other programming tools to dump their knowledge
+//! about a workspace.
+//!
+//! Based on <https://microsoft.github.io/language-server-protocol/specifications/lsif/0.6.0/specification/>
+
+use serde::{Deserialize, Serialize};
+use crate::{Url, Range};
+
+pub type Id = crate::NumberOrString;
+
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
+#[serde(untagged)]
+pub enum LocationOrRangeId {
+ Location(crate::Location),
+ RangeId(Id),
+}
+
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct Entry {
+ pub id: Id,
+ #[serde(flatten)]
+ pub data: Element,
+}
+
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+#[serde(tag = "type")]
+pub enum Element {
+ Vertex(Vertex),
+ Edge(Edge),
+}
+
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
+pub struct ToolInfo {
+ pub name: String,
+ #[serde(default = "Default::default")]
+ #[serde(skip_serializing_if = "Vec::is_empty")]
+ pub args: Vec<String>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub version: Option<String>,
+}
+
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
+pub enum Encoding {
+ /// Currently only 'utf-16' is supported due to the limitations in LSP.
+ #[serde(rename = "utf-16")]
+ Utf16,
+}
+
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
+pub struct RangeBasedDocumentSymbol {
+ pub id: Id,
+ #[serde(default = "Default::default")]
+ #[serde(skip_serializing_if = "Vec::is_empty")]
+ pub children: Vec<RangeBasedDocumentSymbol>,
+}
+
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+#[serde(untagged)]
+pub enum DocumentSymbolOrRangeBasedVec {
+ DocumentSymbol(Vec<crate::DocumentSymbol>),
+ RangeBased(Vec<RangeBasedDocumentSymbol>),
+}
+
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct DefinitionTag {
+ /// The text covered by the range
+ text: String,
+ /// The symbol kind.
+ kind: crate::SymbolKind,
+ /// Indicates if this symbol is deprecated.
+ #[serde(default)]
+ #[serde(skip_serializing_if = "std::ops::Not::not")]
+ deprecated: bool,
+ /// The full range of the definition not including leading/trailing whitespace but everything else, e.g comments and code.
+ /// The range must be included in fullRange.
+ full_range: Range,
+ /// Optional detail information for the definition.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ detail: Option<String>,
+}
+
+
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct DeclarationTag {
+ /// The text covered by the range
+ text: String,
+ /// The symbol kind.
+ kind: crate::SymbolKind,
+ /// Indicates if this symbol is deprecated.
+ #[serde(default)]
+ deprecated: bool,
+ /// The full range of the definition not including leading/trailing whitespace but everything else, e.g comments and code.
+ /// The range must be included in fullRange.
+ full_range: Range,
+ /// Optional detail information for the definition.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ detail: Option<String>,
+}
+
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct ReferenceTag {
+ text: String,
+}
+
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct UnknownTag {
+ text: String,
+}
+
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+#[serde(tag = "type")]
+pub enum RangeTag {
+ Definition(DefinitionTag),
+ Declaration(DeclarationTag),
+ Reference(ReferenceTag),
+ Unknown(UnknownTag),
+}
+
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+#[serde(tag = "label")]
+pub enum Vertex {
+ MetaData(MetaData),
+ /// <https://github.com/Microsoft/language-server-protocol/blob/master/indexFormat/specification.md#the-project-vertex>
+ Project(Project),
+ Document(Document),
+ /// <https://github.com/Microsoft/language-server-protocol/blob/master/indexFormat/specification.md#ranges>
+ Range {
+ #[serde(flatten)]
+ range: Range,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ tag: Option<RangeTag>,
+ },
+ /// <https://github.com/Microsoft/language-server-protocol/blob/master/indexFormat/specification.md#result-set>
+ ResultSet(ResultSet),
+ Moniker(crate::Moniker),
+ PackageInformation(PackageInformation),
+
+ #[serde(rename = "$event")]
+ Event(Event),
+
+ DefinitionResult,
+ DeclarationResult,
+ TypeDefinitionResult,
+ ReferenceResult,
+ ImplementationResult,
+ FoldingRangeResult {
+ result: Vec<crate::FoldingRange>,
+ },
+ HoverResult {
+ result: crate::Hover,
+ },
+ DocumentSymbolResult {
+ result: DocumentSymbolOrRangeBasedVec,
+ },
+ DocumentLinkResult {
+ result: Vec<crate::DocumentLink>,
+ },
+ DiagnosticResult {
+ result: Vec<crate::Diagnostic>,
+ },
+}
+
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub enum EventKind {
+ Begin,
+ End,
+}
+
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub enum EventScope {
+ Document,
+ Project,
+}
+
+
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
+pub struct Event {
+ pub kind: EventKind,
+ pub scope: EventScope,
+ pub data: Id,
+}
+
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+#[serde(tag = "label")]
+pub enum Edge {
+ Contains(EdgeDataMultiIn),
+ Moniker(EdgeData),
+ NextMoniker(EdgeData),
+ Next(EdgeData),
+ PackageInformation(EdgeData),
+ Item(Item),
+
+ // Methods
+ #[serde(rename = "textDocument/definition")]
+ Definition(EdgeData),
+ #[serde(rename = "textDocument/declaration")]
+ Declaration(EdgeData),
+ #[serde(rename = "textDocument/hover")]
+ Hover(EdgeData),
+ #[serde(rename = "textDocument/references")]
+ References(EdgeData),
+ #[serde(rename = "textDocument/implementation")]
+ Implementation(EdgeData),
+ #[serde(rename = "textDocument/typeDefinition")]
+ TypeDefinition(EdgeData),
+ #[serde(rename = "textDocument/foldingRange")]
+ FoldingRange(EdgeData),
+ #[serde(rename = "textDocument/documentLink")]
+ DocumentLink(EdgeData),
+ #[serde(rename = "textDocument/documentSymbol")]
+ DocumentSymbol(EdgeData),
+ #[serde(rename = "textDocument/diagnostic")]
+ Diagnostic(EdgeData),
+}
+
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct EdgeData {
+ pub in_v: Id,
+ pub out_v: Id,
+}
+
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct EdgeDataMultiIn {
+ pub in_vs: Vec<Id>,
+ pub out_v: Id,
+}
+
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
+#[serde(untagged)]
+pub enum DefinitionResultType {
+ Scalar(LocationOrRangeId),
+ Array(LocationOrRangeId),
+}
+
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub enum ItemKind {
+ Declarations,
+ Definitions,
+ References,
+ ReferenceResults,
+ ImplementationResults,
+}
+
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct Item {
+ pub document: Id,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub property: Option<ItemKind>,
+ #[serde(flatten)]
+ pub edge_data: EdgeDataMultiIn,
+}
+
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct Document {
+ pub uri: Url,
+ pub language_id: String,
+}
+
+/// <https://github.com/Microsoft/language-server-protocol/blob/master/indexFormat/specification.md#result-set>
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct ResultSet {
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub key: Option<String>,
+}
+
+/// <https://github.com/Microsoft/language-server-protocol/blob/master/indexFormat/specification.md#the-project-vertex>
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct Project {
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub resource: Option<Url>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub content: Option<String>,
+ pub kind: String,
+}
+
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct MetaData {
+ /// The version of the LSIF format using semver notation. See <https://semver.org/>. Please note
+ /// the version numbers starting with 0 don't adhere to semver and adopters have to assume
+ /// that each new version is breaking.
+ pub version: String,
+
+ /// The project root (in form of an URI) used to compute this dump.
+ pub project_root: Url,
+
+ /// The string encoding used to compute line and character values in
+ /// positions and ranges.
+ pub position_encoding: Encoding,
+
+ /// Information about the tool that created the dump
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub tool_info: Option<ToolInfo>,
+}
+
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct Repository {
+ pub r#type: String,
+ pub url: String,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub commit_id: Option<String>,
+}
+
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct PackageInformation {
+ pub name: String,
+ pub manager: String,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub uri: Option<Url>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub content: Option<String>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub repository: Option<Repository>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub version: Option<String>,
+}
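Because `Element` is internally tagged with `type`, `Vertex` with `label`, and both flatten into `Entry`, each line of an LSIF dump ends up as one flat JSON object. A sketch of a single `document` vertex (assuming the module is exposed as `lsp_types::lsif`; the path and values are illustrative):

```rust
use lsp_types::lsif::{Document, Element, Entry, Vertex};
use lsp_types::{NumberOrString, Url};

fn main() {
    let entry = Entry {
        id: NumberOrString::Number(4),
        data: Element::Vertex(Vertex::Document(Document {
            uri: Url::parse("file:///project/src/lib.rs").unwrap(),
            language_id: "rust".to_string(),
        })),
    };

    // Roughly: {"id":4,"type":"vertex","label":"document",
    //           "uri":"file:///project/src/lib.rs","languageId":"rust"}
    let json = serde_json::to_string(&entry).unwrap();
    assert!(json.contains(r#""type":"vertex""#) && json.contains(r#""label":"document""#));
}
```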
diff --git a/vendor/lsp-types/src/moniker.rs b/vendor/lsp-types/src/moniker.rs
index 74bf89553..5a888bc5d 100644
--- a/vendor/lsp-types/src/moniker.rs
+++ b/vendor/lsp-types/src/moniker.rs
@@ -1,92 +1,92 @@
-use serde::{Deserialize, Serialize};
-
-use crate::{
- DynamicRegistrationClientCapabilities, PartialResultParams, TextDocumentPositionParams,
- TextDocumentRegistrationOptions, WorkDoneProgressOptions, WorkDoneProgressParams,
-};
-
-pub type MonikerClientCapabilities = DynamicRegistrationClientCapabilities;
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(untagged)]
-pub enum MonikerServerCapabilities {
- Options(MonikerOptions),
- RegistrationOptions(MonikerRegistrationOptions),
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-pub struct MonikerOptions {
- #[serde(flatten)]
- pub work_done_progress_options: WorkDoneProgressOptions,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct MonikerRegistrationOptions {
- #[serde(flatten)]
- pub text_document_registration_options: TextDocumentRegistrationOptions,
-
- #[serde(flatten)]
- pub moniker_options: MonikerOptions,
-}
-
-/// Moniker uniqueness level to define scope of the moniker.
-#[derive(Debug, Eq, PartialEq, Deserialize, Serialize, Copy, Clone)]
-#[serde(rename_all = "camelCase")]
-pub enum UniquenessLevel {
- /// The moniker is only unique inside a document
- Document,
- /// The moniker is unique inside a project for which a dump got created
- Project,
- /// The moniker is unique inside the group to which a project belongs
- Group,
- /// The moniker is unique inside the moniker scheme.
- Scheme,
- /// The moniker is globally unique
- Global,
-}
-
-/// The moniker kind.
-#[derive(Debug, Eq, PartialEq, Deserialize, Serialize, Copy, Clone)]
-#[serde(rename_all = "camelCase")]
-pub enum MonikerKind {
- /// The moniker represent a symbol that is imported into a project
- Import,
- /// The moniker represent a symbol that is exported into a project
- Export,
- /// The moniker represents a symbol that is local to a project (e.g. a local
- /// variable of a function, a class not visible outside the project, ...)
- Local,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct MonikerParams {
- #[serde(flatten)]
- pub text_document_position_params: TextDocumentPositionParams,
-
- #[serde(flatten)]
- pub work_done_progress_params: WorkDoneProgressParams,
-
- #[serde(flatten)]
- pub partial_result_params: PartialResultParams,
-}
-
-/// Moniker definition to match LSIF 0.5 moniker definition.
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct Moniker {
- /// The scheme of the moniker. For example tsc or .Net
- pub scheme: String,
-
- /// The identifier of the moniker. The value is opaque in LSIF however
- /// schema owners are allowed to define the structure if they want.
- pub identifier: String,
-
- /// The scope in which the moniker is unique
- pub unique: UniquenessLevel,
-
- /// The moniker kind if known.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub kind: Option<MonikerKind>,
-}
+use serde::{Deserialize, Serialize};
+
+use crate::{
+ DynamicRegistrationClientCapabilities, PartialResultParams, TextDocumentPositionParams,
+ TextDocumentRegistrationOptions, WorkDoneProgressOptions, WorkDoneProgressParams,
+};
+
+pub type MonikerClientCapabilities = DynamicRegistrationClientCapabilities;
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(untagged)]
+pub enum MonikerServerCapabilities {
+ Options(MonikerOptions),
+ RegistrationOptions(MonikerRegistrationOptions),
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+pub struct MonikerOptions {
+ #[serde(flatten)]
+ pub work_done_progress_options: WorkDoneProgressOptions,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct MonikerRegistrationOptions {
+ #[serde(flatten)]
+ pub text_document_registration_options: TextDocumentRegistrationOptions,
+
+ #[serde(flatten)]
+ pub moniker_options: MonikerOptions,
+}
+
+/// Moniker uniqueness level to define scope of the moniker.
+#[derive(Debug, Eq, PartialEq, Deserialize, Serialize, Copy, Clone)]
+#[serde(rename_all = "camelCase")]
+pub enum UniquenessLevel {
+ /// The moniker is only unique inside a document
+ Document,
+ /// The moniker is unique inside a project for which a dump got created
+ Project,
+ /// The moniker is unique inside the group to which a project belongs
+ Group,
+ /// The moniker is unique inside the moniker scheme.
+ Scheme,
+ /// The moniker is globally unique
+ Global,
+}
+
+/// The moniker kind.
+#[derive(Debug, Eq, PartialEq, Deserialize, Serialize, Copy, Clone)]
+#[serde(rename_all = "camelCase")]
+pub enum MonikerKind {
+ /// The moniker represent a symbol that is imported into a project
+ Import,
+ /// The moniker represent a symbol that is exported into a project
+ Export,
+ /// The moniker represents a symbol that is local to a project (e.g. a local
+ /// variable of a function, a class not visible outside the project, ...)
+ Local,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct MonikerParams {
+ #[serde(flatten)]
+ pub text_document_position_params: TextDocumentPositionParams,
+
+ #[serde(flatten)]
+ pub work_done_progress_params: WorkDoneProgressParams,
+
+ #[serde(flatten)]
+ pub partial_result_params: PartialResultParams,
+}
+
+/// Moniker definition to match LSIF 0.5 moniker definition.
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct Moniker {
+ /// The scheme of the moniker. For example tsc or .Net
+ pub scheme: String,
+
+ /// The identifier of the moniker. The value is opaque in LSIF; however,
+ /// schema owners are allowed to define the structure if they want.
+ pub identifier: String,
+
+ /// The scope in which the moniker is unique
+ pub unique: UniquenessLevel,
+
+ /// The moniker kind if known.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub kind: Option<MonikerKind>,
+}
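For reference, a minimal sketch of how these moniker types round-trip through serde, assuming the crate is consumed as `lsp_types` alongside `serde_json`; the scheme and identifier values below are invented for illustration:

use lsp_types::{Moniker, MonikerKind, UniquenessLevel};

fn main() {
    let moniker = Moniker {
        scheme: "tsc".to_string(),
        identifier: "lib/foo:bar".to_string(), // hypothetical identifier, opaque to LSIF
        unique: UniquenessLevel::Scheme,
        kind: Some(MonikerKind::Export),
    };
    // The camelCase renames should produce:
    // {"scheme":"tsc","identifier":"lib/foo:bar","unique":"scheme","kind":"export"}
    println!("{}", serde_json::to_string(&moniker).unwrap());
}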
diff --git a/vendor/lsp-types/src/notification.rs b/vendor/lsp-types/src/notification.rs
index 6417707b8..53cfaea89 100644
--- a/vendor/lsp-types/src/notification.rs
+++ b/vendor/lsp-types/src/notification.rs
@@ -1,361 +1,361 @@
-use super::*;
-
-use serde::{de::DeserializeOwned, Serialize};
-
-pub trait Notification {
- type Params: DeserializeOwned + Serialize;
- const METHOD: &'static str;
-}
-
-#[macro_export]
-macro_rules! lsp_notification {
- ("$/cancelRequest") => {
- $crate::notification::Cancel
- };
- ("$/setTrace") => {
- $crate::notification::SetTrace
- };
- ("$/logTrace") => {
- $crate::notification::LogTrace
- };
- ("initialized") => {
- $crate::notification::Initialized
- };
- ("exit") => {
- $crate::notification::Exit
- };
-
- ("window/showMessage") => {
- $crate::notification::ShowMessage
- };
- ("window/logMessage") => {
- $crate::notification::LogMessage
- };
- ("window/workDoneProgress/cancel") => {
- $crate::notification::WorkDoneProgressCancel
- };
-
- ("telemetry/event") => {
- $crate::notification::TelemetryEvent
- };
-
- ("textDocument/didOpen") => {
- $crate::notification::DidOpenTextDocument
- };
- ("textDocument/didChange") => {
- $crate::notification::DidChangeTextDocument
- };
- ("textDocument/willSave") => {
- $crate::notification::WillSaveTextDocument
- };
- ("textDocument/didSave") => {
- $crate::notification::DidSaveTextDocument
- };
- ("textDocument/didClose") => {
- $crate::notification::DidCloseTextDocument
- };
- ("textDocument/publishDiagnostics") => {
- $crate::notification::PublishDiagnostics
- };
-
- ("workspace/didChangeConfiguration") => {
- $crate::notification::DidChangeConfiguration
- };
- ("workspace/didChangeWatchedFiles") => {
- $crate::notification::DidChangeWatchedFiles
- };
- ("workspace/didChangeWorkspaceFolders") => {
- $crate::notification::DidChangeWorkspaceFolders
- };
- ("$/progress") => {
- $crate::notification::Progress
- };
- ("workspace/didCreateFiles") => {
- $crate::notification::DidCreateFiles
- };
- ("workspace/didRenameFiles") => {
- $crate::notification::DidRenameFiles
- };
- ("workspace/didDeleteFiles") => {
- $crate::notification::DidDeleteFiles
- };
-}
-
-/// The base protocol now offers support for request cancellation. To cancel a request,
-/// a notification message with the following properties is sent:
-///
-/// A request that got canceled still needs to return from the server and send a response back.
- /// It cannot be left open / hanging. This is in line with the JSON RPC protocol that requires
- /// that every request sends a response back. In addition, it allows for returning partial results on cancel.
-#[derive(Debug)]
-pub enum Cancel {}
-
-impl Notification for Cancel {
- type Params = CancelParams;
- const METHOD: &'static str = "$/cancelRequest";
-}
-
-/// A notification that should be used by the client to modify the trace
-/// setting of the server.
-#[derive(Debug)]
-pub enum SetTrace {}
-
-impl Notification for SetTrace {
- type Params = SetTraceParams;
- const METHOD: &'static str = "$/setTrace";
-}
-
-/// A notification to log the trace of the server’s execution.
-/// The amount and content of these notifications depends on the current trace configuration.
-///
-/// `LogTrace` should be used for systematic trace reporting. For single debugging messages,
-/// the server should send `LogMessage` notifications.
-#[derive(Debug)]
-pub enum LogTrace {}
-
-impl Notification for LogTrace {
- type Params = LogTraceParams;
- const METHOD: &'static str = "$/logTrace";
-}
-
-/// The initialized notification is sent from the client to the server after the client received
-/// the result of the initialize request but before the client is sending any other request or
-/// notification to the server. The server can use the initialized notification for example to
-/// dynamically register capabilities.
-#[derive(Debug)]
-pub enum Initialized {}
-
-impl Notification for Initialized {
- type Params = InitializedParams;
- const METHOD: &'static str = "initialized";
-}
-
-/// A notification to ask the server to exit its process.
-/// The server should exit with success code 0 if the shutdown request has been received before;
-/// otherwise with error code 1.
-#[derive(Debug)]
-pub enum Exit {}
-
-impl Notification for Exit {
- type Params = ();
- const METHOD: &'static str = "exit";
-}
-
-/// The show message notification is sent from a server to a client to ask the client to display a particular message
-/// in the user interface.
-#[derive(Debug)]
-pub enum ShowMessage {}
-
-impl Notification for ShowMessage {
- type Params = ShowMessageParams;
- const METHOD: &'static str = "window/showMessage";
-}
-
-/// The log message notification is sent from the server to the client to ask the client to log a particular message.
-#[derive(Debug)]
-pub enum LogMessage {}
-
-impl Notification for LogMessage {
- type Params = LogMessageParams;
- const METHOD: &'static str = "window/logMessage";
-}
-
-/// The telemetry notification is sent from the server to the client to ask the client to log a telemetry event.
- /// The protocol doesn't specify the payload since no interpretation of the data happens in the protocol. Most clients don't even handle
- /// the event directly but forward it to the extension owning the corresponding server that issued the event.
-#[derive(Debug)]
-pub enum TelemetryEvent {}
-
-impl Notification for TelemetryEvent {
- type Params = serde_json::Value;
- const METHOD: &'static str = "telemetry/event";
-}
-
-/// A notification sent from the client to the server to signal the change of configuration settings.
-#[derive(Debug)]
-pub enum DidChangeConfiguration {}
-
-impl Notification for DidChangeConfiguration {
- type Params = DidChangeConfigurationParams;
- const METHOD: &'static str = "workspace/didChangeConfiguration";
-}
-
-/// The document open notification is sent from the client to the server to signal newly opened text documents.
-/// The document's truth is now managed by the client and the server must not try to read the document's truth
-/// using the document's uri.
-#[derive(Debug)]
-pub enum DidOpenTextDocument {}
-
-impl Notification for DidOpenTextDocument {
- type Params = DidOpenTextDocumentParams;
- const METHOD: &'static str = "textDocument/didOpen";
-}
-
-/// The document change notification is sent from the client to the server to signal changes to a text document.
-/// In 2.0 the shape of the params has changed to include proper version numbers and language ids.
-#[derive(Debug)]
-pub enum DidChangeTextDocument {}
-
-impl Notification for DidChangeTextDocument {
- type Params = DidChangeTextDocumentParams;
- const METHOD: &'static str = "textDocument/didChange";
-}
-
-/// The document will save notification is sent from the client to the server before the document
-/// is actually saved.
-#[derive(Debug)]
-pub enum WillSaveTextDocument {}
-
-impl Notification for WillSaveTextDocument {
- type Params = WillSaveTextDocumentParams;
- const METHOD: &'static str = "textDocument/willSave";
-}
-
-/// The document close notification is sent from the client to the server when the document got closed in the client.
-/// The document's truth now exists where the document's uri points to (e.g. if the document's uri is a file uri
-/// the truth now exists on disk).
-#[derive(Debug)]
-pub enum DidCloseTextDocument {}
-
-impl Notification for DidCloseTextDocument {
- type Params = DidCloseTextDocumentParams;
- const METHOD: &'static str = "textDocument/didClose";
-}
-
-/// The document save notification is sent from the client to the server when the document was saved in the client.
-#[derive(Debug)]
-pub enum DidSaveTextDocument {}
-
-impl Notification for DidSaveTextDocument {
- type Params = DidSaveTextDocumentParams;
- const METHOD: &'static str = "textDocument/didSave";
-}
-
-/// The watched files notification is sent from the client to the server when the client detects changes to files and folders
- /// watched by the language client (note: although the name suggests that only file events are sent, it is about file system events, which include folders as well).
-/// It is recommended that servers register for these file system events using the registration mechanism.
-/// In former implementations clients pushed file events without the server actively asking for it.
-#[derive(Debug)]
-pub enum DidChangeWatchedFiles {}
-
-impl Notification for DidChangeWatchedFiles {
- type Params = DidChangeWatchedFilesParams;
- const METHOD: &'static str = "workspace/didChangeWatchedFiles";
-}
-
-/// The workspace/didChangeWorkspaceFolders notification is sent from the client to the server to inform the server
-/// about workspace folder configuration changes
-#[derive(Debug)]
-pub enum DidChangeWorkspaceFolders {}
-
-impl Notification for DidChangeWorkspaceFolders {
- type Params = DidChangeWorkspaceFoldersParams;
- const METHOD: &'static str = "workspace/didChangeWorkspaceFolders";
-}
-
- /// Diagnostics notifications are sent from the server to the client to signal results of validation runs.
-#[derive(Debug)]
-pub enum PublishDiagnostics {}
-
-impl Notification for PublishDiagnostics {
- type Params = PublishDiagnosticsParams;
- const METHOD: &'static str = "textDocument/publishDiagnostics";
-}
-
-/// The progress notification is sent from the server to the client to ask
-/// the client to indicate progress.
-#[derive(Debug)]
-pub enum Progress {}
-
-impl Notification for Progress {
- type Params = ProgressParams;
- const METHOD: &'static str = "$/progress";
-}
-
-/// The `window/workDoneProgress/cancel` notification is sent from the client
-/// to the server to cancel a progress initiated on the server side using the `window/workDoneProgress/create`.
-#[derive(Debug)]
-pub enum WorkDoneProgressCancel {}
-
-impl Notification for WorkDoneProgressCancel {
- type Params = WorkDoneProgressCancelParams;
- const METHOD: &'static str = "window/workDoneProgress/cancel";
-}
-
-/// The did create files notification is sent from the client to the server when files were created from within the client.
-#[derive(Debug)]
-pub enum DidCreateFiles {}
-
-impl Notification for DidCreateFiles {
- type Params = CreateFilesParams;
- const METHOD: &'static str = "workspace/didCreateFiles";
-}
-
-/// The did rename files notification is sent from the client to the server when files were renamed from within the client.
-#[derive(Debug)]
-pub enum DidRenameFiles {}
-
-impl Notification for DidRenameFiles {
- type Params = RenameFilesParams;
- const METHOD: &'static str = "workspace/didRenameFiles";
-}
-
-/// The did delete files notification is sent from the client to the server when files were deleted from within the client.
-#[derive(Debug)]
-pub enum DidDeleteFiles {}
-
-impl Notification for DidDeleteFiles {
- type Params = DeleteFilesParams;
- const METHOD: &'static str = "workspace/didDeleteFiles";
-}
-
-#[cfg(test)]
-mod test {
- use super::*;
-
- fn fake_call<N>()
- where
- N: Notification,
- N::Params: serde::Serialize,
- {
- }
-
- macro_rules! check_macro {
- ($name:tt) => {
- // check whether the macro name matches the method
- assert_eq!(<lsp_notification!($name) as Notification>::METHOD, $name);
- // test whether type checking passes for each component
- fake_call::<lsp_notification!($name)>();
- };
- }
-
- #[test]
- fn check_macro_definitions() {
- check_macro!("$/cancelRequest");
- check_macro!("$/progress");
- check_macro!("$/logTrace");
- check_macro!("$/setTrace");
- check_macro!("initialized");
- check_macro!("exit");
- check_macro!("window/showMessage");
- check_macro!("window/logMessage");
- check_macro!("window/workDoneProgress/cancel");
- check_macro!("telemetry/event");
- check_macro!("textDocument/didOpen");
- check_macro!("textDocument/didChange");
- check_macro!("textDocument/willSave");
- check_macro!("textDocument/didSave");
- check_macro!("textDocument/didClose");
- check_macro!("textDocument/publishDiagnostics");
- check_macro!("workspace/didChangeConfiguration");
- check_macro!("workspace/didChangeWatchedFiles");
- check_macro!("workspace/didChangeWorkspaceFolders");
- check_macro!("workspace/didCreateFiles");
- check_macro!("workspace/didRenameFiles");
- check_macro!("workspace/didDeleteFiles");
- }
-
- #[test]
- #[cfg(feature = "proposed")]
- fn check_proposed_macro_definitions() {}
-}
+use super::*;
+
+use serde::{de::DeserializeOwned, Serialize};
+
+pub trait Notification {
+ type Params: DeserializeOwned + Serialize;
+ const METHOD: &'static str;
+}
+
+#[macro_export]
+macro_rules! lsp_notification {
+ ("$/cancelRequest") => {
+ $crate::notification::Cancel
+ };
+ ("$/setTrace") => {
+ $crate::notification::SetTrace
+ };
+ ("$/logTrace") => {
+ $crate::notification::LogTrace
+ };
+ ("initialized") => {
+ $crate::notification::Initialized
+ };
+ ("exit") => {
+ $crate::notification::Exit
+ };
+
+ ("window/showMessage") => {
+ $crate::notification::ShowMessage
+ };
+ ("window/logMessage") => {
+ $crate::notification::LogMessage
+ };
+ ("window/workDoneProgress/cancel") => {
+ $crate::notification::WorkDoneProgressCancel
+ };
+
+ ("telemetry/event") => {
+ $crate::notification::TelemetryEvent
+ };
+
+ ("textDocument/didOpen") => {
+ $crate::notification::DidOpenTextDocument
+ };
+ ("textDocument/didChange") => {
+ $crate::notification::DidChangeTextDocument
+ };
+ ("textDocument/willSave") => {
+ $crate::notification::WillSaveTextDocument
+ };
+ ("textDocument/didSave") => {
+ $crate::notification::DidSaveTextDocument
+ };
+ ("textDocument/didClose") => {
+ $crate::notification::DidCloseTextDocument
+ };
+ ("textDocument/publishDiagnostics") => {
+ $crate::notification::PublishDiagnostics
+ };
+
+ ("workspace/didChangeConfiguration") => {
+ $crate::notification::DidChangeConfiguration
+ };
+ ("workspace/didChangeWatchedFiles") => {
+ $crate::notification::DidChangeWatchedFiles
+ };
+ ("workspace/didChangeWorkspaceFolders") => {
+ $crate::notification::DidChangeWorkspaceFolders
+ };
+ ("$/progress") => {
+ $crate::notification::Progress
+ };
+ ("workspace/didCreateFiles") => {
+ $crate::notification::DidCreateFiles
+ };
+ ("workspace/didRenameFiles") => {
+ $crate::notification::DidRenameFiles
+ };
+ ("workspace/didDeleteFiles") => {
+ $crate::notification::DidDeleteFiles
+ };
+}
+
+/// The base protocol now offers support for request cancellation. To cancel a request,
+/// a notification message with the following properties is sent:
+///
+/// A request that got canceled still needs to return from the server and send a response back.
+ /// It cannot be left open / hanging. This is in line with the JSON RPC protocol that requires
+ /// that every request sends a response back. In addition, it allows for returning partial results on cancel.
+#[derive(Debug)]
+pub enum Cancel {}
+
+impl Notification for Cancel {
+ type Params = CancelParams;
+ const METHOD: &'static str = "$/cancelRequest";
+}
+
+/// A notification that should be used by the client to modify the trace
+/// setting of the server.
+#[derive(Debug)]
+pub enum SetTrace {}
+
+impl Notification for SetTrace {
+ type Params = SetTraceParams;
+ const METHOD: &'static str = "$/setTrace";
+}
+
+/// A notification to log the trace of the server’s execution.
+/// The amount and content of these notifications depends on the current trace configuration.
+///
+/// `LogTrace` should be used for systematic trace reporting. For single debugging messages,
+/// the server should send `LogMessage` notifications.
+#[derive(Debug)]
+pub enum LogTrace {}
+
+impl Notification for LogTrace {
+ type Params = LogTraceParams;
+ const METHOD: &'static str = "$/logTrace";
+}
+
+/// The initialized notification is sent from the client to the server after the client received
+/// the result of the initialize request but before the client is sending any other request or
+/// notification to the server. The server can use the initialized notification for example to
+/// dynamically register capabilities.
+#[derive(Debug)]
+pub enum Initialized {}
+
+impl Notification for Initialized {
+ type Params = InitializedParams;
+ const METHOD: &'static str = "initialized";
+}
+
+/// A notification to ask the server to exit its process.
+/// The server should exit with success code 0 if the shutdown request has been received before;
+/// otherwise with error code 1.
+#[derive(Debug)]
+pub enum Exit {}
+
+impl Notification for Exit {
+ type Params = ();
+ const METHOD: &'static str = "exit";
+}
+
+/// The show message notification is sent from a server to a client to ask the client to display a particular message
+/// in the user interface.
+#[derive(Debug)]
+pub enum ShowMessage {}
+
+impl Notification for ShowMessage {
+ type Params = ShowMessageParams;
+ const METHOD: &'static str = "window/showMessage";
+}
+
+/// The log message notification is sent from the server to the client to ask the client to log a particular message.
+#[derive(Debug)]
+pub enum LogMessage {}
+
+impl Notification for LogMessage {
+ type Params = LogMessageParams;
+ const METHOD: &'static str = "window/logMessage";
+}
+
+/// The telemetry notification is sent from the server to the client to ask the client to log a telemetry event.
+ /// The protocol doesn't specify the payload since no interpretation of the data happens in the protocol. Most clients don't even handle
+ /// the event directly but forward it to the extension owning the corresponding server that issued the event.
+#[derive(Debug)]
+pub enum TelemetryEvent {}
+
+impl Notification for TelemetryEvent {
+ type Params = serde_json::Value;
+ const METHOD: &'static str = "telemetry/event";
+}
+
+/// A notification sent from the client to the server to signal the change of configuration settings.
+#[derive(Debug)]
+pub enum DidChangeConfiguration {}
+
+impl Notification for DidChangeConfiguration {
+ type Params = DidChangeConfigurationParams;
+ const METHOD: &'static str = "workspace/didChangeConfiguration";
+}
+
+/// The document open notification is sent from the client to the server to signal newly opened text documents.
+/// The document's truth is now managed by the client and the server must not try to read the document's truth
+/// using the document's uri.
+#[derive(Debug)]
+pub enum DidOpenTextDocument {}
+
+impl Notification for DidOpenTextDocument {
+ type Params = DidOpenTextDocumentParams;
+ const METHOD: &'static str = "textDocument/didOpen";
+}
+
+/// The document change notification is sent from the client to the server to signal changes to a text document.
+/// In 2.0 the shape of the params has changed to include proper version numbers and language ids.
+#[derive(Debug)]
+pub enum DidChangeTextDocument {}
+
+impl Notification for DidChangeTextDocument {
+ type Params = DidChangeTextDocumentParams;
+ const METHOD: &'static str = "textDocument/didChange";
+}
+
+/// The document will save notification is sent from the client to the server before the document
+/// is actually saved.
+#[derive(Debug)]
+pub enum WillSaveTextDocument {}
+
+impl Notification for WillSaveTextDocument {
+ type Params = WillSaveTextDocumentParams;
+ const METHOD: &'static str = "textDocument/willSave";
+}
+
+/// The document close notification is sent from the client to the server when the document got closed in the client.
+/// The document's truth now exists where the document's uri points to (e.g. if the document's uri is a file uri
+/// the truth now exists on disk).
+#[derive(Debug)]
+pub enum DidCloseTextDocument {}
+
+impl Notification for DidCloseTextDocument {
+ type Params = DidCloseTextDocumentParams;
+ const METHOD: &'static str = "textDocument/didClose";
+}
+
+/// The document save notification is sent from the client to the server when the document was saved in the client.
+#[derive(Debug)]
+pub enum DidSaveTextDocument {}
+
+impl Notification for DidSaveTextDocument {
+ type Params = DidSaveTextDocumentParams;
+ const METHOD: &'static str = "textDocument/didSave";
+}
+
+/// The watched files notification is sent from the client to the server when the client detects changes to files and folders
+ /// watched by the language client (note: although the name suggests that only file events are sent, it is about file system events, which include folders as well).
+/// It is recommended that servers register for these file system events using the registration mechanism.
+/// In former implementations clients pushed file events without the server actively asking for it.
+#[derive(Debug)]
+pub enum DidChangeWatchedFiles {}
+
+impl Notification for DidChangeWatchedFiles {
+ type Params = DidChangeWatchedFilesParams;
+ const METHOD: &'static str = "workspace/didChangeWatchedFiles";
+}
+
+/// The workspace/didChangeWorkspaceFolders notification is sent from the client to the server to inform the server
+/// about workspace folder configuration changes
+#[derive(Debug)]
+pub enum DidChangeWorkspaceFolders {}
+
+impl Notification for DidChangeWorkspaceFolders {
+ type Params = DidChangeWorkspaceFoldersParams;
+ const METHOD: &'static str = "workspace/didChangeWorkspaceFolders";
+}
+
+ /// Diagnostics notifications are sent from the server to the client to signal results of validation runs.
+#[derive(Debug)]
+pub enum PublishDiagnostics {}
+
+impl Notification for PublishDiagnostics {
+ type Params = PublishDiagnosticsParams;
+ const METHOD: &'static str = "textDocument/publishDiagnostics";
+}
+
+/// The progress notification is sent from the server to the client to ask
+/// the client to indicate progress.
+#[derive(Debug)]
+pub enum Progress {}
+
+impl Notification for Progress {
+ type Params = ProgressParams;
+ const METHOD: &'static str = "$/progress";
+}
+
+/// The `window/workDoneProgress/cancel` notification is sent from the client
+/// to the server to cancel a progress initiated on the server side using the `window/workDoneProgress/create`.
+#[derive(Debug)]
+pub enum WorkDoneProgressCancel {}
+
+impl Notification for WorkDoneProgressCancel {
+ type Params = WorkDoneProgressCancelParams;
+ const METHOD: &'static str = "window/workDoneProgress/cancel";
+}
+
+/// The did create files notification is sent from the client to the server when files were created from within the client.
+#[derive(Debug)]
+pub enum DidCreateFiles {}
+
+impl Notification for DidCreateFiles {
+ type Params = CreateFilesParams;
+ const METHOD: &'static str = "workspace/didCreateFiles";
+}
+
+/// The did rename files notification is sent from the client to the server when files were renamed from within the client.
+#[derive(Debug)]
+pub enum DidRenameFiles {}
+
+impl Notification for DidRenameFiles {
+ type Params = RenameFilesParams;
+ const METHOD: &'static str = "workspace/didRenameFiles";
+}
+
+/// The did delete files notification is sent from the client to the server when files were deleted from within the client.
+#[derive(Debug)]
+pub enum DidDeleteFiles {}
+
+impl Notification for DidDeleteFiles {
+ type Params = DeleteFilesParams;
+ const METHOD: &'static str = "workspace/didDeleteFiles";
+}
+
+#[cfg(test)]
+mod test {
+ use super::*;
+
+ fn fake_call<N>()
+ where
+ N: Notification,
+ N::Params: serde::Serialize,
+ {
+ }
+
+ macro_rules! check_macro {
+ ($name:tt) => {
+ // check whether the macro name matches the method
+ assert_eq!(<lsp_notification!($name) as Notification>::METHOD, $name);
+ // test whether type checking passes for each component
+ fake_call::<lsp_notification!($name)>();
+ };
+ }
+
+ #[test]
+ fn check_macro_definitions() {
+ check_macro!("$/cancelRequest");
+ check_macro!("$/progress");
+ check_macro!("$/logTrace");
+ check_macro!("$/setTrace");
+ check_macro!("initialized");
+ check_macro!("exit");
+ check_macro!("window/showMessage");
+ check_macro!("window/logMessage");
+ check_macro!("window/workDoneProgress/cancel");
+ check_macro!("telemetry/event");
+ check_macro!("textDocument/didOpen");
+ check_macro!("textDocument/didChange");
+ check_macro!("textDocument/willSave");
+ check_macro!("textDocument/didSave");
+ check_macro!("textDocument/didClose");
+ check_macro!("textDocument/publishDiagnostics");
+ check_macro!("workspace/didChangeConfiguration");
+ check_macro!("workspace/didChangeWatchedFiles");
+ check_macro!("workspace/didChangeWorkspaceFolders");
+ check_macro!("workspace/didCreateFiles");
+ check_macro!("workspace/didRenameFiles");
+ check_macro!("workspace/didDeleteFiles");
+ }
+
+ #[test]
+ #[cfg(feature = "proposed")]
+ fn check_proposed_macro_definitions() {}
+}
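As a rough usage sketch (assuming the crate is available as `lsp_types` with `serde_json`), the `lsp_notification!` macro resolves a method string to its marker type, whose `Notification` impl carries the parameter type and wire method name; the file URI below is a placeholder:

use lsp_types::lsp_notification;
use lsp_types::notification::{DidSaveTextDocument, Notification};
use lsp_types::{DidSaveTextDocumentParams, TextDocumentIdentifier, Url};

fn main() {
    // The macro and the marker type agree on the method string.
    assert_eq!(
        <lsp_notification!("textDocument/didSave") as Notification>::METHOD,
        DidSaveTextDocument::METHOD
    );

    // Params for the notification serialize to the LSP wire shape.
    let params = DidSaveTextDocumentParams {
        text_document: TextDocumentIdentifier {
            uri: Url::parse("file:///tmp/example.rs").unwrap(), // hypothetical document
        },
        text: None,
    };
    println!("{}", serde_json::to_string(&params).unwrap());
}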
diff --git a/vendor/lsp-types/src/progress.rs b/vendor/lsp-types/src/progress.rs
index 97d30e1a0..b16e5a981 100644
--- a/vendor/lsp-types/src/progress.rs
+++ b/vendor/lsp-types/src/progress.rs
@@ -1,134 +1,134 @@
-use serde::{Deserialize, Serialize};
-
-use crate::NumberOrString;
-
-pub type ProgressToken = NumberOrString;
-
-/// The progress notification is sent from the server to the client to ask
-/// the client to indicate progress.
-#[derive(Debug, PartialEq, Deserialize, Serialize, Clone)]
-#[serde(rename_all = "camelCase")]
-pub struct ProgressParams {
- /// The progress token provided by the client.
- pub token: ProgressToken,
-
- /// The progress data.
- pub value: ProgressParamsValue,
-}
-
-#[derive(Debug, PartialEq, Deserialize, Serialize, Clone)]
-#[serde(untagged)]
-pub enum ProgressParamsValue {
- WorkDone(WorkDoneProgress),
-}
-
-/// The `window/workDoneProgress/create` request is sent from the server
- /// to the client to ask the client to create a work done progress.
-#[derive(Debug, PartialEq, Deserialize, Serialize, Clone)]
-#[serde(rename_all = "camelCase")]
-pub struct WorkDoneProgressCreateParams {
- /// The token to be used to report progress.
- pub token: ProgressToken,
-}
-
-/// The `window/workDoneProgress/cancel` notification is sent from the client
-/// to the server to cancel a progress initiated on the server side using the `window/workDoneProgress/create`.
-#[derive(Debug, PartialEq, Deserialize, Serialize, Clone)]
-#[serde(rename_all = "camelCase")]
-pub struct WorkDoneProgressCancelParams {
- /// The token to be used to report progress.
- pub token: ProgressToken,
-}
-
-/// Options to signal work done progress support in server capabilities.
-#[derive(Debug, Eq, PartialEq, Default, Deserialize, Serialize, Clone)]
-#[serde(rename_all = "camelCase")]
-pub struct WorkDoneProgressOptions {
- #[serde(skip_serializing_if = "Option::is_none")]
- pub work_done_progress: Option<bool>,
-}
-
-/// An optional token that a server can use to report work done progress
-#[derive(Debug, Eq, PartialEq, Default, Deserialize, Serialize, Clone)]
-#[serde(rename_all = "camelCase")]
-pub struct WorkDoneProgressParams {
- #[serde(skip_serializing_if = "Option::is_none")]
- pub work_done_token: Option<ProgressToken>,
-}
-
-#[derive(Debug, PartialEq, Default, Deserialize, Serialize, Clone)]
-#[serde(rename_all = "camelCase")]
-pub struct WorkDoneProgressBegin {
- /// Mandatory title of the progress operation. Used to briefly inform
- /// about the kind of operation being performed.
- /// Examples: "Indexing" or "Linking dependencies".
- pub title: String,
-
- /// Controls if a cancel button should show to allow the user to cancel the
- /// long running operation. Clients that don't support cancellation are allowed
- /// to ignore the setting.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub cancellable: Option<bool>,
-
- /// Optional, more detailed associated progress message. Contains
- /// complementary information to the `title`.
- ///
- /// Examples: "3/25 files", "project/src/module2", "node_modules/some_dep".
- /// If unset, the previous progress message (if any) is still valid.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub message: Option<String>,
-
- /// Optional progress percentage to display (value 100 is considered 100%).
- /// If not provided, infinite progress is assumed and clients are allowed
- /// to ignore the `percentage` value in subsequent report notifications.
- ///
- /// The value should be steadily rising. Clients are free to ignore values
- /// that are not following this rule. The value range is [0, 100]
- #[serde(skip_serializing_if = "Option::is_none")]
- pub percentage: Option<u32>,
-}
-
-#[derive(Debug, PartialEq, Default, Deserialize, Serialize, Clone)]
-#[serde(rename_all = "camelCase")]
-pub struct WorkDoneProgressReport {
- /// Controls if a cancel button should show to allow the user to cancel the
- /// long running operation. Clients that don't support cancellation are allowed
- /// to ignore the setting.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub cancellable: Option<bool>,
-
- /// Optional, more detailed associated progress message. Contains
- /// complementary information to the `title`.
- /// Examples: "3/25 files", "project/src/module2", "node_modules/some_dep".
- /// If unset, the previous progress message (if any) is still valid.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub message: Option<String>,
-
- /// Optional progress percentage to display (value 100 is considered 100%).
- /// If not provided, infinite progress is assumed and clients are allowed
- /// to ignore the `percentage` value in subsequent report notifications.
- ///
- /// The value should be steadily rising. Clients are free to ignore values
- /// that are not following this rule. The value range is [0, 100]
- #[serde(skip_serializing_if = "Option::is_none")]
- pub percentage: Option<u32>,
-}
-
-#[derive(Debug, PartialEq, Default, Deserialize, Serialize, Clone)]
-#[serde(rename_all = "camelCase")]
-pub struct WorkDoneProgressEnd {
- /// Optional, more detailed associated progress message. Contains
- /// complementary information to the `title`.
- /// Examples: "3/25 files", "project/src/module2", "node_modules/some_dep".
- /// If unset, the previous progress message (if any) is still valid.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub message: Option<String>,
-}
-
-#[derive(Debug, PartialEq, Deserialize, Serialize, Clone)]
-#[serde(tag = "kind", rename_all = "lowercase")]
-pub enum WorkDoneProgress {
- Begin(WorkDoneProgressBegin),
- Report(WorkDoneProgressReport),
- End(WorkDoneProgressEnd),
-}
+use serde::{Deserialize, Serialize};
+
+use crate::NumberOrString;
+
+pub type ProgressToken = NumberOrString;
+
+/// The progress notification is sent from the server to the client to ask
+/// the client to indicate progress.
+#[derive(Debug, PartialEq, Deserialize, Serialize, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct ProgressParams {
+ /// The progress token provided by the client.
+ pub token: ProgressToken,
+
+ /// The progress data.
+ pub value: ProgressParamsValue,
+}
+
+#[derive(Debug, PartialEq, Deserialize, Serialize, Clone)]
+#[serde(untagged)]
+pub enum ProgressParamsValue {
+ WorkDone(WorkDoneProgress),
+}
+
+/// The `window/workDoneProgress/create` request is sent from the server
+ /// to the client to ask the client to create a work done progress.
+#[derive(Debug, PartialEq, Deserialize, Serialize, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct WorkDoneProgressCreateParams {
+ /// The token to be used to report progress.
+ pub token: ProgressToken,
+}
+
+/// The `window/workDoneProgress/cancel` notification is sent from the client
+/// to the server to cancel a progress initiated on the server side using the `window/workDoneProgress/create`.
+#[derive(Debug, PartialEq, Deserialize, Serialize, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct WorkDoneProgressCancelParams {
+ /// The token to be used to report progress.
+ pub token: ProgressToken,
+}
+
+/// Options to signal work done progress support in server capabilities.
+#[derive(Debug, Eq, PartialEq, Default, Deserialize, Serialize, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct WorkDoneProgressOptions {
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub work_done_progress: Option<bool>,
+}
+
+/// An optional token that a server can use to report work done progress
+#[derive(Debug, Eq, PartialEq, Default, Deserialize, Serialize, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct WorkDoneProgressParams {
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub work_done_token: Option<ProgressToken>,
+}
+
+#[derive(Debug, PartialEq, Default, Deserialize, Serialize, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct WorkDoneProgressBegin {
+ /// Mandatory title of the progress operation. Used to briefly inform
+ /// about the kind of operation being performed.
+ /// Examples: "Indexing" or "Linking dependencies".
+ pub title: String,
+
+ /// Controls if a cancel button should show to allow the user to cancel the
+ /// long running operation. Clients that don't support cancellation are allowed
+ /// to ignore the setting.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub cancellable: Option<bool>,
+
+ /// Optional, more detailed associated progress message. Contains
+ /// complementary information to the `title`.
+ ///
+ /// Examples: "3/25 files", "project/src/module2", "node_modules/some_dep".
+ /// If unset, the previous progress message (if any) is still valid.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub message: Option<String>,
+
+ /// Optional progress percentage to display (value 100 is considered 100%).
+ /// If not provided, infinite progress is assumed and clients are allowed
+ /// to ignore the `percentage` value in subsequent report notifications.
+ ///
+ /// The value should be steadily rising. Clients are free to ignore values
+ /// that are not following this rule. The value range is [0, 100]
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub percentage: Option<u32>,
+}
+
+#[derive(Debug, PartialEq, Default, Deserialize, Serialize, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct WorkDoneProgressReport {
+ /// Controls if a cancel button should show to allow the user to cancel the
+ /// long running operation. Clients that don't support cancellation are allowed
+ /// to ignore the setting.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub cancellable: Option<bool>,
+
+ /// Optional, more detailed associated progress message. Contains
+ /// complementary information to the `title`.
+ /// Examples: "3/25 files", "project/src/module2", "node_modules/some_dep".
+ /// If unset, the previous progress message (if any) is still valid.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub message: Option<String>,
+
+ /// Optional progress percentage to display (value 100 is considered 100%).
+ /// If not provided, infinite progress is assumed and clients are allowed
+ /// to ignore the `percentage` value in subsequent report notifications.
+ ///
+ /// The value should be steadily rising. Clients are free to ignore values
+ /// that are not following this rule. The value range is [0, 100]
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub percentage: Option<u32>,
+}
+
+#[derive(Debug, PartialEq, Default, Deserialize, Serialize, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct WorkDoneProgressEnd {
+ /// Optional, more detailed associated progress message. Contains
+ /// complementary information to the `title`.
+ /// Examples: "3/25 files", "project/src/module2", "node_modules/some_dep".
+ /// If unset, the previous progress message (if any) is still valid.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub message: Option<String>,
+}
+
+#[derive(Debug, PartialEq, Deserialize, Serialize, Clone)]
+#[serde(tag = "kind", rename_all = "lowercase")]
+pub enum WorkDoneProgress {
+ Begin(WorkDoneProgressBegin),
+ Report(WorkDoneProgressReport),
+ End(WorkDoneProgressEnd),
+}
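A small sketch (again assuming `lsp_types` plus `serde_json`) of how the internally tagged `WorkDoneProgress` enum nests inside `ProgressParams`; the token and message values are invented:

use lsp_types::{
    NumberOrString, ProgressParams, ProgressParamsValue, WorkDoneProgress, WorkDoneProgressBegin,
};

fn main() {
    let params = ProgressParams {
        token: NumberOrString::String("indexing-1".to_string()), // hypothetical token
        value: ProgressParamsValue::WorkDone(WorkDoneProgress::Begin(WorkDoneProgressBegin {
            title: "Indexing".to_string(),
            cancellable: Some(false),
            message: Some("0/25 files".to_string()),
            percentage: Some(0),
        })),
    };
    // The `tag = "kind"` attribute yields:
    // {"token":"indexing-1","value":{"kind":"begin","title":"Indexing",...}}
    println!("{}", serde_json::to_string(&params).unwrap());
}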
diff --git a/vendor/lsp-types/src/references.rs b/vendor/lsp-types/src/references.rs
index cb590d58e..4926bb771 100644
--- a/vendor/lsp-types/src/references.rs
+++ b/vendor/lsp-types/src/references.rs
@@ -1,30 +1,30 @@
-use crate::{
- DynamicRegistrationClientCapabilities, PartialResultParams, TextDocumentPositionParams,
- WorkDoneProgressParams,
-};
-use serde::{Deserialize, Serialize};
-
-pub type ReferenceClientCapabilities = DynamicRegistrationClientCapabilities;
-#[derive(Debug, Eq, PartialEq, Clone, Copy, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct ReferenceContext {
- /// Include the declaration of the current symbol.
- pub include_declaration: bool,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct ReferenceParams {
- // Text Document and Position fields
- #[serde(flatten)]
- pub text_document_position: TextDocumentPositionParams,
-
- #[serde(flatten)]
- pub work_done_progress_params: WorkDoneProgressParams,
-
- #[serde(flatten)]
- pub partial_result_params: PartialResultParams,
-
- // ReferenceParams properties:
- pub context: ReferenceContext,
-}
+use crate::{
+ DynamicRegistrationClientCapabilities, PartialResultParams, TextDocumentPositionParams,
+ WorkDoneProgressParams,
+};
+use serde::{Deserialize, Serialize};
+
+pub type ReferenceClientCapabilities = DynamicRegistrationClientCapabilities;
+#[derive(Debug, Eq, PartialEq, Clone, Copy, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct ReferenceContext {
+ /// Include the declaration of the current symbol.
+ pub include_declaration: bool,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct ReferenceParams {
+ // Text Document and Position fields
+ #[serde(flatten)]
+ pub text_document_position: TextDocumentPositionParams,
+
+ #[serde(flatten)]
+ pub work_done_progress_params: WorkDoneProgressParams,
+
+ #[serde(flatten)]
+ pub partial_result_params: PartialResultParams,
+
+ // ReferenceParams properties:
+ pub context: ReferenceContext,
+}
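A sketch of building `ReferenceParams` by hand (assuming `lsp_types` and `serde_json`; the file URI and position are placeholders), mainly to show that the flattened sub-structs keep the serialized object flat:

use lsp_types::{
    PartialResultParams, Position, ReferenceContext, ReferenceParams, TextDocumentIdentifier,
    TextDocumentPositionParams, Url, WorkDoneProgressParams,
};

fn main() {
    let params = ReferenceParams {
        text_document_position: TextDocumentPositionParams {
            text_document: TextDocumentIdentifier {
                uri: Url::parse("file:///tmp/example.rs").unwrap(), // placeholder path
            },
            position: Position { line: 4, character: 7 },
        },
        work_done_progress_params: WorkDoneProgressParams::default(),
        partial_result_params: PartialResultParams::default(),
        context: ReferenceContext { include_declaration: true },
    };
    // All flattened fields end up as siblings of `context` in one JSON object.
    println!("{}", serde_json::to_string(&params).unwrap());
}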
diff --git a/vendor/lsp-types/src/rename.rs b/vendor/lsp-types/src/rename.rs
index b84bdcf6d..4645035a4 100644
--- a/vendor/lsp-types/src/rename.rs
+++ b/vendor/lsp-types/src/rename.rs
@@ -1,88 +1,88 @@
-use crate::{Range, TextDocumentPositionParams, WorkDoneProgressOptions, WorkDoneProgressParams};
-use serde::{Deserialize, Serialize};
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct RenameParams {
- /// Text Document and Position fields
- #[serde(flatten)]
- pub text_document_position: TextDocumentPositionParams,
-
- /// The new name of the symbol. If the given name is not valid the
- /// request must return a [ResponseError](#ResponseError) with an
- /// appropriate message set.
- pub new_name: String,
-
- #[serde(flatten)]
- pub work_done_progress_params: WorkDoneProgressParams,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct RenameOptions {
- /// Renames should be checked and tested before being executed.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub prepare_provider: Option<bool>,
-
- #[serde(flatten)]
- pub work_done_progress_options: WorkDoneProgressOptions,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct RenameClientCapabilities {
- /// Whether rename supports dynamic registration.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub dynamic_registration: Option<bool>,
-
- /// Client supports testing for validity of rename operations before execution.
- ///
- /// since 3.12.0
- #[serde(skip_serializing_if = "Option::is_none")]
- pub prepare_support: Option<bool>,
-
- /// Client supports the default behavior result.
- ///
- /// The value indicates the default behavior used by the
- /// client.
- ///
- /// since 3.16.0
- #[serde(skip_serializing_if = "Option::is_none")]
- pub prepare_support_default_behavior: Option<PrepareSupportDefaultBehavior>,
-
- /// Whether the client honors the change annotations in
- /// text edits and resource operations returned via the
- /// rename request's workspace edit by for example presenting
- /// the workspace edit in the user interface and asking
- /// for confirmation.
- ///
- /// @since 3.16.0
- #[serde(skip_serializing_if = "Option::is_none")]
- pub honors_change_annotations: Option<bool>,
-}
-
-#[derive(Eq, PartialEq, Copy, Clone, Serialize, Deserialize)]
-#[serde(transparent)]
-pub struct PrepareSupportDefaultBehavior(i32);
-lsp_enum! {
-impl PrepareSupportDefaultBehavior {
- /// The client's default behavior is to select the identifier
- /// according to the language's syntax rule
- pub const IDENTIFIER: PrepareSupportDefaultBehavior = PrepareSupportDefaultBehavior(1);
-}
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(untagged)]
-#[serde(rename_all = "camelCase")]
-pub enum PrepareRenameResponse {
- Range(Range),
- RangeWithPlaceholder {
- range: Range,
- placeholder: String,
- },
- #[serde(rename_all = "camelCase")]
- DefaultBehavior {
- default_behavior: bool,
- },
-}
+use crate::{Range, TextDocumentPositionParams, WorkDoneProgressOptions, WorkDoneProgressParams};
+use serde::{Deserialize, Serialize};
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct RenameParams {
+ /// Text Document and Position fields
+ #[serde(flatten)]
+ pub text_document_position: TextDocumentPositionParams,
+
+ /// The new name of the symbol. If the given name is not valid the
+ /// request must return a [ResponseError](#ResponseError) with an
+ /// appropriate message set.
+ pub new_name: String,
+
+ #[serde(flatten)]
+ pub work_done_progress_params: WorkDoneProgressParams,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct RenameOptions {
+ /// Renames should be checked and tested before being executed.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub prepare_provider: Option<bool>,
+
+ #[serde(flatten)]
+ pub work_done_progress_options: WorkDoneProgressOptions,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct RenameClientCapabilities {
+ /// Whether rename supports dynamic registration.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub dynamic_registration: Option<bool>,
+
+ /// Client supports testing for validity of rename operations before execution.
+ ///
+ /// since 3.12.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub prepare_support: Option<bool>,
+
+ /// Client supports the default behavior result.
+ ///
+ /// The value indicates the default behavior used by the
+ /// client.
+ ///
+ /// since 3.16.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub prepare_support_default_behavior: Option<PrepareSupportDefaultBehavior>,
+
+ /// Whether the client honors the change annotations in
+ /// text edits and resource operations returned via the
+ /// rename request's workspace edit by for example presenting
+ /// the workspace edit in the user interface and asking
+ /// for confirmation.
+ ///
+ /// @since 3.16.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub honors_change_annotations: Option<bool>,
+}
+
+#[derive(Eq, PartialEq, Copy, Clone, Serialize, Deserialize)]
+#[serde(transparent)]
+pub struct PrepareSupportDefaultBehavior(i32);
+lsp_enum! {
+impl PrepareSupportDefaultBehavior {
+ /// The client's default behavior is to select the identifier
+ /// according to the language's syntax rule
+ pub const IDENTIFIER: PrepareSupportDefaultBehavior = PrepareSupportDefaultBehavior(1);
+}
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(untagged)]
+#[serde(rename_all = "camelCase")]
+pub enum PrepareRenameResponse {
+ Range(Range),
+ RangeWithPlaceholder {
+ range: Range,
+ placeholder: String,
+ },
+ #[serde(rename_all = "camelCase")]
+ DefaultBehavior {
+ default_behavior: bool,
+ },
+}
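A sketch of the untagged `PrepareRenameResponse` (assuming `lsp_types` and `serde_json`; the placeholder text is invented), showing that each variant is distinguished only by its JSON shape:

use lsp_types::{Position, PrepareRenameResponse, Range};

fn main() {
    let resp = PrepareRenameResponse::RangeWithPlaceholder {
        range: Range {
            start: Position { line: 2, character: 0 },
            end: Position { line: 2, character: 8 },
        },
        placeholder: "old_name".to_string(), // invented symbol text
    };
    // Untagged: {"range":{...},"placeholder":"old_name"}
    println!("{}", serde_json::to_string(&resp).unwrap());

    // The default-behavior variant serializes as {"defaultBehavior":true}.
    let default = PrepareRenameResponse::DefaultBehavior { default_behavior: true };
    println!("{}", serde_json::to_string(&default).unwrap());
}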
diff --git a/vendor/lsp-types/src/request.rs b/vendor/lsp-types/src/request.rs
index aa03be4a4..da2efd74b 100644
--- a/vendor/lsp-types/src/request.rs
+++ b/vendor/lsp-types/src/request.rs
@@ -1,889 +1,1001 @@
-use super::*;
-
-use serde::{de::DeserializeOwned, Serialize};
-
-pub trait Request {
- type Params: DeserializeOwned + Serialize;
- type Result: DeserializeOwned + Serialize;
- const METHOD: &'static str;
-}
-
-#[macro_export]
-macro_rules! lsp_request {
- ("initialize") => {
- $crate::request::Initialize
- };
- ("shutdown") => {
- $crate::request::Shutdown
- };
-
- ("window/showMessageRequest") => {
- $crate::request::ShowMessageRequest
- };
-
- ("client/registerCapability") => {
- $crate::request::RegisterCapability
- };
- ("client/unregisterCapability") => {
- $crate::request::UnregisterCapability
- };
-
- ("workspace/symbol") => {
- $crate::request::WorkspaceSymbol
- };
- ("workspace/executeCommand") => {
- $crate::request::ExecuteCommand
- };
-
- ("textDocument/willSaveWaitUntil") => {
- $crate::request::WillSaveWaitUntil
- };
-
- ("textDocument/completion") => {
- $crate::request::Completion
- };
- ("completionItem/resolve") => {
- $crate::request::ResolveCompletionItem
- };
- ("textDocument/hover") => {
- $crate::request::HoverRequest
- };
- ("textDocument/signatureHelp") => {
- $crate::request::SignatureHelpRequest
- };
- ("textDocument/declaration") => {
- $crate::request::GotoDeclaration
- };
- ("textDocument/definition") => {
- $crate::request::GotoDefinition
- };
- ("textDocument/references") => {
- $crate::request::References
- };
- ("textDocument/documentHighlight") => {
- $crate::request::DocumentHighlightRequest
- };
- ("textDocument/documentSymbol") => {
- $crate::request::DocumentSymbolRequest
- };
- ("textDocument/codeAction") => {
- $crate::request::CodeActionRequest
- };
- ("textDocument/codeLens") => {
- $crate::request::CodeLensRequest
- };
- ("codeLens/resolve") => {
- $crate::request::CodeLensResolve
- };
- ("textDocument/documentLink") => {
- $crate::request::DocumentLinkRequest
- };
- ("documentLink/resolve") => {
- $crate::request::DocumentLinkResolve
- };
- ("workspace/applyEdit") => {
- $crate::request::ApplyWorkspaceEdit
- };
- ("textDocument/rangeFormatting") => {
- $crate::request::RangeFormatting
- };
- ("textDocument/onTypeFormatting") => {
- $crate::request::OnTypeFormatting
- };
- ("textDocument/formatting") => {
- $crate::request::Formatting
- };
- ("textDocument/rename") => {
- $crate::request::Rename
- };
- ("textDocument/documentColor") => {
- $crate::request::DocumentColor
- };
- ("textDocument/colorPresentation") => {
- $crate::request::ColorPresentationRequest
- };
- ("textDocument/foldingRange") => {
- $crate::request::FoldingRangeRequest
- };
- ("textDocument/prepareRename") => {
- $crate::request::PrepareRenameRequest
- };
- ("textDocument/implementation") => {
- $crate::request::GotoImplementation
- };
- ("textDocument/typeDefinition") => {
- $crate::request::GotoTypeDefinition
- };
- ("textDocument/selectionRange") => {
- $crate::request::SelectionRangeRequest
- };
- ("workspace/workspaceFolders") => {
- $crate::request::WorkspaceFoldersRequest
- };
- ("workspace/configuration") => {
- $crate::request::WorkspaceConfiguration
- };
- ("window/workDoneProgress/create") => {
- $crate::request::WorkDoneProgressCreate
- };
- ("callHierarchy/incomingCalls") => {
- $crate::request::CallHierarchyIncomingCalls
- };
- ("callHierarchy/outgoingCalls") => {
- $crate::request::CallHierarchyOutgoingCalls
- };
- ("textDocument/moniker") => {
- $crate::request::MonikerRequest
- };
- ("textDocument/linkedEditingRange") => {
- $crate::request::LinkedEditingRange
- };
- ("textDocument/prepareCallHierarchy") => {
- $crate::request::CallHierarchyPrepare
- };
- ("textDocument/semanticTokens/full") => {
- $crate::request::SemanticTokensFullRequest
- };
- ("textDocument/semanticTokens/full/delta") => {
- $crate::request::SemanticTokensFullDeltaRequest
- };
- ("textDocument/semanticTokens/range") => {
- $crate::request::SemanticTokensRangeRequest
- };
- ("workspace/willCreateFiles") => {
- $crate::request::WillCreateFiles
- };
- ("workspace/willRenameFiles") => {
- $crate::request::WillRenameFiles
- };
- ("workspace/willDeleteFiles") => {
- $crate::request::WillDeleteFiles
- };
- ("workspace/semanticTokens/refresh") => {
- $crate::request::SemanticTokensRefresh
- };
- ("workspace/codeLens/refresh") => {
- $crate::request::CodeLensRefresh
- };
- ("codeAction/resolve") => {
- $crate::request::CodeActionResolveRequest
- };
- ("window/showDocument") => {
- $crate::request::ShowDocument
- };
-}
-
-/// The initialize request is sent as the first request from the client to the server.
-/// If the server receives request or notification before the `initialize` request it should act as follows:
-///
- /// * for a request, the response should be an error with `code: -32001`. The message can be picked by the server.
-/// * notifications should be dropped.
-#[derive(Debug)]
-pub enum Initialize {}
-
-impl Request for Initialize {
- type Params = InitializeParams;
- type Result = InitializeResult;
- const METHOD: &'static str = "initialize";
-}
-
-/// The shutdown request is sent from the client to the server. It asks the server to shut down,
-/// but to not exit (otherwise the response might not be delivered correctly to the client).
-/// There is a separate exit notification that asks the server to exit.
-#[derive(Debug)]
-pub enum Shutdown {}
-
-impl Request for Shutdown {
- type Params = ();
- type Result = ();
- const METHOD: &'static str = "shutdown";
-}
-
-/// The show message request is sent from a server to a client to ask the client to display a particular message
- /// in the user interface. In addition to the show message notification, the request allows passing actions and
- /// waiting for an answer from the client.
-#[derive(Debug)]
-pub enum ShowMessageRequest {}
-
-impl Request for ShowMessageRequest {
- type Params = ShowMessageRequestParams;
- type Result = Option<MessageActionItem>;
- const METHOD: &'static str = "window/showMessageRequest";
-}
-
-/// The client/registerCapability request is sent from the server to the client to register for a new capability
-/// on the client side. Not all clients need to support dynamic capability registration. A client opts in via the
-/// ClientCapabilities.GenericCapability property.
-#[derive(Debug)]
-pub enum RegisterCapability {}
-
-impl Request for RegisterCapability {
- type Params = RegistrationParams;
- type Result = ();
- const METHOD: &'static str = "client/registerCapability";
-}
-
-/// The client/unregisterCapability request is sent from the server to the client to unregister a
- /// previously registered capability.
-#[derive(Debug)]
-pub enum UnregisterCapability {}
-
-impl Request for UnregisterCapability {
- type Params = UnregistrationParams;
- type Result = ();
- const METHOD: &'static str = "client/unregisterCapability";
-}
-
-/// The Completion request is sent from the client to the server to compute completion items at a given cursor position.
-/// Completion items are presented in the IntelliSense user interface. If computing full completion items is expensive,
-/// servers can additionally provide a handler for the completion item resolve request ('completionItem/resolve').
-/// This request is sent when a completion item is selected in the user interface. A typical use case is for example:
-/// the 'textDocument/completion' request doesn’t fill in the documentation property for returned completion items
-/// since it is expensive to compute. When the item is selected in the user interface then a ‘completionItem/resolve’
-/// request is sent with the selected completion item as a param. The returned completion item should have the
-/// documentation property filled in. The request can delay the computation of the detail and documentation properties.
-/// However, properties that are needed for the initial sorting and filtering, like sortText, filterText, insertText,
-/// and textEdit must be provided in the textDocument/completion request and must not be changed during resolve.
-#[derive(Debug)]
-pub enum Completion {}
-
-impl Request for Completion {
- type Params = CompletionParams;
- type Result = Option<CompletionResponse>;
- const METHOD: &'static str = "textDocument/completion";
-}
-
-/// The request is sent from the client to the server to resolve additional information for a given completion item.
-#[derive(Debug)]
-pub enum ResolveCompletionItem {}
-
-impl Request for ResolveCompletionItem {
- type Params = CompletionItem;
- type Result = CompletionItem;
- const METHOD: &'static str = "completionItem/resolve";
-}
-
-/// The hover request is sent from the client to the server to request hover information at a given text
-/// document position.
-#[derive(Debug)]
-pub enum HoverRequest {}
-
-impl Request for HoverRequest {
- type Params = HoverParams;
- type Result = Option<Hover>;
- const METHOD: &'static str = "textDocument/hover";
-}
-
-/// The signature help request is sent from the client to the server to request signature information at
-/// a given cursor position.
-#[derive(Debug)]
-pub enum SignatureHelpRequest {}
-
-impl Request for SignatureHelpRequest {
- type Params = SignatureHelpParams;
- type Result = Option<SignatureHelp>;
- const METHOD: &'static str = "textDocument/signatureHelp";
-}
-
-#[derive(Debug)]
-pub enum GotoDeclaration {}
-pub type GotoDeclarationParams = GotoDefinitionParams;
-pub type GotoDeclarationResponse = GotoDefinitionResponse;
-
-/// The goto declaration request is sent from the client to the server to resolve the declaration location of
-/// a symbol at a given text document position.
-impl Request for GotoDeclaration {
- type Params = GotoDeclarationParams;
- type Result = Option<GotoDeclarationResponse>;
- const METHOD: &'static str = "textDocument/declaration";
-}
-
-/// The goto definition request is sent from the client to the server to resolve the definition location of
-/// a symbol at a given text document position.
-#[derive(Debug)]
-pub enum GotoDefinition {}
-
-impl Request for GotoDefinition {
- type Params = GotoDefinitionParams;
- type Result = Option<GotoDefinitionResponse>;
- const METHOD: &'static str = "textDocument/definition";
-}
-
-/// The references request is sent from the client to the server to resolve project-wide references for the
-/// symbol denoted by the given text document position.
-#[derive(Debug)]
-pub enum References {}
-
-impl Request for References {
- type Params = ReferenceParams;
- type Result = Option<Vec<Location>>;
- const METHOD: &'static str = "textDocument/references";
-}
-
-/// The goto type definition request is sent from the client to the
-/// server to resolve the type definition location of a symbol at a
-/// given text document position.
-#[derive(Debug)]
-pub enum GotoTypeDefinition {}
-
-pub type GotoTypeDefinitionParams = GotoDefinitionParams;
-pub type GotoTypeDefinitionResponse = GotoDefinitionResponse;
-
-impl Request for GotoTypeDefinition {
- type Params = GotoTypeDefinitionParams;
- type Result = Option<GotoTypeDefinitionResponse>;
- const METHOD: &'static str = "textDocument/typeDefinition";
-}
-
-/// The goto implementation request is sent from the client to the
-/// server to resolve the implementation location of a symbol at a
-/// given text document position.
-#[derive(Debug)]
-pub enum GotoImplementation {}
-
-pub type GotoImplementationParams = GotoTypeDefinitionParams;
-pub type GotoImplementationResponse = GotoDefinitionResponse;
-
-impl Request for GotoImplementation {
- type Params = GotoImplementationParams;
- type Result = Option<GotoImplementationResponse>;
- const METHOD: &'static str = "textDocument/implementation";
-}
-
- /// The document highlight request is sent from the client to the server to resolve document highlights
-/// for a given text document position.
-/// For programming languages this usually highlights all references to the symbol scoped to this file.
-/// However we kept 'textDocument/documentHighlight' and 'textDocument/references' separate requests since
-/// the first one is allowed to be more fuzzy.
-/// Symbol matches usually have a DocumentHighlightKind of Read or Write whereas fuzzy or textual matches
-/// use Text as the kind.
-#[derive(Debug)]
-pub enum DocumentHighlightRequest {}
-
-impl Request for DocumentHighlightRequest {
- type Params = DocumentHighlightParams;
- type Result = Option<Vec<DocumentHighlight>>;
- const METHOD: &'static str = "textDocument/documentHighlight";
-}
-
-/// The document symbol request is sent from the client to the server to list all symbols found in a given
-/// text document.
-#[derive(Debug)]
-pub enum DocumentSymbolRequest {}
-
-impl Request for DocumentSymbolRequest {
- type Params = DocumentSymbolParams;
- type Result = Option<DocumentSymbolResponse>;
- const METHOD: &'static str = "textDocument/documentSymbol";
-}
-
-/// The workspace symbol request is sent from the client to the server to list project-wide symbols
-/// matching the query string.
-#[derive(Debug)]
-pub enum WorkspaceSymbol {}
-
-impl Request for WorkspaceSymbol {
- type Params = WorkspaceSymbolParams;
- type Result = Option<Vec<SymbolInformation>>;
- const METHOD: &'static str = "workspace/symbol";
-}
-
-/// The workspace/executeCommand request is sent from the client to the server to trigger command execution on the server.
-/// In most cases the server creates a WorkspaceEdit structure and applies the changes to the workspace using the request
-/// workspace/applyEdit which is sent from the server to the client.
-#[derive(Debug)]
-pub enum ExecuteCommand {}
-
-impl Request for ExecuteCommand {
- type Params = ExecuteCommandParams;
- type Result = Option<Value>;
- const METHOD: &'static str = "workspace/executeCommand";
-}
-
-/// The document will save request is sent from the client to the server before the document is
-/// actually saved. The request can return an array of TextEdits which will be applied to the text
-/// document before it is saved. Please note that clients might drop results if computing the text
-/// edits took too long or if a server constantly fails on this request. This is done to keep the
-/// save fast and reliable.
-#[derive(Debug)]
-pub enum WillSaveWaitUntil {}
-
-impl Request for WillSaveWaitUntil {
- type Params = WillSaveTextDocumentParams;
- type Result = Option<Vec<TextEdit>>;
- const METHOD: &'static str = "textDocument/willSaveWaitUntil";
-}
-
-/// The workspace/applyEdit request is sent from the server to the client to modify resource on the
-/// client side.
-#[derive(Debug)]
-pub enum ApplyWorkspaceEdit {}
-
-impl Request for ApplyWorkspaceEdit {
- type Params = ApplyWorkspaceEditParams;
- type Result = ApplyWorkspaceEditResponse;
- const METHOD: &'static str = "workspace/applyEdit";
-}
-
-/// The workspace/configuration request is sent from the server to the client to fetch configuration settings
-/// from the client. The request can fetch several configuration settings in one roundtrip.
-/// The order of the returned configuration settings correspond to the order of the passed ConfigurationItems
-/// (e.g. the first item in the response is the result for the first configuration item in the params).
-///
-/// A ConfigurationItem consists of the configuration section to ask for and an additional scope URI.
-/// The configuration section ask for is defined by the server and doesn’t necessarily need to correspond to
-/// the configuration store used be the client. So a server might ask for a configuration cpp.formatterOptions
-/// but the client stores the configuration in a XML store layout differently.
-/// It is up to the client to do the necessary conversion. If a scope URI is provided the client should return
-/// the setting scoped to the provided resource. If the client for example uses EditorConfig to manage its
-/// settings the configuration should be returned for the passed resource URI. If the client can’t provide a
-/// configuration setting for a given scope then null need to be present in the returned array.
-#[derive(Debug)]
-pub enum WorkspaceConfiguration {}
-
-impl Request for WorkspaceConfiguration {
- type Params = ConfigurationParams;
- type Result = Vec<Value>;
- const METHOD: &'static str = "workspace/configuration";
-}
-
-/// The code action request is sent from the client to the server to compute commands for a given text document
-/// and range. The request is triggered when the user moves the cursor into a problem marker in the editor or
-/// presses the lightbulb associated with a marker.
-#[derive(Debug)]
-pub enum CodeActionRequest {}
-
-impl Request for CodeActionRequest {
- type Params = CodeActionParams;
- type Result = Option<CodeActionResponse>;
- const METHOD: &'static str = "textDocument/codeAction";
-}
-
-/// The request is sent from the client to the server to resolve additional information for a given code action.
-/// This is usually used to compute the `edit` property of a code action to avoid its unnecessary computation
-/// during the `textDocument/codeAction` request.
-///
-/// since 3.16.0
-#[derive(Debug)]
-pub enum CodeActionResolveRequest {}
-
-impl Request for CodeActionResolveRequest {
- type Params = CodeAction;
- type Result = CodeAction;
- const METHOD: &'static str = "codeAction/resolve";
-}
-
-/// The code lens request is sent from the client to the server to compute code lenses for a given text document.
-#[derive(Debug)]
-pub enum CodeLensRequest {}
-
-impl Request for CodeLensRequest {
- type Params = CodeLensParams;
- type Result = Option<Vec<CodeLens>>;
- const METHOD: &'static str = "textDocument/codeLens";
-}
-
-/// The code lens resolve request is sent from the client to the server to resolve the command for a
-/// given code lens item.
-#[derive(Debug)]
-pub enum CodeLensResolve {}
-
-impl Request for CodeLensResolve {
- type Params = CodeLens;
- type Result = CodeLens;
- const METHOD: &'static str = "codeLens/resolve";
-}
-
-/// The document links request is sent from the client to the server to request the location of links in a document.
-#[derive(Debug)]
-pub enum DocumentLinkRequest {}
-
-impl Request for DocumentLinkRequest {
- type Params = DocumentLinkParams;
- type Result = Option<Vec<DocumentLink>>;
- const METHOD: &'static str = "textDocument/documentLink";
-}
-
-/// The document link resolve request is sent from the client to the server to resolve the target of
-/// a given document link.
-#[derive(Debug)]
-pub enum DocumentLinkResolve {}
-
-impl Request for DocumentLinkResolve {
- type Params = DocumentLink;
- type Result = DocumentLink;
- const METHOD: &'static str = "documentLink/resolve";
-}
-
-/// The document formatting request is sent from the server to the client to format a whole document.
-#[derive(Debug)]
-pub enum Formatting {}
-
-impl Request for Formatting {
- type Params = DocumentFormattingParams;
- type Result = Option<Vec<TextEdit>>;
- const METHOD: &'static str = "textDocument/formatting";
-}
-
-/// The document range formatting request is sent from the client to the server to format a given range in a document.
-#[derive(Debug)]
-pub enum RangeFormatting {}
-
-impl Request for RangeFormatting {
- type Params = DocumentRangeFormattingParams;
- type Result = Option<Vec<TextEdit>>;
- const METHOD: &'static str = "textDocument/rangeFormatting";
-}
-
-/// The document on type formatting request is sent from the client to the server to format parts of
-/// the document during typing.
-#[derive(Debug)]
-pub enum OnTypeFormatting {}
-
-impl Request for OnTypeFormatting {
- type Params = DocumentOnTypeFormattingParams;
- type Result = Option<Vec<TextEdit>>;
- const METHOD: &'static str = "textDocument/onTypeFormatting";
-}
-
-/// The linked editing request is sent from the client to the server to return for a given position in a document
-/// the range of the symbol at the position and all ranges that have the same content.
-/// Optionally a word pattern can be returned to describe valid contents. A rename to one of the ranges can be applied
-/// to all other ranges if the new content is valid. If no result-specific word pattern is provided, the word pattern from
-/// the client’s language configuration is used.
-#[derive(Debug)]
-pub enum LinkedEditingRange {}
-
-impl Request for LinkedEditingRange {
- type Params = LinkedEditingRangeParams;
- type Result = Option<LinkedEditingRanges>;
- const METHOD: &'static str = "textDocument/linkedEditingRange";
-}
-
-/// The rename request is sent from the client to the server to perform a workspace-wide rename of a symbol.
-#[derive(Debug)]
-pub enum Rename {}
-
-impl Request for Rename {
- type Params = RenameParams;
- type Result = Option<WorkspaceEdit>;
- const METHOD: &'static str = "textDocument/rename";
-}
-
-/// The document color request is sent from the client to the server to list all color references found in a given text document.
-/// Along with the range, a color value in RGB is returned.
-#[derive(Debug)]
-pub enum DocumentColor {}
-
-impl Request for DocumentColor {
- type Params = DocumentColorParams;
- type Result = Vec<ColorInformation>;
- const METHOD: &'static str = "textDocument/documentColor";
-}
-
-/// The color presentation request is sent from the client to the server to obtain a list of presentations for a color value
-/// at a given location.
-#[derive(Debug)]
-pub enum ColorPresentationRequest {}
-
-impl Request for ColorPresentationRequest {
- type Params = ColorPresentationParams;
- type Result = Vec<ColorPresentation>;
- const METHOD: &'static str = "textDocument/colorPresentation";
-}
-
-/// The folding range request is sent from the client to the server to return all folding ranges found in a given text document.
-#[derive(Debug)]
-pub enum FoldingRangeRequest {}
-
-impl Request for FoldingRangeRequest {
- type Params = FoldingRangeParams;
- type Result = Option<Vec<FoldingRange>>;
- const METHOD: &'static str = "textDocument/foldingRange";
-}
-
-/// The prepare rename request is sent from the client to the server to setup and test the validity of a rename operation
-/// at a given location.
-#[derive(Debug)]
-pub enum PrepareRenameRequest {}
-
-impl Request for PrepareRenameRequest {
- type Params = TextDocumentPositionParams;
- type Result = Option<PrepareRenameResponse>;
- const METHOD: &'static str = "textDocument/prepareRename";
-}
-
-/// The workspace/workspaceFolders request is sent from the server to the client to fetch the current open list of
-/// workspace folders. Returns null in the response if only a single file is open in the tool.
-/// Returns an empty array if a workspace is open but no folders are configured.
-#[derive(Debug)]
-pub enum WorkspaceFoldersRequest {}
-
-impl Request for WorkspaceFoldersRequest {
- type Params = ();
- type Result = Option<Vec<WorkspaceFolder>>;
- const METHOD: &'static str = "workspace/workspaceFolders";
-}
-
-/// The `window/workDoneProgress/create` request is sent from the server
-/// to the clientto ask the client to create a work done progress.
-#[derive(Debug)]
-pub enum WorkDoneProgressCreate {}
-
-impl Request for WorkDoneProgressCreate {
- type Params = WorkDoneProgressCreateParams;
- type Result = ();
- const METHOD: &'static str = "window/workDoneProgress/create";
-}
-
-/// The selection range request is sent from the client to the server to return
-/// suggested selection ranges at given positions. A selection range is a range
-/// around the cursor position which the user might be interested in selecting.
-///
-/// A selection range in the return array is for the position in the provided parameters at the same index.
-/// Therefore `positions[i]` must be contained in `result[i].range`.
-///
-/// Typically, but not necessary, selection ranges correspond to the nodes of the
-/// syntax tree.
-pub enum SelectionRangeRequest {}
-
-impl Request for SelectionRangeRequest {
- type Params = SelectionRangeParams;
- type Result = Option<Vec<SelectionRange>>;
- const METHOD: &'static str = "textDocument/selectionRange";
-}
-
-pub enum CallHierarchyPrepare {}
-
-impl Request for CallHierarchyPrepare {
- type Params = CallHierarchyPrepareParams;
- type Result = Option<Vec<CallHierarchyItem>>;
- const METHOD: &'static str = "textDocument/prepareCallHierarchy";
-}
-
-pub enum CallHierarchyIncomingCalls {}
-
-impl Request for CallHierarchyIncomingCalls {
- type Params = CallHierarchyIncomingCallsParams;
- type Result = Option<Vec<CallHierarchyIncomingCall>>;
- const METHOD: &'static str = "callHierarchy/incomingCalls";
-}
-
-pub enum CallHierarchyOutgoingCalls {}
-
-impl Request for CallHierarchyOutgoingCalls {
- type Params = CallHierarchyOutgoingCallsParams;
- type Result = Option<Vec<CallHierarchyOutgoingCall>>;
- const METHOD: &'static str = "callHierarchy/outgoingCalls";
-}
-
-pub enum SemanticTokensFullRequest {}
-
-impl Request for SemanticTokensFullRequest {
- type Params = SemanticTokensParams;
- type Result = Option<SemanticTokensResult>;
- const METHOD: &'static str = "textDocument/semanticTokens/full";
-}
-
-pub enum SemanticTokensFullDeltaRequest {}
-
-impl Request for SemanticTokensFullDeltaRequest {
- type Params = SemanticTokensDeltaParams;
- type Result = Option<SemanticTokensFullDeltaResult>;
- const METHOD: &'static str = "textDocument/semanticTokens/full/delta";
-}
-
-pub enum SemanticTokensRangeRequest {}
-
-impl Request for SemanticTokensRangeRequest {
- type Params = SemanticTokensRangeParams;
- type Result = Option<SemanticTokensRangeResult>;
- const METHOD: &'static str = "textDocument/semanticTokens/range";
-}
-
-/// The `workspace/semanticTokens/refresh` request is sent from the server to the client.
-/// Servers can use it to ask clients to refresh the editors for which this server provides semantic tokens.
-/// As a result the client should ask the server to recompute the semantic tokens for these editors.
-/// This is useful if a server detects a project wide configuration change which requires a re-calculation of all semantic tokens.
-/// Note that the client still has the freedom to delay the re-calculation of the semantic tokens if for example an editor is currently not visible.
-pub enum SemanticTokensRefresh {}
-
-impl Request for SemanticTokensRefresh {
- type Params = ();
- type Result = ();
- const METHOD: &'static str = "workspace/semanticTokens/refresh";
-}
-
-/// The workspace/codeLens/refresh request is sent from the server to the client.
-/// Servers can use it to ask clients to refresh the code lenses currently shown in editors.
-/// As a result the client should ask the server to recompute the code lenses for these editors.
-/// This is useful if a server detects a configuration change which requires a re-calculation of all code lenses.
-/// Note that the client still has the freedom to delay the re-calculation of the code lenses if for example an editor is currently not visible.
-pub enum CodeLensRefresh {}
-
-impl Request for CodeLensRefresh {
- type Params = ();
- type Result = ();
- const METHOD: &'static str = "workspace/codeLens/refresh";
-}
-
-/// The will create files request is sent from the client to the server before files are actually created as long as the creation is triggered from within the client. The request can return a WorkspaceEdit which will be applied to workspace before the files are created. Please note that clients might drop results if computing the edit took too long or if a server constantly fails on this request. This is done to keep creates fast and reliable.
-pub enum WillCreateFiles {}
-
-impl Request for WillCreateFiles {
- type Params = CreateFilesParams;
- type Result = Option<WorkspaceEdit>;
- const METHOD: &'static str = "workspace/willCreateFiles";
-}
-
-/// The will rename files request is sent from the client to the server before files are actually renamed as long as the rename is triggered from within the client. The request can return a WorkspaceEdit which will be applied to workspace before the files are renamed. Please note that clients might drop results if computing the edit took too long or if a server constantly fails on this request. This is done to keep renames fast and reliable.
-pub enum WillRenameFiles {}
-
-impl Request for WillRenameFiles {
- type Params = RenameFilesParams;
- type Result = Option<WorkspaceEdit>;
- const METHOD: &'static str = "workspace/willRenameFiles";
-}
-
-/// The will delete files request is sent from the client to the server before files are actually deleted as long as the deletion is triggered from within the client. The request can return a WorkspaceEdit which will be applied to workspace before the files are deleted. Please note that clients might drop results if computing the edit took too long or if a server constantly fails on this request. This is done to keep deletes fast and reliable.
-pub enum WillDeleteFiles {}
-
-impl Request for WillDeleteFiles {
- type Params = DeleteFilesParams;
- type Result = Option<WorkspaceEdit>;
- const METHOD: &'static str = "workspace/willDeleteFiles";
-}
-
-/// The show document request is sent from a server to a client to ask the client to display a particular document in the user interface.
-pub enum ShowDocument {}
-
-impl Request for ShowDocument {
- type Params = ShowDocumentParams;
- type Result = ShowDocumentResult;
- const METHOD: &'static str = "window/showDocument";
-}
-
-pub enum MonikerRequest {}
-
-impl Request for MonikerRequest {
- type Params = MonikerParams;
- type Result = Option<Vec<Moniker>>;
- const METHOD: &'static str = "textDocument/moniker";
-}
-
-#[cfg(feature = "proposed")]
-pub enum InlayHintRequest {}
-
-#[cfg(feature = "proposed")]
-impl Request for InlayHintRequest {
- type Params = InlayHintParams;
- type Result = Option<Vec<InlayHint>>;
- const METHOD: &'static str = "textDocument/inlayHint";
-}
-
-#[cfg(feature = "proposed")]
-pub enum InlayHintResolveRequest {}
-
-#[cfg(feature = "proposed")]
-impl Request for InlayHintResolveRequest {
- type Params = InlayHint;
- type Result = InlayHint;
- const METHOD: &'static str = "inlayHint/resolve";
-}
-
-#[cfg(feature = "proposed")]
-pub enum InlayHintRefreshRequest {}
-
-#[cfg(feature = "proposed")]
-impl Request for InlayHintRefreshRequest {
- type Params = ();
- type Result = ();
- const METHOD: &'static str = "workspace/inlayHint/refresh";
-}
-
-#[cfg(test)]
-mod test {
- use super::*;
-
- fn fake_call<R>()
- where
- R: Request,
- R::Params: serde::Serialize,
- R::Result: serde::de::DeserializeOwned,
- {
- }
-
- macro_rules! check_macro {
- ($name:tt) => {
- // check whethe the macro name matches the method
- assert_eq!(<lsp_request!($name) as Request>::METHOD, $name);
- // test whether type checking passes for each component
- fake_call::<lsp_request!($name)>();
- };
- }
-
- #[test]
- fn check_macro_definitions() {
- check_macro!("initialize");
- check_macro!("shutdown");
-
- check_macro!("window/showDocument");
- check_macro!("window/showMessageRequest");
- check_macro!("window/workDoneProgress/create");
-
- check_macro!("client/registerCapability");
- check_macro!("client/unregisterCapability");
-
- check_macro!("textDocument/willSaveWaitUntil");
- check_macro!("textDocument/completion");
- check_macro!("textDocument/hover");
- check_macro!("textDocument/signatureHelp");
- check_macro!("textDocument/declaration");
- check_macro!("textDocument/definition");
- check_macro!("textDocument/references");
- check_macro!("textDocument/documentHighlight");
- check_macro!("textDocument/documentSymbol");
- check_macro!("textDocument/codeAction");
- check_macro!("textDocument/codeLens");
- check_macro!("textDocument/documentLink");
- check_macro!("textDocument/rangeFormatting");
- check_macro!("textDocument/onTypeFormatting");
- check_macro!("textDocument/formatting");
- check_macro!("textDocument/rename");
- check_macro!("textDocument/documentColor");
- check_macro!("textDocument/colorPresentation");
- check_macro!("textDocument/foldingRange");
- check_macro!("textDocument/prepareRename");
- check_macro!("textDocument/implementation");
- check_macro!("textDocument/selectionRange");
- check_macro!("textDocument/typeDefinition");
- check_macro!("textDocument/moniker");
- check_macro!("textDocument/linkedEditingRange");
- check_macro!("textDocument/prepareCallHierarchy");
- check_macro!("textDocument/semanticTokens/full");
- check_macro!("textDocument/semanticTokens/full/delta");
- check_macro!("textDocument/semanticTokens/range");
-
- check_macro!("workspace/applyEdit");
- check_macro!("workspace/symbol");
- check_macro!("workspace/executeCommand");
- check_macro!("workspace/configuration");
- check_macro!("workspace/willCreateFiles");
- check_macro!("workspace/willRenameFiles");
- check_macro!("workspace/willDeleteFiles");
- check_macro!("workspace/workspaceFolders");
- check_macro!("workspace/semanticTokens/refresh");
- check_macro!("workspace/codeLens/refresh");
-
- check_macro!("codeAction/resolve");
- check_macro!("codeLens/resolve");
- check_macro!("completionItem/resolve");
- check_macro!("documentLink/resolve");
- check_macro!("callHierarchy/incomingCalls");
- check_macro!("callHierarchy/outgoingCalls");
- }
-
- #[test]
- #[cfg(feature = "proposed")]
- fn check_proposed_macro_definitions() {}
-}
+use super::*;
+
+use serde::{de::DeserializeOwned, Serialize};
+
+pub trait Request {
+ type Params: DeserializeOwned + Serialize;
+ type Result: DeserializeOwned + Serialize;
+ const METHOD: &'static str;
+}
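+
+// A minimal sketch (not part of the upstream crate) of how a request type ties a
+// method string to its parameter and result types through this trait. The
+// `CustomRequest` type and the "custom/echo" method are hypothetical; the real
+// request types follow below.
+#[cfg(test)]
+mod request_trait_sketch {
+    use super::*;
+
+    #[derive(Debug)]
+    enum CustomRequest {}
+
+    impl Request for CustomRequest {
+        type Params = String;
+        type Result = String;
+        const METHOD: &'static str = "custom/echo";
+    }
+
+    #[test]
+    fn method_name_is_an_associated_constant() {
+        assert_eq!(CustomRequest::METHOD, "custom/echo");
+    }
+}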
+
+#[macro_export]
+macro_rules! lsp_request {
+ ("initialize") => {
+ $crate::request::Initialize
+ };
+ ("shutdown") => {
+ $crate::request::Shutdown
+ };
+
+ ("window/showMessageRequest") => {
+ $crate::request::ShowMessageRequest
+ };
+
+ ("client/registerCapability") => {
+ $crate::request::RegisterCapability
+ };
+ ("client/unregisterCapability") => {
+ $crate::request::UnregisterCapability
+ };
+
+ ("workspace/symbol") => {
+ $crate::request::WorkspaceSymbolRequest
+ };
+ ("workspace/executeCommand") => {
+ $crate::request::ExecuteCommand
+ };
+
+ ("textDocument/willSaveWaitUntil") => {
+ $crate::request::WillSaveWaitUntil
+ };
+
+ ("textDocument/completion") => {
+ $crate::request::Completion
+ };
+ ("completionItem/resolve") => {
+ $crate::request::ResolveCompletionItem
+ };
+ ("textDocument/hover") => {
+ $crate::request::HoverRequest
+ };
+ ("textDocument/signatureHelp") => {
+ $crate::request::SignatureHelpRequest
+ };
+ ("textDocument/declaration") => {
+ $crate::request::GotoDeclaration
+ };
+ ("textDocument/definition") => {
+ $crate::request::GotoDefinition
+ };
+ ("textDocument/references") => {
+ $crate::request::References
+ };
+ ("textDocument/documentHighlight") => {
+ $crate::request::DocumentHighlightRequest
+ };
+ ("textDocument/documentSymbol") => {
+ $crate::request::DocumentSymbolRequest
+ };
+ ("textDocument/codeAction") => {
+ $crate::request::CodeActionRequest
+ };
+ ("textDocument/codeLens") => {
+ $crate::request::CodeLensRequest
+ };
+ ("codeLens/resolve") => {
+ $crate::request::CodeLensResolve
+ };
+ ("textDocument/documentLink") => {
+ $crate::request::DocumentLinkRequest
+ };
+ ("documentLink/resolve") => {
+ $crate::request::DocumentLinkResolve
+ };
+ ("workspace/applyEdit") => {
+ $crate::request::ApplyWorkspaceEdit
+ };
+ ("textDocument/rangeFormatting") => {
+ $crate::request::RangeFormatting
+ };
+ ("textDocument/onTypeFormatting") => {
+ $crate::request::OnTypeFormatting
+ };
+ ("textDocument/formatting") => {
+ $crate::request::Formatting
+ };
+ ("textDocument/rename") => {
+ $crate::request::Rename
+ };
+ ("textDocument/documentColor") => {
+ $crate::request::DocumentColor
+ };
+ ("textDocument/colorPresentation") => {
+ $crate::request::ColorPresentationRequest
+ };
+ ("textDocument/foldingRange") => {
+ $crate::request::FoldingRangeRequest
+ };
+ ("textDocument/prepareRename") => {
+ $crate::request::PrepareRenameRequest
+ };
+ ("textDocument/implementation") => {
+ $crate::request::GotoImplementation
+ };
+ ("textDocument/typeDefinition") => {
+ $crate::request::GotoTypeDefinition
+ };
+ ("textDocument/selectionRange") => {
+ $crate::request::SelectionRangeRequest
+ };
+ ("workspace/workspaceFolders") => {
+ $crate::request::WorkspaceFoldersRequest
+ };
+ ("workspace/configuration") => {
+ $crate::request::WorkspaceConfiguration
+ };
+ ("window/workDoneProgress/create") => {
+ $crate::request::WorkDoneProgressCreate
+ };
+ ("callHierarchy/incomingCalls") => {
+ $crate::request::CallHierarchyIncomingCalls
+ };
+ ("callHierarchy/outgoingCalls") => {
+ $crate::request::CallHierarchyOutgoingCalls
+ };
+ ("textDocument/moniker") => {
+ $crate::request::MonikerRequest
+ };
+ ("textDocument/linkedEditingRange") => {
+ $crate::request::LinkedEditingRange
+ };
+ ("textDocument/prepareCallHierarchy") => {
+ $crate::request::CallHierarchyPrepare
+ };
+ ("textDocument/prepareTypeHierarchy") => {
+ $crate::request::TypeHierarchyPrepare
+ };
+ ("textDocument/semanticTokens/full") => {
+ $crate::request::SemanticTokensFullRequest
+ };
+ ("textDocument/semanticTokens/full/delta") => {
+ $crate::request::SemanticTokensFullDeltaRequest
+ };
+ ("textDocument/semanticTokens/range") => {
+ $crate::request::SemanticTokensRangeRequest
+ };
+ ("textDocument/inlayHint") => {
+ $crate::request::InlayHintRequest
+ };
+ ("textDocument/inlineValue") => {
+ $crate::request::InlineValueRequest
+ };
+ ("typeHierarchy/supertypes") => {
+ $crate::request::TypeHierarchySupertypes
+ };
+ ("typeHierarchy/subtypes") => {
+ $crate::request::TypeHierarchySubtypes
+ };
+ ("workspace/willCreateFiles") => {
+ $crate::request::WillCreateFiles
+ };
+ ("workspace/willRenameFiles") => {
+ $crate::request::WillRenameFiles
+ };
+ ("workspace/willDeleteFiles") => {
+ $crate::request::WillDeleteFiles
+ };
+ ("workspace/semanticTokens/refresh") => {
+ $crate::request::SemanticTokensRefresh
+ };
+ ("workspace/codeLens/refresh") => {
+ $crate::request::CodeLensRefresh
+ };
+ ("workspace/inlayHint/refresh") => {
+ $crate::request::InlayHintRefreshRequest
+ };
+ ("workspace/inlineValue/refresh") => {
+ $crate::request::InlineValueRefreshRequest
+ };
+ ("codeAction/resolve") => {
+ $crate::request::CodeActionResolveRequest
+ };
+ ("inlayHint/resolve") => {
+ $crate::request::InlayHintResolveRequest
+ };
+ ("window/showDocument") => {
+ $crate::request::ShowDocument
+ };
+}
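+
+// A short sketch of using the macro above: the string literal is resolved at
+// compile time to the matching request type, keeping the method name and its
+// param/result types in sync. This mirrors what the test module at the bottom
+// of this file does for every method.
+#[cfg(test)]
+mod lsp_request_macro_sketch {
+    use super::*;
+
+    #[test]
+    fn macro_maps_method_string_to_request_type() {
+        // The macro expands to a type path, usable anywhere a type is expected.
+        assert_eq!(
+            <lsp_request!("textDocument/hover") as Request>::METHOD,
+            "textDocument/hover"
+        );
+    }
+}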
+
+/// The initialize request is sent as the first request from the client to the server.
+/// If the server receives a request or notification before the `initialize` request, it should act as follows:
+///
+/// * for a request, the response should be an error with `code: -32001`. The message can be picked by the server.
+/// * notifications should be dropped.
+#[derive(Debug)]
+pub enum Initialize {}
+
+impl Request for Initialize {
+ type Params = InitializeParams;
+ type Result = InitializeResult;
+ const METHOD: &'static str = "initialize";
+}
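+
+// A hedged sketch (not part of the crate) of the error a server might send back
+// for a request that arrives before `initialize`, per the doc comment above. The
+// JSON-RPC envelope and message text are illustrative; only the `-32001` code is
+// prescribed.
+#[allow(dead_code)]
+fn server_not_initialized_error(request_id: i64) -> Value {
+    serde_json::json!({
+        "jsonrpc": "2.0",
+        "id": request_id,
+        "error": {
+            "code": -32001,
+            "message": "received a request before the initialize request"
+        }
+    })
+}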
+
+/// The shutdown request is sent from the client to the server. It asks the server to shut down,
+/// but to not exit (otherwise the response might not be delivered correctly to the client).
+/// There is a separate exit notification that asks the server to exit.
+#[derive(Debug)]
+pub enum Shutdown {}
+
+impl Request for Shutdown {
+ type Params = ();
+ type Result = ();
+ const METHOD: &'static str = "shutdown";
+}
+
+/// The show message request is sent from a server to a client to ask the client to display a particular message
+/// in the user interface. In addition to the show message notification, the request allows the server to pass
+/// actions and to wait for an answer from the client.
+#[derive(Debug)]
+pub enum ShowMessageRequest {}
+
+impl Request for ShowMessageRequest {
+ type Params = ShowMessageRequestParams;
+ type Result = Option<MessageActionItem>;
+ const METHOD: &'static str = "window/showMessageRequest";
+}
+
+/// The client/registerCapability request is sent from the server to the client to register for a new capability
+/// on the client side. Not all clients need to support dynamic capability registration. A client opts in via the
+/// ClientCapabilities.GenericCapability property.
+#[derive(Debug)]
+pub enum RegisterCapability {}
+
+impl Request for RegisterCapability {
+ type Params = RegistrationParams;
+ type Result = ();
+ const METHOD: &'static str = "client/registerCapability";
+}
+
+/// The client/unregisterCapability request is sent from the server to the client to unregister a
+/// previously registered capability.
+#[derive(Debug)]
+pub enum UnregisterCapability {}
+
+impl Request for UnregisterCapability {
+ type Params = UnregistrationParams;
+ type Result = ();
+ const METHOD: &'static str = "client/unregisterCapability";
+}
+
+/// The Completion request is sent from the client to the server to compute completion items at a given cursor position.
+/// Completion items are presented in the IntelliSense user interface. If computing full completion items is expensive,
+/// servers can additionally provide a handler for the completion item resolve request ('completionItem/resolve').
+/// This request is sent when a completion item is selected in the user interface. A typical use case is for example:
+/// the 'textDocument/completion' request doesn’t fill in the documentation property for returned completion items
+/// since it is expensive to compute. When the item is selected in the user interface then a ‘completionItem/resolve’
+/// request is sent with the selected completion item as a param. The returned completion item should have the
+/// documentation property filled in. The request can delay the computation of the detail and documentation properties.
+/// However, properties that are needed for the initial sorting and filtering, like sortText, filterText, insertText,
+/// and textEdit must be provided in the textDocument/completion request and must not be changed during resolve.
+#[derive(Debug)]
+pub enum Completion {}
+
+impl Request for Completion {
+ type Params = CompletionParams;
+ type Result = Option<CompletionResponse>;
+ const METHOD: &'static str = "textDocument/completion";
+}
+
+/// The request is sent from the client to the server to resolve additional information for a given completion item.
+#[derive(Debug)]
+pub enum ResolveCompletionItem {}
+
+impl Request for ResolveCompletionItem {
+ type Params = CompletionItem;
+ type Result = CompletionItem;
+ const METHOD: &'static str = "completionItem/resolve";
+}
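+
+// A sketch of the lazy-resolve pattern described above, seen from the server
+// side: `documentation` is filled in only once the client asks to resolve the
+// selected item. The placeholder text is made up; fields used for sorting and
+// filtering are assumed to have been set in the original completion response.
+#[allow(dead_code)]
+fn resolve_completion_item_sketch(mut item: CompletionItem) -> CompletionItem {
+    if item.documentation.is_none() {
+        item.documentation = Some(Documentation::String(
+            "documentation computed lazily during completionItem/resolve".to_string(),
+        ));
+    }
+    item
+}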
+
+/// The hover request is sent from the client to the server to request hover information at a given text
+/// document position.
+#[derive(Debug)]
+pub enum HoverRequest {}
+
+impl Request for HoverRequest {
+ type Params = HoverParams;
+ type Result = Option<Hover>;
+ const METHOD: &'static str = "textDocument/hover";
+}
+
+/// The signature help request is sent from the client to the server to request signature information at
+/// a given cursor position.
+#[derive(Debug)]
+pub enum SignatureHelpRequest {}
+
+impl Request for SignatureHelpRequest {
+ type Params = SignatureHelpParams;
+ type Result = Option<SignatureHelp>;
+ const METHOD: &'static str = "textDocument/signatureHelp";
+}
+
+/// The goto declaration request is sent from the client to the server to resolve the declaration location of
+/// a symbol at a given text document position.
+#[derive(Debug)]
+pub enum GotoDeclaration {}
+pub type GotoDeclarationParams = GotoDefinitionParams;
+pub type GotoDeclarationResponse = GotoDefinitionResponse;
+
+impl Request for GotoDeclaration {
+ type Params = GotoDeclarationParams;
+ type Result = Option<GotoDeclarationResponse>;
+ const METHOD: &'static str = "textDocument/declaration";
+}
+
+/// The goto definition request is sent from the client to the server to resolve the definition location of
+/// a symbol at a given text document position.
+#[derive(Debug)]
+pub enum GotoDefinition {}
+
+impl Request for GotoDefinition {
+ type Params = GotoDefinitionParams;
+ type Result = Option<GotoDefinitionResponse>;
+ const METHOD: &'static str = "textDocument/definition";
+}
+
+/// The references request is sent from the client to the server to resolve project-wide references for the
+/// symbol denoted by the given text document position.
+#[derive(Debug)]
+pub enum References {}
+
+impl Request for References {
+ type Params = ReferenceParams;
+ type Result = Option<Vec<Location>>;
+ const METHOD: &'static str = "textDocument/references";
+}
+
+/// The goto type definition request is sent from the client to the
+/// server to resolve the type definition location of a symbol at a
+/// given text document position.
+#[derive(Debug)]
+pub enum GotoTypeDefinition {}
+
+pub type GotoTypeDefinitionParams = GotoDefinitionParams;
+pub type GotoTypeDefinitionResponse = GotoDefinitionResponse;
+
+impl Request for GotoTypeDefinition {
+ type Params = GotoTypeDefinitionParams;
+ type Result = Option<GotoTypeDefinitionResponse>;
+ const METHOD: &'static str = "textDocument/typeDefinition";
+}
+
+/// The goto implementation request is sent from the client to the
+/// server to resolve the implementation location of a symbol at a
+/// given text document position.
+#[derive(Debug)]
+pub enum GotoImplementation {}
+
+pub type GotoImplementationParams = GotoTypeDefinitionParams;
+pub type GotoImplementationResponse = GotoDefinitionResponse;
+
+impl Request for GotoImplementation {
+ type Params = GotoImplementationParams;
+ type Result = Option<GotoImplementationResponse>;
+ const METHOD: &'static str = "textDocument/implementation";
+}
+
+/// The document highlight request is sent from the client to the server to resolve document highlights
+/// for a given text document position.
+/// For programming languages this usually highlights all references to the symbol scoped to this file.
+/// However, we kept 'textDocument/documentHighlight' and 'textDocument/references' as separate requests since
+/// the first one is allowed to be more fuzzy.
+/// Symbol matches usually have a DocumentHighlightKind of Read or Write whereas fuzzy or textual matches
+/// use Text as the kind.
+#[derive(Debug)]
+pub enum DocumentHighlightRequest {}
+
+impl Request for DocumentHighlightRequest {
+ type Params = DocumentHighlightParams;
+ type Result = Option<Vec<DocumentHighlight>>;
+ const METHOD: &'static str = "textDocument/documentHighlight";
+}
+
+/// The document symbol request is sent from the client to the server to list all symbols found in a given
+/// text document.
+#[derive(Debug)]
+pub enum DocumentSymbolRequest {}
+
+impl Request for DocumentSymbolRequest {
+ type Params = DocumentSymbolParams;
+ type Result = Option<DocumentSymbolResponse>;
+ const METHOD: &'static str = "textDocument/documentSymbol";
+}
+
+/// The workspace symbol request is sent from the client to the server to list project-wide symbols
+/// matching the query string.
+#[derive(Debug)]
+pub enum WorkspaceSymbolRequest {}
+
+impl Request for WorkspaceSymbolRequest {
+ type Params = WorkspaceSymbolParams;
+ type Result = Option<WorkspaceSymbolResponse>;
+ const METHOD: &'static str = "workspace/symbol";
+}
+
+/// The `workspaceSymbol/resolve` request is sent from the client to the server to resolve
+/// additional information for a given workspace symbol.
+#[derive(Debug)]
+pub enum WorkspaceSymbolResolve {}
+
+impl Request for WorkspaceSymbolResolve {
+ type Params = WorkspaceSymbol;
+ type Result = WorkspaceSymbol;
+ const METHOD: &'static str = "workspaceSymbol/resolve";
+}
+
+/// The workspace/executeCommand request is sent from the client to the server to trigger command execution on the server.
+/// In most cases the server creates a WorkspaceEdit structure and applies the changes to the workspace using the request
+/// workspace/applyEdit which is sent from the server to the client.
+#[derive(Debug)]
+pub enum ExecuteCommand {}
+
+impl Request for ExecuteCommand {
+ type Params = ExecuteCommandParams;
+ type Result = Option<Value>;
+ const METHOD: &'static str = "workspace/executeCommand";
+}
+
+/// The document will save request is sent from the client to the server before the document is
+/// actually saved. The request can return an array of TextEdits which will be applied to the text
+/// document before it is saved. Please note that clients might drop results if computing the text
+/// edits took too long or if a server constantly fails on this request. This is done to keep the
+/// save fast and reliable.
+#[derive(Debug)]
+pub enum WillSaveWaitUntil {}
+
+impl Request for WillSaveWaitUntil {
+ type Params = WillSaveTextDocumentParams;
+ type Result = Option<Vec<TextEdit>>;
+ const METHOD: &'static str = "textDocument/willSaveWaitUntil";
+}
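+
+// A server-side sketch for the request above: return one edit to apply before
+// the save. The edit (inserting a newline at the very start of the document) is
+// purely illustrative.
+#[allow(dead_code)]
+fn will_save_wait_until_sketch(_params: WillSaveTextDocumentParams) -> Option<Vec<TextEdit>> {
+    let start = Position::new(0, 0);
+    Some(vec![TextEdit::new(Range::new(start, start), "\n".to_string())])
+}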
+
+/// The workspace/applyEdit request is sent from the server to the client to modify resources on the
+/// client side.
+#[derive(Debug)]
+pub enum ApplyWorkspaceEdit {}
+
+impl Request for ApplyWorkspaceEdit {
+ type Params = ApplyWorkspaceEditParams;
+ type Result = ApplyWorkspaceEditResponse;
+ const METHOD: &'static str = "workspace/applyEdit";
+}
+
+/// The workspace/configuration request is sent from the server to the client to fetch configuration settings
+/// from the client. The request can fetch several configuration settings in one roundtrip.
+/// The order of the returned configuration settings corresponds to the order of the passed ConfigurationItems
+/// (e.g. the first item in the response is the result for the first configuration item in the params).
+///
+/// A ConfigurationItem consists of the configuration section to ask for and an additional scope URI.
+/// The configuration section asked for is defined by the server and doesn’t necessarily need to correspond to
+/// the configuration store used by the client. So a server might ask for a configuration cpp.formatterOptions
+/// but the client stores the configuration in an XML store laid out differently.
+/// It is up to the client to do the necessary conversion. If a scope URI is provided the client should return
+/// the setting scoped to the provided resource. If the client for example uses EditorConfig to manage its
+/// settings the configuration should be returned for the passed resource URI. If the client can’t provide a
+/// configuration setting for a given scope, then null needs to be present in the returned array.
+#[derive(Debug)]
+pub enum WorkspaceConfiguration {}
+
+impl Request for WorkspaceConfiguration {
+ type Params = ConfigurationParams;
+ type Result = Vec<Value>;
+ const METHOD: &'static str = "workspace/configuration";
+}
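+
+// A sketch of the round trip described above, written as raw JSON (via the
+// crate's serde_json dependency) to avoid tying the example to particular
+// struct fields. The section names and scope URI are made up; the point is that
+// results are positional and `null` marks a scope with no setting.
+#[cfg(test)]
+mod workspace_configuration_sketch {
+    #[test]
+    fn results_align_with_requested_items_by_index() {
+        let params = serde_json::json!({
+            "items": [
+                { "scopeUri": "file:///project/a.cpp", "section": "cpp.formatterOptions" },
+                { "section": "editor.tabSize" }
+            ]
+        });
+        // The client answers with one entry per requested item, in order.
+        let result = serde_json::json!([{ "indentWidth": 4 }, null]);
+        assert_eq!(
+            params["items"].as_array().unwrap().len(),
+            result.as_array().unwrap().len()
+        );
+    }
+}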
+
+/// The code action request is sent from the client to the server to compute commands for a given text document
+/// and range. The request is triggered when the user moves the cursor into a problem marker in the editor or
+/// presses the lightbulb associated with a marker.
+#[derive(Debug)]
+pub enum CodeActionRequest {}
+
+impl Request for CodeActionRequest {
+ type Params = CodeActionParams;
+ type Result = Option<CodeActionResponse>;
+ const METHOD: &'static str = "textDocument/codeAction";
+}
+
+/// The request is sent from the client to the server to resolve additional information for a given code action.
+/// This is usually used to compute the `edit` property of a code action to avoid its unnecessary computation
+/// during the `textDocument/codeAction` request.
+///
+/// since 3.16.0
+#[derive(Debug)]
+pub enum CodeActionResolveRequest {}
+
+impl Request for CodeActionResolveRequest {
+ type Params = CodeAction;
+ type Result = CodeAction;
+ const METHOD: &'static str = "codeAction/resolve";
+}
+
+/// The code lens request is sent from the client to the server to compute code lenses for a given text document.
+#[derive(Debug)]
+pub enum CodeLensRequest {}
+
+impl Request for CodeLensRequest {
+ type Params = CodeLensParams;
+ type Result = Option<Vec<CodeLens>>;
+ const METHOD: &'static str = "textDocument/codeLens";
+}
+
+/// The code lens resolve request is sent from the client to the server to resolve the command for a
+/// given code lens item.
+#[derive(Debug)]
+pub enum CodeLensResolve {}
+
+impl Request for CodeLensResolve {
+ type Params = CodeLens;
+ type Result = CodeLens;
+ const METHOD: &'static str = "codeLens/resolve";
+}
+
+/// The document links request is sent from the client to the server to request the location of links in a document.
+#[derive(Debug)]
+pub enum DocumentLinkRequest {}
+
+impl Request for DocumentLinkRequest {
+ type Params = DocumentLinkParams;
+ type Result = Option<Vec<DocumentLink>>;
+ const METHOD: &'static str = "textDocument/documentLink";
+}
+
+/// The document link resolve request is sent from the client to the server to resolve the target of
+/// a given document link.
+#[derive(Debug)]
+pub enum DocumentLinkResolve {}
+
+impl Request for DocumentLinkResolve {
+ type Params = DocumentLink;
+ type Result = DocumentLink;
+ const METHOD: &'static str = "documentLink/resolve";
+}
+
+/// The document formatting request is sent from the client to the server to format a whole document.
+#[derive(Debug)]
+pub enum Formatting {}
+
+impl Request for Formatting {
+ type Params = DocumentFormattingParams;
+ type Result = Option<Vec<TextEdit>>;
+ const METHOD: &'static str = "textDocument/formatting";
+}
+
+/// The document range formatting request is sent from the client to the server to format a given range in a document.
+#[derive(Debug)]
+pub enum RangeFormatting {}
+
+impl Request for RangeFormatting {
+ type Params = DocumentRangeFormattingParams;
+ type Result = Option<Vec<TextEdit>>;
+ const METHOD: &'static str = "textDocument/rangeFormatting";
+}
+
+/// The document on type formatting request is sent from the client to the server to format parts of
+/// the document during typing.
+#[derive(Debug)]
+pub enum OnTypeFormatting {}
+
+impl Request for OnTypeFormatting {
+ type Params = DocumentOnTypeFormattingParams;
+ type Result = Option<Vec<TextEdit>>;
+ const METHOD: &'static str = "textDocument/onTypeFormatting";
+}
+
+/// The linked editing request is sent from the client to the server to return for a given position in a document
+/// the range of the symbol at the position and all ranges that have the same content.
+/// Optionally a word pattern can be returned to describe valid contents. A rename to one of the ranges can be applied
+/// to all other ranges if the new content is valid. If no result-specific word pattern is provided, the word pattern from
+/// the client’s language configuration is used.
+#[derive(Debug)]
+pub enum LinkedEditingRange {}
+
+impl Request for LinkedEditingRange {
+ type Params = LinkedEditingRangeParams;
+ type Result = Option<LinkedEditingRanges>;
+ const METHOD: &'static str = "textDocument/linkedEditingRange";
+}
+
+/// The rename request is sent from the client to the server to perform a workspace-wide rename of a symbol.
+#[derive(Debug)]
+pub enum Rename {}
+
+impl Request for Rename {
+ type Params = RenameParams;
+ type Result = Option<WorkspaceEdit>;
+ const METHOD: &'static str = "textDocument/rename";
+}
+
+/// The document color request is sent from the client to the server to list all color references found in a given text document.
+/// Along with the range, a color value in RGB is returned.
+#[derive(Debug)]
+pub enum DocumentColor {}
+
+impl Request for DocumentColor {
+ type Params = DocumentColorParams;
+ type Result = Vec<ColorInformation>;
+ const METHOD: &'static str = "textDocument/documentColor";
+}
+
+/// The color presentation request is sent from the client to the server to obtain a list of presentations for a color value
+/// at a given location.
+#[derive(Debug)]
+pub enum ColorPresentationRequest {}
+
+impl Request for ColorPresentationRequest {
+ type Params = ColorPresentationParams;
+ type Result = Vec<ColorPresentation>;
+ const METHOD: &'static str = "textDocument/colorPresentation";
+}
+
+/// The folding range request is sent from the client to the server to return all folding ranges found in a given text document.
+#[derive(Debug)]
+pub enum FoldingRangeRequest {}
+
+impl Request for FoldingRangeRequest {
+ type Params = FoldingRangeParams;
+ type Result = Option<Vec<FoldingRange>>;
+ const METHOD: &'static str = "textDocument/foldingRange";
+}
+
+/// The prepare rename request is sent from the client to the server to setup and test the validity of a rename operation
+/// at a given location.
+#[derive(Debug)]
+pub enum PrepareRenameRequest {}
+
+impl Request for PrepareRenameRequest {
+ type Params = TextDocumentPositionParams;
+ type Result = Option<PrepareRenameResponse>;
+ const METHOD: &'static str = "textDocument/prepareRename";
+}
+
+/// The workspace/workspaceFolders request is sent from the server to the client to fetch the current open list of
+/// workspace folders. Returns null in the response if only a single file is open in the tool.
+/// Returns an empty array if a workspace is open but no folders are configured.
+#[derive(Debug)]
+pub enum WorkspaceFoldersRequest {}
+
+impl Request for WorkspaceFoldersRequest {
+ type Params = ();
+ type Result = Option<Vec<WorkspaceFolder>>;
+ const METHOD: &'static str = "workspace/workspaceFolders";
+}
+
+/// The `window/workDoneProgress/create` request is sent from the server
+/// to the client to ask the client to create a work done progress.
+#[derive(Debug)]
+pub enum WorkDoneProgressCreate {}
+
+impl Request for WorkDoneProgressCreate {
+ type Params = WorkDoneProgressCreateParams;
+ type Result = ();
+ const METHOD: &'static str = "window/workDoneProgress/create";
+}
+
+/// The selection range request is sent from the client to the server to return
+/// suggested selection ranges at given positions. A selection range is a range
+/// around the cursor position which the user might be interested in selecting.
+///
+/// A selection range in the return array is for the position in the provided parameters at the same index.
+/// Therefore `positions[i]` must be contained in `result[i].range`.
+///
+/// Typically, but not necessarily, selection ranges correspond to the nodes of the
+/// syntax tree.
+pub enum SelectionRangeRequest {}
+
+impl Request for SelectionRangeRequest {
+ type Params = SelectionRangeParams;
+ type Result = Option<Vec<SelectionRange>>;
+ const METHOD: &'static str = "textDocument/selectionRange";
+}
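+
+// A check of the invariant stated above (`positions[i]` must be contained in
+// `result[i].range`), using the crate's `Position`, `Range` and `SelectionRange`
+// types. The concrete coordinates are arbitrary.
+#[cfg(test)]
+mod selection_range_invariant_sketch {
+    use super::*;
+
+    fn contains(range: &Range, pos: &Position) -> bool {
+        (range.start.line, range.start.character) <= (pos.line, pos.character)
+            && (pos.line, pos.character) <= (range.end.line, range.end.character)
+    }
+
+    #[test]
+    fn each_position_lies_inside_its_selection_range() {
+        let positions = vec![Position::new(2, 5)];
+        let result = vec![SelectionRange {
+            range: Range::new(Position::new(2, 0), Position::new(2, 10)),
+            parent: None,
+        }];
+        for (pos, selection) in positions.iter().zip(result.iter()) {
+            assert!(contains(&selection.range, pos));
+        }
+    }
+}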
+
+pub enum CallHierarchyPrepare {}
+
+impl Request for CallHierarchyPrepare {
+ type Params = CallHierarchyPrepareParams;
+ type Result = Option<Vec<CallHierarchyItem>>;
+ const METHOD: &'static str = "textDocument/prepareCallHierarchy";
+}
+
+pub enum CallHierarchyIncomingCalls {}
+
+impl Request for CallHierarchyIncomingCalls {
+ type Params = CallHierarchyIncomingCallsParams;
+ type Result = Option<Vec<CallHierarchyIncomingCall>>;
+ const METHOD: &'static str = "callHierarchy/incomingCalls";
+}
+
+pub enum CallHierarchyOutgoingCalls {}
+
+impl Request for CallHierarchyOutgoingCalls {
+ type Params = CallHierarchyOutgoingCallsParams;
+ type Result = Option<Vec<CallHierarchyOutgoingCall>>;
+ const METHOD: &'static str = "callHierarchy/outgoingCalls";
+}
+
+pub enum SemanticTokensFullRequest {}
+
+impl Request for SemanticTokensFullRequest {
+ type Params = SemanticTokensParams;
+ type Result = Option<SemanticTokensResult>;
+ const METHOD: &'static str = "textDocument/semanticTokens/full";
+}
+
+pub enum SemanticTokensFullDeltaRequest {}
+
+impl Request for SemanticTokensFullDeltaRequest {
+ type Params = SemanticTokensDeltaParams;
+ type Result = Option<SemanticTokensFullDeltaResult>;
+ const METHOD: &'static str = "textDocument/semanticTokens/full/delta";
+}
+
+pub enum SemanticTokensRangeRequest {}
+
+impl Request for SemanticTokensRangeRequest {
+ type Params = SemanticTokensRangeParams;
+ type Result = Option<SemanticTokensRangeResult>;
+ const METHOD: &'static str = "textDocument/semanticTokens/range";
+}
+
+/// The `workspace/semanticTokens/refresh` request is sent from the server to the client.
+/// Servers can use it to ask clients to refresh the editors for which this server provides semantic tokens.
+/// As a result the client should ask the server to recompute the semantic tokens for these editors.
+/// This is useful if a server detects a project wide configuration change which requires a re-calculation of all semantic tokens.
+/// Note that the client still has the freedom to delay the re-calculation of the semantic tokens if for example an editor is currently not visible.
+pub enum SemanticTokensRefresh {}
+
+impl Request for SemanticTokensRefresh {
+ type Params = ();
+ type Result = ();
+ const METHOD: &'static str = "workspace/semanticTokens/refresh";
+}
+
+/// The workspace/codeLens/refresh request is sent from the server to the client.
+/// Servers can use it to ask clients to refresh the code lenses currently shown in editors.
+/// As a result the client should ask the server to recompute the code lenses for these editors.
+/// This is useful if a server detects a configuration change which requires a re-calculation of all code lenses.
+/// Note that the client still has the freedom to delay the re-calculation of the code lenses if for example an editor is currently not visible.
+pub enum CodeLensRefresh {}
+
+impl Request for CodeLensRefresh {
+ type Params = ();
+ type Result = ();
+ const METHOD: &'static str = "workspace/codeLens/refresh";
+}
+
+/// The will create files request is sent from the client to the server before files are actually created,
+/// as long as the creation is triggered from within the client. The request can return a WorkspaceEdit
+/// which will be applied to the workspace before the files are created. Please note that clients might
+/// drop results if computing the edit took too long or if a server constantly fails on this request.
+/// This is done to keep creates fast and reliable.
+pub enum WillCreateFiles {}
+
+impl Request for WillCreateFiles {
+ type Params = CreateFilesParams;
+ type Result = Option<WorkspaceEdit>;
+ const METHOD: &'static str = "workspace/willCreateFiles";
+}
+
+/// The will rename files request is sent from the client to the server before files are actually renamed,
+/// as long as the rename is triggered from within the client. The request can return a WorkspaceEdit
+/// which will be applied to the workspace before the files are renamed. Please note that clients might
+/// drop results if computing the edit took too long or if a server constantly fails on this request.
+/// This is done to keep renames fast and reliable.
+pub enum WillRenameFiles {}
+
+impl Request for WillRenameFiles {
+ type Params = RenameFilesParams;
+ type Result = Option<WorkspaceEdit>;
+ const METHOD: &'static str = "workspace/willRenameFiles";
+}
+
+/// The will delete files request is sent from the client to the server before files are actually deleted,
+/// as long as the deletion is triggered from within the client. The request can return a WorkspaceEdit
+/// which will be applied to the workspace before the files are deleted. Please note that clients might
+/// drop results if computing the edit took too long or if a server constantly fails on this request.
+/// This is done to keep deletes fast and reliable.
+pub enum WillDeleteFiles {}
+
+impl Request for WillDeleteFiles {
+ type Params = DeleteFilesParams;
+ type Result = Option<WorkspaceEdit>;
+ const METHOD: &'static str = "workspace/willDeleteFiles";
+}
+
+/// The show document request is sent from a server to a client to ask the client to display a particular document in the user interface.
+pub enum ShowDocument {}
+
+impl Request for ShowDocument {
+ type Params = ShowDocumentParams;
+ type Result = ShowDocumentResult;
+ const METHOD: &'static str = "window/showDocument";
+}
+
+pub enum MonikerRequest {}
+
+impl Request for MonikerRequest {
+ type Params = MonikerParams;
+ type Result = Option<Vec<Moniker>>;
+ const METHOD: &'static str = "textDocument/moniker";
+}
+
+/// The inlay hints request is sent from the client to the server to compute inlay hints for a given
+/// [text document, range] tuple that may be rendered in the editor in place with other text.
+pub enum InlayHintRequest {}
+
+impl Request for InlayHintRequest {
+ type Params = InlayHintParams;
+ type Result = Option<Vec<InlayHint>>;
+ const METHOD: &'static str = "textDocument/inlayHint";
+}
+
+/// The `inlayHint/resolve` request is sent from the client to the server to resolve additional
+/// information for a given inlay hint. This is usually used to compute the tooltip, location or
+/// command properties of an inlay hint’s label part to avoid its unnecessary computation during the
+/// `textDocument/inlayHint` request.
+pub enum InlayHintResolveRequest {}
+
+impl Request for InlayHintResolveRequest {
+ type Params = InlayHint;
+ type Result = InlayHint;
+ const METHOD: &'static str = "inlayHint/resolve";
+}
+
+/// The `workspace/inlayHint/refresh` request is sent from the server to the client. Servers can use
+/// it to ask clients to refresh the inlay hints currently shown in editors. As a result the client
+/// should ask the server to recompute the inlay hints for these editors. This is useful if a server
+/// detects a configuration change which requires a re-calculation of all inlay hints. Note that the
+/// client still has the freedom to delay the re-calculation of the inlay hints if for example an
+/// editor is currently not visible.
+pub enum InlayHintRefreshRequest {}
+
+impl Request for InlayHintRefreshRequest {
+ type Params = ();
+ type Result = ();
+ const METHOD: &'static str = "workspace/inlayHint/refresh";
+}
+
+/// The inline value request is sent from the client to the server to compute inline values for a
+/// given text document that may be rendered in the editor at the end of lines.
+pub enum InlineValueRequest {}
+
+impl Request for InlineValueRequest {
+ type Params = InlineValueParams;
+ type Result = Option<InlineValue>;
+ const METHOD: &'static str = "textDocument/inlineValue";
+}
+
+/// The `workspace/inlineValue/refresh` request is sent from the server to the client. Servers can
+/// use it to ask clients to refresh the inline values currently shown in editors. As a result the
+/// client should ask the server to recompute the inline values for these editors. This is useful if
+/// a server detects a configuration change which requires a re-calculation of all inline values.
+/// Note that the client still has the freedom to delay the re-calculation of the inline values if
+/// for example an editor is currently not visible.
+pub enum InlineValueRefreshRequest {}
+
+impl Request for InlineValueRefreshRequest {
+ type Params = ();
+ type Result = ();
+ const METHOD: &'static str = "workspace/inlineValue/refresh";
+}
+
+/// The type hierarchy request is sent from the client to the server to return a type hierarchy for
+/// the language element of given text document positions. Will return null if the server couldn’t
+/// infer a valid type from the position. The type hierarchy requests are executed in two steps:
+///
+/// 1. first a type hierarchy item is prepared for the given text document position.
+/// 2. for a type hierarchy item the supertype or subtype type hierarchy items are resolved.
+pub enum TypeHierarchyPrepare {}
+
+impl Request for TypeHierarchyPrepare {
+ type Params = TypeHierarchyPrepareParams;
+ type Result = Option<Vec<TypeHierarchyItem>>;
+ const METHOD: &'static str = "textDocument/prepareTypeHierarchy";
+}
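+
+// A sketch of the two-step flow described above, using only the METHOD
+// constants defined in this file. The `send` closure stands in for a real
+// JSON-RPC transport and is hypothetical.
+#[allow(dead_code)]
+fn type_hierarchy_flow_sketch(send: impl Fn(&str)) {
+    // Step 1: prepare a type hierarchy item for a text document position.
+    send(TypeHierarchyPrepare::METHOD);
+    // Step 2: resolve supertypes and/or subtypes for an item returned by step 1.
+    send(TypeHierarchySupertypes::METHOD);
+    send(TypeHierarchySubtypes::METHOD);
+}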
+
+/// The `typeHierarchy/supertypes` request is sent from the client to the server to resolve the
+/// supertypes for a given type hierarchy item. Will return null if the server couldn’t infer a
+/// valid type from item in the params. The request doesn’t define its own client and server
+/// capabilities. It is only issued if a server registers for the
+/// `textDocument/prepareTypeHierarchy` request.
+pub enum TypeHierarchySupertypes {}
+
+impl Request for TypeHierarchySupertypes {
+ type Params = TypeHierarchySupertypesParams;
+ type Result = Option<Vec<TypeHierarchyItem>>;
+ const METHOD: &'static str = "typeHierarchy/supertypes";
+}
+
+/// The `typeHierarchy/subtypes` request is sent from the client to the server to resolve the
+/// subtypes for a given type hierarchy item. Will return null if the server couldn’t infer a valid
+/// type from item in the params. The request doesn’t define its own client and server capabilities.
+/// It is only issued if a server registers for the textDocument/prepareTypeHierarchy request.
+pub enum TypeHierarchySubtypes {}
+
+impl Request for TypeHierarchySubtypes {
+ type Params = TypeHierarchySubtypesParams;
+ type Result = Option<Vec<TypeHierarchyItem>>;
+ const METHOD: &'static str = "typeHierarchy/subtypes";
+}
+
+#[cfg(test)]
+mod test {
+ use super::*;
+
+ fn fake_call<R>()
+ where
+ R: Request,
+ R::Params: serde::Serialize,
+ R::Result: serde::de::DeserializeOwned,
+ {
+ }
+
+ macro_rules! check_macro {
+ ($name:tt) => {
+ // check whether the macro name matches the method
+ assert_eq!(<lsp_request!($name) as Request>::METHOD, $name);
+ // test whether type checking passes for each component
+ fake_call::<lsp_request!($name)>();
+ };
+ }
+
+ #[test]
+ fn check_macro_definitions() {
+ check_macro!("initialize");
+ check_macro!("shutdown");
+
+ check_macro!("window/showDocument");
+ check_macro!("window/showMessageRequest");
+ check_macro!("window/workDoneProgress/create");
+
+ check_macro!("client/registerCapability");
+ check_macro!("client/unregisterCapability");
+
+ check_macro!("textDocument/willSaveWaitUntil");
+ check_macro!("textDocument/completion");
+ check_macro!("textDocument/hover");
+ check_macro!("textDocument/signatureHelp");
+ check_macro!("textDocument/declaration");
+ check_macro!("textDocument/definition");
+ check_macro!("textDocument/references");
+ check_macro!("textDocument/documentHighlight");
+ check_macro!("textDocument/documentSymbol");
+ check_macro!("textDocument/codeAction");
+ check_macro!("textDocument/codeLens");
+ check_macro!("textDocument/documentLink");
+ check_macro!("textDocument/rangeFormatting");
+ check_macro!("textDocument/onTypeFormatting");
+ check_macro!("textDocument/formatting");
+ check_macro!("textDocument/rename");
+ check_macro!("textDocument/documentColor");
+ check_macro!("textDocument/colorPresentation");
+ check_macro!("textDocument/foldingRange");
+ check_macro!("textDocument/prepareRename");
+ check_macro!("textDocument/implementation");
+ check_macro!("textDocument/selectionRange");
+ check_macro!("textDocument/typeDefinition");
+ check_macro!("textDocument/moniker");
+ check_macro!("textDocument/linkedEditingRange");
+ check_macro!("textDocument/prepareCallHierarchy");
+ check_macro!("textDocument/prepareTypeHierarchy");
+ check_macro!("textDocument/semanticTokens/full");
+ check_macro!("textDocument/semanticTokens/full/delta");
+ check_macro!("textDocument/semanticTokens/range");
+ check_macro!("textDocument/inlayHint");
+ check_macro!("textDocument/inlineValue");
+
+ check_macro!("workspace/applyEdit");
+ check_macro!("workspace/symbol");
+ check_macro!("workspace/executeCommand");
+ check_macro!("workspace/configuration");
+ check_macro!("workspace/willCreateFiles");
+ check_macro!("workspace/willRenameFiles");
+ check_macro!("workspace/willDeleteFiles");
+ check_macro!("workspace/workspaceFolders");
+ check_macro!("workspace/semanticTokens/refresh");
+ check_macro!("workspace/codeLens/refresh");
+ check_macro!("workspace/inlayHint/refresh");
+ check_macro!("workspace/inlineValue/refresh");
+
+ check_macro!("callHierarchy/incomingCalls");
+ check_macro!("callHierarchy/outgoingCalls");
+ check_macro!("codeAction/resolve");
+ check_macro!("codeLens/resolve");
+ check_macro!("completionItem/resolve");
+ check_macro!("documentLink/resolve");
+ check_macro!("inlayHint/resolve");
+ check_macro!("typeHierarchy/subtypes");
+ check_macro!("typeHierarchy/supertypes");
+ }
+
+ #[test]
+ #[cfg(feature = "proposed")]
+ fn check_proposed_macro_definitions() {}
+}
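The pattern in this file generalizes to protocol extensions: any marker type that implements `Request` ties a method string to its parameter and result types, and `lsp_request!` resolves built-in method strings to those marker types. A minimal sketch of a custom request, assuming only the `Params`/`Result` associated types and `METHOD` constant used by the impls above; the `experimental/syntaxTree` method and its params type are hypothetical, not part of lsp-types:

    use lsp_types::request::Request;
    use lsp_types::TextDocumentIdentifier;
    use serde::{Deserialize, Serialize};

    // Hypothetical params for a server-specific extension method.
    #[derive(Debug, Serialize, Deserialize)]
    #[serde(rename_all = "camelCase")]
    pub struct SyntaxTreeParams {
        pub text_document: TextDocumentIdentifier,
    }

    // Marker type: never constructed, it only associates the method string
    // with its params/result types at the type level.
    pub enum SyntaxTreeRequest {}

    impl Request for SyntaxTreeRequest {
        type Params = SyntaxTreeParams;
        type Result = String;
        const METHOD: &'static str = "experimental/syntaxTree";
    }

A server can then match incoming method strings against `SyntaxTreeRequest::METHOD` and deserialize the payload into `SyntaxTreeParams`, which is exactly what the `check_macro!` test above verifies for the built-in methods.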
diff --git a/vendor/lsp-types/src/selection_range.rs b/vendor/lsp-types/src/selection_range.rs
index 048df6f99..9ad110678 100644
--- a/vendor/lsp-types/src/selection_range.rs
+++ b/vendor/lsp-types/src/selection_range.rs
@@ -1,86 +1,86 @@
-use serde::{Deserialize, Serialize};
-
-use crate::{
- PartialResultParams, Position, Range, StaticTextDocumentRegistrationOptions,
- TextDocumentIdentifier, WorkDoneProgressOptions, WorkDoneProgressParams,
-};
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct SelectionRangeClientCapabilities {
- /// Whether implementation supports dynamic registration for selection range
- /// providers. If this is set to `true` the client supports the new
- /// `SelectionRangeRegistrationOptions` return value for the corresponding
- /// server capability as well.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub dynamic_registration: Option<bool>,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-pub struct SelectionRangeOptions {
- #[serde(flatten)]
- pub work_done_progress_options: WorkDoneProgressOptions,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-pub struct SelectionRangeRegistrationOptions {
- #[serde(flatten)]
- pub selection_range_options: SelectionRangeOptions,
-
- #[serde(flatten)]
- pub registration_options: StaticTextDocumentRegistrationOptions,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(untagged)]
-pub enum SelectionRangeProviderCapability {
- Simple(bool),
- Options(SelectionRangeOptions),
- RegistrationOptions(SelectionRangeRegistrationOptions),
-}
-
-impl From<SelectionRangeRegistrationOptions> for SelectionRangeProviderCapability {
- fn from(from: SelectionRangeRegistrationOptions) -> Self {
- Self::RegistrationOptions(from)
- }
-}
-
-impl From<SelectionRangeOptions> for SelectionRangeProviderCapability {
- fn from(from: SelectionRangeOptions) -> Self {
- Self::Options(from)
- }
-}
-
-impl From<bool> for SelectionRangeProviderCapability {
- fn from(from: bool) -> Self {
- Self::Simple(from)
- }
-}
-
-/// A parameter literal used in selection range requests.
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct SelectionRangeParams {
- /// The text document.
- pub text_document: TextDocumentIdentifier,
-
- /// The positions inside the text document.
- pub positions: Vec<Position>,
-
- #[serde(flatten)]
- pub work_done_progress_params: WorkDoneProgressParams,
-
- #[serde(flatten)]
- pub partial_result_params: PartialResultParams,
-}
-
-/// Represents a selection range.
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct SelectionRange {
- /// Range of the selection.
- pub range: Range,
-
- /// The parent selection range containing this range.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub parent: Option<Box<SelectionRange>>,
-}
+use serde::{Deserialize, Serialize};
+
+use crate::{
+ PartialResultParams, Position, Range, StaticTextDocumentRegistrationOptions,
+ TextDocumentIdentifier, WorkDoneProgressOptions, WorkDoneProgressParams,
+};
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct SelectionRangeClientCapabilities {
+ /// Whether implementation supports dynamic registration for selection range
+ /// providers. If this is set to `true` the client supports the new
+ /// `SelectionRangeRegistrationOptions` return value for the corresponding
+ /// server capability as well.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub dynamic_registration: Option<bool>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+pub struct SelectionRangeOptions {
+ #[serde(flatten)]
+ pub work_done_progress_options: WorkDoneProgressOptions,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+pub struct SelectionRangeRegistrationOptions {
+ #[serde(flatten)]
+ pub selection_range_options: SelectionRangeOptions,
+
+ #[serde(flatten)]
+ pub registration_options: StaticTextDocumentRegistrationOptions,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(untagged)]
+pub enum SelectionRangeProviderCapability {
+ Simple(bool),
+ Options(SelectionRangeOptions),
+ RegistrationOptions(SelectionRangeRegistrationOptions),
+}
+
+impl From<SelectionRangeRegistrationOptions> for SelectionRangeProviderCapability {
+ fn from(from: SelectionRangeRegistrationOptions) -> Self {
+ Self::RegistrationOptions(from)
+ }
+}
+
+impl From<SelectionRangeOptions> for SelectionRangeProviderCapability {
+ fn from(from: SelectionRangeOptions) -> Self {
+ Self::Options(from)
+ }
+}
+
+impl From<bool> for SelectionRangeProviderCapability {
+ fn from(from: bool) -> Self {
+ Self::Simple(from)
+ }
+}
+
+/// A parameter literal used in selection range requests.
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct SelectionRangeParams {
+ /// The text document.
+ pub text_document: TextDocumentIdentifier,
+
+ /// The positions inside the text document.
+ pub positions: Vec<Position>,
+
+ #[serde(flatten)]
+ pub work_done_progress_params: WorkDoneProgressParams,
+
+ #[serde(flatten)]
+ pub partial_result_params: PartialResultParams,
+}
+
+/// Represents a selection range.
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct SelectionRange {
+ /// Range of the selection.
+ pub range: Range,
+
+ /// The parent selection range containing this range.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub parent: Option<Box<SelectionRange>>,
+}
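For orientation, a `textDocument/selectionRange` response nests ranges from innermost to outermost through the `parent` chain shown above. A minimal sketch of how a client might flatten that chain into successive expansion steps (illustration only, not an lsp-types API):

    use lsp_types::{Range, SelectionRange};

    // Walk a SelectionRange outward through its `parent` links, collecting
    // each enclosing range in order (innermost first).
    fn expansion_steps(selection: &SelectionRange) -> Vec<Range> {
        let mut steps = vec![selection.range.clone()];
        let mut current = selection;
        while let Some(parent) = current.parent.as_deref() {
            steps.push(parent.range.clone());
            current = parent;
        }
        steps
    }

Each successive "expand selection" action in an editor can then simply step to the next entry of the returned vector.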
diff --git a/vendor/lsp-types/src/semantic_tokens.rs b/vendor/lsp-types/src/semantic_tokens.rs
index f1b6d53d2..0b881850c 100644
--- a/vendor/lsp-types/src/semantic_tokens.rs
+++ b/vendor/lsp-types/src/semantic_tokens.rs
@@ -1,739 +1,733 @@
-use std::borrow::Cow;
-
-use serde::ser::SerializeSeq;
-use serde::{Deserialize, Serialize};
-
-use crate::{
- PartialResultParams, Range, StaticRegistrationOptions, TextDocumentIdentifier,
- TextDocumentRegistrationOptions, WorkDoneProgressOptions, WorkDoneProgressParams,
-};
-/// A set of predefined token types. This set is not fixed
-/// and clients can specify additional token types via the
-/// corresponding client capabilities.
-/// since @3.16.0
-#[derive(Debug, Eq, PartialEq, Hash, PartialOrd, Clone, Deserialize, Serialize)]
-pub struct SemanticTokenType(Cow<'static, str>);
-
-impl SemanticTokenType {
- pub const NAMESPACE: SemanticTokenType = SemanticTokenType::new("namespace");
- pub const TYPE: SemanticTokenType = SemanticTokenType::new("type");
- pub const CLASS: SemanticTokenType = SemanticTokenType::new("class");
- pub const ENUM: SemanticTokenType = SemanticTokenType::new("enum");
- pub const INTERFACE: SemanticTokenType = SemanticTokenType::new("interface");
- pub const STRUCT: SemanticTokenType = SemanticTokenType::new("struct");
- pub const TYPE_PARAMETER: SemanticTokenType = SemanticTokenType::new("typeParameter");
- pub const PARAMETER: SemanticTokenType = SemanticTokenType::new("parameter");
- pub const VARIABLE: SemanticTokenType = SemanticTokenType::new("variable");
- pub const PROPERTY: SemanticTokenType = SemanticTokenType::new("property");
- pub const ENUM_MEMBER: SemanticTokenType = SemanticTokenType::new("enumMember");
- pub const EVENT: SemanticTokenType = SemanticTokenType::new("event");
- pub const FUNCTION: SemanticTokenType = SemanticTokenType::new("function");
- pub const METHOD: SemanticTokenType = SemanticTokenType::new("method");
- pub const MACRO: SemanticTokenType = SemanticTokenType::new("macro");
- pub const KEYWORD: SemanticTokenType = SemanticTokenType::new("keyword");
- pub const MODIFIER: SemanticTokenType = SemanticTokenType::new("modifier");
- pub const COMMENT: SemanticTokenType = SemanticTokenType::new("comment");
- pub const STRING: SemanticTokenType = SemanticTokenType::new("string");
- pub const NUMBER: SemanticTokenType = SemanticTokenType::new("number");
- pub const REGEXP: SemanticTokenType = SemanticTokenType::new("regexp");
- pub const OPERATOR: SemanticTokenType = SemanticTokenType::new("operator");
-
- /// since @3.17.0
- #[cfg(feature = "proposed")]
- pub const DECORATOR: SemanticTokenType = SemanticTokenType::new("decorator");
-
- pub const fn new(tag: &'static str) -> Self {
- SemanticTokenType(Cow::Borrowed(tag))
- }
-
- pub fn as_str(&self) -> &str {
- &self.0
- }
-}
-
-impl From<String> for SemanticTokenType {
- fn from(from: String) -> Self {
- SemanticTokenType(Cow::from(from))
- }
-}
-
-impl From<&'static str> for SemanticTokenType {
- fn from(from: &'static str) -> Self {
- SemanticTokenType::new(from)
- }
-}
-
-/// A set of predefined token modifiers. This set is not fixed
-/// and clients can specify additional token types via the
-/// corresponding client capabilities.
-///
-/// @since 3.16.0
-#[derive(Debug, Eq, PartialEq, Hash, PartialOrd, Clone, Deserialize, Serialize)]
-pub struct SemanticTokenModifier(Cow<'static, str>);
-
-impl SemanticTokenModifier {
- pub const DECLARATION: SemanticTokenModifier = SemanticTokenModifier::new("declaration");
- pub const DEFINITION: SemanticTokenModifier = SemanticTokenModifier::new("definition");
- pub const READONLY: SemanticTokenModifier = SemanticTokenModifier::new("readonly");
- pub const STATIC: SemanticTokenModifier = SemanticTokenModifier::new("static");
- pub const DEPRECATED: SemanticTokenModifier = SemanticTokenModifier::new("deprecated");
- pub const ABSTRACT: SemanticTokenModifier = SemanticTokenModifier::new("abstract");
- pub const ASYNC: SemanticTokenModifier = SemanticTokenModifier::new("async");
- pub const MODIFICATION: SemanticTokenModifier = SemanticTokenModifier::new("modification");
- pub const DOCUMENTATION: SemanticTokenModifier = SemanticTokenModifier::new("documentation");
- pub const DEFAULT_LIBRARY: SemanticTokenModifier = SemanticTokenModifier::new("defaultLibrary");
-
- pub const fn new(tag: &'static str) -> Self {
- SemanticTokenModifier(Cow::Borrowed(tag))
- }
-
- pub fn as_str(&self) -> &str {
- &self.0
- }
-}
-
-impl From<String> for SemanticTokenModifier {
- fn from(from: String) -> Self {
- SemanticTokenModifier(Cow::from(from))
- }
-}
-
-impl From<&'static str> for SemanticTokenModifier {
- fn from(from: &'static str) -> Self {
- SemanticTokenModifier::new(from)
- }
-}
-
-#[derive(Debug, Eq, PartialEq, Hash, PartialOrd, Clone, Deserialize, Serialize)]
-pub struct TokenFormat(Cow<'static, str>);
-
-impl TokenFormat {
- pub const RELATIVE: TokenFormat = TokenFormat::new("relative");
-
- pub const fn new(tag: &'static str) -> Self {
- TokenFormat(Cow::Borrowed(tag))
- }
-
- pub fn as_str(&self) -> &str {
- &self.0
- }
-}
-
-impl From<String> for TokenFormat {
- fn from(from: String) -> Self {
- TokenFormat(Cow::from(from))
- }
-}
-
-impl From<&'static str> for TokenFormat {
- fn from(from: &'static str) -> Self {
- TokenFormat::new(from)
- }
-}
-
-/// @since 3.16.0
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct SemanticTokensLegend {
- /// The token types a server uses.
- pub token_types: Vec<SemanticTokenType>,
-
- /// The token modifiers a server uses.
- pub token_modifiers: Vec<SemanticTokenModifier>,
-}
-
-/// The actual tokens. For a detailed description about how the data is
-/// structured please see
-/// <https://github.com/microsoft/vscode-extension-samples/blob/5ae1f7787122812dcc84e37427ca90af5ee09f14/semantic-tokens-sample/vscode.proposed.d.ts#L71>
-#[derive(Debug, Eq, PartialEq, Copy, Clone, Default)]
-pub struct SemanticToken {
- pub delta_line: u32,
- pub delta_start: u32,
- pub length: u32,
- pub token_type: u32,
- pub token_modifiers_bitset: u32,
-}
-
-impl SemanticToken {
- fn deserialize_tokens<'de, D>(deserializer: D) -> Result<Vec<SemanticToken>, D::Error>
- where
- D: serde::Deserializer<'de>,
- {
- let data = Vec::<u32>::deserialize(deserializer)?;
- let chunks = data.chunks_exact(5);
-
- if !chunks.remainder().is_empty() {
- return Result::Err(serde::de::Error::custom("Length is not divisible by 5"));
- }
-
- Result::Ok(
- chunks
- .map(|chunk| SemanticToken {
- delta_line: chunk[0],
- delta_start: chunk[1],
- length: chunk[2],
- token_type: chunk[3],
- token_modifiers_bitset: chunk[4],
- })
- .collect(),
- )
- }
-
- fn serialize_tokens<S>(tokens: &[SemanticToken], serializer: S) -> Result<S::Ok, S::Error>
- where
- S: serde::Serializer,
- {
- let mut seq = serializer.serialize_seq(Some(tokens.len() * 5))?;
- for token in tokens.iter() {
- seq.serialize_element(&token.delta_line)?;
- seq.serialize_element(&token.delta_start)?;
- seq.serialize_element(&token.length)?;
- seq.serialize_element(&token.token_type)?;
- seq.serialize_element(&token.token_modifiers_bitset)?;
- }
- seq.end()
- }
-
- fn deserialize_tokens_opt<'de, D>(
- deserializer: D,
- ) -> Result<Option<Vec<SemanticToken>>, D::Error>
- where
- D: serde::Deserializer<'de>,
- {
- #[derive(Deserialize)]
- #[serde(transparent)]
- struct Wrapper {
- #[serde(deserialize_with = "SemanticToken::deserialize_tokens")]
- tokens: Vec<SemanticToken>,
- }
-
- Ok(Option::<Wrapper>::deserialize(deserializer)?.map(|wrapper| wrapper.tokens))
- }
-
- fn serialize_tokens_opt<S>(
- data: &Option<Vec<SemanticToken>>,
- serializer: S,
- ) -> Result<S::Ok, S::Error>
- where
- S: serde::Serializer,
- {
- #[derive(Serialize)]
- #[serde(transparent)]
- struct Wrapper {
- #[serde(serialize_with = "SemanticToken::serialize_tokens")]
- tokens: Vec<SemanticToken>,
- }
-
- let opt = data.as_ref().map(|t| Wrapper { tokens: t.to_vec() });
-
- opt.serialize(serializer)
- }
-}
-
-/// @since 3.16.0
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct SemanticTokens {
- /// An optional result id. If provided and clients support delta updating
- /// the client will include the result id in the next semantic token request.
- /// A server can then instead of computing all semantic tokens again simply
- /// send a delta.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub result_id: Option<String>,
-
- /// The actual tokens. For a detailed description about how the data is
- /// structured please see
- /// <https://github.com/microsoft/vscode-extension-samples/blob/5ae1f7787122812dcc84e37427ca90af5ee09f14/semantic-tokens-sample/vscode.proposed.d.ts#L71>
- #[serde(
- deserialize_with = "SemanticToken::deserialize_tokens",
- serialize_with = "SemanticToken::serialize_tokens"
- )]
- pub data: Vec<SemanticToken>,
-}
-
-/// @since 3.16.0
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct SemanticTokensPartialResult {
- #[serde(
- deserialize_with = "SemanticToken::deserialize_tokens",
- serialize_with = "SemanticToken::serialize_tokens"
- )]
- pub data: Vec<SemanticToken>,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-#[serde(untagged)]
-pub enum SemanticTokensResult {
- Tokens(SemanticTokens),
- Partial(SemanticTokensPartialResult),
-}
-
-impl From<SemanticTokens> for SemanticTokensResult {
- fn from(from: SemanticTokens) -> Self {
- SemanticTokensResult::Tokens(from)
- }
-}
-
-impl From<SemanticTokensPartialResult> for SemanticTokensResult {
- fn from(from: SemanticTokensPartialResult) -> Self {
- SemanticTokensResult::Partial(from)
- }
-}
-
-/// @since 3.16.0
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct SemanticTokensEdit {
- pub start: u32,
- pub delete_count: u32,
-
- #[serde(
- default,
- skip_serializing_if = "Option::is_none",
- deserialize_with = "SemanticToken::deserialize_tokens_opt",
- serialize_with = "SemanticToken::serialize_tokens_opt"
- )]
- pub data: Option<Vec<SemanticToken>>,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-#[serde(untagged)]
-pub enum SemanticTokensFullDeltaResult {
- Tokens(SemanticTokens),
- TokensDelta(SemanticTokensDelta),
- PartialTokensDelta { edits: Vec<SemanticTokensEdit> },
-}
-
-impl From<SemanticTokens> for SemanticTokensFullDeltaResult {
- fn from(from: SemanticTokens) -> Self {
- SemanticTokensFullDeltaResult::Tokens(from)
- }
-}
-
-impl From<SemanticTokensDelta> for SemanticTokensFullDeltaResult {
- fn from(from: SemanticTokensDelta) -> Self {
- SemanticTokensFullDeltaResult::TokensDelta(from)
- }
-}
-
-/// @since 3.16.0
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct SemanticTokensDelta {
- #[serde(skip_serializing_if = "Option::is_none")]
- pub result_id: Option<String>,
- /// For a detailed description how these edits are structured please see
- /// <https://github.com/microsoft/vscode-extension-samples/blob/5ae1f7787122812dcc84e37427ca90af5ee09f14/semantic-tokens-sample/vscode.proposed.d.ts#L131>
- pub edits: Vec<SemanticTokensEdit>,
-}
-
-/// Capabilities specific to the `textDocument/semanticTokens/*` requests.
-///
-/// @since 3.16.0
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct SemanticTokensClientCapabilities {
- /// Whether implementation supports dynamic registration. If this is set to `true`
- /// the client supports the new `(TextDocumentRegistrationOptions & StaticRegistrationOptions)`
- /// return value for the corresponding server capability as well.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub dynamic_registration: Option<bool>,
-
- /// Which requests the client supports and might send to the server
- /// depending on the server's capability. Please note that clients might not
- /// show semantic tokens or degrade some of the user experience if a range
- /// or full request is advertised by the client but not provided by the
- /// server. If for example the client capability `requests.full` and
- /// `request.range` are both set to true but the server only provides a
- /// range provider the client might not render a minimap correctly or might
- /// even decide to not show any semantic tokens at all.
- pub requests: SemanticTokensClientCapabilitiesRequests,
-
- /// The token types that the client supports.
- pub token_types: Vec<SemanticTokenType>,
-
- /// The token modifiers that the client supports.
- pub token_modifiers: Vec<SemanticTokenModifier>,
-
- /// The token formats the clients supports.
- pub formats: Vec<TokenFormat>,
-
- /// Whether the client supports tokens that can overlap each other.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub overlapping_token_support: Option<bool>,
-
- /// Whether the client supports tokens that can span multiple lines.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub multiline_token_support: Option<bool>,
-
- /// Whether the client allows the server to actively cancel a
- /// semantic token request, e.g. supports returning
- /// ErrorCodes.ServerCancelled. If a server does the client
- /// needs to retrigger the request.
- ///
- /// since @3.17.0
- #[cfg(feature = "proposed")]
- #[serde(skip_serializing_if = "Option::is_none")]
- pub server_cancel_support: Option<bool>,
-
-
- /// Whether the client uses semantic tokens to augment existing
- /// syntax tokens. If set to `true` client side created syntax
- /// tokens and semantic tokens are both used for colorization. If
- /// set to `false` the client only uses the returned semantic tokens
- /// for colorization.
- ///
- /// If the value is `undefined` then the client behavior is not
- /// specified.
- ///
- /// @since 3.17.0
- #[cfg(feature = "proposed")]
- #[serde(skip_serializing_if = "Option::is_none")]
- pub augments_syntax_tokens: Option<bool>,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct SemanticTokensClientCapabilitiesRequests {
- /// The client will send the `textDocument/semanticTokens/range` request if the server provides a corresponding handler.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub range: Option<bool>,
-
- /// The client will send the `textDocument/semanticTokens/full` request if the server provides a corresponding handler.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub full: Option<SemanticTokensFullOptions>,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-#[serde(untagged)]
-pub enum SemanticTokensFullOptions {
- Bool(bool),
- Delta {
- /// The client will send the `textDocument/semanticTokens/full/delta` request if the server provides a corresponding handler.
- /// The server supports deltas for full documents.
- #[serde(skip_serializing_if = "Option::is_none")]
- delta: Option<bool>,
- },
-}
-
-/// @since 3.16.0
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct SemanticTokensOptions {
- #[serde(flatten)]
- pub work_done_progress_options: WorkDoneProgressOptions,
-
- /// The legend used by the server
- pub legend: SemanticTokensLegend,
-
- /// Server supports providing semantic tokens for a sepcific range
- /// of a document.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub range: Option<bool>,
-
- /// Server supports providing semantic tokens for a full document.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub full: Option<SemanticTokensFullOptions>,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct SemanticTokensRegistrationOptions {
- #[serde(flatten)]
- pub text_document_registration_options: TextDocumentRegistrationOptions,
-
- #[serde(flatten)]
- pub semantic_tokens_options: SemanticTokensOptions,
-
- #[serde(flatten)]
- pub static_registration_options: StaticRegistrationOptions,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-#[serde(untagged)]
-pub enum SemanticTokensServerCapabilities {
- SemanticTokensOptions(SemanticTokensOptions),
- SemanticTokensRegistrationOptions(SemanticTokensRegistrationOptions),
-}
-
-impl From<SemanticTokensOptions> for SemanticTokensServerCapabilities {
- fn from(from: SemanticTokensOptions) -> Self {
- SemanticTokensServerCapabilities::SemanticTokensOptions(from)
- }
-}
-
-impl From<SemanticTokensRegistrationOptions> for SemanticTokensServerCapabilities {
- fn from(from: SemanticTokensRegistrationOptions) -> Self {
- SemanticTokensServerCapabilities::SemanticTokensRegistrationOptions(from)
- }
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct SemanticTokensWorkspaceClientCapabilities {
- /// Whether the client implementation supports a refresh request sent from
- /// the server to the client.
- ///
- /// Note that this event is global and will force the client to refresh all
- /// semantic tokens currently shown. It should be used with absolute care
- /// and is useful for situation where a server for example detect a project
- /// wide change that requires such a calculation.
- pub refresh_support: Option<bool>,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct SemanticTokensParams {
- #[serde(flatten)]
- pub work_done_progress_params: WorkDoneProgressParams,
-
- #[serde(flatten)]
- pub partial_result_params: PartialResultParams,
-
- /// The text document.
- pub text_document: TextDocumentIdentifier,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct SemanticTokensDeltaParams {
- #[serde(flatten)]
- pub work_done_progress_params: WorkDoneProgressParams,
-
- #[serde(flatten)]
- pub partial_result_params: PartialResultParams,
-
- /// The text document.
- pub text_document: TextDocumentIdentifier,
-
- /// The result id of a previous response. The result Id can either point to a full response
- /// or a delta response depending on what was recevied last.
- pub previous_result_id: String,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct SemanticTokensRangeParams {
- #[serde(flatten)]
- pub work_done_progress_params: WorkDoneProgressParams,
-
- #[serde(flatten)]
- pub partial_result_params: PartialResultParams,
-
- /// The text document.
- pub text_document: TextDocumentIdentifier,
-
- /// The range the semantic tokens are requested for.
- pub range: Range,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-#[serde(untagged)]
-pub enum SemanticTokensRangeResult {
- Tokens(SemanticTokens),
- Partial(SemanticTokensPartialResult),
-}
-
-impl From<SemanticTokens> for SemanticTokensRangeResult {
- fn from(tokens: SemanticTokens) -> Self {
- SemanticTokensRangeResult::Tokens(tokens)
- }
-}
-
-impl From<SemanticTokensPartialResult> for SemanticTokensRangeResult {
- fn from(partial: SemanticTokensPartialResult) -> Self {
- SemanticTokensRangeResult::Partial(partial)
- }
-}
-
-#[cfg(test)]
-mod tests {
- use super::*;
- use crate::tests::{test_deserialization, test_serialization};
-
- #[test]
- fn test_semantic_tokens_support_serialization() {
- test_serialization(
- &SemanticTokens {
- result_id: None,
- data: vec![],
- },
- r#"{"data":[]}"#,
- );
-
- test_serialization(
- &SemanticTokens {
- result_id: None,
- data: vec![SemanticToken {
- delta_line: 2,
- delta_start: 5,
- length: 3,
- token_type: 0,
- token_modifiers_bitset: 3,
- }],
- },
- r#"{"data":[2,5,3,0,3]}"#,
- );
-
- test_serialization(
- &SemanticTokens {
- result_id: None,
- data: vec![
- SemanticToken {
- delta_line: 2,
- delta_start: 5,
- length: 3,
- token_type: 0,
- token_modifiers_bitset: 3,
- },
- SemanticToken {
- delta_line: 0,
- delta_start: 5,
- length: 4,
- token_type: 1,
- token_modifiers_bitset: 0,
- },
- ],
- },
- r#"{"data":[2,5,3,0,3,0,5,4,1,0]}"#,
- );
- }
-
- #[test]
- fn test_semantic_tokens_support_deserialization() {
- test_deserialization(
- r#"{"data":[]}"#,
- &SemanticTokens {
- result_id: None,
- data: vec![],
- },
- );
-
- test_deserialization(
- r#"{"data":[2,5,3,0,3]}"#,
- &SemanticTokens {
- result_id: None,
- data: vec![SemanticToken {
- delta_line: 2,
- delta_start: 5,
- length: 3,
- token_type: 0,
- token_modifiers_bitset: 3,
- }],
- },
- );
-
- test_deserialization(
- r#"{"data":[2,5,3,0,3,0,5,4,1,0]}"#,
- &SemanticTokens {
- result_id: None,
- data: vec![
- SemanticToken {
- delta_line: 2,
- delta_start: 5,
- length: 3,
- token_type: 0,
- token_modifiers_bitset: 3,
- },
- SemanticToken {
- delta_line: 0,
- delta_start: 5,
- length: 4,
- token_type: 1,
- token_modifiers_bitset: 0,
- },
- ],
- },
- );
- }
-
- #[test]
- #[should_panic]
- fn test_semantic_tokens_support_deserialization_err() {
- test_deserialization(
- r#"{"data":[1]}"#,
- &SemanticTokens {
- result_id: None,
- data: vec![],
- },
- );
- }
-
- #[test]
- fn test_semantic_tokens_edit_support_deserialization() {
- test_deserialization(
- r#"{"start":0,"deleteCount":1,"data":[2,5,3,0,3,0,5,4,1,0]}"#,
- &SemanticTokensEdit {
- start: 0,
- delete_count: 1,
- data: Some(vec![
- SemanticToken {
- delta_line: 2,
- delta_start: 5,
- length: 3,
- token_type: 0,
- token_modifiers_bitset: 3,
- },
- SemanticToken {
- delta_line: 0,
- delta_start: 5,
- length: 4,
- token_type: 1,
- token_modifiers_bitset: 0,
- },
- ]),
- },
- );
-
- test_deserialization(
- r#"{"start":0,"deleteCount":1}"#,
- &SemanticTokensEdit {
- start: 0,
- delete_count: 1,
- data: None,
- },
- );
- }
-
- #[test]
- fn test_semantic_tokens_edit_support_serialization() {
- test_serialization(
- &SemanticTokensEdit {
- start: 0,
- delete_count: 1,
- data: Some(vec![
- SemanticToken {
- delta_line: 2,
- delta_start: 5,
- length: 3,
- token_type: 0,
- token_modifiers_bitset: 3,
- },
- SemanticToken {
- delta_line: 0,
- delta_start: 5,
- length: 4,
- token_type: 1,
- token_modifiers_bitset: 0,
- },
- ]),
- },
- r#"{"start":0,"deleteCount":1,"data":[2,5,3,0,3,0,5,4,1,0]}"#,
- );
-
- test_serialization(
- &SemanticTokensEdit {
- start: 0,
- delete_count: 1,
- data: None,
- },
- r#"{"start":0,"deleteCount":1}"#,
- );
- }
-}
+use std::borrow::Cow;
+
+use serde::ser::SerializeSeq;
+use serde::{Deserialize, Serialize};
+
+use crate::{
+ PartialResultParams, Range, StaticRegistrationOptions, TextDocumentIdentifier,
+ TextDocumentRegistrationOptions, WorkDoneProgressOptions, WorkDoneProgressParams,
+};
+/// A set of predefined token types. This set is not fixed
+/// and clients can specify additional token types via the
+/// corresponding client capabilities.
+/// @since 3.16.0
+#[derive(Debug, Eq, PartialEq, Hash, PartialOrd, Clone, Deserialize, Serialize)]
+pub struct SemanticTokenType(Cow<'static, str>);
+
+impl SemanticTokenType {
+ pub const NAMESPACE: SemanticTokenType = SemanticTokenType::new("namespace");
+ pub const TYPE: SemanticTokenType = SemanticTokenType::new("type");
+ pub const CLASS: SemanticTokenType = SemanticTokenType::new("class");
+ pub const ENUM: SemanticTokenType = SemanticTokenType::new("enum");
+ pub const INTERFACE: SemanticTokenType = SemanticTokenType::new("interface");
+ pub const STRUCT: SemanticTokenType = SemanticTokenType::new("struct");
+ pub const TYPE_PARAMETER: SemanticTokenType = SemanticTokenType::new("typeParameter");
+ pub const PARAMETER: SemanticTokenType = SemanticTokenType::new("parameter");
+ pub const VARIABLE: SemanticTokenType = SemanticTokenType::new("variable");
+ pub const PROPERTY: SemanticTokenType = SemanticTokenType::new("property");
+ pub const ENUM_MEMBER: SemanticTokenType = SemanticTokenType::new("enumMember");
+ pub const EVENT: SemanticTokenType = SemanticTokenType::new("event");
+ pub const FUNCTION: SemanticTokenType = SemanticTokenType::new("function");
+ pub const METHOD: SemanticTokenType = SemanticTokenType::new("method");
+ pub const MACRO: SemanticTokenType = SemanticTokenType::new("macro");
+ pub const KEYWORD: SemanticTokenType = SemanticTokenType::new("keyword");
+ pub const MODIFIER: SemanticTokenType = SemanticTokenType::new("modifier");
+ pub const COMMENT: SemanticTokenType = SemanticTokenType::new("comment");
+ pub const STRING: SemanticTokenType = SemanticTokenType::new("string");
+ pub const NUMBER: SemanticTokenType = SemanticTokenType::new("number");
+ pub const REGEXP: SemanticTokenType = SemanticTokenType::new("regexp");
+ pub const OPERATOR: SemanticTokenType = SemanticTokenType::new("operator");
+
+ /// @since 3.17.0
+ pub const DECORATOR: SemanticTokenType = SemanticTokenType::new("decorator");
+
+ pub const fn new(tag: &'static str) -> Self {
+ SemanticTokenType(Cow::Borrowed(tag))
+ }
+
+ pub fn as_str(&self) -> &str {
+ &self.0
+ }
+}
+
+impl From<String> for SemanticTokenType {
+ fn from(from: String) -> Self {
+ SemanticTokenType(Cow::from(from))
+ }
+}
+
+impl From<&'static str> for SemanticTokenType {
+ fn from(from: &'static str) -> Self {
+ SemanticTokenType::new(from)
+ }
+}
+
+/// A set of predefined token modifiers. This set is not fixed
+/// and clients can specify additional token modifiers via the
+/// corresponding client capabilities.
+///
+/// @since 3.16.0
+#[derive(Debug, Eq, PartialEq, Hash, PartialOrd, Clone, Deserialize, Serialize)]
+pub struct SemanticTokenModifier(Cow<'static, str>);
+
+impl SemanticTokenModifier {
+ pub const DECLARATION: SemanticTokenModifier = SemanticTokenModifier::new("declaration");
+ pub const DEFINITION: SemanticTokenModifier = SemanticTokenModifier::new("definition");
+ pub const READONLY: SemanticTokenModifier = SemanticTokenModifier::new("readonly");
+ pub const STATIC: SemanticTokenModifier = SemanticTokenModifier::new("static");
+ pub const DEPRECATED: SemanticTokenModifier = SemanticTokenModifier::new("deprecated");
+ pub const ABSTRACT: SemanticTokenModifier = SemanticTokenModifier::new("abstract");
+ pub const ASYNC: SemanticTokenModifier = SemanticTokenModifier::new("async");
+ pub const MODIFICATION: SemanticTokenModifier = SemanticTokenModifier::new("modification");
+ pub const DOCUMENTATION: SemanticTokenModifier = SemanticTokenModifier::new("documentation");
+ pub const DEFAULT_LIBRARY: SemanticTokenModifier = SemanticTokenModifier::new("defaultLibrary");
+
+ pub const fn new(tag: &'static str) -> Self {
+ SemanticTokenModifier(Cow::Borrowed(tag))
+ }
+
+ pub fn as_str(&self) -> &str {
+ &self.0
+ }
+}
+
+impl From<String> for SemanticTokenModifier {
+ fn from(from: String) -> Self {
+ SemanticTokenModifier(Cow::from(from))
+ }
+}
+
+impl From<&'static str> for SemanticTokenModifier {
+ fn from(from: &'static str) -> Self {
+ SemanticTokenModifier::new(from)
+ }
+}
+
+#[derive(Debug, Eq, PartialEq, Hash, PartialOrd, Clone, Deserialize, Serialize)]
+pub struct TokenFormat(Cow<'static, str>);
+
+impl TokenFormat {
+ pub const RELATIVE: TokenFormat = TokenFormat::new("relative");
+
+ pub const fn new(tag: &'static str) -> Self {
+ TokenFormat(Cow::Borrowed(tag))
+ }
+
+ pub fn as_str(&self) -> &str {
+ &self.0
+ }
+}
+
+impl From<String> for TokenFormat {
+ fn from(from: String) -> Self {
+ TokenFormat(Cow::from(from))
+ }
+}
+
+impl From<&'static str> for TokenFormat {
+ fn from(from: &'static str) -> Self {
+ TokenFormat::new(from)
+ }
+}
+
+/// @since 3.16.0
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct SemanticTokensLegend {
+ /// The token types a server uses.
+ pub token_types: Vec<SemanticTokenType>,
+
+ /// The token modifiers a server uses.
+ pub token_modifiers: Vec<SemanticTokenModifier>,
+}
+
+/// The actual tokens.
+#[derive(Debug, Eq, PartialEq, Copy, Clone, Default)]
+pub struct SemanticToken {
+ pub delta_line: u32,
+ pub delta_start: u32,
+ pub length: u32,
+ pub token_type: u32,
+ pub token_modifiers_bitset: u32,
+}
+
+impl SemanticToken {
+ fn deserialize_tokens<'de, D>(deserializer: D) -> Result<Vec<SemanticToken>, D::Error>
+ where
+ D: serde::Deserializer<'de>,
+ {
+ let data = Vec::<u32>::deserialize(deserializer)?;
+ let chunks = data.chunks_exact(5);
+
+ if !chunks.remainder().is_empty() {
+ return Result::Err(serde::de::Error::custom("Length is not divisible by 5"));
+ }
+
+ Result::Ok(
+ chunks
+ .map(|chunk| SemanticToken {
+ delta_line: chunk[0],
+ delta_start: chunk[1],
+ length: chunk[2],
+ token_type: chunk[3],
+ token_modifiers_bitset: chunk[4],
+ })
+ .collect(),
+ )
+ }
+
+ fn serialize_tokens<S>(tokens: &[SemanticToken], serializer: S) -> Result<S::Ok, S::Error>
+ where
+ S: serde::Serializer,
+ {
+ let mut seq = serializer.serialize_seq(Some(tokens.len() * 5))?;
+ for token in tokens.iter() {
+ seq.serialize_element(&token.delta_line)?;
+ seq.serialize_element(&token.delta_start)?;
+ seq.serialize_element(&token.length)?;
+ seq.serialize_element(&token.token_type)?;
+ seq.serialize_element(&token.token_modifiers_bitset)?;
+ }
+ seq.end()
+ }
+
+ fn deserialize_tokens_opt<'de, D>(
+ deserializer: D,
+ ) -> Result<Option<Vec<SemanticToken>>, D::Error>
+ where
+ D: serde::Deserializer<'de>,
+ {
+ #[derive(Deserialize)]
+ #[serde(transparent)]
+ struct Wrapper {
+ #[serde(deserialize_with = "SemanticToken::deserialize_tokens")]
+ tokens: Vec<SemanticToken>,
+ }
+
+ Ok(Option::<Wrapper>::deserialize(deserializer)?.map(|wrapper| wrapper.tokens))
+ }
+
+ fn serialize_tokens_opt<S>(
+ data: &Option<Vec<SemanticToken>>,
+ serializer: S,
+ ) -> Result<S::Ok, S::Error>
+ where
+ S: serde::Serializer,
+ {
+ #[derive(Serialize)]
+ #[serde(transparent)]
+ struct Wrapper {
+ #[serde(serialize_with = "SemanticToken::serialize_tokens")]
+ tokens: Vec<SemanticToken>,
+ }
+
+ let opt = data.as_ref().map(|t| Wrapper { tokens: t.to_vec() });
+
+ opt.serialize(serializer)
+ }
+}
+
+/// @since 3.16.0
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct SemanticTokens {
+ /// An optional result id. If provided and clients support delta updating
+ /// the client will include the result id in the next semantic token request.
+ /// A server can then, instead of computing all semantic tokens again, simply
+ /// send a delta.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub result_id: Option<String>,
+
+ /// The actual tokens. For a detailed description of how the data is
+ /// structured, please see
+ /// <https://github.com/microsoft/vscode-extension-samples/blob/5ae1f7787122812dcc84e37427ca90af5ee09f14/semantic-tokens-sample/vscode.proposed.d.ts#L71>
+ #[serde(
+ deserialize_with = "SemanticToken::deserialize_tokens",
+ serialize_with = "SemanticToken::serialize_tokens"
+ )]
+ pub data: Vec<SemanticToken>,
+}
+
+/// @since 3.16.0
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct SemanticTokensPartialResult {
+ #[serde(
+ deserialize_with = "SemanticToken::deserialize_tokens",
+ serialize_with = "SemanticToken::serialize_tokens"
+ )]
+ pub data: Vec<SemanticToken>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+#[serde(untagged)]
+pub enum SemanticTokensResult {
+ Tokens(SemanticTokens),
+ Partial(SemanticTokensPartialResult),
+}
+
+impl From<SemanticTokens> for SemanticTokensResult {
+ fn from(from: SemanticTokens) -> Self {
+ SemanticTokensResult::Tokens(from)
+ }
+}
+
+impl From<SemanticTokensPartialResult> for SemanticTokensResult {
+ fn from(from: SemanticTokensPartialResult) -> Self {
+ SemanticTokensResult::Partial(from)
+ }
+}
+
+/// @since 3.16.0
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct SemanticTokensEdit {
+ pub start: u32,
+ pub delete_count: u32,
+
+ #[serde(
+ default,
+ skip_serializing_if = "Option::is_none",
+ deserialize_with = "SemanticToken::deserialize_tokens_opt",
+ serialize_with = "SemanticToken::serialize_tokens_opt"
+ )]
+ pub data: Option<Vec<SemanticToken>>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+#[serde(untagged)]
+pub enum SemanticTokensFullDeltaResult {
+ Tokens(SemanticTokens),
+ TokensDelta(SemanticTokensDelta),
+ PartialTokensDelta { edits: Vec<SemanticTokensEdit> },
+}
+
+impl From<SemanticTokens> for SemanticTokensFullDeltaResult {
+ fn from(from: SemanticTokens) -> Self {
+ SemanticTokensFullDeltaResult::Tokens(from)
+ }
+}
+
+impl From<SemanticTokensDelta> for SemanticTokensFullDeltaResult {
+ fn from(from: SemanticTokensDelta) -> Self {
+ SemanticTokensFullDeltaResult::TokensDelta(from)
+ }
+}
+
+/// @since 3.16.0
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct SemanticTokensDelta {
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub result_id: Option<String>,
+ /// For a detailed description of how these edits are structured, please see
+ /// <https://github.com/microsoft/vscode-extension-samples/blob/5ae1f7787122812dcc84e37427ca90af5ee09f14/semantic-tokens-sample/vscode.proposed.d.ts#L131>
+ pub edits: Vec<SemanticTokensEdit>,
+}
+
+/// Capabilities specific to the `textDocument/semanticTokens/*` requests.
+///
+/// @since 3.16.0
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct SemanticTokensClientCapabilities {
+ /// Whether implementation supports dynamic registration. If this is set to `true`
+ /// the client supports the new `(TextDocumentRegistrationOptions & StaticRegistrationOptions)`
+ /// return value for the corresponding server capability as well.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub dynamic_registration: Option<bool>,
+
+ /// Which requests the client supports and might send to the server
+ /// depending on the server's capability. Please note that clients might not
+ /// show semantic tokens or degrade some of the user experience if a range
+ /// or full request is advertised by the client but not provided by the
+ /// server. If for example the client capability `requests.full` and
+ /// `requests.range` are both set to true but the server only provides a
+ /// range provider the client might not render a minimap correctly or might
+ /// even decide to not show any semantic tokens at all.
+ pub requests: SemanticTokensClientCapabilitiesRequests,
+
+ /// The token types that the client supports.
+ pub token_types: Vec<SemanticTokenType>,
+
+ /// The token modifiers that the client supports.
+ pub token_modifiers: Vec<SemanticTokenModifier>,
+
+ /// The token formats the client supports.
+ pub formats: Vec<TokenFormat>,
+
+ /// Whether the client supports tokens that can overlap each other.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub overlapping_token_support: Option<bool>,
+
+ /// Whether the client supports tokens that can span multiple lines.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub multiline_token_support: Option<bool>,
+
+ /// Whether the client allows the server to actively cancel a
+ /// semantic token request, e.g. supports returning
+ /// ErrorCodes.ServerCancelled. If a server does, the client
+ /// needs to retrigger the request.
+ ///
+ /// @since 3.17.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub server_cancel_support: Option<bool>,
+
+ /// Whether the client uses semantic tokens to augment existing
+ /// syntax tokens. If set to `true` client side created syntax
+ /// tokens and semantic tokens are both used for colorization. If
+ /// set to `false` the client only uses the returned semantic tokens
+ /// for colorization.
+ ///
+ /// If the value is `undefined` then the client behavior is not
+ /// specified.
+ ///
+ /// @since 3.17.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub augments_syntax_tokens: Option<bool>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct SemanticTokensClientCapabilitiesRequests {
+ /// The client will send the `textDocument/semanticTokens/range` request if the server provides a corresponding handler.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub range: Option<bool>,
+
+ /// The client will send the `textDocument/semanticTokens/full` request if the server provides a corresponding handler.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub full: Option<SemanticTokensFullOptions>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+#[serde(untagged)]
+pub enum SemanticTokensFullOptions {
+ Bool(bool),
+ Delta {
+ /// The client will send the `textDocument/semanticTokens/full/delta` request if the server provides a corresponding handler.
+ /// The server supports deltas for full documents.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ delta: Option<bool>,
+ },
+}
+
+/// @since 3.16.0
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct SemanticTokensOptions {
+ #[serde(flatten)]
+ pub work_done_progress_options: WorkDoneProgressOptions,
+
+ /// The legend used by the server
+ pub legend: SemanticTokensLegend,
+
+ /// Server supports providing semantic tokens for a specific range
+ /// of a document.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub range: Option<bool>,
+
+ /// Server supports providing semantic tokens for a full document.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub full: Option<SemanticTokensFullOptions>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct SemanticTokensRegistrationOptions {
+ #[serde(flatten)]
+ pub text_document_registration_options: TextDocumentRegistrationOptions,
+
+ #[serde(flatten)]
+ pub semantic_tokens_options: SemanticTokensOptions,
+
+ #[serde(flatten)]
+ pub static_registration_options: StaticRegistrationOptions,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+#[serde(untagged)]
+pub enum SemanticTokensServerCapabilities {
+ SemanticTokensOptions(SemanticTokensOptions),
+ SemanticTokensRegistrationOptions(SemanticTokensRegistrationOptions),
+}
+
+impl From<SemanticTokensOptions> for SemanticTokensServerCapabilities {
+ fn from(from: SemanticTokensOptions) -> Self {
+ SemanticTokensServerCapabilities::SemanticTokensOptions(from)
+ }
+}
+
+impl From<SemanticTokensRegistrationOptions> for SemanticTokensServerCapabilities {
+ fn from(from: SemanticTokensRegistrationOptions) -> Self {
+ SemanticTokensServerCapabilities::SemanticTokensRegistrationOptions(from)
+ }
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct SemanticTokensWorkspaceClientCapabilities {
+ /// Whether the client implementation supports a refresh request sent from
+ /// the server to the client.
+ ///
+ /// Note that this event is global and will force the client to refresh all
+ /// semantic tokens currently shown. It should be used with absolute care
+ /// and is useful for situations where a server, for example, detects a
+ /// project-wide change that requires such a calculation.
+ pub refresh_support: Option<bool>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct SemanticTokensParams {
+ #[serde(flatten)]
+ pub work_done_progress_params: WorkDoneProgressParams,
+
+ #[serde(flatten)]
+ pub partial_result_params: PartialResultParams,
+
+ /// The text document.
+ pub text_document: TextDocumentIdentifier,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct SemanticTokensDeltaParams {
+ #[serde(flatten)]
+ pub work_done_progress_params: WorkDoneProgressParams,
+
+ #[serde(flatten)]
+ pub partial_result_params: PartialResultParams,
+
+ /// The text document.
+ pub text_document: TextDocumentIdentifier,
+
+ /// The result id of a previous response. The result id can either point to a full response
+ /// or a delta response, depending on what was received last.
+ pub previous_result_id: String,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct SemanticTokensRangeParams {
+ #[serde(flatten)]
+ pub work_done_progress_params: WorkDoneProgressParams,
+
+ #[serde(flatten)]
+ pub partial_result_params: PartialResultParams,
+
+ /// The text document.
+ pub text_document: TextDocumentIdentifier,
+
+ /// The range the semantic tokens are requested for.
+ pub range: Range,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+#[serde(untagged)]
+pub enum SemanticTokensRangeResult {
+ Tokens(SemanticTokens),
+ Partial(SemanticTokensPartialResult),
+}
+
+impl From<SemanticTokens> for SemanticTokensRangeResult {
+ fn from(tokens: SemanticTokens) -> Self {
+ SemanticTokensRangeResult::Tokens(tokens)
+ }
+}
+
+impl From<SemanticTokensPartialResult> for SemanticTokensRangeResult {
+ fn from(partial: SemanticTokensPartialResult) -> Self {
+ SemanticTokensRangeResult::Partial(partial)
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::tests::{test_deserialization, test_serialization};
+
+ #[test]
+ fn test_semantic_tokens_support_serialization() {
+ test_serialization(
+ &SemanticTokens {
+ result_id: None,
+ data: vec![],
+ },
+ r#"{"data":[]}"#,
+ );
+
+ test_serialization(
+ &SemanticTokens {
+ result_id: None,
+ data: vec![SemanticToken {
+ delta_line: 2,
+ delta_start: 5,
+ length: 3,
+ token_type: 0,
+ token_modifiers_bitset: 3,
+ }],
+ },
+ r#"{"data":[2,5,3,0,3]}"#,
+ );
+
+ test_serialization(
+ &SemanticTokens {
+ result_id: None,
+ data: vec![
+ SemanticToken {
+ delta_line: 2,
+ delta_start: 5,
+ length: 3,
+ token_type: 0,
+ token_modifiers_bitset: 3,
+ },
+ SemanticToken {
+ delta_line: 0,
+ delta_start: 5,
+ length: 4,
+ token_type: 1,
+ token_modifiers_bitset: 0,
+ },
+ ],
+ },
+ r#"{"data":[2,5,3,0,3,0,5,4,1,0]}"#,
+ );
+ }
+
+ #[test]
+ fn test_semantic_tokens_support_deserialization() {
+ test_deserialization(
+ r#"{"data":[]}"#,
+ &SemanticTokens {
+ result_id: None,
+ data: vec![],
+ },
+ );
+
+ test_deserialization(
+ r#"{"data":[2,5,3,0,3]}"#,
+ &SemanticTokens {
+ result_id: None,
+ data: vec![SemanticToken {
+ delta_line: 2,
+ delta_start: 5,
+ length: 3,
+ token_type: 0,
+ token_modifiers_bitset: 3,
+ }],
+ },
+ );
+
+ test_deserialization(
+ r#"{"data":[2,5,3,0,3,0,5,4,1,0]}"#,
+ &SemanticTokens {
+ result_id: None,
+ data: vec![
+ SemanticToken {
+ delta_line: 2,
+ delta_start: 5,
+ length: 3,
+ token_type: 0,
+ token_modifiers_bitset: 3,
+ },
+ SemanticToken {
+ delta_line: 0,
+ delta_start: 5,
+ length: 4,
+ token_type: 1,
+ token_modifiers_bitset: 0,
+ },
+ ],
+ },
+ );
+ }
+
+ #[test]
+ #[should_panic]
+ fn test_semantic_tokens_support_deserialization_err() {
+ test_deserialization(
+ r#"{"data":[1]}"#,
+ &SemanticTokens {
+ result_id: None,
+ data: vec![],
+ },
+ );
+ }
+
+ #[test]
+ fn test_semantic_tokens_edit_support_deserialization() {
+ test_deserialization(
+ r#"{"start":0,"deleteCount":1,"data":[2,5,3,0,3,0,5,4,1,0]}"#,
+ &SemanticTokensEdit {
+ start: 0,
+ delete_count: 1,
+ data: Some(vec![
+ SemanticToken {
+ delta_line: 2,
+ delta_start: 5,
+ length: 3,
+ token_type: 0,
+ token_modifiers_bitset: 3,
+ },
+ SemanticToken {
+ delta_line: 0,
+ delta_start: 5,
+ length: 4,
+ token_type: 1,
+ token_modifiers_bitset: 0,
+ },
+ ]),
+ },
+ );
+
+ test_deserialization(
+ r#"{"start":0,"deleteCount":1}"#,
+ &SemanticTokensEdit {
+ start: 0,
+ delete_count: 1,
+ data: None,
+ },
+ );
+ }
+
+ #[test]
+ fn test_semantic_tokens_edit_support_serialization() {
+ test_serialization(
+ &SemanticTokensEdit {
+ start: 0,
+ delete_count: 1,
+ data: Some(vec![
+ SemanticToken {
+ delta_line: 2,
+ delta_start: 5,
+ length: 3,
+ token_type: 0,
+ token_modifiers_bitset: 3,
+ },
+ SemanticToken {
+ delta_line: 0,
+ delta_start: 5,
+ length: 4,
+ token_type: 1,
+ token_modifiers_bitset: 0,
+ },
+ ]),
+ },
+ r#"{"start":0,"deleteCount":1,"data":[2,5,3,0,3,0,5,4,1,0]}"#,
+ );
+
+ test_serialization(
+ &SemanticTokensEdit {
+ start: 0,
+ delete_count: 1,
+ data: None,
+ },
+ r#"{"start":0,"deleteCount":1}"#,
+ );
+ }
+}
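As the tests above show, the wire format is a flat `data` array of five `u32`s per token: `deltaLine`, `deltaStart`, `length`, `tokenType`, `tokenModifiersBitset`, with positions encoded relative to the previous token. A rough sketch of producing that delta encoding from absolute positions; the `(line, start, length, type, modifiers)` input tuples are an assumption for illustration, not an lsp-types API:

    use lsp_types::SemanticToken;

    // Delta-encode absolute (line, start, length, token_type, modifiers) tuples
    // into the representation carried by `SemanticTokens.data`.
    // The input must already be sorted by (line, start).
    fn encode(absolute: &[(u32, u32, u32, u32, u32)]) -> Vec<SemanticToken> {
        let mut prev_line = 0;
        let mut prev_start = 0;
        absolute
            .iter()
            .map(|&(line, start, length, token_type, modifiers)| {
                let delta_line = line - prev_line;
                // deltaStart is relative to the previous token only when both
                // tokens sit on the same line; otherwise it is the absolute column.
                let delta_start = if delta_line == 0 { start - prev_start } else { start };
                prev_line = line;
                prev_start = start;
                SemanticToken {
                    delta_line,
                    delta_start,
                    length,
                    token_type,
                    token_modifiers_bitset: modifiers,
                }
            })
            .collect()
    }

For example, a token at line 3, column 5 that follows a token at line 3, column 1 is emitted with `delta_line: 0, delta_start: 4`, which matches the two-token `[2,5,3,0,3,0,5,4,1,0]` layout exercised by the serialization tests above.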
diff --git a/vendor/lsp-types/src/signature_help.rs b/vendor/lsp-types/src/signature_help.rs
index 151f3bf7f..569417c2a 100644
--- a/vendor/lsp-types/src/signature_help.rs
+++ b/vendor/lsp-types/src/signature_help.rs
@@ -1,207 +1,207 @@
-use serde::{Deserialize, Serialize};
-
-use crate::{
- Documentation, MarkupKind, TextDocumentPositionParams, TextDocumentRegistrationOptions,
- WorkDoneProgressOptions, WorkDoneProgressParams,
-};
-
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct SignatureInformationSettings {
- /// Client supports the follow content formats for the documentation
- /// property. The order describes the preferred format of the client.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub documentation_format: Option<Vec<MarkupKind>>,
-
- #[serde(skip_serializing_if = "Option::is_none")]
- pub parameter_information: Option<ParameterInformationSettings>,
-
- /// The client support the `activeParameter` property on `SignatureInformation`
- /// literal.
- ///
- /// @since 3.16.0
- #[serde(skip_serializing_if = "Option::is_none")]
- pub active_parameter_support: Option<bool>,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct ParameterInformationSettings {
- /// The client supports processing label offsets instead of a
- /// simple label string.
- ///
- /// @since 3.14.0
- #[serde(skip_serializing_if = "Option::is_none")]
- pub label_offset_support: Option<bool>,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct SignatureHelpClientCapabilities {
- /// Whether completion supports dynamic registration.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub dynamic_registration: Option<bool>,
-
- /// The client supports the following `SignatureInformation`
- /// specific properties.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub signature_information: Option<SignatureInformationSettings>,
-
- /// The client supports to send additional context information for a
- /// `textDocument/signatureHelp` request. A client that opts into
- /// contextSupport will also support the `retriggerCharacters` on
- /// `SignatureHelpOptions`.
- ///
- /// @since 3.15.0
- #[serde(skip_serializing_if = "Option::is_none")]
- pub context_support: Option<bool>,
-}
-
-/// Signature help options.
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct SignatureHelpOptions {
- /// The characters that trigger signature help automatically.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub trigger_characters: Option<Vec<String>>,
-
- /// List of characters that re-trigger signature help.
- /// These trigger characters are only active when signature help is already showing. All trigger characters
- /// are also counted as re-trigger characters.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub retrigger_characters: Option<Vec<String>>,
-
- #[serde(flatten)]
- pub work_done_progress_options: WorkDoneProgressOptions,
-}
-
-/// Signature help options.
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-pub struct SignatureHelpRegistrationOptions {
- #[serde(flatten)]
- pub text_document_registration_options: TextDocumentRegistrationOptions,
-}
-
-/// Signature help options.
-#[derive(Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(transparent)]
-pub struct SignatureHelpTriggerKind(i32);
-lsp_enum! {
-impl SignatureHelpTriggerKind {
- /// Signature help was invoked manually by the user or by a command.
- pub const INVOKED: SignatureHelpTriggerKind = SignatureHelpTriggerKind(1);
- /// Signature help was triggered by a trigger character.
- pub const TRIGGER_CHARACTER: SignatureHelpTriggerKind = SignatureHelpTriggerKind(2);
- /// Signature help was triggered by the cursor moving or by the document content changing.
- pub const CONTENT_CHANGE: SignatureHelpTriggerKind = SignatureHelpTriggerKind(3);
-}
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct SignatureHelpParams {
- /// The signature help context. This is only available if the client specifies
- /// to send this using the client capability `textDocument.signatureHelp.contextSupport === true`
- #[serde(skip_serializing_if = "Option::is_none")]
- pub context: Option<SignatureHelpContext>,
-
- #[serde(flatten)]
- pub text_document_position_params: TextDocumentPositionParams,
-
- #[serde(flatten)]
- pub work_done_progress_params: WorkDoneProgressParams,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct SignatureHelpContext {
- /// Action that caused signature help to be triggered.
- pub trigger_kind: SignatureHelpTriggerKind,
-
- /// Character that caused signature help to be triggered.
- /// This is undefined when `triggerKind !== SignatureHelpTriggerKind.TriggerCharacter`
- #[serde(skip_serializing_if = "Option::is_none")]
- pub trigger_character: Option<String>,
-
- /// `true` if signature help was already showing when it was triggered.
- /// Retriggers occur when the signature help is already active and can be caused by actions such as
- /// typing a trigger character, a cursor move, or document content changes.
- pub is_retrigger: bool,
-
- /// The currently active `SignatureHelp`.
- /// The `activeSignatureHelp` has its `SignatureHelp.activeSignature` field updated based on
- /// the user navigating through available signatures.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub active_signature_help: Option<SignatureHelp>,
-}
-
-/// Signature help represents the signature of something
-/// callable. There can be multiple signature but only one
-/// active and only one active parameter.
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct SignatureHelp {
- /// One or more signatures.
- pub signatures: Vec<SignatureInformation>,
-
- /// The active signature.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub active_signature: Option<u32>,
-
- /// The active parameter of the active signature.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub active_parameter: Option<u32>,
-}
-
-/// Represents the signature of something callable. A signature
-/// can have a label, like a function-name, a doc-comment, and
-/// a set of parameters.
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct SignatureInformation {
- /// The label of this signature. Will be shown in
- /// the UI.
- pub label: String,
-
- /// The human-readable doc-comment of this signature. Will be shown
- /// in the UI but can be omitted.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub documentation: Option<Documentation>,
-
- /// The parameters of this signature.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub parameters: Option<Vec<ParameterInformation>>,
-
- /// The index of the active parameter.
- ///
- /// If provided, this is used in place of `SignatureHelp.activeParameter`.
- ///
- /// @since 3.16.0
- #[serde(skip_serializing_if = "Option::is_none")]
- pub active_parameter: Option<u32>,
-}
-
-/// Represents a parameter of a callable-signature. A parameter can
-/// have a label and a doc-comment.
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct ParameterInformation {
- /// The label of this parameter information.
- ///
- /// Either a string or an inclusive start and exclusive end offsets within its containing
- /// signature label. (see SignatureInformation.label). *Note*: A label of type string must be
- /// a substring of its containing signature label.
- pub label: ParameterLabel,
-
- /// The human-readable doc-comment of this parameter. Will be shown
- /// in the UI but can be omitted.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub documentation: Option<Documentation>,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(untagged)]
-pub enum ParameterLabel {
- Simple(String),
- LabelOffsets([u32; 2]),
-}
+use serde::{Deserialize, Serialize};
+
+use crate::{
+ Documentation, MarkupKind, TextDocumentPositionParams, TextDocumentRegistrationOptions,
+ WorkDoneProgressOptions, WorkDoneProgressParams,
+};
+
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct SignatureInformationSettings {
+ /// Client supports the following content formats for the documentation
+ /// property. The order describes the preferred format of the client.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub documentation_format: Option<Vec<MarkupKind>>,
+
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub parameter_information: Option<ParameterInformationSettings>,
+
+ /// The client supports the `activeParameter` property on `SignatureInformation`
+ /// literal.
+ ///
+ /// @since 3.16.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub active_parameter_support: Option<bool>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct ParameterInformationSettings {
+ /// The client supports processing label offsets instead of a
+ /// simple label string.
+ ///
+ /// @since 3.14.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub label_offset_support: Option<bool>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct SignatureHelpClientCapabilities {
+ /// Whether signature help supports dynamic registration.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub dynamic_registration: Option<bool>,
+
+ /// The client supports the following `SignatureInformation`
+ /// specific properties.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub signature_information: Option<SignatureInformationSettings>,
+
+ /// The client supports sending additional context information for a
+ /// `textDocument/signatureHelp` request. A client that opts into
+ /// contextSupport will also support the `retriggerCharacters` on
+ /// `SignatureHelpOptions`.
+ ///
+ /// @since 3.15.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub context_support: Option<bool>,
+}
+
+/// Signature help options.
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct SignatureHelpOptions {
+ /// The characters that trigger signature help automatically.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub trigger_characters: Option<Vec<String>>,
+
+ /// List of characters that re-trigger signature help.
+ /// These trigger characters are only active when signature help is already showing. All trigger characters
+ /// are also counted as re-trigger characters.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub retrigger_characters: Option<Vec<String>>,
+
+ #[serde(flatten)]
+ pub work_done_progress_options: WorkDoneProgressOptions,
+}
+
+/// Signature help registration options.
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+pub struct SignatureHelpRegistrationOptions {
+ #[serde(flatten)]
+ pub text_document_registration_options: TextDocumentRegistrationOptions,
+}
+
+/// How a signature help was triggered.
+#[derive(Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(transparent)]
+pub struct SignatureHelpTriggerKind(i32);
+lsp_enum! {
+impl SignatureHelpTriggerKind {
+ /// Signature help was invoked manually by the user or by a command.
+ pub const INVOKED: SignatureHelpTriggerKind = SignatureHelpTriggerKind(1);
+ /// Signature help was triggered by a trigger character.
+ pub const TRIGGER_CHARACTER: SignatureHelpTriggerKind = SignatureHelpTriggerKind(2);
+ /// Signature help was triggered by the cursor moving or by the document content changing.
+ pub const CONTENT_CHANGE: SignatureHelpTriggerKind = SignatureHelpTriggerKind(3);
+}
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct SignatureHelpParams {
+ /// The signature help context. This is only available if the client specifies
+ /// to send this using the client capability `textDocument.signatureHelp.contextSupport === true`
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub context: Option<SignatureHelpContext>,
+
+ #[serde(flatten)]
+ pub text_document_position_params: TextDocumentPositionParams,
+
+ #[serde(flatten)]
+ pub work_done_progress_params: WorkDoneProgressParams,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct SignatureHelpContext {
+ /// Action that caused signature help to be triggered.
+ pub trigger_kind: SignatureHelpTriggerKind,
+
+ /// Character that caused signature help to be triggered.
+ /// This is undefined when `triggerKind !== SignatureHelpTriggerKind.TriggerCharacter`
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub trigger_character: Option<String>,
+
+ /// `true` if signature help was already showing when it was triggered.
+ /// Retriggers occur when the signature help is already active and can be caused by actions such as
+ /// typing a trigger character, a cursor move, or document content changes.
+ pub is_retrigger: bool,
+
+ /// The currently active `SignatureHelp`.
+ /// The `activeSignatureHelp` has its `SignatureHelp.activeSignature` field updated based on
+ /// the user navigating through available signatures.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub active_signature_help: Option<SignatureHelp>,
+}
+
+/// Signature help represents the signature of something
+/// callable. There can be multiple signatures but only one
+/// active and only one active parameter.
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct SignatureHelp {
+ /// One or more signatures.
+ pub signatures: Vec<SignatureInformation>,
+
+ /// The active signature.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub active_signature: Option<u32>,
+
+ /// The active parameter of the active signature.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub active_parameter: Option<u32>,
+}
+
+/// Represents the signature of something callable. A signature
+/// can have a label, like a function-name, a doc-comment, and
+/// a set of parameters.
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct SignatureInformation {
+ /// The label of this signature. Will be shown in
+ /// the UI.
+ pub label: String,
+
+ /// The human-readable doc-comment of this signature. Will be shown
+ /// in the UI but can be omitted.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub documentation: Option<Documentation>,
+
+ /// The parameters of this signature.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub parameters: Option<Vec<ParameterInformation>>,
+
+ /// The index of the active parameter.
+ ///
+ /// If provided, this is used in place of `SignatureHelp.activeParameter`.
+ ///
+ /// @since 3.16.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub active_parameter: Option<u32>,
+}
+
+/// Represents a parameter of a callable-signature. A parameter can
+/// have a label and a doc-comment.
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct ParameterInformation {
+ /// The label of this parameter information.
+ ///
+ /// Either a string or inclusive start and exclusive end offsets within its containing
+ /// signature label (see `SignatureInformation.label`). *Note*: a label of type string must be
+ /// a substring of its containing signature label.
+ pub label: ParameterLabel,
+
+ /// The human-readable doc-comment of this parameter. Will be shown
+ /// in the UI but can be omitted.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub documentation: Option<Documentation>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(untagged)]
+pub enum ParameterLabel {
+ Simple(String),
+ LabelOffsets([u32; 2]),
+}
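A minimal usage sketch for the signature-help types above, assuming the crate is consumed as `lsp_types` with its usual crate-root re-exports and a `serde_json` dependency; the signature label, offsets, and function name are illustrative only:

use lsp_types::{ParameterInformation, ParameterLabel, SignatureHelp, SignatureInformation};

fn example_signature_help_json() -> String {
    let help = SignatureHelp {
        signatures: vec![SignatureInformation {
            label: "fn repeat(count: usize) -> String".to_string(),
            documentation: None,
            parameters: Some(vec![ParameterInformation {
                // Offsets [10, 22) select "count: usize" inside the signature label;
                // ParameterLabel::Simple("count: usize".into()) would work as well.
                label: ParameterLabel::LabelOffsets([10, 22]),
                documentation: None,
            }]),
            active_parameter: Some(0),
        }],
        active_signature: Some(0),
        active_parameter: Some(0),
    };
    // Option fields are skipped when None, keeping the JSON close to the wire format.
    serde_json::to_string(&help).unwrap()
}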
diff --git a/vendor/lsp-types/src/trace.rs b/vendor/lsp-types/src/trace.rs
index 3272925bb..7cd42de07 100644
--- a/vendor/lsp-types/src/trace.rs
+++ b/vendor/lsp-types/src/trace.rs
@@ -1,82 +1,82 @@
-use serde::{Deserialize, Serialize};
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-pub struct SetTraceParams {
- /// The new value that should be assigned to the trace setting.
- pub value: TraceValue,
-}
-
-/// A TraceValue represents the level of verbosity with which the server systematically
-/// reports its execution trace using `LogTrace` notifications.
-///
-/// The initial trace value is set by the client at initialization and can be modified
-/// later using the `SetTrace` notification.
-#[derive(Debug, Eq, PartialEq, Clone, Copy, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub enum TraceValue {
- /// The server should not send any `$/logTrace` notification
- Off,
- /// The server should not add the 'verbose' field in the `LogTraceParams`
- Messages,
- Verbose,
-}
-
-impl Default for TraceValue {
- fn default() -> TraceValue {
- TraceValue::Off
- }
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct LogTraceParams {
- /// The message to be logged.
- pub message: String,
- /// Additional information that can be computed if the `trace` configuration
- /// is set to `'verbose'`
- #[serde(skip_serializing_if = "Option::is_none")]
- pub verbose: Option<String>,
-}
-
-#[cfg(test)]
-mod tests {
- use super::*;
- use crate::tests::test_serialization;
-
- #[test]
- fn test_set_trace_params() {
- test_serialization(
- &SetTraceParams {
- value: TraceValue::Off,
- },
- r#"{"value":"off"}"#,
- );
- }
-
- #[test]
- fn test_log_trace_params() {
- test_serialization(
- &LogTraceParams {
- message: "message".into(),
- verbose: None,
- },
- r#"{"message":"message"}"#,
- );
-
- test_serialization(
- &LogTraceParams {
- message: "message".into(),
- verbose: Some("verbose".into()),
- },
- r#"{"message":"message","verbose":"verbose"}"#,
- );
- }
-
- #[test]
- fn test_trace_value() {
- test_serialization(
- &vec![TraceValue::Off, TraceValue::Messages, TraceValue::Verbose],
- r#"["off","messages","verbose"]"#,
- );
- }
-}
+use serde::{Deserialize, Serialize};
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+pub struct SetTraceParams {
+ /// The new value that should be assigned to the trace setting.
+ pub value: TraceValue,
+}
+
+/// A TraceValue represents the level of verbosity with which the server systematically
+/// reports its execution trace using `LogTrace` notifications.
+///
+/// The initial trace value is set by the client at initialization and can be modified
+/// later using the `SetTrace` notification.
+#[derive(Debug, Eq, PartialEq, Clone, Copy, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub enum TraceValue {
+ /// The server should not send any `$/logTrace` notification
+ Off,
+ /// The server should not add the 'verbose' field in the `LogTraceParams`
+ Messages,
+ Verbose,
+}
+
+impl Default for TraceValue {
+ fn default() -> TraceValue {
+ TraceValue::Off
+ }
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct LogTraceParams {
+ /// The message to be logged.
+ pub message: String,
+ /// Additional information that can be computed if the `trace` configuration
+ /// is set to `'verbose'`
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub verbose: Option<String>,
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::tests::test_serialization;
+
+ #[test]
+ fn test_set_trace_params() {
+ test_serialization(
+ &SetTraceParams {
+ value: TraceValue::Off,
+ },
+ r#"{"value":"off"}"#,
+ );
+ }
+
+ #[test]
+ fn test_log_trace_params() {
+ test_serialization(
+ &LogTraceParams {
+ message: "message".into(),
+ verbose: None,
+ },
+ r#"{"message":"message"}"#,
+ );
+
+ test_serialization(
+ &LogTraceParams {
+ message: "message".into(),
+ verbose: Some("verbose".into()),
+ },
+ r#"{"message":"message","verbose":"verbose"}"#,
+ );
+ }
+
+ #[test]
+ fn test_trace_value() {
+ test_serialization(
+ &vec![TraceValue::Off, TraceValue::Messages, TraceValue::Verbose],
+ r#"["off","messages","verbose"]"#,
+ );
+ }
+}
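A small sketch mirroring the tests above, assuming `lsp_types` crate-root re-exports and a `serde_json` dependency; the message text is illustrative only:

use lsp_types::{LogTraceParams, SetTraceParams, TraceValue};

fn example_trace_round_trip() {
    // The default trace level is `off`, per the Default impl above.
    assert_eq!(TraceValue::default(), TraceValue::Off);

    // `$/setTrace` payload asking for verbose tracing.
    let set = SetTraceParams { value: TraceValue::Verbose };
    assert_eq!(serde_json::to_string(&set).unwrap(), r#"{"value":"verbose"}"#);

    // `$/logTrace` notification; `verbose` is omitted when `None`.
    let log = LogTraceParams { message: "indexing".into(), verbose: None };
    assert_eq!(serde_json::to_string(&log).unwrap(), r#"{"message":"indexing"}"#);
}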
diff --git a/vendor/lsp-types/src/type_hierarchy.rs b/vendor/lsp-types/src/type_hierarchy.rs
new file mode 100644
index 000000000..6cc9812ba
--- /dev/null
+++ b/vendor/lsp-types/src/type_hierarchy.rs
@@ -0,0 +1,90 @@
+use crate::{
+ DynamicRegistrationClientCapabilities, LSPAny, PartialResultParams, Range,
+ StaticRegistrationOptions, SymbolKind, SymbolTag, TextDocumentPositionParams,
+ TextDocumentRegistrationOptions, Url, WorkDoneProgressOptions, WorkDoneProgressParams,
+};
+
+use serde::{Deserialize, Serialize};
+
+pub type TypeHierarchyClientCapabilities = DynamicRegistrationClientCapabilities;
+
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+pub struct TypeHierarchyOptions {
+ #[serde(flatten)]
+ pub work_done_progress_options: WorkDoneProgressOptions,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+pub struct TypeHierarchyRegistrationOptions {
+ #[serde(flatten)]
+ pub text_document_registration_options: TextDocumentRegistrationOptions,
+ #[serde(flatten)]
+ pub type_hierarchy_options: TypeHierarchyOptions,
+ #[serde(flatten)]
+ pub static_registration_options: StaticRegistrationOptions,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+pub struct TypeHierarchyPrepareParams {
+ #[serde(flatten)]
+ pub text_document_position_params: TextDocumentPositionParams,
+ #[serde(flatten)]
+ pub work_done_progress_params: WorkDoneProgressParams,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+pub struct TypeHierarchySupertypesParams {
+ pub item: TypeHierarchyItem,
+
+ #[serde(flatten)]
+ pub work_done_progress_params: WorkDoneProgressParams,
+ #[serde(flatten)]
+ pub partial_result_params: PartialResultParams,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+pub struct TypeHierarchySubtypesParams {
+ pub item: TypeHierarchyItem,
+
+ #[serde(flatten)]
+ pub work_done_progress_params: WorkDoneProgressParams,
+ #[serde(flatten)]
+ pub partial_result_params: PartialResultParams,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct TypeHierarchyItem {
+ /// The name of this item.
+ pub name: String,
+
+ /// The kind of this item.
+ pub kind: SymbolKind,
+
+ /// Tags for this item.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub tags: Option<SymbolTag>,
+
+ /// More detail for this item, e.g. the signature of a function.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub detail: Option<String>,
+
+ /// The resource identifier of this item.
+ pub uri: Url,
+
+ /// The range enclosing this symbol not including leading/trailing whitespace
+ /// but everything else, e.g. comments and code.
+ pub range: Range,
+
+ /// The range that should be selected and revealed when this symbol is being
+ /// picked, e.g. the name of a function. Must be contained by the
+ /// [`range`](#TypeHierarchyItem.range).
+ pub selection_range: Range,
+
+ /// A data entry field that is preserved between a type hierarchy prepare and
+ /// supertypes or subtypes requests. It could also be used to identify the
+ /// type hierarchy in the server, helping improve the performance on
+ /// resolving supertypes and subtypes.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub data: Option<LSPAny>,
+}
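A hedged sketch of building one item for a `textDocument/prepareTypeHierarchy` response, assuming the crate-root re-exports (`Url`, `Range`, `Position`) and the `SymbolKind::CLASS` constant; the path and positions are illustrative only:

use lsp_types::{Position, Range, SymbolKind, TypeHierarchyItem, Url};

fn example_type_hierarchy_item() -> TypeHierarchyItem {
    TypeHierarchyItem {
        name: "MyCollection".to_string(),
        kind: SymbolKind::CLASS,
        tags: None,
        detail: Some("class MyCollection<T>".to_string()),
        uri: Url::parse("file:///project/src/collection.ts").unwrap(),
        // Full extent of the declaration, excluding leading/trailing whitespace.
        range: Range {
            start: Position { line: 10, character: 0 },
            end: Position { line: 42, character: 1 },
        },
        // The identifier itself; must be contained in `range`.
        selection_range: Range {
            start: Position { line: 10, character: 6 },
            end: Position { line: 10, character: 18 },
        },
        // Preserved between the prepare request and supertypes/subtypes requests.
        data: None,
    }
}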
diff --git a/vendor/lsp-types/src/window.rs b/vendor/lsp-types/src/window.rs
index 89d28aaaf..d4dbdf169 100644
--- a/vendor/lsp-types/src/window.rs
+++ b/vendor/lsp-types/src/window.rs
@@ -1,176 +1,176 @@
-use std::collections::HashMap;
-
-use serde::{Deserialize, Serialize};
-
-use serde_json::Value;
-
-use url::Url;
-
-use crate::Range;
-
-#[derive(Eq, PartialEq, Clone, Copy, Deserialize, Serialize)]
-#[serde(transparent)]
-pub struct MessageType(i32);
-lsp_enum! {
-impl MessageType {
- /// An error message.
- pub const ERROR: MessageType = MessageType(1);
- /// A warning message.
- pub const WARNING: MessageType = MessageType(2);
- /// An information message;
- pub const INFO: MessageType = MessageType(3);
- /// A log message.
- pub const LOG: MessageType = MessageType(4);
-}
-}
-
-/// Window specific client capabilities.
-#[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct WindowClientCapabilities {
- /// Whether client supports handling progress notifications. If set
- /// servers are allowed to report in `workDoneProgress` property in the
- /// request specific server capabilities.
- ///
- /// @since 3.15.0
- #[serde(skip_serializing_if = "Option::is_none")]
- pub work_done_progress: Option<bool>,
-
- /// Capabilities specific to the showMessage request.
- ///
- /// @since 3.16.0
- ///
- #[serde(skip_serializing_if = "Option::is_none")]
- pub show_message: Option<ShowMessageRequestClientCapabilities>,
-
- /// Client capabilities for the show document request.
- ///
- /// @since 3.16.0
- ///
- #[serde(skip_serializing_if = "Option::is_none")]
- pub show_document: Option<ShowDocumentClientCapabilities>,
-}
-
-/// Show message request client capabilities
-#[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct ShowMessageRequestClientCapabilities {
- /// Capabilities specific to the `MessageActionItem` type.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub message_action_item: Option<MessageActionItemCapabilities>,
-}
-
-#[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct MessageActionItemCapabilities {
- /// Whether the client supports additional attribues which
- /// are preserved and send back to the server in the
- /// request's response.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub additional_properties_support: Option<bool>,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct MessageActionItem {
- /// A short title like 'Retry', 'Open Log' etc.
- pub title: String,
-
- /// Additional attributes that the client preserves and
- /// sends back to the server. This depends on the client
- /// capability window.messageActionItem.additionalPropertiesSupport
- #[serde(flatten)]
- pub properties: HashMap<String, MessageActionItemProperty>,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(untagged)]
-pub enum MessageActionItemProperty {
- String(String),
- Boolean(bool),
- Integer(i32),
- Object(Value),
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-pub struct LogMessageParams {
- /// The message type. See {@link MessageType}
- #[serde(rename = "type")]
- pub typ: MessageType,
-
- /// The actual message
- pub message: String,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-pub struct ShowMessageParams {
- /// The message type. See {@link MessageType}.
- #[serde(rename = "type")]
- pub typ: MessageType,
-
- /// The actual message.
- pub message: String,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-pub struct ShowMessageRequestParams {
- /// The message type. See {@link MessageType}
- #[serde(rename = "type")]
- pub typ: MessageType,
-
- /// The actual message
- pub message: String,
-
- /// The message action items to present.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub actions: Option<Vec<MessageActionItem>>,
-}
-
-/// Client capabilities for the show document request.
-#[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct ShowDocumentClientCapabilities {
- /// The client has support for the show document request.
- pub support: bool,
-}
-
-/// Params to show a document.
-///
-/// @since 3.16.0
-///
-#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct ShowDocumentParams {
- /// The document uri to show.
- pub uri: Url,
-
- /// Indicates to show the resource in an external program.
- /// To show for example `https://code.visualstudio.com/`
- /// in the default WEB browser set `external` to `true`.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub external: Option<bool>,
-
- /// An optional property to indicate whether the editor
- /// showing the document should take focus or not.
- /// Clients might ignore this property if an external
- /// program in started.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub take_focus: Option<bool>,
-
- /// An optional selection range if the document is a text
- /// document. Clients might ignore the property if an
- /// external program is started or the file is not a text
- /// file.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub selection: Option<Range>,
-}
-
-/// The result of an show document request.
-///
-/// @since 3.16.0
-#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct ShowDocumentResult {
- /// A boolean indicating if the show was successful.
- pub success: bool,
-}
+use std::collections::HashMap;
+
+use serde::{Deserialize, Serialize};
+
+use serde_json::Value;
+
+use url::Url;
+
+use crate::Range;
+
+#[derive(Eq, PartialEq, Clone, Copy, Deserialize, Serialize)]
+#[serde(transparent)]
+pub struct MessageType(i32);
+lsp_enum! {
+impl MessageType {
+ /// An error message.
+ pub const ERROR: MessageType = MessageType(1);
+ /// A warning message.
+ pub const WARNING: MessageType = MessageType(2);
+ /// An information message.
+ pub const INFO: MessageType = MessageType(3);
+ /// A log message.
+ pub const LOG: MessageType = MessageType(4);
+}
+}
+
+/// Window specific client capabilities.
+#[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct WindowClientCapabilities {
+ /// Whether the client supports handling progress notifications. If set,
+ /// servers are allowed to report progress in the `workDoneProgress` property of the
+ /// request-specific server capabilities.
+ ///
+ /// @since 3.15.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub work_done_progress: Option<bool>,
+
+ /// Capabilities specific to the showMessage request.
+ ///
+ /// @since 3.16.0
+ ///
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub show_message: Option<ShowMessageRequestClientCapabilities>,
+
+ /// Client capabilities for the show document request.
+ ///
+ /// @since 3.16.0
+ ///
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub show_document: Option<ShowDocumentClientCapabilities>,
+}
+
+/// Show message request client capabilities
+#[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct ShowMessageRequestClientCapabilities {
+ /// Capabilities specific to the `MessageActionItem` type.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub message_action_item: Option<MessageActionItemCapabilities>,
+}
+
+#[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct MessageActionItemCapabilities {
+ /// Whether the client supports additional attributes which
+ /// are preserved and sent back to the server in the
+ /// request's response.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub additional_properties_support: Option<bool>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct MessageActionItem {
+ /// A short title like 'Retry', 'Open Log' etc.
+ pub title: String,
+
+ /// Additional attributes that the client preserves and
+ /// sends back to the server. This depends on the client
+ /// capability window.messageActionItem.additionalPropertiesSupport
+ #[serde(flatten)]
+ pub properties: HashMap<String, MessageActionItemProperty>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(untagged)]
+pub enum MessageActionItemProperty {
+ String(String),
+ Boolean(bool),
+ Integer(i32),
+ Object(Value),
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+pub struct LogMessageParams {
+ /// The message type. See {@link MessageType}
+ #[serde(rename = "type")]
+ pub typ: MessageType,
+
+ /// The actual message
+ pub message: String,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+pub struct ShowMessageParams {
+ /// The message type. See {@link MessageType}.
+ #[serde(rename = "type")]
+ pub typ: MessageType,
+
+ /// The actual message.
+ pub message: String,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+pub struct ShowMessageRequestParams {
+ /// The message type. See {@link MessageType}
+ #[serde(rename = "type")]
+ pub typ: MessageType,
+
+ /// The actual message
+ pub message: String,
+
+ /// The message action items to present.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub actions: Option<Vec<MessageActionItem>>,
+}
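A hedged sketch of a `window/showMessageRequest` payload whose action item carries an additional, flattened property (only meaningful when the client advertised `additionalPropertiesSupport`); the property name is illustrative only:

use std::collections::HashMap;

use lsp_types::{
    MessageActionItem, MessageActionItemProperty, MessageType, ShowMessageRequestParams,
};

fn example_show_message_request() -> ShowMessageRequestParams {
    // Extra attributes sit next to `title` in the JSON and are echoed back by the client.
    let mut properties = HashMap::new();
    properties.insert(
        "retryCount".to_string(),
        MessageActionItemProperty::Integer(3),
    );

    ShowMessageRequestParams {
        typ: MessageType::WARNING,
        message: "The build failed. Retry?".to_string(),
        actions: Some(vec![MessageActionItem {
            title: "Retry".to_string(),
            properties,
        }]),
    }
}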
+
+/// Client capabilities for the show document request.
+#[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct ShowDocumentClientCapabilities {
+ /// The client has support for the show document request.
+ pub support: bool,
+}
+
+/// Params to show a document.
+///
+/// @since 3.16.0
+///
+#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct ShowDocumentParams {
+ /// The document uri to show.
+ pub uri: Url,
+
+ /// Indicates whether to show the resource in an external program.
+ /// For example, to show `https://code.visualstudio.com/`
+ /// in the default web browser, set `external` to `true`.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub external: Option<bool>,
+
+ /// An optional property to indicate whether the editor
+ /// showing the document should take focus or not.
+ /// Clients might ignore this property if an external
+ /// program is started.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub take_focus: Option<bool>,
+
+ /// An optional selection range if the document is a text
+ /// document. Clients might ignore the property if an
+ /// external program is started or the file is not a text
+ /// file.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub selection: Option<Range>,
+}
+
+/// The result of a show document request.
+///
+/// @since 3.16.0
+#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct ShowDocumentResult {
+ /// A boolean indicating if the show was successful.
+ pub success: bool,
+}
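A hedged sketch of a `window/showDocument` request and its result, assuming the crate-root `Url`, `Range`, and `Position` re-exports; the URI is illustrative only:

use lsp_types::{Position, Range, ShowDocumentParams, ShowDocumentResult, Url};

fn example_show_document() -> (ShowDocumentParams, ShowDocumentResult) {
    let params = ShowDocumentParams {
        uri: Url::parse("file:///project/README.md").unwrap(),
        // `Some(true)` would ask the client to open an external program (e.g. a browser).
        external: Some(false),
        take_focus: Some(true),
        // Reveal the start of the document; ignored for external programs or non-text files.
        selection: Some(Range {
            start: Position { line: 0, character: 0 },
            end: Position { line: 0, character: 0 },
        }),
    };
    let result = ShowDocumentResult { success: true };
    (params, result)
}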
diff --git a/vendor/lsp-types/src/workspace_folders.rs b/vendor/lsp-types/src/workspace_folders.rs
index ae30d3a0b..677e6f696 100644
--- a/vendor/lsp-types/src/workspace_folders.rs
+++ b/vendor/lsp-types/src/workspace_folders.rs
@@ -1,49 +1,49 @@
-use serde::{Deserialize, Serialize};
-use url::Url;
-
-use crate::OneOf;
-
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct WorkspaceFoldersServerCapabilities {
- /// The server has support for workspace folders
- #[serde(skip_serializing_if = "Option::is_none")]
- pub supported: Option<bool>,
-
- /// Whether the server wants to receive workspace folder
- /// change notifications.
- ///
- /// If a string is provided, the string is treated as an ID
- /// under which the notification is registered on the client
- /// side. The ID can be used to unregister for these events
- /// using the `client/unregisterCapability` request.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub change_notifications: Option<OneOf<bool, String>>,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct WorkspaceFolder {
- /// The associated URI for this workspace folder.
- pub uri: Url,
- /// The name of the workspace folder. Defaults to the uri's basename.
- pub name: String,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct DidChangeWorkspaceFoldersParams {
- /// The actual workspace folder change event.
- pub event: WorkspaceFoldersChangeEvent,
-}
-
-/// The workspace folder change event.
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct WorkspaceFoldersChangeEvent {
- /// The array of added workspace folders
- pub added: Vec<WorkspaceFolder>,
-
- /// The array of the removed workspace folders
- pub removed: Vec<WorkspaceFolder>,
-}
+use serde::{Deserialize, Serialize};
+use url::Url;
+
+use crate::OneOf;
+
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct WorkspaceFoldersServerCapabilities {
+ /// The server has support for workspace folders
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub supported: Option<bool>,
+
+ /// Whether the server wants to receive workspace folder
+ /// change notifications.
+ ///
+ /// If a string is provided, the string is treated as an ID
+ /// under which the notification is registered on the client
+ /// side. The ID can be used to unregister for these events
+ /// using the `client/unregisterCapability` request.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub change_notifications: Option<OneOf<bool, String>>,
+}
+
+#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct WorkspaceFolder {
+ /// The associated URI for this workspace folder.
+ pub uri: Url,
+ /// The name of the workspace folder. Defaults to the uri's basename.
+ pub name: String,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct DidChangeWorkspaceFoldersParams {
+ /// The actual workspace folder change event.
+ pub event: WorkspaceFoldersChangeEvent,
+}
+
+/// The workspace folder change event.
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct WorkspaceFoldersChangeEvent {
+ /// The array of added workspace folders
+ pub added: Vec<WorkspaceFolder>,
+
+ /// The array of the removed workspace folders
+ pub removed: Vec<WorkspaceFolder>,
+}
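A hedged sketch of a `workspace/didChangeWorkspaceFolders` notification payload, assuming the crate-root `Url` re-export; the folder URI and name are illustrative only:

use lsp_types::{DidChangeWorkspaceFoldersParams, Url, WorkspaceFolder, WorkspaceFoldersChangeEvent};

fn example_folder_change() -> DidChangeWorkspaceFoldersParams {
    let added = WorkspaceFolder {
        uri: Url::parse("file:///home/user/new-project").unwrap(),
        // By convention this defaults to the URI's basename when the client supplies no name.
        name: "new-project".to_string(),
    };
    DidChangeWorkspaceFoldersParams {
        event: WorkspaceFoldersChangeEvent {
            added: vec![added],
            removed: vec![],
        },
    }
}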
diff --git a/vendor/lsp-types/src/workspace_symbols.rs b/vendor/lsp-types/src/workspace_symbols.rs
index 6b4143a54..8540bb46f 100644
--- a/vendor/lsp-types/src/workspace_symbols.rs
+++ b/vendor/lsp-types/src/workspace_symbols.rs
@@ -1,42 +1,106 @@
-use crate::{PartialResultParams, SymbolKindCapability, WorkDoneProgressParams};
-
-use crate::{SymbolTag, TagSupport};
-
-use serde::{Deserialize, Serialize};
-
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct WorkspaceSymbolClientCapabilities {
- /// This capability supports dynamic registration.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub dynamic_registration: Option<bool>,
-
- /// Specific capabilities for the `SymbolKind` in the `workspace/symbol` request.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub symbol_kind: Option<SymbolKindCapability>,
-
- /// The client supports tags on `SymbolInformation`.
- /// Clients supporting tags have to handle unknown tags gracefully.
- ///
- /// @since 3.16.0
- ///
- #[serde(
- default,
- skip_serializing_if = "Option::is_none",
- deserialize_with = "TagSupport::deserialize_compat"
- )]
- pub tag_support: Option<TagSupport<SymbolTag>>,
-}
-
-/// The parameters of a Workspace Symbol Request.
-#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
-pub struct WorkspaceSymbolParams {
- #[serde(flatten)]
- pub partial_result_params: PartialResultParams,
-
- #[serde(flatten)]
- pub work_done_progress_params: WorkDoneProgressParams,
-
- /// A non-empty query string
- pub query: String,
-}
+use crate::{
+ LSPAny, Location, OneOf, PartialResultParams, SymbolInformation, SymbolKind,
+ SymbolKindCapability, SymbolTag, TagSupport, Url, WorkDoneProgressParams,
+};
+
+use serde::{Deserialize, Serialize};
+
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct WorkspaceSymbolClientCapabilities {
+ /// This capability supports dynamic registration.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub dynamic_registration: Option<bool>,
+
+ /// Specific capabilities for the `SymbolKind` in the `workspace/symbol` request.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub symbol_kind: Option<SymbolKindCapability>,
+
+ /// The client supports tags on `SymbolInformation`.
+ /// Clients supporting tags have to handle unknown tags gracefully.
+ ///
+ /// @since 3.16.0
+ ///
+ #[serde(
+ default,
+ skip_serializing_if = "Option::is_none",
+ deserialize_with = "TagSupport::deserialize_compat"
+ )]
+ pub tag_support: Option<TagSupport<SymbolTag>>,
+
+ /// The client supports partial workspace symbols. The client will send the
+ /// request `workspaceSymbol/resolve` to the server to resolve additional
+ /// properties.
+ ///
+ /// @since 3.17.0
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub resolve_support: Option<WorkspaceSymbolResolveSupportCapability>,
+}
+
+/// The parameters of a Workspace Symbol Request.
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+pub struct WorkspaceSymbolParams {
+ #[serde(flatten)]
+ pub partial_result_params: PartialResultParams,
+
+ #[serde(flatten)]
+ pub work_done_progress_params: WorkDoneProgressParams,
+
+ /// A non-empty query string
+ pub query: String,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+pub struct WorkspaceSymbolResolveSupportCapability {
+ /// The properties that a client can resolve lazily. Usually
+ /// `location.range`
+ pub properties: Vec<String>,
+}
+
+/// A special workspace symbol that supports locations without a range
+///
+/// @since 3.17.0
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct WorkspaceSymbol {
+ /// The name of this symbol.
+ pub name: String,
+
+ /// The kind of this symbol.
+ pub kind: SymbolKind,
+
+ /// Tags for this symbol.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub tags: Option<Vec<SymbolTag>>,
+
+ /// The name of the symbol containing this symbol. This information is for
+ /// user interface purposes (e.g. to render a qualifier in the user interface
+ /// if necessary). It can't be used to re-infer a hierarchy for the document
+ /// symbols.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub container_name: Option<String>,
+
+ /// The location of this symbol. Whether a server is allowed to
+ /// return a location without a range depends on the client
+ /// capability `workspace.symbol.resolveSupport`.
+ ///
+ /// See also `SymbolInformation.location`.
+ pub location: OneOf<Location, WorkspaceLocation>,
+
+ /// A data entry field that is preserved on a workspace symbol between a
+ /// workspace symbol request and a workspace symbol resolve request.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub data: Option<LSPAny>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+pub struct WorkspaceLocation {
+ pub uri: Url,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Serialize, Deserialize)]
+#[serde(untagged)]
+pub enum WorkspaceSymbolResponse {
+ Flat(Vec<SymbolInformation>),
+ Nested(Vec<WorkspaceSymbol>),
+}
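A hedged sketch of a nested `workspace/symbol` response in which one symbol omits its range, assuming the types above are re-exported at the crate root and that `SymbolKind::FUNCTION` and the `OneOf::{Left, Right}` variants exist as elsewhere in this crate; names and positions are illustrative only:

use lsp_types::{
    Location, OneOf, Position, Range, SymbolKind, Url, WorkspaceLocation, WorkspaceSymbol,
    WorkspaceSymbolResponse,
};

fn example_workspace_symbols() -> WorkspaceSymbolResponse {
    let uri = Url::parse("file:///project/src/lib.rs").unwrap();

    // Full location: URI plus range.
    let with_range = WorkspaceSymbol {
        name: "parse_config".to_string(),
        kind: SymbolKind::FUNCTION,
        tags: None,
        container_name: Some("config".to_string()),
        location: OneOf::Left(Location {
            uri: uri.clone(),
            range: Range {
                start: Position { line: 5, character: 0 },
                end: Position { line: 20, character: 1 },
            },
        }),
        data: None,
    };

    // Range-less location; the client resolves `location.range` lazily via
    // `workspaceSymbol/resolve` when it advertised `resolveSupport`.
    let without_range = WorkspaceSymbol {
        name: "load".to_string(),
        kind: SymbolKind::FUNCTION,
        tags: None,
        container_name: None,
        location: OneOf::Right(WorkspaceLocation { uri }),
        data: None,
    };

    WorkspaceSymbolResponse::Nested(vec![with_range, without_range])
}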
diff --git a/vendor/lsp-types/tests/lsif.rs b/vendor/lsp-types/tests/lsif.rs
index dd4a7230a..9f5c97798 100644
--- a/vendor/lsp-types/tests/lsif.rs
+++ b/vendor/lsp-types/tests/lsif.rs
@@ -1,16 +1,16 @@
-use lsp_types::lsif::Entry;
-
-#[test]
-#[cfg(unix)]
-fn run() {
- let jsonl = include_str!("tsc-unix.lsif");
- for json in jsonl.lines() {
- let r = serde_json::from_str::<Entry>(&json).expect(&format!("can not parse {}", json));
- let x = serde_json::to_string(&r).expect(&format!("can not serialize {}", json));
- assert_eq!(
- serde_json::from_str::<serde_json::Value>(&x).unwrap(),
- serde_json::from_str::<serde_json::Value>(json).unwrap(),
- "and strings:\ntheir: {}\n our: {}", json, x,
- );
- }
-}
+use lsp_types::lsif::Entry;
+
+#[test]
+#[cfg(unix)]
+fn run() {
+ let jsonl = include_str!("tsc-unix.lsif");
+ for json in jsonl.lines() {
+ let r = serde_json::from_str::<Entry>(&json).expect(&format!("can not parse {}", json));
+ let x = serde_json::to_string(&r).expect(&format!("can not serialize {}", json));
+ assert_eq!(
+ serde_json::from_str::<serde_json::Value>(&x).unwrap(),
+ serde_json::from_str::<serde_json::Value>(json).unwrap(),
+ "and strings:\ntheir: {}\n our: {}", json, x,
+ );
+ }
+}
diff --git a/vendor/lsp-types/tests/tsc-unix.lsif b/vendor/lsp-types/tests/tsc-unix.lsif
index 24c4dc046..27f6d5135 100644
--- a/vendor/lsp-types/tests/tsc-unix.lsif
+++ b/vendor/lsp-types/tests/tsc-unix.lsif
@@ -1,135 +1,135 @@
-{"id":1,"type":"vertex","label":"metaData","version":"0.4.3","projectRoot":"file:///media/hamid/nv1/garbage","positionEncoding":"utf-16","toolInfo":{"name":"lsif-tsc","args":["-p","."],"version":"0.7.2"}}
-{"id":2,"type":"vertex","label":"project","kind":"typescript"}
-{"id":3,"type":"vertex","label":"$event","kind":"begin","scope":"project","data":2}
-{"id":4,"type":"vertex","label":"document","uri":"file:///media/hamid/nv1/garbage/a.ts","languageId":"typescript"}
-{"id":5,"type":"vertex","label":"$event","kind":"begin","scope":"document","data":4}
-{"id":6,"type":"vertex","label":"resultSet"}
-{"id":7,"type":"vertex","label":"moniker","kind":"export","unique":"document","scheme":"tsc","identifier":"a:"}
-{"id":8,"type":"edge","label":"moniker","outV":6,"inV":7}
-{"id":9,"type":"vertex","label":"packageInformation","name":"garbage","manager":"npm","version":"1.0.0"}
-{"id":10,"type":"vertex","label":"moniker","kind":"export","unique":"global","scheme":"npm","identifier":"garbage:a:"}
-{"id":11,"type":"edge","label":"packageInformation","outV":10,"inV":9}
-{"id":12,"type":"edge","label":"nextMoniker","outV":7,"inV":10}
-{"id":13,"type":"vertex","label":"range","start":{"line":0,"character":0},"end":{"line":0,"character":0},"tag":{"type":"definition","text":"","kind":7,"fullRange":{"start":{"line":0,"character":0},"end":{"line":3,"character":0}}}}
-{"id":14,"type":"edge","label":"next","outV":13,"inV":6}
-{"id":15,"type":"vertex","label":"resultSet"}
-{"id":16,"type":"vertex","label":"moniker","kind":"export","unique":"document","scheme":"tsc","identifier":"a:a"}
-{"id":17,"type":"edge","label":"moniker","outV":15,"inV":16}
-{"id":18,"type":"vertex","label":"moniker","kind":"export","unique":"global","scheme":"npm","identifier":"garbage:a:a"}
-{"id":19,"type":"edge","label":"packageInformation","outV":18,"inV":9}
-{"id":20,"type":"edge","label":"nextMoniker","outV":16,"inV":18}
-{"id":21,"type":"vertex","label":"range","start":{"line":0,"character":13},"end":{"line":0,"character":14},"tag":{"type":"definition","text":"a","kind":7,"fullRange":{"start":{"line":0,"character":13},"end":{"line":2,"character":1}}}}
-{"id":22,"type":"edge","label":"next","outV":21,"inV":15}
-{"id":23,"type":"vertex","label":"hoverResult","result":{"contents":[{"language":"typescript","value":"const a: (x: number, y: string) => string"}]}}
-{"id":24,"type":"edge","label":"textDocument/hover","outV":15,"inV":23}
-{"id":25,"type":"vertex","label":"resultSet"}
-{"id":26,"type":"vertex","label":"moniker","kind":"local","unique":"document","scheme":"tsc","identifier":"dW50H+qRDGdPj+RYWpwLSg=="}
-{"id":27,"type":"edge","label":"moniker","outV":25,"inV":26}
-{"id":28,"type":"vertex","label":"range","start":{"line":0,"character":18},"end":{"line":0,"character":19},"tag":{"type":"definition","text":"x","kind":7,"fullRange":{"start":{"line":0,"character":18},"end":{"line":0,"character":27}}}}
-{"id":29,"type":"edge","label":"next","outV":28,"inV":25}
-{"id":30,"type":"vertex","label":"hoverResult","result":{"contents":[{"language":"typescript","value":"(parameter) x: number"}]}}
-{"id":31,"type":"edge","label":"textDocument/hover","outV":25,"inV":30}
-{"id":32,"type":"vertex","label":"resultSet"}
-{"id":33,"type":"vertex","label":"moniker","kind":"local","unique":"document","scheme":"tsc","identifier":"W2CLXh2PW8+ib7xga5YF4A=="}
-{"id":34,"type":"edge","label":"moniker","outV":32,"inV":33}
-{"id":35,"type":"vertex","label":"range","start":{"line":0,"character":29},"end":{"line":0,"character":30},"tag":{"type":"definition","text":"y","kind":7,"fullRange":{"start":{"line":0,"character":29},"end":{"line":0,"character":38}}}}
-{"id":36,"type":"edge","label":"next","outV":35,"inV":32}
-{"id":37,"type":"vertex","label":"hoverResult","result":{"contents":[{"language":"typescript","value":"(parameter) y: string"}]}}
-{"id":38,"type":"edge","label":"textDocument/hover","outV":32,"inV":37}
-{"id":39,"type":"vertex","label":"resultSet"}
-{"id":40,"type":"vertex","label":"moniker","kind":"local","unique":"document","scheme":"tsc","identifier":"WLUBvuhxsSADYrjTwQnBoA=="}
-{"id":41,"type":"edge","label":"moniker","outV":39,"inV":40}
-{"id":42,"type":"vertex","label":"range","start":{"line":0,"character":40},"end":{"line":0,"character":42},"tag":{"type":"reference","text":"=>"}}
-{"id":43,"type":"edge","label":"next","outV":42,"inV":39}
-{"id":44,"type":"vertex","label":"range","start":{"line":1,"character":18},"end":{"line":1,"character":19},"tag":{"type":"reference","text":"y"}}
-{"id":45,"type":"edge","label":"next","outV":44,"inV":32}
-{"id":46,"type":"vertex","label":"document","uri":"file:///media/hamid/nv1/garbage/node_modules/typescript-lsif/lib/lib.es2015.core.d.ts","languageId":"typescript"}
-{"id":47,"type":"vertex","label":"$event","kind":"begin","scope":"document","data":46}
-{"id":48,"type":"vertex","label":"resultSet"}
-{"id":49,"type":"vertex","label":"moniker","kind":"export","unique":"document","scheme":"tsc","identifier":"node_modules/typescript-lsif/lib/lib.es2015.core:String.repeat"}
-{"id":50,"type":"edge","label":"moniker","outV":48,"inV":49}
-{"id":51,"type":"vertex","label":"moniker","kind":"export","unique":"global","scheme":"npm","identifier":"garbage:node_modules/typescript-lsif/lib/lib.es2015.core:String.repeat"}
-{"id":52,"type":"edge","label":"packageInformation","outV":51,"inV":9}
-{"id":53,"type":"edge","label":"nextMoniker","outV":49,"inV":51}
-{"id":54,"type":"vertex","label":"range","start":{"line":446,"character":4},"end":{"line":446,"character":10},"tag":{"type":"definition","text":"repeat","kind":7,"fullRange":{"start":{"line":446,"character":4},"end":{"line":446,"character":34}}}}
-{"id":55,"type":"edge","label":"next","outV":54,"inV":48}
-{"id":56,"type":"vertex","label":"hoverResult","result":{"contents":[{"language":"typescript","value":"(method) String.repeat(count: number): string"},"Returns a String value that is made from count copies appended together. If count is 0,\nthe empty string is returned."]}}
-{"id":57,"type":"edge","label":"textDocument/hover","outV":48,"inV":56}
-{"id":58,"type":"vertex","label":"range","start":{"line":1,"character":20},"end":{"line":1,"character":26},"tag":{"type":"reference","text":"repeat"}}
-{"id":59,"type":"edge","label":"next","outV":58,"inV":48}
-{"id":60,"type":"vertex","label":"range","start":{"line":1,"character":27},"end":{"line":1,"character":28},"tag":{"type":"reference","text":"x"}}
-{"id":61,"type":"edge","label":"next","outV":60,"inV":25}
-{"id":62,"type":"vertex","label":"definitionResult"}
-{"id":63,"type":"edge","label":"textDocument/definition","outV":6,"inV":62}
-{"id":64,"type":"edge","label":"item","outV":62,"inVs":[13],"document":4}
-{"id":65,"type":"vertex","label":"referenceResult"}
-{"id":66,"type":"edge","label":"textDocument/references","outV":6,"inV":65}
-{"id":67,"type":"edge","label":"item","outV":65,"inVs":[13],"document":4,"property":"definitions"}
-{"id":68,"type":"vertex","label":"definitionResult"}
-{"id":69,"type":"edge","label":"textDocument/definition","outV":15,"inV":68}
-{"id":70,"type":"edge","label":"item","outV":68,"inVs":[21],"document":4}
-{"id":71,"type":"vertex","label":"referenceResult"}
-{"id":72,"type":"edge","label":"textDocument/references","outV":15,"inV":71}
-{"id":73,"type":"edge","label":"item","outV":71,"inVs":[21],"document":4,"property":"definitions"}
-{"id":74,"type":"vertex","label":"definitionResult"}
-{"id":75,"type":"edge","label":"textDocument/definition","outV":25,"inV":74}
-{"id":76,"type":"edge","label":"item","outV":74,"inVs":[28],"document":4}
-{"id":77,"type":"vertex","label":"referenceResult"}
-{"id":78,"type":"edge","label":"textDocument/references","outV":25,"inV":77}
-{"id":79,"type":"edge","label":"item","outV":77,"inVs":[28],"document":4,"property":"definitions"}
-{"id":80,"type":"edge","label":"item","outV":77,"inVs":[60],"document":4,"property":"references"}
-{"id":81,"type":"vertex","label":"definitionResult"}
-{"id":82,"type":"edge","label":"textDocument/definition","outV":32,"inV":81}
-{"id":83,"type":"edge","label":"item","outV":81,"inVs":[35],"document":4}
-{"id":84,"type":"vertex","label":"referenceResult"}
-{"id":85,"type":"edge","label":"textDocument/references","outV":32,"inV":84}
-{"id":86,"type":"edge","label":"item","outV":84,"inVs":[35],"document":4,"property":"definitions"}
-{"id":87,"type":"edge","label":"item","outV":84,"inVs":[44],"document":4,"property":"references"}
-{"id":88,"type":"vertex","label":"referenceResult"}
-{"id":89,"type":"edge","label":"textDocument/references","outV":39,"inV":88}
-{"id":90,"type":"edge","label":"item","outV":88,"inVs":[42],"document":4,"property":"references"}
-{"id":91,"type":"vertex","label":"referenceResult"}
-{"id":92,"type":"edge","label":"textDocument/references","outV":48,"inV":91}
-{"id":93,"type":"edge","label":"item","outV":91,"inVs":[58],"document":4,"property":"references"}
-{"id":94,"type":"vertex","label":"document","uri":"file:///media/hamid/nv1/garbage/b.ts","languageId":"typescript"}
-{"id":95,"type":"vertex","label":"$event","kind":"begin","scope":"document","data":94}
-{"id":96,"type":"vertex","label":"resultSet"}
-{"id":97,"type":"vertex","label":"moniker","kind":"export","unique":"document","scheme":"tsc","identifier":"b:"}
-{"id":98,"type":"edge","label":"moniker","outV":96,"inV":97}
-{"id":99,"type":"vertex","label":"moniker","kind":"export","unique":"global","scheme":"npm","identifier":"garbage:b:"}
-{"id":100,"type":"edge","label":"packageInformation","outV":99,"inV":9}
-{"id":101,"type":"edge","label":"nextMoniker","outV":97,"inV":99}
-{"id":102,"type":"vertex","label":"range","start":{"line":0,"character":0},"end":{"line":0,"character":0},"tag":{"type":"definition","text":"","kind":7,"fullRange":{"start":{"line":0,"character":0},"end":{"line":3,"character":0}}}}
-{"id":103,"type":"edge","label":"next","outV":102,"inV":96}
-{"id":104,"type":"vertex","label":"resultSet"}
-{"id":105,"type":"edge","label":"next","outV":104,"inV":15}
-{"id":106,"type":"vertex","label":"moniker","kind":"local","unique":"document","scheme":"tsc","identifier":"sNwThRvTZlfLwz6po1s2Zg=="}
-{"id":107,"type":"edge","label":"moniker","outV":104,"inV":106}
-{"id":108,"type":"vertex","label":"range","start":{"line":0,"character":9},"end":{"line":0,"character":10},"tag":{"type":"definition","text":"a","kind":7,"fullRange":{"start":{"line":0,"character":9},"end":{"line":0,"character":10}}}}
-{"id":109,"type":"edge","label":"next","outV":108,"inV":104}
-{"id":110,"type":"vertex","label":"hoverResult","result":{"contents":[{"language":"typescript","value":"(alias) const a: (x: number, y: string) => string\nimport a"}]}}
-{"id":111,"type":"edge","label":"textDocument/hover","outV":104,"inV":110}
-{"id":112,"type":"vertex","label":"range","start":{"line":0,"character":18},"end":{"line":0,"character":23},"tag":{"type":"reference","text":"\"./a\""}}
-{"id":113,"type":"edge","label":"next","outV":112,"inV":6}
-{"id":114,"type":"vertex","label":"range","start":{"line":2,"character":0},"end":{"line":2,"character":1},"tag":{"type":"reference","text":"a"}}
-{"id":115,"type":"edge","label":"next","outV":114,"inV":104}
-{"id":116,"type":"vertex","label":"definitionResult"}
-{"id":117,"type":"edge","label":"textDocument/definition","outV":96,"inV":116}
-{"id":118,"type":"edge","label":"item","outV":116,"inVs":[102],"document":94}
-{"id":119,"type":"vertex","label":"referenceResult"}
-{"id":120,"type":"edge","label":"textDocument/references","outV":96,"inV":119}
-{"id":121,"type":"edge","label":"item","outV":119,"inVs":[102],"document":94,"property":"definitions"}
-{"id":122,"type":"edge","label":"item","outV":71,"inVs":[108,114],"document":94,"property":"references"}
-{"id":123,"type":"edge","label":"item","outV":65,"inVs":[112],"document":94,"property":"references"}
-{"id":124,"type":"vertex","label":"definitionResult"}
-{"id":125,"type":"edge","label":"textDocument/definition","outV":48,"inV":124}
-{"id":126,"type":"edge","label":"item","outV":124,"inVs":[54],"document":46}
-{"id":127,"type":"edge","label":"item","outV":91,"inVs":[54],"document":46,"property":"definitions"}
-{"id":128,"type":"edge","label":"contains","outV":4,"inVs":[13,21,28,35,42,44,58,60]}
-{"id":129,"type":"vertex","label":"$event","kind":"end","scope":"document","data":4}
-{"id":130,"type":"edge","label":"contains","outV":46,"inVs":[54]}
-{"id":131,"type":"vertex","label":"$event","kind":"end","scope":"document","data":46}
-{"id":132,"type":"edge","label":"contains","outV":94,"inVs":[102,108,112,114]}
-{"id":133,"type":"vertex","label":"$event","kind":"end","scope":"document","data":94}
-{"id":134,"type":"edge","label":"contains","outV":2,"inVs":[4,46,94]}
-{"id":135,"type":"vertex","label":"$event","kind":"end","scope":"project","data":2}
+{"id":1,"type":"vertex","label":"metaData","version":"0.4.3","projectRoot":"file:///media/hamid/nv1/garbage","positionEncoding":"utf-16","toolInfo":{"name":"lsif-tsc","args":["-p","."],"version":"0.7.2"}}
+{"id":2,"type":"vertex","label":"project","kind":"typescript"}
+{"id":3,"type":"vertex","label":"$event","kind":"begin","scope":"project","data":2}
+{"id":4,"type":"vertex","label":"document","uri":"file:///media/hamid/nv1/garbage/a.ts","languageId":"typescript"}
+{"id":5,"type":"vertex","label":"$event","kind":"begin","scope":"document","data":4}
+{"id":6,"type":"vertex","label":"resultSet"}
+{"id":7,"type":"vertex","label":"moniker","kind":"export","unique":"document","scheme":"tsc","identifier":"a:"}
+{"id":8,"type":"edge","label":"moniker","outV":6,"inV":7}
+{"id":9,"type":"vertex","label":"packageInformation","name":"garbage","manager":"npm","version":"1.0.0"}
+{"id":10,"type":"vertex","label":"moniker","kind":"export","unique":"global","scheme":"npm","identifier":"garbage:a:"}
+{"id":11,"type":"edge","label":"packageInformation","outV":10,"inV":9}
+{"id":12,"type":"edge","label":"nextMoniker","outV":7,"inV":10}
+{"id":13,"type":"vertex","label":"range","start":{"line":0,"character":0},"end":{"line":0,"character":0},"tag":{"type":"definition","text":"","kind":7,"fullRange":{"start":{"line":0,"character":0},"end":{"line":3,"character":0}}}}
+{"id":14,"type":"edge","label":"next","outV":13,"inV":6}
+{"id":15,"type":"vertex","label":"resultSet"}
+{"id":16,"type":"vertex","label":"moniker","kind":"export","unique":"document","scheme":"tsc","identifier":"a:a"}
+{"id":17,"type":"edge","label":"moniker","outV":15,"inV":16}
+{"id":18,"type":"vertex","label":"moniker","kind":"export","unique":"global","scheme":"npm","identifier":"garbage:a:a"}
+{"id":19,"type":"edge","label":"packageInformation","outV":18,"inV":9}
+{"id":20,"type":"edge","label":"nextMoniker","outV":16,"inV":18}
+{"id":21,"type":"vertex","label":"range","start":{"line":0,"character":13},"end":{"line":0,"character":14},"tag":{"type":"definition","text":"a","kind":7,"fullRange":{"start":{"line":0,"character":13},"end":{"line":2,"character":1}}}}
+{"id":22,"type":"edge","label":"next","outV":21,"inV":15}
+{"id":23,"type":"vertex","label":"hoverResult","result":{"contents":[{"language":"typescript","value":"const a: (x: number, y: string) => string"}]}}
+{"id":24,"type":"edge","label":"textDocument/hover","outV":15,"inV":23}
+{"id":25,"type":"vertex","label":"resultSet"}
+{"id":26,"type":"vertex","label":"moniker","kind":"local","unique":"document","scheme":"tsc","identifier":"dW50H+qRDGdPj+RYWpwLSg=="}
+{"id":27,"type":"edge","label":"moniker","outV":25,"inV":26}
+{"id":28,"type":"vertex","label":"range","start":{"line":0,"character":18},"end":{"line":0,"character":19},"tag":{"type":"definition","text":"x","kind":7,"fullRange":{"start":{"line":0,"character":18},"end":{"line":0,"character":27}}}}
+{"id":29,"type":"edge","label":"next","outV":28,"inV":25}
+{"id":30,"type":"vertex","label":"hoverResult","result":{"contents":[{"language":"typescript","value":"(parameter) x: number"}]}}
+{"id":31,"type":"edge","label":"textDocument/hover","outV":25,"inV":30}
+{"id":32,"type":"vertex","label":"resultSet"}
+{"id":33,"type":"vertex","label":"moniker","kind":"local","unique":"document","scheme":"tsc","identifier":"W2CLXh2PW8+ib7xga5YF4A=="}
+{"id":34,"type":"edge","label":"moniker","outV":32,"inV":33}
+{"id":35,"type":"vertex","label":"range","start":{"line":0,"character":29},"end":{"line":0,"character":30},"tag":{"type":"definition","text":"y","kind":7,"fullRange":{"start":{"line":0,"character":29},"end":{"line":0,"character":38}}}}
+{"id":36,"type":"edge","label":"next","outV":35,"inV":32}
+{"id":37,"type":"vertex","label":"hoverResult","result":{"contents":[{"language":"typescript","value":"(parameter) y: string"}]}}
+{"id":38,"type":"edge","label":"textDocument/hover","outV":32,"inV":37}
+{"id":39,"type":"vertex","label":"resultSet"}
+{"id":40,"type":"vertex","label":"moniker","kind":"local","unique":"document","scheme":"tsc","identifier":"WLUBvuhxsSADYrjTwQnBoA=="}
+{"id":41,"type":"edge","label":"moniker","outV":39,"inV":40}
+{"id":42,"type":"vertex","label":"range","start":{"line":0,"character":40},"end":{"line":0,"character":42},"tag":{"type":"reference","text":"=>"}}
+{"id":43,"type":"edge","label":"next","outV":42,"inV":39}
+{"id":44,"type":"vertex","label":"range","start":{"line":1,"character":18},"end":{"line":1,"character":19},"tag":{"type":"reference","text":"y"}}
+{"id":45,"type":"edge","label":"next","outV":44,"inV":32}
+{"id":46,"type":"vertex","label":"document","uri":"file:///media/hamid/nv1/garbage/node_modules/typescript-lsif/lib/lib.es2015.core.d.ts","languageId":"typescript"}
+{"id":47,"type":"vertex","label":"$event","kind":"begin","scope":"document","data":46}
+{"id":48,"type":"vertex","label":"resultSet"}
+{"id":49,"type":"vertex","label":"moniker","kind":"export","unique":"document","scheme":"tsc","identifier":"node_modules/typescript-lsif/lib/lib.es2015.core:String.repeat"}
+{"id":50,"type":"edge","label":"moniker","outV":48,"inV":49}
+{"id":51,"type":"vertex","label":"moniker","kind":"export","unique":"global","scheme":"npm","identifier":"garbage:node_modules/typescript-lsif/lib/lib.es2015.core:String.repeat"}
+{"id":52,"type":"edge","label":"packageInformation","outV":51,"inV":9}
+{"id":53,"type":"edge","label":"nextMoniker","outV":49,"inV":51}
+{"id":54,"type":"vertex","label":"range","start":{"line":446,"character":4},"end":{"line":446,"character":10},"tag":{"type":"definition","text":"repeat","kind":7,"fullRange":{"start":{"line":446,"character":4},"end":{"line":446,"character":34}}}}
+{"id":55,"type":"edge","label":"next","outV":54,"inV":48}
+{"id":56,"type":"vertex","label":"hoverResult","result":{"contents":[{"language":"typescript","value":"(method) String.repeat(count: number): string"},"Returns a String value that is made from count copies appended together. If count is 0,\nthe empty string is returned."]}}
+{"id":57,"type":"edge","label":"textDocument/hover","outV":48,"inV":56}
+{"id":58,"type":"vertex","label":"range","start":{"line":1,"character":20},"end":{"line":1,"character":26},"tag":{"type":"reference","text":"repeat"}}
+{"id":59,"type":"edge","label":"next","outV":58,"inV":48}
+{"id":60,"type":"vertex","label":"range","start":{"line":1,"character":27},"end":{"line":1,"character":28},"tag":{"type":"reference","text":"x"}}
+{"id":61,"type":"edge","label":"next","outV":60,"inV":25}
+{"id":62,"type":"vertex","label":"definitionResult"}
+{"id":63,"type":"edge","label":"textDocument/definition","outV":6,"inV":62}
+{"id":64,"type":"edge","label":"item","outV":62,"inVs":[13],"document":4}
+{"id":65,"type":"vertex","label":"referenceResult"}
+{"id":66,"type":"edge","label":"textDocument/references","outV":6,"inV":65}
+{"id":67,"type":"edge","label":"item","outV":65,"inVs":[13],"document":4,"property":"definitions"}
+{"id":68,"type":"vertex","label":"definitionResult"}
+{"id":69,"type":"edge","label":"textDocument/definition","outV":15,"inV":68}
+{"id":70,"type":"edge","label":"item","outV":68,"inVs":[21],"document":4}
+{"id":71,"type":"vertex","label":"referenceResult"}
+{"id":72,"type":"edge","label":"textDocument/references","outV":15,"inV":71}
+{"id":73,"type":"edge","label":"item","outV":71,"inVs":[21],"document":4,"property":"definitions"}
+{"id":74,"type":"vertex","label":"definitionResult"}
+{"id":75,"type":"edge","label":"textDocument/definition","outV":25,"inV":74}
+{"id":76,"type":"edge","label":"item","outV":74,"inVs":[28],"document":4}
+{"id":77,"type":"vertex","label":"referenceResult"}
+{"id":78,"type":"edge","label":"textDocument/references","outV":25,"inV":77}
+{"id":79,"type":"edge","label":"item","outV":77,"inVs":[28],"document":4,"property":"definitions"}
+{"id":80,"type":"edge","label":"item","outV":77,"inVs":[60],"document":4,"property":"references"}
+{"id":81,"type":"vertex","label":"definitionResult"}
+{"id":82,"type":"edge","label":"textDocument/definition","outV":32,"inV":81}
+{"id":83,"type":"edge","label":"item","outV":81,"inVs":[35],"document":4}
+{"id":84,"type":"vertex","label":"referenceResult"}
+{"id":85,"type":"edge","label":"textDocument/references","outV":32,"inV":84}
+{"id":86,"type":"edge","label":"item","outV":84,"inVs":[35],"document":4,"property":"definitions"}
+{"id":87,"type":"edge","label":"item","outV":84,"inVs":[44],"document":4,"property":"references"}
+{"id":88,"type":"vertex","label":"referenceResult"}
+{"id":89,"type":"edge","label":"textDocument/references","outV":39,"inV":88}
+{"id":90,"type":"edge","label":"item","outV":88,"inVs":[42],"document":4,"property":"references"}
+{"id":91,"type":"vertex","label":"referenceResult"}
+{"id":92,"type":"edge","label":"textDocument/references","outV":48,"inV":91}
+{"id":93,"type":"edge","label":"item","outV":91,"inVs":[58],"document":4,"property":"references"}
+{"id":94,"type":"vertex","label":"document","uri":"file:///media/hamid/nv1/garbage/b.ts","languageId":"typescript"}
+{"id":95,"type":"vertex","label":"$event","kind":"begin","scope":"document","data":94}
+{"id":96,"type":"vertex","label":"resultSet"}
+{"id":97,"type":"vertex","label":"moniker","kind":"export","unique":"document","scheme":"tsc","identifier":"b:"}
+{"id":98,"type":"edge","label":"moniker","outV":96,"inV":97}
+{"id":99,"type":"vertex","label":"moniker","kind":"export","unique":"global","scheme":"npm","identifier":"garbage:b:"}
+{"id":100,"type":"edge","label":"packageInformation","outV":99,"inV":9}
+{"id":101,"type":"edge","label":"nextMoniker","outV":97,"inV":99}
+{"id":102,"type":"vertex","label":"range","start":{"line":0,"character":0},"end":{"line":0,"character":0},"tag":{"type":"definition","text":"","kind":7,"fullRange":{"start":{"line":0,"character":0},"end":{"line":3,"character":0}}}}
+{"id":103,"type":"edge","label":"next","outV":102,"inV":96}
+{"id":104,"type":"vertex","label":"resultSet"}
+{"id":105,"type":"edge","label":"next","outV":104,"inV":15}
+{"id":106,"type":"vertex","label":"moniker","kind":"local","unique":"document","scheme":"tsc","identifier":"sNwThRvTZlfLwz6po1s2Zg=="}
+{"id":107,"type":"edge","label":"moniker","outV":104,"inV":106}
+{"id":108,"type":"vertex","label":"range","start":{"line":0,"character":9},"end":{"line":0,"character":10},"tag":{"type":"definition","text":"a","kind":7,"fullRange":{"start":{"line":0,"character":9},"end":{"line":0,"character":10}}}}
+{"id":109,"type":"edge","label":"next","outV":108,"inV":104}
+{"id":110,"type":"vertex","label":"hoverResult","result":{"contents":[{"language":"typescript","value":"(alias) const a: (x: number, y: string) => string\nimport a"}]}}
+{"id":111,"type":"edge","label":"textDocument/hover","outV":104,"inV":110}
+{"id":112,"type":"vertex","label":"range","start":{"line":0,"character":18},"end":{"line":0,"character":23},"tag":{"type":"reference","text":"\"./a\""}}
+{"id":113,"type":"edge","label":"next","outV":112,"inV":6}
+{"id":114,"type":"vertex","label":"range","start":{"line":2,"character":0},"end":{"line":2,"character":1},"tag":{"type":"reference","text":"a"}}
+{"id":115,"type":"edge","label":"next","outV":114,"inV":104}
+{"id":116,"type":"vertex","label":"definitionResult"}
+{"id":117,"type":"edge","label":"textDocument/definition","outV":96,"inV":116}
+{"id":118,"type":"edge","label":"item","outV":116,"inVs":[102],"document":94}
+{"id":119,"type":"vertex","label":"referenceResult"}
+{"id":120,"type":"edge","label":"textDocument/references","outV":96,"inV":119}
+{"id":121,"type":"edge","label":"item","outV":119,"inVs":[102],"document":94,"property":"definitions"}
+{"id":122,"type":"edge","label":"item","outV":71,"inVs":[108,114],"document":94,"property":"references"}
+{"id":123,"type":"edge","label":"item","outV":65,"inVs":[112],"document":94,"property":"references"}
+{"id":124,"type":"vertex","label":"definitionResult"}
+{"id":125,"type":"edge","label":"textDocument/definition","outV":48,"inV":124}
+{"id":126,"type":"edge","label":"item","outV":124,"inVs":[54],"document":46}
+{"id":127,"type":"edge","label":"item","outV":91,"inVs":[54],"document":46,"property":"definitions"}
+{"id":128,"type":"edge","label":"contains","outV":4,"inVs":[13,21,28,35,42,44,58,60]}
+{"id":129,"type":"vertex","label":"$event","kind":"end","scope":"document","data":4}
+{"id":130,"type":"edge","label":"contains","outV":46,"inVs":[54]}
+{"id":131,"type":"vertex","label":"$event","kind":"end","scope":"document","data":46}
+{"id":132,"type":"edge","label":"contains","outV":94,"inVs":[102,108,112,114]}
+{"id":133,"type":"vertex","label":"$event","kind":"end","scope":"document","data":94}
+{"id":134,"type":"edge","label":"contains","outV":2,"inVs":[4,46,94]}
+{"id":135,"type":"vertex","label":"$event","kind":"end","scope":"project","data":2}
diff --git a/vendor/memmap2/.cargo-checksum.json b/vendor/memmap2/.cargo-checksum.json
index c189c8a9c..656d284c6 100644
--- a/vendor/memmap2/.cargo-checksum.json
+++ b/vendor/memmap2/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"CHANGELOG.md":"fb280b4d71ea601557e7ab637a7f82f73eb519dd9d16d38684bf6fe4e800a020","Cargo.lock":"80c9c7be560eee59e3eded68a49b53278198fc23495807c6a22e9c5190914ebe","Cargo.toml":"4738a7e9e1f236b95d41ef8837c351f895fcb1cc1cb82ab7e3bf39c72849cba4","LICENSE-APACHE":"04ea4849dba9dcae07113850c6f1b1a69052c625210639914eee352023f750ad","LICENSE-MIT":"0d25d03b5ab49576178ad0cae7a2648d12c17ad0452fe49c07e55e4b59aa5257","README.md":"c7b3cd928f0d1a10faa255e2f84a2a06636e55ea3e7edd4f6334dd9215151205","examples/cat.rs":"ab0b575d19662e2d5b6c7cea2756b57530e495d56acdb4fd2b56c0ba4d768dfd","src/advice.rs":"194bfd6a32495f6b0c739d083b06230ae656927767f15c1b49b245b63431cc4d","src/lib.rs":"ea214bce1c2409b8ee7a390c85f5114bc681a551d02aba23280030fe58588eb9","src/stub.rs":"f276bb5e4bc29c2129ebc660b01a1de173b9575e2e866ea5a34e0ee6318f1177","src/unix.rs":"03fe91a320d0146993019ea51e486275b8c8e13e42a995e649b8c76690e3f167","src/windows.rs":"bbb39200ac35b5517626c12efad4886f7b5d34e56256284914c556dec1567e38"},"package":"95af15f345b17af2efc8ead6080fb8bc376f8cec1b35277b935637595fe77498"} \ No newline at end of file
+{"files":{"CHANGELOG.md":"2c44b332748c22db37c7bc17f8c90edd5616c084a9b7238eef5067f3dd287813","Cargo.lock":"ed33b2c7bd22c158bd527fefa4079e62a3e98a63c2b653ba50d977a8a43f92c2","Cargo.toml":"cad69c38d627420a71d549945b9bbc4b21323b2855d63e3b98d344d2fe3ea07e","LICENSE-APACHE":"04ea4849dba9dcae07113850c6f1b1a69052c625210639914eee352023f750ad","LICENSE-MIT":"0d25d03b5ab49576178ad0cae7a2648d12c17ad0452fe49c07e55e4b59aa5257","README.md":"c7b3cd928f0d1a10faa255e2f84a2a06636e55ea3e7edd4f6334dd9215151205","examples/cat.rs":"ab0b575d19662e2d5b6c7cea2756b57530e495d56acdb4fd2b56c0ba4d768dfd","src/advice.rs":"194bfd6a32495f6b0c739d083b06230ae656927767f15c1b49b245b63431cc4d","src/lib.rs":"4f345df103c78344e07454da7b8f3941af094efd599e3d51a29ea735fab19dba","src/stub.rs":"f276bb5e4bc29c2129ebc660b01a1de173b9575e2e866ea5a34e0ee6318f1177","src/unix.rs":"03fe91a320d0146993019ea51e486275b8c8e13e42a995e649b8c76690e3f167","src/windows.rs":"bbb39200ac35b5517626c12efad4886f7b5d34e56256284914c556dec1567e38"},"package":"4b182332558b18d807c4ce1ca8ca983b34c3ee32765e47b3f0f69b90355cc1dc"} \ No newline at end of file
diff --git a/vendor/memmap2/CHANGELOG.md b/vendor/memmap2/CHANGELOG.md
index 14f481c4e..07b4eb270 100644
--- a/vendor/memmap2/CHANGELOG.md
+++ b/vendor/memmap2/CHANGELOG.md
@@ -6,6 +6,12 @@ and this project adheres to [Semantic Versioning](http://semver.org/).
## [Unreleased]
+## [0.5.8] - 2022-11-09
+### Added
+- `MmapRaw::advise`, `MmapRaw::lock` and `MmapRaw::unlock`.
+ [@diwic](https://github.com/diwic)
+- Improve `MmapMut::make_exec` documentation.
+
## [0.5.7] - 2022-08-15
### Changed
- Simplify file size retrieving code.
@@ -117,7 +123,8 @@ and this project adheres to [Semantic Versioning](http://semver.org/).
### Removed
- `winapi` dependency. [memmap-rs/pull/89](https://github.com/danburkert/memmap-rs/pull/89)
-[Unreleased]: https://github.com/RazrFalcon/memmap2-rs/compare/v0.5.7...HEAD
+[Unreleased]: https://github.com/RazrFalcon/memmap2-rs/compare/v0.5.8...HEAD
+[0.5.8]: https://github.com/RazrFalcon/memmap2-rs/compare/v0.5.7...v0.5.8
[0.5.7]: https://github.com/RazrFalcon/memmap2-rs/compare/v0.5.6...v0.5.7
[0.5.6]: https://github.com/RazrFalcon/memmap2-rs/compare/v0.5.5...v0.5.6
[0.5.5]: https://github.com/RazrFalcon/memmap2-rs/compare/v0.5.4...v0.5.5
diff --git a/vendor/memmap2/Cargo.lock b/vendor/memmap2/Cargo.lock
index 5a4a2b5f2..f5fe93bb2 100644
--- a/vendor/memmap2/Cargo.lock
+++ b/vendor/memmap2/Cargo.lock
@@ -40,7 +40,7 @@ checksum = "a7f823d141fe0a24df1e23b4af4e3c7ba9e5966ec514ea068c93024aa7deb765"
[[package]]
name = "memmap2"
-version = "0.5.7"
+version = "0.5.8"
dependencies = [
"libc",
"owning_ref",
diff --git a/vendor/memmap2/Cargo.toml b/vendor/memmap2/Cargo.toml
index 29d796270..9c3575f9b 100644
--- a/vendor/memmap2/Cargo.toml
+++ b/vendor/memmap2/Cargo.toml
@@ -12,7 +12,7 @@
[package]
edition = "2018"
name = "memmap2"
-version = "0.5.7"
+version = "0.5.8"
authors = [
"Dan Burkert <dan@danburkert.com>",
"Yevhenii Reizner <razrfalcon@gmail.com>",
@@ -26,7 +26,7 @@ keywords = [
"io",
"file",
]
-license = "MIT/Apache-2.0"
+license = "MIT OR Apache-2.0"
repository = "https://github.com/RazrFalcon/memmap2-rs"
[dependencies.stable_deref_trait]
diff --git a/vendor/memmap2/src/lib.rs b/vendor/memmap2/src/lib.rs
index 0b92ea0e1..58df7589b 100644
--- a/vendor/memmap2/src/lib.rs
+++ b/vendor/memmap2/src/lib.rs
@@ -800,6 +800,30 @@ impl MmapRaw {
pub fn flush_async_range(&self, offset: usize, len: usize) -> Result<()> {
self.inner.flush_async(offset, len)
}
+
+ /// Advise OS how this memory map will be accessed. Only supported on Unix.
+ ///
+ /// See [madvise()](https://man7.org/linux/man-pages/man2/madvise.2.html) man page.
+ #[cfg(unix)]
+ pub fn advise(&self, advice: Advice) -> Result<()> {
+ self.inner.advise(advice)
+ }
+
+ /// Lock the whole memory map into RAM. Only supported on Unix.
+ ///
+ /// See [mlock()](https://man7.org/linux/man-pages/man2/mlock.2.html) man page.
+ #[cfg(unix)]
+ pub fn lock(&mut self) -> Result<()> {
+ self.inner.lock()
+ }
+
+ /// Unlock the whole memory map. Only supported on Unix.
+ ///
+ /// See [munlock()](https://man7.org/linux/man-pages/man2/munlock.2.html) man page.
+ #[cfg(unix)]
+ pub fn unlock(&mut self) -> Result<()> {
+ self.inner.unlock()
+ }
}
impl fmt::Debug for MmapRaw {
@@ -1006,6 +1030,12 @@ impl MmapMut {
///
/// If the memory map is file-backed, the file must have been opened with execute permissions.
///
+ /// On systems with separate instructions and data caches (a category that includes many ARM
+ /// chips), a platform-specific call may be needed to ensure that the changes are visible to the
+ /// execution unit (e.g. when using this function to implement a JIT compiler). For more
+ /// details, see [this ARM write-up](https://community.arm.com/arm-community-blogs/b/architectures-and-processors-blog/posts/caches-and-self-modifying-code)
+ /// or the `man` page for [`sys_icache_invalidate`](https://developer.apple.com/library/archive/documentation/System/Conceptual/ManPages_iPhoneOS/man3/sys_icache_invalidate.3.html).
+ ///
/// # Errors
///
/// This method returns an error when the underlying system call fails, which can happen for a
@@ -1088,7 +1118,7 @@ mod test {
#[cfg(unix)]
use crate::advice::Advice;
- use std::fs::{self, OpenOptions};
+ use std::fs::OpenOptions;
use std::io::{Read, Write};
#[cfg(unix)]
use std::os::unix::io::AsRawFd;
@@ -1622,7 +1652,7 @@ mod test {
/// Returns true if a non-zero amount of memory is locked.
#[cfg(target_os = "linux")]
fn is_locked() -> bool {
- let status = &fs::read_to_string("/proc/self/status")
+ let status = &std::fs::read_to_string("/proc/self/status")
.expect("/proc/self/status should be available");
for line in status.lines() {
if line.starts_with("VmLck:") {
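The new `MmapRaw::advise`, `MmapRaw::lock`, and `MmapRaw::unlock` methods above wrap `madvise`, `mlock`, and `munlock` for the whole mapping. A minimal sketch of the advise API this release extends, shown through the crate's higher-level `Mmap` type (the `data.bin` path is hypothetical; as the docs above note, the calls are Unix-only):

```rust
use std::fs::File;
use memmap2::Mmap;

fn main() -> std::io::Result<()> {
    let file = File::open("data.bin")?;
    // Safety: the file must not be truncated or modified by another process
    // while the mapping is alive.
    let map = unsafe { Mmap::map(&file)? };

    // Hint that the mapping will be read sequentially; the same Advice values
    // apply to the MmapRaw::advise method added in this release.
    #[cfg(unix)]
    map.advise(memmap2::Advice::Sequential)?;

    println!("mapped {} bytes", map.len());
    Ok(())
}
```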
diff --git a/vendor/miniz_oxide-0.5.3/.cargo-checksum.json b/vendor/miniz_oxide-0.5.3/.cargo-checksum.json
new file mode 100644
index 000000000..a30f6d5ad
--- /dev/null
+++ b/vendor/miniz_oxide-0.5.3/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"Cargo.toml":"bdec209313b683bf315ff5134fde0322e46a27ae8ec9de303ffa22cc67826365","LICENSE":"e190940e8ad3cdd4fca962a6508ed6865d589d314b1cb055f86000e124b88d8d","LICENSE-APACHE.md":"0d542e0c8804e39aa7f37eb00da5a762149dc682d7829451287e11b938e94594","LICENSE-MIT.md":"e190940e8ad3cdd4fca962a6508ed6865d589d314b1cb055f86000e124b88d8d","LICENSE-ZLIB.md":"c89bcc058da12a0fb24402b8ea4542a21515dd1da2e8c67bba4ed9bd269f1c96","Readme.md":"b6a6668b073a3356748b642ce51b31233b6408ffcca3e52801ef473a9f7925c7","src/deflate/buffer.rs":"76bcca4e79bef412eeebdd06d2d0a4348ed9ee17edbdaa6d451d8bf03b1cde85","src/deflate/core.rs":"8087c155cb47f57a9747565857dcef59fff0a7a499abbfdb0c60e694d3234db8","src/deflate/mod.rs":"8ade5b9683b8d728fe5e8f5c23e0630165bfdbef3e56a18b1b729f9bbd4a4b1d","src/deflate/stream.rs":"016c82b09a989492c8c8ea89027d339fcf59a5ca2155e7026ac094ca74344712","src/inflate/core.rs":"49bd596d5255ac88b486f6f978ab7b26663cdab01a6ebaa41bf4559f12b0fed8","src/inflate/mod.rs":"8b65692f1bb71b4973df8da7ca9ffc8c4e4e439f6b5993e16a96d20dc3a08f52","src/inflate/output_buffer.rs":"1ae90d03ba8c9d667fe248b6066731774afdf93cc79cd3bf90e0711b963b0b72","src/inflate/stream.rs":"f82c44ffdff054aff05307ed5709e432b54d5997bb4bbfff8f760171c33c76c3","src/lib.rs":"a9d6a889415ffe3d800c8516fb0ac0bae3585010966d1fdf3b06a85330c36854","src/shared.rs":"a8c47fcb566591e39fcd50d44f3b4d0f567318b8ca36c8d732ee0d8c99a14906"},"package":"6f5c75688da582b8ffc1f1799e9db273f32133c49e048f614d22ec3256773ccc"} \ No newline at end of file
diff --git a/vendor/miniz_oxide-0.5.3/Cargo.toml b/vendor/miniz_oxide-0.5.3/Cargo.toml
new file mode 100644
index 000000000..7546128ce
--- /dev/null
+++ b/vendor/miniz_oxide-0.5.3/Cargo.toml
@@ -0,0 +1,55 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies.
+#
+# If you are reading this file be aware that the original Cargo.toml
+# will likely look very different (and much more reasonable).
+# See Cargo.toml.orig for the original contents.
+
+[package]
+edition = "2018"
+name = "miniz_oxide"
+version = "0.5.3"
+authors = ["Frommi <daniil.liferenko@gmail.com>", "oyvindln <oyvindln@users.noreply.github.com>"]
+exclude = ["benches/*", "tests/*"]
+description = "DEFLATE compression and decompression library rewritten in Rust based on miniz"
+homepage = "https://github.com/Frommi/miniz_oxide/tree/master/miniz_oxide"
+documentation = "https://docs.rs/miniz_oxide"
+readme = "Readme.md"
+keywords = ["zlib", "miniz", "deflate", "encoding"]
+categories = ["compression"]
+license = "MIT OR Zlib OR Apache-2.0"
+repository = "https://github.com/Frommi/miniz_oxide/tree/master/miniz_oxide"
+
+[lib]
+name = "miniz_oxide"
+[dependencies.adler]
+version = "1.0"
+default-features = false
+
+[dependencies.alloc]
+version = "1.0.0"
+optional = true
+package = "rustc-std-workspace-alloc"
+
+[dependencies.compiler_builtins]
+version = "0.1.2"
+optional = true
+
+[dependencies.core]
+version = "1.0.0"
+optional = true
+package = "rustc-std-workspace-core"
+
+[dependencies.simd-adler32]
+version = "0.3"
+optional = true
+default-features = false
+
+[features]
+default = []
+rustc-dep-of-std = ["core", "alloc", "compiler_builtins", "adler/rustc-dep-of-std"]
+simd = ["simd-adler32"]
diff --git a/vendor/time-macros/LICENSE-MIT b/vendor/miniz_oxide-0.5.3/LICENSE
index a11a75573..64c53792c 100644
--- a/vendor/time-macros/LICENSE-MIT
+++ b/vendor/miniz_oxide-0.5.3/LICENSE
@@ -1,4 +1,6 @@
-Copyright (c) 2022 Jacob Pratt et al.
+MIT License
+
+Copyright (c) 2017 Frommi
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
diff --git a/vendor/time-macros/LICENSE-Apache b/vendor/miniz_oxide-0.5.3/LICENSE-APACHE.md
index 7646f21e3..f433b1a53 100644
--- a/vendor/time-macros/LICENSE-Apache
+++ b/vendor/miniz_oxide-0.5.3/LICENSE-APACHE.md
@@ -175,28 +175,3 @@
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright 2022 Jacob Pratt et al.
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/vendor/miniz_oxide-0.5.3/LICENSE-MIT.md b/vendor/miniz_oxide-0.5.3/LICENSE-MIT.md
new file mode 100644
index 000000000..64c53792c
--- /dev/null
+++ b/vendor/miniz_oxide-0.5.3/LICENSE-MIT.md
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2017 Frommi
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/miniz_oxide-0.5.3/LICENSE-ZLIB.md b/vendor/miniz_oxide-0.5.3/LICENSE-ZLIB.md
new file mode 100644
index 000000000..7f513d1ac
--- /dev/null
+++ b/vendor/miniz_oxide-0.5.3/LICENSE-ZLIB.md
@@ -0,0 +1,11 @@
+Copyright (c) 2020 Frommi
+
+This software is provided 'as-is', without any express or implied warranty. In no event will the authors be held liable for any damages arising from the use of this software.
+
+Permission is granted to anyone to use this software for any purpose, including commercial applications, and to alter it and redistribute it freely, subject to the following restrictions:
+
+1. The origin of this software must not be misrepresented; you must not claim that you wrote the original software. If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required.
+
+2. Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software.
+
+3. This notice may not be removed or altered from any source distribution.
diff --git a/vendor/miniz_oxide-0.5.3/Readme.md b/vendor/miniz_oxide-0.5.3/Readme.md
new file mode 100644
index 000000000..0eac176e8
--- /dev/null
+++ b/vendor/miniz_oxide-0.5.3/Readme.md
@@ -0,0 +1,35 @@
+# miniz_oxide
+
+A fully safe, pure rust replacement for the [miniz](https://github.com/richgel999/miniz) DEFLATE/zlib encoder/decoder.
+The main intention of this crate is to be used as a back-end for the [flate2](https://github.com/alexcrichton/flate2-rs) crate, but it can also be used on its own. Using flate2 with the ```rust_backend``` feature provides an easy-to-use streaming API for miniz_oxide.
+
+The library is fully [no_std](https://docs.rust-embedded.org/book/intro/no-std.html), though it requires the use of the `alloc` and `collection` crates as it allocates memory.
+
+miniz_oxide 0.5.x requires at least Rust 1.40.0; 0.3.x requires at least Rust 1.36.0.
+
+miniz_oxide features no use of unsafe code.
+
+miniz_oxide can optionally use a SIMD-accelerated version of adler32 via the [simd-adler32](https://crates.io/crates/simd-adler32) crate by enabling the 'simd' feature. This is not enabled by default because simd-adler32 relies on SIMD intrinsics and therefore has to use unsafe code. The default setup uses the [adler](https://crates.io/crates/adler) crate, which contains no unsafe code.
+
+## Usage
+Simple compression/decompression:
+```rust
+
+use miniz_oxide::deflate::compress_to_vec;
+use miniz_oxide::inflate::decompress_to_vec;
+
+fn roundtrip(data: &[u8]) {
+ // Compress the input
+ let compressed = compress_to_vec(data, 6);
+ // Decompress the compressed input
+ let decompressed = decompress_to_vec(compressed.as_slice()).expect("Failed to decompress!");
+ // Check roundtrip succeeded
+ assert_eq!(data, decompressed);
+}
+
+fn main() {
+ roundtrip("Hello, world!".as_bytes());
+}
+
+```
+These simple functions do everything in one go and are therefore not recommended for use cases where the input size may be large or unknown; for those cases, consider using miniz_oxide via flate2 or the low-level streaming functions instead.
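A minimal sketch of the streaming route recommended above, assuming a consuming project that depends on flate2 with its `rust_backend` feature enabled (flate2 then drives miniz_oxide internally):

```rust
// Assumed Cargo.toml dependency of the consuming project:
// flate2 = { version = "1", default-features = false, features = ["rust_backend"] }
use std::io::{Read, Write};

use flate2::read::ZlibDecoder;
use flate2::write::ZlibEncoder;
use flate2::Compression;

fn main() -> std::io::Result<()> {
    // Feed input incrementally instead of compressing in one shot.
    let mut encoder = ZlibEncoder::new(Vec::new(), Compression::new(6));
    encoder.write_all(b"Hello, world!")?;
    let compressed = encoder.finish()?;

    // Decompress through a streaming reader; the same pattern works over files or sockets.
    let mut decoder = ZlibDecoder::new(&compressed[..]);
    let mut decompressed = Vec::new();
    decoder.read_to_end(&mut decompressed)?;

    assert_eq!(&decompressed[..], b"Hello, world!");
    Ok(())
}
```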
diff --git a/vendor/miniz_oxide-0.5.3/src/deflate/buffer.rs b/vendor/miniz_oxide-0.5.3/src/deflate/buffer.rs
new file mode 100644
index 000000000..f246c07df
--- /dev/null
+++ b/vendor/miniz_oxide-0.5.3/src/deflate/buffer.rs
@@ -0,0 +1,58 @@
+//! Buffer wrappers implementing `Default` so we can allocate the buffers with `Box::default()`
+//! to avoid stack copies. `Box::new()` doesn't avoid the copy at the moment, and using a `Vec` means
+//! we would lose the static length info.
+
+use crate::deflate::core::{LZ_DICT_SIZE, MAX_MATCH_LEN};
+
+/// Size of the buffer of lz77 encoded data.
+pub const LZ_CODE_BUF_SIZE: usize = 64 * 1024;
+/// Size of the output buffer.
+pub const OUT_BUF_SIZE: usize = (LZ_CODE_BUF_SIZE * 13) / 10;
+pub const LZ_DICT_FULL_SIZE: usize = LZ_DICT_SIZE + MAX_MATCH_LEN - 1 + 1;
+
+/// Size of hash values in the hash chains.
+pub const LZ_HASH_BITS: i32 = 15;
+/// How many bits to shift when updating the current hash value.
+pub const LZ_HASH_SHIFT: i32 = (LZ_HASH_BITS + 2) / 3;
+/// Size of the chained hash tables.
+pub const LZ_HASH_SIZE: usize = 1 << LZ_HASH_BITS;
+
+#[inline]
+pub fn update_hash(current_hash: u16, byte: u8) -> u16 {
+ ((current_hash << LZ_HASH_SHIFT) ^ u16::from(byte)) & (LZ_HASH_SIZE as u16 - 1)
+}
+
+pub struct HashBuffers {
+ pub dict: [u8; LZ_DICT_FULL_SIZE],
+ pub next: [u16; LZ_DICT_SIZE],
+ pub hash: [u16; LZ_DICT_SIZE],
+}
+
+impl HashBuffers {
+ #[inline]
+ pub fn reset(&mut self) {
+ *self = HashBuffers::default();
+ }
+}
+
+impl Default for HashBuffers {
+ fn default() -> HashBuffers {
+ HashBuffers {
+ dict: [0; LZ_DICT_FULL_SIZE],
+ next: [0; LZ_DICT_SIZE],
+ hash: [0; LZ_DICT_SIZE],
+ }
+ }
+}
+
+pub struct LocalBuf {
+ pub b: [u8; OUT_BUF_SIZE],
+}
+
+impl Default for LocalBuf {
+ fn default() -> LocalBuf {
+ LocalBuf {
+ b: [0; OUT_BUF_SIZE],
+ }
+ }
+}
diff --git a/vendor/miniz_oxide-0.5.3/src/deflate/core.rs b/vendor/miniz_oxide-0.5.3/src/deflate/core.rs
new file mode 100644
index 000000000..91a9bf8b8
--- /dev/null
+++ b/vendor/miniz_oxide-0.5.3/src/deflate/core.rs
@@ -0,0 +1,2463 @@
+//! Streaming compression functionality.
+
+use alloc::boxed::Box;
+use core::convert::TryInto;
+use core::{cmp, mem};
+
+use super::super::*;
+use super::deflate_flags::*;
+use super::CompressionLevel;
+use crate::deflate::buffer::{
+ update_hash, HashBuffers, LocalBuf, LZ_CODE_BUF_SIZE, LZ_DICT_FULL_SIZE, LZ_HASH_BITS,
+ LZ_HASH_SHIFT, LZ_HASH_SIZE, OUT_BUF_SIZE,
+};
+use crate::shared::{update_adler32, HUFFMAN_LENGTH_ORDER, MZ_ADLER32_INIT};
+use crate::DataFormat;
+
+// Currently not bubbled up outside this module, so can fill in with more
+// context eventually if needed.
+type Result<T, E = Error> = core::result::Result<T, E>;
+struct Error {}
+
+const MAX_PROBES_MASK: i32 = 0xFFF;
+
+const MAX_SUPPORTED_HUFF_CODESIZE: usize = 32;
+
+/// Length code for length values.
+#[rustfmt::skip]
+const LEN_SYM: [u16; 256] = [
+ 257, 258, 259, 260, 261, 262, 263, 264, 265, 265, 266, 266, 267, 267, 268, 268,
+ 269, 269, 269, 269, 270, 270, 270, 270, 271, 271, 271, 271, 272, 272, 272, 272,
+ 273, 273, 273, 273, 273, 273, 273, 273, 274, 274, 274, 274, 274, 274, 274, 274,
+ 275, 275, 275, 275, 275, 275, 275, 275, 276, 276, 276, 276, 276, 276, 276, 276,
+ 277, 277, 277, 277, 277, 277, 277, 277, 277, 277, 277, 277, 277, 277, 277, 277,
+ 278, 278, 278, 278, 278, 278, 278, 278, 278, 278, 278, 278, 278, 278, 278, 278,
+ 279, 279, 279, 279, 279, 279, 279, 279, 279, 279, 279, 279, 279, 279, 279, 279,
+ 280, 280, 280, 280, 280, 280, 280, 280, 280, 280, 280, 280, 280, 280, 280, 280,
+ 281, 281, 281, 281, 281, 281, 281, 281, 281, 281, 281, 281, 281, 281, 281, 281,
+ 281, 281, 281, 281, 281, 281, 281, 281, 281, 281, 281, 281, 281, 281, 281, 281,
+ 282, 282, 282, 282, 282, 282, 282, 282, 282, 282, 282, 282, 282, 282, 282, 282,
+ 282, 282, 282, 282, 282, 282, 282, 282, 282, 282, 282, 282, 282, 282, 282, 282,
+ 283, 283, 283, 283, 283, 283, 283, 283, 283, 283, 283, 283, 283, 283, 283, 283,
+ 283, 283, 283, 283, 283, 283, 283, 283, 283, 283, 283, 283, 283, 283, 283, 283,
+ 284, 284, 284, 284, 284, 284, 284, 284, 284, 284, 284, 284, 284, 284, 284, 284,
+ 284, 284, 284, 284, 284, 284, 284, 284, 284, 284, 284, 284, 284, 284, 284, 285
+];
+
+/// Number of extra bits for length values.
+#[rustfmt::skip]
+const LEN_EXTRA: [u8; 256] = [
+ 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+ 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
+ 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
+ 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
+ 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
+ 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
+ 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
+ 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
+ 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 0
+];
+
+/// Distance codes for distances smaller than 512.
+#[rustfmt::skip]
+const SMALL_DIST_SYM: [u8; 512] = [
+ 0, 1, 2, 3, 4, 4, 5, 5, 6, 6, 6, 6, 7, 7, 7, 7,
+ 8, 8, 8, 8, 8, 8, 8, 8, 9, 9, 9, 9, 9, 9, 9, 9,
+ 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10,
+ 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
+ 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12,
+ 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12,
+ 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13,
+ 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13,
+ 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
+ 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
+ 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
+ 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
+ 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
+ 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
+ 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
+ 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
+ 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
+ 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
+ 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
+ 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
+ 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
+ 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
+ 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
+ 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
+ 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
+ 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
+ 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
+ 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
+ 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
+ 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
+ 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
+ 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17
+];
+
+/// Number of extra bits for distances smaller than 512.
+#[rustfmt::skip]
+const SMALL_DIST_EXTRA: [u8; 512] = [
+ 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
+ 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
+ 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
+ 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
+ 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
+ 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
+ 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
+ 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
+ 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
+ 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
+ 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
+ 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
+ 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
+ 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7
+];
+
+/// Base values to calculate distances above 512.
+#[rustfmt::skip]
+const LARGE_DIST_SYM: [u8; 128] = [
+ 0, 0, 18, 19, 20, 20, 21, 21, 22, 22, 22, 22, 23, 23, 23, 23,
+ 24, 24, 24, 24, 24, 24, 24, 24, 25, 25, 25, 25, 25, 25, 25, 25,
+ 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
+ 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27,
+ 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
+ 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
+ 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29,
+ 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29
+];
+
+/// Number of extra bits for distances above 512.
+#[rustfmt::skip]
+const LARGE_DIST_EXTRA: [u8; 128] = [
+ 0, 0, 8, 8, 9, 9, 9, 9, 10, 10, 10, 10, 10, 10, 10, 10,
+ 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
+ 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12,
+ 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12,
+ 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13,
+ 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13,
+ 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13,
+ 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13
+];
+
+#[rustfmt::skip]
+const BITMASKS: [u32; 17] = [
+ 0x0000, 0x0001, 0x0003, 0x0007, 0x000F, 0x001F, 0x003F, 0x007F, 0x00FF,
+ 0x01FF, 0x03FF, 0x07FF, 0x0FFF, 0x1FFF, 0x3FFF, 0x7FFF, 0xFFFF
+];
+
+/// The maximum number of checks for matches in the hash table the compressor will make for each
+/// compression level.
+const NUM_PROBES: [u32; 11] = [0, 1, 6, 32, 16, 32, 128, 256, 512, 768, 1500];
+
+#[derive(Copy, Clone)]
+struct SymFreq {
+ key: u16,
+ sym_index: u16,
+}
+
+pub mod deflate_flags {
+ /// Whether to use a zlib wrapper.
+ pub const TDEFL_WRITE_ZLIB_HEADER: u32 = 0x0000_1000;
+ /// Should we compute the adler32 checksum.
+ pub const TDEFL_COMPUTE_ADLER32: u32 = 0x0000_2000;
+ /// Should we use greedy parsing (as opposed to lazy parsing, where we look ahead one or more
+ /// bytes to check for better matches).
+ pub const TDEFL_GREEDY_PARSING_FLAG: u32 = 0x0000_4000;
+ /// Used in miniz to skip zero-initializing hash and dict. We don't do this here, so
+ /// this flag is ignored.
+ pub const TDEFL_NONDETERMINISTIC_PARSING_FLAG: u32 = 0x0000_8000;
+ /// Only look for matches with a distance of 0.
+ pub const TDEFL_RLE_MATCHES: u32 = 0x0001_0000;
+ /// Only use matches that are at least 6 bytes long.
+ pub const TDEFL_FILTER_MATCHES: u32 = 0x0002_0000;
+ /// Force the compressor to only output static blocks. (Blocks using the default huffman codes
+ /// specified in the deflate specification.)
+ pub const TDEFL_FORCE_ALL_STATIC_BLOCKS: u32 = 0x0004_0000;
+ /// Force the compressor to only output raw/uncompressed blocks.
+ pub const TDEFL_FORCE_ALL_RAW_BLOCKS: u32 = 0x0008_0000;
+}
+
+/// Strategy setting for compression.
+///
+/// The non-default settings offer some special-case compression variants.
+#[repr(i32)]
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub enum CompressionStrategy {
+ /// Don't use any of the special strategies.
+ Default = 0,
+ /// Only use matches that are at least 5 bytes long.
+ Filtered = 1,
+ /// Don't look for matches, only huffman encode the literals.
+ HuffmanOnly = 2,
+ /// Only look for matches with a distance of 1, i.e do run-length encoding only.
+ RLE = 3,
+ /// Only use static/fixed blocks. (Blocks using the default huffman codes
+ /// specified in the deflate specification.)
+ Fixed = 4,
+}
+
+/// A list of deflate flush types.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub enum TDEFLFlush {
+ /// Normal operation.
+ ///
+ /// Compress as much as there is space for, and then return waiting for more input.
+ None = 0,
+
+ /// Try to flush all the current data and output an empty raw block.
+ Sync = 2,
+
+ /// Same as [`Sync`][Self::Sync], but reset the dictionary so that the following data does not
+ /// depend on previous data.
+ Full = 3,
+
+ /// Try to flush everything and end the deflate stream.
+ ///
+ /// On success this will yield a [`TDEFLStatus::Done`] return status.
+ Finish = 4,
+}
+
+impl From<MZFlush> for TDEFLFlush {
+ fn from(flush: MZFlush) -> Self {
+ match flush {
+ MZFlush::None => TDEFLFlush::None,
+ MZFlush::Sync => TDEFLFlush::Sync,
+ MZFlush::Full => TDEFLFlush::Full,
+ MZFlush::Finish => TDEFLFlush::Finish,
+ _ => TDEFLFlush::None, // TODO: ??? What to do ???
+ }
+ }
+}
+
+impl TDEFLFlush {
+ pub fn new(flush: i32) -> Result<Self, MZError> {
+ match flush {
+ 0 => Ok(TDEFLFlush::None),
+ 2 => Ok(TDEFLFlush::Sync),
+ 3 => Ok(TDEFLFlush::Full),
+ 4 => Ok(TDEFLFlush::Finish),
+ _ => Err(MZError::Param),
+ }
+ }
+}
+
+/// Return status of compression.
+#[repr(i32)]
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub enum TDEFLStatus {
+ /// Usage error.
+ ///
+ /// This indicates that either the [`CompressorOxide`] experienced a previous error, or the
+ /// stream has already been [`TDEFLFlush::Finish`]'d.
+ BadParam = -2,
+
+ /// Error putting data into output buffer.
+ ///
+ /// This usually indicates a too-small buffer.
+ PutBufFailed = -1,
+
+ /// Compression succeeded normally.
+ Okay = 0,
+
+ /// Compression succeeded and the deflate stream was ended.
+ ///
+ /// This is the result of calling compression with [`TDEFLFlush::Finish`].
+ Done = 1,
+}
+
+const MAX_HUFF_SYMBOLS: usize = 288;
+/// Size of hash chain for fast compression mode.
+const LEVEL1_HASH_SIZE_MASK: u32 = 4095;
+/// The number of huffman tables used by the compressor.
+/// Literal/length, Distances and Length of the huffman codes for the other two tables.
+const MAX_HUFF_TABLES: usize = 3;
+/// Literal/length codes
+const MAX_HUFF_SYMBOLS_0: usize = 288;
+/// Distance codes.
+const MAX_HUFF_SYMBOLS_1: usize = 32;
+/// Huffman length values.
+const MAX_HUFF_SYMBOLS_2: usize = 19;
+/// Size of the chained hash table.
+pub(crate) const LZ_DICT_SIZE: usize = 32_768;
+/// Mask used when stepping through the hash chains.
+const LZ_DICT_SIZE_MASK: usize = (LZ_DICT_SIZE as u32 - 1) as usize;
+/// The minimum length of a match.
+const MIN_MATCH_LEN: u8 = 3;
+/// The maximum length of a match.
+pub(crate) const MAX_MATCH_LEN: usize = 258;
+
+const DEFAULT_FLAGS: u32 = NUM_PROBES[4] | TDEFL_WRITE_ZLIB_HEADER;
+
+mod zlib {
+ const DEFAULT_CM: u8 = 8;
+ const DEFAULT_CINFO: u8 = 7 << 4;
+ const _DEFAULT_FDICT: u8 = 0;
+ const DEFAULT_CMF: u8 = DEFAULT_CM | DEFAULT_CINFO;
+ /// The 16-bit value consisting of CMF and FLG must be divisible by this to be valid.
+ const FCHECK_DIVISOR: u8 = 31;
+
+ /// Generate FCHECK from CMF and FLG (without FCHECK) so that they are correct according to the
+ /// specification, i.e. (CMF*256 + FLG) % 31 = 0.
+ /// Returns flg with the FCHECK bits added (any existing FCHECK bits are ignored).
+ fn add_fcheck(cmf: u8, flg: u8) -> u8 {
+ let rem = ((usize::from(cmf) * 256) + usize::from(flg)) % usize::from(FCHECK_DIVISOR);
+
+ // Clear existing FCHECK if any
+ let flg = flg & 0b11100000;
+
+ // Casting is safe as rem can't overflow since it is a value mod 31
+ // We can simply add the value to flg as (31 - rem) will never be above 2^5
+ flg + (FCHECK_DIVISOR - rem as u8)
+ }
+
+ fn zlib_level_from_flags(flags: u32) -> u8 {
+ use super::NUM_PROBES;
+
+ let num_probes = flags & (super::MAX_PROBES_MASK as u32);
+ if flags & super::TDEFL_GREEDY_PARSING_FLAG != 0 {
+ if num_probes <= 1 {
+ 0
+ } else {
+ 1
+ }
+ } else if num_probes >= NUM_PROBES[9] {
+ 3
+ } else {
+ 2
+ }
+ }
+
+ /// Get the zlib header for the level using the default window size and no
+ /// dictionary.
+ fn header_from_level(level: u8) -> [u8; 2] {
+ let cmf = DEFAULT_CMF;
+ [cmf, add_fcheck(cmf, (level as u8) << 6)]
+ }
+
+ /// Create a zlib header from the given compression flags.
+ /// Only level is considered.
+ pub fn header_from_flags(flags: u32) -> [u8; 2] {
+ let level = zlib_level_from_flags(flags);
+ header_from_level(level)
+ }
+
+ #[cfg(test)]
+ mod test {
+ #[test]
+ fn zlib() {
+ use super::super::*;
+ use super::*;
+
+ let test_level = |level, expected| {
+ let flags = create_comp_flags_from_zip_params(
+ level,
+ MZ_DEFAULT_WINDOW_BITS,
+ CompressionStrategy::Default as i32,
+ );
+ assert_eq!(zlib_level_from_flags(flags), expected);
+ };
+
+ assert_eq!(zlib_level_from_flags(DEFAULT_FLAGS), 2);
+ test_level(0, 0);
+ test_level(1, 0);
+ test_level(2, 1);
+ test_level(3, 1);
+ for i in 4..=8 {
+ test_level(i, 2)
+ }
+ test_level(9, 3);
+ test_level(10, 3);
+ }
+
+ #[test]
+ fn test_header() {
+ let header = super::header_from_level(3);
+ assert_eq!(
+ ((usize::from(header[0]) * 256) + usize::from(header[1])) % 31,
+ 0
+ );
+ }
+ }
+}
+
+fn memset<T: Copy>(slice: &mut [T], val: T) {
+ for x in slice {
+ *x = val
+ }
+}
+
+#[cfg(test)]
+#[inline]
+fn write_u16_le(val: u16, slice: &mut [u8], pos: usize) {
+ slice[pos] = val as u8;
+ slice[pos + 1] = (val >> 8) as u8;
+}
+
+// Read the two bytes starting at pos and interpret them as an u16.
+#[inline]
+const fn read_u16_le(slice: &[u8], pos: usize) -> u16 {
+ // The compiler is smart enough to optimize this into an unaligned load.
+ slice[pos] as u16 | ((slice[pos + 1] as u16) << 8)
+}
+
+/// Main compression struct.
+pub struct CompressorOxide {
+ lz: LZOxide,
+ params: ParamsOxide,
+ huff: Box<HuffmanOxide>,
+ dict: DictOxide,
+}
+
+impl CompressorOxide {
+ /// Create a new `CompressorOxide` with the given flags.
+ ///
+ /// # Notes
+ /// This function may be changed to take different parameters in the future.
+ pub fn new(flags: u32) -> Self {
+ CompressorOxide {
+ lz: LZOxide::new(),
+ params: ParamsOxide::new(flags),
+ /// Put HuffmanOxide on the heap with default trick to avoid
+ /// excessive stack copies.
+ huff: Box::default(),
+ dict: DictOxide::new(flags),
+ }
+ }
+
+ /// Get the adler32 checksum of the currently encoded data.
+ pub const fn adler32(&self) -> u32 {
+ self.params.adler32
+ }
+
+ /// Get the return status of the previous [`compress`](fn.compress.html)
+ /// call with this compressor.
+ pub const fn prev_return_status(&self) -> TDEFLStatus {
+ self.params.prev_return_status
+ }
+
+ /// Get the raw compressor flags.
+ ///
+ /// # Notes
+ /// This function may be deprecated or changed in the future to use more rust-style flags.
+ pub const fn flags(&self) -> i32 {
+ self.params.flags as i32
+ }
+
+ /// Returns whether the compressor is wrapping the data in a zlib format or not.
+ pub fn data_format(&self) -> DataFormat {
+ if (self.params.flags & TDEFL_WRITE_ZLIB_HEADER) != 0 {
+ DataFormat::Zlib
+ } else {
+ DataFormat::Raw
+ }
+ }
+
+ /// Reset the state of the compressor, keeping the same parameters.
+ ///
+ /// This avoids re-allocating data.
+ pub fn reset(&mut self) {
+ // LZ buf and huffman has no settings or dynamic memory
+ // that needs to be saved, so we simply replace them.
+ self.lz = LZOxide::new();
+ self.params.reset();
+ *self.huff = HuffmanOxide::default();
+ self.dict.reset();
+ }
+
+ /// Set the compression level of the compressor.
+ ///
+ /// Using this to change level after compression has started is supported.
+ /// # Notes
+ /// The compression strategy will be reset to the default one when this is called.
+ pub fn set_compression_level(&mut self, level: CompressionLevel) {
+ let format = self.data_format();
+ self.set_format_and_level(format, level as u8);
+ }
+
+ /// Set the compression level of the compressor using an integer value.
+ ///
+ /// Using this to change level after compression has started is supported.
+ /// # Notes
+ /// The compression strategy will be reset to the default one when this is called.
+ pub fn set_compression_level_raw(&mut self, level: u8) {
+ let format = self.data_format();
+ self.set_format_and_level(format, level);
+ }
+
+ /// Update the compression settings of the compressor.
+ ///
+ /// Changing the `DataFormat` after compression has started will result in
+ /// a corrupted stream.
+ ///
+ /// # Notes
+ /// This function is mainly intended for setting the initial settings after e.g. creating with
+ /// `default` or after calling `CompressorOxide::reset()`, and behaviour may be changed
+ /// to disallow calling it after starting compression in the future.
+ pub fn set_format_and_level(&mut self, data_format: DataFormat, level: u8) {
+ let flags = create_comp_flags_from_zip_params(
+ level.into(),
+ data_format.to_window_bits(),
+ CompressionStrategy::Default as i32,
+ );
+ self.params.update_flags(flags);
+ self.dict.update_flags(flags);
+ }
+}
+
+impl Default for CompressorOxide {
+ /// Initialize the compressor with a level of 4, zlib wrapper and
+ /// the default strategy.
+ #[inline(always)]
+ fn default() -> Self {
+ CompressorOxide {
+ lz: LZOxide::new(),
+ params: ParamsOxide::new(DEFAULT_FLAGS),
+ /// Put HuffmanOxide on the heap with default trick to avoid
+ /// excessive stack copies.
+ huff: Box::default(),
+ dict: DictOxide::new(DEFAULT_FLAGS),
+ }
+ }
+}
+
+/// Callback function used in `compress_to_output`.
+pub struct CallbackFunc<'a> {
+ pub put_buf_func: &'a mut dyn FnMut(&[u8]) -> bool,
+}
+
+impl<'a> CallbackFunc<'a> {
+ fn flush_output(
+ &mut self,
+ saved_output: SavedOutputBufferOxide,
+ params: &mut ParamsOxide,
+ ) -> i32 {
+ // TODO: As this could be unsafe since
+ // we can't verify the function pointer
+ // this whole function should maybe be unsafe as well.
+ let call_success = (self.put_buf_func)(&params.local_buf.b[0..saved_output.pos as usize]);
+
+ if !call_success {
+ params.prev_return_status = TDEFLStatus::PutBufFailed;
+ return params.prev_return_status as i32;
+ }
+
+ params.flush_remaining as i32
+ }
+}
+
+struct CallbackBuf<'a> {
+ pub out_buf: &'a mut [u8],
+}
+
+impl<'a> CallbackBuf<'a> {
+ fn flush_output(
+ &mut self,
+ saved_output: SavedOutputBufferOxide,
+ params: &mut ParamsOxide,
+ ) -> i32 {
+ if saved_output.local {
+ let n = cmp::min(
+ saved_output.pos as usize,
+ self.out_buf.len() - params.out_buf_ofs,
+ );
+ (&mut self.out_buf[params.out_buf_ofs..params.out_buf_ofs + n])
+ .copy_from_slice(&params.local_buf.b[..n]);
+
+ params.out_buf_ofs += n;
+ if saved_output.pos != n {
+ params.flush_ofs = n as u32;
+ params.flush_remaining = (saved_output.pos - n) as u32;
+ }
+ } else {
+ params.out_buf_ofs += saved_output.pos;
+ }
+
+ params.flush_remaining as i32
+ }
+}
+
+enum CallbackOut<'a> {
+ Func(CallbackFunc<'a>),
+ Buf(CallbackBuf<'a>),
+}
+
+impl<'a> CallbackOut<'a> {
+ fn new_output_buffer<'b>(
+ &'b mut self,
+ local_buf: &'b mut [u8],
+ out_buf_ofs: usize,
+ ) -> OutputBufferOxide<'b> {
+ let is_local;
+ let buf_len = OUT_BUF_SIZE - 16;
+ let chosen_buffer = match *self {
+ CallbackOut::Buf(ref mut cb) if cb.out_buf.len() - out_buf_ofs >= OUT_BUF_SIZE => {
+ is_local = false;
+ &mut cb.out_buf[out_buf_ofs..out_buf_ofs + buf_len]
+ }
+ _ => {
+ is_local = true;
+ &mut local_buf[..buf_len]
+ }
+ };
+
+ OutputBufferOxide {
+ inner: chosen_buffer,
+ inner_pos: 0,
+ local: is_local,
+ bit_buffer: 0,
+ bits_in: 0,
+ }
+ }
+}
+
+struct CallbackOxide<'a> {
+ in_buf: Option<&'a [u8]>,
+ in_buf_size: Option<&'a mut usize>,
+ out_buf_size: Option<&'a mut usize>,
+ out: CallbackOut<'a>,
+}
+
+impl<'a> CallbackOxide<'a> {
+ fn new_callback_buf(in_buf: &'a [u8], out_buf: &'a mut [u8]) -> Self {
+ CallbackOxide {
+ in_buf: Some(in_buf),
+ in_buf_size: None,
+ out_buf_size: None,
+ out: CallbackOut::Buf(CallbackBuf { out_buf }),
+ }
+ }
+
+ fn new_callback_func(in_buf: &'a [u8], callback_func: CallbackFunc<'a>) -> Self {
+ CallbackOxide {
+ in_buf: Some(in_buf),
+ in_buf_size: None,
+ out_buf_size: None,
+ out: CallbackOut::Func(callback_func),
+ }
+ }
+
+ fn update_size(&mut self, in_size: Option<usize>, out_size: Option<usize>) {
+ if let (Some(in_size), Some(size)) = (in_size, self.in_buf_size.as_mut()) {
+ **size = in_size;
+ }
+
+ if let (Some(out_size), Some(size)) = (out_size, self.out_buf_size.as_mut()) {
+ **size = out_size
+ }
+ }
+
+ fn flush_output(
+ &mut self,
+ saved_output: SavedOutputBufferOxide,
+ params: &mut ParamsOxide,
+ ) -> i32 {
+ if saved_output.pos == 0 {
+ return params.flush_remaining as i32;
+ }
+
+ self.update_size(Some(params.src_pos), None);
+ match self.out {
+ CallbackOut::Func(ref mut cf) => cf.flush_output(saved_output, params),
+ CallbackOut::Buf(ref mut cb) => cb.flush_output(saved_output, params),
+ }
+ }
+}
+
+struct OutputBufferOxide<'a> {
+ pub inner: &'a mut [u8],
+ pub inner_pos: usize,
+ pub local: bool,
+
+ pub bit_buffer: u32,
+ pub bits_in: u32,
+}
+
+impl<'a> OutputBufferOxide<'a> {
+ fn put_bits(&mut self, bits: u32, len: u32) {
+ assert!(bits <= ((1u32 << len) - 1u32));
+ self.bit_buffer |= bits << self.bits_in;
+ self.bits_in += len;
+ while self.bits_in >= 8 {
+ self.inner[self.inner_pos] = self.bit_buffer as u8;
+ self.inner_pos += 1;
+ self.bit_buffer >>= 8;
+ self.bits_in -= 8;
+ }
+ }
+
+ const fn save(&self) -> SavedOutputBufferOxide {
+ SavedOutputBufferOxide {
+ pos: self.inner_pos,
+ bit_buffer: self.bit_buffer,
+ bits_in: self.bits_in,
+ local: self.local,
+ }
+ }
+
+ fn load(&mut self, saved: SavedOutputBufferOxide) {
+ self.inner_pos = saved.pos;
+ self.bit_buffer = saved.bit_buffer;
+ self.bits_in = saved.bits_in;
+ self.local = saved.local;
+ }
+
+ fn pad_to_bytes(&mut self) {
+ if self.bits_in != 0 {
+ let len = 8 - self.bits_in;
+ self.put_bits(0, len);
+ }
+ }
+}
+
+struct SavedOutputBufferOxide {
+ pub pos: usize,
+ pub bit_buffer: u32,
+ pub bits_in: u32,
+ pub local: bool,
+}
+
+struct BitBuffer {
+ pub bit_buffer: u64,
+ pub bits_in: u32,
+}
+
+impl BitBuffer {
+ fn put_fast(&mut self, bits: u64, len: u32) {
+ self.bit_buffer |= bits << self.bits_in;
+ self.bits_in += len;
+ }
+
+ fn flush(&mut self, output: &mut OutputBufferOxide) -> Result<()> {
+ let pos = output.inner_pos;
+ {
+ // isolation to please borrow checker
+ let inner = &mut output.inner[pos..pos + 8];
+ let bytes = u64::to_le_bytes(self.bit_buffer);
+ inner.copy_from_slice(&bytes);
+ }
+ match output.inner_pos.checked_add((self.bits_in >> 3) as usize) {
+ Some(n) if n <= output.inner.len() => output.inner_pos = n,
+ _ => return Err(Error {}),
+ }
+ self.bit_buffer >>= self.bits_in & !7;
+ self.bits_in &= 7;
+ Ok(())
+ }
+}
+
+/// A struct containing data about huffman codes and symbol frequencies.
+///
+/// NOTE: Only the literal/lengths have enough symbols to actually use
+/// the full array. It's unclear why it's defined like this in miniz,
+/// it could be for cache/alignment reasons.
+struct HuffmanOxide {
+ /// Number of occurrences of each symbol.
+ pub count: [[u16; MAX_HUFF_SYMBOLS]; MAX_HUFF_TABLES],
+ /// The bits of the huffman code assigned to the symbol
+ pub codes: [[u16; MAX_HUFF_SYMBOLS]; MAX_HUFF_TABLES],
+ /// The length of the huffman code assigned to the symbol.
+ pub code_sizes: [[u8; MAX_HUFF_SYMBOLS]; MAX_HUFF_TABLES],
+}
+
+/// Tables used for literal/lengths in `HuffmanOxide`.
+const LITLEN_TABLE: usize = 0;
+/// Tables for distances.
+const DIST_TABLE: usize = 1;
+/// Tables for the run-length encoded huffman lengths for literals/lengths/distances.
+const HUFF_CODES_TABLE: usize = 2;
+
+/// Status of RLE encoding of huffman code lengths.
+struct Rle {
+ pub z_count: u32,
+ pub repeat_count: u32,
+ pub prev_code_size: u8,
+}
+
+impl Rle {
+ fn prev_code_size(
+ &mut self,
+ packed_code_sizes: &mut [u8],
+ packed_pos: &mut usize,
+ h: &mut HuffmanOxide,
+ ) -> Result<()> {
+ let mut write = |buf| write(buf, packed_code_sizes, packed_pos);
+ let counts = &mut h.count[HUFF_CODES_TABLE];
+ if self.repeat_count != 0 {
+ if self.repeat_count < 3 {
+ counts[self.prev_code_size as usize] =
+ counts[self.prev_code_size as usize].wrapping_add(self.repeat_count as u16);
+ let code = self.prev_code_size;
+ write(&[code, code, code][..self.repeat_count as usize])?;
+ } else {
+ counts[16] = counts[16].wrapping_add(1);
+ write(&[16, (self.repeat_count - 3) as u8][..])?;
+ }
+ self.repeat_count = 0;
+ }
+
+ Ok(())
+ }
+
+ fn zero_code_size(
+ &mut self,
+ packed_code_sizes: &mut [u8],
+ packed_pos: &mut usize,
+ h: &mut HuffmanOxide,
+ ) -> Result<()> {
+ let mut write = |buf| write(buf, packed_code_sizes, packed_pos);
+ let counts = &mut h.count[HUFF_CODES_TABLE];
+ if self.z_count != 0 {
+ if self.z_count < 3 {
+ counts[0] = counts[0].wrapping_add(self.z_count as u16);
+ write(&[0, 0, 0][..self.z_count as usize])?;
+ } else if self.z_count <= 10 {
+ counts[17] = counts[17].wrapping_add(1);
+ write(&[17, (self.z_count - 3) as u8][..])?;
+ } else {
+ counts[18] = counts[18].wrapping_add(1);
+ write(&[18, (self.z_count - 11) as u8][..])?;
+ }
+ self.z_count = 0;
+ }
+
+ Ok(())
+ }
+}
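+
+// The scheme above mirrors the code-length alphabet from RFC 1951 section 3.2.7:
+// symbol 16 repeats the previous length 3-6 times, symbol 17 encodes a run of
+// 3-10 zero lengths and symbol 18 a run of 11-138 zero lengths, each followed
+// by a repeat count biased by the minimum run length. For illustration
+// (arbitrary runs, not taken from this crate's tests):
+//   - 7 zero code lengths are packed as  [17, 7 - 3]   = [17, 4]
+//   - 15 zero code lengths are packed as [18, 15 - 11] = [18, 4]
+//   - 5 repeats of the previous length become [16, 5 - 3] = [16, 2]
+// Runs shorter than 3 are written out literally, as in the two methods above.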
+
+fn write(src: &[u8], dst: &mut [u8], dst_pos: &mut usize) -> Result<()> {
+ match dst.get_mut(*dst_pos..*dst_pos + src.len()) {
+ Some(s) => s.copy_from_slice(src),
+ None => return Err(Error {}),
+ }
+ *dst_pos += src.len();
+ Ok(())
+}
+
+impl Default for HuffmanOxide {
+ fn default() -> Self {
+ HuffmanOxide {
+ count: [[0; MAX_HUFF_SYMBOLS]; MAX_HUFF_TABLES],
+ codes: [[0; MAX_HUFF_SYMBOLS]; MAX_HUFF_TABLES],
+ code_sizes: [[0; MAX_HUFF_SYMBOLS]; MAX_HUFF_TABLES],
+ }
+ }
+}
+
+impl HuffmanOxide {
+ fn radix_sort_symbols<'a>(
+ symbols0: &'a mut [SymFreq],
+ symbols1: &'a mut [SymFreq],
+ ) -> &'a mut [SymFreq] {
+ let mut hist = [[0; 256]; 2];
+
+ for freq in symbols0.iter() {
+ hist[0][(freq.key & 0xFF) as usize] += 1;
+ hist[1][((freq.key >> 8) & 0xFF) as usize] += 1;
+ }
+
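+ // If every key's high byte is zero, the second (high-byte) pass would not
+ // change the order, so it can be skipped.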
+ let mut n_passes = 2;
+ if symbols0.len() == hist[1][0] {
+ n_passes -= 1;
+ }
+
+ let mut current_symbols = symbols0;
+ let mut new_symbols = symbols1;
+
+ for (pass, hist_item) in hist.iter().enumerate().take(n_passes) {
+ let mut offsets = [0; 256];
+ let mut offset = 0;
+ for i in 0..256 {
+ offsets[i] = offset;
+ offset += hist_item[i];
+ }
+
+ for sym in current_symbols.iter() {
+ let j = ((sym.key >> (pass * 8)) & 0xFF) as usize;
+ new_symbols[offsets[j]] = *sym;
+ offsets[j] += 1;
+ }
+
+ mem::swap(&mut current_symbols, &mut new_symbols);
+ }
+
+ current_symbols
+ }
+
+ fn calculate_minimum_redundancy(symbols: &mut [SymFreq]) {
+ match symbols.len() {
+ 0 => (),
+ 1 => symbols[0].key = 1,
+ n => {
+ symbols[0].key += symbols[1].key;
+ let mut root = 0;
+ let mut leaf = 2;
+ for next in 1..n - 1 {
+ if (leaf >= n) || (symbols[root].key < symbols[leaf].key) {
+ symbols[next].key = symbols[root].key;
+ symbols[root].key = next as u16;
+ root += 1;
+ } else {
+ symbols[next].key = symbols[leaf].key;
+ leaf += 1;
+ }
+
+ if (leaf >= n) || (root < next && symbols[root].key < symbols[leaf].key) {
+ symbols[next].key = symbols[next].key.wrapping_add(symbols[root].key);
+ symbols[root].key = next as u16;
+ root += 1;
+ } else {
+ symbols[next].key = symbols[next].key.wrapping_add(symbols[leaf].key);
+ leaf += 1;
+ }
+ }
+
+ symbols[n - 2].key = 0;
+ for next in (0..n - 2).rev() {
+ symbols[next].key = symbols[symbols[next].key as usize].key + 1;
+ }
+
+ let mut avbl = 1;
+ let mut used = 0;
+ let mut dpth = 0;
+ let mut root = (n - 2) as i32;
+ let mut next = (n - 1) as i32;
+ while avbl > 0 {
+ while (root >= 0) && (symbols[root as usize].key == dpth) {
+ used += 1;
+ root -= 1;
+ }
+ while avbl > used {
+ symbols[next as usize].key = dpth;
+ next -= 1;
+ avbl -= 1;
+ }
+ avbl = 2 * used;
+ dpth += 1;
+ used = 0;
+ }
+ }
+ }
+ }
+
+ fn enforce_max_code_size(num_codes: &mut [i32], code_list_len: usize, max_code_size: usize) {
+ if code_list_len <= 1 {
+ return;
+ }
+
+ num_codes[max_code_size] += num_codes[max_code_size + 1..].iter().sum::<i32>();
+ let total = num_codes[1..=max_code_size]
+ .iter()
+ .rev()
+ .enumerate()
+ .fold(0u32, |total, (i, &x)| total + ((x as u32) << i));
+
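+ // `total` is the Kraft sum of the code lengths scaled by 2^max_code_size; while it
+ // exceeds 1 << max_code_size the lengths are over-subscribed, so drop one code from
+ // the deepest level and split the longest remaining shorter code into two codes that
+ // are one bit longer, shrinking the sum by one per iteration.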
+ for _ in (1 << max_code_size)..total {
+ num_codes[max_code_size] -= 1;
+ for i in (1..max_code_size).rev() {
+ if num_codes[i] != 0 {
+ num_codes[i] -= 1;
+ num_codes[i + 1] += 2;
+ break;
+ }
+ }
+ }
+ }
+
+ fn optimize_table(
+ &mut self,
+ table_num: usize,
+ table_len: usize,
+ code_size_limit: usize,
+ static_table: bool,
+ ) {
+ let mut num_codes = [0i32; MAX_SUPPORTED_HUFF_CODESIZE + 1];
+ let mut next_code = [0u32; MAX_SUPPORTED_HUFF_CODESIZE + 1];
+
+ if static_table {
+ for &code_size in &self.code_sizes[table_num][..table_len] {
+ num_codes[code_size as usize] += 1;
+ }
+ } else {
+ let mut symbols0 = [SymFreq {
+ key: 0,
+ sym_index: 0,
+ }; MAX_HUFF_SYMBOLS];
+ let mut symbols1 = [SymFreq {
+ key: 0,
+ sym_index: 0,
+ }; MAX_HUFF_SYMBOLS];
+
+ let mut num_used_symbols = 0;
+ for i in 0..table_len {
+ if self.count[table_num][i] != 0 {
+ symbols0[num_used_symbols] = SymFreq {
+ key: self.count[table_num][i],
+ sym_index: i as u16,
+ };
+ num_used_symbols += 1;
+ }
+ }
+
+ let symbols = Self::radix_sort_symbols(
+ &mut symbols0[..num_used_symbols],
+ &mut symbols1[..num_used_symbols],
+ );
+ Self::calculate_minimum_redundancy(symbols);
+
+ for symbol in symbols.iter() {
+ num_codes[symbol.key as usize] += 1;
+ }
+
+ Self::enforce_max_code_size(&mut num_codes, num_used_symbols, code_size_limit);
+
+ memset(&mut self.code_sizes[table_num][..], 0);
+ memset(&mut self.codes[table_num][..], 0);
+
+ let mut last = num_used_symbols;
+ for (i, &num_item) in num_codes
+ .iter()
+ .enumerate()
+ .take(code_size_limit + 1)
+ .skip(1)
+ {
+ let first = last - num_item as usize;
+ for symbol in &symbols[first..last] {
+ self.code_sizes[table_num][symbol.sym_index as usize] = i as u8;
+ }
+ last = first;
+ }
+ }
+
+ let mut j = 0;
+ next_code[1] = 0;
+ for i in 2..=code_size_limit {
+ j = (j + num_codes[i - 1]) << 1;
+ next_code[i] = j as u32;
+ }
+
+ for (&code_size, huff_code) in self.code_sizes[table_num]
+ .iter()
+ .take(table_len)
+ .zip(self.codes[table_num].iter_mut().take(table_len))
+ {
+ if code_size == 0 {
+ continue;
+ }
+
+ let mut code = next_code[code_size as usize];
+ next_code[code_size as usize] += 1;
+
+ let mut rev_code = 0;
+ for _ in 0..code_size {
+ rev_code = (rev_code << 1) | (code & 1);
+ code >>= 1;
+ }
+ *huff_code = rev_code as u16;
+ }
+ }
+
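+ // The lengths set below are the fixed huffman code lengths from RFC 1951
+ // section 3.2.6: literals/lengths 0-143 use 8 bits, 144-255 use 9 bits,
+ // 256-279 use 7 bits, 280-287 use 8 bits, and all 32 distance codes use 5 bits.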
+ fn start_static_block(&mut self, output: &mut OutputBufferOxide) {
+ memset(&mut self.code_sizes[LITLEN_TABLE][0..144], 8);
+ memset(&mut self.code_sizes[LITLEN_TABLE][144..256], 9);
+ memset(&mut self.code_sizes[LITLEN_TABLE][256..280], 7);
+ memset(&mut self.code_sizes[LITLEN_TABLE][280..288], 8);
+
+ memset(&mut self.code_sizes[DIST_TABLE][..32], 5);
+
+ self.optimize_table(LITLEN_TABLE, 288, 15, true);
+ self.optimize_table(DIST_TABLE, 32, 15, true);
+
+ output.put_bits(0b01, 2)
+ }
+
+ fn start_dynamic_block(&mut self, output: &mut OutputBufferOxide) -> Result<()> {
+ // There will always be one, and only one end of block code.
+ self.count[0][256] = 1;
+
+ self.optimize_table(0, MAX_HUFF_SYMBOLS_0, 15, false);
+ self.optimize_table(1, MAX_HUFF_SYMBOLS_1, 15, false);
+
+ let num_lit_codes = 286
+ - &self.code_sizes[0][257..286]
+ .iter()
+ .rev()
+ .take_while(|&x| *x == 0)
+ .count();
+
+ let num_dist_codes = 30
+ - &self.code_sizes[1][1..30]
+ .iter()
+ .rev()
+ .take_while(|&x| *x == 0)
+ .count();
+
+ let mut code_sizes_to_pack = [0u8; MAX_HUFF_SYMBOLS_0 + MAX_HUFF_SYMBOLS_1];
+ let mut packed_code_sizes = [0u8; MAX_HUFF_SYMBOLS_0 + MAX_HUFF_SYMBOLS_1];
+
+ let total_code_sizes_to_pack = num_lit_codes + num_dist_codes;
+
+ code_sizes_to_pack[..num_lit_codes].copy_from_slice(&self.code_sizes[0][..num_lit_codes]);
+
+ code_sizes_to_pack[num_lit_codes..total_code_sizes_to_pack]
+ .copy_from_slice(&self.code_sizes[1][..num_dist_codes]);
+
+ let mut rle = Rle {
+ z_count: 0,
+ repeat_count: 0,
+ prev_code_size: 0xFF,
+ };
+
+ memset(&mut self.count[HUFF_CODES_TABLE][..MAX_HUFF_SYMBOLS_2], 0);
+
+ let mut packed_pos = 0;
+ for &code_size in &code_sizes_to_pack[..total_code_sizes_to_pack] {
+ if code_size == 0 {
+ rle.prev_code_size(&mut packed_code_sizes, &mut packed_pos, self)?;
+ rle.z_count += 1;
+ if rle.z_count == 138 {
+ rle.zero_code_size(&mut packed_code_sizes, &mut packed_pos, self)?;
+ }
+ } else {
+ rle.zero_code_size(&mut packed_code_sizes, &mut packed_pos, self)?;
+ if code_size != rle.prev_code_size {
+ rle.prev_code_size(&mut packed_code_sizes, &mut packed_pos, self)?;
+ self.count[HUFF_CODES_TABLE][code_size as usize] =
+ self.count[HUFF_CODES_TABLE][code_size as usize].wrapping_add(1);
+ write(&[code_size], &mut packed_code_sizes, &mut packed_pos)?;
+ } else {
+ rle.repeat_count += 1;
+ if rle.repeat_count == 6 {
+ rle.prev_code_size(&mut packed_code_sizes, &mut packed_pos, self)?;
+ }
+ }
+ }
+ rle.prev_code_size = code_size;
+ }
+
+ if rle.repeat_count != 0 {
+ rle.prev_code_size(&mut packed_code_sizes, &mut packed_pos, self)?;
+ } else {
+ rle.zero_code_size(&mut packed_code_sizes, &mut packed_pos, self)?;
+ }
+
+ self.optimize_table(2, MAX_HUFF_SYMBOLS_2, 7, false);
+
+ output.put_bits(2, 2);
+
+ output.put_bits((num_lit_codes - 257) as u32, 5);
+ output.put_bits((num_dist_codes - 1) as u32, 5);
+
+ let mut num_bit_lengths = 18
+ - HUFFMAN_LENGTH_ORDER
+ .iter()
+ .rev()
+ .take_while(|&swizzle| self.code_sizes[HUFF_CODES_TABLE][*swizzle as usize] == 0)
+ .count();
+
+ num_bit_lengths = cmp::max(4, num_bit_lengths + 1);
+ output.put_bits(num_bit_lengths as u32 - 4, 4);
+ for &swizzle in &HUFFMAN_LENGTH_ORDER[..num_bit_lengths] {
+ output.put_bits(
+ u32::from(self.code_sizes[HUFF_CODES_TABLE][swizzle as usize]),
+ 3,
+ );
+ }
+
+ let mut packed_code_size_index = 0;
+ while packed_code_size_index < packed_pos {
+ let code = packed_code_sizes[packed_code_size_index] as usize;
+ packed_code_size_index += 1;
+ assert!(code < MAX_HUFF_SYMBOLS_2);
+ output.put_bits(
+ u32::from(self.codes[HUFF_CODES_TABLE][code]),
+ u32::from(self.code_sizes[HUFF_CODES_TABLE][code]),
+ );
+ if code >= 16 {
+ output.put_bits(
+ u32::from(packed_code_sizes[packed_code_size_index]),
+ [2, 3, 7][code - 16],
+ );
+ packed_code_size_index += 1;
+ }
+ }
+
+ Ok(())
+ }
+}
+
+struct DictOxide {
+ /// The maximum number of checks in the hash chain, for the initial
+ /// and the lazy match respectively.
+ pub max_probes: [u32; 2],
+ /// Buffer of input data.
+ /// Padded with 1 byte to simplify matching code in `compress_fast`.
+ pub b: Box<HashBuffers>,
+
+ pub code_buf_dict_pos: usize,
+ pub lookahead_size: usize,
+ pub lookahead_pos: usize,
+ pub size: usize,
+}
+
+const fn probes_from_flags(flags: u32) -> [u32; 2] {
+ [
+ 1 + ((flags & 0xFFF) + 2) / 3,
+ 1 + (((flags & 0xFFF) >> 2) + 2) / 3,
+ ]
+}
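+
+// For illustration (an arbitrary probe-count value, not one of this crate's
+// flag constants): with `flags & 0xFFF == 128`, `probes_from_flags` yields
+// [1 + (128 + 2) / 3, 1 + ((128 >> 2) + 2) / 3] = [44, 12], i.e. 44 probes for
+// the initial match and 12 for the lazy match.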
+
+impl DictOxide {
+ fn new(flags: u32) -> Self {
+ DictOxide {
+ max_probes: probes_from_flags(flags),
+ b: Box::default(),
+ code_buf_dict_pos: 0,
+ lookahead_size: 0,
+ lookahead_pos: 0,
+ size: 0,
+ }
+ }
+
+ fn update_flags(&mut self, flags: u32) {
+ self.max_probes = probes_from_flags(flags);
+ }
+
+ fn reset(&mut self) {
+ self.b.reset();
+ self.code_buf_dict_pos = 0;
+ self.lookahead_size = 0;
+ self.lookahead_pos = 0;
+ self.size = 0;
+ }
+
+ /// Do an unaligned read of the data at `pos` in the dictionary and interpret it as a
+ /// little-endian `u32`.
+ #[inline]
+ fn read_unaligned_u32(&self, pos: usize) -> u32 {
+ // Masking the value here helps avoid bounds checks.
+ let pos = (pos & LZ_DICT_SIZE_MASK) as usize;
+ let end = pos + 4;
+ // Somehow this assertion makes things faster.
+ assert!(end < LZ_DICT_FULL_SIZE);
+
+ let bytes: [u8; 4] = self.b.dict[pos..end].try_into().unwrap();
+ u32::from_le_bytes(bytes)
+ }
+
+ /// Do an unaligned read of the data at `pos` in the dictionary and interpret it as a
+ /// little-endian `u64`.
+ #[inline]
+ fn read_unaligned_u64(&self, pos: usize) -> u64 {
+ let pos = pos as usize;
+ let bytes: [u8; 8] = self.b.dict[pos..pos + 8].try_into().unwrap();
+ u64::from_le_bytes(bytes)
+ }
+
+ /// Read the two bytes starting at `pos` in the dictionary and interpret them as a
+ /// little-endian `u16`.
+ #[inline]
+ fn read_as_u16(&self, pos: usize) -> u16 {
+ read_u16_le(&self.b.dict[..], pos)
+ }
+
+ /// Try to find a match for the data at lookahead_pos in the dictionary that is
+ /// longer than `match_len`.
+ /// Returns a tuple containing (match_distance, match_length). Will be equal to the input
+ /// values if no better matches were found.
+ fn find_match(
+ &self,
+ lookahead_pos: usize,
+ max_dist: usize,
+ max_match_len: u32,
+ mut match_dist: u32,
+ mut match_len: u32,
+ ) -> (u32, u32) {
+ // Clamp the match len and max_match_len to be valid. (They should be valid when this is
+ // called, but clamp them for now just in case, for safety reasons.)
+ // This should normally end up as at worst conditional moves,
+ // so it shouldn't slow us down much.
+ // TODO: Statically verify these so we don't need to do this.
+ let max_match_len = cmp::min(MAX_MATCH_LEN as u32, max_match_len);
+ match_len = cmp::max(match_len, 1);
+
+ let pos = lookahead_pos as usize & LZ_DICT_SIZE_MASK;
+ let mut probe_pos = pos;
+ // Number of probes into the hash chains.
+ let mut num_probes_left = self.max_probes[(match_len >= 32) as usize];
+
+ // If we already have a match of the full length don't bother searching for another one.
+ if max_match_len <= match_len {
+ return (match_dist, match_len);
+ }
+
+ // Read the last byte of the current match, and the next one, used to compare matches.
+ let mut c01: u16 = self.read_as_u16(pos as usize + match_len as usize - 1);
+ // Read the first two bytes at the current position, used to quickly reject candidate matches.
+ let s01: u16 = self.read_as_u16(pos as usize);
+
+ 'outer: loop {
+ let mut dist;
+ 'found: loop {
+ num_probes_left -= 1;
+ if num_probes_left == 0 {
+ // We have done as many probes in the hash chain as the current compression
+ // settings allow, so return the best match we found, if any.
+ return (match_dist, match_len);
+ }
+
+ for _ in 0..3 {
+ let next_probe_pos = self.b.next[probe_pos as usize] as usize;
+
+ dist = (lookahead_pos - next_probe_pos) & 0xFFFF;
+ if next_probe_pos == 0 || dist > max_dist {
+ // We reached the end of the hash chain, or the next value is further away
+ // than the maximum allowed distance, so return the best match we found, if
+ // any.
+ return (match_dist, match_len);
+ }
+
+ // Mask the position value to get the position in the hash chain of the next
+ // position to match against.
+ probe_pos = next_probe_pos & LZ_DICT_SIZE_MASK;
+
+ if self.read_as_u16((probe_pos + match_len as usize - 1) as usize) == c01 {
+ break 'found;
+ }
+ }
+ }
+
+ if dist == 0 {
+ // We've looked through the whole match range, so return the best match we
+ // found.
+ return (match_dist, match_len);
+ }
+
+ // Check if the two first bytes match.
+ if self.read_as_u16(probe_pos as usize) != s01 {
+ continue;
+ }
+
+ let mut p = pos + 2;
+ let mut q = probe_pos + 2;
+ // The first two bytes matched, so check the full length of the match.
+ for _ in 0..32 {
+ let p_data: u64 = self.read_unaligned_u64(p);
+ let q_data: u64 = self.read_unaligned_u64(q);
+ // Compare 8 bytes at a time using unaligned loads of 64-bit integers.
+ let xor_data = p_data ^ q_data;
+ if xor_data == 0 {
+ p += 8;
+ q += 8;
+ } else {
+ // If not all of the last 8 bytes matched, check how many of them did.
+ let trailing = xor_data.trailing_zeros();
+
+ let probe_len = p - pos + (trailing as usize >> 3);
+ if probe_len > match_len as usize {
+ match_dist = dist as u32;
+ match_len = cmp::min(max_match_len, probe_len as u32);
+ if match_len == max_match_len {
+ // We found a match that had the maximum allowed length,
+ // so there is no point in searching further.
+ return (match_dist, match_len);
+ }
+ // We found a better match, so save the last two bytes for further match
+ // comparisons.
+ c01 = self.read_as_u16(pos + match_len as usize - 1)
+ }
+ continue 'outer;
+ }
+ }
+
+ return (dist as u32, cmp::min(max_match_len, MAX_MATCH_LEN as u32));
+ }
+ }
+}
+
+struct ParamsOxide {
+ pub flags: u32,
+ pub greedy_parsing: bool,
+ pub block_index: u32,
+
+ pub saved_match_dist: u32,
+ pub saved_match_len: u32,
+ pub saved_lit: u8,
+
+ pub flush: TDEFLFlush,
+ pub flush_ofs: u32,
+ pub flush_remaining: u32,
+ pub finished: bool,
+
+ pub adler32: u32,
+
+ pub src_pos: usize,
+
+ pub out_buf_ofs: usize,
+ pub prev_return_status: TDEFLStatus,
+
+ pub saved_bit_buffer: u32,
+ pub saved_bits_in: u32,
+
+ pub local_buf: Box<LocalBuf>,
+}
+
+impl ParamsOxide {
+ fn new(flags: u32) -> Self {
+ ParamsOxide {
+ flags,
+ greedy_parsing: flags & TDEFL_GREEDY_PARSING_FLAG != 0,
+ block_index: 0,
+ saved_match_dist: 0,
+ saved_match_len: 0,
+ saved_lit: 0,
+ flush: TDEFLFlush::None,
+ flush_ofs: 0,
+ flush_remaining: 0,
+ finished: false,
+ adler32: MZ_ADLER32_INIT,
+ src_pos: 0,
+ out_buf_ofs: 0,
+ prev_return_status: TDEFLStatus::Okay,
+ saved_bit_buffer: 0,
+ saved_bits_in: 0,
+ local_buf: Box::default(),
+ }
+ }
+
+ fn update_flags(&mut self, flags: u32) {
+ self.flags = flags;
+ self.greedy_parsing = self.flags & TDEFL_GREEDY_PARSING_FLAG != 0;
+ }
+
+ /// Reset state, saving settings.
+ fn reset(&mut self) {
+ self.block_index = 0;
+ self.saved_match_len = 0;
+ self.saved_match_dist = 0;
+ self.saved_lit = 0;
+ self.flush = TDEFLFlush::None;
+ self.flush_ofs = 0;
+ self.flush_remaining = 0;
+ self.finished = false;
+ self.adler32 = MZ_ADLER32_INIT;
+ self.src_pos = 0;
+ self.out_buf_ofs = 0;
+ self.prev_return_status = TDEFLStatus::Okay;
+ self.saved_bit_buffer = 0;
+ self.saved_bits_in = 0;
+ self.local_buf.b = [0; OUT_BUF_SIZE];
+ }
+}
+
+struct LZOxide {
+ pub codes: [u8; LZ_CODE_BUF_SIZE],
+ pub code_position: usize,
+ pub flag_position: usize,
+
+ // The total number of bytes in the current block.
+ // (Could maybe use usize, but in practice the block is flushed long before the count
+ // could overflow a u32.)
+ pub total_bytes: u32,
+ pub num_flags_left: u32,
+}
+
+impl LZOxide {
+ const fn new() -> Self {
+ LZOxide {
+ codes: [0; LZ_CODE_BUF_SIZE],
+ code_position: 1,
+ flag_position: 0,
+ total_bytes: 0,
+ num_flags_left: 8,
+ }
+ }
+
+ fn write_code(&mut self, val: u8) {
+ self.codes[self.code_position] = val;
+ self.code_position += 1;
+ }
+
+ fn init_flag(&mut self) {
+ if self.num_flags_left == 8 {
+ *self.get_flag() = 0;
+ self.code_position -= 1;
+ } else {
+ *self.get_flag() >>= self.num_flags_left;
+ }
+ }
+
+ fn get_flag(&mut self) -> &mut u8 {
+ &mut self.codes[self.flag_position]
+ }
+
+ fn plant_flag(&mut self) {
+ self.flag_position = self.code_position;
+ self.code_position += 1;
+ }
+
+ fn consume_flag(&mut self) {
+ self.num_flags_left -= 1;
+ if self.num_flags_left == 0 {
+ self.num_flags_left = 8;
+ self.plant_flag();
+ }
+ }
+}
+
+fn compress_lz_codes(
+ huff: &HuffmanOxide,
+ output: &mut OutputBufferOxide,
+ lz_code_buf: &[u8],
+) -> Result<bool> {
+ let mut flags = 1;
+ let mut bb = BitBuffer {
+ bit_buffer: u64::from(output.bit_buffer),
+ bits_in: output.bits_in,
+ };
+
+ let mut i: usize = 0;
+ while i < lz_code_buf.len() {
+ if flags == 1 {
+ flags = u32::from(lz_code_buf[i]) | 0x100;
+ i += 1;
+ }
+
+ // The lz code was a length code
+ if flags & 1 == 1 {
+ flags >>= 1;
+
+ let sym;
+ let num_extra_bits;
+
+ let match_len = lz_code_buf[i] as usize;
+
+ let match_dist = read_u16_le(lz_code_buf, i + 1);
+
+ i += 3;
+
+ debug_assert!(huff.code_sizes[0][LEN_SYM[match_len] as usize] != 0);
+ bb.put_fast(
+ u64::from(huff.codes[0][LEN_SYM[match_len] as usize]),
+ u32::from(huff.code_sizes[0][LEN_SYM[match_len] as usize]),
+ );
+ bb.put_fast(
+ match_len as u64 & u64::from(BITMASKS[LEN_EXTRA[match_len] as usize]),
+ u32::from(LEN_EXTRA[match_len]),
+ );
+
+ if match_dist < 512 {
+ sym = SMALL_DIST_SYM[match_dist as usize] as usize;
+ num_extra_bits = SMALL_DIST_EXTRA[match_dist as usize] as usize;
+ } else {
+ sym = LARGE_DIST_SYM[(match_dist >> 8) as usize] as usize;
+ num_extra_bits = LARGE_DIST_EXTRA[(match_dist >> 8) as usize] as usize;
+ }
+
+ debug_assert!(huff.code_sizes[1][sym] != 0);
+ bb.put_fast(
+ u64::from(huff.codes[1][sym]),
+ u32::from(huff.code_sizes[1][sym]),
+ );
+ bb.put_fast(
+ u64::from(match_dist) & u64::from(BITMASKS[num_extra_bits as usize]),
+ num_extra_bits as u32,
+ );
+ } else {
+ // The lz code was a literal
+ for _ in 0..3 {
+ flags >>= 1;
+ let lit = lz_code_buf[i];
+ i += 1;
+
+ debug_assert!(huff.code_sizes[0][lit as usize] != 0);
+ bb.put_fast(
+ u64::from(huff.codes[0][lit as usize]),
+ u32::from(huff.code_sizes[0][lit as usize]),
+ );
+
+ if flags & 1 == 1 || i >= lz_code_buf.len() {
+ break;
+ }
+ }
+ }
+
+ bb.flush(output)?;
+ }
+
+ output.bits_in = 0;
+ output.bit_buffer = 0;
+ while bb.bits_in != 0 {
+ let n = cmp::min(bb.bits_in, 16);
+ output.put_bits(bb.bit_buffer as u32 & BITMASKS[n as usize], n);
+ bb.bit_buffer >>= n;
+ bb.bits_in -= n;
+ }
+
+ // Output the end of block symbol.
+ output.put_bits(
+ u32::from(huff.codes[0][256]),
+ u32::from(huff.code_sizes[0][256]),
+ );
+
+ Ok(true)
+}
+
+fn compress_block(
+ huff: &mut HuffmanOxide,
+ output: &mut OutputBufferOxide,
+ lz: &LZOxide,
+ static_block: bool,
+) -> Result<bool> {
+ if static_block {
+ huff.start_static_block(output);
+ } else {
+ huff.start_dynamic_block(output)?;
+ }
+
+ compress_lz_codes(huff, output, &lz.codes[..lz.code_position])
+}
+
+fn flush_block(
+ d: &mut CompressorOxide,
+ callback: &mut CallbackOxide,
+ flush: TDEFLFlush,
+) -> Result<i32> {
+ let mut saved_buffer;
+ {
+ let mut output = callback
+ .out
+ .new_output_buffer(&mut d.params.local_buf.b, d.params.out_buf_ofs);
+ output.bit_buffer = d.params.saved_bit_buffer;
+ output.bits_in = d.params.saved_bits_in;
+
+ let use_raw_block = (d.params.flags & TDEFL_FORCE_ALL_RAW_BLOCKS != 0)
+ && (d.dict.lookahead_pos - d.dict.code_buf_dict_pos) <= d.dict.size;
+
+ assert!(d.params.flush_remaining == 0);
+ d.params.flush_ofs = 0;
+ d.params.flush_remaining = 0;
+
+ d.lz.init_flag();
+
+ // If we are at the start of the stream, write the zlib header if requested.
+ if d.params.flags & TDEFL_WRITE_ZLIB_HEADER != 0 && d.params.block_index == 0 {
+ let header = zlib::header_from_flags(d.params.flags as u32);
+ output.put_bits(header[0].into(), 8);
+ output.put_bits(header[1].into(), 8);
+ }
+
+ // Output the block header.
+ output.put_bits((flush == TDEFLFlush::Finish) as u32, 1);
+
+ saved_buffer = output.save();
+
+ let comp_success = if !use_raw_block {
+ let use_static =
+ (d.params.flags & TDEFL_FORCE_ALL_STATIC_BLOCKS != 0) || (d.lz.total_bytes < 48);
+ compress_block(&mut d.huff, &mut output, &d.lz, use_static)?
+ } else {
+ false
+ };
+
+ // If we failed to compress anything and the compressed output would take up more space
+ // than the raw input data, output a stored block instead, which has at most 5 bytes of
+ // overhead.
+ // We only use some simple heuristics for now.
+ // A stored block will have an overhead of at least 4 bytes containing the block length,
+ // but usually more due to the length parameters having to start at a byte boundary and
+ // thus requiring up to 5 bytes of padding.
+ // As a static block will have an overhead of at most 1 bit per byte
+ // (as literals are either 8 or 9 bits), a raw block will
+ // never take up less space if the number of input bytes is less than 32.
+ let expanded = (d.lz.total_bytes > 32)
+ && (output.inner_pos - saved_buffer.pos + 1 >= (d.lz.total_bytes as usize))
+ && (d.dict.lookahead_pos - d.dict.code_buf_dict_pos <= d.dict.size);
+
+ if use_raw_block || expanded {
+ output.load(saved_buffer);
+
+ // Block header.
+ output.put_bits(0, 2);
+
+ // Block length has to start on a byte boundary, so pad.
+ output.pad_to_bytes();
+
+ // Block length and ones complement of block length.
+ output.put_bits(d.lz.total_bytes & 0xFFFF, 16);
+ output.put_bits(!d.lz.total_bytes & 0xFFFF, 16);
+
+ // Write the actual bytes.
+ for i in 0..d.lz.total_bytes {
+ let pos = (d.dict.code_buf_dict_pos + i as usize) & LZ_DICT_SIZE_MASK;
+ output.put_bits(u32::from(d.dict.b.dict[pos as usize]), 8);
+ }
+ } else if !comp_success {
+ output.load(saved_buffer);
+ compress_block(&mut d.huff, &mut output, &d.lz, true)?;
+ }
+
+ if flush != TDEFLFlush::None {
+ if flush == TDEFLFlush::Finish {
+ output.pad_to_bytes();
+ if d.params.flags & TDEFL_WRITE_ZLIB_HEADER != 0 {
+ let mut adler = d.params.adler32;
+ for _ in 0..4 {
+ output.put_bits((adler >> 24) & 0xFF, 8);
+ adler <<= 8;
+ }
+ }
+ } else {
+ // Sync or Full flush.
+ // Output an empty raw block.
+ output.put_bits(0, 3);
+ output.pad_to_bytes();
+ output.put_bits(0, 16);
+ output.put_bits(0xFFFF, 16);
+ }
+ }
+
+ memset(&mut d.huff.count[0][..MAX_HUFF_SYMBOLS_0], 0);
+ memset(&mut d.huff.count[1][..MAX_HUFF_SYMBOLS_1], 0);
+
+ d.lz.code_position = 1;
+ d.lz.flag_position = 0;
+ d.lz.num_flags_left = 8;
+ d.dict.code_buf_dict_pos += d.lz.total_bytes as usize;
+ d.lz.total_bytes = 0;
+ d.params.block_index += 1;
+
+ saved_buffer = output.save();
+
+ d.params.saved_bit_buffer = saved_buffer.bit_buffer;
+ d.params.saved_bits_in = saved_buffer.bits_in;
+ }
+
+ Ok(callback.flush_output(saved_buffer, &mut d.params))
+}
+
+fn record_literal(h: &mut HuffmanOxide, lz: &mut LZOxide, lit: u8) {
+ lz.total_bytes += 1;
+ lz.write_code(lit);
+
+ *lz.get_flag() >>= 1;
+ lz.consume_flag();
+
+ h.count[0][lit as usize] += 1;
+}
+
+fn record_match(h: &mut HuffmanOxide, lz: &mut LZOxide, mut match_len: u32, mut match_dist: u32) {
+ assert!(match_len >= MIN_MATCH_LEN.into());
+ assert!(match_dist >= 1);
+ assert!(match_dist as usize <= LZ_DICT_SIZE);
+
+ lz.total_bytes += match_len;
+ match_dist -= 1;
+ match_len -= u32::from(MIN_MATCH_LEN);
+ lz.write_code(match_len as u8);
+ lz.write_code(match_dist as u8);
+ lz.write_code((match_dist >> 8) as u8);
+
+ *lz.get_flag() >>= 1;
+ *lz.get_flag() |= 0x80;
+ lz.consume_flag();
+
+ let symbol = if match_dist < 512 {
+ SMALL_DIST_SYM[match_dist as usize]
+ } else {
+ LARGE_DIST_SYM[((match_dist >> 8) & 127) as usize]
+ } as usize;
+ h.count[1][symbol] += 1;
+ h.count[0][LEN_SYM[match_len as usize] as usize] += 1;
+}
+
+fn compress_normal(d: &mut CompressorOxide, callback: &mut CallbackOxide) -> bool {
+ let mut src_pos = d.params.src_pos;
+ let in_buf = match callback.in_buf {
+ None => return true,
+ Some(in_buf) => in_buf,
+ };
+
+ let mut lookahead_size = d.dict.lookahead_size;
+ let mut lookahead_pos = d.dict.lookahead_pos;
+ let mut saved_lit = d.params.saved_lit;
+ let mut saved_match_dist = d.params.saved_match_dist;
+ let mut saved_match_len = d.params.saved_match_len;
+
+ while src_pos < in_buf.len() || (d.params.flush != TDEFLFlush::None && lookahead_size != 0) {
+ let src_buf_left = in_buf.len() - src_pos;
+ let num_bytes_to_process = cmp::min(src_buf_left, MAX_MATCH_LEN - lookahead_size as usize);
+
+ if lookahead_size + d.dict.size >= usize::from(MIN_MATCH_LEN) - 1
+ && num_bytes_to_process > 0
+ {
+ let dictb = &mut d.dict.b;
+
+ let mut dst_pos = (lookahead_pos + lookahead_size as usize) & LZ_DICT_SIZE_MASK;
+ let mut ins_pos = lookahead_pos + lookahead_size as usize - 2;
+ // Start the hash value from the first two bytes
+ let mut hash = update_hash(
+ u16::from(dictb.dict[(ins_pos & LZ_DICT_SIZE_MASK) as usize]),
+ dictb.dict[((ins_pos + 1) & LZ_DICT_SIZE_MASK) as usize],
+ );
+
+ lookahead_size += num_bytes_to_process;
+
+ for &c in &in_buf[src_pos..src_pos + num_bytes_to_process] {
+ // Add byte to input buffer.
+ dictb.dict[dst_pos as usize] = c;
+ if (dst_pos as usize) < MAX_MATCH_LEN - 1 {
+ dictb.dict[LZ_DICT_SIZE + dst_pos as usize] = c;
+ }
+
+ // Generate hash from the current byte,
+ hash = update_hash(hash, c);
+ dictb.next[(ins_pos & LZ_DICT_SIZE_MASK) as usize] = dictb.hash[hash as usize];
+ // and insert it into the hash chain.
+ dictb.hash[hash as usize] = ins_pos as u16;
+ dst_pos = (dst_pos + 1) & LZ_DICT_SIZE_MASK;
+ ins_pos += 1;
+ }
+ src_pos += num_bytes_to_process;
+ } else {
+ let dictb = &mut d.dict.b;
+ for &c in &in_buf[src_pos..src_pos + num_bytes_to_process] {
+ let dst_pos = (lookahead_pos + lookahead_size) & LZ_DICT_SIZE_MASK;
+ dictb.dict[dst_pos as usize] = c;
+ if (dst_pos as usize) < MAX_MATCH_LEN - 1 {
+ dictb.dict[LZ_DICT_SIZE + dst_pos as usize] = c;
+ }
+
+ lookahead_size += 1;
+ if lookahead_size + d.dict.size >= MIN_MATCH_LEN.into() {
+ let ins_pos = lookahead_pos + lookahead_size - 3;
+ let hash = ((u32::from(dictb.dict[(ins_pos & LZ_DICT_SIZE_MASK) as usize])
+ << (LZ_HASH_SHIFT * 2))
+ ^ ((u32::from(dictb.dict[((ins_pos + 1) & LZ_DICT_SIZE_MASK) as usize])
+ << LZ_HASH_SHIFT)
+ ^ u32::from(c)))
+ & (LZ_HASH_SIZE as u32 - 1);
+
+ dictb.next[(ins_pos & LZ_DICT_SIZE_MASK) as usize] = dictb.hash[hash as usize];
+ dictb.hash[hash as usize] = ins_pos as u16;
+ }
+ }
+
+ src_pos += num_bytes_to_process;
+ }
+
+ d.dict.size = cmp::min(LZ_DICT_SIZE - lookahead_size, d.dict.size);
+ if d.params.flush == TDEFLFlush::None && (lookahead_size as usize) < MAX_MATCH_LEN {
+ break;
+ }
+
+ let mut len_to_move = 1;
+ let mut cur_match_dist = 0;
+ let mut cur_match_len = if saved_match_len != 0 {
+ saved_match_len
+ } else {
+ u32::from(MIN_MATCH_LEN) - 1
+ };
+ let cur_pos = lookahead_pos & LZ_DICT_SIZE_MASK;
+ if d.params.flags & (TDEFL_RLE_MATCHES | TDEFL_FORCE_ALL_RAW_BLOCKS) != 0 {
+ // If TDEFL_RLE_MATCHES is set, we only look for repeating sequences of the current byte.
+ if d.dict.size != 0 && d.params.flags & TDEFL_FORCE_ALL_RAW_BLOCKS == 0 {
+ let c = d.dict.b.dict[((cur_pos.wrapping_sub(1)) & LZ_DICT_SIZE_MASK) as usize];
+ cur_match_len = d.dict.b.dict[cur_pos as usize..(cur_pos + lookahead_size) as usize]
+ .iter()
+ .take_while(|&x| *x == c)
+ .count() as u32;
+ if cur_match_len < MIN_MATCH_LEN.into() {
+ cur_match_len = 0
+ } else {
+ cur_match_dist = 1
+ }
+ }
+ } else {
+ // Try to find a match for the bytes at the current position.
+ let dist_len = d.dict.find_match(
+ lookahead_pos,
+ d.dict.size,
+ lookahead_size as u32,
+ cur_match_dist,
+ cur_match_len,
+ );
+ cur_match_dist = dist_len.0;
+ cur_match_len = dist_len.1;
+ }
+
+ let far_and_small = cur_match_len == MIN_MATCH_LEN.into() && cur_match_dist >= 8 * 1024;
+ let filter_small = d.params.flags & TDEFL_FILTER_MATCHES != 0 && cur_match_len <= 5;
+ if far_and_small || filter_small || cur_pos == cur_match_dist as usize {
+ cur_match_dist = 0;
+ cur_match_len = 0;
+ }
+
+ if saved_match_len != 0 {
+ if cur_match_len > saved_match_len {
+ record_literal(&mut d.huff, &mut d.lz, saved_lit);
+ if cur_match_len >= 128 {
+ record_match(&mut d.huff, &mut d.lz, cur_match_len, cur_match_dist);
+ saved_match_len = 0;
+ len_to_move = cur_match_len as usize;
+ } else {
+ saved_lit = d.dict.b.dict[cur_pos as usize];
+ saved_match_dist = cur_match_dist;
+ saved_match_len = cur_match_len;
+ }
+ } else {
+ record_match(&mut d.huff, &mut d.lz, saved_match_len, saved_match_dist);
+ len_to_move = (saved_match_len - 1) as usize;
+ saved_match_len = 0;
+ }
+ } else if cur_match_dist == 0 {
+ record_literal(
+ &mut d.huff,
+ &mut d.lz,
+ d.dict.b.dict[cmp::min(cur_pos as usize, d.dict.b.dict.len() - 1)],
+ );
+ } else if d.params.greedy_parsing
+ || (d.params.flags & TDEFL_RLE_MATCHES != 0)
+ || cur_match_len >= 128
+ {
+ // If we are using lazy matching, check for matches at the next byte if the current
+ // match was shorter than 128 bytes.
+ record_match(&mut d.huff, &mut d.lz, cur_match_len, cur_match_dist);
+ len_to_move = cur_match_len as usize;
+ } else {
+ saved_lit = d.dict.b.dict[cmp::min(cur_pos as usize, d.dict.b.dict.len() - 1)];
+ saved_match_dist = cur_match_dist;
+ saved_match_len = cur_match_len;
+ }
+
+ lookahead_pos += len_to_move;
+ assert!(lookahead_size >= len_to_move);
+ lookahead_size -= len_to_move;
+ d.dict.size = cmp::min(d.dict.size + len_to_move, LZ_DICT_SIZE);
+
+ let lz_buf_tight = d.lz.code_position > LZ_CODE_BUF_SIZE - 8;
+ let raw = d.params.flags & TDEFL_FORCE_ALL_RAW_BLOCKS != 0;
+ let fat = ((d.lz.code_position * 115) >> 7) >= d.lz.total_bytes as usize;
+ let fat_or_raw = (d.lz.total_bytes > 31 * 1024) && (fat || raw);
+
+ if lz_buf_tight || fat_or_raw {
+ d.params.src_pos = src_pos;
+ // These values are used in flush_block, so we need to write them back here.
+ d.dict.lookahead_size = lookahead_size;
+ d.dict.lookahead_pos = lookahead_pos;
+
+ let n = flush_block(d, callback, TDEFLFlush::None)
+ .unwrap_or(TDEFLStatus::PutBufFailed as i32);
+ if n != 0 {
+ d.params.saved_lit = saved_lit;
+ d.params.saved_match_dist = saved_match_dist;
+ d.params.saved_match_len = saved_match_len;
+ return n > 0;
+ }
+ }
+ }
+
+ d.params.src_pos = src_pos;
+ d.dict.lookahead_size = lookahead_size;
+ d.dict.lookahead_pos = lookahead_pos;
+ d.params.saved_lit = saved_lit;
+ d.params.saved_match_dist = saved_match_dist;
+ d.params.saved_match_len = saved_match_len;
+ true
+}
+
+const COMP_FAST_LOOKAHEAD_SIZE: usize = 4096;
+
+fn compress_fast(d: &mut CompressorOxide, callback: &mut CallbackOxide) -> bool {
+ let mut src_pos = d.params.src_pos;
+ let mut lookahead_size = d.dict.lookahead_size;
+ let mut lookahead_pos = d.dict.lookahead_pos;
+
+ let mut cur_pos = lookahead_pos & LZ_DICT_SIZE_MASK;
+ let in_buf = match callback.in_buf {
+ None => return true,
+ Some(in_buf) => in_buf,
+ };
+
+ debug_assert!(d.lz.code_position < LZ_CODE_BUF_SIZE - 2);
+
+ while src_pos < in_buf.len() || (d.params.flush != TDEFLFlush::None && lookahead_size > 0) {
+ let mut dst_pos = ((lookahead_pos + lookahead_size) & LZ_DICT_SIZE_MASK) as usize;
+ let mut num_bytes_to_process = cmp::min(
+ in_buf.len() - src_pos,
+ (COMP_FAST_LOOKAHEAD_SIZE - lookahead_size) as usize,
+ );
+ lookahead_size += num_bytes_to_process;
+
+ while num_bytes_to_process != 0 {
+ let n = cmp::min(LZ_DICT_SIZE - dst_pos, num_bytes_to_process);
+ d.dict.b.dict[dst_pos..dst_pos + n].copy_from_slice(&in_buf[src_pos..src_pos + n]);
+
+ if dst_pos < MAX_MATCH_LEN - 1 {
+ let m = cmp::min(n, MAX_MATCH_LEN - 1 - dst_pos);
+ d.dict.b.dict[dst_pos + LZ_DICT_SIZE..dst_pos + LZ_DICT_SIZE + m]
+ .copy_from_slice(&in_buf[src_pos..src_pos + m]);
+ }
+
+ src_pos += n;
+ dst_pos = (dst_pos + n) & LZ_DICT_SIZE_MASK as usize;
+ num_bytes_to_process -= n;
+ }
+
+ d.dict.size = cmp::min(LZ_DICT_SIZE - lookahead_size, d.dict.size);
+ if d.params.flush == TDEFLFlush::None && lookahead_size < COMP_FAST_LOOKAHEAD_SIZE {
+ break;
+ }
+
+ while lookahead_size >= 4 {
+ let mut cur_match_len = 1;
+
+ let first_trigram = d.dict.read_unaligned_u32(cur_pos) & 0xFF_FFFF;
+
+ let hash = (first_trigram ^ (first_trigram >> (24 - (LZ_HASH_BITS - 8))))
+ & LEVEL1_HASH_SIZE_MASK;
+
+ let mut probe_pos = usize::from(d.dict.b.hash[hash as usize]);
+ d.dict.b.hash[hash as usize] = lookahead_pos as u16;
+
+ let mut cur_match_dist = (lookahead_pos - probe_pos as usize) as u16;
+ if cur_match_dist as usize <= d.dict.size {
+ probe_pos &= LZ_DICT_SIZE_MASK;
+
+ let trigram = d.dict.read_unaligned_u32(probe_pos) & 0xFF_FFFF;
+
+ if first_trigram == trigram {
+ // The trigram was already tested, so we can start comparing from a "+ 3" displacement.
+ let mut p = cur_pos + 3;
+ let mut q = probe_pos + 3;
+ cur_match_len = (|| {
+ for _ in 0..32 {
+ let p_data: u64 = d.dict.read_unaligned_u64(p);
+ let q_data: u64 = d.dict.read_unaligned_u64(q);
+ let xor_data = p_data ^ q_data;
+ if xor_data == 0 {
+ p += 8;
+ q += 8;
+ } else {
+ let trailing = xor_data.trailing_zeros();
+ return p as u32 - cur_pos as u32 + (trailing >> 3);
+ }
+ }
+
+ if cur_match_dist == 0 {
+ 0
+ } else {
+ MAX_MATCH_LEN as u32
+ }
+ })();
+
+ if cur_match_len < MIN_MATCH_LEN.into()
+ || (cur_match_len == MIN_MATCH_LEN.into() && cur_match_dist >= 8 * 1024)
+ {
+ let lit = first_trigram as u8;
+ cur_match_len = 1;
+ d.lz.write_code(lit);
+ *d.lz.get_flag() >>= 1;
+ d.huff.count[0][lit as usize] += 1;
+ } else {
+ // Limit the match to the length of the lookahead so we don't create a match
+ // that ends after the end of the input data.
+ cur_match_len = cmp::min(cur_match_len, lookahead_size as u32);
+ debug_assert!(cur_match_len >= MIN_MATCH_LEN.into());
+ debug_assert!(cur_match_dist >= 1);
+ debug_assert!(cur_match_dist as usize <= LZ_DICT_SIZE);
+ cur_match_dist -= 1;
+
+ d.lz.write_code((cur_match_len - u32::from(MIN_MATCH_LEN)) as u8);
+ d.lz.write_code(cur_match_dist as u8);
+ d.lz.write_code((cur_match_dist >> 8) as u8);
+
+ *d.lz.get_flag() >>= 1;
+ *d.lz.get_flag() |= 0x80;
+ if cur_match_dist < 512 {
+ d.huff.count[1][SMALL_DIST_SYM[cur_match_dist as usize] as usize] += 1;
+ } else {
+ d.huff.count[1]
+ [LARGE_DIST_SYM[(cur_match_dist >> 8) as usize] as usize] += 1;
+ }
+
+ d.huff.count[0][LEN_SYM[(cur_match_len - u32::from(MIN_MATCH_LEN)) as usize]
+ as usize] += 1;
+ }
+ } else {
+ d.lz.write_code(first_trigram as u8);
+ *d.lz.get_flag() >>= 1;
+ d.huff.count[0][first_trigram as u8 as usize] += 1;
+ }
+
+ d.lz.consume_flag();
+ d.lz.total_bytes += cur_match_len;
+ lookahead_pos += cur_match_len as usize;
+ d.dict.size = cmp::min(d.dict.size + cur_match_len as usize, LZ_DICT_SIZE);
+ cur_pos = (cur_pos + cur_match_len as usize) & LZ_DICT_SIZE_MASK;
+ lookahead_size -= cur_match_len as usize;
+
+ if d.lz.code_position > LZ_CODE_BUF_SIZE - 8 {
+ // These values are used in flush_block, so we need to write them back here.
+ d.dict.lookahead_size = lookahead_size;
+ d.dict.lookahead_pos = lookahead_pos;
+
+ let n = match flush_block(d, callback, TDEFLFlush::None) {
+ Err(_) => {
+ d.params.src_pos = src_pos;
+ d.params.prev_return_status = TDEFLStatus::PutBufFailed;
+ return false;
+ }
+ Ok(status) => status,
+ };
+ if n != 0 {
+ d.params.src_pos = src_pos;
+ return n > 0;
+ }
+ debug_assert!(d.lz.code_position < LZ_CODE_BUF_SIZE - 2);
+
+ lookahead_size = d.dict.lookahead_size;
+ lookahead_pos = d.dict.lookahead_pos;
+ }
+ }
+ }
+
+ while lookahead_size != 0 {
+ let lit = d.dict.b.dict[cur_pos as usize];
+ d.lz.total_bytes += 1;
+ d.lz.write_code(lit);
+ *d.lz.get_flag() >>= 1;
+ d.lz.consume_flag();
+
+ d.huff.count[0][lit as usize] += 1;
+ lookahead_pos += 1;
+ d.dict.size = cmp::min(d.dict.size + 1, LZ_DICT_SIZE);
+ cur_pos = (cur_pos + 1) & LZ_DICT_SIZE_MASK;
+ lookahead_size -= 1;
+
+ if d.lz.code_position > LZ_CODE_BUF_SIZE - 8 {
+ // These values are used in flush_block, so we need to write them back here.
+ d.dict.lookahead_size = lookahead_size;
+ d.dict.lookahead_pos = lookahead_pos;
+
+ let n = match flush_block(d, callback, TDEFLFlush::None) {
+ Err(_) => {
+ d.params.prev_return_status = TDEFLStatus::PutBufFailed;
+ d.params.src_pos = src_pos;
+ return false;
+ }
+ Ok(status) => status,
+ };
+ if n != 0 {
+ d.params.src_pos = src_pos;
+ return n > 0;
+ }
+
+ lookahead_size = d.dict.lookahead_size;
+ lookahead_pos = d.dict.lookahead_pos;
+ }
+ }
+ }
+
+ d.params.src_pos = src_pos;
+ d.dict.lookahead_size = lookahead_size;
+ d.dict.lookahead_pos = lookahead_pos;
+ true
+}
+
+fn flush_output_buffer(c: &mut CallbackOxide, p: &mut ParamsOxide) -> (TDEFLStatus, usize, usize) {
+ let mut res = (TDEFLStatus::Okay, p.src_pos, 0);
+ if let CallbackOut::Buf(ref mut cb) = c.out {
+ let n = cmp::min(cb.out_buf.len() - p.out_buf_ofs, p.flush_remaining as usize);
+ if n != 0 {
+ (&mut cb.out_buf[p.out_buf_ofs..p.out_buf_ofs + n])
+ .copy_from_slice(&p.local_buf.b[p.flush_ofs as usize..p.flush_ofs as usize + n]);
+ }
+ p.flush_ofs += n as u32;
+ p.flush_remaining -= n as u32;
+ p.out_buf_ofs += n;
+ res.2 = p.out_buf_ofs;
+ }
+
+ if p.finished && p.flush_remaining == 0 {
+ res.0 = TDEFLStatus::Done
+ }
+ res
+}
+
+/// Main compression function. Tries to compress as much as possible from `in_buf` and
+/// puts compressed output into `out_buf`.
+///
+/// The value of `flush` determines whether the compressor should attempt to flush all output,
+/// and whether it should try to finish the stream.
+///
+/// Use [`TDEFLFlush::Finish`] on the final call to signal that the stream is finishing.
+///
+/// Note that this function does not keep track of whether a flush marker has been output, so
+/// if called using [`TDEFLFlush::Sync`], the caller needs to ensure there is enough space in the
+/// output buffer if they want to avoid repeated flush markers.
+/// See #105 for details.
+///
+/// # Returns
+/// Returns a tuple containing the current status of the compressor, the current position
+/// in the input buffer and the current position in the output buffer.
+pub fn compress(
+ d: &mut CompressorOxide,
+ in_buf: &[u8],
+ out_buf: &mut [u8],
+ flush: TDEFLFlush,
+) -> (TDEFLStatus, usize, usize) {
+ compress_inner(
+ d,
+ &mut CallbackOxide::new_callback_buf(in_buf, out_buf),
+ flush,
+ )
+}
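+
+// A minimal usage sketch (not part of the crate; the buffer size and input are
+// placeholders, and a real caller would loop while the status is `Okay`, as
+// `compress_to_vec_inner` in `deflate/mod.rs` does):
+/*
+let flags = create_comp_flags_from_zip_params(6, 1, 0);
+let mut compressor = CompressorOxide::new(flags);
+let mut output = vec![0u8; 4096];
+let (status, bytes_in, bytes_out) =
+    compress(&mut compressor, b"example input", &mut output, TDEFLFlush::Finish);
+// `bytes_in` bytes of input were consumed and `bytes_out` bytes of compressed data
+// were written; `status` is `TDEFLStatus::Done` once the stream is finished.
+*/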
+
+/// Main compression function. Writes output through the provided callback.
+///
+/// # Returns
+/// Returns a tuple containing the current status of the compressor and the current position
+/// in the input buffer.
+///
+/// The caller is responsible for ensuring the `CallbackFunc` struct will not cause undefined
+/// behaviour.
+pub fn compress_to_output(
+ d: &mut CompressorOxide,
+ in_buf: &[u8],
+ flush: TDEFLFlush,
+ mut callback_func: impl FnMut(&[u8]) -> bool,
+) -> (TDEFLStatus, usize) {
+ let res = compress_inner(
+ d,
+ &mut CallbackOxide::new_callback_func(
+ in_buf,
+ CallbackFunc {
+ put_buf_func: &mut callback_func,
+ },
+ ),
+ flush,
+ );
+
+ (res.0, res.1)
+}
+
+fn compress_inner(
+ d: &mut CompressorOxide,
+ callback: &mut CallbackOxide,
+ flush: TDEFLFlush,
+) -> (TDEFLStatus, usize, usize) {
+ d.params.out_buf_ofs = 0;
+ d.params.src_pos = 0;
+
+ let prev_ok = d.params.prev_return_status == TDEFLStatus::Okay;
+ let flush_finish_once = d.params.flush != TDEFLFlush::Finish || flush == TDEFLFlush::Finish;
+
+ d.params.flush = flush;
+ if !prev_ok || !flush_finish_once {
+ d.params.prev_return_status = TDEFLStatus::BadParam;
+ return (d.params.prev_return_status, 0, 0);
+ }
+
+ if d.params.flush_remaining != 0 || d.params.finished {
+ let res = flush_output_buffer(callback, &mut d.params);
+ d.params.prev_return_status = res.0;
+ return res;
+ }
+
+ let one_probe = d.params.flags & MAX_PROBES_MASK as u32 == 1;
+ let greedy = d.params.flags & TDEFL_GREEDY_PARSING_FLAG != 0;
+ let filter_or_rle_or_raw = d.params.flags
+ & (TDEFL_FILTER_MATCHES | TDEFL_FORCE_ALL_RAW_BLOCKS | TDEFL_RLE_MATCHES)
+ != 0;
+
+ let compress_success = if one_probe && greedy && !filter_or_rle_or_raw {
+ compress_fast(d, callback)
+ } else {
+ compress_normal(d, callback)
+ };
+
+ if !compress_success {
+ return (
+ d.params.prev_return_status,
+ d.params.src_pos,
+ d.params.out_buf_ofs,
+ );
+ }
+
+ if let Some(in_buf) = callback.in_buf {
+ if d.params.flags & (TDEFL_WRITE_ZLIB_HEADER | TDEFL_COMPUTE_ADLER32) != 0 {
+ d.params.adler32 = update_adler32(d.params.adler32, &in_buf[..d.params.src_pos]);
+ }
+ }
+
+ let flush_none = d.params.flush == TDEFLFlush::None;
+ let in_left = callback.in_buf.map_or(0, |buf| buf.len()) - d.params.src_pos;
+ let remaining = in_left != 0 || d.params.flush_remaining != 0;
+ if !flush_none && d.dict.lookahead_size == 0 && !remaining {
+ let flush = d.params.flush;
+ match flush_block(d, callback, flush) {
+ Err(_) => {
+ d.params.prev_return_status = TDEFLStatus::PutBufFailed;
+ return (
+ d.params.prev_return_status,
+ d.params.src_pos,
+ d.params.out_buf_ofs,
+ );
+ }
+ Ok(x) if x < 0 => {
+ return (
+ d.params.prev_return_status,
+ d.params.src_pos,
+ d.params.out_buf_ofs,
+ )
+ }
+ _ => {
+ d.params.finished = d.params.flush == TDEFLFlush::Finish;
+ if d.params.flush == TDEFLFlush::Full {
+ memset(&mut d.dict.b.hash[..], 0);
+ memset(&mut d.dict.b.next[..], 0);
+ d.dict.size = 0;
+ }
+ }
+ }
+ }
+
+ let res = flush_output_buffer(callback, &mut d.params);
+ d.params.prev_return_status = res.0;
+
+ res
+}
+
+/// Create a set of compression flags using parameters used by zlib and other compressors.
+/// Mainly intended for use when transitioning from C libraries, as it deals with raw integers.
+///
+/// # Parameters
+/// `level` determines the compression level. Clamped to a maximum of 10; negative values result
+/// in `CompressionLevel::DefaultLevel`.
+/// `window_bits`: values above 0 wrap the stream in a zlib wrapper; 0 or negative values give a
+/// raw deflate stream.
+/// `strategy`: sets the strategy if the value matches one of the values in `CompressionStrategy`.
+///
+/// # Notes
+/// This function may be removed or moved to the `miniz_oxide_c_api` in the future.
+pub fn create_comp_flags_from_zip_params(level: i32, window_bits: i32, strategy: i32) -> u32 {
+ let num_probes = (if level >= 0 {
+ cmp::min(10, level)
+ } else {
+ CompressionLevel::DefaultLevel as i32
+ }) as usize;
+ let greedy = if level <= 3 {
+ TDEFL_GREEDY_PARSING_FLAG
+ } else {
+ 0
+ };
+ let mut comp_flags = NUM_PROBES[num_probes] | greedy;
+
+ if window_bits > 0 {
+ comp_flags |= TDEFL_WRITE_ZLIB_HEADER;
+ }
+
+ if level == 0 {
+ comp_flags |= TDEFL_FORCE_ALL_RAW_BLOCKS;
+ } else if strategy == CompressionStrategy::Filtered as i32 {
+ comp_flags |= TDEFL_FILTER_MATCHES;
+ } else if strategy == CompressionStrategy::HuffmanOnly as i32 {
+ comp_flags &= !MAX_PROBES_MASK as u32;
+ } else if strategy == CompressionStrategy::Fixed as i32 {
+ comp_flags |= TDEFL_FORCE_ALL_STATIC_BLOCKS;
+ } else if strategy == CompressionStrategy::RLE as i32 {
+ comp_flags |= TDEFL_RLE_MATCHES;
+ }
+
+ comp_flags
+}
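+
+// For example, `create_comp_flags_from_zip_params(6, 1, 0)` builds flags for a
+// zlib-wrapped stream at level 6 with strategy 0, which matches how
+// `compress_to_vec_inner` in `deflate/mod.rs` calls this function when a zlib
+// wrapper is requested.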
+
+#[cfg(test)]
+mod test {
+ use super::{
+ compress_to_output, create_comp_flags_from_zip_params, read_u16_le, write_u16_le,
+ CompressionStrategy, CompressorOxide, TDEFLFlush, TDEFLStatus, DEFAULT_FLAGS,
+ MZ_DEFAULT_WINDOW_BITS,
+ };
+ use crate::inflate::decompress_to_vec;
+ use alloc::vec;
+
+ #[test]
+ fn u16_to_slice() {
+ let mut slice = [0, 0];
+ write_u16_le(2000, &mut slice, 0);
+ assert_eq!(slice, [208, 7]);
+ }
+
+ #[test]
+ fn u16_from_slice() {
+ let mut slice = [208, 7];
+ assert_eq!(read_u16_le(&mut slice, 0), 2000);
+ }
+
+ #[test]
+ fn compress_output() {
+ assert_eq!(
+ DEFAULT_FLAGS,
+ create_comp_flags_from_zip_params(
+ 4,
+ MZ_DEFAULT_WINDOW_BITS,
+ CompressionStrategy::Default as i32
+ )
+ );
+
+ let slice = [
+ 1, 2, 3, 4, 1, 2, 3, 1, 2, 3, 1, 2, 6, 1, 2, 3, 1, 2, 3, 2, 3, 1, 2, 3,
+ ];
+ let mut encoded = vec![];
+ let flags = create_comp_flags_from_zip_params(6, 0, 0);
+ let mut d = CompressorOxide::new(flags);
+ let (status, in_consumed) =
+ compress_to_output(&mut d, &slice, TDEFLFlush::Finish, |out: &[u8]| {
+ encoded.extend_from_slice(out);
+ true
+ });
+
+ assert_eq!(status, TDEFLStatus::Done);
+ assert_eq!(in_consumed, slice.len());
+
+ let decoded = decompress_to_vec(&encoded[..]).unwrap();
+ assert_eq!(&decoded[..], &slice[..]);
+ }
+
+ #[test]
+ /// Check fast compress mode
+ fn compress_fast() {
+ let slice = [
+ 1, 2, 3, 4, 1, 2, 3, 1, 2, 3, 1, 2, 6, 1, 2, 3, 1, 2, 3, 2, 3, 1, 2, 3,
+ ];
+ let mut encoded = vec![];
+ let flags = create_comp_flags_from_zip_params(1, 0, 0);
+ let mut d = CompressorOxide::new(flags);
+ let (status, in_consumed) =
+ compress_to_output(&mut d, &slice, TDEFLFlush::Finish, |out: &[u8]| {
+ encoded.extend_from_slice(out);
+ true
+ });
+
+ assert_eq!(status, TDEFLStatus::Done);
+ assert_eq!(in_consumed, slice.len());
+
+ // Needs to be altered if algorithm improves.
+ assert_eq!(
+ &encoded[..],
+ [99, 100, 98, 102, 1, 98, 48, 98, 3, 147, 204, 76, 204, 140, 76, 204, 0]
+ );
+
+ let decoded = decompress_to_vec(&encoded[..]).unwrap();
+ assert_eq!(&decoded[..], &slice[..]);
+ }
+}
diff --git a/vendor/miniz_oxide-0.5.3/src/deflate/mod.rs b/vendor/miniz_oxide-0.5.3/src/deflate/mod.rs
new file mode 100644
index 000000000..471b94b9d
--- /dev/null
+++ b/vendor/miniz_oxide-0.5.3/src/deflate/mod.rs
@@ -0,0 +1,227 @@
+//! This module contains functionality for compression.
+
+use crate::alloc::vec;
+use crate::alloc::vec::Vec;
+
+mod buffer;
+pub mod core;
+pub mod stream;
+use self::core::*;
+
+/// How much processing the compressor should do to compress the data.
+/// `NoCompression` and `BestSpeed` have special meanings; the other levels determine the number
+/// of checks for matches in the hash chains and whether to use lazy or greedy parsing.
+#[repr(i32)]
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub enum CompressionLevel {
+ /// Don't do any compression, only output uncompressed blocks.
+ NoCompression = 0,
+ /// Fast compression. Uses a special compression routine that is optimized for speed.
+ BestSpeed = 1,
+ /// Slow/high compression. Do a lot of checks to try to find good matches.
+ BestCompression = 9,
+ /// Even more checks, can be very slow.
+ UberCompression = 10,
+ /// Default compromise between speed and compression.
+ DefaultLevel = 6,
+ /// Use the default compression level.
+ DefaultCompression = -1,
+}
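+
+// These variants can be passed anywhere a numeric level is expected by casting,
+// e.g. (illustrative only, with some `data: &[u8]` in scope):
+/*
+let compressed = compress_to_vec(data, CompressionLevel::BestSpeed as u8);
+*/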
+
+// Missing safe rust analogue (this and mem-to-mem are quite similar)
+/*
+fn tdefl_compress(
+ d: Option<&mut CompressorOxide>,
+ in_buf: *const c_void,
+ in_size: Option<&mut usize>,
+ out_buf: *mut c_void,
+ out_size: Option<&mut usize>,
+ flush: TDEFLFlush,
+) -> TDEFLStatus {
+ let res = match d {
+ None => {
+ in_size.map(|size| *size = 0);
+ out_size.map(|size| *size = 0);
+ (TDEFLStatus::BadParam, 0, 0)
+ },
+ Some(compressor) => {
+ let callback_res = CallbackOxide::new(
+ compressor.callback_func.clone(),
+ in_buf,
+ in_size,
+ out_buf,
+ out_size,
+ );
+
+ if let Ok(mut callback) = callback_res {
+ let res = compress(compressor, &mut callback, flush);
+ callback.update_size(Some(res.1), Some(res.2));
+ res
+ } else {
+ (TDEFLStatus::BadParam, 0, 0)
+ }
+ }
+ };
+ res.0
+}*/
+
+// Missing safe rust analogue
+/*
+fn tdefl_init(
+ d: Option<&mut CompressorOxide>,
+ put_buf_func: PutBufFuncPtr,
+ put_buf_user: *mut c_void,
+ flags: c_int,
+) -> TDEFLStatus {
+ if let Some(d) = d {
+ *d = CompressorOxide::new(
+ put_buf_func.map(|func|
+ CallbackFunc { put_buf_func: func, put_buf_user: put_buf_user }
+ ),
+ flags as u32,
+ );
+ TDEFLStatus::Okay
+ } else {
+ TDEFLStatus::BadParam
+ }
+}*/
+
+// Missing safe rust analogue (though maybe best served by flate2 front-end instead)
+/*
+fn tdefl_compress_mem_to_output(
+ buf: *const c_void,
+ buf_len: usize,
+ put_buf_func: PutBufFuncPtr,
+ put_buf_user: *mut c_void,
+ flags: c_int,
+) -> bool*/
+
+// Missing safe Rust analogue
+/*
+fn tdefl_compress_mem_to_mem(
+ out_buf: *mut c_void,
+ out_buf_len: usize,
+ src_buf: *const c_void,
+ src_buf_len: usize,
+ flags: c_int,
+) -> usize*/
+
+/// Compress the input data to a vector, using the specified compression level (0-10).
+pub fn compress_to_vec(input: &[u8], level: u8) -> Vec<u8> {
+ compress_to_vec_inner(input, level, 0, 0)
+}
+
+/// Compress the input data to a vector, using the specified compression level (0-10), and with a
+/// zlib wrapper.
+pub fn compress_to_vec_zlib(input: &[u8], level: u8) -> Vec<u8> {
+ compress_to_vec_inner(input, level, 1, 0)
+}
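+
+// Round-trip sketch (illustrative only; the tests below exercise the same paths):
+/*
+let compressed = compress_to_vec_zlib(b"Hello, zlib!", 6);
+let decompressed = crate::inflate::decompress_to_vec_zlib(&compressed)
+    .expect("decompression failed");
+assert_eq!(&decompressed[..], b"Hello, zlib!");
+*/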
+
+/// Simple function to compress data to a vec.
+fn compress_to_vec_inner(input: &[u8], level: u8, window_bits: i32, strategy: i32) -> Vec<u8> {
+ // The comp flags function sets the zlib flag if the window_bits parameter is > 0.
+ let flags = create_comp_flags_from_zip_params(level.into(), window_bits, strategy);
+ let mut compressor = CompressorOxide::new(flags);
+ let mut output = vec![0; ::core::cmp::max(input.len() / 2, 2)];
+
+ let mut in_pos = 0;
+ let mut out_pos = 0;
+ loop {
+ let (status, bytes_in, bytes_out) = compress(
+ &mut compressor,
+ &input[in_pos..],
+ &mut output[out_pos..],
+ TDEFLFlush::Finish,
+ );
+
+ out_pos += bytes_out;
+ in_pos += bytes_in;
+
+ match status {
+ TDEFLStatus::Done => {
+ output.truncate(out_pos);
+ break;
+ }
+ TDEFLStatus::Okay => {
+ // We need more space, so resize the vector.
+ if output.len().saturating_sub(out_pos) < 30 {
+ output.resize(output.len() * 2, 0)
+ }
+ }
+ // Not supposed to happen unless there is a bug.
+ _ => panic!("Bug! Unexpectedly failed to compress!"),
+ }
+ }
+
+ output
+}
+
+#[cfg(test)]
+mod test {
+ use super::{compress_to_vec, compress_to_vec_inner, CompressionStrategy};
+ use crate::inflate::decompress_to_vec;
+ use alloc::vec;
+
+ /// Test deflate example.
+ ///
+ /// Check if the encoder produces the same code as the example given by Mark Adler here:
+ /// https://stackoverflow.com/questions/17398931/deflate-encoding-with-static-huffman-codes/17415203
+ #[test]
+ fn compress_small() {
+ let test_data = b"Deflate late";
+ let check = [
+ 0x73, 0x49, 0x4d, 0xcb, 0x49, 0x2c, 0x49, 0x55, 0x00, 0x11, 0x00,
+ ];
+
+ let res = compress_to_vec(test_data, 1);
+ assert_eq!(&check[..], res.as_slice());
+
+ let res = compress_to_vec(test_data, 9);
+ assert_eq!(&check[..], res.as_slice());
+ }
+
+ #[test]
+ fn compress_huff_only() {
+ let test_data = b"Deflate late";
+
+ let res = compress_to_vec_inner(test_data, 1, 0, CompressionStrategy::HuffmanOnly as i32);
+ let d = decompress_to_vec(res.as_slice()).expect("Failed to decompress!");
+ assert_eq!(test_data, d.as_slice());
+ }
+
+ /// Test that a raw block compresses fine.
+ #[test]
+ fn compress_raw() {
+ let text = b"Hello, zlib!";
+ let encoded = {
+ let len = text.len();
+ let notlen = !len;
+ let mut encoded = vec![
+ 1,
+ len as u8,
+ (len >> 8) as u8,
+ notlen as u8,
+ (notlen >> 8) as u8,
+ ];
+ encoded.extend_from_slice(&text[..]);
+ encoded
+ };
+
+ let res = compress_to_vec(text, 0);
+ assert_eq!(encoded, res.as_slice());
+ }
+
+ #[test]
+ fn short() {
+ let test_data = [10, 10, 10, 10, 10, 55];
+ let c = compress_to_vec(&test_data, 9);
+
+ let d = decompress_to_vec(c.as_slice()).expect("Failed to decompress!");
+ assert_eq!(&test_data, d.as_slice());
+ // Check that a static block is used here rather than a raw block,
+ // so the data is actually compressed.
+ // (The optimal compressed length would be 5, but neither miniz nor zlib manages that
+ // either, as neither checks matches against the byte at index 0.)
+ assert!(c.len() <= 6);
+ }
+}
diff --git a/vendor/miniz_oxide-0.5.3/src/deflate/stream.rs b/vendor/miniz_oxide-0.5.3/src/deflate/stream.rs
new file mode 100644
index 000000000..39aa82d92
--- /dev/null
+++ b/vendor/miniz_oxide-0.5.3/src/deflate/stream.rs
@@ -0,0 +1,121 @@
+//! Extra streaming compression functionality.
+//!
+//! As of now this is mainly intended for use in building a higher-level wrapper.
+//!
+//! There is no DeflateState as the needed state is contained in the compressor struct itself.
+
+use crate::deflate::core::{compress, CompressorOxide, TDEFLFlush, TDEFLStatus};
+use crate::{MZError, MZFlush, MZStatus, StreamResult};
+
+/// Try to compress from input to output with the given [`CompressorOxide`].
+///
+/// # Errors
+///
+/// Returns [`MZError::Buf`] if the `output` slice is empty, if no progress was made due to a
+/// lack of expected input data, or if called without [`MZFlush::Finish`] after the compression
+/// has already finished.
+///
+/// Returns [`MZError::Param`] if the compressor parameters are set wrong.
+///
+/// Returns [`MZError::Stream`] when the lower-level compressor returns a
+/// [`TDEFLStatus::PutBufFailed`]; this may not actually be possible.
+pub fn deflate(
+ compressor: &mut CompressorOxide,
+ input: &[u8],
+ output: &mut [u8],
+ flush: MZFlush,
+) -> StreamResult {
+ if output.is_empty() {
+ return StreamResult::error(MZError::Buf);
+ }
+
+ if compressor.prev_return_status() == TDEFLStatus::Done {
+ return if flush == MZFlush::Finish {
+ StreamResult {
+ bytes_written: 0,
+ bytes_consumed: 0,
+ status: Ok(MZStatus::StreamEnd),
+ }
+ } else {
+ StreamResult::error(MZError::Buf)
+ };
+ }
+
+ let mut bytes_written = 0;
+ let mut bytes_consumed = 0;
+
+ let mut next_in = input;
+ let mut next_out = output;
+
+ let status = loop {
+ let in_bytes;
+ let out_bytes;
+ let defl_status = {
+ let res = compress(compressor, next_in, next_out, TDEFLFlush::from(flush));
+ in_bytes = res.1;
+ out_bytes = res.2;
+ res.0
+ };
+
+ next_in = &next_in[in_bytes..];
+ next_out = &mut next_out[out_bytes..];
+ bytes_consumed += in_bytes;
+ bytes_written += out_bytes;
+
+ // Check if we are done, or compression failed.
+ match defl_status {
+ TDEFLStatus::BadParam => break Err(MZError::Param),
+ // Don't think this can happen as we're not using a custom callback.
+ TDEFLStatus::PutBufFailed => break Err(MZError::Stream),
+ TDEFLStatus::Done => break Ok(MZStatus::StreamEnd),
+ _ => (),
+ };
+
+ // All the output space was used, so wait for more.
+ if next_out.is_empty() {
+ break Ok(MZStatus::Ok);
+ }
+
+ if next_in.is_empty() && (flush != MZFlush::Finish) {
+ let total_changed = bytes_written > 0 || bytes_consumed > 0;
+
+ break if (flush != MZFlush::None) || total_changed {
+ // We wrote or consumed something, and/or did a flush (sync/partial etc.).
+ Ok(MZStatus::Ok)
+ } else {
+ // No more input data, not flushing, and nothing was consumed or written,
+ // so couldn't make any progress.
+ Err(MZError::Buf)
+ };
+ }
+ };
+ StreamResult {
+ bytes_consumed,
+ bytes_written,
+ status,
+ }
+}
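+
+// Sketch of the documented error case (illustrative only):
+/*
+let mut compressor = Box::<CompressorOxide>::default();
+let res = deflate(&mut compressor, b"data", &mut [], MZFlush::None);
+// `res.status` is `Err(MZError::Buf)` because the output slice is empty.
+*/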
+
+#[cfg(test)]
+mod test {
+ use super::deflate;
+ use crate::deflate::CompressorOxide;
+ use crate::inflate::decompress_to_vec_zlib;
+ use crate::{MZFlush, MZStatus};
+ use alloc::boxed::Box;
+ use alloc::vec;
+
+ #[test]
+ fn test_state() {
+ let data = b"Hello zlib!";
+ let mut compressed = vec![0; 50];
+ let mut compressor = Box::<CompressorOxide>::default();
+ let res = deflate(&mut compressor, data, &mut compressed, MZFlush::Finish);
+ let status = res.status.expect("Failed to compress!");
+ let decomp =
+ decompress_to_vec_zlib(&compressed).expect("Failed to decompress compressed data");
+ assert_eq!(status, MZStatus::StreamEnd);
+ assert_eq!(decomp[..], data[..]);
+ assert_eq!(res.bytes_consumed, data.len());
+ }
+}
diff --git a/vendor/miniz_oxide-0.5.3/src/inflate/core.rs b/vendor/miniz_oxide-0.5.3/src/inflate/core.rs
new file mode 100644
index 000000000..38bdacbbd
--- /dev/null
+++ b/vendor/miniz_oxide-0.5.3/src/inflate/core.rs
@@ -0,0 +1,1931 @@
+//! Streaming decompression functionality.
+
+use super::*;
+use crate::shared::{update_adler32, HUFFMAN_LENGTH_ORDER};
+
+use ::core::convert::TryInto;
+use ::core::{cmp, slice};
+
+use self::output_buffer::OutputBuffer;
+
+pub const TINFL_LZ_DICT_SIZE: usize = 32_768;
+
+/// A struct containing huffman code lengths and the huffman code tree used by the decompressor.
+struct HuffmanTable {
+ /// Length of the code at each index.
+ pub code_size: [u8; MAX_HUFF_SYMBOLS_0],
+ /// Fast lookup table for shorter huffman codes.
+ ///
+ /// See `HuffmanTable::fast_lookup`.
+ pub look_up: [i16; FAST_LOOKUP_SIZE as usize],
+ /// Full huffman tree.
+ ///
+ /// Positive values are edge nodes/symbols, negative values are
+ /// parent nodes/references to other nodes.
+ pub tree: [i16; MAX_HUFF_TREE_SIZE],
+}
+
+impl HuffmanTable {
+ const fn new() -> HuffmanTable {
+ HuffmanTable {
+ code_size: [0; MAX_HUFF_SYMBOLS_0],
+ look_up: [0; FAST_LOOKUP_SIZE as usize],
+ tree: [0; MAX_HUFF_TREE_SIZE],
+ }
+ }
+
+ /// Look for a symbol in the fast lookup table.
+ /// The symbol is stored in the lower 9 bits, the length in the next 6.
+ /// If the returned value is negative, the code wasn't found in the
+ /// fast lookup table and the full tree has to be traversed to find the code.
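+    ///
+    /// For example, an entry of `(8 << 9) | 65` encodes symbol 65 with a code length of 8 bits.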
+ #[inline]
+ fn fast_lookup(&self, bit_buf: BitBuffer) -> i16 {
+ self.look_up[(bit_buf & BitBuffer::from(FAST_LOOKUP_SIZE - 1)) as usize]
+ }
+
+ /// Get the symbol and the code length from the huffman tree.
+ #[inline]
+ fn tree_lookup(&self, fast_symbol: i32, bit_buf: BitBuffer, mut code_len: u32) -> (i32, u32) {
+ let mut symbol = fast_symbol;
+ // We step through the tree until we encounter a positive value, which indicates a
+ // symbol.
+ loop {
+ // symbol here indicates the position of the left (0) node, if the next bit is 1
+ // we add 1 to the lookup position to get the right node.
+ symbol = i32::from(self.tree[(!symbol + ((bit_buf >> code_len) & 1) as i32) as usize]);
+ code_len += 1;
+ if symbol >= 0 {
+ break;
+ }
+ }
+ (symbol, code_len)
+ }
+
+ #[inline]
+ /// Look up a symbol and code length from the bits in the provided bit buffer.
+ ///
+ /// Returns Some(symbol, length) on success,
+ /// None if the length is 0.
+ ///
+ /// It's possible we could avoid checking for 0 if we can guarantee a sane table.
+ /// TODO: Check if a smaller type for code_len helps performance.
+ fn lookup(&self, bit_buf: BitBuffer) -> Option<(i32, u32)> {
+ let symbol = self.fast_lookup(bit_buf).into();
+ if symbol >= 0 {
+ if (symbol >> 9) as u32 != 0 {
+ Some((symbol, (symbol >> 9) as u32))
+ } else {
+ // Zero-length code.
+ None
+ }
+ } else {
+ // We didn't get a symbol from the fast lookup table, so check the tree instead.
+ Some(self.tree_lookup(symbol, bit_buf, FAST_LOOKUP_BITS.into()))
+ }
+ }
+}
+
+/// The number of huffman tables used.
+const MAX_HUFF_TABLES: usize = 3;
+/// The length of the first (literal/length) huffman table.
+const MAX_HUFF_SYMBOLS_0: usize = 288;
+/// The length of the second (distance) huffman table.
+const MAX_HUFF_SYMBOLS_1: usize = 32;
+/// The length of the last (huffman code length) huffman table.
+const _MAX_HUFF_SYMBOLS_2: usize = 19;
+/// The maximum length of a code that can be looked up in the fast lookup table.
+const FAST_LOOKUP_BITS: u8 = 10;
+/// The size of the fast lookup table.
+const FAST_LOOKUP_SIZE: u32 = 1 << FAST_LOOKUP_BITS;
+const MAX_HUFF_TREE_SIZE: usize = MAX_HUFF_SYMBOLS_0 * 2;
+const LITLEN_TABLE: usize = 0;
+const DIST_TABLE: usize = 1;
+const HUFFLEN_TABLE: usize = 2;
+
+/// Flags to [`decompress()`] to control how inflation works.
+///
+/// These define bits for a bitmask argument.
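+///
+/// A typical combination for decompressing a complete zlib stream held in memory
+/// (an illustrative sketch, not the only valid combination):
+///
+/// ```ignore
+/// use miniz_oxide::inflate::core::inflate_flags::*;
+///
+/// let flags = TINFL_FLAG_PARSE_ZLIB_HEADER
+///     | TINFL_FLAG_COMPUTE_ADLER32
+///     | TINFL_FLAG_USING_NON_WRAPPING_OUTPUT_BUF;
+/// ```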
+pub mod inflate_flags {
+ /// Should we try to parse a zlib header?
+ ///
+ /// If unset, [`decompress()`] will expect an RFC1951 deflate stream. If set, it will expect an
+ /// RFC1950 zlib wrapper around the deflate stream.
+ pub const TINFL_FLAG_PARSE_ZLIB_HEADER: u32 = 1;
+
+ /// There will be more input that hasn't been given to the decompressor yet.
+ ///
+ /// This is useful when you want to decompress what you have so far,
+ /// even if you know there is probably more input that hasn't gotten here yet (_e.g._, over a
+ /// network connection). When [`decompress()`][super::decompress] reaches the end of the input
+ /// without finding the end of the compressed stream, it will return
+ /// [`TINFLStatus::NeedsMoreInput`][super::TINFLStatus::NeedsMoreInput] if this is set,
+ /// indicating that you should get more data before calling again. If not set, it will return
+ /// [`TINFLStatus::FailedCannotMakeProgress`][super::TINFLStatus::FailedCannotMakeProgress]
+ /// suggesting the stream is corrupt, since you claimed it was all there.
+ pub const TINFL_FLAG_HAS_MORE_INPUT: u32 = 2;
+
+ /// The output buffer should not wrap around.
+ pub const TINFL_FLAG_USING_NON_WRAPPING_OUTPUT_BUF: u32 = 4;
+
+ /// Calculate the adler32 checksum of the output data even if we're not inflating a zlib stream.
+ ///
+ /// If [`TINFL_FLAG_IGNORE_ADLER32`] is specified, it will override this.
+ ///
+    /// NOTE: Enabling/disabling this between calls to decompress will result in an incorrect
+ /// checksum.
+ pub const TINFL_FLAG_COMPUTE_ADLER32: u32 = 8;
+
+ /// Ignore adler32 checksum even if we are inflating a zlib stream.
+ ///
+ /// Overrides [`TINFL_FLAG_COMPUTE_ADLER32`] if both are enabled.
+ ///
+    /// NOTE: This flag does not exist in miniz, which does not support this feature; it is a
+    /// custom addition for miniz_oxide.
+    ///
+    /// NOTE: Should not be changed from enabled to disabled after decompression has started;
+    /// doing so will result in a checksum failure (barring the unlikely event that the checksum
+    /// happens to match anyway).
+ pub const TINFL_FLAG_IGNORE_ADLER32: u32 = 64;
+}
+
+use self::inflate_flags::*;
+
+const MIN_TABLE_SIZES: [u16; 3] = [257, 1, 4];
+
+#[cfg(target_pointer_width = "64")]
+type BitBuffer = u64;
+
+#[cfg(not(target_pointer_width = "64"))]
+type BitBuffer = u32;
+
+/// Main decompression struct.
+///
+pub struct DecompressorOxide {
+ /// Current state of the decompressor.
+ state: core::State,
+ /// Number of bits in the bit buffer.
+ num_bits: u32,
+ /// Zlib CMF
+ z_header0: u32,
+ /// Zlib FLG
+ z_header1: u32,
+ /// Adler32 checksum from the zlib header.
+ z_adler32: u32,
+ /// 1 if the current block is the last block, 0 otherwise.
+ finish: u32,
+ /// The type of the current block.
+ block_type: u32,
+ /// 1 if the adler32 value should be checked.
+ check_adler32: u32,
+ /// Last match distance.
+ dist: u32,
+ /// Variable used for match length, symbols, and a number of other things.
+ counter: u32,
+ /// Number of extra bits for the last length or distance code.
+ num_extra: u32,
+ /// Number of entries in each huffman table.
+ table_sizes: [u32; MAX_HUFF_TABLES],
+ /// Buffer of input data.
+ bit_buf: BitBuffer,
+ /// Huffman tables.
+ tables: [HuffmanTable; MAX_HUFF_TABLES],
+ /// Raw block header.
+ raw_header: [u8; 4],
+ /// Huffman length codes.
+ len_codes: [u8; MAX_HUFF_SYMBOLS_0 + MAX_HUFF_SYMBOLS_1 + 137],
+}
+
+impl DecompressorOxide {
+ /// Create a new tinfl_decompressor with all fields set to 0.
+ pub fn new() -> DecompressorOxide {
+ DecompressorOxide::default()
+ }
+
+ /// Set the current state to `Start`.
+ #[inline]
+ pub fn init(&mut self) {
+ // The rest of the data is reset or overwritten when used.
+ self.state = core::State::Start;
+ }
+
+ /// Returns the adler32 checksum of the currently decompressed data.
+ /// Note: Will return Some(1) if decompressing zlib but ignoring adler32.
+ #[inline]
+ pub fn adler32(&self) -> Option<u32> {
+ if self.state != State::Start && !self.state.is_failure() && self.z_header0 != 0 {
+ Some(self.check_adler32)
+ } else {
+ None
+ }
+ }
+
+ /// Returns the adler32 that was read from the zlib header if it exists.
+ #[inline]
+ pub fn adler32_header(&self) -> Option<u32> {
+ if self.state != State::Start && self.state != State::BadZlibHeader && self.z_header0 != 0 {
+ Some(self.z_adler32)
+ } else {
+ None
+ }
+ }
+}
+
+impl Default for DecompressorOxide {
+ /// Create a new tinfl_decompressor with all fields set to 0.
+ #[inline(always)]
+ fn default() -> Self {
+ DecompressorOxide {
+ state: core::State::Start,
+ num_bits: 0,
+ z_header0: 0,
+ z_header1: 0,
+ z_adler32: 0,
+ finish: 0,
+ block_type: 0,
+ check_adler32: 0,
+ dist: 0,
+ counter: 0,
+ num_extra: 0,
+ table_sizes: [0; MAX_HUFF_TABLES],
+ bit_buf: 0,
+ // TODO:(oyvindln) Check that copies here are optimized out in release mode.
+ tables: [
+ HuffmanTable::new(),
+ HuffmanTable::new(),
+ HuffmanTable::new(),
+ ],
+ raw_header: [0; 4],
+ len_codes: [0; MAX_HUFF_SYMBOLS_0 + MAX_HUFF_SYMBOLS_1 + 137],
+ }
+ }
+}
+
+#[derive(Copy, Clone, PartialEq, Eq, Debug)]
+enum State {
+ Start = 0,
+ ReadZlibCmf,
+ ReadZlibFlg,
+ ReadBlockHeader,
+ BlockTypeNoCompression,
+ RawHeader,
+ RawMemcpy1,
+ RawMemcpy2,
+ ReadTableSizes,
+ ReadHufflenTableCodeSize,
+ ReadLitlenDistTablesCodeSize,
+ ReadExtraBitsCodeSize,
+ DecodeLitlen,
+ WriteSymbol,
+ ReadExtraBitsLitlen,
+ DecodeDistance,
+ ReadExtraBitsDistance,
+ RawReadFirstByte,
+ RawStoreFirstByte,
+ WriteLenBytesToEnd,
+ BlockDone,
+ HuffDecodeOuterLoop1,
+ HuffDecodeOuterLoop2,
+ ReadAdler32,
+
+ DoneForever,
+
+ // Failure states.
+ BlockTypeUnexpected,
+ BadCodeSizeSum,
+ BadTotalSymbols,
+ BadZlibHeader,
+ DistanceOutOfBounds,
+ BadRawLength,
+ BadCodeSizeDistPrevLookup,
+ InvalidLitlen,
+ InvalidDist,
+ InvalidCodeLen,
+}
+
+impl State {
+ fn is_failure(self) -> bool {
+ match self {
+ BlockTypeUnexpected => true,
+ BadCodeSizeSum => true,
+ BadTotalSymbols => true,
+ BadZlibHeader => true,
+ DistanceOutOfBounds => true,
+ BadRawLength => true,
+ BadCodeSizeDistPrevLookup => true,
+ InvalidLitlen => true,
+ InvalidDist => true,
+ _ => false,
+ }
+ }
+
+ #[inline]
+ fn begin(&mut self, new_state: State) {
+ *self = new_state;
+ }
+}
+
+use self::State::*;
+
+// Not sure why miniz uses 32-bit values for these, maybe alignment/cache again?
+// # Optimization
+// We add an extra value at the end and make the tables 32 elements long
+// so we can use a mask to avoid bounds checks.
+// The invalid values are set to something high enough to avoid underflowing
+// the match length.
+/// Base length for each length code.
+///
+/// The base is used together with the value of the extra bits to decode the actual
+/// length/distance values in a match.
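+///
+/// For example, length code 266 has `LENGTH_BASE[266 - 257] = 13` and
+/// `LENGTH_EXTRA[266 - 257] = 1` extra bit, so an extra bit value of 1 decodes to a
+/// match length of 13 + 1 = 14.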
+#[rustfmt::skip]
+const LENGTH_BASE: [u16; 32] = [
+ 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31,
+ 35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258, 512, 512, 512
+];
+
+/// Number of extra bits for each length code.
+#[rustfmt::skip]
+const LENGTH_EXTRA: [u8; 32] = [
+ 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2,
+ 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0, 0, 0, 0
+];
+
+/// Base length for each distance code.
+#[rustfmt::skip]
+const DIST_BASE: [u16; 32] = [
+ 1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33,
+ 49, 65, 97, 129, 193, 257, 385, 513, 769, 1025, 1537,
+ 2049, 3073, 4097, 6145, 8193, 12_289, 16_385, 24_577, 32_768, 32_768
+];
+
+/// Number of extra bits for each distance code.
+#[rustfmt::skip]
+const DIST_EXTRA: [u8; 32] = [
+ 0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6,
+ 7, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 13, 13, 13, 13
+];
+
+/// The mask used when indexing the base/extra arrays.
+const BASE_EXTRA_MASK: usize = 32 - 1;
+
+/// Sets the value of all the elements of the slice to `val`.
+#[inline]
+fn memset<T: Copy>(slice: &mut [T], val: T) {
+ for x in slice {
+ *x = val
+ }
+}
+
+/// Read a little-endian u16 value from the slice iterator.
+///
+/// # Panics
+/// Panics if there are fewer than two bytes left.
+#[inline]
+fn read_u16_le(iter: &mut slice::Iter<u8>) -> u16 {
+ let ret = {
+ let two_bytes = iter.as_ref()[..2].try_into().unwrap();
+ u16::from_le_bytes(two_bytes)
+ };
+ iter.nth(1);
+ ret
+}
+
+/// Read a little-endian u32 value from the slice iterator.
+///
+/// # Panics
+/// Panics if there are fewer than four bytes left.
+#[inline(always)]
+#[cfg(target_pointer_width = "64")]
+fn read_u32_le(iter: &mut slice::Iter<u8>) -> u32 {
+ let ret = {
+ let four_bytes: [u8; 4] = iter.as_ref()[..4].try_into().unwrap();
+ u32::from_le_bytes(four_bytes)
+ };
+ iter.nth(3);
+ ret
+}
+
+/// Ensure that there is data in the bit buffer.
+///
+/// On 64-bit platforms, we use a 64-bit value, so this will
+/// result in there being at least 32 bits in the bit buffer.
+/// This function assumes that there are at least 4 bytes left in the input buffer.
+#[inline(always)]
+#[cfg(target_pointer_width = "64")]
+fn fill_bit_buffer(l: &mut LocalVars, in_iter: &mut slice::Iter<u8>) {
+ // Read four bytes into the buffer at once.
+ if l.num_bits < 30 {
+ l.bit_buf |= BitBuffer::from(read_u32_le(in_iter)) << l.num_bits;
+ l.num_bits += 32;
+ }
+}
+
+/// Same as above, but for non-64-bit platforms.
+/// Ensures at least 16 bits are present; requires at least 2 bytes in the input buffer.
+#[inline(always)]
+#[cfg(not(target_pointer_width = "64"))]
+fn fill_bit_buffer(l: &mut LocalVars, in_iter: &mut slice::Iter<u8>) {
+ // If the buffer is 32-bit wide, read 2 bytes instead.
+ if l.num_bits < 15 {
+ l.bit_buf |= BitBuffer::from(read_u16_le(in_iter)) << l.num_bits;
+ l.num_bits += 16;
+ }
+}
+
+/// Check that the zlib header is correct and that there is enough space in the buffer
+/// for the window size specified in the header.
+///
+/// See https://tools.ietf.org/html/rfc1950
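+///
+/// For example, the common zlib header bytes `0x78 0x9C` pass these checks:
+/// `(0x78 * 256) + 0x9C = 30876` is divisible by 31, `CM = 0x78 & 15 = 8` (DEFLATE),
+/// the FDICT bit (`0x20`) of FLG is clear, and `CINFO = 7` gives a `1 << (7 + 8) = 32_768`
+/// byte window.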
+#[inline]
+fn validate_zlib_header(cmf: u32, flg: u32, flags: u32, mask: usize) -> Action {
+ let mut failed =
+ // cmf + flg should be divisible by 31.
+ (((cmf * 256) + flg) % 31 != 0) ||
+ // If this flag is set, a dictionary was used for this zlib compressed data.
+ // This is currently not supported by miniz or miniz-oxide
+ ((flg & 0b0010_0000) != 0) ||
+ // Compression method. Only 8(DEFLATE) is defined by the standard.
+ ((cmf & 15) != 8);
+
+ let window_size = 1 << ((cmf >> 4) + 8);
+ if (flags & TINFL_FLAG_USING_NON_WRAPPING_OUTPUT_BUF) == 0 {
+ // Bail if the buffer is wrapping and the window size is larger than the buffer.
+ failed |= (mask + 1) < window_size;
+ }
+
+ // Zlib doesn't allow window sizes above 32 * 1024.
+ failed |= window_size > 32_768;
+
+ if failed {
+ Action::Jump(BadZlibHeader)
+ } else {
+ Action::Jump(ReadBlockHeader)
+ }
+}
+
+enum Action {
+ None,
+ Jump(State),
+ End(TINFLStatus),
+}
+
+/// Try to decode the next huffman code, and put it in the counter field of the decompressor
+/// if successful.
+///
+/// # Returns
+/// The action returned from `f` on success,
+/// or `Action::End` if there is not enough data left to decode a symbol.
+fn decode_huffman_code<F>(
+ r: &mut DecompressorOxide,
+ l: &mut LocalVars,
+ table: usize,
+ flags: u32,
+ in_iter: &mut slice::Iter<u8>,
+ f: F,
+) -> Action
+where
+ F: FnOnce(&mut DecompressorOxide, &mut LocalVars, i32) -> Action,
+{
+ // As the huffman codes can be up to 15 bits long we need at least 15 bits
+ // ready in the bit buffer to start decoding the next huffman code.
+ if l.num_bits < 15 {
+ // First, make sure there is enough data in the bit buffer to decode a huffman code.
+ if in_iter.len() < 2 {
+ // If there is less than 2 bytes left in the input buffer, we try to look up
+ // the huffman code with what's available, and return if that doesn't succeed.
+ // Original explanation in miniz:
+ // /* TINFL_HUFF_BITBUF_FILL() is only used rarely, when the number of bytes
+ // * remaining in the input buffer falls below 2. */
+ // /* It reads just enough bytes from the input stream that are needed to decode
+ // * the next Huffman code (and absolutely no more). It works by trying to fully
+ // * decode a */
+ // /* Huffman code by using whatever bits are currently present in the bit buffer.
+ // * If this fails, it reads another byte, and tries again until it succeeds or
+ // * until the */
+ // /* bit buffer contains >=15 bits (deflate's max. Huffman code size). */
+ loop {
+ let mut temp = i32::from(r.tables[table].fast_lookup(l.bit_buf));
+
+ if temp >= 0 {
+ let code_len = (temp >> 9) as u32;
+ if (code_len != 0) && (l.num_bits >= code_len) {
+ break;
+ }
+ } else if l.num_bits > FAST_LOOKUP_BITS.into() {
+ let mut code_len = u32::from(FAST_LOOKUP_BITS);
+ loop {
+ temp = i32::from(
+ r.tables[table].tree
+ [(!temp + ((l.bit_buf >> code_len) & 1) as i32) as usize],
+ );
+ code_len += 1;
+ if temp >= 0 || l.num_bits < code_len + 1 {
+ break;
+ }
+ }
+ if temp >= 0 {
+ break;
+ }
+ }
+
+            // TODO: miniz jumps straight to here when it gets here again after failing to read
+            // a byte.
+            // Doing that lets miniz avoid re-doing the lookup that was done in the
+            // previous call.
+ let mut byte = 0;
+ if let a @ Action::End(_) = read_byte(in_iter, flags, |b| {
+ byte = b;
+ Action::None
+ }) {
+ return a;
+ };
+
+ // Do this outside closure for now to avoid borrowing r.
+ l.bit_buf |= BitBuffer::from(byte) << l.num_bits;
+ l.num_bits += 8;
+
+ if l.num_bits >= 15 {
+ break;
+ }
+ }
+ } else {
+ // There is enough data in the input buffer, so read the next two bytes
+ // and add them to the bit buffer.
+ // Unwrapping here is fine since we just checked that there are at least two
+ // bytes left.
+ l.bit_buf |= BitBuffer::from(read_u16_le(in_iter)) << l.num_bits;
+ l.num_bits += 16;
+ }
+ }
+
+ // We now have at least 15 bits in the input buffer.
+ let mut symbol = i32::from(r.tables[table].fast_lookup(l.bit_buf));
+ let code_len;
+ // If the symbol was found in the fast lookup table.
+ if symbol >= 0 {
+ // Get the length value from the top bits.
+ // As we shift down the sign bit, converting to an unsigned value
+ // shouldn't overflow.
+ code_len = (symbol >> 9) as u32;
+ // Mask out the length value.
+ symbol &= 511;
+ } else {
+ let res = r.tables[table].tree_lookup(symbol, l.bit_buf, u32::from(FAST_LOOKUP_BITS));
+ symbol = res.0;
+ code_len = res.1 as u32;
+ };
+
+ if code_len == 0 {
+ return Action::Jump(InvalidCodeLen);
+ }
+
+ l.bit_buf >>= code_len as u32;
+ l.num_bits -= code_len;
+ f(r, l, symbol)
+}
+
+/// Try to read one byte from `in_iter` and call `f` with the read byte as an argument,
+/// returning the result.
+/// If reading fails, `Action::End` is returned.
+#[inline]
+fn read_byte<F>(in_iter: &mut slice::Iter<u8>, flags: u32, f: F) -> Action
+where
+ F: FnOnce(u8) -> Action,
+{
+ match in_iter.next() {
+ None => end_of_input(flags),
+ Some(&byte) => f(byte),
+ }
+}
+
+// TODO: `l: &mut LocalVars` may be slow similar to decompress_fast (even with inline(always))
+/// Try to read `amount` number of bits from `in_iter` and call the function `f` with the bits as
+/// an argument after reading, returning the result of that function, or `Action::End` if there are
+/// not enough bytes left.
+#[inline]
+#[allow(clippy::while_immutable_condition)]
+fn read_bits<F>(
+ l: &mut LocalVars,
+ amount: u32,
+ in_iter: &mut slice::Iter<u8>,
+ flags: u32,
+ f: F,
+) -> Action
+where
+ F: FnOnce(&mut LocalVars, BitBuffer) -> Action,
+{
+ // Clippy gives a false positive warning here due to the closure.
+ // Read enough bytes from the input iterator to cover the number of bits we want.
+ while l.num_bits < amount {
+ match read_byte(in_iter, flags, |byte| {
+ l.bit_buf |= BitBuffer::from(byte) << l.num_bits;
+ l.num_bits += 8;
+ Action::None
+ }) {
+ Action::None => (),
+ // If there are not enough bytes in the input iterator, return and signal that we need
+ // more.
+ action => return action,
+ }
+ }
+
+ let bits = l.bit_buf & ((1 << amount) - 1);
+ l.bit_buf >>= amount;
+ l.num_bits -= amount;
+ f(l, bits)
+}
+
+#[inline]
+fn pad_to_bytes<F>(l: &mut LocalVars, in_iter: &mut slice::Iter<u8>, flags: u32, f: F) -> Action
+where
+ F: FnOnce(&mut LocalVars) -> Action,
+{
+ let num_bits = l.num_bits & 7;
+ read_bits(l, num_bits, in_iter, flags, |l, _| f(l))
+}
+
+#[inline]
+fn end_of_input(flags: u32) -> Action {
+ Action::End(if flags & TINFL_FLAG_HAS_MORE_INPUT != 0 {
+ TINFLStatus::NeedsMoreInput
+ } else {
+ TINFLStatus::FailedCannotMakeProgress
+ })
+}
+
+#[inline]
+fn undo_bytes(l: &mut LocalVars, max: u32) -> u32 {
+ let res = cmp::min(l.num_bits >> 3, max);
+ l.num_bits -= res << 3;
+ res
+}
+
+fn start_static_table(r: &mut DecompressorOxide) {
+ r.table_sizes[LITLEN_TABLE] = 288;
+ r.table_sizes[DIST_TABLE] = 32;
+ memset(&mut r.tables[LITLEN_TABLE].code_size[0..144], 8);
+ memset(&mut r.tables[LITLEN_TABLE].code_size[144..256], 9);
+ memset(&mut r.tables[LITLEN_TABLE].code_size[256..280], 7);
+ memset(&mut r.tables[LITLEN_TABLE].code_size[280..288], 8);
+ memset(&mut r.tables[DIST_TABLE].code_size[0..32], 5);
+}
+
+fn init_tree(r: &mut DecompressorOxide, l: &mut LocalVars) -> Action {
+ loop {
+ let table = &mut r.tables[r.block_type as usize];
+ let table_size = r.table_sizes[r.block_type as usize] as usize;
+ let mut total_symbols = [0u32; 16];
+ let mut next_code = [0u32; 17];
+ memset(&mut table.look_up[..], 0);
+ memset(&mut table.tree[..], 0);
+
+ for &code_size in &table.code_size[..table_size] {
+ total_symbols[code_size as usize] += 1;
+ }
+
+ let mut used_symbols = 0;
+ let mut total = 0;
+ for i in 1..16 {
+ used_symbols += total_symbols[i];
+ total += total_symbols[i];
+ total <<= 1;
+ next_code[i + 1] = total;
+ }
+
+ if total != 65_536 && used_symbols > 1 {
+ return Action::Jump(BadTotalSymbols);
+ }
+
+ let mut tree_next = -1;
+ for symbol_index in 0..table_size {
+ let mut rev_code = 0;
+ let code_size = table.code_size[symbol_index];
+ if code_size == 0 {
+ continue;
+ }
+
+ let mut cur_code = next_code[code_size as usize];
+ next_code[code_size as usize] += 1;
+
+ for _ in 0..code_size {
+ rev_code = (rev_code << 1) | (cur_code & 1);
+ cur_code >>= 1;
+ }
+
+ if code_size <= FAST_LOOKUP_BITS {
+ let k = (i16::from(code_size) << 9) | symbol_index as i16;
+ while rev_code < FAST_LOOKUP_SIZE {
+ table.look_up[rev_code as usize] = k;
+ rev_code += 1 << code_size;
+ }
+ continue;
+ }
+
+ let mut tree_cur = table.look_up[(rev_code & (FAST_LOOKUP_SIZE - 1)) as usize];
+ if tree_cur == 0 {
+ table.look_up[(rev_code & (FAST_LOOKUP_SIZE - 1)) as usize] = tree_next as i16;
+ tree_cur = tree_next;
+ tree_next -= 2;
+ }
+
+ rev_code >>= FAST_LOOKUP_BITS - 1;
+ for _ in FAST_LOOKUP_BITS + 1..code_size {
+ rev_code >>= 1;
+ tree_cur -= (rev_code & 1) as i16;
+ if table.tree[(-tree_cur - 1) as usize] == 0 {
+ table.tree[(-tree_cur - 1) as usize] = tree_next as i16;
+ tree_cur = tree_next;
+ tree_next -= 2;
+ } else {
+ tree_cur = table.tree[(-tree_cur - 1) as usize];
+ }
+ }
+
+ rev_code >>= 1;
+ tree_cur -= (rev_code & 1) as i16;
+ table.tree[(-tree_cur - 1) as usize] = symbol_index as i16;
+ }
+
+ if r.block_type == 2 {
+ l.counter = 0;
+ return Action::Jump(ReadLitlenDistTablesCodeSize);
+ }
+
+ if r.block_type == 0 {
+ break;
+ }
+ r.block_type -= 1;
+ }
+
+ l.counter = 0;
+ Action::Jump(DecodeLitlen)
+}
+
+// A helper macro for generating the state machine.
+//
+// As Rust doesn't have fallthrough on matches, we have to return to the match statement
+// and jump for each state change. (Which would ideally be optimized away, but often isn't.)
+macro_rules! generate_state {
+ ($state: ident, $state_machine: tt, $f: expr) => {
+ loop {
+ match $f {
+ Action::None => continue,
+ Action::Jump(new_state) => {
+ $state = new_state;
+ continue $state_machine;
+ },
+ Action::End(result) => break $state_machine result,
+ }
+ }
+ };
+}
+
+#[derive(Copy, Clone)]
+struct LocalVars {
+ pub bit_buf: BitBuffer,
+ pub num_bits: u32,
+ pub dist: u32,
+ pub counter: u32,
+ pub num_extra: u32,
+}
+
+#[inline]
+fn transfer(
+ out_slice: &mut [u8],
+ mut source_pos: usize,
+ mut out_pos: usize,
+ match_len: usize,
+ out_buf_size_mask: usize,
+) {
+ for _ in 0..match_len >> 2 {
+ out_slice[out_pos] = out_slice[source_pos & out_buf_size_mask];
+ out_slice[out_pos + 1] = out_slice[(source_pos + 1) & out_buf_size_mask];
+ out_slice[out_pos + 2] = out_slice[(source_pos + 2) & out_buf_size_mask];
+ out_slice[out_pos + 3] = out_slice[(source_pos + 3) & out_buf_size_mask];
+ source_pos += 4;
+ out_pos += 4;
+ }
+
+ match match_len & 3 {
+ 0 => (),
+ 1 => out_slice[out_pos] = out_slice[source_pos & out_buf_size_mask],
+ 2 => {
+ out_slice[out_pos] = out_slice[source_pos & out_buf_size_mask];
+ out_slice[out_pos + 1] = out_slice[(source_pos + 1) & out_buf_size_mask];
+ }
+ 3 => {
+ out_slice[out_pos] = out_slice[source_pos & out_buf_size_mask];
+ out_slice[out_pos + 1] = out_slice[(source_pos + 1) & out_buf_size_mask];
+ out_slice[out_pos + 2] = out_slice[(source_pos + 2) & out_buf_size_mask];
+ }
+ _ => unreachable!(),
+ }
+}
+
+/// Presumes that there are at least `match_len` bytes of output left.
+#[inline]
+fn apply_match(
+ out_slice: &mut [u8],
+ out_pos: usize,
+ dist: usize,
+ match_len: usize,
+ out_buf_size_mask: usize,
+) {
+ debug_assert!(out_pos + match_len <= out_slice.len());
+
+ let source_pos = out_pos.wrapping_sub(dist) & out_buf_size_mask;
+
+ if match_len == 3 {
+ // Fast path for match len 3.
+ out_slice[out_pos] = out_slice[source_pos];
+ out_slice[out_pos + 1] = out_slice[(source_pos + 1) & out_buf_size_mask];
+ out_slice[out_pos + 2] = out_slice[(source_pos + 2) & out_buf_size_mask];
+ return;
+ }
+
+ if cfg!(not(any(target_arch = "x86", target_arch = "x86_64"))) {
+ // We are not on x86 so copy manually.
+ transfer(out_slice, source_pos, out_pos, match_len, out_buf_size_mask);
+ return;
+ }
+
+ if source_pos >= out_pos && (source_pos - out_pos) < match_len {
+ transfer(out_slice, source_pos, out_pos, match_len, out_buf_size_mask);
+ } else if match_len <= dist && source_pos + match_len < out_slice.len() {
+        // Destination and source segments do not intersect and the source does not wrap.
+ if source_pos < out_pos {
+ let (from_slice, to_slice) = out_slice.split_at_mut(out_pos);
+ to_slice[..match_len].copy_from_slice(&from_slice[source_pos..source_pos + match_len]);
+ } else {
+ let (to_slice, from_slice) = out_slice.split_at_mut(source_pos);
+ to_slice[out_pos..out_pos + match_len].copy_from_slice(&from_slice[..match_len]);
+ }
+ } else {
+ transfer(out_slice, source_pos, out_pos, match_len, out_buf_size_mask);
+ }
+}
+
+/// Fast inner decompression loop which is run while there are at least
+/// 259 bytes left in the output buffer and at least 14 bytes left in the input buffer
+/// (matching the checks done at the top of the loop).
+///
+/// This was inspired by a similar optimization in zlib, which uses this info to do
+/// faster unchecked copies of multiple bytes at a time.
+/// Currently we don't do this here, but this function does avoid having to jump through the
+/// big match loop on each state change (as Rust does not have fallthrough or gotos at the moment),
+/// and already improves decompression speed a fair bit.
+fn decompress_fast(
+ r: &mut DecompressorOxide,
+ in_iter: &mut slice::Iter<u8>,
+ out_buf: &mut OutputBuffer,
+ flags: u32,
+ local_vars: &mut LocalVars,
+ out_buf_size_mask: usize,
+) -> (TINFLStatus, State) {
+ // Make a local copy of the most used variables, to avoid having to update and read from values
+ // in a random memory location and to encourage more register use.
+ let mut l = *local_vars;
+ let mut state;
+
+ let status: TINFLStatus = 'o: loop {
+ state = State::DecodeLitlen;
+ loop {
+            // This function assumes that there are at least 259 bytes left in the output buffer,
+            // and that there are at least 14 bytes left in the input buffer. 14 input bytes:
+ // 15 (prev lit) + 15 (length) + 5 (length extra) + 15 (dist)
+ // + 29 + 32 (left in bit buf, including last 13 dist extra) = 111 bits < 14 bytes
+ // We need the one extra byte as we may write one length and one full match
+ // before checking again.
+ if out_buf.bytes_left() < 259 || in_iter.len() < 14 {
+ state = State::DecodeLitlen;
+ break 'o TINFLStatus::Done;
+ }
+
+ fill_bit_buffer(&mut l, in_iter);
+
+ if let Some((symbol, code_len)) = r.tables[LITLEN_TABLE].lookup(l.bit_buf) {
+ l.counter = symbol as u32;
+ l.bit_buf >>= code_len;
+ l.num_bits -= code_len;
+
+ if (l.counter & 256) != 0 {
+ // The symbol is not a literal.
+ break;
+ } else {
+ // If we have a 32-bit buffer we need to read another two bytes now
+ // to have enough bits to keep going.
+ if cfg!(not(target_pointer_width = "64")) {
+ fill_bit_buffer(&mut l, in_iter);
+ }
+
+ if let Some((symbol, code_len)) = r.tables[LITLEN_TABLE].lookup(l.bit_buf) {
+ l.bit_buf >>= code_len;
+ l.num_bits -= code_len;
+ // The previous symbol was a literal, so write it directly and check
+ // the next one.
+ out_buf.write_byte(l.counter as u8);
+ if (symbol & 256) != 0 {
+ l.counter = symbol as u32;
+ // The symbol is a length value.
+ break;
+ } else {
+ // The symbol is a literal, so write it directly and continue.
+ out_buf.write_byte(symbol as u8);
+ }
+ } else {
+ state.begin(InvalidCodeLen);
+ break 'o TINFLStatus::Failed;
+ }
+ }
+ } else {
+ state.begin(InvalidCodeLen);
+ break 'o TINFLStatus::Failed;
+ }
+ }
+
+ // Mask the top bits since they may contain length info.
+ l.counter &= 511;
+ if l.counter == 256 {
+ // We hit the end of block symbol.
+ state.begin(BlockDone);
+ break 'o TINFLStatus::Done;
+ } else if l.counter > 285 {
+ // Invalid code.
+ // We already verified earlier that the code is > 256.
+ state.begin(InvalidLitlen);
+ break 'o TINFLStatus::Failed;
+ } else {
+ // The symbol was a length code.
+ // # Optimization
+ // Mask the value to avoid bounds checks
+                // We could use get_unchecked later if we can statically verify that
+ // this will never go out of bounds.
+ l.num_extra = u32::from(LENGTH_EXTRA[(l.counter - 257) as usize & BASE_EXTRA_MASK]);
+ l.counter = u32::from(LENGTH_BASE[(l.counter - 257) as usize & BASE_EXTRA_MASK]);
+ // Length and distance codes have a number of extra bits depending on
+ // the base, which together with the base gives us the exact value.
+
+ fill_bit_buffer(&mut l, in_iter);
+ if l.num_extra != 0 {
+ let extra_bits = l.bit_buf & ((1 << l.num_extra) - 1);
+ l.bit_buf >>= l.num_extra;
+ l.num_bits -= l.num_extra;
+ l.counter += extra_bits as u32;
+ }
+
+ // We found a length code, so a distance code should follow.
+
+ if cfg!(not(target_pointer_width = "64")) {
+ fill_bit_buffer(&mut l, in_iter);
+ }
+
+ if let Some((mut symbol, code_len)) = r.tables[DIST_TABLE].lookup(l.bit_buf) {
+ symbol &= 511;
+ l.bit_buf >>= code_len;
+ l.num_bits -= code_len;
+ if symbol > 29 {
+ state.begin(InvalidDist);
+ break 'o TINFLStatus::Failed;
+ }
+
+ l.num_extra = u32::from(DIST_EXTRA[symbol as usize]);
+ l.dist = u32::from(DIST_BASE[symbol as usize]);
+ } else {
+ state.begin(InvalidCodeLen);
+ break 'o TINFLStatus::Failed;
+ }
+
+ if l.num_extra != 0 {
+ fill_bit_buffer(&mut l, in_iter);
+ let extra_bits = l.bit_buf & ((1 << l.num_extra) - 1);
+ l.bit_buf >>= l.num_extra;
+ l.num_bits -= l.num_extra;
+ l.dist += extra_bits as u32;
+ }
+
+ let position = out_buf.position();
+ if l.dist as usize > out_buf.position()
+ && (flags & TINFL_FLAG_USING_NON_WRAPPING_OUTPUT_BUF != 0)
+ {
+                // We encountered a distance that refers to a position before
+ // the start of the decoded data, so we can't continue.
+ state.begin(DistanceOutOfBounds);
+ break TINFLStatus::Failed;
+ }
+
+ apply_match(
+ out_buf.get_mut(),
+ position,
+ l.dist as usize,
+ l.counter as usize,
+ out_buf_size_mask,
+ );
+
+ out_buf.set_position(position + l.counter as usize);
+ }
+ };
+
+ *local_vars = l;
+ (status, state)
+}
+
+/// Main decompression function. Keeps decompressing data from `in_buf` until the `in_buf` is
+/// empty, `out` is full, the end of the deflate stream is hit, or there is an error in the
+/// deflate stream.
+///
+/// # Arguments
+///
+/// `r` is a [`DecompressorOxide`] struct with the state of this stream.
+///
+/// `in_buf` is a reference to the compressed data that is to be decompressed. The decompressor will
+/// start at the first byte of this buffer.
+///
+/// `out` is a reference to the buffer that will store the decompressed data, and that
+/// stores previously decompressed data if any.
+///
+/// * The offset given by `out_pos` indicates where in the output buffer slice writing should start.
+/// * If [`TINFL_FLAG_USING_NON_WRAPPING_OUTPUT_BUF`] is not set, the output buffer is used in a
+/// wrapping manner, and its size is required to be a power of 2.
+/// * The decompression function normally needs access to 32KiB of the previously decompressed data
+/// (or to the beginning of the decompressed data if less than 32KiB has been decompressed.)
+/// - If this data is not available, decompression may fail.
+/// - Some deflate compressors allow specifying a window size which limits match distances to
+/// less than this, or alternatively an RLE mode where matches will only refer to the previous byte,
+/// which allows a smaller output buffer. The window size can be specified in the zlib
+/// header structure; however, the header data should not be relied on to be correct.
+///
+/// `flags` indicates settings and status to the decompression function.
+/// * The [`TINFL_FLAG_HAS_MORE_INPUT`] flag has to be specified if more compressed data is to be
+/// provided in a subsequent call to this function.
+/// * See the [`inflate_flags`] module for details on other flags.
+///
+/// # Returns
+///
+/// Returns a tuple containing the status of the decompressor, the number of input bytes read, and
+/// the number of bytes output to `out`.
+///
+/// This function shouldn't panic, barring any bugs.
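+///
+/// # Example
+///
+/// A minimal whole-buffer sketch; the module paths, the `compressed` input, and the output
+/// size are illustrative assumptions:
+///
+/// ```ignore
+/// use miniz_oxide::inflate::core::{decompress, inflate_flags, DecompressorOxide};
+///
+/// let compressed: &[u8] = /* a complete zlib stream */ &[];
+/// let mut r = DecompressorOxide::new();
+/// let mut out = vec![0u8; 64 * 1024];
+/// let flags = inflate_flags::TINFL_FLAG_PARSE_ZLIB_HEADER
+///     | inflate_flags::TINFL_FLAG_USING_NON_WRAPPING_OUTPUT_BUF;
+/// let (status, bytes_consumed, bytes_written) = decompress(&mut r, compressed, &mut out, 0, flags);
+/// ```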
+pub fn decompress(
+ r: &mut DecompressorOxide,
+ in_buf: &[u8],
+ out: &mut [u8],
+ out_pos: usize,
+ flags: u32,
+) -> (TINFLStatus, usize, usize) {
+ let out_buf_size_mask = if flags & TINFL_FLAG_USING_NON_WRAPPING_OUTPUT_BUF != 0 {
+ usize::max_value()
+ } else {
+ // In the case of zero len, any attempt to write would produce HasMoreOutput,
+ // so to gracefully process the case of there really being no output,
+ // set the mask to all zeros.
+ out.len().saturating_sub(1)
+ };
+
+ // Ensure the output buffer's size is a power of 2, unless the output buffer
+ // is large enough to hold the entire output file (in which case it doesn't
+ // matter).
+ // Also make sure that the output buffer position is not past the end of the output buffer.
+ if (out_buf_size_mask.wrapping_add(1) & out_buf_size_mask) != 0 || out_pos > out.len() {
+ return (TINFLStatus::BadParam, 0, 0);
+ }
+
+ let mut in_iter = in_buf.iter();
+
+ let mut state = r.state;
+
+ let mut out_buf = OutputBuffer::from_slice_and_pos(out, out_pos);
+
+ // Make a local copy of the important variables here so we can work with them on the stack.
+ let mut l = LocalVars {
+ bit_buf: r.bit_buf,
+ num_bits: r.num_bits,
+ dist: r.dist,
+ counter: r.counter,
+ num_extra: r.num_extra,
+ };
+
+ let mut status = 'state_machine: loop {
+ match state {
+ Start => generate_state!(state, 'state_machine, {
+ l.bit_buf = 0;
+ l.num_bits = 0;
+ l.dist = 0;
+ l.counter = 0;
+ l.num_extra = 0;
+ r.z_header0 = 0;
+ r.z_header1 = 0;
+ r.z_adler32 = 1;
+ r.check_adler32 = 1;
+ if flags & TINFL_FLAG_PARSE_ZLIB_HEADER != 0 {
+ Action::Jump(State::ReadZlibCmf)
+ } else {
+ Action::Jump(State::ReadBlockHeader)
+ }
+ }),
+
+ ReadZlibCmf => generate_state!(state, 'state_machine, {
+ read_byte(&mut in_iter, flags, |cmf| {
+ r.z_header0 = u32::from(cmf);
+ Action::Jump(State::ReadZlibFlg)
+ })
+ }),
+
+ ReadZlibFlg => generate_state!(state, 'state_machine, {
+ read_byte(&mut in_iter, flags, |flg| {
+ r.z_header1 = u32::from(flg);
+ validate_zlib_header(r.z_header0, r.z_header1, flags, out_buf_size_mask)
+ })
+ }),
+
+ // Read the block header and jump to the relevant section depending on the block type.
+ ReadBlockHeader => generate_state!(state, 'state_machine, {
+ read_bits(&mut l, 3, &mut in_iter, flags, |l, bits| {
+ r.finish = (bits & 1) as u32;
+ r.block_type = (bits >> 1) as u32 & 3;
+ match r.block_type {
+ 0 => Action::Jump(BlockTypeNoCompression),
+ 1 => {
+ start_static_table(r);
+ init_tree(r, l)
+ },
+ 2 => {
+ l.counter = 0;
+ Action::Jump(ReadTableSizes)
+ },
+ 3 => Action::Jump(BlockTypeUnexpected),
+ _ => unreachable!()
+ }
+ })
+ }),
+
+ // Raw/Stored/uncompressed block.
+ BlockTypeNoCompression => generate_state!(state, 'state_machine, {
+ pad_to_bytes(&mut l, &mut in_iter, flags, |l| {
+ l.counter = 0;
+ Action::Jump(RawHeader)
+ })
+ }),
+
+ // Check that the raw block header is correct.
+ RawHeader => generate_state!(state, 'state_machine, {
+ if l.counter < 4 {
+ // Read block length and block length check.
+ if l.num_bits != 0 {
+ read_bits(&mut l, 8, &mut in_iter, flags, |l, bits| {
+ r.raw_header[l.counter as usize] = bits as u8;
+ l.counter += 1;
+ Action::None
+ })
+ } else {
+ read_byte(&mut in_iter, flags, |byte| {
+ r.raw_header[l.counter as usize] = byte;
+ l.counter += 1;
+ Action::None
+ })
+ }
+ } else {
+ // Check if the length value of a raw block is correct.
+ // The 2 first (2-byte) words in a raw header are the length and the
+ // ones complement of the length.
+ let length = u16::from(r.raw_header[0]) | (u16::from(r.raw_header[1]) << 8);
+ let check = u16::from(r.raw_header[2]) | (u16::from(r.raw_header[3]) << 8);
+ let valid = length == !check;
+ l.counter = length.into();
+
+ if !valid {
+ Action::Jump(BadRawLength)
+ } else if l.counter == 0 {
+ // Empty raw block. Sometimes used for synchronization.
+ Action::Jump(BlockDone)
+ } else if l.num_bits != 0 {
+ // There is some data in the bit buffer, so we need to write that first.
+ Action::Jump(RawReadFirstByte)
+ } else {
+ // The bit buffer is empty, so memcpy the rest of the uncompressed data from
+ // the block.
+ Action::Jump(RawMemcpy1)
+ }
+ }
+ }),
+
+ // Read the byte from the bit buffer.
+ RawReadFirstByte => generate_state!(state, 'state_machine, {
+ read_bits(&mut l, 8, &mut in_iter, flags, |l, bits| {
+ l.dist = bits as u32;
+ Action::Jump(RawStoreFirstByte)
+ })
+ }),
+
+ // Write the byte we just read to the output buffer.
+ RawStoreFirstByte => generate_state!(state, 'state_machine, {
+ if out_buf.bytes_left() == 0 {
+ Action::End(TINFLStatus::HasMoreOutput)
+ } else {
+ out_buf.write_byte(l.dist as u8);
+ l.counter -= 1;
+ if l.counter == 0 || l.num_bits == 0 {
+ Action::Jump(RawMemcpy1)
+ } else {
+ // There is still some data left in the bit buffer that needs to be output.
+                        // TODO: Changed this to jump to `RawReadFirstByte` rather than
+ // `RawStoreFirstByte` as that seemed to be the correct path, but this
+ // needs testing.
+ Action::Jump(RawReadFirstByte)
+ }
+ }
+ }),
+
+ RawMemcpy1 => generate_state!(state, 'state_machine, {
+ if l.counter == 0 {
+ Action::Jump(BlockDone)
+ } else if out_buf.bytes_left() == 0 {
+ Action::End(TINFLStatus::HasMoreOutput)
+ } else {
+ Action::Jump(RawMemcpy2)
+ }
+ }),
+
+ RawMemcpy2 => generate_state!(state, 'state_machine, {
+ if in_iter.len() > 0 {
+ // Copy as many raw bytes as possible from the input to the output using memcpy.
+ // Raw block lengths are limited to 64 * 1024, so casting through usize and u32
+ // is not an issue.
+ let space_left = out_buf.bytes_left();
+ let bytes_to_copy = cmp::min(cmp::min(
+ space_left,
+ in_iter.len()),
+ l.counter as usize
+ );
+
+ out_buf.write_slice(&in_iter.as_slice()[..bytes_to_copy]);
+
+ (&mut in_iter).nth(bytes_to_copy - 1);
+ l.counter -= bytes_to_copy as u32;
+ Action::Jump(RawMemcpy1)
+ } else {
+ end_of_input(flags)
+ }
+ }),
+
+ // Read how many huffman codes/symbols are used for each table.
+ ReadTableSizes => generate_state!(state, 'state_machine, {
+ if l.counter < 3 {
+ let num_bits = [5, 5, 4][l.counter as usize];
+ read_bits(&mut l, num_bits, &mut in_iter, flags, |l, bits| {
+ r.table_sizes[l.counter as usize] =
+ bits as u32 + u32::from(MIN_TABLE_SIZES[l.counter as usize]);
+ l.counter += 1;
+ Action::None
+ })
+ } else {
+ memset(&mut r.tables[HUFFLEN_TABLE].code_size[..], 0);
+ l.counter = 0;
+ Action::Jump(ReadHufflenTableCodeSize)
+ }
+ }),
+
+ // Read the 3-bit lengths of the huffman codes describing the huffman code lengths used
+ // to decode the lengths of the main tables.
+ ReadHufflenTableCodeSize => generate_state!(state, 'state_machine, {
+ if l.counter < r.table_sizes[HUFFLEN_TABLE] {
+ read_bits(&mut l, 3, &mut in_iter, flags, |l, bits| {
+                        // These lengths are not stored in a normal ascending order, but rather in
+                        // an order specified by the deflate specification, intended to put the most
+                        // used values at the front, as trailing zero lengths do not have to be stored.
+ r.tables[HUFFLEN_TABLE]
+ .code_size[HUFFMAN_LENGTH_ORDER[l.counter as usize] as usize] =
+ bits as u8;
+ l.counter += 1;
+ Action::None
+ })
+ } else {
+ r.table_sizes[HUFFLEN_TABLE] = 19;
+ init_tree(r, &mut l)
+ }
+ }),
+
+ ReadLitlenDistTablesCodeSize => generate_state!(state, 'state_machine, {
+ if l.counter < r.table_sizes[LITLEN_TABLE] + r.table_sizes[DIST_TABLE] {
+ decode_huffman_code(
+ r, &mut l, HUFFLEN_TABLE,
+ flags, &mut in_iter, |r, l, symbol| {
+ l.dist = symbol as u32;
+ if l.dist < 16 {
+ r.len_codes[l.counter as usize] = l.dist as u8;
+ l.counter += 1;
+ Action::None
+ } else if l.dist == 16 && l.counter == 0 {
+ Action::Jump(BadCodeSizeDistPrevLookup)
+ } else {
+ l.num_extra = [2, 3, 7][l.dist as usize - 16];
+ Action::Jump(ReadExtraBitsCodeSize)
+ }
+ }
+ )
+ } else if l.counter != r.table_sizes[LITLEN_TABLE] + r.table_sizes[DIST_TABLE] {
+ Action::Jump(BadCodeSizeSum)
+ } else {
+ r.tables[LITLEN_TABLE].code_size[..r.table_sizes[LITLEN_TABLE] as usize]
+ .copy_from_slice(&r.len_codes[..r.table_sizes[LITLEN_TABLE] as usize]);
+
+ let dist_table_start = r.table_sizes[LITLEN_TABLE] as usize;
+ let dist_table_end = (r.table_sizes[LITLEN_TABLE] +
+ r.table_sizes[DIST_TABLE]) as usize;
+ r.tables[DIST_TABLE].code_size[..r.table_sizes[DIST_TABLE] as usize]
+ .copy_from_slice(&r.len_codes[dist_table_start..dist_table_end]);
+
+ r.block_type -= 1;
+ init_tree(r, &mut l)
+ }
+ }),
+
+ ReadExtraBitsCodeSize => generate_state!(state, 'state_machine, {
+ let num_extra = l.num_extra;
+ read_bits(&mut l, num_extra, &mut in_iter, flags, |l, mut extra_bits| {
+ // Mask to avoid a bounds check.
+ extra_bits += [3, 3, 11][(l.dist as usize - 16) & 3];
+ let val = if l.dist == 16 {
+ r.len_codes[l.counter as usize - 1]
+ } else {
+ 0
+ };
+
+ memset(
+ &mut r.len_codes[
+ l.counter as usize..l.counter as usize + extra_bits as usize
+ ],
+ val,
+ );
+ l.counter += extra_bits as u32;
+ Action::Jump(ReadLitlenDistTablesCodeSize)
+ })
+ }),
+
+ DecodeLitlen => generate_state!(state, 'state_machine, {
+ if in_iter.len() < 4 || out_buf.bytes_left() < 2 {
+ // See if we can decode a literal with the data we have left.
+ // Jumps to next state (WriteSymbol) if successful.
+ decode_huffman_code(
+ r,
+ &mut l,
+ LITLEN_TABLE,
+ flags,
+ &mut in_iter,
+ |_r, l, symbol| {
+ l.counter = symbol as u32;
+ Action::Jump(WriteSymbol)
+ },
+ )
+ } else if
+ // If there is enough space, use the fast inner decompression
+ // function.
+ out_buf.bytes_left() >= 259 &&
+ in_iter.len() >= 14
+ {
+ let (status, new_state) = decompress_fast(
+ r,
+ &mut in_iter,
+ &mut out_buf,
+ flags,
+ &mut l,
+ out_buf_size_mask,
+ );
+
+ state = new_state;
+ if status == TINFLStatus::Done {
+ Action::Jump(new_state)
+ } else {
+ Action::End(status)
+ }
+ } else {
+ fill_bit_buffer(&mut l, &mut in_iter);
+
+ if let Some((symbol, code_len)) = r.tables[LITLEN_TABLE].lookup(l.bit_buf) {
+
+ l.counter = symbol as u32;
+ l.bit_buf >>= code_len;
+ l.num_bits -= code_len;
+
+ if (l.counter & 256) != 0 {
+ // The symbol is not a literal.
+ Action::Jump(HuffDecodeOuterLoop1)
+ } else {
+ // If we have a 32-bit buffer we need to read another two bytes now
+ // to have enough bits to keep going.
+ if cfg!(not(target_pointer_width = "64")) {
+ fill_bit_buffer(&mut l, &mut in_iter);
+ }
+
+ if let Some((symbol, code_len)) = r.tables[LITLEN_TABLE].lookup(l.bit_buf) {
+
+ l.bit_buf >>= code_len;
+ l.num_bits -= code_len;
+ // The previous symbol was a literal, so write it directly and check
+ // the next one.
+ out_buf.write_byte(l.counter as u8);
+ if (symbol & 256) != 0 {
+ l.counter = symbol as u32;
+ // The symbol is a length value.
+ Action::Jump(HuffDecodeOuterLoop1)
+ } else {
+ // The symbol is a literal, so write it directly and continue.
+ out_buf.write_byte(symbol as u8);
+ Action::None
+ }
+ } else {
+ Action::Jump(InvalidCodeLen)
+ }
+ }
+ } else {
+ Action::Jump(InvalidCodeLen)
+ }
+ }
+ }),
+
+ WriteSymbol => generate_state!(state, 'state_machine, {
+ if l.counter >= 256 {
+ Action::Jump(HuffDecodeOuterLoop1)
+ } else if out_buf.bytes_left() > 0 {
+ out_buf.write_byte(l.counter as u8);
+ Action::Jump(DecodeLitlen)
+ } else {
+ Action::End(TINFLStatus::HasMoreOutput)
+ }
+ }),
+
+ HuffDecodeOuterLoop1 => generate_state!(state, 'state_machine, {
+ // Mask the top bits since they may contain length info.
+ l.counter &= 511;
+
+ if l.counter == 256 {
+ // We hit the end of block symbol.
+ Action::Jump(BlockDone)
+ } else if l.counter > 285 {
+ // Invalid code.
+ // We already verified earlier that the code is > 256.
+ Action::Jump(InvalidLitlen)
+ } else {
+ // # Optimization
+ // Mask the value to avoid bounds checks
+                    // We could use get_unchecked later if we can statically verify that
+ // this will never go out of bounds.
+ l.num_extra =
+ u32::from(LENGTH_EXTRA[(l.counter - 257) as usize & BASE_EXTRA_MASK]);
+ l.counter = u32::from(LENGTH_BASE[(l.counter - 257) as usize & BASE_EXTRA_MASK]);
+ // Length and distance codes have a number of extra bits depending on
+ // the base, which together with the base gives us the exact value.
+ if l.num_extra != 0 {
+ Action::Jump(ReadExtraBitsLitlen)
+ } else {
+ Action::Jump(DecodeDistance)
+ }
+ }
+ }),
+
+ ReadExtraBitsLitlen => generate_state!(state, 'state_machine, {
+ let num_extra = l.num_extra;
+ read_bits(&mut l, num_extra, &mut in_iter, flags, |l, extra_bits| {
+ l.counter += extra_bits as u32;
+ Action::Jump(DecodeDistance)
+ })
+ }),
+
+ DecodeDistance => generate_state!(state, 'state_machine, {
+ // Try to read a huffman code from the input buffer and look up what
+ // length code the decoded symbol refers to.
+ decode_huffman_code(r, &mut l, DIST_TABLE, flags, &mut in_iter, |_r, l, symbol| {
+ if symbol > 29 {
+ // Invalid distance code.
+ return Action::Jump(InvalidDist)
+ }
+ // # Optimization
+ // Mask the value to avoid bounds checks
+                // We could use get_unchecked later if we can statically verify that
+ // this will never go out of bounds.
+ l.num_extra = u32::from(DIST_EXTRA[symbol as usize & BASE_EXTRA_MASK]);
+ l.dist = u32::from(DIST_BASE[symbol as usize & BASE_EXTRA_MASK]);
+ if l.num_extra != 0 {
+                // Jump to ReadExtraBitsDistance to read the extra distance bits.
+ Action::Jump(ReadExtraBitsDistance)
+ } else {
+ Action::Jump(HuffDecodeOuterLoop2)
+ }
+ })
+ }),
+
+ ReadExtraBitsDistance => generate_state!(state, 'state_machine, {
+ let num_extra = l.num_extra;
+ read_bits(&mut l, num_extra, &mut in_iter, flags, |l, extra_bits| {
+ l.dist += extra_bits as u32;
+ Action::Jump(HuffDecodeOuterLoop2)
+ })
+ }),
+
+ HuffDecodeOuterLoop2 => generate_state!(state, 'state_machine, {
+ if l.dist as usize > out_buf.position() &&
+ (flags & TINFL_FLAG_USING_NON_WRAPPING_OUTPUT_BUF != 0)
+ {
+                    // We encountered a distance that refers to a position before
+ // the start of the decoded data, so we can't continue.
+ Action::Jump(DistanceOutOfBounds)
+ } else {
+ let out_pos = out_buf.position();
+ let source_pos = out_buf.position()
+ .wrapping_sub(l.dist as usize) & out_buf_size_mask;
+
+ let out_len = out_buf.get_ref().len() as usize;
+ let match_end_pos = out_buf.position() + l.counter as usize;
+
+ if match_end_pos > out_len ||
+ // miniz doesn't do this check here. Not sure how it makes sure
+ // that this case doesn't happen.
+ (source_pos >= out_pos && (source_pos - out_pos) < l.counter as usize)
+ {
+ // Not enough space for all of the data in the output buffer,
+ // so copy what we have space for.
+ if l.counter == 0 {
+ Action::Jump(DecodeLitlen)
+ } else {
+ Action::Jump(WriteLenBytesToEnd)
+ }
+ } else {
+ apply_match(
+ out_buf.get_mut(),
+ out_pos,
+ l.dist as usize,
+ l.counter as usize,
+ out_buf_size_mask
+ );
+ out_buf.set_position(out_pos + l.counter as usize);
+ Action::Jump(DecodeLitlen)
+ }
+ }
+ }),
+
+ WriteLenBytesToEnd => generate_state!(state, 'state_machine, {
+ if out_buf.bytes_left() > 0 {
+ let out_pos = out_buf.position();
+ let source_pos = out_buf.position()
+ .wrapping_sub(l.dist as usize) & out_buf_size_mask;
+
+
+ let len = cmp::min(out_buf.bytes_left(), l.counter as usize);
+
+ transfer(out_buf.get_mut(), source_pos, out_pos, len, out_buf_size_mask);
+
+ out_buf.set_position(out_pos + len);
+ l.counter -= len as u32;
+ if l.counter == 0 {
+ Action::Jump(DecodeLitlen)
+ } else {
+ Action::None
+ }
+ } else {
+ Action::End(TINFLStatus::HasMoreOutput)
+ }
+ }),
+
+ BlockDone => generate_state!(state, 'state_machine, {
+ // End once we've read the last block.
+ if r.finish != 0 {
+ pad_to_bytes(&mut l, &mut in_iter, flags, |_| Action::None);
+
+ let in_consumed = in_buf.len() - in_iter.len();
+ let undo = undo_bytes(&mut l, in_consumed as u32) as usize;
+ in_iter = in_buf[in_consumed - undo..].iter();
+
+ l.bit_buf &= ((1 as BitBuffer) << l.num_bits) - 1;
+ debug_assert_eq!(l.num_bits, 0);
+
+ if flags & TINFL_FLAG_PARSE_ZLIB_HEADER != 0 {
+ l.counter = 0;
+ Action::Jump(ReadAdler32)
+ } else {
+ Action::Jump(DoneForever)
+ }
+ } else {
+ Action::Jump(ReadBlockHeader)
+ }
+ }),
+
+ ReadAdler32 => generate_state!(state, 'state_machine, {
+ if l.counter < 4 {
+ if l.num_bits != 0 {
+ read_bits(&mut l, 8, &mut in_iter, flags, |l, bits| {
+ r.z_adler32 <<= 8;
+ r.z_adler32 |= bits as u32;
+ l.counter += 1;
+ Action::None
+ })
+ } else {
+ read_byte(&mut in_iter, flags, |byte| {
+ r.z_adler32 <<= 8;
+ r.z_adler32 |= u32::from(byte);
+ l.counter += 1;
+ Action::None
+ })
+ }
+ } else {
+ Action::Jump(DoneForever)
+ }
+ }),
+
+ // We are done.
+ DoneForever => break TINFLStatus::Done,
+
+ // Anything else indicates failure.
+ // BadZlibHeader | BadRawLength | BlockTypeUnexpected | DistanceOutOfBounds |
+ // BadTotalSymbols | BadCodeSizeDistPrevLookup | BadCodeSizeSum | InvalidLitlen |
+ // InvalidDist | InvalidCodeLen
+ _ => break TINFLStatus::Failed,
+ };
+ };
+
+ let in_undo = if status != TINFLStatus::NeedsMoreInput
+ && status != TINFLStatus::FailedCannotMakeProgress
+ {
+ undo_bytes(&mut l, (in_buf.len() - in_iter.len()) as u32) as usize
+ } else {
+ 0
+ };
+
+ // Make sure HasMoreOutput overrides NeedsMoreInput if the output buffer is full.
+ // (Unless the missing input is the adler32 value in which case we don't need to write anything.)
+ // TODO: May want to see if we can do this in a better way.
+ if status == TINFLStatus::NeedsMoreInput
+ && out_buf.bytes_left() == 0
+ && state != State::ReadAdler32
+ {
+ status = TINFLStatus::HasMoreOutput
+ }
+
+ r.state = state;
+ r.bit_buf = l.bit_buf;
+ r.num_bits = l.num_bits;
+ r.dist = l.dist;
+ r.counter = l.counter;
+ r.num_extra = l.num_extra;
+
+ r.bit_buf &= ((1 as BitBuffer) << r.num_bits) - 1;
+
+    // Update the adler32 checksum with the decompressed bytes if this is a zlib stream,
+    // or if the checksum was explicitly requested.
+ let need_adler = if (flags & TINFL_FLAG_IGNORE_ADLER32) == 0 {
+ flags & (TINFL_FLAG_PARSE_ZLIB_HEADER | TINFL_FLAG_COMPUTE_ADLER32) != 0
+ } else {
+ // If TINFL_FLAG_IGNORE_ADLER32 is enabled, ignore the checksum.
+ false
+ };
+ if need_adler && status as i32 >= 0 {
+ let out_buf_pos = out_buf.position();
+ r.check_adler32 = update_adler32(r.check_adler32, &out_buf.get_ref()[out_pos..out_buf_pos]);
+
+        // Disabled when fuzzing so that random input from the fuzzer is not rejected early,
+        // before it has a chance to reach the interesting parts of the code.
+ if !cfg!(fuzzing) {
+ // Once we are done, check if the checksum matches with the one provided in the zlib header.
+ if status == TINFLStatus::Done
+ && flags & TINFL_FLAG_PARSE_ZLIB_HEADER != 0
+ && r.check_adler32 != r.z_adler32
+ {
+ status = TINFLStatus::Adler32Mismatch;
+ }
+ }
+ }
+
+ (
+ status,
+ in_buf.len() - in_iter.len() - in_undo,
+ out_buf.position() - out_pos,
+ )
+}
+
+#[cfg(test)]
+mod test {
+ use super::*;
+
+ //TODO: Fix these.
+
+ fn tinfl_decompress_oxide<'i>(
+ r: &mut DecompressorOxide,
+ input_buffer: &'i [u8],
+ output_buffer: &mut [u8],
+ flags: u32,
+ ) -> (TINFLStatus, &'i [u8], usize) {
+ let (status, in_pos, out_pos) = decompress(r, input_buffer, output_buffer, 0, flags);
+ (status, &input_buffer[in_pos..], out_pos)
+ }
+
+ #[test]
+ fn decompress_zlib() {
+ let encoded = [
+ 120, 156, 243, 72, 205, 201, 201, 215, 81, 168, 202, 201, 76, 82, 4, 0, 27, 101, 4, 19,
+ ];
+ let flags = TINFL_FLAG_COMPUTE_ADLER32 | TINFL_FLAG_PARSE_ZLIB_HEADER;
+
+ let mut b = DecompressorOxide::new();
+ const LEN: usize = 32;
+ let mut b_buf = vec![0; LEN];
+
+        // This should fail with the out buffer being too small.
+ let b_status = tinfl_decompress_oxide(&mut b, &encoded[..], b_buf.as_mut_slice(), flags);
+
+ assert_eq!(b_status.0, TINFLStatus::Failed);
+
+ let flags = flags | TINFL_FLAG_USING_NON_WRAPPING_OUTPUT_BUF;
+
+ b = DecompressorOxide::new();
+
+ // With TINFL_FLAG_USING_NON_WRAPPING_OUTPUT_BUF set this should no longer fail.
+ let b_status = tinfl_decompress_oxide(&mut b, &encoded[..], b_buf.as_mut_slice(), flags);
+
+ assert_eq!(b_buf[..b_status.2], b"Hello, zlib!"[..]);
+ assert_eq!(b_status.0, TINFLStatus::Done);
+ }
+
+ #[test]
+ fn raw_block() {
+ const LEN: usize = 64;
+
+ let text = b"Hello, zlib!";
+ let encoded = {
+ let len = text.len();
+ let notlen = !len;
+ let mut encoded = vec![
+ 1,
+ len as u8,
+ (len >> 8) as u8,
+ notlen as u8,
+ (notlen >> 8) as u8,
+ ];
+ encoded.extend_from_slice(&text[..]);
+ encoded
+ };
+
+ //let flags = TINFL_FLAG_COMPUTE_ADLER32 | TINFL_FLAG_PARSE_ZLIB_HEADER |
+ let flags = TINFL_FLAG_USING_NON_WRAPPING_OUTPUT_BUF;
+
+ let mut b = DecompressorOxide::new();
+
+ let mut b_buf = vec![0; LEN];
+
+ let b_status = tinfl_decompress_oxide(&mut b, &encoded[..], b_buf.as_mut_slice(), flags);
+ assert_eq!(b_buf[..b_status.2], text[..]);
+ assert_eq!(b_status.0, TINFLStatus::Done);
+ }
+
+ fn masked_lookup(table: &HuffmanTable, bit_buf: BitBuffer) -> (i32, u32) {
+ let ret = table.lookup(bit_buf).unwrap();
+ (ret.0 & 511, ret.1)
+ }
+
+ #[test]
+ fn fixed_table_lookup() {
+ let mut d = DecompressorOxide::new();
+ d.block_type = 1;
+ start_static_table(&mut d);
+ let mut l = LocalVars {
+ bit_buf: d.bit_buf,
+ num_bits: d.num_bits,
+ dist: d.dist,
+ counter: d.counter,
+ num_extra: d.num_extra,
+ };
+ init_tree(&mut d, &mut l);
+ let llt = &d.tables[LITLEN_TABLE];
+ let dt = &d.tables[DIST_TABLE];
+ assert_eq!(masked_lookup(llt, 0b00001100), (0, 8));
+ assert_eq!(masked_lookup(llt, 0b00011110), (72, 8));
+ assert_eq!(masked_lookup(llt, 0b01011110), (74, 8));
+ assert_eq!(masked_lookup(llt, 0b11111101), (143, 8));
+ assert_eq!(masked_lookup(llt, 0b000010011), (144, 9));
+ assert_eq!(masked_lookup(llt, 0b111111111), (255, 9));
+ assert_eq!(masked_lookup(llt, 0b00000000), (256, 7));
+ assert_eq!(masked_lookup(llt, 0b1110100), (279, 7));
+ assert_eq!(masked_lookup(llt, 0b00000011), (280, 8));
+ assert_eq!(masked_lookup(llt, 0b11100011), (287, 8));
+
+ assert_eq!(masked_lookup(dt, 0), (0, 5));
+ assert_eq!(masked_lookup(dt, 20), (5, 5));
+ }
+
+ fn check_result(input: &[u8], expected_status: TINFLStatus, expected_state: State, zlib: bool) {
+ let mut r = DecompressorOxide::default();
+ let mut output_buf = vec![0; 1024 * 32];
+ let flags = if zlib {
+ inflate_flags::TINFL_FLAG_PARSE_ZLIB_HEADER
+ } else {
+ 0
+ } | TINFL_FLAG_USING_NON_WRAPPING_OUTPUT_BUF
+ | TINFL_FLAG_HAS_MORE_INPUT;
+ let (d_status, _in_bytes, _out_bytes) =
+ decompress(&mut r, input, &mut output_buf, 0, flags);
+ assert_eq!(expected_status, d_status);
+ assert_eq!(expected_state, r.state);
+ }
+
+ #[test]
+ fn bogus_input() {
+ use self::check_result as cr;
+ const F: TINFLStatus = TINFLStatus::Failed;
+ const OK: TINFLStatus = TINFLStatus::Done;
+ // Bad CM.
+ cr(&[0x77, 0x85], F, State::BadZlibHeader, true);
+ // Bad window size (but check is correct).
+ cr(&[0x88, 0x98], F, State::BadZlibHeader, true);
+ // Bad check bits.
+ cr(&[0x78, 0x98], F, State::BadZlibHeader, true);
+
+ // Too many code lengths. (From inflate library issues)
+ cr(
+ b"M\xff\xffM*\xad\xad\xad\xad\xad\xad\xad\xcd\xcd\xcdM",
+ F,
+ State::BadTotalSymbols,
+ false,
+ );
+ // Bad CLEN (also from inflate library issues)
+ cr(
+ b"\xdd\xff\xff*M\x94ffffffffff",
+ F,
+ State::BadTotalSymbols,
+ false,
+ );
+
+ // Port of inflate coverage tests from zlib-ng
+ // https://github.com/Dead2/zlib-ng/blob/develop/test/infcover.c
+ let c = |a, b, c| cr(a, b, c, false);
+
+ // Invalid uncompressed/raw block length.
+ c(&[0, 0, 0, 0, 0], F, State::BadRawLength);
+ // Ok empty uncompressed block.
+ c(&[3, 0], OK, State::DoneForever);
+ // Invalid block type.
+ c(&[6], F, State::BlockTypeUnexpected);
+ // Ok uncompressed block.
+ c(&[1, 1, 0, 0xfe, 0xff, 0], OK, State::DoneForever);
+ // Too many litlens, we handle this later than zlib, so this test won't
+ // give the same result.
+ // c(&[0xfc, 0, 0], F, State::BadTotalSymbols);
+ // Invalid set of code lengths - TODO Check if this is the correct error for this.
+ c(&[4, 0, 0xfe, 0xff], F, State::BadTotalSymbols);
+ // Invalid repeat in list of code lengths.
+        // (Try to repeat a non-existent code.)
+ c(&[4, 0, 0x24, 0x49, 0], F, State::BadCodeSizeDistPrevLookup);
+        // Missing end of block code (should we have a separate error for this?) - fails on further input
+ // c(&[4, 0, 0x24, 0xe9, 0xff, 0x6d], F, State::BadTotalSymbols);
+ // Invalid set of literals/lengths
+ c(
+ &[
+ 4, 0x80, 0x49, 0x92, 0x24, 0x49, 0x92, 0x24, 0x71, 0xff, 0xff, 0x93, 0x11, 0,
+ ],
+ F,
+ State::BadTotalSymbols,
+ );
+ // Invalid set of distances - needs more input
+ // c(&[4, 0x80, 0x49, 0x92, 0x24, 0x49, 0x92, 0x24, 0x0f, 0xb4, 0xff, 0xff, 0xc3, 0x84], F, State::BadTotalSymbols);
+ // Invalid distance code
+ c(&[2, 0x7e, 0xff, 0xff], F, State::InvalidDist);
+
+ // Distance refers to position before the start
+ c(
+ &[0x0c, 0xc0, 0x81, 0, 0, 0, 0, 0, 0x90, 0xff, 0x6b, 0x4, 0],
+ F,
+ State::DistanceOutOfBounds,
+ );
+
+ // Trailer
+ // Bad gzip trailer checksum - the gzip header is not handled by miniz_oxide
+ //cr(&[0x1f, 0x8b, 0x08 ,0 ,0 ,0 ,0 ,0 ,0 ,0 ,0x03, 0, 0, 0, 0, 0x01], F, State::BadCRC, false)
+ // Bad gzip trailer length
+ //cr(&[0x1f, 0x8b, 0x08 ,0 ,0 ,0 ,0 ,0 ,0 ,0 ,0x03, 0, 0, 0, 0, 0, 0, 0, 0, 0x01], F, State::BadCRC, false)
+ }
+
+ #[test]
+ fn empty_output_buffer_non_wrapping() {
+ let encoded = [
+ 120, 156, 243, 72, 205, 201, 201, 215, 81, 168, 202, 201, 76, 82, 4, 0, 27, 101, 4, 19,
+ ];
+ let flags = TINFL_FLAG_COMPUTE_ADLER32
+ | TINFL_FLAG_PARSE_ZLIB_HEADER
+ | TINFL_FLAG_USING_NON_WRAPPING_OUTPUT_BUF;
+ let mut r = DecompressorOxide::new();
+ let mut output_buf = vec![];
+ // Check that we handle an empty buffer properly and do not panic.
+ // https://github.com/Frommi/miniz_oxide/issues/23
+ let res = decompress(&mut r, &encoded, &mut output_buf, 0, flags);
+ assert_eq!(res, (TINFLStatus::HasMoreOutput, 4, 0));
+ }
+
+ #[test]
+ fn empty_output_buffer_wrapping() {
+ let encoded = [
+ 0x73, 0x49, 0x4d, 0xcb, 0x49, 0x2c, 0x49, 0x55, 0x00, 0x11, 0x00,
+ ];
+ let flags = TINFL_FLAG_COMPUTE_ADLER32;
+ let mut r = DecompressorOxide::new();
+ let mut output_buf = vec![];
+ // Check that we handle an empty buffer properly and do not panic.
+ // https://github.com/Frommi/miniz_oxide/issues/23
+ let res = decompress(&mut r, &encoded, &mut output_buf, 0, flags);
+ assert_eq!(res, (TINFLStatus::HasMoreOutput, 2, 0));
+ }
+}
diff --git a/vendor/miniz_oxide-0.5.3/src/inflate/mod.rs b/vendor/miniz_oxide-0.5.3/src/inflate/mod.rs
new file mode 100644
index 000000000..535392327
--- /dev/null
+++ b/vendor/miniz_oxide-0.5.3/src/inflate/mod.rs
@@ -0,0 +1,279 @@
+//! This module contains functionality for decompression.
+
+use crate::alloc::boxed::Box;
+use crate::alloc::vec;
+use crate::alloc::vec::Vec;
+use ::core::cmp::min;
+use ::core::usize;
+
+pub mod core;
+mod output_buffer;
+pub mod stream;
+use self::core::*;
+
+const TINFL_STATUS_FAILED_CANNOT_MAKE_PROGRESS: i32 = -4;
+const TINFL_STATUS_BAD_PARAM: i32 = -3;
+const TINFL_STATUS_ADLER32_MISMATCH: i32 = -2;
+const TINFL_STATUS_FAILED: i32 = -1;
+const TINFL_STATUS_DONE: i32 = 0;
+const TINFL_STATUS_NEEDS_MORE_INPUT: i32 = 1;
+const TINFL_STATUS_HAS_MORE_OUTPUT: i32 = 2;
+
+/// Return status codes.
+#[repr(i8)]
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub enum TINFLStatus {
+ /// More input data was expected, but the caller indicated that there was no more data, so the
+ /// input stream is likely truncated.
+ ///
+ /// This can't happen if you have provided the
+ /// [`TINFL_FLAG_HAS_MORE_INPUT`][core::inflate_flags::TINFL_FLAG_HAS_MORE_INPUT] flag to the
+ /// decompression. By setting that flag, you indicate more input exists but is not provided,
+ /// and so reaching the end of the input data without finding the end of the compressed stream
+ /// would instead return a [`NeedsMoreInput`][Self::NeedsMoreInput] status.
+ FailedCannotMakeProgress = TINFL_STATUS_FAILED_CANNOT_MAKE_PROGRESS as i8,
+
+ /// The output buffer has an invalid size; consider the `flags` parameter.
+ BadParam = TINFL_STATUS_BAD_PARAM as i8,
+
+ /// The decompression went fine, but the adler32 checksum did not match the one
+ /// provided in the header.
+ Adler32Mismatch = TINFL_STATUS_ADLER32_MISMATCH as i8,
+
+ /// Failed to decompress due to invalid data.
+ Failed = TINFL_STATUS_FAILED as i8,
+
+ /// Finished decompression without issues.
+ ///
+ /// This indicates the end of the compressed stream has been reached.
+ Done = TINFL_STATUS_DONE as i8,
+
+ /// The decompressor needs more input data to continue decompressing.
+ ///
+ /// This occurs when there's no more consumable input, but the end of the stream hasn't been
+ /// reached, and you have supplied the
+ /// [`TINFL_FLAG_HAS_MORE_INPUT`][core::inflate_flags::TINFL_FLAG_HAS_MORE_INPUT] flag to the
+ /// decompressor. Had you not supplied that flag (which would mean you were asserting that you
+ /// believed all the data was available) you would have gotten a
+ /// [`FailedCannotMakeProgress`][Self::FailedCannotMakeProgress] instead.
+ NeedsMoreInput = TINFL_STATUS_NEEDS_MORE_INPUT as i8,
+
+ /// There is still pending data that didn't fit in the output buffer.
+ HasMoreOutput = TINFL_STATUS_HAS_MORE_OUTPUT as i8,
+}
+
+impl TINFLStatus {
+ pub fn from_i32(value: i32) -> Option<TINFLStatus> {
+ use self::TINFLStatus::*;
+ match value {
+ TINFL_STATUS_FAILED_CANNOT_MAKE_PROGRESS => Some(FailedCannotMakeProgress),
+ TINFL_STATUS_BAD_PARAM => Some(BadParam),
+ TINFL_STATUS_ADLER32_MISMATCH => Some(Adler32Mismatch),
+ TINFL_STATUS_FAILED => Some(Failed),
+ TINFL_STATUS_DONE => Some(Done),
+ TINFL_STATUS_NEEDS_MORE_INPUT => Some(NeedsMoreInput),
+ TINFL_STATUS_HAS_MORE_OUTPUT => Some(HasMoreOutput),
+ _ => None,
+ }
+ }
+}
+
+/// Decompress the deflate-encoded data in `input` to a vector.
+///
+/// Returns a [`Result`] containing the [`Vec`] of decompressed data on success, or the [error status][TINFLStatus] on failure.
+#[inline]
+pub fn decompress_to_vec(input: &[u8]) -> Result<Vec<u8>, TINFLStatus> {
+ decompress_to_vec_inner(input, 0, usize::max_value())
+}
+
+/// Decompress the deflate-encoded data (with a zlib wrapper) in `input` to a vector.
+///
+/// Returns a [`Result`] containing the [`Vec`] of decompressed data on success, or the [error status][TINFLStatus] on failure.
+#[inline]
+pub fn decompress_to_vec_zlib(input: &[u8]) -> Result<Vec<u8>, TINFLStatus> {
+ decompress_to_vec_inner(
+ input,
+ inflate_flags::TINFL_FLAG_PARSE_ZLIB_HEADER,
+ usize::max_value(),
+ )
+}
+
+/// Decompress the deflate-encoded data in `input` to a vector.
+/// The vector is grown to at most `max_size` bytes; if the data does not fit in that size,
+/// a [`TINFLStatus::HasMoreOutput`] error is returned.
+///
+/// Returns a [`Result`] containing the [`Vec`] of decompressed data on success, or the [error status][TINFLStatus] on failure.
+#[inline]
+pub fn decompress_to_vec_with_limit(input: &[u8], max_size: usize) -> Result<Vec<u8>, TINFLStatus> {
+ decompress_to_vec_inner(input, 0, max_size)
+}
+
+/// Decompress the deflate-encoded data (with a zlib wrapper) in `input` to a vector.
+/// The vector is grown to at most `max_size` bytes; if the data does not fit in that size,
+/// a [`TINFLStatus::HasMoreOutput`] error is returned.
+///
+/// Returns a [`Result`] containing the [`Vec`] of decompressed data on success, or the [error status][TINFLStatus] on failure.
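+///
+/// # Example
+///
+/// A minimal sketch using the zlib-wrapped "Hello, zlib!" stream from this module's tests:
+///
+/// ```
+/// use miniz_oxide::inflate::{decompress_to_vec_zlib_with_limit, TINFLStatus};
+///
+/// let encoded: [u8; 20] = [
+///     120, 156, 243, 72, 205, 201, 201, 215, 81, 168, 202, 201, 76, 82, 4, 0, 27, 101, 4, 19,
+/// ];
+/// // A generous limit lets the 12-byte output through unchanged.
+/// let decompressed = decompress_to_vec_zlib_with_limit(&encoded, 100).unwrap();
+/// assert_eq!(decompressed.as_slice(), &b"Hello, zlib!"[..]);
+/// // A limit smaller than the decompressed size is reported as `HasMoreOutput`.
+/// assert_eq!(
+///     decompress_to_vec_zlib_with_limit(&encoded, 8),
+///     Err(TINFLStatus::HasMoreOutput)
+/// );
+/// ```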
+#[inline]
+pub fn decompress_to_vec_zlib_with_limit(
+ input: &[u8],
+ max_size: usize,
+) -> Result<Vec<u8>, TINFLStatus> {
+ decompress_to_vec_inner(input, inflate_flags::TINFL_FLAG_PARSE_ZLIB_HEADER, max_size)
+}
+
+/// Backend of various to-[`Vec`] decompressions.
+///
+/// Returns a [`Result`] containing the [`Vec`] of decompressed data on success, or the [error status][TINFLStatus] on failure.
+fn decompress_to_vec_inner(
+ input: &[u8],
+ flags: u32,
+ max_output_size: usize,
+) -> Result<Vec<u8>, TINFLStatus> {
+ let flags = flags | inflate_flags::TINFL_FLAG_USING_NON_WRAPPING_OUTPUT_BUF;
+ let mut ret: Vec<u8> = vec![0; min(input.len().saturating_mul(2), max_output_size)];
+
+ let mut decomp = Box::<DecompressorOxide>::default();
+
+ let mut in_pos = 0;
+ let mut out_pos = 0;
+ loop {
+ // Wrap the whole output slice so we know we have enough of the
+ // decompressed data for matches.
+ let (status, in_consumed, out_consumed) =
+ decompress(&mut decomp, &input[in_pos..], &mut ret, out_pos, flags);
+ in_pos += in_consumed;
+ out_pos += out_consumed;
+
+ match status {
+ TINFLStatus::Done => {
+ ret.truncate(out_pos);
+ return Ok(ret);
+ }
+
+ TINFLStatus::HasMoreOutput => {
+ // We need more space, so check if we can resize the buffer and do it.
+ let new_len = ret
+ .len()
+ .checked_add(out_pos)
+ .ok_or(TINFLStatus::HasMoreOutput)?;
+ if new_len > max_output_size {
+ return Err(TINFLStatus::HasMoreOutput);
+ };
+ ret.resize(new_len, 0);
+ }
+
+ _ => return Err(status),
+ }
+ }
+}
+
+/// Decompress one or more source slices from an iterator into the output slice.
+///
+/// * On success, returns the number of bytes that were written.
+/// * On failure, returns the failure status code.
+///
+/// This will fail if the output buffer is not large enough, but in that case
+/// the output buffer will still contain the partial decompression.
+///
+/// * `out`: the output buffer.
+/// * `it`: the iterator of input slices.
+/// * `zlib_header`: whether the first slice out of the iterator is expected to have a
+/// Zlib header. Otherwise the slices are assumed to contain the raw deflate data only.
+/// * `ignore_adler32`: whether to skip computing and checking the adler32 checksum.
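+///
+/// # Example
+///
+/// A minimal sketch feeding the zlib-wrapped "Hello, zlib!" stream from this module's tests
+/// in 10-byte chunks (the spare output byte is explained in the chunked test below):
+///
+/// ```
+/// use miniz_oxide::inflate::decompress_slice_iter_to_slice;
+///
+/// let encoded: [u8; 20] = [
+///     120, 156, 243, 72, 205, 201, 201, 215, 81, 168, 202, 201, 76, 82, 4, 0, 27, 101, 4, 19,
+/// ];
+/// let mut out = [0u8; 13];
+/// let written = decompress_slice_iter_to_slice(&mut out, encoded.chunks(10), true, false)
+///     .expect("decompression failed");
+/// assert_eq!(&out[..written], &b"Hello, zlib!"[..]);
+/// ```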
+pub fn decompress_slice_iter_to_slice<'out, 'inp>(
+ out: &'out mut [u8],
+ it: impl Iterator<Item = &'inp [u8]>,
+ zlib_header: bool,
+ ignore_adler32: bool,
+) -> Result<usize, TINFLStatus> {
+ use self::core::inflate_flags::*;
+
+ let mut it = it.peekable();
+ let r = &mut DecompressorOxide::new();
+ let mut out_pos = 0;
+ while let Some(in_buf) = it.next() {
+ let has_more = it.peek().is_some();
+ let flags = {
+ let mut f = TINFL_FLAG_USING_NON_WRAPPING_OUTPUT_BUF;
+ if zlib_header {
+ f |= TINFL_FLAG_PARSE_ZLIB_HEADER;
+ }
+ if ignore_adler32 {
+ f |= TINFL_FLAG_IGNORE_ADLER32;
+ }
+ if has_more {
+ f |= TINFL_FLAG_HAS_MORE_INPUT;
+ }
+ f
+ };
+ let (status, _input_read, bytes_written) = decompress(r, in_buf, out, out_pos, flags);
+ out_pos += bytes_written;
+ match status {
+ TINFLStatus::NeedsMoreInput => continue,
+ TINFLStatus::Done => return Ok(out_pos),
+ e => return Err(e),
+ }
+ }
+ // If we ran out of source slices without getting a `Done` from the
+ // decompression, we can call it a failure.
+ Err(TINFLStatus::FailedCannotMakeProgress)
+}
+
+#[cfg(test)]
+mod test {
+ use super::{
+ decompress_slice_iter_to_slice, decompress_to_vec_zlib, decompress_to_vec_zlib_with_limit,
+ TINFLStatus,
+ };
+ const ENCODED: [u8; 20] = [
+ 120, 156, 243, 72, 205, 201, 201, 215, 81, 168, 202, 201, 76, 82, 4, 0, 27, 101, 4, 19,
+ ];
+
+ #[test]
+ fn decompress_vec() {
+ let res = decompress_to_vec_zlib(&ENCODED[..]).unwrap();
+ assert_eq!(res.as_slice(), &b"Hello, zlib!"[..]);
+ }
+
+ #[test]
+ fn decompress_vec_with_high_limit() {
+ let res = decompress_to_vec_zlib_with_limit(&ENCODED[..], 100_000).unwrap();
+ assert_eq!(res.as_slice(), &b"Hello, zlib!"[..]);
+ }
+
+ #[test]
+ fn fail_to_decompress_with_limit() {
+ let res = decompress_to_vec_zlib_with_limit(&ENCODED[..], 8);
+ match res {
+ Err(TINFLStatus::HasMoreOutput) => (), // expected result
+ _ => panic!("Decompression output size limit was not enforced"),
+ }
+ }
+
+ #[test]
+ fn test_decompress_slice_iter_to_slice() {
+ // one slice
+ let mut out = [0_u8; 12_usize];
+ let r =
+ decompress_slice_iter_to_slice(&mut out, Some(&ENCODED[..]).into_iter(), true, false);
+ assert_eq!(r, Ok(12));
+ assert_eq!(&out[..12], &b"Hello, zlib!"[..]);
+
+ // some chunks at a time
+ for chunk_size in 1..13 {
+ // Note: because of https://github.com/Frommi/miniz_oxide/issues/110 our
+ // out buffer needs to have +1 byte available when the chunk size cuts
+ // the adler32 data off from the last actual data.
+ let mut out = [0_u8; 12_usize + 1];
+ let r =
+ decompress_slice_iter_to_slice(&mut out, ENCODED.chunks(chunk_size), true, false);
+ assert_eq!(r, Ok(12));
+ assert_eq!(&out[..12], &b"Hello, zlib!"[..]);
+ }
+
+ // output buffer too small
+ let mut out = [0_u8; 3_usize];
+ let r = decompress_slice_iter_to_slice(&mut out, ENCODED.chunks(7), true, false);
+ assert!(r.is_err());
+ }
+}
diff --git a/vendor/miniz_oxide-0.5.3/src/inflate/output_buffer.rs b/vendor/miniz_oxide-0.5.3/src/inflate/output_buffer.rs
new file mode 100644
index 000000000..5218a807d
--- /dev/null
+++ b/vendor/miniz_oxide-0.5.3/src/inflate/output_buffer.rs
@@ -0,0 +1,60 @@
+/// A wrapper for the output slice used when decompressing.
+///
+/// Using this rather than `Cursor` lets us implement the writing methods directly on
+/// the buffer and lets us use a usize rather than u64 for the position which helps with
+/// performance on 32-bit systems.
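+///
+/// # Example
+///
+/// A crate-internal usage sketch (this type is not exported, so the snippet is not compiled
+/// as a doc test):
+///
+/// ```ignore
+/// let mut data = [0u8; 8];
+/// let mut out = OutputBuffer::from_slice_and_pos(&mut data, 0);
+/// out.write_byte(b'a');
+/// out.write_slice(b"bc");
+/// assert_eq!(out.position(), 3);
+/// assert_eq!(out.bytes_left(), 5);
+/// assert_eq!(&out.get_ref()[..3], b"abc");
+/// ```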
+pub struct OutputBuffer<'a> {
+ slice: &'a mut [u8],
+ position: usize,
+}
+
+impl<'a> OutputBuffer<'a> {
+ #[inline]
+ pub fn from_slice_and_pos(slice: &'a mut [u8], position: usize) -> OutputBuffer<'a> {
+ OutputBuffer { slice, position }
+ }
+
+ #[inline]
+ pub const fn position(&self) -> usize {
+ self.position
+ }
+
+ #[inline]
+ pub fn set_position(&mut self, position: usize) {
+ self.position = position;
+ }
+
+ /// Write a byte to the current position and increment
+ ///
+ /// Assumes that there is space.
+ #[inline]
+ pub fn write_byte(&mut self, byte: u8) {
+ self.slice[self.position] = byte;
+ self.position += 1;
+ }
+
+ /// Write a slice to the current position and increment
+ ///
+ /// Assumes that there is space.
+ #[inline]
+ pub fn write_slice(&mut self, data: &[u8]) {
+ let len = data.len();
+ self.slice[self.position..self.position + len].copy_from_slice(data);
+ self.position += data.len();
+ }
+
+ #[inline]
+ pub const fn bytes_left(&self) -> usize {
+ self.slice.len() - self.position
+ }
+
+ #[inline]
+ pub const fn get_ref(&self) -> &[u8] {
+ self.slice
+ }
+
+ #[inline]
+ pub fn get_mut(&mut self) -> &mut [u8] {
+ self.slice
+ }
+}
diff --git a/vendor/miniz_oxide-0.5.3/src/inflate/stream.rs b/vendor/miniz_oxide-0.5.3/src/inflate/stream.rs
new file mode 100644
index 000000000..715747166
--- /dev/null
+++ b/vendor/miniz_oxide-0.5.3/src/inflate/stream.rs
@@ -0,0 +1,415 @@
+//! Extra streaming decompression functionality.
+//!
+//! As of now this is mainly intended for use to build a higher-level wrapper.
+use crate::alloc::boxed::Box;
+use core::{cmp, mem};
+
+use crate::inflate::core::{decompress, inflate_flags, DecompressorOxide, TINFL_LZ_DICT_SIZE};
+use crate::inflate::TINFLStatus;
+use crate::{DataFormat, MZError, MZFlush, MZResult, MZStatus, StreamResult};
+
+/// Tag that determines the reset policy of [InflateState](struct.InflateState.html).
+pub trait ResetPolicy {
+ /// Performs reset
+ fn reset(&self, state: &mut InflateState);
+}
+
+/// Resets the state without performing expensive operations (e.g. zeroing the buffer).
+///
+/// Note that not zeroing the buffer can lead to security issues when dealing with untrusted input.
+pub struct MinReset;
+
+impl ResetPolicy for MinReset {
+ fn reset(&self, state: &mut InflateState) {
+ state.decompressor().init();
+ state.dict_ofs = 0;
+ state.dict_avail = 0;
+ state.first_call = true;
+ state.has_flushed = false;
+ state.last_status = TINFLStatus::NeedsMoreInput;
+ }
+}
+
+/// Resets the state and zeroes the memory, continuing to use the same data format.
+pub struct ZeroReset;
+
+impl ResetPolicy for ZeroReset {
+ #[inline]
+ fn reset(&self, state: &mut InflateState) {
+ MinReset.reset(state);
+ state.dict = [0; TINFL_LZ_DICT_SIZE];
+ }
+}
+
+/// Full reset of the state, including zeroing memory.
+///
+/// Requires a new data format to be provided.
+pub struct FullReset(pub DataFormat);
+
+impl ResetPolicy for FullReset {
+ #[inline]
+ fn reset(&self, state: &mut InflateState) {
+ ZeroReset.reset(state);
+ state.data_format = self.0;
+ }
+}
+
+/// A struct that combines a decompressor with extra data for streaming decompression.
+///
+pub struct InflateState {
+ /// Inner decompressor struct
+ decomp: DecompressorOxide,
+
+ /// Buffer of input bytes for matches.
+ /// TODO: Could probably do this a bit cleaner with some
+ /// Cursor-like class.
+ /// We may also look into whether we need to keep a buffer here, or just one in the
+ /// decompressor struct.
+ dict: [u8; TINFL_LZ_DICT_SIZE],
+ /// Where in the buffer are we currently at?
+ dict_ofs: usize,
+ /// How many bytes of data to be flushed are currently in the buffer?
+ dict_avail: usize,
+
+ first_call: bool,
+ has_flushed: bool,
+
+ /// Whether the input data is wrapped in a zlib header and checksum.
+ /// TODO: This should be stored in the decompressor.
+ data_format: DataFormat,
+ last_status: TINFLStatus,
+}
+
+impl Default for InflateState {
+ fn default() -> Self {
+ InflateState {
+ decomp: DecompressorOxide::default(),
+ dict: [0; TINFL_LZ_DICT_SIZE],
+ dict_ofs: 0,
+ dict_avail: 0,
+ first_call: true,
+ has_flushed: false,
+ data_format: DataFormat::Raw,
+ last_status: TINFLStatus::NeedsMoreInput,
+ }
+ }
+}
+impl InflateState {
+ /// Create a new state.
+ ///
+ /// Note that this struct is quite large due to internal buffers, and as such storing it on
+ /// the stack is not recommended.
+ ///
+ /// # Parameters
+ /// `data_format`: Determines whether the compressed data is assumed to be wrapped with zlib
+ /// metadata.
+ pub fn new(data_format: DataFormat) -> InflateState {
+ InflateState {
+ data_format,
+ ..Default::default()
+ }
+ }
+
+ /// Create a new state on the heap.
+ ///
+ /// # Parameters
+ /// `data_format`: Determines whether the compressed data is assumed to be wrapped with zlib
+ /// metadata.
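+ ///
+ /// # Example
+ ///
+ /// A minimal sketch:
+ ///
+ /// ```
+ /// use miniz_oxide::inflate::stream::InflateState;
+ /// use miniz_oxide::DataFormat;
+ ///
+ /// // Heap-allocate the (large) state; `DataFormat::Zlib` expects a zlib header and adler32 trailer.
+ /// let _state = InflateState::new_boxed(DataFormat::Zlib);
+ /// ```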
+ pub fn new_boxed(data_format: DataFormat) -> Box<InflateState> {
+ let mut b: Box<InflateState> = Box::default();
+ b.data_format = data_format;
+ b
+ }
+
+ /// Access the inner decompressor.
+ pub fn decompressor(&mut self) -> &mut DecompressorOxide {
+ &mut self.decomp
+ }
+
+ /// Return the status of the last call to `inflate` with this `InflateState`.
+ pub const fn last_status(&self) -> TINFLStatus {
+ self.last_status
+ }
+
+ /// Create a new state using miniz/zlib style window bits parameter.
+ ///
+ /// The decompressor does not support different window sizes. As such,
+ /// any positive (>0) value will set the zlib header flag, while a zero or negative one
+ /// will not.
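+ ///
+ /// # Example
+ ///
+ /// A minimal sketch:
+ ///
+ /// ```
+ /// use miniz_oxide::inflate::stream::InflateState;
+ ///
+ /// // Positive window bits: expect a zlib wrapper; zero or negative: raw deflate.
+ /// let _zlib = InflateState::new_boxed_with_window_bits(15);
+ /// let _raw = InflateState::new_boxed_with_window_bits(-15);
+ /// ```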
+ pub fn new_boxed_with_window_bits(window_bits: i32) -> Box<InflateState> {
+ let mut b: Box<InflateState> = Box::default();
+ b.data_format = DataFormat::from_window_bits(window_bits);
+ b
+ }
+
+ #[inline]
+ /// Reset the decompressor without re-allocating memory, using the given
+ /// data format.
+ pub fn reset(&mut self, data_format: DataFormat) {
+ self.reset_as(FullReset(data_format));
+ }
+
+ #[inline]
+ /// Resets the state according to the specified policy.
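+ ///
+ /// # Example
+ ///
+ /// A minimal sketch:
+ ///
+ /// ```
+ /// use miniz_oxide::inflate::stream::{InflateState, MinReset};
+ /// use miniz_oxide::DataFormat;
+ ///
+ /// let mut state = InflateState::new_boxed(DataFormat::Zlib);
+ /// // Make the state reusable for another stream without zeroing the internal dictionary.
+ /// state.reset_as(MinReset);
+ /// ```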
+ pub fn reset_as<T: ResetPolicy>(&mut self, policy: T) {
+ policy.reset(self)
+ }
+}
+
+/// Try to decompress from `input` to `output` with the given [`InflateState`]
+///
+/// # `flush`
+///
+/// Generally, the various [`MZFlush`] flags have meaning only on the compression side. They can be
+/// supplied here, but the only one that has any semantic meaning is [`MZFlush::Finish`], which is a
+/// signal that the stream is expected to finish, and failing to do so is an error. It isn't
+/// necessary to specify it when the stream ends; you'll still get returned a
+/// [`MZStatus::StreamEnd`] anyway. Other values either have no effect or cause errors. It's
+/// likely that you'll almost always just want to use [`MZFlush::None`].
+///
+/// # Errors
+///
+/// Returns [`MZError::Buf`] if the `output` slice is empty or no progress was made due
+/// to lack of expected input data, or if called with [`MZFlush::Finish`] and input wasn't all
+/// consumed.
+///
+/// Returns [`MZError::Data`] if this or a previous call failed with an error return from
+/// [`TINFLStatus`]; probably indicates corrupted data.
+///
+/// Returns [`MZError::Stream`] when called with [`MZFlush::Full`] (meaningless on
+/// decompression), or when called without [`MZFlush::Finish`] after an earlier call with
+/// [`MZFlush::Finish`] has been made.
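+///
+/// # Example
+///
+/// A minimal sketch using the zlib-wrapped "Hello, zlib!" stream from this module's tests:
+///
+/// ```
+/// use miniz_oxide::inflate::stream::{inflate, InflateState};
+/// use miniz_oxide::{DataFormat, MZFlush, MZStatus};
+///
+/// let encoded: [u8; 20] = [
+///     120, 156, 243, 72, 205, 201, 201, 215, 81, 168, 202, 201, 76, 82, 4, 0, 27, 101, 4, 19,
+/// ];
+/// let mut state = InflateState::new_boxed(DataFormat::Zlib);
+/// let mut out = vec![0u8; 64];
+/// // All of the input is available up front, so `MZFlush::Finish` is appropriate here.
+/// let res = inflate(&mut state, &encoded, &mut out, MZFlush::Finish);
+/// assert_eq!(res.status, Ok(MZStatus::StreamEnd));
+/// assert_eq!(&out[..res.bytes_written], &b"Hello, zlib!"[..]);
+/// ```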
+pub fn inflate(
+ state: &mut InflateState,
+ input: &[u8],
+ output: &mut [u8],
+ flush: MZFlush,
+) -> StreamResult {
+ let mut bytes_consumed = 0;
+ let mut bytes_written = 0;
+ let mut next_in = input;
+ let mut next_out = output;
+
+ if flush == MZFlush::Full {
+ return StreamResult::error(MZError::Stream);
+ }
+
+ let mut decomp_flags = if state.data_format == DataFormat::Zlib {
+ inflate_flags::TINFL_FLAG_COMPUTE_ADLER32
+ } else {
+ inflate_flags::TINFL_FLAG_IGNORE_ADLER32
+ };
+
+ if (state.data_format == DataFormat::Zlib)
+ | (state.data_format == DataFormat::ZLibIgnoreChecksum)
+ {
+ decomp_flags |= inflate_flags::TINFL_FLAG_PARSE_ZLIB_HEADER;
+ }
+
+ let first_call = state.first_call;
+ state.first_call = false;
+ if (state.last_status as i32) < 0 {
+ return StreamResult::error(MZError::Data);
+ }
+
+ if state.has_flushed && (flush != MZFlush::Finish) {
+ return StreamResult::error(MZError::Stream);
+ }
+ state.has_flushed |= flush == MZFlush::Finish;
+
+ if (flush == MZFlush::Finish) && first_call {
+ decomp_flags |= inflate_flags::TINFL_FLAG_USING_NON_WRAPPING_OUTPUT_BUF;
+
+ let status = decompress(&mut state.decomp, next_in, next_out, 0, decomp_flags);
+ let in_bytes = status.1;
+ let out_bytes = status.2;
+ let status = status.0;
+
+ state.last_status = status;
+
+ bytes_consumed += in_bytes;
+ bytes_written += out_bytes;
+
+ let ret_status = {
+ if (status as i32) < 0 {
+ Err(MZError::Data)
+ } else if status != TINFLStatus::Done {
+ state.last_status = TINFLStatus::Failed;
+ Err(MZError::Buf)
+ } else {
+ Ok(MZStatus::StreamEnd)
+ }
+ };
+ return StreamResult {
+ bytes_consumed,
+ bytes_written,
+ status: ret_status,
+ };
+ }
+
+ if flush != MZFlush::Finish {
+ decomp_flags |= inflate_flags::TINFL_FLAG_HAS_MORE_INPUT;
+ }
+
+ if state.dict_avail != 0 {
+ bytes_written += push_dict_out(state, &mut next_out);
+ return StreamResult {
+ bytes_consumed,
+ bytes_written,
+ status: Ok(
+ if (state.last_status == TINFLStatus::Done) && (state.dict_avail == 0) {
+ MZStatus::StreamEnd
+ } else {
+ MZStatus::Ok
+ },
+ ),
+ };
+ }
+
+ let status = inflate_loop(
+ state,
+ &mut next_in,
+ &mut next_out,
+ &mut bytes_consumed,
+ &mut bytes_written,
+ decomp_flags,
+ flush,
+ );
+ StreamResult {
+ bytes_consumed,
+ bytes_written,
+ status,
+ }
+}
+
+fn inflate_loop(
+ state: &mut InflateState,
+ next_in: &mut &[u8],
+ next_out: &mut &mut [u8],
+ total_in: &mut usize,
+ total_out: &mut usize,
+ decomp_flags: u32,
+ flush: MZFlush,
+) -> MZResult {
+ let orig_in_len = next_in.len();
+ loop {
+ let status = decompress(
+ &mut state.decomp,
+ *next_in,
+ &mut state.dict,
+ state.dict_ofs,
+ decomp_flags,
+ );
+
+ let in_bytes = status.1;
+ let out_bytes = status.2;
+ let status = status.0;
+
+ state.last_status = status;
+
+ *next_in = &next_in[in_bytes..];
+ *total_in += in_bytes;
+
+ state.dict_avail = out_bytes;
+ *total_out += push_dict_out(state, next_out);
+
+ // The stream was corrupted, and decompression failed.
+ if (status as i32) < 0 {
+ return Err(MZError::Data);
+ }
+
+ // The decompressor has flushed all its data and is waiting for more input, but
+ // there was no more input provided.
+ if (status == TINFLStatus::NeedsMoreInput) && orig_in_len == 0 {
+ return Err(MZError::Buf);
+ }
+
+ if flush == MZFlush::Finish {
+ if status == TINFLStatus::Done {
+ // There is not enough space in the output buffer to flush the remaining
+ // decompressed data in the internal buffer.
+ return if state.dict_avail != 0 {
+ Err(MZError::Buf)
+ } else {
+ Ok(MZStatus::StreamEnd)
+ };
+ // No more space in the output buffer, but we're not done.
+ } else if next_out.is_empty() {
+ return Err(MZError::Buf);
+ }
+ } else {
+ // We're not expected to finish, so it's fine if we can't flush everything yet.
+ let empty_buf = next_in.is_empty() || next_out.is_empty();
+ if (status == TINFLStatus::Done) || empty_buf || (state.dict_avail != 0) {
+ return if (status == TINFLStatus::Done) && (state.dict_avail == 0) {
+ // No more data left, we're done.
+ Ok(MZStatus::StreamEnd)
+ } else {
+ // Ok for now, still waiting for more input data or output space.
+ Ok(MZStatus::Ok)
+ };
+ }
+ }
+ }
+}
+
+fn push_dict_out(state: &mut InflateState, next_out: &mut &mut [u8]) -> usize {
+ let n = cmp::min(state.dict_avail as usize, next_out.len());
+ (next_out[..n]).copy_from_slice(&state.dict[state.dict_ofs..state.dict_ofs + n]);
+ *next_out = &mut mem::take(next_out)[n..];
+ state.dict_avail -= n;
+ state.dict_ofs = (state.dict_ofs + (n)) & (TINFL_LZ_DICT_SIZE - 1);
+ n
+}
+
+#[cfg(test)]
+mod test {
+ use super::{inflate, InflateState};
+ use crate::{DataFormat, MZFlush, MZStatus};
+ use alloc::vec;
+
+ #[test]
+ fn test_state() {
+ let encoded = [
+ 120u8, 156, 243, 72, 205, 201, 201, 215, 81, 168, 202, 201, 76, 82, 4, 0, 27, 101, 4,
+ 19,
+ ];
+ let mut out = vec![0; 50];
+ let mut state = InflateState::new_boxed(DataFormat::Zlib);
+ let res = inflate(&mut state, &encoded, &mut out, MZFlush::Finish);
+ let status = res.status.expect("Failed to decompress!");
+ assert_eq!(status, MZStatus::StreamEnd);
+ assert_eq!(out[..res.bytes_written as usize], b"Hello, zlib!"[..]);
+ assert_eq!(res.bytes_consumed, encoded.len());
+
+ state.reset_as(super::ZeroReset);
+ out.iter_mut().map(|x| *x = 0).count();
+ let res = inflate(&mut state, &encoded, &mut out, MZFlush::Finish);
+ let status = res.status.expect("Failed to decompress!");
+ assert_eq!(status, MZStatus::StreamEnd);
+ assert_eq!(out[..res.bytes_written as usize], b"Hello, zlib!"[..]);
+ assert_eq!(res.bytes_consumed, encoded.len());
+
+ state.reset_as(super::MinReset);
+ out.iter_mut().map(|x| *x = 0).count();
+ let res = inflate(&mut state, &encoded, &mut out, MZFlush::Finish);
+ let status = res.status.expect("Failed to decompress!");
+ assert_eq!(status, MZStatus::StreamEnd);
+ assert_eq!(out[..res.bytes_written as usize], b"Hello, zlib!"[..]);
+ assert_eq!(res.bytes_consumed, encoded.len());
+ assert_eq!(state.decompressor().adler32(), Some(459605011));
+
+ // Test state when not computing adler.
+ state = InflateState::new_boxed(DataFormat::ZLibIgnoreChecksum);
+ out.iter_mut().map(|x| *x = 0).count();
+ let res = inflate(&mut state, &encoded, &mut out, MZFlush::Finish);
+ let status = res.status.expect("Failed to decompress!");
+ assert_eq!(status, MZStatus::StreamEnd);
+ assert_eq!(out[..res.bytes_written as usize], b"Hello, zlib!"[..]);
+ assert_eq!(res.bytes_consumed, encoded.len());
+ // Not computed, so should be Some(1)
+ assert_eq!(state.decompressor().adler32(), Some(1));
+ // Should still have the checksum read from the zlib header.
+ assert_eq!(state.decompressor().adler32_header(), Some(459605011))
+ }
+}
diff --git a/vendor/miniz_oxide-0.5.3/src/lib.rs b/vendor/miniz_oxide-0.5.3/src/lib.rs
new file mode 100644
index 000000000..8357c5200
--- /dev/null
+++ b/vendor/miniz_oxide-0.5.3/src/lib.rs
@@ -0,0 +1,208 @@
+//! A pure rust replacement for the [miniz](https://github.com/richgel999/miniz)
+//! DEFLATE/zlib encoder/decoder.
+//! The plan for this crate is to be used as a back-end for the
+//! [flate2](https://github.com/alexcrichton/flate2-rs) crate and eventually remove the
+//! need to depend on a C library.
+//!
+//! # Usage
+//! ## Simple compression/decompression:
+//! ``` rust
+//!
+//! use miniz_oxide::inflate::decompress_to_vec;
+//! use miniz_oxide::deflate::compress_to_vec;
+//!
+//! fn roundtrip(data: &[u8]) {
+//! let compressed = compress_to_vec(data, 6);
+//! let decompressed = decompress_to_vec(compressed.as_slice()).expect("Failed to decompress!");
+//! # let _ = decompressed;
+//! }
+//!
+//! # roundtrip(b"Test_data test data lalalal blabla");
+//!
+//! ```
+
+#![forbid(unsafe_code)]
+#![no_std]
+
+extern crate alloc;
+
+pub mod deflate;
+pub mod inflate;
+mod shared;
+
+pub use crate::shared::update_adler32 as mz_adler32_oxide;
+pub use crate::shared::{MZ_ADLER32_INIT, MZ_DEFAULT_WINDOW_BITS};
+
+/// A list of flush types.
+///
+/// See <http://www.bolet.org/~pornin/deflate-flush.html> for more in-depth info.
+#[repr(i32)]
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub enum MZFlush {
+ /// Don't force any flushing.
+ /// Used when more input data is expected.
+ None = 0,
+ /// Zlib partial flush.
+ /// Currently treated as [`Sync`].
+ Partial = 1,
+ /// Finish compressing the currently buffered data, and output an empty raw block.
+ /// Has no use in decompression.
+ Sync = 2,
+ /// Same as [`Sync`], but resets the compression dictionary so that further compressed
+ /// data does not depend on data compressed before the flush.
+ ///
+ /// Has no use in decompression, and is an error to supply in that case.
+ Full = 3,
+ /// Attempt to flush the remaining data and end the stream.
+ Finish = 4,
+ /// Not implemented.
+ Block = 5,
+}
+
+impl MZFlush {
+ /// Create an MZFlush value from an integer value.
+ ///
+ /// Returns `MZError::Param` on invalid values.
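+ ///
+ /// # Example
+ ///
+ /// A minimal sketch:
+ ///
+ /// ```
+ /// use miniz_oxide::{MZError, MZFlush};
+ ///
+ /// assert_eq!(MZFlush::new(4), Ok(MZFlush::Finish));
+ /// assert_eq!(MZFlush::new(42), Err(MZError::Param));
+ /// ```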
+ pub fn new(flush: i32) -> Result<Self, MZError> {
+ match flush {
+ 0 => Ok(MZFlush::None),
+ 1 | 2 => Ok(MZFlush::Sync),
+ 3 => Ok(MZFlush::Full),
+ 4 => Ok(MZFlush::Finish),
+ _ => Err(MZError::Param),
+ }
+ }
+}
+
+/// A list of miniz successful status codes.
+///
+/// These are emitted as the [`Ok`] side of a [`MZResult`] in the [`StreamResult`] returned from
+/// [`deflate::stream::deflate()`] or [`inflate::stream::inflate()`].
+#[repr(i32)]
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub enum MZStatus {
+ /// Operation succeeded.
+ ///
+ /// Some data was decompressed or compressed; see the byte counters in the [`StreamResult`] for
+ /// details.
+ Ok = 0,
+
+ /// Operation succeeded and end of deflate stream was found.
+ ///
+ /// X-ref [`TINFLStatus::Done`][inflate::TINFLStatus::Done] or
+ /// [`TDEFLStatus::Done`][deflate::core::TDEFLStatus::Done] for `inflate` or `deflate`
+ /// respectively.
+ StreamEnd = 1,
+
+ /// Unused
+ NeedDict = 2,
+}
+
+/// A list of miniz failed status codes.
+///
+/// These are emitted as the [`Err`] side of a [`MZResult`] in the [`StreamResult`] returned from
+/// [`deflate::stream::deflate()`] or [`inflate::stream::inflate()`].
+#[repr(i32)]
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub enum MZError {
+ /// Unused
+ ErrNo = -1,
+
+ /// General stream error.
+ ///
+ /// See [`inflate::stream::inflate()`] docs for details of how it can occur there.
+ ///
+ /// See [`deflate::stream::deflate()`] docs for how it can in principle occur there, though it's
+ /// believed impossible in practice.
+ Stream = -2,
+
+ /// Error in inflation; see [`inflate::stream::inflate()`] for details.
+ ///
+ /// Not returned from [`deflate::stream::deflate()`].
+ Data = -3,
+
+ /// Unused
+ Mem = -4,
+
+ /// Buffer-related error.
+ ///
+ /// See the docs of [`deflate::stream::deflate()`] or [`inflate::stream::inflate()`] for details
+ /// of when it would trigger in the one you're using.
+ Buf = -5,
+
+ /// Unused
+ Version = -6,
+
+ /// Bad parameters.
+ ///
+ /// This can be returned from [`deflate::stream::deflate()`] in the case of bad parameters. See
+ /// [`TDEFLStatus::BadParam`][deflate::core::TDEFLStatus::BadParam].
+ Param = -10_000,
+}
+
+/// How compressed data is wrapped.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+#[non_exhaustive]
+pub enum DataFormat {
+ /// Wrapped using the [zlib](http://www.zlib.org/rfc-zlib.html) format.
+ Zlib,
+ /// Zlib wrapped, but skip computing and checking the adler32 checksum.
+ /// Currently only used for inflate; behaves the same as Zlib for compression.
+ ZLibIgnoreChecksum,
+ /// Raw DEFLATE.
+ Raw,
+}
+
+impl DataFormat {
+ pub(crate) fn from_window_bits(window_bits: i32) -> DataFormat {
+ if window_bits > 0 {
+ DataFormat::Zlib
+ } else {
+ DataFormat::Raw
+ }
+ }
+
+ pub(crate) fn to_window_bits(self) -> i32 {
+ match self {
+ DataFormat::Zlib | DataFormat::ZLibIgnoreChecksum => shared::MZ_DEFAULT_WINDOW_BITS,
+ DataFormat::Raw => -shared::MZ_DEFAULT_WINDOW_BITS,
+ }
+ }
+}
+
+/// `Result` alias for all miniz status codes both successful and failed.
+pub type MZResult = Result<MZStatus, MZError>;
+
+/// A structure containing the result of a call to the inflate or deflate streaming functions.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct StreamResult {
+ /// The number of bytes consumed from the input slice.
+ pub bytes_consumed: usize,
+ /// The number of bytes written to the output slice.
+ pub bytes_written: usize,
+ /// The return status of the call.
+ pub status: MZResult,
+}
+
+impl StreamResult {
+ #[inline]
+ pub(crate) const fn error(error: MZError) -> StreamResult {
+ StreamResult {
+ bytes_consumed: 0,
+ bytes_written: 0,
+ status: Err(error),
+ }
+ }
+}
+
+impl core::convert::From<StreamResult> for MZResult {
+ fn from(res: StreamResult) -> Self {
+ res.status
+ }
+}
+
+impl core::convert::From<&StreamResult> for MZResult {
+ fn from(res: &StreamResult) -> Self {
+ res.status
+ }
+}
diff --git a/vendor/miniz_oxide-0.5.3/src/shared.rs b/vendor/miniz_oxide-0.5.3/src/shared.rs
new file mode 100644
index 000000000..8b81fb112
--- /dev/null
+++ b/vendor/miniz_oxide-0.5.3/src/shared.rs
@@ -0,0 +1,25 @@
+#[doc(hidden)]
+pub const MZ_ADLER32_INIT: u32 = 1;
+
+#[doc(hidden)]
+pub const MZ_DEFAULT_WINDOW_BITS: i32 = 15;
+
+pub const HUFFMAN_LENGTH_ORDER: [u8; 19] = [
+ 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15,
+];
+
+#[doc(hidden)]
+#[cfg(not(feature = "simd"))]
+pub fn update_adler32(adler: u32, data: &[u8]) -> u32 {
+ let mut hash = adler::Adler32::from_checksum(adler);
+ hash.write_slice(data);
+ hash.checksum()
+}
+
+#[doc(hidden)]
+#[cfg(feature = "simd")]
+pub fn update_adler32(adler: u32, data: &[u8]) -> u32 {
+ let mut hash = simd_adler32::Adler32::from_checksum(adler);
+ hash.write(data);
+ hash.finish()
+}
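+
+// Usage sketch: either backend can be fed data incrementally to extend a running checksum, e.g.
+//
+// let mut adler = MZ_ADLER32_INIT;
+// adler = update_adler32(adler, b"Hello, ");
+// adler = update_adler32(adler, b"zlib!");
+//
+// `adler` now holds the adler32 of "Hello, zlib!" (459605011, the value checked in the
+// inflate stream tests).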
diff --git a/vendor/miniz_oxide/.cargo-checksum.json b/vendor/miniz_oxide/.cargo-checksum.json
index 16e89c510..e237f121c 100644
--- a/vendor/miniz_oxide/.cargo-checksum.json
+++ b/vendor/miniz_oxide/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.toml":"14ec71e104b90decb8b84170557138a51cc39a8b83b721d74eb476fb13f6bdd2","LICENSE":"e190940e8ad3cdd4fca962a6508ed6865d589d314b1cb055f86000e124b88d8d","LICENSE-APACHE.md":"0d542e0c8804e39aa7f37eb00da5a762149dc682d7829451287e11b938e94594","LICENSE-MIT.md":"e190940e8ad3cdd4fca962a6508ed6865d589d314b1cb055f86000e124b88d8d","LICENSE-ZLIB.md":"c89bcc058da12a0fb24402b8ea4542a21515dd1da2e8c67bba4ed9bd269f1c96","Readme.md":"b6a6668b073a3356748b642ce51b31233b6408ffcca3e52801ef473a9f7925c7","src/deflate/buffer.rs":"76bcca4e79bef412eeebdd06d2d0a4348ed9ee17edbdaa6d451d8bf03b1cde85","src/deflate/core.rs":"8087c155cb47f57a9747565857dcef59fff0a7a499abbfdb0c60e694d3234db8","src/deflate/mod.rs":"8ade5b9683b8d728fe5e8f5c23e0630165bfdbef3e56a18b1b729f9bbd4a4b1d","src/deflate/stream.rs":"016c82b09a989492c8c8ea89027d339fcf59a5ca2155e7026ac094ca74344712","src/inflate/core.rs":"49bd596d5255ac88b486f6f978ab7b26663cdab01a6ebaa41bf4559f12b0fed8","src/inflate/mod.rs":"690a8cd50a7da88672b750bb2c62d52d2a58efa41e6ddb2084589a17095cb875","src/inflate/output_buffer.rs":"1ae90d03ba8c9d667fe248b6066731774afdf93cc79cd3bf90e0711b963b0b72","src/inflate/stream.rs":"f82c44ffdff054aff05307ed5709e432b54d5997bb4bbfff8f760171c33c76c3","src/lib.rs":"a9d6a889415ffe3d800c8516fb0ac0bae3585010966d1fdf3b06a85330c36854","src/shared.rs":"a8c47fcb566591e39fcd50d44f3b4d0f567318b8ca36c8d732ee0d8c99a14906"},"package":"96590ba8f175222643a85693f33d26e9c8a015f599c216509b1a6894af675d34"} \ No newline at end of file
+{"files":{"Cargo.toml":"1a05b1f1b1cee9093e2d261931b86dcd92057289d5d8cd675381df52e029c797","LICENSE":"e190940e8ad3cdd4fca962a6508ed6865d589d314b1cb055f86000e124b88d8d","LICENSE-APACHE.md":"0d542e0c8804e39aa7f37eb00da5a762149dc682d7829451287e11b938e94594","LICENSE-MIT.md":"e190940e8ad3cdd4fca962a6508ed6865d589d314b1cb055f86000e124b88d8d","LICENSE-ZLIB.md":"c89bcc058da12a0fb24402b8ea4542a21515dd1da2e8c67bba4ed9bd269f1c96","Readme.md":"d9ae0e4192de8809293672397459f90bdb3cc6a6bd92f235edafbb0530181efb","src/deflate/buffer.rs":"76bcca4e79bef412eeebdd06d2d0a4348ed9ee17edbdaa6d451d8bf03b1cde85","src/deflate/core.rs":"8087c155cb47f57a9747565857dcef59fff0a7a499abbfdb0c60e694d3234db8","src/deflate/mod.rs":"8ade5b9683b8d728fe5e8f5c23e0630165bfdbef3e56a18b1b729f9bbd4a4b1d","src/deflate/stream.rs":"016c82b09a989492c8c8ea89027d339fcf59a5ca2155e7026ac094ca74344712","src/inflate/core.rs":"e7b8946db6a56834311b382fa1f8a3aba21a9ca42cf880c4ae1c97f699d22092","src/inflate/mod.rs":"6a6f658cd44c47f1ba402328fb78c27b24b8700a909ddac4c0c472b12046d1fa","src/inflate/output_buffer.rs":"1ae90d03ba8c9d667fe248b6066731774afdf93cc79cd3bf90e0711b963b0b72","src/inflate/stream.rs":"b1d96270d89b549bdc09352bfbd5a4fea24b479c0057c1f50b66b30ce2eb9cc1","src/lib.rs":"7cb7c0ebc61141f0e43a6edca97a22c93ca3b1550893007359007d4dfdeaa883","src/shared.rs":"a8c47fcb566591e39fcd50d44f3b4d0f567318b8ca36c8d732ee0d8c99a14906"},"package":"b275950c28b37e794e8c55d88aeb5e139d0ce23fdbbeda68f8d7174abdf9e8fa"} \ No newline at end of file
diff --git a/vendor/miniz_oxide/Cargo.toml b/vendor/miniz_oxide/Cargo.toml
index 7fa9d3e06..bbb3e9adb 100644
--- a/vendor/miniz_oxide/Cargo.toml
+++ b/vendor/miniz_oxide/Cargo.toml
@@ -12,7 +12,7 @@
[package]
edition = "2018"
name = "miniz_oxide"
-version = "0.5.4"
+version = "0.6.2"
authors = [
"Frommi <daniil.liferenko@gmail.com>",
"oyvindln <oyvindln@users.noreply.github.com>",
@@ -62,7 +62,7 @@ optional = true
default-features = false
[features]
-default = []
+default = ["with-alloc"]
rustc-dep-of-std = [
"core",
"alloc",
@@ -70,3 +70,5 @@ rustc-dep-of-std = [
"adler/rustc-dep-of-std",
]
simd = ["simd-adler32"]
+std = []
+with-alloc = []
diff --git a/vendor/miniz_oxide/Readme.md b/vendor/miniz_oxide/Readme.md
index 0eac176e8..51579f071 100644
--- a/vendor/miniz_oxide/Readme.md
+++ b/vendor/miniz_oxide/Readme.md
@@ -3,9 +3,18 @@
A fully safe, pure rust replacement for the [miniz](https://github.com/richgel999/miniz) DEFLATE/zlib encoder/decoder.
The main intention of this crate is to be used as a back-end for the [flate2](https://github.com/alexcrichton/flate2-rs), but it can also be used on its own. Using flate2 with the ```rust_backend``` feature provides an easy to use streaming API for miniz_oxide.
-The library is fully [no_std](https://docs.rust-embedded.org/book/intro/no-std.html), though it requires the use of the `alloc` and `collection` crates as it allocates memory.
+The library is fully [no_std](https://docs.rust-embedded.org/book/intro/no-std.html). By default, the `with-alloc` feature is enabled, which requires the use of the `alloc` and `collection` crates as it allocates memory.
-miniz_oxide 0.5.x Requires at least rust 1.40.0 0.3.x requires at least rust 0.36.0.
+The `std` feature additionally enables functionality that is only available when not building as `no_std`. Currently this only means implementing [Error](https://doc.rust-lang.org/stable/std/error/trait.Error.html) for the `DecompressError` error struct returned by the simple decompression functions, if enabled together with `with-alloc`.
+
+Using the library with `default-features = false` removes the dependency on `alloc`
+and `collection` crates, making it suitable for systems without an allocator.
+Running without allocation reduces crate functionality:
+
+- The `deflate` module is removed completely
+- Some `inflate` functions which return a `Vec` are removed (a slice-based sketch is shown below)
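+
+A minimal sketch of decompressing without `alloc`, using the slice-based API (the output buffer
+size here is an assumption; it must be large enough to hold the whole decompressed data):
+
+```rust
+use miniz_oxide::inflate::decompress_slice_iter_to_slice;
+
+fn inflate_into(out: &mut [u8], zlib_compressed: &[u8]) -> Option<usize> {
+    decompress_slice_iter_to_slice(out, core::iter::once(zlib_compressed), true, false).ok()
+}
+```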
+
+miniz_oxide 0.5.x and 0.6.x require at least rust 1.40.0; 0.3.x requires at least rust 1.36.0.
miniz_oxide features no use of unsafe code.
@@ -21,8 +30,8 @@ use miniz_oxide::inflate::decompress_to_vec;
fn roundtrip(data: &[u8]) {
// Compress the input
let compressed = compress_to_vec(data, 6);
- // Decompress the compressed input
- let decompressed = decompress_to_vec(compressed.as_slice()).expect("Failed to decompress!");
+ // Decompress the compressed input and limit max output size to avoid going out of memory on large/malformed input.
+ let decompressed = decompress_to_vec_with_limit(compressed.as_slice(), 60000).expect("Failed to decompress!");
// Check roundtrip succeeded
assert_eq!(data, decompressed);
}
@@ -32,4 +41,4 @@ fn main() {
}
```
-These simple functions will do everything in one go and are thus not recommended for use cases where the input size may be large or unknown, for that use case consider using miniz_oxide via flate2 or the low-level streaming functions instead.
+These simple functions will do everything in one go and are thus not recommended for use cases outside of prototyping/testing, as real-world data can have any size and thus result in very large memory allocations for the output `Vec`. Consider using miniz_oxide via [flate2](https://github.com/alexcrichton/flate2-rs), which makes it easy to do streaming (de)compression, or the low-level streaming functions instead.
diff --git a/vendor/miniz_oxide/src/inflate/core.rs b/vendor/miniz_oxide/src/inflate/core.rs
index 38bdacbbd..630e5e6fd 100644
--- a/vendor/miniz_oxide/src/inflate/core.rs
+++ b/vendor/miniz_oxide/src/inflate/core.rs
@@ -108,7 +108,7 @@ const HUFFLEN_TABLE: usize = 2;
pub mod inflate_flags {
/// Should we try to parse a zlib header?
///
- /// If unset, [`decompress()`] will expect an RFC1951 deflate stream. If set, it will expect an
+ /// If unset, the function will expect an RFC1951 deflate stream. If set, it will expect an
/// RFC1950 zlib wrapper around the deflate stream.
pub const TINFL_FLAG_PARSE_ZLIB_HEADER: u32 = 1;
@@ -1456,7 +1456,8 @@ pub fn decompress(
// Mask the top bits since they may contain length info.
l.counter &= 511;
- if l.counter == 256 {
+ if l.counter
+ == 256 {
// We hit the end of block symbol.
Action::Jump(BlockDone)
} else if l.counter > 285 {
diff --git a/vendor/miniz_oxide/src/inflate/mod.rs b/vendor/miniz_oxide/src/inflate/mod.rs
index 03b9dc988..bb19e379c 100644
--- a/vendor/miniz_oxide/src/inflate/mod.rs
+++ b/vendor/miniz_oxide/src/inflate/mod.rs
@@ -1,10 +1,10 @@
//! This module contains functionality for decompression.
-use crate::alloc::boxed::Box;
-use crate::alloc::vec;
-use crate::alloc::vec::Vec;
-use ::core::cmp::min;
+#[cfg(feature = "with-alloc")]
+use crate::alloc::{boxed::Box, vec, vec::Vec};
use ::core::usize;
+#[cfg(all(feature = "std", feature = "with-alloc"))]
+use std::error::Error;
pub mod core;
mod output_buffer;
@@ -78,19 +78,63 @@ impl TINFLStatus {
}
}
+/// Struct returned when the decompress_to_vec functions fail.
+#[cfg(feature = "with-alloc")]
+#[derive(Debug)]
+pub struct DecompressError {
+ /// Decompressor status on failure. See [TINFLStatus] for details.
+ pub status: TINFLStatus,
+ /// The currently decompressed data if any.
+ pub output: Vec<u8>,
+}
+
+#[cfg(feature = "with-alloc")]
+impl alloc::fmt::Display for DecompressError {
+ fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result {
+ f.write_str(match self.status {
+ TINFLStatus::FailedCannotMakeProgress => "Truncated input stream",
+ TINFLStatus::BadParam => "Invalid output buffer size",
+ TINFLStatus::Adler32Mismatch => "Adler32 checksum mismatch",
+ TINFLStatus::Failed => "Invalid input data",
+ TINFLStatus::Done => unreachable!(),
+ TINFLStatus::NeedsMoreInput => "Truncated input stream",
+ TINFLStatus::HasMoreOutput => "Output size exceeded the specified limit",
+ })
+ }
+}
+
+/// Implement Error trait only if std feature is requested as it requires std.
+#[cfg(all(feature = "std", feature = "with-alloc"))]
+impl Error for DecompressError {}
+
+#[cfg(feature = "with-alloc")]
+fn decompress_error(status: TINFLStatus, output: Vec<u8>) -> Result<Vec<u8>, DecompressError> {
+ Err(DecompressError { status, output })
+}
+
/// Decompress the deflate-encoded data in `input` to a vector.
///
-/// Returns a tuple of the [`Vec`] of decompressed data and the [status result][TINFLStatus].
+/// NOTE: This function will not bound the output, so if the output is large enough it can result in an out of memory error.
+/// It is therefore suggested not to use this for anything other than test programs; use the functions with a specified limit, or
+/// ideally streaming decompression via the [flate2](https://github.com/alexcrichton/flate2-rs) library instead.
+///
+/// Returns a [`Result`] containing the [`Vec`] of decompressed data on success, and a [struct][DecompressError] containing the status and so far decompressed data if any on failure.
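+///
+/// # Example
+///
+/// A minimal sketch; `[3, 0]` is a tiny self-terminating deflate stream that decodes to empty
+/// output:
+///
+/// ```
+/// use miniz_oxide::inflate::decompress_to_vec;
+///
+/// match decompress_to_vec(&[3, 0]) {
+///     Ok(data) => assert!(data.is_empty()),
+///     // On failure the error carries both the status and any partially decompressed data.
+///     Err(e) => panic!("decompression failed: {} ({} bytes recovered)", e, e.output.len()),
+/// }
+/// ```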
#[inline]
-pub fn decompress_to_vec(input: &[u8]) -> Result<Vec<u8>, TINFLStatus> {
+#[cfg(feature = "with-alloc")]
+pub fn decompress_to_vec(input: &[u8]) -> Result<Vec<u8>, DecompressError> {
decompress_to_vec_inner(input, 0, usize::max_value())
}
/// Decompress the deflate-encoded data (with a zlib wrapper) in `input` to a vector.
///
-/// Returns a tuple of the [`Vec`] of decompressed data and the [status result][TINFLStatus].
+/// NOTE: This function will not bound the output, so if the output is large enough it can result in an out of memory error.
+/// It is therefore suggested not to use this for anything other than test programs; use the functions with a specified limit, or
+/// ideally streaming decompression via the [flate2](https://github.com/alexcrichton/flate2-rs) library instead.
+///
+/// Returns a [`Result`] containing the [`Vec`] of decompressed data on success, and a [struct][DecompressError] containing the status and so far decompressed data if any on failure.
#[inline]
-pub fn decompress_to_vec_zlib(input: &[u8]) -> Result<Vec<u8>, TINFLStatus> {
+#[cfg(feature = "with-alloc")]
+pub fn decompress_to_vec_zlib(input: &[u8]) -> Result<Vec<u8>, DecompressError> {
decompress_to_vec_inner(
input,
inflate_flags::TINFL_FLAG_PARSE_ZLIB_HEADER,
@@ -99,38 +143,51 @@ pub fn decompress_to_vec_zlib(input: &[u8]) -> Result<Vec<u8>, TINFLStatus> {
}
/// Decompress the deflate-encoded data in `input` to a vector.
+///
/// The vector is grown to at most `max_size` bytes; if the data does not fit in that size,
-/// [`TINFLStatus::HasMoreOutput`] error is returned.
+/// the error [struct][DecompressError] will contain the status [`TINFLStatus::HasMoreOutput`] and the data that was decompressed on failure.
+///
+/// As this function tries to decompress everything in one go, it's not ideal for general use outside of tests or where the output size is expected to be small.
+/// It is suggested to use streaming decompression via the [flate2](https://github.com/alexcrichton/flate2-rs) library instead.
///
-/// Returns a tuple of the [`Vec`] of decompressed data and the [status result][TINFLStatus].
+/// Returns a [`Result`] containing the [`Vec`] of decompressed data on success, and a [struct][DecompressError] on failure.
#[inline]
-pub fn decompress_to_vec_with_limit(input: &[u8], max_size: usize) -> Result<Vec<u8>, TINFLStatus> {
+#[cfg(feature = "with-alloc")]
+pub fn decompress_to_vec_with_limit(
+ input: &[u8],
+ max_size: usize,
+) -> Result<Vec<u8>, DecompressError> {
decompress_to_vec_inner(input, 0, max_size)
}
/// Decompress the deflate-encoded data (with a zlib wrapper) in `input` to a vector.
/// The vector is grown to at most `max_size` bytes; if the data does not fit in that size,
-/// [`TINFLStatus::HasMoreOutput`] error is returned.
+/// the error [struct][DecompressError] will contain the status [`TINFLStatus::HasMoreOutput`] and the data that was decompressed on failure.
+///
+/// As this function tries to decompress everything in one go, it's not ideal for general use outside of tests or where the output size is expected to be small.
+/// It is suggested to use streaming decompression via the [flate2](https://github.com/alexcrichton/flate2-rs) library instead.
///
-/// Returns a tuple of the [`Vec`] of decompressed data and the [status result][TINFLStatus].
+/// Returns a [`Result`] containing the [`Vec`] of decompressed data on success, and a [struct][DecompressError] on failure.
#[inline]
+#[cfg(feature = "with-alloc")]
pub fn decompress_to_vec_zlib_with_limit(
input: &[u8],
max_size: usize,
-) -> Result<Vec<u8>, TINFLStatus> {
+) -> Result<Vec<u8>, DecompressError> {
decompress_to_vec_inner(input, inflate_flags::TINFL_FLAG_PARSE_ZLIB_HEADER, max_size)
}
/// Backend of various to-[`Vec`] decompressions.
///
-/// Returns a tuple of the [`Vec`] of decompressed data and the [status result][TINFLStatus].
+/// Returns [`Vec`] of decompressed data on success and the [error struct][DecompressError] with details on failure.
+#[cfg(feature = "with-alloc")]
fn decompress_to_vec_inner(
input: &[u8],
flags: u32,
max_output_size: usize,
-) -> Result<Vec<u8>, TINFLStatus> {
+) -> Result<Vec<u8>, DecompressError> {
let flags = flags | inflate_flags::TINFL_FLAG_USING_NON_WRAPPING_OUTPUT_BUF;
- let mut ret: Vec<u8> = vec![0; min(input.len().saturating_mul(2), max_output_size)];
+ let mut ret: Vec<u8> = vec![0; input.len().saturating_mul(2).min(max_output_size)];
let mut decomp = Box::<DecompressorOxide>::default();
@@ -153,14 +210,14 @@ fn decompress_to_vec_inner(
TINFLStatus::HasMoreOutput => {
// if the buffer has already reached the size limit, return an error
if ret.len() >= max_output_size {
- return Err(TINFLStatus::HasMoreOutput);
+ return decompress_error(TINFLStatus::HasMoreOutput, ret);
}
// calculate the new length, capped at `max_output_size`
let new_len = ret.len().saturating_mul(2).min(max_output_size);
ret.resize(new_len, 0);
}
- _ => return Err(status),
+ _ => return decompress_error(status, ret),
}
}
}
@@ -221,7 +278,7 @@ pub fn decompress_slice_iter_to_slice<'out, 'inp>(
mod test {
use super::{
decompress_slice_iter_to_slice, decompress_to_vec_zlib, decompress_to_vec_zlib_with_limit,
- TINFLStatus,
+ DecompressError, TINFLStatus,
};
const ENCODED: [u8; 20] = [
120, 156, 243, 72, 205, 201, 201, 215, 81, 168, 202, 201, 76, 82, 4, 0, 27, 101, 4, 19,
@@ -243,7 +300,10 @@ mod test {
fn fail_to_decompress_with_limit() {
let res = decompress_to_vec_zlib_with_limit(&ENCODED[..], 8);
match res {
- Err(TINFLStatus::HasMoreOutput) => (), // expected result
+ Err(DecompressError {
+ status: TINFLStatus::HasMoreOutput,
+ ..
+ }) => (), // expected result
_ => panic!("Decompression output size limit was not enforced"),
}
}
diff --git a/vendor/miniz_oxide/src/inflate/stream.rs b/vendor/miniz_oxide/src/inflate/stream.rs
index 715747166..ee681b67b 100644
--- a/vendor/miniz_oxide/src/inflate/stream.rs
+++ b/vendor/miniz_oxide/src/inflate/stream.rs
@@ -1,6 +1,7 @@
//! Extra streaming decompression functionality.
//!
//! As of now this is mainly intended for use to build a higher-level wrapper.
+#[cfg(feature = "with-alloc")]
use crate::alloc::boxed::Box;
use core::{cmp, mem};
@@ -115,6 +116,7 @@ impl InflateState {
/// # Parameters
/// `data_format`: Determines whether the compressed data is assumed to wrapped with zlib
/// metadata.
+ #[cfg(feature = "with-alloc")]
pub fn new_boxed(data_format: DataFormat) -> Box<InflateState> {
let mut b: Box<InflateState> = Box::default();
b.data_format = data_format;
@@ -136,6 +138,7 @@ impl InflateState {
/// The decompressor does not support different window sizes. As such,
/// any positive (>0) value will set the zlib header flag, while a negative one
/// will not.
+ #[cfg(feature = "with-alloc")]
pub fn new_boxed_with_window_bits(window_bits: i32) -> Box<InflateState> {
let mut b: Box<InflateState> = Box::default();
b.data_format = DataFormat::from_window_bits(window_bits);
diff --git a/vendor/miniz_oxide/src/lib.rs b/vendor/miniz_oxide/src/lib.rs
index 8357c5200..fd64932b0 100644
--- a/vendor/miniz_oxide/src/lib.rs
+++ b/vendor/miniz_oxide/src/lib.rs
@@ -22,10 +22,12 @@
//! ```
#![forbid(unsafe_code)]
-#![no_std]
+#![cfg_attr(not(feature = "std"), no_std)]
+#[cfg(feature = "with-alloc")]
extern crate alloc;
+#[cfg(feature = "with-alloc")]
pub mod deflate;
pub mod inflate;
mod shared;
@@ -154,7 +156,7 @@ pub enum DataFormat {
}
impl DataFormat {
- pub(crate) fn from_window_bits(window_bits: i32) -> DataFormat {
+ pub fn from_window_bits(window_bits: i32) -> DataFormat {
if window_bits > 0 {
DataFormat::Zlib
} else {
@@ -162,7 +164,7 @@ impl DataFormat {
}
}
- pub(crate) fn to_window_bits(self) -> i32 {
+ pub fn to_window_bits(self) -> i32 {
match self {
DataFormat::Zlib | DataFormat::ZLibIgnoreChecksum => shared::MZ_DEFAULT_WINDOW_BITS,
DataFormat::Raw => -shared::MZ_DEFAULT_WINDOW_BITS,
@@ -186,7 +188,7 @@ pub struct StreamResult {
impl StreamResult {
#[inline]
- pub(crate) const fn error(error: MZError) -> StreamResult {
+ pub const fn error(error: MZError) -> StreamResult {
StreamResult {
bytes_consumed: 0,
bytes_written: 0,
diff --git a/vendor/nu-ansi-term/.cargo-checksum.json b/vendor/nu-ansi-term/.cargo-checksum.json
new file mode 100644
index 000000000..c60ea220b
--- /dev/null
+++ b/vendor/nu-ansi-term/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"Cargo.lock":"e8baa9206eac2b73eb3113774c173fc7901755e546d856968bc7e58754a933c9","Cargo.toml":"9d27d7698b15616188d02e77f724750e79906eedb77e5c4ef53b5b5179517011","LICENCE":"cfdb7491dfa8800a1d561957d6a072ad63a6305ffa48386d04eae8eac8d65e8a","README.md":"5c4b8a36f0d7d8faa41084d3b7506cf63cfe8a21fc307ff0721ab4bd9b2d79c9","examples/256_colors.rs":"11caedd6910b1364ed993d6e025b68bae848de495c06feab748ec3a5ed1f8eda","examples/basic_colors.rs":"70bae755aff194fd1dcc080dcb07df386347e391d09ea009e15ff2349189b43a","examples/gradient_colors.rs":"8f907b2f69a9e28bd62d2e459723c6c305e32caf398643e78a579b64a275a23a","examples/rgb_colors.rs":"10067365199a5832de093e199bb925e18f8b53f82ede7188f121532620af2bf9","src/ansi.rs":"cf1adf5347cc4497479f7e72f8382288bc2ea8dd8b40e5844d3113f775a22135","src/debug.rs":"ab14e28dc68d04263bf70b1bc79681ae3951fd5b836583d267d43e2a9799bb38","src/difference.rs":"a71797564e7d152c2c67f1c2a04f1fee6cea0a2a44babd91bb3e60463f89f9ca","src/display.rs":"9f4aeb98033a3854f59e6e4d03b6d6aebb487f6d7dda80c2b131bb6cdf14e79f","src/gradient.rs":"f5b4e2a90ea4fdfa63286fd353d6624cd8255826bac26e1e463f867b0f21874e","src/lib.rs":"a2a2b4cfadb0983dd18347b946cb3bdccf1eb343cf58cbbf5e9f573c7313b691","src/rgb.rs":"68ade941086450f2c86ba6697d6847b1b745cf4014aeb583b17d9e9a09ae20eb","src/style.rs":"4a0ec456a384b15340f65845f33ad989c955289d30858de169ce802089f8b51f","src/util.rs":"4a80587db74309c17d583cb72d08d2326a8550f59ce1543003ca8acacbd9a1dc","src/windows.rs":"4a869d522c1b087936260d1b5e4f8ed824a774b8db8bb8bb136d1a8ba460d089","src/write.rs":"b104f6b6d5e20002399065c7ed1f9e9b44b006ec72f73a6ad847bfab68f8f09a"},"package":"77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84"} \ No newline at end of file
diff --git a/vendor/nu-ansi-term/Cargo.lock b/vendor/nu-ansi-term/Cargo.lock
new file mode 100644
index 000000000..b29bf12b5
--- /dev/null
+++ b/vendor/nu-ansi-term/Cargo.lock
@@ -0,0 +1,159 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "aho-corasick"
+version = "0.7.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "doc-comment"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10"
+
+[[package]]
+name = "itoa"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1aab8fc367588b89dcee83ab0fd66b72b50b72fa1904d7095045ace2b0c81c35"
+
+[[package]]
+name = "memchr"
+version = "2.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a"
+
+[[package]]
+name = "nu-ansi-term"
+version = "0.46.0"
+dependencies = [
+ "doc-comment",
+ "overload",
+ "regex",
+ "serde",
+ "serde_json",
+ "winapi",
+]
+
+[[package]]
+name = "overload"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
+
+[[package]]
+name = "proc-macro2"
+version = "1.0.36"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c7342d5883fbccae1cc37a2353b09c87c9b0f3afd73f5fb9bba687a1f733b029"
+dependencies = [
+ "unicode-xid",
+]
+
+[[package]]
+name = "quote"
+version = "1.0.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "864d3e96a899863136fc6e99f3d7cae289dafe43bf2c5ac19b70df7210c0a145"
+dependencies = [
+ "proc-macro2",
+]
+
+[[package]]
+name = "regex"
+version = "1.5.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1a11647b6b25ff05a515cb92c365cec08801e83423a235b51e231e1808747286"
+dependencies = [
+ "aho-corasick",
+ "memchr",
+ "regex-syntax",
+]
+
+[[package]]
+name = "regex-syntax"
+version = "0.6.25"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b"
+
+[[package]]
+name = "ryu"
+version = "1.0.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "73b4b750c782965c211b42f022f59af1fbceabdd026623714f104152f1ec149f"
+
+[[package]]
+name = "serde"
+version = "1.0.136"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ce31e24b01e1e524df96f1c2fdd054405f8d7376249a5110886fb4b658484789"
+dependencies = [
+ "serde_derive",
+]
+
+[[package]]
+name = "serde_derive"
+version = "1.0.136"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "08597e7152fcd306f41838ed3e37be9eaeed2b61c42e2117266a554fab4662f9"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "serde_json"
+version = "1.0.79"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8e8d9fa5c3b304765ce1fd9c4c8a3de2c8db365a5b91be52f186efc675681d95"
+dependencies = [
+ "itoa",
+ "ryu",
+ "serde",
+]
+
+[[package]]
+name = "syn"
+version = "1.0.87"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e59d925cf59d8151f25a3bedf97c9c157597c9df7324d32d68991cc399ed08b"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-xid",
+]
+
+[[package]]
+name = "unicode-xid"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3"
+
+[[package]]
+name = "winapi"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
+dependencies = [
+ "winapi-i686-pc-windows-gnu",
+ "winapi-x86_64-pc-windows-gnu",
+]
+
+[[package]]
+name = "winapi-i686-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
+
+[[package]]
+name = "winapi-x86_64-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
diff --git a/vendor/nu-ansi-term/Cargo.toml b/vendor/nu-ansi-term/Cargo.toml
new file mode 100644
index 000000000..209e05511
--- /dev/null
+++ b/vendor/nu-ansi-term/Cargo.toml
@@ -0,0 +1,57 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies.
+#
+# If you are reading this file be aware that the original Cargo.toml
+# will likely look very different (and much more reasonable).
+# See Cargo.toml.orig for the original contents.
+
+[package]
+edition = "2018"
+name = "nu-ansi-term"
+version = "0.46.0"
+authors = [
+ "ogham@bsago.me",
+ "Ryan Scheel (Havvy) <ryan.havvy@gmail.com>",
+ "Josh Triplett <josh@joshtriplett.org>",
+ "The Nushell Project Developers",
+]
+description = "Library for ANSI terminal colors and styles (bold, underline)"
+license = "MIT"
+repository = "https://github.com/nushell/nu-ansi-term"
+
+[lib]
+doctest = true
+
+[dependencies.overload]
+version = "0.1.1"
+
+[dependencies.serde]
+version = "1.0.90"
+features = ["derive"]
+optional = true
+
+[dev-dependencies.doc-comment]
+version = "0.3"
+
+[dev-dependencies.regex]
+version = "1.1.9"
+
+[dev-dependencies.serde_json]
+version = "1.0.39"
+
+[features]
+derive_serde_style = ["serde"]
+
+[target."cfg(target_os=\"windows\")".dependencies.winapi]
+version = "0.3.4"
+features = [
+ "consoleapi",
+ "errhandlingapi",
+ "fileapi",
+ "handleapi",
+ "processenv",
+]
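The `winapi` target dependency above is what backs console handling on Windows; as the README below notes, a Windows 10 caller has to switch ANSI processing on before printing escape codes. A minimal sketch (the `cfg(windows)` guard is an assumption here, since this diff only shows the call itself):

```rust
use nu_ansi_term::Color::Green;

fn main() {
    // enable_ansi_support() is only meaningful (and, assumed here, only
    // compiled) on Windows, so gate the call; the result is ignored for brevity.
    #[cfg(windows)]
    let _ = nu_ansi_term::enable_ansi_support();

    println!("{}", Green.paint("ANSI output enabled"));
}
```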
diff --git a/vendor/ansi_term/LICENCE b/vendor/nu-ansi-term/LICENCE
index 3228cc99b..f392dfc93 100644
--- a/vendor/ansi_term/LICENCE
+++ b/vendor/nu-ansi-term/LICENCE
@@ -1,6 +1,7 @@
The MIT License (MIT)
Copyright (c) 2014 Benjamin Sago
+Copyright (c) 2021-2022 The Nushell Project Developers
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
diff --git a/vendor/ansi_term/README.md b/vendor/nu-ansi-term/README.md
index 30d52ab5f..51c6cc5f6 100644
--- a/vendor/ansi_term/README.md
+++ b/vendor/nu-ansi-term/README.md
@@ -1,47 +1,47 @@
-# rust-ansi-term [![ansi-term on crates.io](http://meritbadge.herokuapp.com/ansi-term)](https://crates.io/crates/ansi_term) [![Build status](https://img.shields.io/travis/ogham/rust-ansi-term/master.svg?style=flat)](https://travis-ci.org/ogham/rust-ansi-term) [![Build status](https://img.shields.io/appveyor/ci/ogham/rust-ansi-term/master.svg?style=flat&logo=AppVeyor&logoColor=silver)](https://ci.appveyor.com/project/ogham/rust-ansi-term) [![Coverage status](https://coveralls.io/repos/ogham/rust-ansi-term/badge.svg?branch=master&service=github)](https://coveralls.io/github/ogham/rust-ansi-term?branch=master)
+# nu-ansi-term
-This is a library for controlling colours and formatting, such as red bold text or blue underlined text, on ANSI terminals.
+> This is a copy of rust-ansi-term, but with `Colour` changed to `Color` and with light foreground colors (90-97) as well as light background colors (100-107) added.
-### [View the Rustdoc](https://docs.rs/ansi_term/)
+This is a library for controlling colors and formatting, such as red bold text or blue underlined text, on ANSI terminals.
+## [View the Rustdoc](https://docs.rs/nu_ansi_term/)
-# Installation
+## Installation
This crate works with [Cargo](http://crates.io). Add the following to your `Cargo.toml` dependencies section:
```toml
[dependencies]
-ansi_term = "0.12"
+nu_ansi_term = "0.46"
```
-
## Basic usage
-There are three main types in this crate that you need to be concerned with: `ANSIString`, `Style`, and `Colour`.
+There are three main types in this crate that you need to be concerned with: `AnsiString`, `Style`, and `Color`.
-A `Style` holds stylistic information: foreground and background colours, whether the text should be bold, or blinking, or other properties.
-The `Colour` enum represents the available colours.
-And an `ANSIString` is a string paired with a `Style`.
+A `Style` holds stylistic information: foreground and background colors, whether the text should be bold, or blinking, or other properties.
+The `Color` enum represents the available colors.
+And an `AnsiString` is a string paired with a `Style`.
-`Color` is also available as an alias to `Colour`.
+This crate uses the spelling `Color` throughout, in place of ansi_term's `Colour`.
-To format a string, call the `paint` method on a `Style` or a `Colour`, passing in the string you want to format as the argument.
+To format a string, call the `paint` method on a `Style` or a `Color`, passing in the string you want to format as the argument.
For example, here’s how to get some red text:
```rust
-use ansi_term::Colour::Red;
+use nu_ansi_term::Color::Red;
println!("This is in red: {}", Red.paint("a red string"));
```
-It’s important to note that the `paint` method does *not* actually return a string with the ANSI control characters surrounding it.
-Instead, it returns an `ANSIString` value that has a `Display` implementation that, when formatted, returns the characters.
+It’s important to note that the `paint` method does _not_ actually return a string with the ANSI control characters surrounding it.
+Instead, it returns an `AnsiString` value that has a `Display` implementation that, when formatted, returns the characters.
This allows strings to be printed with a minimum of `String` allocations being performed behind the scenes.
-If you *do* want to get at the escape codes, then you can convert the `ANSIString` to a string as you would any other `Display` value:
+If you _do_ want to get at the escape codes, then you can convert the `AnsiString` to a string as you would any other `Display` value:
```rust
-use ansi_term::Colour::Red;
+use nu_ansi_term::Color::Red;
let red_string = Red.paint("a red string").to_string();
```
@@ -49,28 +49,28 @@ let red_string = Red.paint("a red string").to_string();
**Note for Windows 10 users:** On Windows 10, the application must enable ANSI support first:
```rust,ignore
-let enabled = ansi_term::enable_ansi_support();
+let enabled = nu_ansi_term::enable_ansi_support();
```
## Bold, underline, background, and other styles
-For anything more complex than plain foreground colour changes, you need to construct `Style` values themselves, rather than beginning with a `Colour`.
+For anything more complex than plain foreground color changes, you need to construct `Style` values themselves, rather than beginning with a `Color`.
You can do this by chaining methods based on a new `Style`, created with `Style::new()`.
Each method creates a new style that has that specific property set.
For example:
```rust
-use ansi_term::Style;
+use nu_ansi_term::Style;
println!("How about some {} and {}?",
Style::new().bold().paint("bold"),
Style::new().underline().paint("underline"));
```
-For brevity, these methods have also been implemented for `Colour` values, so you can give your styles a foreground colour without having to begin with an empty `Style` value:
+For brevity, these methods have also been implemented for `Color` values, so you can give your styles a foreground color without having to begin with an empty `Style` value:
```rust
-use ansi_term::Colour::{Blue, Yellow};
+use nu_ansi_term::Color::{Blue, Yellow};
println!("Demonstrating {} and {}!",
Blue.bold().paint("blue bold"),
@@ -80,103 +80,102 @@ println!("Yellow on blue: {}", Yellow.on(Blue).paint("wow!"));
```
The complete list of styles you can use are:
-`bold`, `dimmed`, `italic`, `underline`, `blink`, `reverse`, `hidden`, and `on` for background colours.
+`bold`, `dimmed`, `italic`, `underline`, `blink`, `reverse`, `hidden`, and `on` for background colors.
-In some cases, you may find it easier to change the foreground on an existing `Style` rather than starting from the appropriate `Colour`.
+In some cases, you may find it easier to change the foreground on an existing `Style` rather than starting from the appropriate `Color`.
You can do this using the `fg` method:
```rust
-use ansi_term::Style;
-use ansi_term::Colour::{Blue, Cyan, Yellow};
+use nu_ansi_term::Style;
+use nu_ansi_term::Color::{Blue, Cyan, Yellow};
println!("Yellow on blue: {}", Style::new().on(Blue).fg(Yellow).paint("yow!"));
println!("Also yellow on blue: {}", Cyan.on(Blue).fg(Yellow).paint("zow!"));
```
-You can turn a `Colour` into a `Style` with the `normal` method.
-This will produce the exact same `ANSIString` as if you just used the `paint` method on the `Colour` directly, but it’s useful in certain cases: for example, you may have a method that returns `Styles`, and need to represent both the “red bold” and “red, but not bold” styles with values of the same type. The `Style` struct also has a `Default` implementation if you want to have a style with *nothing* set.
+You can turn a `Color` into a `Style` with the `normal` method.
+This will produce the exact same `AnsiString` as if you just used the `paint` method on the `Color` directly, but it’s useful in certain cases: for example, you may have a method that returns `Styles`, and need to represent both the “red bold” and “red, but not bold” styles with values of the same type. The `Style` struct also has a `Default` implementation if you want to have a style with _nothing_ set.
```rust
-use ansi_term::Style;
-use ansi_term::Colour::Red;
+use nu_ansi_term::Style;
+use nu_ansi_term::Color::Red;
Red.normal().paint("yet another red string");
Style::default().paint("a completely regular string");
```
+## Extended colors
-## Extended colours
-
-You can access the extended range of 256 colours by using the `Colour::Fixed` variant, which takes an argument of the colour number to use.
-This can be included wherever you would use a `Colour`:
+You can access the extended range of 256 colors by using the `Color::Fixed` variant, which takes an argument of the color number to use.
+This can be included wherever you would use a `Color`:
```rust
-use ansi_term::Colour::Fixed;
+use nu_ansi_term::Color::Fixed;
Fixed(134).paint("A sort of light purple");
Fixed(221).on(Fixed(124)).paint("Mustard in the ketchup");
```
-The first sixteen of these values are the same as the normal and bold standard colour variants.
-There’s nothing stopping you from using these as `Fixed` colours instead, but there’s nothing to be gained by doing so either.
+The first sixteen of these values are the same as the normal and bold standard color variants.
+There’s nothing stopping you from using these as `Fixed` colors instead, but there’s nothing to be gained by doing so either.
-You can also access full 24-bit colour by using the `Colour::RGB` variant, which takes separate `u8` arguments for red, green, and blue:
+You can also access full 24-bit color by using the `Color::RGB` variant, which takes separate `u8` arguments for red, green, and blue:
```rust
-use ansi_term::Colour::RGB;
+use nu_ansi_term::Color::RGB;
RGB(70, 130, 180).paint("Steel blue");
```
## Combining successive coloured strings
-The benefit of writing ANSI escape codes to the terminal is that they *stack*: you do not need to end every coloured string with a reset code if the text that follows it is of a similar style.
+The benefit of writing ANSI escape codes to the terminal is that they _stack_: you do not need to end every coloured string with a reset code if the text that follows it is of a similar style.
For example, if you want to have some blue text followed by some blue bold text, it’s possible to send the ANSI code for blue, followed by the ANSI code for bold, and finishing with a reset code without having to have an extra one between the two strings.
This crate can optimise the ANSI codes that get printed in situations like this, making life easier for your terminal renderer.
-The `ANSIStrings` struct takes a slice of several `ANSIString` values, and will iterate over each of them, printing only the codes for the styles that need to be updated as part of its formatting routine.
+The `AnsiStrings` struct takes a slice of several `AnsiString` values, and will iterate over each of them, printing only the codes for the styles that need to be updated as part of its formatting routine.
The following code snippet uses this to enclose a binary number displayed in red bold text inside some red, but not bold, brackets:
```rust
-use ansi_term::Colour::Red;
-use ansi_term::{ANSIString, ANSIStrings};
+use nu_ansi_term::Color::Red;
+use nu_ansi_term::{AnsiString, AnsiStrings};
let some_value = format!("{:b}", 42);
-let strings: &[ANSIString<'static>] = &[
+let strings: &[AnsiString<'static>] = &[
Red.paint("["),
Red.bold().paint(some_value),
Red.paint("]"),
];
-println!("Value: {}", ANSIStrings(strings));
+println!("Value: {}", AnsiStrings(strings));
```
There are several things to note here.
-Firstly, the `paint` method can take *either* an owned `String` or a borrowed `&str`.
-Internally, an `ANSIString` holds a copy-on-write (`Cow`) string value to deal with both owned and borrowed strings at the same time.
+Firstly, the `paint` method can take _either_ an owned `String` or a borrowed `&str`.
+Internally, an `AnsiString` holds a copy-on-write (`Cow`) string value to deal with both owned and borrowed strings at the same time.
This is used here to display a `String`, the result of the `format!` call, using the same mechanism as some statically-available `&str` slices.
-Secondly, that the `ANSIStrings` value works in the same way as its singular counterpart, with a `Display` implementation that only performs the formatting when required.
+Secondly, that the `AnsiStrings` value works in the same way as its singular counterpart, with a `Display` implementation that only performs the formatting when required.
## Byte strings
This library also supports formatting `[u8]` byte strings; this supports applications working with text in an unknown encoding.
-`Style` and `Colour` support painting `[u8]` values, resulting in an `ANSIByteString`.
+`Style` and `Color` support painting `[u8]` values, resulting in an `AnsiByteString`.
This type does not implement `Display`, as it may not contain UTF-8, but it does provide a method `write_to` to write the result to any value that implements `Write`:
```rust
-use ansi_term::Colour::Green;
+use nu_ansi_term::Color::Green;
Green.paint("user data".as_bytes()).write_to(&mut std::io::stdout()).unwrap();
```
-Similarly, the type `ANSIByteStrings` supports writing a list of `ANSIByteString` values with minimal escape sequences:
+Similarly, the type `AnsiByteStrings` supports writing a list of `AnsiByteString` values with minimal escape sequences:
```rust
-use ansi_term::Colour::Green;
-use ansi_term::ANSIByteStrings;
+use nu_ansi_term::Color::Green;
+use nu_ansi_term::AnsiByteStrings;
-ANSIByteStrings(&[
+AnsiByteStrings(&[
Green.paint("user data 1\n".as_bytes()),
Green.bold().paint("user data 2\n".as_bytes()),
]).write_to(&mut std::io::stdout()).unwrap();
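Taken together, the README changes above amount to a rename for callers: the crate path changes, `Colour` becomes `Color`, and the `ANSI*` types become `Ansi*`. A minimal sketch using only items shown in the README above:

```rust
// Previously: use ansi_term::Colour::Red; use ansi_term::{ANSIString, ANSIStrings};
use nu_ansi_term::Color::Red;
use nu_ansi_term::{AnsiString, AnsiStrings};

fn main() {
    // Same call shapes as before, only the names differ.
    let parts: &[AnsiString<'static>] = &[
        Red.paint("["),
        Red.bold().paint("42"),
        Red.paint("]"),
    ];
    println!("Value: {}", AnsiStrings(parts));
}
```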
diff --git a/vendor/ansi_term/examples/256_colours.rs b/vendor/nu-ansi-term/examples/256_colors.rs
index 92fe2f1c1..4766dcdb6 100644
--- a/vendor/ansi_term/examples/256_colours.rs
+++ b/vendor/nu-ansi-term/examples/256_colors.rs
@@ -1,27 +1,26 @@
-extern crate ansi_term;
-use ansi_term::Colour;
+extern crate nu_ansi_term;
+use nu_ansi_term::Color;
-// This example prints out the 256 colours.
+// This example prints out the 256 colors.
// They're arranged like this:
//
-// - 0 to 8 are the eight standard colours.
-// - 9 to 15 are the eight bold colours.
-// - 16 to 231 are six blocks of six-by-six colour squares.
+// - 0 to 7 are the eight standard colors.
+// - 8 to 15 are the eight bold colors.
+// - 16 to 231 are six blocks of six-by-six color squares.
// - 232 to 255 are shades of grey.
fn main() {
-
// First two lines
for c in 0..8 {
glow(c, c != 0);
print!(" ");
}
- print!("\n");
+ println!();
for c in 8..16 {
glow(c, c != 8);
print!(" ");
}
- print!("\n\n");
+ println!("\n");
// Six lines of the first three squares
for row in 0..6 {
@@ -34,9 +33,9 @@ fn main() {
print!(" ");
}
- print!("\n");
+ println!();
}
- print!("\n");
+ println!();
// Six more lines of the other three squares
for row in 0..6 {
@@ -49,25 +48,25 @@ fn main() {
print!(" ");
}
- print!("\n");
+ println!();
}
- print!("\n");
+ println!();
// The last greyscale lines
for c in 232..=243 {
glow(c, false);
print!(" ");
}
- print!("\n");
+ println!();
for c in 244..=255 {
glow(c, true);
print!(" ");
}
- print!("\n");
+ println!();
}
fn glow(c: u8, light_bg: bool) {
- let base = if light_bg { Colour::Black } else { Colour::White };
- let style = base.on(Colour::Fixed(c));
+ let base = if light_bg { Color::Black } else { Color::White };
+ let style = base.on(Color::Fixed(c));
print!("{}", style.paint(&format!(" {:3} ", c)));
}
diff --git a/vendor/nu-ansi-term/examples/basic_colors.rs b/vendor/nu-ansi-term/examples/basic_colors.rs
new file mode 100644
index 000000000..3c2b6817f
--- /dev/null
+++ b/vendor/nu-ansi-term/examples/basic_colors.rs
@@ -0,0 +1,18 @@
+extern crate nu_ansi_term;
+use nu_ansi_term::{Color::*, Style};
+
+// This example prints out the 16 basic colors.
+
+fn main() {
+ let normal = Style::default();
+
+ println!("{} {}", normal.paint("Normal"), normal.bold().paint("bold"));
+ println!("{} {}", Black.paint("Black"), Black.bold().paint("bold"));
+ println!("{} {}", Red.paint("Red"), Red.bold().paint("bold"));
+ println!("{} {}", Green.paint("Green"), Green.bold().paint("bold"));
+ println!("{} {}", Yellow.paint("Yellow"), Yellow.bold().paint("bold"));
+ println!("{} {}", Blue.paint("Blue"), Blue.bold().paint("bold"));
+ println!("{} {}", Purple.paint("Purple"), Purple.bold().paint("bold"));
+ println!("{} {}", Cyan.paint("Cyan"), Cyan.bold().paint("bold"));
+ println!("{} {}", White.paint("White"), White.bold().paint("bold"));
+}
diff --git a/vendor/nu-ansi-term/examples/gradient_colors.rs b/vendor/nu-ansi-term/examples/gradient_colors.rs
new file mode 100644
index 000000000..1c9583865
--- /dev/null
+++ b/vendor/nu-ansi-term/examples/gradient_colors.rs
@@ -0,0 +1,37 @@
+use nu_ansi_term::{build_all_gradient_text, Color, Gradient, Rgb, TargetGround};
+
+fn main() {
+ let text = "lorem ipsum quia dolor sit amet, consectetur, adipisci velit";
+
+ // a gradient from hex colors
+ let start = Rgb::from_hex(0x40c9ff);
+ let end = Rgb::from_hex(0xe81cff);
+ let grad0 = Gradient::new(start, end);
+
+ // a gradient from color::rgb()
+ let start = Color::Rgb(64, 201, 255);
+ let end = Color::Rgb(232, 28, 255);
+ let gradient = Gradient::from_color_rgb(start, end);
+
+ // a slightly different gradient
+ let start2 = Color::Rgb(128, 64, 255);
+ let end2 = Color::Rgb(0, 28, 255);
+ let gradient2 = Gradient::from_color_rgb(start2, end2);
+
+ // reverse the gradient
+ let gradient3 = gradient.reverse();
+
+ let build_fg = gradient.build(text, TargetGround::Foreground);
+ println!("{}", build_fg);
+ let build_bg = gradient.build(text, TargetGround::Background);
+ println!("{}", build_bg);
+ let bgt = build_all_gradient_text(text, gradient, gradient2);
+ println!("{}", bgt);
+ let bgt2 = build_all_gradient_text(text, gradient, gradient3);
+ println!("{}", bgt2);
+
+ println!(
+ "{}",
+ grad0.build("nushell is awesome", TargetGround::Foreground)
+ );
+}
diff --git a/vendor/nu-ansi-term/examples/rgb_colors.rs b/vendor/nu-ansi-term/examples/rgb_colors.rs
new file mode 100644
index 000000000..4657d401f
--- /dev/null
+++ b/vendor/nu-ansi-term/examples/rgb_colors.rs
@@ -0,0 +1,23 @@
+extern crate nu_ansi_term;
+use nu_ansi_term::{Color, Style};
+
+// This example prints out a color gradient in a grid by calculating each
+// character’s red, green, and blue components, and using 24-bit color codes
+// to display them.
+
+const WIDTH: i32 = 80;
+const HEIGHT: i32 = 24;
+
+fn main() {
+ for row in 0..HEIGHT {
+ for col in 0..WIDTH {
+ let r = (row * 255 / HEIGHT) as u8;
+ let g = (col * 255 / WIDTH) as u8;
+ let b = 128;
+
+ print!("{}", Style::default().on(Color::Rgb(r, g, b)).paint(" "));
+ }
+
+ println!();
+ }
+}
diff --git a/vendor/ansi_term/src/ansi.rs b/vendor/nu-ansi-term/src/ansi.rs
index aaf215234..8f393fcdc 100644
--- a/vendor/ansi_term/src/ansi.rs
+++ b/vendor/nu-ansi-term/src/ansi.rs
@@ -1,17 +1,11 @@
-use style::{Colour, Style};
-
+#![allow(missing_docs)]
+use crate::style::{Color, Style};
+use crate::write::AnyWrite;
use std::fmt;
-use write::AnyWrite;
-
-
-// ---- generating ANSI codes ----
-
impl Style {
-
/// Write any bytes that go *before* a piece of text to the given writer.
fn write_prefix<W: AnyWrite + ?Sized>(&self, f: &mut W) -> Result<(), W::Error> {
-
// If there are actually no styles here, then don’t write *any* codes
// as the prefix. An empty ANSI code may not affect the terminal
// output at all, but a user may just want a code-free string.
@@ -26,33 +20,55 @@ impl Style {
{
let mut write_char = |c| {
- if written_anything { write!(f, ";")?; }
+ if written_anything {
+ write!(f, ";")?;
+ }
written_anything = true;
write!(f, "{}", c)?;
Ok(())
};
- if self.is_bold { write_char('1')? }
- if self.is_dimmed { write_char('2')? }
- if self.is_italic { write_char('3')? }
- if self.is_underline { write_char('4')? }
- if self.is_blink { write_char('5')? }
- if self.is_reverse { write_char('7')? }
- if self.is_hidden { write_char('8')? }
- if self.is_strikethrough { write_char('9')? }
+ if self.is_bold {
+ write_char('1')?
+ }
+ if self.is_dimmed {
+ write_char('2')?
+ }
+ if self.is_italic {
+ write_char('3')?
+ }
+ if self.is_underline {
+ write_char('4')?
+ }
+ if self.is_blink {
+ write_char('5')?
+ }
+ if self.is_reverse {
+ write_char('7')?
+ }
+ if self.is_hidden {
+ write_char('8')?
+ }
+ if self.is_strikethrough {
+ write_char('9')?
+ }
}
- // The foreground and background colours, if specified, need to be
+ // The foreground and background colors, if specified, need to be
// handled specially because the number codes are more complicated.
// (see `write_background_code` and `write_foreground_code`)
if let Some(bg) = self.background {
- if written_anything { write!(f, ";")?; }
+ if written_anything {
+ write!(f, ";")?;
+ }
written_anything = true;
bg.write_background_code(f)?;
}
if let Some(fg) = self.foreground {
- if written_anything { write!(f, ";")?; }
+ if written_anything {
+ write!(f, ";")?;
+ }
fg.write_foreground_code(f)?;
}
@@ -66,53 +82,70 @@ impl Style {
fn write_suffix<W: AnyWrite + ?Sized>(&self, f: &mut W) -> Result<(), W::Error> {
if self.is_plain() {
Ok(())
- }
- else {
+ } else {
write!(f, "{}", RESET)
}
}
}
-
/// The code to send to reset all styles and return to `Style::default()`.
pub static RESET: &str = "\x1B[0m";
-
-
-impl Colour {
+impl Color {
fn write_foreground_code<W: AnyWrite + ?Sized>(&self, f: &mut W) -> Result<(), W::Error> {
- match *self {
- Colour::Black => write!(f, "30"),
- Colour::Red => write!(f, "31"),
- Colour::Green => write!(f, "32"),
- Colour::Yellow => write!(f, "33"),
- Colour::Blue => write!(f, "34"),
- Colour::Purple => write!(f, "35"),
- Colour::Cyan => write!(f, "36"),
- Colour::White => write!(f, "37"),
- Colour::Fixed(num) => write!(f, "38;5;{}", &num),
- Colour::RGB(r,g,b) => write!(f, "38;2;{};{};{}", &r, &g, &b),
+ match self {
+ Color::Black => write!(f, "30"),
+ Color::Red => write!(f, "31"),
+ Color::Green => write!(f, "32"),
+ Color::Yellow => write!(f, "33"),
+ Color::Blue => write!(f, "34"),
+ Color::Purple => write!(f, "35"),
+ Color::Magenta => write!(f, "35"),
+ Color::Cyan => write!(f, "36"),
+ Color::White => write!(f, "37"),
+ Color::Fixed(num) => write!(f, "38;5;{}", num),
+ Color::Rgb(r, g, b) => write!(f, "38;2;{};{};{}", r, g, b),
+ Color::Default => write!(f, "39"),
+ Color::DarkGray => write!(f, "90"),
+ Color::LightRed => write!(f, "91"),
+ Color::LightGreen => write!(f, "92"),
+ Color::LightYellow => write!(f, "93"),
+ Color::LightBlue => write!(f, "94"),
+ Color::LightPurple => write!(f, "95"),
+ Color::LightMagenta => write!(f, "95"),
+ Color::LightCyan => write!(f, "96"),
+ Color::LightGray => write!(f, "97"),
}
}
fn write_background_code<W: AnyWrite + ?Sized>(&self, f: &mut W) -> Result<(), W::Error> {
- match *self {
- Colour::Black => write!(f, "40"),
- Colour::Red => write!(f, "41"),
- Colour::Green => write!(f, "42"),
- Colour::Yellow => write!(f, "43"),
- Colour::Blue => write!(f, "44"),
- Colour::Purple => write!(f, "45"),
- Colour::Cyan => write!(f, "46"),
- Colour::White => write!(f, "47"),
- Colour::Fixed(num) => write!(f, "48;5;{}", &num),
- Colour::RGB(r,g,b) => write!(f, "48;2;{};{};{}", &r, &g, &b),
+ match self {
+ Color::Black => write!(f, "40"),
+ Color::Red => write!(f, "41"),
+ Color::Green => write!(f, "42"),
+ Color::Yellow => write!(f, "43"),
+ Color::Blue => write!(f, "44"),
+ Color::Purple => write!(f, "45"),
+ Color::Magenta => write!(f, "45"),
+ Color::Cyan => write!(f, "46"),
+ Color::White => write!(f, "47"),
+ Color::Fixed(num) => write!(f, "48;5;{}", num),
+ Color::Rgb(r, g, b) => write!(f, "48;2;{};{};{}", r, g, b),
+ Color::Default => write!(f, "49"),
+ Color::DarkGray => write!(f, "100"),
+ Color::LightRed => write!(f, "101"),
+ Color::LightGreen => write!(f, "102"),
+ Color::LightYellow => write!(f, "103"),
+ Color::LightBlue => write!(f, "104"),
+ Color::LightPurple => write!(f, "105"),
+ Color::LightMagenta => write!(f, "105"),
+ Color::LightCyan => write!(f, "106"),
+ Color::LightGray => write!(f, "107"),
}
}
}
-
-/// Like `ANSIString`, but only displays the style prefix.
+/// Like `AnsiString`, but only displays the style prefix.
///
/// This type implements the `Display` trait, meaning it can be written to a
/// `std::fmt` formatting without doing any extra allocation, and written to a
@@ -121,7 +154,7 @@ impl Colour {
#[derive(Clone, Copy, Debug)]
pub struct Prefix(Style);
-/// Like `ANSIString`, but only displays the difference between two
+/// Like `AnsiString`, but only displays the difference between two
/// styles.
///
/// This type implements the `Display` trait, meaning it can be written to a
@@ -131,7 +164,7 @@ pub struct Prefix(Style);
#[derive(Clone, Copy, Debug)]
pub struct Infix(Style, Style);
-/// Like `ANSIString`, but only displays the style suffix.
+/// Like `AnsiString`, but only displays the style suffix.
///
/// This type implements the `Display` trait, meaning it can be written to a
/// `std::fmt` formatting without doing any extra allocation, and written to a
@@ -140,16 +173,14 @@ pub struct Infix(Style, Style);
#[derive(Clone, Copy, Debug)]
pub struct Suffix(Style);
-
impl Style {
-
/// The prefix bytes for this style. These are the bytes that tell the
- /// terminal to use a different colour or font style.
+ /// terminal to use a different color or font style.
///
/// # Examples
///
/// ```
- /// use ansi_term::{Style, Colour::Blue};
+ /// use nu_ansi_term::{Style, Color::Blue};
///
/// let style = Style::default().bold();
/// assert_eq!("\x1b[1m",
@@ -169,12 +200,12 @@ impl Style {
/// The infix bytes between this style and `next` style. These are the bytes
/// that tell the terminal to change the style to `next`. These may include
- /// a reset followed by the next colour and style, depending on the two styles.
+ /// a reset followed by the next color and style, depending on the two styles.
///
/// # Examples
///
/// ```
- /// use ansi_term::{Style, Colour::Green};
+ /// use nu_ansi_term::{Style, Color::Green};
///
/// let style = Style::default().bold();
/// assert_eq!("\x1b[32m",
@@ -193,12 +224,12 @@ impl Style {
}
/// The suffix for this style. These are the bytes that tell the terminal
- /// to reset back to its normal colour and font style.
+ /// to reset back to its normal color and font style.
///
/// # Examples
///
/// ```
- /// use ansi_term::{Style, Colour::Green};
+ /// use nu_ansi_term::{Style, Color::Green};
///
/// let style = Style::default().bold();
/// assert_eq!("\x1b[0m",
@@ -217,18 +248,16 @@ impl Style {
}
}
-
-impl Colour {
-
- /// The prefix bytes for this colour as a `Style`. These are the bytes
- /// that tell the terminal to use a different colour or font style.
+impl Color {
+ /// The prefix bytes for this color as a `Style`. These are the bytes
+ /// that tell the terminal to use a different color or font style.
///
/// See also [`Style::prefix`](struct.Style.html#method.prefix).
///
/// # Examples
///
/// ```
- /// use ansi_term::Colour::Green;
+ /// use nu_ansi_term::Color::Green;
///
/// assert_eq!("\x1b[0m",
/// Green.suffix().to_string());
@@ -237,33 +266,33 @@ impl Colour {
Prefix(self.normal())
}
- /// The infix bytes between this colour and `next` colour. These are the bytes
- /// that tell the terminal to use the `next` colour, or to do nothing if
- /// the two colours are equal.
+ /// The infix bytes between this color and `next` color. These are the bytes
+ /// that tell the terminal to use the `next` color, or to do nothing if
+ /// the two colors are equal.
///
/// See also [`Style::infix`](struct.Style.html#method.infix).
///
/// # Examples
///
/// ```
- /// use ansi_term::Colour::{Red, Yellow};
+ /// use nu_ansi_term::Color::{Red, Yellow};
///
/// assert_eq!("\x1b[33m",
/// Red.infix(Yellow).to_string());
/// ```
- pub fn infix(self, next: Colour) -> Infix {
+ pub fn infix(self, next: Color) -> Infix {
Infix(self.normal(), next.normal())
}
- /// The suffix for this colour as a `Style`. These are the bytes that
- /// tell the terminal to reset back to its normal colour and font style.
+ /// The suffix for this color as a `Style`. These are the bytes that
+ /// tell the terminal to reset back to its normal color and font style.
///
/// See also [`Style::suffix`](struct.Style.html#method.suffix).
///
/// # Examples
///
/// ```
- /// use ansi_term::Colour::Purple;
+ /// use nu_ansi_term::Color::Purple;
///
/// assert_eq!("\x1b[0m",
/// Purple.suffix().to_string());
@@ -273,49 +302,44 @@ impl Colour {
}
}
-
impl fmt::Display for Prefix {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- let f: &mut fmt::Write = f;
+ let f: &mut dyn fmt::Write = f;
self.0.write_prefix(f)
}
}
-
impl fmt::Display for Infix {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- use difference::Difference;
+ use crate::difference::Difference;
match Difference::between(&self.0, &self.1) {
Difference::ExtraStyles(style) => {
- let f: &mut fmt::Write = f;
+ let f: &mut dyn fmt::Write = f;
style.write_prefix(f)
- },
+ }
Difference::Reset => {
- let f: &mut fmt::Write = f;
+ let f: &mut dyn fmt::Write = f;
write!(f, "{}{}", RESET, self.1.prefix())
- },
- Difference::NoDifference => {
- Ok(()) // nothing to write
- },
+ }
+ Difference::Empty => {
+ Ok(()) // nothing to write
+ }
}
}
}
-
impl fmt::Display for Suffix {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- let f: &mut fmt::Write = f;
+ let f: &mut dyn fmt::Write = f;
self.0.write_suffix(f)
}
}
-
-
#[cfg(test)]
mod test {
- use style::Style;
- use style::Colour::*;
+ use crate::style::Color::*;
+ use crate::style::Style;
macro_rules! test {
($name: ident: $style: expr; $input: expr => $result: expr) => {
@@ -341,6 +365,8 @@ mod test {
test!(purple_on_white: Purple.on(White); "hi" => "\x1B[47;35mhi\x1B[0m");
test!(purple_on_white_2: Purple.normal().on(White); "hi" => "\x1B[47;35mhi\x1B[0m");
test!(yellow_on_blue: Style::new().on(Blue).fg(Yellow); "hi" => "\x1B[44;33mhi\x1B[0m");
+ test!(magenta_on_white: Magenta.on(White); "hi" => "\x1B[47;35mhi\x1B[0m");
+ test!(magenta_on_white_2: Magenta.normal().on(White); "hi" => "\x1B[47;35mhi\x1B[0m");
test!(yellow_on_blue_2: Cyan.on(Blue).fg(Yellow); "hi" => "\x1B[44;33mhi\x1B[0m");
test!(cyan_bold_on_white: Cyan.bold().on(White); "hi" => "\x1B[1;47;36mhi\x1B[0m");
test!(cyan_ul_on_white: Cyan.underline().on(White); "hi" => "\x1B[4;47;36mhi\x1B[0m");
@@ -349,10 +375,10 @@ mod test {
test!(fixed: Fixed(100); "hi" => "\x1B[38;5;100mhi\x1B[0m");
test!(fixed_on_purple: Fixed(100).on(Purple); "hi" => "\x1B[45;38;5;100mhi\x1B[0m");
test!(fixed_on_fixed: Fixed(100).on(Fixed(200)); "hi" => "\x1B[48;5;200;38;5;100mhi\x1B[0m");
- test!(rgb: RGB(70,130,180); "hi" => "\x1B[38;2;70;130;180mhi\x1B[0m");
- test!(rgb_on_blue: RGB(70,130,180).on(Blue); "hi" => "\x1B[44;38;2;70;130;180mhi\x1B[0m");
- test!(blue_on_rgb: Blue.on(RGB(70,130,180)); "hi" => "\x1B[48;2;70;130;180;34mhi\x1B[0m");
- test!(rgb_on_rgb: RGB(70,130,180).on(RGB(5,10,15)); "hi" => "\x1B[48;2;5;10;15;38;2;70;130;180mhi\x1B[0m");
+ test!(rgb: Rgb(70,130,180); "hi" => "\x1B[38;2;70;130;180mhi\x1B[0m");
+ test!(rgb_on_blue: Rgb(70,130,180).on(Blue); "hi" => "\x1B[44;38;2;70;130;180mhi\x1B[0m");
+ test!(blue_on_rgb: Blue.on(Rgb(70,130,180)); "hi" => "\x1B[48;2;70;130;180;34mhi\x1B[0m");
+ test!(rgb_on_rgb: Rgb(70,130,180).on(Rgb(5,10,15)); "hi" => "\x1B[48;2;5;10;15;38;2;70;130;180mhi\x1B[0m");
test!(bold: Style::new().bold(); "hi" => "\x1B[1mhi\x1B[0m");
test!(underline: Style::new().underline(); "hi" => "\x1B[4mhi\x1B[0m");
test!(bunderline: Style::new().bold().underline(); "hi" => "\x1B[1;4mhi\x1B[0m");
@@ -362,11 +388,18 @@ mod test {
test!(reverse: Style::new().reverse(); "hi" => "\x1B[7mhi\x1B[0m");
test!(hidden: Style::new().hidden(); "hi" => "\x1B[8mhi\x1B[0m");
test!(stricken: Style::new().strikethrough(); "hi" => "\x1B[9mhi\x1B[0m");
+ test!(lr_on_lr: LightRed.on(LightRed); "hi" => "\x1B[101;91mhi\x1B[0m");
#[test]
fn test_infix() {
- assert_eq!(Style::new().dimmed().infix(Style::new()).to_string(), "\x1B[0m");
- assert_eq!(White.dimmed().infix(White.normal()).to_string(), "\x1B[0m\x1B[37m");
+ assert_eq!(
+ Style::new().dimmed().infix(Style::new()).to_string(),
+ "\x1B[0m"
+ );
+ assert_eq!(
+ White.dimmed().infix(White.normal()).to_string(),
+ "\x1B[0m\x1B[37m"
+ );
assert_eq!(White.normal().infix(White.bold()).to_string(), "\x1B[1m");
assert_eq!(White.normal().infix(Blue.normal()).to_string(), "\x1B[34m");
assert_eq!(Blue.bold().infix(Blue.bold()).to_string(), "");
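The `Color` variants added in the match arms above map onto the 90-97 (foreground) and 100-107 (background) SGR codes; a short sketch checking a few of them against the same byte strings the `lr_on_lr` test uses:

```rust
use nu_ansi_term::Color::{DarkGray, LightRed};

fn main() {
    // Light foreground colors sit in the 90-97 range, light backgrounds in 100-107.
    assert_eq!(LightRed.paint("hi").to_string(), "\x1B[91mhi\x1B[0m");
    assert_eq!(
        LightRed.on(LightRed).paint("hi").to_string(),
        "\x1B[101;91mhi\x1B[0m"
    );
    // DarkGray is 90 as a foreground (and 100 as a background).
    assert_eq!(DarkGray.paint("hi").to_string(), "\x1B[90mhi\x1B[0m");
}
```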
diff --git a/vendor/ansi_term/src/debug.rs b/vendor/nu-ansi-term/src/debug.rs
index 4877323ff..1dcde52be 100644
--- a/vendor/ansi_term/src/debug.rs
+++ b/vendor/nu-ansi-term/src/debug.rs
@@ -1,67 +1,86 @@
+use crate::style::Style;
use std::fmt;
-use style::Style;
-
/// Styles have a special `Debug` implementation that only shows the fields that
/// are set. Fields that haven’t been touched aren’t included in the output.
///
/// This behaviour gets bypassed when using the alternate formatting mode
/// `format!("{:#?}")`.
///
-/// use ansi_term::Colour::{Red, Blue};
+/// use nu_ansi_term::Color::{Red, Blue};
/// assert_eq!("Style { fg(Red), on(Blue), bold, italic }",
/// format!("{:?}", Red.on(Blue).bold().italic()));
impl fmt::Debug for Style {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
if fmt.alternate() {
fmt.debug_struct("Style")
- .field("foreground", &self.foreground)
- .field("background", &self.background)
- .field("blink", &self.is_blink)
- .field("bold", &self.is_bold)
- .field("dimmed", &self.is_dimmed)
- .field("hidden", &self.is_hidden)
- .field("italic", &self.is_italic)
- .field("reverse", &self.is_reverse)
- .field("strikethrough", &self.is_strikethrough)
- .field("underline", &self.is_underline)
- .finish()
- }
- else if self.is_plain() {
+ .field("foreground", &self.foreground)
+ .field("background", &self.background)
+ .field("blink", &self.is_blink)
+ .field("bold", &self.is_bold)
+ .field("dimmed", &self.is_dimmed)
+ .field("hidden", &self.is_hidden)
+ .field("italic", &self.is_italic)
+ .field("reverse", &self.is_reverse)
+ .field("strikethrough", &self.is_strikethrough)
+ .field("underline", &self.is_underline)
+ .finish()
+ } else if self.is_plain() {
fmt.write_str("Style {}")
- }
- else {
+ } else {
fmt.write_str("Style { ")?;
let mut written_anything = false;
if let Some(fg) = self.foreground {
- if written_anything { fmt.write_str(", ")? }
+ if written_anything {
+ fmt.write_str(", ")?
+ }
written_anything = true;
write!(fmt, "fg({:?})", fg)?
}
if let Some(bg) = self.background {
- if written_anything { fmt.write_str(", ")? }
+ if written_anything {
+ fmt.write_str(", ")?
+ }
written_anything = true;
write!(fmt, "on({:?})", bg)?
}
{
let mut write_flag = |name| {
- if written_anything { fmt.write_str(", ")? }
+ if written_anything {
+ fmt.write_str(", ")?
+ }
written_anything = true;
fmt.write_str(name)
};
- if self.is_blink { write_flag("blink")? }
- if self.is_bold { write_flag("bold")? }
- if self.is_dimmed { write_flag("dimmed")? }
- if self.is_hidden { write_flag("hidden")? }
- if self.is_italic { write_flag("italic")? }
- if self.is_reverse { write_flag("reverse")? }
- if self.is_strikethrough { write_flag("strikethrough")? }
- if self.is_underline { write_flag("underline")? }
+ if self.is_blink {
+ write_flag("blink")?
+ }
+ if self.is_bold {
+ write_flag("bold")?
+ }
+ if self.is_dimmed {
+ write_flag("dimmed")?
+ }
+ if self.is_hidden {
+ write_flag("hidden")?
+ }
+ if self.is_italic {
+ write_flag("italic")?
+ }
+ if self.is_reverse {
+ write_flag("reverse")?
+ }
+ if self.is_strikethrough {
+ write_flag("strikethrough")?
+ }
+ if self.is_underline {
+ write_flag("underline")?
+ }
}
write!(fmt, " }}")
@@ -69,11 +88,10 @@ impl fmt::Debug for Style {
}
}
-
#[cfg(test)]
mod test {
- use style::Colour::*;
- use style::Style;
+ use crate::style::Color::*;
+ use crate::style::Style;
fn style() -> Style {
Style::new()
@@ -94,7 +112,7 @@ mod test {
test!(both: style().bold().italic() => "Style { bold, italic }");
test!(red: Red.normal() => "Style { fg(Red) }");
- test!(redblue: Red.normal().on(RGB(3, 2, 4)) => "Style { fg(Red), on(RGB(3, 2, 4)) }");
+ test!(redblue: Red.normal().on(Rgb(3, 2, 4)) => "Style { fg(Red), on(Rgb(3, 2, 4)) }");
test!(everything:
Red.on(Blue).blink().bold().dimmed().hidden().italic().reverse().strikethrough().underline() =>
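The compact `Debug` form tested above is reachable from caller code too; a sketch mirroring the `redblue` case (assuming the `test!` macro simply compares against `format!("{:?}", ..)`):

```rust
use nu_ansi_term::Color::{Red, Rgb};

fn main() {
    // Only the fields that are actually set show up in the non-alternate form.
    assert_eq!(
        format!("{:?}", Red.normal().on(Rgb(3, 2, 4))),
        "Style { fg(Red), on(Rgb(3, 2, 4)) }"
    );
}
```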
diff --git a/vendor/ansi_term/src/difference.rs b/vendor/nu-ansi-term/src/difference.rs
index b0de07f7e..beee8ea25 100644
--- a/vendor/ansi_term/src/difference.rs
+++ b/vendor/nu-ansi-term/src/difference.rs
@@ -1,11 +1,9 @@
use super::Style;
-
-/// When printing out one coloured string followed by another, use one of
+/// When printing out one colored string followed by another, use one of
/// these rules to figure out which *extra* control codes need to be sent.
#[derive(PartialEq, Clone, Copy, Debug)]
pub enum Difference {
-
/// Print out the control codes specified by this style to end up looking
/// like the second string's styles.
ExtraStyles(Style),
@@ -16,19 +14,17 @@ pub enum Difference {
/// The before style is exactly the same as the after style, so no further
/// control codes need to be printed.
- NoDifference,
+ Empty,
}
-
impl Difference {
-
/// Compute the 'style difference' required to turn an existing style into
/// the given, second style.
///
/// For example, to turn green text into green bold text, it's redundant
/// to write a reset command then a second green+bold command, instead of
/// just writing one bold command. This method should see that both styles
- /// use the foreground colour green, and reduce it to a single command.
+ /// use the foreground color green, and reduce it to a single command.
///
/// This method returns an enum value because it's not actually always
/// possible to turn one style into another: for example, text could be
@@ -44,7 +40,7 @@ impl Difference {
// it commented out for now, and defaulting to Reset.
if first == next {
- return NoDifference;
+ return Empty;
}
// Cannot un-bold, so must Reset.
@@ -137,13 +133,12 @@ impl Difference {
}
}
-
#[cfg(test)]
mod test {
- use super::*;
use super::Difference::*;
- use style::Colour::*;
- use style::Style;
+ use super::*;
+ use crate::style::Color::*;
+ use crate::style::Style;
fn style() -> Style {
Style::new()
@@ -158,12 +153,12 @@ mod test {
};
}
- test!(nothing: Green.normal(); Green.normal() => NoDifference);
+ test!(nothing: Green.normal(); Green.normal() => Empty);
test!(uppercase: Green.normal(); Green.bold() => ExtraStyles(style().bold()));
test!(lowercase: Green.bold(); Green.normal() => Reset);
- test!(nothing2: Green.bold(); Green.bold() => NoDifference);
+ test!(nothing2: Green.bold(); Green.bold() => Empty);
- test!(colour_change: Red.normal(); Blue.normal() => ExtraStyles(Blue.normal()));
+ test!(color_change: Red.normal(); Blue.normal() => ExtraStyles(Blue.normal()));
test!(addition_of_blink: style(); style().blink() => ExtraStyles(style().blink()));
test!(addition_of_dimmed: style(); style().dimmed() => ExtraStyles(style().dimmed()));
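The renamed `Empty` case is easiest to observe indirectly through `Style::infix`, which writes nothing when the two styles already match; a sketch restating three of the `test_infix` assertions from `ansi.rs` above:

```rust
use nu_ansi_term::Color::{Blue, White};

fn main() {
    // Identical styles: no difference, so no escape codes at all.
    assert_eq!(Blue.bold().infix(Blue.bold()).to_string(), "");
    // Only the color changes: a single color code suffices.
    assert_eq!(White.normal().infix(Blue.normal()).to_string(), "\x1B[34m");
    // Adding bold is additive as well.
    assert_eq!(White.normal().infix(White.bold()).to_string(), "\x1B[1m");
}
```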
diff --git a/vendor/ansi_term/src/display.rs b/vendor/nu-ansi-term/src/display.rs
index 17c54f008..bed934cb3 100644
--- a/vendor/ansi_term/src/display.rs
+++ b/vendor/nu-ansi-term/src/display.rs
@@ -1,40 +1,40 @@
+use crate::ansi::RESET;
+use crate::difference::Difference;
+use crate::style::{Color, Style};
+use crate::write::AnyWrite;
use std::borrow::Cow;
use std::fmt;
use std::io;
-use std::ops::Deref;
-use ansi::RESET;
-use difference::Difference;
-use style::{Style, Colour};
-use write::AnyWrite;
-
-
-/// An `ANSIGenericString` includes a generic string type and a `Style` to
-/// display that string. `ANSIString` and `ANSIByteString` are aliases for
+/// An `AnsiGenericString` includes a generic string type and a `Style` to
+/// display that string. `AnsiString` and `AnsiByteString` are aliases for
/// this type on `str` and `\[u8]`, respectively.
#[derive(PartialEq, Debug)]
-pub struct ANSIGenericString<'a, S: 'a + ToOwned + ?Sized>
-where <S as ToOwned>::Owned: fmt::Debug {
- style: Style,
- string: Cow<'a, S>,
+pub struct AnsiGenericString<'a, S: 'a + ToOwned + ?Sized>
+where
+ <S as ToOwned>::Owned: fmt::Debug,
+{
+ pub(crate) style: Style,
+ pub(crate) string: Cow<'a, S>,
}
-
-/// Cloning an `ANSIGenericString` will clone its underlying string.
+/// Cloning an `AnsiGenericString` will clone its underlying string.
///
/// # Examples
///
/// ```
-/// use ansi_term::ANSIString;
+/// use nu_ansi_term::AnsiString;
///
-/// let plain_string = ANSIString::from("a plain string");
+/// let plain_string = AnsiString::from("a plain string");
/// let clone_string = plain_string.clone();
/// assert_eq!(clone_string, plain_string);
/// ```
-impl<'a, S: 'a + ToOwned + ?Sized> Clone for ANSIGenericString<'a, S>
-where <S as ToOwned>::Owned: fmt::Debug {
- fn clone(&self) -> ANSIGenericString<'a, S> {
- ANSIGenericString {
+impl<'a, S: 'a + ToOwned + ?Sized> Clone for AnsiGenericString<'a, S>
+where
+ <S as ToOwned>::Owned: fmt::Debug,
+{
+ fn clone(&self) -> AnsiGenericString<'a, S> {
+ AnsiGenericString {
style: self.style,
string: self.string.clone(),
}
@@ -56,14 +56,12 @@ where <S as ToOwned>::Owned: fmt::Debug {
// that used it:
//
// #[derive(PartialEq, Debug, Clone, Default)]
-// pub struct TextCellContents(Vec<ANSIString<'static>>);
+// pub struct TextCellContents(Vec<AnsiString<'static>>);
// ^^^^^^^^^^^^^^^^^^^^^^^^^
// error[E0277]: the trait `std::clone::Clone` is not implemented for `str`
//
// The hand-written impl above can ignore that constraint and still compile.
-
-
/// An ANSI String is a string coupled with the `Style` to display it
/// in a terminal.
///
@@ -73,39 +71,41 @@ where <S as ToOwned>::Owned: fmt::Debug {
/// # Examples
///
/// ```
-/// use ansi_term::ANSIString;
-/// use ansi_term::Colour::Red;
+/// use nu_ansi_term::AnsiString;
+/// use nu_ansi_term::Color::Red;
///
/// let red_string = Red.paint("a red string");
/// println!("{}", red_string);
/// ```
///
/// ```
-/// use ansi_term::ANSIString;
+/// use nu_ansi_term::AnsiString;
///
-/// let plain_string = ANSIString::from("a plain string");
-/// assert_eq!(&*plain_string, "a plain string");
+/// let plain_string = AnsiString::from("a plain string");
/// ```
-pub type ANSIString<'a> = ANSIGenericString<'a, str>;
-
-/// An `ANSIByteString` represents a formatted series of bytes. Use
-/// `ANSIByteString` when styling text with an unknown encoding.
-pub type ANSIByteString<'a> = ANSIGenericString<'a, [u8]>;
-
-impl<'a, I, S: 'a + ToOwned + ?Sized> From<I> for ANSIGenericString<'a, S>
-where I: Into<Cow<'a, S>>,
- <S as ToOwned>::Owned: fmt::Debug {
- fn from(input: I) -> ANSIGenericString<'a, S> {
- ANSIGenericString {
+pub type AnsiString<'a> = AnsiGenericString<'a, str>;
+
+/// An `AnsiByteString` represents a formatted series of bytes. Use
+/// `AnsiByteString` when styling text with an unknown encoding.
+pub type AnsiByteString<'a> = AnsiGenericString<'a, [u8]>;
+
+impl<'a, I, S: 'a + ToOwned + ?Sized> From<I> for AnsiGenericString<'a, S>
+where
+ I: Into<Cow<'a, S>>,
+ <S as ToOwned>::Owned: fmt::Debug,
+{
+ fn from(input: I) -> AnsiGenericString<'a, S> {
+ AnsiGenericString {
string: input.into(),
- style: Style::default(),
+ style: Style::default(),
}
}
}
-impl<'a, S: 'a + ToOwned + ?Sized> ANSIGenericString<'a, S>
- where <S as ToOwned>::Owned: fmt::Debug {
-
+impl<'a, S: 'a + ToOwned + ?Sized> AnsiGenericString<'a, S>
+where
+ <S as ToOwned>::Owned: fmt::Debug,
+{
/// Directly access the style
pub fn style_ref(&self) -> &Style {
&self.style
@@ -117,133 +117,128 @@ impl<'a, S: 'a + ToOwned + ?Sized> ANSIGenericString<'a, S>
}
}
-impl<'a, S: 'a + ToOwned + ?Sized> Deref for ANSIGenericString<'a, S>
-where <S as ToOwned>::Owned: fmt::Debug {
- type Target = S;
-
- fn deref(&self) -> &S {
- self.string.deref()
- }
-}
-
-
-/// A set of `ANSIGenericString`s collected together, in order to be
+/// A set of `AnsiGenericStrings`s collected together, in order to be
/// written with a minimum of control characters.
#[derive(Debug, PartialEq)]
-pub struct ANSIGenericStrings<'a, S: 'a + ToOwned + ?Sized>
- (pub &'a [ANSIGenericString<'a, S>])
- where <S as ToOwned>::Owned: fmt::Debug, S: PartialEq;
+pub struct AnsiGenericStrings<'a, S: 'a + ToOwned + ?Sized>(pub &'a [AnsiGenericString<'a, S>])
+where
+ <S as ToOwned>::Owned: fmt::Debug,
+ S: PartialEq;
-/// A set of `ANSIString`s collected together, in order to be written with a
+/// A set of `AnsiString`s collected together, in order to be written with a
/// minimum of control characters.
-pub type ANSIStrings<'a> = ANSIGenericStrings<'a, str>;
+pub type AnsiStrings<'a> = AnsiGenericStrings<'a, str>;
-/// A function to construct an `ANSIStrings` instance.
+/// A function to construct an `AnsiStrings` instance.
#[allow(non_snake_case)]
-pub fn ANSIStrings<'a>(arg: &'a [ANSIString<'a>]) -> ANSIStrings<'a> {
- ANSIGenericStrings(arg)
+pub fn AnsiStrings<'a>(arg: &'a [AnsiString<'a>]) -> AnsiStrings<'a> {
+ AnsiGenericStrings(arg)
}
-/// A set of `ANSIByteString`s collected together, in order to be
+/// A set of `AnsiByteString`s collected together, in order to be
/// written with a minimum of control characters.
-pub type ANSIByteStrings<'a> = ANSIGenericStrings<'a, [u8]>;
+pub type AnsiByteStrings<'a> = AnsiGenericStrings<'a, [u8]>;
-/// A function to construct an `ANSIByteStrings` instance.
+/// A function to construct an `AnsiByteStrings` instance.
#[allow(non_snake_case)]
-pub fn ANSIByteStrings<'a>(arg: &'a [ANSIByteString<'a>]) -> ANSIByteStrings<'a> {
- ANSIGenericStrings(arg)
+pub fn AnsiByteStrings<'a>(arg: &'a [AnsiByteString<'a>]) -> AnsiByteStrings<'a> {
+ AnsiGenericStrings(arg)
}
-
// ---- paint functions ----
impl Style {
-
- /// Paints the given text with this colour, returning an ANSI string.
+ /// Paints the given text with this color, returning an ANSI string.
#[must_use]
- pub fn paint<'a, I, S: 'a + ToOwned + ?Sized>(self, input: I) -> ANSIGenericString<'a, S>
- where I: Into<Cow<'a, S>>,
- <S as ToOwned>::Owned: fmt::Debug {
- ANSIGenericString {
+ pub fn paint<'a, I, S: 'a + ToOwned + ?Sized>(self, input: I) -> AnsiGenericString<'a, S>
+ where
+ I: Into<Cow<'a, S>>,
+ <S as ToOwned>::Owned: fmt::Debug,
+ {
+ AnsiGenericString {
string: input.into(),
- style: self,
+ style: self,
}
}
}
-
-impl Colour {
-
- /// Paints the given text with this colour, returning an ANSI string.
+impl Color {
+ /// Paints the given text with this color, returning an ANSI string.
/// This is a short-cut so you don’t have to use `Blue.normal()` just
/// to get blue text.
///
/// ```
- /// use ansi_term::Colour::Blue;
+ /// use nu_ansi_term::Color::Blue;
/// println!("{}", Blue.paint("da ba dee"));
/// ```
#[must_use]
- pub fn paint<'a, I, S: 'a + ToOwned + ?Sized>(self, input: I) -> ANSIGenericString<'a, S>
- where I: Into<Cow<'a, S>>,
- <S as ToOwned>::Owned: fmt::Debug {
- ANSIGenericString {
+ pub fn paint<'a, I, S: 'a + ToOwned + ?Sized>(self, input: I) -> AnsiGenericString<'a, S>
+ where
+ I: Into<Cow<'a, S>>,
+ <S as ToOwned>::Owned: fmt::Debug,
+ {
+ AnsiGenericString {
string: input.into(),
- style: self.normal(),
+ style: self.normal(),
}
}
}
-
// ---- writers for individual ANSI strings ----
-impl<'a> fmt::Display for ANSIString<'a> {
+impl<'a> fmt::Display for AnsiString<'a> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- let w: &mut fmt::Write = f;
+ let w: &mut dyn fmt::Write = f;
self.write_to_any(w)
}
}
-impl<'a> ANSIByteString<'a> {
- /// Write an `ANSIByteString` to an `io::Write`. This writes the escape
+impl<'a> AnsiByteString<'a> {
+ /// Write an `AnsiByteString` to an `io::Write`. This writes the escape
/// sequences for the associated `Style` around the bytes.
pub fn write_to<W: io::Write>(&self, w: &mut W) -> io::Result<()> {
- let w: &mut io::Write = w;
+ let w: &mut dyn io::Write = w;
self.write_to_any(w)
}
}
-impl<'a, S: 'a + ToOwned + ?Sized> ANSIGenericString<'a, S>
-where <S as ToOwned>::Owned: fmt::Debug, &'a S: AsRef<[u8]> {
- fn write_to_any<W: AnyWrite<wstr=S> + ?Sized>(&self, w: &mut W) -> Result<(), W::Error> {
+impl<'a, S: 'a + ToOwned + ?Sized> AnsiGenericString<'a, S>
+where
+ <S as ToOwned>::Owned: fmt::Debug,
+ &'a S: AsRef<[u8]>,
+{
+ fn write_to_any<W: AnyWrite<Wstr = S> + ?Sized>(&self, w: &mut W) -> Result<(), W::Error> {
write!(w, "{}", self.style.prefix())?;
w.write_str(self.string.as_ref())?;
write!(w, "{}", self.style.suffix())
}
}
-
// ---- writers for combined ANSI strings ----
-impl<'a> fmt::Display for ANSIStrings<'a> {
+impl<'a> fmt::Display for AnsiStrings<'a> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- let f: &mut fmt::Write = f;
+ let f: &mut dyn fmt::Write = f;
self.write_to_any(f)
}
}
-impl<'a> ANSIByteStrings<'a> {
- /// Write `ANSIByteStrings` to an `io::Write`. This writes the minimal
+impl<'a> AnsiByteStrings<'a> {
+ /// Write `AnsiByteStrings` to an `io::Write`. This writes the minimal
/// escape sequences for the associated `Style`s around each set of
/// bytes.
pub fn write_to<W: io::Write>(&self, w: &mut W) -> io::Result<()> {
- let w: &mut io::Write = w;
+ let w: &mut dyn io::Write = w;
self.write_to_any(w)
}
}
-impl<'a, S: 'a + ToOwned + ?Sized + PartialEq> ANSIGenericStrings<'a, S>
-where <S as ToOwned>::Owned: fmt::Debug, &'a S: AsRef<[u8]> {
- fn write_to_any<W: AnyWrite<wstr=S> + ?Sized>(&self, w: &mut W) -> Result<(), W::Error> {
+impl<'a, S: 'a + ToOwned + ?Sized + PartialEq> AnsiGenericStrings<'a, S>
+where
+ <S as ToOwned>::Owned: fmt::Debug,
+ &'a S: AsRef<[u8]>,
+{
+ fn write_to_any<W: AnyWrite<Wstr = S> + ?Sized>(&self, w: &mut W) -> Result<(), W::Error> {
use self::Difference::*;
let first = match self.0.first() {
@@ -257,14 +252,14 @@ where <S as ToOwned>::Owned: fmt::Debug, &'a S: AsRef<[u8]> {
for window in self.0.windows(2) {
match Difference::between(&window[0].style, &window[1].style) {
ExtraStyles(style) => write!(w, "{}", style.prefix())?,
- Reset => write!(w, "{}{}", RESET, window[1].style.prefix())?,
- NoDifference => {/* Do nothing! */},
+ Reset => write!(w, "{}{}", RESET, window[1].style.prefix())?,
+ Empty => { /* Do nothing! */ }
}
w.write_str(&window[1].string)?;
}
- // Write the final reset string after all of the ANSIStrings have been
+ // Write the final reset string after all of the AnsiStrings have been
// written, *except* if the last one has no styles, because it would
// have already been written by this point.
if let Some(last) = self.0.last() {
@@ -277,20 +272,19 @@ where <S as ToOwned>::Owned: fmt::Debug, &'a S: AsRef<[u8]> {
}
}
-
// ---- tests ----
#[cfg(test)]
mod tests {
- pub use super::super::ANSIStrings;
- pub use style::Style;
- pub use style::Colour::*;
+ pub use super::super::AnsiStrings;
+ pub use crate::style::Color::*;
+ pub use crate::style::Style;
#[test]
fn no_control_codes_for_plain() {
let one = Style::default().paint("one");
let two = Style::default().paint("two");
- let output = format!("{}", ANSIStrings( &[ one, two ] ));
- assert_eq!(&*output, "onetwo");
+ let output = AnsiStrings(&[one, two]).to_string();
+ assert_eq!(output, "onetwo");
}
}
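As the comment in `write_to_any` above says, the final reset is skipped when the last element is plain; a small sketch of that behavior through the public `AnsiStrings` helper (the output is printed rather than asserted, since the exact byte sequence isn't spelled out in this diff):

```rust
use nu_ansi_term::Color::Red;
use nu_ansi_term::{AnsiString, AnsiStrings};

fn main() {
    // The styled element carries its own reset; because the tail is plain,
    // no extra trailing reset is appended after it.
    let parts: &[AnsiString<'static>] = &[Red.paint("red"), AnsiString::from(" plain tail")];
    println!("{}", AnsiStrings(parts));
}
```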
diff --git a/vendor/nu-ansi-term/src/gradient.rs b/vendor/nu-ansi-term/src/gradient.rs
new file mode 100644
index 000000000..a0d94c8cd
--- /dev/null
+++ b/vendor/nu-ansi-term/src/gradient.rs
@@ -0,0 +1,105 @@
+use crate::{rgb::Rgb, Color};
+
+/// Linear color gradient between two color stops
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub struct Gradient {
+ /// Start Color of Gradient
+ pub start: Rgb,
+
+ /// End Color of Gradient
+ pub end: Rgb,
+}
+
+impl Gradient {
+ /// Creates a new [Gradient] with two [Rgb] colors, `start` and `end`
+ #[inline]
+ pub const fn new(start: Rgb, end: Rgb) -> Self {
+ Self { start, end }
+ }
+ pub const fn from_color_rgb(start: Color, end: Color) -> Self {
+ let start_grad = match start {
+ Color::Rgb(r, g, b) => Rgb { r, g, b },
+ _ => Rgb { r: 0, g: 0, b: 0 },
+ };
+ let end_grad = match end {
+ Color::Rgb(r, g, b) => Rgb { r, g, b },
+ _ => Rgb { r: 0, g: 0, b: 0 },
+ };
+
+ Self {
+ start: start_grad,
+ end: end_grad,
+ }
+ }
+
+ /// Computes the [Rgb] color between `start` and `end` for `t`
+ pub fn at(&self, t: f32) -> Rgb {
+ self.start.lerp(self.end, t)
+ }
+
+ /// Returns the reverse of `self`
+ #[inline]
+ pub const fn reverse(&self) -> Self {
+ Self::new(self.end, self.start)
+ }
+
+ #[allow(dead_code)]
+ pub fn build(&self, text: &str, target: TargetGround) -> String {
+ let delta = 1.0 / text.len() as f32;
+ let mut result = text.char_indices().fold(String::new(), |mut acc, (i, c)| {
+ let temp = format!(
+ "\x1B[{}m{}",
+ self.at(i as f32 * delta).ansi_color_code(target),
+ c
+ );
+ acc.push_str(&temp);
+ acc
+ });
+
+ result.push_str("\x1B[0m");
+ result
+ }
+}
+
+#[allow(dead_code)]
+pub fn build_all_gradient_text(text: &str, foreground: Gradient, background: Gradient) -> String {
+ let delta = 1.0 / text.len() as f32;
+ let mut result = text.char_indices().fold(String::new(), |mut acc, (i, c)| {
+ let step = i as f32 * delta;
+ let temp = format!(
+ "\x1B[{};{}m{}",
+ foreground
+ .at(step)
+ .ansi_color_code(TargetGround::Foreground),
+ background
+ .at(step)
+ .ansi_color_code(TargetGround::Background),
+ c
+ );
+ acc.push_str(&temp);
+ acc
+ });
+
+ result.push_str("\x1B[0m");
+ result
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum TargetGround {
+ Foreground,
+ Background,
+}
+
+impl TargetGround {
+ #[inline]
+ pub const fn code(&self) -> u8 {
+ match self {
+ Self::Foreground => 30,
+ Self::Background => 40,
+ }
+ }
+}
+
+pub trait ANSIColorCode {
+ fn ansi_color_code(&self, target: TargetGround) -> String;
+}
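A minimal use of the new module, restricted to items that also appear in `examples/gradient_colors.rs` above:

```rust
use nu_ansi_term::{Color, Gradient, TargetGround};

fn main() {
    // Interpolate between two 24-bit colors and render text with the result.
    let grad = Gradient::from_color_rgb(Color::Rgb(64, 201, 255), Color::Rgb(232, 28, 255));
    println!("{}", grad.build("lorem ipsum", TargetGround::Foreground));
    // reverse() swaps the two stops.
    println!("{}", grad.reverse().build("lorem ipsum", TargetGround::Background));
}
```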
diff --git a/vendor/ansi_term/src/lib.rs b/vendor/nu-ansi-term/src/lib.rs
index 2d2f83ae6..c04fd3276 100644
--- a/vendor/ansi_term/src/lib.rs
+++ b/vendor/nu-ansi-term/src/lib.rs
@@ -1,40 +1,40 @@
-//! This is a library for controlling colours and formatting, such as
+//! This is a library for controlling colors and formatting, such as
//! red bold text or blue underlined text, on ANSI terminals.
//!
//!
//! ## Basic usage
//!
//! There are three main types in this crate that you need to be
-//! concerned with: [`ANSIString`], [`Style`], and [`Colour`].
+//! concerned with: [`AnsiString`], [`Style`], and [`Color`].
//!
-//! A `Style` holds stylistic information: foreground and background colours,
+//! A `Style` holds stylistic information: foreground and background colors,
//! whether the text should be bold, or blinking, or other properties. The
-//! [`Colour`] enum represents the available colours. And an [`ANSIString`] is a
+//! [`Color`] enum represents the available colors. And an [`AnsiString`] is a
//! string paired with a [`Style`].
//!
-//! [`Color`] is also available as an alias to `Colour`.
+//! [`Color`] is the only spelling provided; the `Colour` alias from `ansi_term` has been removed.
//!
-//! To format a string, call the `paint` method on a `Style` or a `Colour`,
+//! To format a string, call the `paint` method on a `Style` or a `Color`,
//! passing in the string you want to format as the argument. For example,
//! here’s how to get some red text:
//!
//! ```
-//! use ansi_term::Colour::Red;
+//! use nu_ansi_term::Color::Red;
//!
//! println!("This is in red: {}", Red.paint("a red string"));
//! ```
//!
//! It’s important to note that the `paint` method does *not* actually return a
//! string with the ANSI control characters surrounding it. Instead, it returns
-//! an [`ANSIString`] value that has a [`Display`] implementation that, when
+//! an [`AnsiString`] value that has a [`Display`] implementation that, when
//! formatted, returns the characters. This allows strings to be printed with a
//! minimum of [`String`] allocations being performed behind the scenes.
//!
//! If you *do* want to get at the escape codes, then you can convert the
-//! [`ANSIString`] to a string as you would any other `Display` value:
+//! [`AnsiString`] to a string as you would any other `Display` value:
//!
//! ```
-//! use ansi_term::Colour::Red;
+//! use nu_ansi_term::Color::Red;
//!
//! let red_string = Red.paint("a red string").to_string();
//! ```
@@ -42,26 +42,26 @@
//!
//! ## Bold, underline, background, and other styles
//!
-//! For anything more complex than plain foreground colour changes, you need to
-//! construct `Style` values themselves, rather than beginning with a `Colour`.
+//! For anything more complex than plain foreground color changes, you need to
+//! construct `Style` values themselves, rather than beginning with a `Color`.
//! You can do this by chaining methods based on a new `Style`, created with
//! [`Style::new()`]. Each method creates a new style that has that specific
//! property set. For example:
//!
//! ```
-//! use ansi_term::Style;
+//! use nu_ansi_term::Style;
//!
//! println!("How about some {} and {}?",
//! Style::new().bold().paint("bold"),
//! Style::new().underline().paint("underline"));
//! ```
//!
-//! For brevity, these methods have also been implemented for `Colour` values,
-//! so you can give your styles a foreground colour without having to begin with
+//! For brevity, these methods have also been implemented for `Color` values,
+//! so you can give your styles a foreground color without having to begin with
//! an empty `Style` value:
//!
//! ```
-//! use ansi_term::Colour::{Blue, Yellow};
+//! use nu_ansi_term::Color::{Blue, Yellow};
//!
//! println!("Demonstrating {} and {}!",
//! Blue.bold().paint("blue bold"),
@@ -72,68 +72,68 @@
//!
//! The complete list of styles you can use are: [`bold`], [`dimmed`], [`italic`],
//! [`underline`], [`blink`], [`reverse`], [`hidden`], [`strikethrough`], and [`on`] for
-//! background colours.
+//! background colors.
//!
//! In some cases, you may find it easier to change the foreground on an
-//! existing `Style` rather than starting from the appropriate `Colour`.
+//! existing `Style` rather than starting from the appropriate `Color`.
//! You can do this using the [`fg`] method:
//!
//! ```
-//! use ansi_term::Style;
-//! use ansi_term::Colour::{Blue, Cyan, Yellow};
+//! use nu_ansi_term::Style;
+//! use nu_ansi_term::Color::{Blue, Cyan, Yellow};
//!
//! println!("Yellow on blue: {}", Style::new().on(Blue).fg(Yellow).paint("yow!"));
//! println!("Also yellow on blue: {}", Cyan.on(Blue).fg(Yellow).paint("zow!"));
//! ```
//!
-//! You can turn a `Colour` into a `Style` with the [`normal`] method.
-//! This will produce the exact same `ANSIString` as if you just used the
-//! `paint` method on the `Colour` directly, but it’s useful in certain cases:
+//! You can turn a `Color` into a `Style` with the [`normal`] method.
+//! This will produce the exact same `AnsiString` as if you just used the
+//! `paint` method on the `Color` directly, but it’s useful in certain cases:
//! for example, you may have a method that returns `Styles`, and need to
//! represent both the “red bold” and “red, but not bold” styles with values of
//! the same type. The `Style` struct also has a [`Default`] implementation if you
//! want to have a style with *nothing* set.
//!
//! ```
-//! use ansi_term::Style;
-//! use ansi_term::Colour::Red;
+//! use nu_ansi_term::Style;
+//! use nu_ansi_term::Color::Red;
//!
//! Red.normal().paint("yet another red string");
//! Style::default().paint("a completely regular string");
//! ```
//!
//!
-//! ## Extended colours
+//! ## Extended colors
//!
-//! You can access the extended range of 256 colours by using the `Colour::Fixed`
-//! variant, which takes an argument of the colour number to use. This can be
-//! included wherever you would use a `Colour`:
+//! You can access the extended range of 256 colors by using the `Color::Fixed`
+//! variant, which takes an argument of the color number to use. This can be
+//! included wherever you would use a `Color`:
//!
//! ```
-//! use ansi_term::Colour::Fixed;
+//! use nu_ansi_term::Color::Fixed;
//!
//! Fixed(134).paint("A sort of light purple");
//! Fixed(221).on(Fixed(124)).paint("Mustard in the ketchup");
//! ```
//!
//! The first sixteen of these values are the same as the normal and bold
-//! standard colour variants. There’s nothing stopping you from using these as
-//! `Fixed` colours instead, but there’s nothing to be gained by doing so
+//! standard color variants. There’s nothing stopping you from using these as
+//! `Fixed` colors instead, but there’s nothing to be gained by doing so
//! either.
//!
-//! You can also access full 24-bit colour by using the `Colour::RGB` variant,
+//! You can also access full 24-bit color by using the `Color::Rgb` variant,
//! which takes separate `u8` arguments for red, green, and blue:
//!
//! ```
-//! use ansi_term::Colour::RGB;
+//! use nu_ansi_term::Color::Rgb;
//!
-//! RGB(70, 130, 180).paint("Steel blue");
+//! Rgb(70, 130, 180).paint("Steel blue");
//! ```
//!
-//! ## Combining successive coloured strings
+//! ## Combining successive colored strings
//!
//! The benefit of writing ANSI escape codes to the terminal is that they
-//! *stack*: you do not need to end every coloured string with a reset code if
+//! *stack*: you do not need to end every colored string with a reset code if
//! the text that follows it is of a similar style. For example, if you want to
//! have some blue text followed by some blue bold text, it’s possible to send
//! the ANSI code for blue, followed by the ANSI code for bold, and finishing
@@ -141,8 +141,8 @@
//! strings.
//!
//! This crate can optimise the ANSI codes that get printed in situations like
-//! this, making life easier for your terminal renderer. The [`ANSIStrings`]
-//! type takes a slice of several [`ANSIString`] values, and will iterate over
+//! this, making life easier for your terminal renderer. The [`AnsiStrings`]
+//! type takes a slice of several [`AnsiString`] values, and will iterate over
//! each of them, printing only the codes for the styles that need to be updated
//! as part of its formatting routine.
//!
@@ -150,25 +150,25 @@
//! red bold text inside some red, but not bold, brackets:
//!
//! ```
-//! use ansi_term::Colour::Red;
-//! use ansi_term::{ANSIString, ANSIStrings};
+//! use nu_ansi_term::Color::Red;
+//! use nu_ansi_term::{AnsiString, AnsiStrings};
//!
//! let some_value = format!("{:b}", 42);
-//! let strings: &[ANSIString<'static>] = &[
+//! let strings: &[AnsiString<'static>] = &[
//! Red.paint("["),
//! Red.bold().paint(some_value),
//! Red.paint("]"),
//! ];
//!
-//! println!("Value: {}", ANSIStrings(strings));
+//! println!("Value: {}", AnsiStrings(strings));
//! ```
//!
//! There are several things to note here. Firstly, the [`paint`] method can take
-//! *either* an owned [`String`] or a borrowed [`&str`]. Internally, an [`ANSIString`]
+//! *either* an owned [`String`] or a borrowed [`&str`]. Internally, an [`AnsiString`]
//! holds a copy-on-write ([`Cow`]) string value to deal with both owned and
//! borrowed strings at the same time. This is used here to display a `String`,
//! the result of the `format!` call, using the same mechanism as some
-//! statically-available `&str` slices. Secondly, that the [`ANSIStrings`] value
+//! statically-available `&str` slices. Secondly, that the [`AnsiStrings`] value
//! works in the same way as its singular counterpart, with a [`Display`]
//! implementation that only performs the formatting when required.
//!
@@ -176,25 +176,25 @@
//!
//! This library also supports formatting `\[u8]` byte strings; this supports
//! applications working with text in an unknown encoding. [`Style`] and
-//! [`Colour`] support painting `\[u8]` values, resulting in an [`ANSIByteString`].
+//! [`Color`] support painting `\[u8]` values, resulting in an [`AnsiByteString`].
//! This type does not implement [`Display`], as it may not contain UTF-8, but
//! it does provide a method [`write_to`] to write the result to any value that
//! implements [`Write`]:
//!
//! ```
-//! use ansi_term::Colour::Green;
+//! use nu_ansi_term::Color::Green;
//!
//! Green.paint("user data".as_bytes()).write_to(&mut std::io::stdout()).unwrap();
//! ```
//!
-//! Similarly, the type [`ANSIByteStrings`] supports writing a list of
-//! [`ANSIByteString`] values with minimal escape sequences:
+//! Similarly, the type [`AnsiByteStrings`] supports writing a list of
+//! [`AnsiByteString`] values with minimal escape sequences:
//!
//! ```
-//! use ansi_term::Colour::Green;
-//! use ansi_term::ANSIByteStrings;
+//! use nu_ansi_term::Color::Green;
+//! use nu_ansi_term::AnsiByteStrings;
//!
-//! ANSIByteStrings(&[
+//! AnsiByteStrings(&[
//! Green.paint("user data 1\n".as_bytes()),
//! Green.bold().paint("user data 2\n".as_bytes()),
//! ]).write_to(&mut std::io::stdout()).unwrap();
@@ -209,14 +209,14 @@
//! [`Style`]: struct.Style.html
//! [`Style::new()`]: struct.Style.html#method.new
//! [`Color`]: enum.Color.html
-//! [`Colour`]: enum.Colour.html
-//! [`ANSIString`]: type.ANSIString.html
-//! [`ANSIStrings`]: type.ANSIStrings.html
-//! [`ANSIByteString`]: type.ANSIByteString.html
-//! [`ANSIByteStrings`]: type.ANSIByteStrings.html
-//! [`write_to`]: type.ANSIByteString.html#method.write_to
-//! [`paint`]: type.ANSIByteString.html#method.write_to
-//! [`normal`]: enum.Colour.html#method.normal
+//! [`Color`]: enum.Color.html
+//! [`AnsiString`]: type.AnsiString.html
+//! [`AnsiStrings`]: type.AnsiStrings.html
+//! [`AnsiByteString`]: type.AnsiByteString.html
+//! [`AnsiByteStrings`]: type.AnsiByteStrings.html
+//! [`write_to`]: type.AnsiByteString.html#method.write_to
+//! [`paint`]: type.AnsiByteString.html#method.write_to
+//! [`normal`]: enum.Color.html#method.normal
//!
//! [`bold`]: struct.Style.html#method.bold
//! [`dimmed`]: struct.Style.html#method.dimmed
@@ -229,16 +229,14 @@
//! [`fg`]: struct.Style.html#method.fg
//! [`on`]: struct.Style.html#method.on
-#![crate_name = "ansi_term"]
+#![crate_name = "nu_ansi_term"]
#![crate_type = "rlib"]
-#![crate_type = "dylib"]
-
#![warn(missing_copy_implementations)]
-#![warn(missing_docs)]
+// #![warn(missing_docs)]
#![warn(trivial_casts, trivial_numeric_casts)]
-#![warn(unused_extern_crates, unused_qualifications)]
+// #![warn(unused_extern_crates, unused_qualifications)]
-#[cfg(target_os="windows")]
+#[cfg(target_os = "windows")]
extern crate winapi;
#[cfg(test)]
#[macro_use]
@@ -247,14 +245,11 @@ extern crate doc_comment;
#[cfg(test)]
doctest!("../README.md");
-mod ansi;
-pub use ansi::{Prefix, Infix, Suffix};
+pub mod ansi;
+pub use ansi::{Infix, Prefix, Suffix};
mod style;
-pub use style::{Colour, Style};
-
-/// Color is a type alias for `Colour`.
-pub use Colour as Color;
+pub use style::{Color, Style};
mod difference;
mod display;
@@ -269,3 +264,9 @@ mod util;
pub use util::*;
mod debug;
+
+pub mod gradient;
+pub use gradient::*;
+
+mod rgb;
+pub use rgb::*;
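Beyond the crate rename, the public types move from the all-caps `ANSI*` and British `Colour` spellings to `Ansi*` and `Color`. A short migration sketch, mirroring the doc examples above:

```rust
// Old ansi_term spellings: ANSIString, ANSIStrings, Colour.
// New nu-ansi-term spellings, per the re-exports above:
use nu_ansi_term::Color::Red;
use nu_ansi_term::{AnsiString, AnsiStrings};

fn main() {
    let parts: &[AnsiString<'static>] = &[
        Red.paint("["),
        Red.bold().paint(format!("{:b}", 42)),
        Red.paint("]"),
    ];
    // One Display pass emits only the escape codes that actually change.
    println!("Value: {}", AnsiStrings(parts));
}
```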
diff --git a/vendor/nu-ansi-term/src/rgb.rs b/vendor/nu-ansi-term/src/rgb.rs
new file mode 100644
index 000000000..19475c36b
--- /dev/null
+++ b/vendor/nu-ansi-term/src/rgb.rs
@@ -0,0 +1,173 @@
+// Code liberally borrowed from here
+// https://github.com/navierr/coloriz
+use std::ops;
+use std::u32;
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub struct Rgb {
+ /// Red
+ pub r: u8,
+ /// Green
+ pub g: u8,
+ /// Blue
+ pub b: u8,
+}
+
+impl Rgb {
+ /// Creates a new [Rgb] color
+ #[inline]
+ pub const fn new(r: u8, g: u8, b: u8) -> Self {
+ Self { r, g, b }
+ }
+
+ /// Creates a new [Rgb] color with a hex code
+ #[inline]
+ pub const fn from_hex(hex: u32) -> Self {
+ Self::new((hex >> 16) as u8, (hex >> 8) as u8, hex as u8)
+ }
+
+ pub fn from_hex_string(hex: String) -> Self {
+ if hex.chars().count() == 8 && hex.starts_with("0x") {
+ // eprintln!("hex:{:?}", hex);
+ let (_, value_string) = hex.split_at(2);
+ // eprintln!("value_string:{:?}", value_string);
+ let int_val = u64::from_str_radix(value_string, 16);
+ match int_val {
+ Ok(num) => Self::new(
+ ((num & 0xff0000) >> 16) as u8,
+ ((num & 0xff00) >> 8) as u8,
+ (num & 0xff) as u8,
+ ),
+ // Don't fail, just make the color black
+ // Should we fail?
+ _ => Self::new(0, 0, 0),
+ }
+ } else {
+ // Don't fail, just make the color black.
+ // Should we fail?
+ Self::new(0, 0, 0)
+ }
+ }
+
+ /// Creates a new [Rgb] color with three [f32] values
+ pub fn from_f32(r: f32, g: f32, b: f32) -> Self {
+ Self::new(
+ (r.clamp(0.0, 1.0) * 255.0) as u8,
+ (g.clamp(0.0, 1.0) * 255.0) as u8,
+ (b.clamp(0.0, 1.0) * 255.0) as u8,
+ )
+ }
+
+ /// Creates a grayscale [Rgb] color
+ #[inline]
+ pub const fn gray(x: u8) -> Self {
+ Self::new(x, x, x)
+ }
+
+ /// Creates a grayscale [Rgb] color with a [f32] value
+ pub fn gray_f32(x: f32) -> Self {
+ Self::from_f32(x, x, x)
+ }
+
+ /// Creates a new [Rgb] color from a [HSL] color
+ // pub fn from_hsl(hsl: HSL) -> Self {
+ // if hsl.s == 0.0 {
+ // return Self::gray_f32(hsl.l);
+ // }
+
+ // let q = if hsl.l < 0.5 {
+ // hsl.l * (1.0 + hsl.s)
+ // } else {
+ // hsl.l + hsl.s - hsl.l * hsl.s
+ // };
+ // let p = 2.0 * hsl.l - q;
+ // let h2c = |t: f32| {
+ // let t = t.clamp(0.0, 1.0);
+ // if 6.0 * t < 1.0 {
+ // p + 6.0 * (q - p) * t
+ // } else if t < 0.5 {
+ // q
+ // } else if 1.0 < 1.5 * t {
+ // p + 6.0 * (q - p) * (1.0 / 1.5 - t)
+ // } else {
+ // p
+ // }
+ // };
+
+ // Self::from_f32(h2c(hsl.h + 1.0 / 3.0), h2c(hsl.h), h2c(hsl.h - 1.0 / 3.0))
+ // }
+
+ /// Computes the linear interpolation between `self` and `other` for `t`
+ pub fn lerp(&self, other: Self, t: f32) -> Self {
+ let t = t.clamp(0.0, 1.0);
+ self * (1.0 - t) + other * t
+ }
+}
+
+impl From<(u8, u8, u8)> for Rgb {
+ fn from((r, g, b): (u8, u8, u8)) -> Self {
+ Self::new(r, g, b)
+ }
+}
+
+impl From<(f32, f32, f32)> for Rgb {
+ fn from((r, g, b): (f32, f32, f32)) -> Self {
+ Self::from_f32(r, g, b)
+ }
+}
+
+use crate::ANSIColorCode;
+use crate::TargetGround;
+impl ANSIColorCode for Rgb {
+ fn ansi_color_code(&self, target: TargetGround) -> String {
+ format!("{};2;{};{};{}", target.code() + 8, self.r, self.g, self.b)
+ }
+}
+
+overload::overload!(
+ (lhs: ?Rgb) + (rhs: ?Rgb) -> Rgb {
+ Rgb::new(
+ lhs.r.saturating_add(rhs.r),
+ lhs.g.saturating_add(rhs.g),
+ lhs.b.saturating_add(rhs.b)
+ )
+ }
+);
+
+overload::overload!(
+ (lhs: ?Rgb) - (rhs: ?Rgb) -> Rgb {
+ Rgb::new(
+ lhs.r.saturating_sub(rhs.r),
+ lhs.g.saturating_sub(rhs.g),
+ lhs.b.saturating_sub(rhs.b)
+ )
+ }
+);
+
+overload::overload!(
+ (lhs: ?Rgb) * (rhs: ?f32) -> Rgb {
+ Rgb::new(
+ (lhs.r as f32 * rhs.clamp(0.0, 1.0)) as u8,
+ (lhs.g as f32 * rhs.clamp(0.0, 1.0)) as u8,
+ (lhs.b as f32 * rhs.clamp(0.0, 1.0)) as u8
+ )
+ }
+);
+
+overload::overload!(
+ (lhs: ?f32) * (rhs: ?Rgb) -> Rgb {
+ Rgb::new(
+ (rhs.r as f32 * lhs.clamp(0.0, 1.0)) as u8,
+ (rhs.g as f32 * lhs.clamp(0.0, 1.0)) as u8,
+ (rhs.b as f32 * lhs.clamp(0.0, 1.0)) as u8
+ )
+ }
+);
+
+overload::overload!(
+ -(rgb: ?Rgb) -> Rgb {
+ Rgb::new(
+ 255 - rgb.r,
+ 255 - rgb.g,
+ 255 - rgb.b)
+ }
+);
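The new rgb module provides constructors, linear interpolation, and saturating channel arithmetic via the `overload` macros. A small sketch exercising the helpers above, assuming `Rgb` stays re-exported at the crate root through `pub use rgb::*;`:

```rust
use nu_ansi_term::Rgb;

fn main() {
    let steel_blue = Rgb::from_hex(0x4682B4);
    assert_eq!(steel_blue, Rgb::new(0x46, 0x82, 0xB4));

    // "0x" plus six hex digits; anything else falls back to black.
    assert_eq!(Rgb::from_hex_string("0x4682B4".to_string()), steel_blue);
    assert_eq!(Rgb::from_hex_string("steel blue".to_string()), Rgb::new(0, 0, 0));

    // Linear interpolation, the primitive behind Gradient::at.
    assert_eq!(Rgb::gray(0).lerp(Rgb::gray(255), 0.5), Rgb::new(127, 127, 127));

    // The unary `-` overload inverts each channel.
    assert_eq!(-steel_blue, Rgb::new(0xB9, 0x7D, 0x4B));
}
```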
diff --git a/vendor/nu-ansi-term/src/style.rs b/vendor/nu-ansi-term/src/style.rs
new file mode 100644
index 000000000..3d47a79f7
--- /dev/null
+++ b/vendor/nu-ansi-term/src/style.rs
@@ -0,0 +1,629 @@
+/// A style is a collection of properties that can format a string
+/// using ANSI escape codes.
+///
+/// # Examples
+///
+/// ```
+/// use nu_ansi_term::{Style, Color};
+///
+/// let style = Style::new().bold().on(Color::Black);
+/// println!("{}", style.paint("Bold on black"));
+/// ```
+#[derive(PartialEq, Clone, Copy)]
+#[cfg_attr(
+ feature = "derive_serde_style",
+ derive(serde::Deserialize, serde::Serialize)
+)]
+pub struct Style {
+ /// The style's foreground color, if it has one.
+ pub foreground: Option<Color>,
+
+ /// The style's background color, if it has one.
+ pub background: Option<Color>,
+
+ /// Whether this style is bold.
+ pub is_bold: bool,
+
+ /// Whether this style is dimmed.
+ pub is_dimmed: bool,
+
+ /// Whether this style is italic.
+ pub is_italic: bool,
+
+ /// Whether this style is underlined.
+ pub is_underline: bool,
+
+ /// Whether this style is blinking.
+ pub is_blink: bool,
+
+ /// Whether this style has reverse colors.
+ pub is_reverse: bool,
+
+ /// Whether this style is hidden.
+ pub is_hidden: bool,
+
+ /// Whether this style is struckthrough.
+ pub is_strikethrough: bool,
+}
+
+impl Style {
+ /// Creates a new Style with no properties set.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use nu_ansi_term::Style;
+ ///
+ /// let style = Style::new();
+ /// println!("{}", style.paint("hi"));
+ /// ```
+ pub fn new() -> Style {
+ Style::default()
+ }
+
+ /// Returns a `Style` with the bold property set.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use nu_ansi_term::Style;
+ ///
+ /// let style = Style::new().bold();
+ /// println!("{}", style.paint("hey"));
+ /// ```
+ pub fn bold(&self) -> Style {
+ Style {
+ is_bold: true,
+ ..*self
+ }
+ }
+
+ /// Returns a `Style` with the dimmed property set.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use nu_ansi_term::Style;
+ ///
+ /// let style = Style::new().dimmed();
+ /// println!("{}", style.paint("sup"));
+ /// ```
+ pub fn dimmed(&self) -> Style {
+ Style {
+ is_dimmed: true,
+ ..*self
+ }
+ }
+
+ /// Returns a `Style` with the italic property set.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use nu_ansi_term::Style;
+ ///
+ /// let style = Style::new().italic();
+ /// println!("{}", style.paint("greetings"));
+ /// ```
+ pub fn italic(&self) -> Style {
+ Style {
+ is_italic: true,
+ ..*self
+ }
+ }
+
+ /// Returns a `Style` with the underline property set.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use nu_ansi_term::Style;
+ ///
+ /// let style = Style::new().underline();
+ /// println!("{}", style.paint("salutations"));
+ /// ```
+ pub fn underline(&self) -> Style {
+ Style {
+ is_underline: true,
+ ..*self
+ }
+ }
+
+ /// Returns a `Style` with the blink property set.
+ /// # Examples
+ ///
+ /// ```
+ /// use nu_ansi_term::Style;
+ ///
+ /// let style = Style::new().blink();
+ /// println!("{}", style.paint("wazzup"));
+ /// ```
+ pub fn blink(&self) -> Style {
+ Style {
+ is_blink: true,
+ ..*self
+ }
+ }
+
+ /// Returns a `Style` with the reverse property set.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use nu_ansi_term::Style;
+ ///
+ /// let style = Style::new().reverse();
+ /// println!("{}", style.paint("aloha"));
+ /// ```
+ pub fn reverse(&self) -> Style {
+ Style {
+ is_reverse: true,
+ ..*self
+ }
+ }
+
+ /// Returns a `Style` with the hidden property set.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use nu_ansi_term::Style;
+ ///
+ /// let style = Style::new().hidden();
+ /// println!("{}", style.paint("ahoy"));
+ /// ```
+ pub fn hidden(&self) -> Style {
+ Style {
+ is_hidden: true,
+ ..*self
+ }
+ }
+
+ /// Returns a `Style` with the strikethrough property set.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use nu_ansi_term::Style;
+ ///
+ /// let style = Style::new().strikethrough();
+ /// println!("{}", style.paint("yo"));
+ /// ```
+ pub fn strikethrough(&self) -> Style {
+ Style {
+ is_strikethrough: true,
+ ..*self
+ }
+ }
+
+ /// Returns a `Style` with the foreground color property set.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use nu_ansi_term::{Style, Color};
+ ///
+ /// let style = Style::new().fg(Color::Yellow);
+ /// println!("{}", style.paint("hi"));
+ /// ```
+ pub fn fg(&self, foreground: Color) -> Style {
+ Style {
+ foreground: Some(foreground),
+ ..*self
+ }
+ }
+
+ /// Returns a `Style` with the background color property set.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use nu_ansi_term::{Style, Color};
+ ///
+ /// let style = Style::new().on(Color::Blue);
+ /// println!("{}", style.paint("eyyyy"));
+ /// ```
+ pub fn on(&self, background: Color) -> Style {
+ Style {
+ background: Some(background),
+ ..*self
+ }
+ }
+
+ /// Return true if this `Style` has no actual styles, and can be written
+ /// without any control characters.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use nu_ansi_term::Style;
+ ///
+ /// assert_eq!(true, Style::default().is_plain());
+ /// assert_eq!(false, Style::default().bold().is_plain());
+ /// ```
+ pub fn is_plain(self) -> bool {
+ self == Style::default()
+ }
+}
+
+impl Default for Style {
+ /// Returns a style with *no* properties set. Formatting text using this
+ /// style returns the exact same text.
+ ///
+ /// ```
+ /// use nu_ansi_term::Style;
+ /// assert_eq!(None, Style::default().foreground);
+ /// assert_eq!(None, Style::default().background);
+ /// assert_eq!(false, Style::default().is_bold);
+ /// assert_eq!("txt", Style::default().paint("txt").to_string());
+ /// ```
+ fn default() -> Style {
+ Style {
+ foreground: None,
+ background: None,
+ is_bold: false,
+ is_dimmed: false,
+ is_italic: false,
+ is_underline: false,
+ is_blink: false,
+ is_reverse: false,
+ is_hidden: false,
+ is_strikethrough: false,
+ }
+ }
+}
+
+// ---- colors ----
+
+/// A color is one specific type of ANSI escape code, and can refer
+/// to either the foreground or background color.
+///
+/// These use the standard numeric sequences.
+/// See <http://invisible-island.net/xterm/ctlseqs/ctlseqs.html>
+#[derive(PartialEq, Clone, Copy, Debug)]
+#[cfg_attr(
+ feature = "derive_serde_style",
+ derive(serde::Deserialize, serde::Serialize)
+)]
+pub enum Color {
+ /// Color #0 (foreground code `30`, background code `40`).
+ ///
+ /// This is not necessarily the background color, and using it as one may
+ /// render the text hard to read on terminals with dark backgrounds.
+ Black,
+
+ /// Color #0 (foreground code `90`, background code `100`).
+ DarkGray,
+
+ /// Color #1 (foreground code `31`, background code `41`).
+ Red,
+
+ /// Color #1 (foreground code `91`, background code `101`).
+ LightRed,
+
+ /// Color #2 (foreground code `32`, background code `42`).
+ Green,
+
+ /// Color #2 (foreground code `92`, background code `102`).
+ LightGreen,
+
+ /// Color #3 (foreground code `33`, background code `43`).
+ Yellow,
+
+ /// Color #3 (foreground code `93`, background code `103`).
+ LightYellow,
+
+ /// Color #4 (foreground code `34`, background code `44`).
+ Blue,
+
+ /// Color #4 (foreground code `94`, background code `104`).
+ LightBlue,
+
+ /// Color #5 (foreground code `35`, background code `45`).
+ Purple,
+
+ /// Color #5 (foreground code `95`, background code `105`).
+ LightPurple,
+
+ /// Color #5 (foreground code `35`, background code `45`).
+ Magenta,
+
+ /// Color #5 (foreground code `95`, background code `105`).
+ LightMagenta,
+
+ /// Color #6 (foreground code `36`, background code `46`).
+ Cyan,
+
+ /// Color #6 (foreground code `96`, background code `106`).
+ LightCyan,
+
+ /// Color #7 (foreground code `37`, background code `47`).
+ ///
+ /// As above, this is not necessarily the foreground color, and may be
+ /// hard to read on terminals with light backgrounds.
+ White,
+
+ /// Color #7 (foreground code `97`, background code `107`).
+ LightGray,
+
+ /// A color number from 0 to 255, for use in 256-color terminal
+ /// environments.
+ ///
+ /// - colors 0 to 7 are the `Black` to `White` variants respectively.
+ /// These colors can usually be changed in the terminal emulator.
+ /// - colors 8 to 15 are brighter versions of the eight colors above.
+ /// These can also usually be changed in the terminal emulator, or it
+ /// could be configured to use the original colors and show the text in
+ /// bold instead. It varies depending on the program.
+ /// - colors 16 to 231 contain several palettes of bright colors,
+ /// arranged in six squares measuring six by six each.
+ /// - colors 232 to 255 are shades of grey from black to white.
+ ///
+ /// It might make more sense to look at a [color chart][cc].
+ ///
+ /// [cc]: https://upload.wikimedia.org/wikipedia/commons/1/15/Xterm_256color_chart.svg
+ Fixed(u8),
+
+ /// A 24-bit Rgb color, as specified by ISO-8613-3.
+ Rgb(u8, u8, u8),
+
+ /// The default color (foreground code `39`, background code `49`).
+ Default,
+}
+
+impl Default for Color {
+ fn default() -> Self {
+ Color::White
+ }
+}
+
+impl Color {
+ /// Returns a `Style` with the foreground color set to this color.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use nu_ansi_term::Color;
+ ///
+ /// let style = Color::Red.normal();
+ /// println!("{}", style.paint("hi"));
+ /// ```
+ pub fn normal(self) -> Style {
+ Style {
+ foreground: Some(self),
+ ..Style::default()
+ }
+ }
+
+ /// Returns a `Style` with the foreground color set to this color and the
+ /// bold property set.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use nu_ansi_term::Color;
+ ///
+ /// let style = Color::Green.bold();
+ /// println!("{}", style.paint("hey"));
+ /// ```
+ pub fn bold(self) -> Style {
+ Style {
+ foreground: Some(self),
+ is_bold: true,
+ ..Style::default()
+ }
+ }
+
+ /// Returns a `Style` with the foreground color set to this color and the
+ /// dimmed property set.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use nu_ansi_term::Color;
+ ///
+ /// let style = Color::Yellow.dimmed();
+ /// println!("{}", style.paint("sup"));
+ /// ```
+ pub fn dimmed(self) -> Style {
+ Style {
+ foreground: Some(self),
+ is_dimmed: true,
+ ..Style::default()
+ }
+ }
+
+ /// Returns a `Style` with the foreground color set to this color and the
+ /// italic property set.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use nu_ansi_term::Color;
+ ///
+ /// let style = Color::Blue.italic();
+ /// println!("{}", style.paint("greetings"));
+ /// ```
+ pub fn italic(self) -> Style {
+ Style {
+ foreground: Some(self),
+ is_italic: true,
+ ..Style::default()
+ }
+ }
+
+ /// Returns a `Style` with the foreground color set to this color and the
+ /// underline property set.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use nu_ansi_term::Color;
+ ///
+ /// let style = Color::Purple.underline();
+ /// println!("{}", style.paint("salutations"));
+ /// ```
+ pub fn underline(self) -> Style {
+ Style {
+ foreground: Some(self),
+ is_underline: true,
+ ..Style::default()
+ }
+ }
+
+ /// Returns a `Style` with the foreground color set to this color and the
+ /// blink property set.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use nu_ansi_term::Color;
+ ///
+ /// let style = Color::Cyan.blink();
+ /// println!("{}", style.paint("wazzup"));
+ /// ```
+ pub fn blink(self) -> Style {
+ Style {
+ foreground: Some(self),
+ is_blink: true,
+ ..Style::default()
+ }
+ }
+
+ /// Returns a `Style` with the foreground color set to this color and the
+ /// reverse property set.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use nu_ansi_term::Color;
+ ///
+ /// let style = Color::Black.reverse();
+ /// println!("{}", style.paint("aloha"));
+ /// ```
+ pub fn reverse(self) -> Style {
+ Style {
+ foreground: Some(self),
+ is_reverse: true,
+ ..Style::default()
+ }
+ }
+
+ /// Returns a `Style` with the foreground color set to this color and the
+ /// hidden property set.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use nu_ansi_term::Color;
+ ///
+ /// let style = Color::White.hidden();
+ /// println!("{}", style.paint("ahoy"));
+ /// ```
+ pub fn hidden(self) -> Style {
+ Style {
+ foreground: Some(self),
+ is_hidden: true,
+ ..Style::default()
+ }
+ }
+
+ /// Returns a `Style` with the foreground color set to this color and the
+ /// strikethrough property set.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use nu_ansi_term::Color;
+ ///
+ /// let style = Color::Fixed(244).strikethrough();
+ /// println!("{}", style.paint("yo"));
+ /// ```
+ pub fn strikethrough(self) -> Style {
+ Style {
+ foreground: Some(self),
+ is_strikethrough: true,
+ ..Style::default()
+ }
+ }
+
+ /// Returns a `Style` with the foreground color set to this color and the
+ /// background color property set to the given color.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use nu_ansi_term::Color;
+ ///
+ /// let style = Color::Rgb(31, 31, 31).on(Color::White);
+ /// println!("{}", style.paint("eyyyy"));
+ /// ```
+ pub fn on(self, background: Color) -> Style {
+ Style {
+ foreground: Some(self),
+ background: Some(background),
+ ..Style::default()
+ }
+ }
+}
+
+impl From<Color> for Style {
+ /// You can turn a `Color` into a `Style` with the foreground color set
+ /// with the `From` trait.
+ ///
+ /// ```
+ /// use nu_ansi_term::{Style, Color};
+ /// let green_foreground = Style::default().fg(Color::Green);
+ /// assert_eq!(green_foreground, Color::Green.normal());
+ /// assert_eq!(green_foreground, Color::Green.into());
+ /// assert_eq!(green_foreground, Style::from(Color::Green));
+ /// ```
+ fn from(color: Color) -> Style {
+ color.normal()
+ }
+}
+
+#[cfg(test)]
+#[cfg(feature = "derive_serde_style")]
+mod serde_json_tests {
+ use super::{Color, Style};
+
+ #[test]
+ fn color_serialization() {
+ let colors = &[
+ Color::Red,
+ Color::Blue,
+ Color::Rgb(123, 123, 123),
+ Color::Fixed(255),
+ ];
+
+ assert_eq!(
+ serde_json::to_string(&colors).unwrap(),
+ String::from("[\"Red\",\"Blue\",{\"Rgb\":[123,123,123]},{\"Fixed\":255}]")
+ );
+ }
+
+ #[test]
+ fn color_deserialization() {
+ let colors = [
+ Color::Red,
+ Color::Blue,
+ Color::Rgb(123, 123, 123),
+ Color::Fixed(255),
+ ];
+
+ for color in colors {
+ let serialized = serde_json::to_string(&color).unwrap();
+ let deserialized: Color = serde_json::from_str(&serialized).unwrap();
+
+ assert_eq!(color, deserialized);
+ }
+ }
+
+ #[test]
+ fn style_serialization() {
+ let style = Style::default();
+
+ assert_eq!(serde_json::to_string(&style).unwrap(), "{\"foreground\":null,\"background\":null,\"is_bold\":false,\"is_dimmed\":false,\"is_italic\":false,\"is_underline\":false,\"is_blink\":false,\"is_reverse\":false,\"is_hidden\":false,\"is_strikethrough\":false}".to_string());
+ }
+}
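With the `derive_serde_style` feature shown in the `cfg_attr` attributes above, `Style` and `Color` serialize and deserialize through serde. A hedged sketch of a round trip, assuming that feature is enabled and that `serde_json` is available as in the tests above:

```rust
use nu_ansi_term::{Color, Style};

fn main() -> Result<(), serde_json::Error> {
    let style = Style::new().bold().fg(Color::Rgb(31, 31, 31)).on(Color::White);

    // Serializes to the flat field layout asserted in style_serialization above.
    let json = serde_json::to_string(&style)?;
    println!("{}", json);

    let back: Style = serde_json::from_str(&json)?;
    assert_eq!(back, style);
    Ok(())
}
```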
diff --git a/vendor/ansi_term/src/util.rs b/vendor/nu-ansi-term/src/util.rs
index ba0f12a02..a35020137 100644
--- a/vendor/ansi_term/src/util.rs
+++ b/vendor/nu-ansi-term/src/util.rs
@@ -1,27 +1,30 @@
-use display::*;
+use crate::display::{AnsiString, AnsiStrings};
use std::ops::Deref;
-/// Return a substring of the given ANSIStrings sequence, while keeping the formatting.
-pub fn sub_string<'a>(start: usize, len: usize, strs: &ANSIStrings<'a>) -> Vec<ANSIString<'static>> {
+/// Return a substring of the given AnsiStrings sequence, while keeping the formatting.
+pub fn sub_string<'a>(
+ start: usize,
+ len: usize,
+ strs: &AnsiStrings<'a>,
+) -> Vec<AnsiString<'static>> {
let mut vec = Vec::new();
let mut pos = start;
let mut len_rem = len;
for i in strs.0.iter() {
- let fragment = i.deref();
- let frag_len = fragment.len();
+ let frag_len = i.string.len();
if pos >= frag_len {
pos -= frag_len;
continue;
}
- if len_rem <= 0 {
+ if len_rem == 0 {
break;
}
let end = pos + len_rem;
let pos_end = if end >= frag_len { frag_len } else { end };
- vec.push(i.style_ref().paint(String::from(&fragment[pos..pos_end])));
+ vec.push(i.style_ref().paint(String::from(&i.string[pos..pos_end])));
if end <= frag_len {
break;
@@ -35,30 +38,29 @@ pub fn sub_string<'a>(start: usize, len: usize, strs: &ANSIStrings<'a>) -> Vec<A
}
/// Return a concatenated copy of `strs` without the formatting, as an allocated `String`.
-pub fn unstyle(strs: &ANSIStrings) -> String {
+pub fn unstyle(strs: &AnsiStrings) -> String {
let mut s = String::new();
for i in strs.0.iter() {
- s += &i.deref();
+ s += i.string.deref();
}
s
}
-/// Return the unstyled length of ANSIStrings. This is equaivalent to `unstyle(strs).len()`.
-pub fn unstyled_len(strs: &ANSIStrings) -> usize {
+/// Return the unstyled length of AnsiStrings. This is equivalent to `unstyle(strs).len()`.
+pub fn unstyled_len(strs: &AnsiStrings) -> usize {
let mut l = 0;
for i in strs.0.iter() {
- l += i.deref().len();
+ l += i.string.len();
}
l
}
#[cfg(test)]
mod test {
- use Colour::*;
- use display::*;
use super::*;
+ use crate::Color::*;
#[test]
fn test() {
@@ -67,15 +69,11 @@ mod test {
Red.paint("-second"),
White.paint("-third"),
];
- let a = ANSIStrings(&l);
+ let a = AnsiStrings(&l);
assert_eq!(unstyle(&a), "first-second-third");
assert_eq!(unstyled_len(&a), 18);
- let l2 = [
- Black.paint("st"),
- Red.paint("-second"),
- White.paint("-t"),
- ];
- assert_eq!(sub_string(3, 11, &a).as_slice(), &l2);
+ let l2 = [Black.paint("st"), Red.paint("-second"), White.paint("-t")];
+ assert_eq!(sub_string(3, 11, &a), l2);
}
}
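A short sketch of the renamed util helpers, following the test above; it assumes they remain re-exported at the crate root via `pub use util::*;`:

```rust
use nu_ansi_term::Color::{Black, Red, White};
use nu_ansi_term::{sub_string, unstyle, unstyled_len, AnsiStrings};

fn main() {
    let parts = [Black.paint("first"), Red.paint("-second"), White.paint("-third")];
    let joined = AnsiStrings(&parts);

    assert_eq!(unstyle(&joined), "first-second-third");
    assert_eq!(unstyled_len(&joined), 18);

    // The substring keeps each fragment's original style.
    let middle = sub_string(3, 11, &joined);
    assert_eq!(unstyle(&AnsiStrings(&middle)), "st-second-t");
}
```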
diff --git a/vendor/ansi_term/src/windows.rs b/vendor/nu-ansi-term/src/windows.rs
index fcf02ecf6..828e35573 100644
--- a/vendor/ansi_term/src/windows.rs
+++ b/vendor/nu-ansi-term/src/windows.rs
@@ -25,7 +25,8 @@ pub fn enable_ansi_support() -> Result<(), u32> {
unsafe {
// ref: https://docs.microsoft.com/en-us/windows/win32/api/fileapi/nf-fileapi-createfilew
// Using `CreateFileW("CONOUT$", ...)` to retrieve the console handle works correctly even if STDOUT and/or STDERR are redirected
- let console_out_name: Vec<u16> = OsStr::new("CONOUT$").encode_wide().chain(once(0)).collect();
+ let console_out_name: Vec<u16> =
+ OsStr::new("CONOUT$").encode_wide().chain(once(0)).collect();
let console_handle = CreateFileW(
console_out_name.as_ptr(),
GENERIC_READ | GENERIC_WRITE,
@@ -35,27 +36,27 @@ pub fn enable_ansi_support() -> Result<(), u32> {
0,
null_mut(),
);
- if console_handle == INVALID_HANDLE_VALUE
- {
+ if console_handle == INVALID_HANDLE_VALUE {
return Err(GetLastError());
}
// ref: https://docs.microsoft.com/en-us/windows/console/getconsolemode
let mut console_mode: u32 = 0;
- if 0 == GetConsoleMode(console_handle, &mut console_mode)
- {
+ if 0 == GetConsoleMode(console_handle, &mut console_mode) {
return Err(GetLastError());
}
// VT processing not already enabled?
if console_mode & ENABLE_VIRTUAL_TERMINAL_PROCESSING == 0 {
// https://docs.microsoft.com/en-us/windows/console/setconsolemode
- if 0 == SetConsoleMode(console_handle, console_mode | ENABLE_VIRTUAL_TERMINAL_PROCESSING)
- {
+ if 0 == SetConsoleMode(
+ console_handle,
+ console_mode | ENABLE_VIRTUAL_TERMINAL_PROCESSING,
+ ) {
return Err(GetLastError());
}
}
}
- return Ok(());
+ Ok(())
}
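On Windows the crate still has to opt the console into virtual-terminal processing before escape codes render. A hedged usage sketch, assuming `enable_ansi_support` is still re-exported at the crate root on Windows as it was in `ansi_term` (that `mod windows` re-export sits outside the hunks shown):

```rust
use nu_ansi_term::Color::Red;

fn main() {
    enable_vt();
    println!("{}", Red.paint("red, even in the legacy Windows console"));
}

#[cfg(windows)]
fn enable_vt() {
    // Returns the raw GetLastError() code on failure.
    if let Err(code) = nu_ansi_term::enable_ansi_support() {
        eprintln!("could not enable ANSI support: error {}", code);
    }
}

#[cfg(not(windows))]
fn enable_vt() {}
```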
diff --git a/vendor/ansi_term/src/write.rs b/vendor/nu-ansi-term/src/write.rs
index 65a64feb2..552771918 100644
--- a/vendor/ansi_term/src/write.rs
+++ b/vendor/nu-ansi-term/src/write.rs
@@ -1,40 +1,37 @@
use std::fmt;
use std::io;
-
pub trait AnyWrite {
- type wstr: ?Sized;
+ type Wstr: ?Sized;
type Error;
fn write_fmt(&mut self, fmt: fmt::Arguments) -> Result<(), Self::Error>;
- fn write_str(&mut self, s: &Self::wstr) -> Result<(), Self::Error>;
+ fn write_str(&mut self, s: &Self::Wstr) -> Result<(), Self::Error>;
}
-
-impl<'a> AnyWrite for fmt::Write + 'a {
- type wstr = str;
+impl<'a> AnyWrite for dyn fmt::Write + 'a {
+ type Wstr = str;
type Error = fmt::Error;
fn write_fmt(&mut self, fmt: fmt::Arguments) -> Result<(), Self::Error> {
fmt::Write::write_fmt(self, fmt)
}
- fn write_str(&mut self, s: &Self::wstr) -> Result<(), Self::Error> {
+ fn write_str(&mut self, s: &Self::Wstr) -> Result<(), Self::Error> {
fmt::Write::write_str(self, s)
}
}
-
-impl<'a> AnyWrite for io::Write + 'a {
- type wstr = [u8];
+impl<'a> AnyWrite for dyn io::Write + 'a {
+ type Wstr = [u8];
type Error = io::Error;
fn write_fmt(&mut self, fmt: fmt::Arguments) -> Result<(), Self::Error> {
io::Write::write_fmt(self, fmt)
}
- fn write_str(&mut self, s: &Self::wstr) -> Result<(), Self::Error> {
+ fn write_str(&mut self, s: &Self::Wstr) -> Result<(), Self::Error> {
io::Write::write_all(self, s)
}
}
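The write.rs changes swap the bare trait-object impls (`impl<'a> AnyWrite for fmt::Write + 'a`) for the explicit `dyn` syntax required by the 2018 and later editions, and rename the associated type to `Wstr`. Since `AnyWrite` itself is private, here is a standalone restatement of the pattern with illustrative names (`Sink`, `Chunk`), not the crate's API:

```rust
use std::fmt;
use std::io;

// One trait abstracting over "something we can write str-or-bytes to".
trait Sink {
    type Chunk: ?Sized;
    type Error;
    fn write_chunk(&mut self, chunk: &Self::Chunk) -> Result<(), Self::Error>;
}

// `dyn fmt::Write + 'a` is the spelling the diff adopts; the old bare
// `fmt::Write + 'a` form is rejected by newer editions.
impl<'a> Sink for dyn fmt::Write + 'a {
    type Chunk = str;
    type Error = fmt::Error;
    fn write_chunk(&mut self, chunk: &str) -> Result<(), fmt::Error> {
        fmt::Write::write_str(self, chunk)
    }
}

impl<'a> Sink for dyn io::Write + 'a {
    type Chunk = [u8];
    type Error = io::Error;
    fn write_chunk(&mut self, chunk: &[u8]) -> Result<(), io::Error> {
        io::Write::write_all(self, chunk)
    }
}

fn main() -> io::Result<()> {
    let mut buffer = String::new();
    let text_sink: &mut dyn fmt::Write = &mut buffer;
    text_sink.write_chunk("to a String").unwrap();

    let mut stdout = io::stdout();
    let byte_sink: &mut dyn io::Write = &mut stdout;
    byte_sink.write_chunk(b"to stdout\n")?;

    println!("{}", buffer);
    Ok(())
}
```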
diff --git a/vendor/num_cpus/.cargo-checksum.json b/vendor/num_cpus/.cargo-checksum.json
index e101b17c3..496119d1d 100644
--- a/vendor/num_cpus/.cargo-checksum.json
+++ b/vendor/num_cpus/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"CHANGELOG.md":"1b01434272c7e464f187eedc95522d9f5668f7f423459572fd33cd8eb5c4c5cf","CONTRIBUTING.md":"2390961aab1bba026135338da1216b6cc828dfaeed9357d9c155c55a252d3efb","Cargo.lock":"dabfb21d5ed9cefaf28e1fcbcaf253de08807ca3be33d0de4b06bb7a377d7b60","Cargo.toml":"885f8ecc8197bec06e151fde7769dc7c267d2f25a4b573240463e8a40a5be298","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"0593d22d122d4bfec6407115e3907546312976f75473417aaa4c57ecd2095ae6","README.md":"7760d315d3efd2306affa4c8e4dcdb3a245ae30b3c9cfb9cffed2f1116c86362","ci/cgroups/Dockerfile":"567f00918a6422b363e2c21e57bd47cef9dcc03d0ab109c9605e052f83af7e89","examples/values.rs":"46c833324b7339d359054c4f8e8284259e860df206c552c63b5893ade59c16a6","fixtures/cgroups/cgroups/ceil/cpu.cfs_period_us":"d2ace393dc9388863d75d8de140df516d7ffe4aa7ed2f9a545aa71c9930d6638","fixtures/cgroups/cgroups/ceil/cpu.cfs_quota_us":"7ccd86cde0b22ffc2318f2509726d2a13053f6973e96dc5ca6965a56497e485e","fixtures/cgroups/cgroups/good/cpu.cfs_period_us":"d2ace393dc9388863d75d8de140df516d7ffe4aa7ed2f9a545aa71c9930d6638","fixtures/cgroups/cgroups/good/cpu.cfs_quota_us":"cdc3397c35d915e5fe61f8d2bdedcae00a225d55cc6b090580cde1b71c63463b","fixtures/cgroups/cgroups/zero-period/cpu.cfs_period_us":"74d01a0c051c963d9a9b8ab9dbeab1723f0ad8534ea9fa6a942f358d7fa011b4","fixtures/cgroups/cgroups/zero-period/cpu.cfs_quota_us":"1e6ffd8a95fab538ddd645a767e8cc505722d5c8aaf008969f2ed8ab753ff61e","fixtures/cgroups/proc/cgroups/cgroup":"6812299a4409bfd831ed751fdbbfdd9c5749f69acd7b14c5b0a704271a1f74c6","fixtures/cgroups/proc/cgroups/mountinfo":"3187b0b1c0fa192790abced7d435190e8979059186055688e4c3c2ca013398c6","fixtures/cgroups/proc/cgroups/mountinfo_multi_opt":"d1e397752bc5c4558a3230ad847e89c6885362eab68a53b08dbf219de2e0138a","fixtures/cgroups/proc/cgroups/mountinfo_zero_opt":"91b03b270c76460bc19225aa4b743f893c7d32173e9609ef77e5a04814ab81ff","src/lib.rs":"81fc237964757a53689db848b1df4fed75bbd6a6e7a9063b6bb008161f8c22af","src/linux.rs":"a453e1056c130beaa7e2338222d04b5d3786f6325f5ca314ce9f4130079add2f"},"package":"19e64526ebdee182341572e50e9ad03965aa510cd94427a4549448f285e957a1"} \ No newline at end of file
+{"files":{"CHANGELOG.md":"66c7e8dedaa631c5e8d189b896efb691a97529358a56ad78282fbd4cbae3c638","CONTRIBUTING.md":"2390961aab1bba026135338da1216b6cc828dfaeed9357d9c155c55a252d3efb","Cargo.lock":"5a782f53bfc63c9ef7a901472c8ae3599d3f5ecb6f8bb9ce871235b4d9c8bfc1","Cargo.toml":"7a8fa6033b28607f305e144269902dc62c082cf02f76780fcce866810fcf931b","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"0593d22d122d4bfec6407115e3907546312976f75473417aaa4c57ecd2095ae6","README.md":"7760d315d3efd2306affa4c8e4dcdb3a245ae30b3c9cfb9cffed2f1116c86362","ci/cgroups/Dockerfile":"567f00918a6422b363e2c21e57bd47cef9dcc03d0ab109c9605e052f83af7e89","examples/values.rs":"46c833324b7339d359054c4f8e8284259e860df206c552c63b5893ade59c16a6","fixtures/cgroups/cgroups/ceil/cpu.cfs_period_us":"d2ace393dc9388863d75d8de140df516d7ffe4aa7ed2f9a545aa71c9930d6638","fixtures/cgroups/cgroups/ceil/cpu.cfs_quota_us":"7ccd86cde0b22ffc2318f2509726d2a13053f6973e96dc5ca6965a56497e485e","fixtures/cgroups/cgroups/good/cpu.cfs_period_us":"d2ace393dc9388863d75d8de140df516d7ffe4aa7ed2f9a545aa71c9930d6638","fixtures/cgroups/cgroups/good/cpu.cfs_quota_us":"cdc3397c35d915e5fe61f8d2bdedcae00a225d55cc6b090580cde1b71c63463b","fixtures/cgroups/cgroups/zero-period/cpu.cfs_period_us":"74d01a0c051c963d9a9b8ab9dbeab1723f0ad8534ea9fa6a942f358d7fa011b4","fixtures/cgroups/cgroups/zero-period/cpu.cfs_quota_us":"1e6ffd8a95fab538ddd645a767e8cc505722d5c8aaf008969f2ed8ab753ff61e","fixtures/cgroups/proc/cgroups/cgroup":"6812299a4409bfd831ed751fdbbfdd9c5749f69acd7b14c5b0a704271a1f74c6","fixtures/cgroups/proc/cgroups/mountinfo":"3187b0b1c0fa192790abced7d435190e8979059186055688e4c3c2ca013398c6","fixtures/cgroups/proc/cgroups/mountinfo_multi_opt":"d1e397752bc5c4558a3230ad847e89c6885362eab68a53b08dbf219de2e0138a","fixtures/cgroups/proc/cgroups/mountinfo_zero_opt":"91b03b270c76460bc19225aa4b743f893c7d32173e9609ef77e5a04814ab81ff","fixtures/cgroups2/cgroups/ceil/cpu.max":"d6eb496d0851963c9e6a9cf33c0a3ef2f08cbbee2a387c093b30ceca23239226","fixtures/cgroups2/cgroups/good/cpu.max":"2f8bd783be33cf80b6a2a0cf810f81ccf85f988c72c9bb76fa6172b6e4ec2b02","fixtures/cgroups2/cgroups/zero-period/cpu.max":"f42f2771a5f669873e0f45400ff64c481618029fe14a5bcb15d51b426646a9aa","fixtures/cgroups2/proc/cgroups/cgroup":"f8327c16331cc79ecbf875bd7bea6cf1831757589c39e16a149bfb3b6f81f7c8","fixtures/cgroups2/proc/cgroups/cgroup_multi":"96e107b552b1f51a8e96199c12e42dc3807e207d5a1be4a901f49db8e23ed869","fixtures/cgroups2/proc/cgroups/mountinfo":"7ac8cda160d4d80de73195fda560b46ed3826f43ef07fb513308549b32b08b56","src/lib.rs":"81fc237964757a53689db848b1df4fed75bbd6a6e7a9063b6bb008161f8c22af","src/linux.rs":"378ea174d00d560e16fda4b09fb523dda5f4c6f420e46af7d317393a39d88dd1"},"package":"0fac9e2da13b5eb447a6ce3d392f23a29d8694bff781bf03a16cd9ac8697593b"} \ No newline at end of file
diff --git a/vendor/num_cpus/CHANGELOG.md b/vendor/num_cpus/CHANGELOG.md
index 5496ace25..5be725c54 100644
--- a/vendor/num_cpus/CHANGELOG.md
+++ b/vendor/num_cpus/CHANGELOG.md
@@ -1,3 +1,16 @@
+## v1.15.0
+
+### Fixes
+
+- update hermit-abi
+
+## v1.14.0
+
+### Features
+
+- add support for cgroups v2
+- Skip reading files in Miri
+
## v1.13.1
### Fixes
diff --git a/vendor/num_cpus/Cargo.lock b/vendor/num_cpus/Cargo.lock
index 93137c3ea..1e955390a 100644
--- a/vendor/num_cpus/Cargo.lock
+++ b/vendor/num_cpus/Cargo.lock
@@ -4,9 +4,9 @@ version = 3
[[package]]
name = "hermit-abi"
-version = "0.1.3"
+version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "307c3c9f937f38e3534b1d6447ecf090cafcc9744e4a6360e8b037b2cf5af120"
+checksum = "ee512640fe35acbfb4bb779db6f0d80704c2cacfa2e39b601ef3e3f47d1ae4c7"
dependencies = [
"libc",
]
@@ -19,7 +19,7 @@ checksum = "1a31a0627fdf1f6a39ec0dd577e101440b7db22672c0901fe00a9a6fbb5c24e8"
[[package]]
name = "num_cpus"
-version = "1.13.1"
+version = "1.15.0"
dependencies = [
"hermit-abi",
"libc",
diff --git a/vendor/num_cpus/Cargo.toml b/vendor/num_cpus/Cargo.toml
index f708cc878..6d62e8626 100644
--- a/vendor/num_cpus/Cargo.toml
+++ b/vendor/num_cpus/Cargo.toml
@@ -11,16 +11,22 @@
[package]
name = "num_cpus"
-version = "1.13.1"
+version = "1.15.0"
authors = ["Sean McArthur <sean@seanmonstar.com>"]
description = "Get the number of CPUs on a machine."
documentation = "https://docs.rs/num_cpus"
readme = "README.md"
-keywords = ["cpu", "cpus", "cores"]
+keywords = [
+ "cpu",
+ "cpus",
+ "cores",
+]
categories = ["hardware-support"]
license = "MIT OR Apache-2.0"
repository = "https://github.com/seanmonstar/num_cpus"
+
[target."cfg(all(any(target_arch = \"x86_64\", target_arch = \"aarch64\"), target_os = \"hermit\"))".dependencies.hermit-abi]
-version = "0.1.3"
+version = "0.2.6"
+
[target."cfg(not(windows))".dependencies.libc]
version = "0.2.26"
diff --git a/vendor/num_cpus/fixtures/cgroups2/cgroups/ceil/cpu.max b/vendor/num_cpus/fixtures/cgroups2/cgroups/ceil/cpu.max
new file mode 100644
index 000000000..833a8f2d3
--- /dev/null
+++ b/vendor/num_cpus/fixtures/cgroups2/cgroups/ceil/cpu.max
@@ -0,0 +1 @@
+150000 100000
diff --git a/vendor/num_cpus/fixtures/cgroups2/cgroups/good/cpu.max b/vendor/num_cpus/fixtures/cgroups2/cgroups/good/cpu.max
new file mode 100644
index 000000000..e469067a6
--- /dev/null
+++ b/vendor/num_cpus/fixtures/cgroups2/cgroups/good/cpu.max
@@ -0,0 +1 @@
+600000 100000
diff --git a/vendor/num_cpus/fixtures/cgroups2/cgroups/zero-period/cpu.max b/vendor/num_cpus/fixtures/cgroups2/cgroups/zero-period/cpu.max
new file mode 100644
index 000000000..24e757f51
--- /dev/null
+++ b/vendor/num_cpus/fixtures/cgroups2/cgroups/zero-period/cpu.max
@@ -0,0 +1 @@
+600000 0
diff --git a/vendor/num_cpus/fixtures/cgroups2/proc/cgroups/cgroup b/vendor/num_cpus/fixtures/cgroups2/proc/cgroups/cgroup
new file mode 100644
index 000000000..35b49db2c
--- /dev/null
+++ b/vendor/num_cpus/fixtures/cgroups2/proc/cgroups/cgroup
@@ -0,0 +1,2 @@
+12::/
+3::/user.slice
diff --git a/vendor/num_cpus/fixtures/cgroups2/proc/cgroups/cgroup_multi b/vendor/num_cpus/fixtures/cgroups2/proc/cgroups/cgroup_multi
new file mode 100644
index 000000000..1a9282a6e
--- /dev/null
+++ b/vendor/num_cpus/fixtures/cgroups2/proc/cgroups/cgroup_multi
@@ -0,0 +1,3 @@
+12::/
+11:cpu,cpuacct:/
+3::/user.slice
diff --git a/vendor/num_cpus/fixtures/cgroups2/proc/cgroups/mountinfo b/vendor/num_cpus/fixtures/cgroups2/proc/cgroups/mountinfo
new file mode 100644
index 000000000..da36e4102
--- /dev/null
+++ b/vendor/num_cpus/fixtures/cgroups2/proc/cgroups/mountinfo
@@ -0,0 +1,5 @@
+1 0 8:1 / / rw,noatime shared:1 - ext4 /dev/sda1 rw,errors=remount-ro,data=reordered
+2 1 0:1 / /dev rw,relatime shared:2 - devtmpfs udev rw,size=10240k,nr_inodes=16487629,mode=755
+3 1 0:2 / /proc rw,nosuid,nodev,noexec,relatime shared:3 - proc proc rw
+4 1 0:3 / /sys rw,nosuid,nodev,noexec,relatime shared:4 - sysfs sysfs rw
+5 4 0:4 / /sys/fs/cgroup rw,nosuid,nodev,noexec,relatime shared:5 - cgroup2 cgroup2 rw,nsdelegate,memory_recursiveprot
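The cgroup v2 fixtures encode the quota as a single `cpu.max` file holding `<quota> <period>`; the CPU count is the quota divided by the period, rounded up, with a zero period rejected. A standalone sketch of that calculation (the crate does it through the private `Cgroup::max`/`cpu_quota` methods in the linux.rs hunks that follow; `cpus_from_cpu_max` is an illustrative name):

```rust
// Illustrative helper, not the crate's API.
fn cpus_from_cpu_max(contents: &str) -> Option<usize> {
    let mut fields = contents.lines().next()?.split(' ');
    let quota: usize = fields.next()?.parse().ok()?; // a literal "max" (unlimited) yields None
    let period: usize = fields.next()?.parse().ok()?;
    if period == 0 {
        return None; // mirrors the divide-by-zero guard in linux.rs
    }
    Some((quota + period - 1) / period) // round up, matching the "ceil" fixtures
}

fn main() {
    assert_eq!(cpus_from_cpu_max("600000 100000\n"), Some(6)); // fixture "good"
    assert_eq!(cpus_from_cpu_max("150000 100000\n"), Some(2)); // fixture "ceil"
    assert_eq!(cpus_from_cpu_max("600000 0\n"), None);         // fixture "zero-period"
    assert_eq!(cpus_from_cpu_max("max 100000\n"), None);
}
```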
diff --git a/vendor/num_cpus/src/linux.rs b/vendor/num_cpus/src/linux.rs
index 36f472717..295c925fb 100644
--- a/vendor/num_cpus/src/linux.rs
+++ b/vendor/num_cpus/src/linux.rs
@@ -126,6 +126,11 @@ fn init_cgroups() {
// Should only be called once
debug_assert!(CGROUPS_CPUS.load(Ordering::SeqCst) == 0);
+ // Fails in Miri by default (cannot open files), and Miri does not have parallelism anyway.
+ if cfg!(miri) {
+ return;
+ }
+
if let Some(quota) = load_cgroups("/proc/self/cgroup", "/proc/self/mountinfo") {
if quota == 0 {
return;
@@ -144,27 +149,36 @@ where
P2: AsRef<Path>,
{
let subsys = some!(Subsys::load_cpu(cgroup_proc));
- let mntinfo = some!(MountInfo::load_cpu(mountinfo_proc));
+ let mntinfo = some!(MountInfo::load_cpu(mountinfo_proc, subsys.version));
let cgroup = some!(Cgroup::translate(mntinfo, subsys));
cgroup.cpu_quota()
}
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+enum CgroupVersion {
+ V1,
+ V2,
+}
+
struct Cgroup {
+ version: CgroupVersion,
base: PathBuf,
}
struct MountInfo {
+ version: CgroupVersion,
root: String,
mount_point: String,
}
struct Subsys {
+ version: CgroupVersion,
base: String,
}
impl Cgroup {
- fn new(dir: PathBuf) -> Cgroup {
- Cgroup { base: dir }
+ fn new(version: CgroupVersion, dir: PathBuf) -> Cgroup {
+ Cgroup { version: version, base: dir }
}
fn translate(mntinfo: MountInfo, subsys: Subsys) -> Option<Cgroup> {
@@ -181,12 +195,14 @@ impl Cgroup {
// join(mp.MountPoint, relPath)
let mut path = PathBuf::from(mntinfo.mount_point);
path.push(rel_from_root);
- Some(Cgroup::new(path))
+ Some(Cgroup::new(mntinfo.version, path))
}
fn cpu_quota(&self) -> Option<usize> {
- let quota_us = some!(self.quota_us());
- let period_us = some!(self.period_us());
+ let (quota_us, period_us) = match self.version {
+ CgroupVersion::V1 => (some!(self.quota_us()), some!(self.period_us())),
+ CgroupVersion::V2 => some!(self.max()),
+ };
// protect against dividing by zero
if period_us == 0 {
@@ -207,25 +223,41 @@ impl Cgroup {
self.param("cpu.cfs_period_us")
}
+ fn max(&self) -> Option<(usize, usize)> {
+ let max = some!(self.raw_param("cpu.max"));
+ let mut max = some!(max.lines().next()).split(' ');
+
+ let quota = some!(max.next().and_then(|quota| quota.parse().ok()));
+ let period = some!(max.next().and_then(|period| period.parse().ok()));
+
+ Some((quota, period))
+ }
+
fn param(&self, param: &str) -> Option<usize> {
+ let buf = some!(self.raw_param(param));
+
+ buf.trim().parse().ok()
+ }
+
+ fn raw_param(&self, param: &str) -> Option<String> {
let mut file = some!(File::open(self.base.join(param)).ok());
let mut buf = String::new();
some!(file.read_to_string(&mut buf).ok());
- buf.trim().parse().ok()
+ Some(buf)
}
}
impl MountInfo {
- fn load_cpu<P: AsRef<Path>>(proc_path: P) -> Option<MountInfo> {
+ fn load_cpu<P: AsRef<Path>>(proc_path: P, version: CgroupVersion) -> Option<MountInfo> {
let file = some!(File::open(proc_path).ok());
let file = BufReader::new(file);
file.lines()
.filter_map(|result| result.ok())
.filter_map(MountInfo::parse_line)
- .next()
+ .find(|mount_info| mount_info.version == version)
}
fn parse_line(line: String) -> Option<MountInfo> {
@@ -247,19 +279,25 @@ impl MountInfo {
};
// 7 5 0:6 / /sys/fs/cgroup/cpu,cpuacct rw,nosuid,nodev,noexec,relatime shared:7 - <cgroup> cgroup rw,cpu,cpuacct
- if fields.next() != Some("cgroup") {
- return None;
- }
+ let version = match fields.next() {
+ Some("cgroup") => CgroupVersion::V1,
+ Some("cgroup2") => CgroupVersion::V2,
+ _ => return None,
+ };
- // 7 5 0:6 / /sys/fs/cgroup/cpu,cpuacct rw,nosuid,nodev,noexec,relatime shared:7 - cgroup cgroup <rw,cpu,cpuacct>
- let super_opts = some!(fields.nth(1));
+ // cgroups2 only has a single mount point
+ if version == CgroupVersion::V1 {
+ // 7 5 0:6 / /sys/fs/cgroup/cpu,cpuacct rw,nosuid,nodev,noexec,relatime shared:7 - cgroup cgroup <rw,cpu,cpuacct>
+ let super_opts = some!(fields.nth(1));
- // We only care about the 'cpu' option
- if !super_opts.split(',').any(|opt| opt == "cpu") {
- return None;
+ // We only care about the 'cpu' option
+ if !super_opts.split(',').any(|opt| opt == "cpu") {
+ return None;
+ }
}
Some(MountInfo {
+ version: version,
root: mnt_root.to_owned(),
mount_point: mnt_point.to_owned(),
})
@@ -274,7 +312,14 @@ impl Subsys {
file.lines()
.filter_map(|result| result.ok())
.filter_map(Subsys::parse_line)
- .next()
+ .fold(None, |previous, line| {
+ // already-found v1 trumps v2 since it explicitly specifies its controllers
+ if previous.is_some() && line.version == CgroupVersion::V2 {
+ return previous;
+ }
+
+ Some(line)
+ })
}
fn parse_line(line: String) -> Option<Subsys> {
@@ -284,11 +329,18 @@ impl Subsys {
let sub_systems = some!(fields.nth(1));
- if !sub_systems.split(',').any(|sub| sub == "cpu") {
+ let version = if sub_systems.is_empty() {
+ CgroupVersion::V2
+ } else {
+ CgroupVersion::V1
+ };
+
+ if version == CgroupVersion::V1 && !sub_systems.split(',').any(|sub| sub == "cpu") {
return None;
}
fields.next().map(|path| Subsys {
+ version: version,
base: path.to_owned(),
})
}
@@ -296,123 +348,248 @@ impl Subsys {
#[cfg(test)]
mod tests {
- use super::{Cgroup, MountInfo, Subsys};
- use std::path::{Path, PathBuf};
+ mod v1 {
+ use super::super::{Cgroup, CgroupVersion, MountInfo, Subsys};
+ use std::path::{Path, PathBuf};
- // `static_in_const` feature is not stable in Rust 1.13.
- static FIXTURES_PROC: &'static str = "fixtures/cgroups/proc/cgroups";
+ // `static_in_const` feature is not stable in Rust 1.13.
+ static FIXTURES_PROC: &'static str = "fixtures/cgroups/proc/cgroups";
- static FIXTURES_CGROUPS: &'static str = "fixtures/cgroups/cgroups";
+ static FIXTURES_CGROUPS: &'static str = "fixtures/cgroups/cgroups";
- macro_rules! join {
- ($base:expr, $($path:expr),+) => ({
- Path::new($base)
- $(.join($path))+
- })
- }
+ macro_rules! join {
+ ($base:expr, $($path:expr),+) => ({
+ Path::new($base)
+ $(.join($path))+
+ })
+ }
- #[test]
- fn test_load_mountinfo() {
- // test only one optional fields
- let path = join!(FIXTURES_PROC, "mountinfo");
+ #[test]
+ fn test_load_mountinfo() {
+ // test only one optional fields
+ let path = join!(FIXTURES_PROC, "mountinfo");
- let mnt_info = MountInfo::load_cpu(path).unwrap();
+ let mnt_info = MountInfo::load_cpu(path, CgroupVersion::V1).unwrap();
- assert_eq!(mnt_info.root, "/");
- assert_eq!(mnt_info.mount_point, "/sys/fs/cgroup/cpu,cpuacct");
+ assert_eq!(mnt_info.root, "/");
+ assert_eq!(mnt_info.mount_point, "/sys/fs/cgroup/cpu,cpuacct");
- // test zero optional field
- let path = join!(FIXTURES_PROC, "mountinfo_zero_opt");
+ // test zero optional field
+ let path = join!(FIXTURES_PROC, "mountinfo_zero_opt");
- let mnt_info = MountInfo::load_cpu(path).unwrap();
+ let mnt_info = MountInfo::load_cpu(path, CgroupVersion::V1).unwrap();
- assert_eq!(mnt_info.root, "/");
- assert_eq!(mnt_info.mount_point, "/sys/fs/cgroup/cpu,cpuacct");
+ assert_eq!(mnt_info.root, "/");
+ assert_eq!(mnt_info.mount_point, "/sys/fs/cgroup/cpu,cpuacct");
- // test multi optional fields
- let path = join!(FIXTURES_PROC, "mountinfo_multi_opt");
+ // test multi optional fields
+ let path = join!(FIXTURES_PROC, "mountinfo_multi_opt");
- let mnt_info = MountInfo::load_cpu(path).unwrap();
+ let mnt_info = MountInfo::load_cpu(path, CgroupVersion::V1).unwrap();
- assert_eq!(mnt_info.root, "/");
- assert_eq!(mnt_info.mount_point, "/sys/fs/cgroup/cpu,cpuacct");
- }
+ assert_eq!(mnt_info.root, "/");
+ assert_eq!(mnt_info.mount_point, "/sys/fs/cgroup/cpu,cpuacct");
+ }
- #[test]
- fn test_load_subsys() {
- let path = join!(FIXTURES_PROC, "cgroup");
+ #[test]
+ fn test_load_subsys() {
+ let path = join!(FIXTURES_PROC, "cgroup");
- let subsys = Subsys::load_cpu(path).unwrap();
+ let subsys = Subsys::load_cpu(path).unwrap();
- assert_eq!(subsys.base, "/");
- }
+ assert_eq!(subsys.base, "/");
+ assert_eq!(subsys.version, CgroupVersion::V1);
+ }
- #[test]
- fn test_cgroup_mount() {
- let cases = &[
- ("/", "/sys/fs/cgroup/cpu", "/", Some("/sys/fs/cgroup/cpu")),
- (
- "/docker/01abcd",
- "/sys/fs/cgroup/cpu",
- "/docker/01abcd",
- Some("/sys/fs/cgroup/cpu"),
- ),
- (
- "/docker/01abcd",
- "/sys/fs/cgroup/cpu",
- "/docker/01abcd/",
- Some("/sys/fs/cgroup/cpu"),
- ),
- (
- "/docker/01abcd",
- "/sys/fs/cgroup/cpu",
- "/docker/01abcd/large",
- Some("/sys/fs/cgroup/cpu/large"),
- ),
- // fails
- ("/docker/01abcd", "/sys/fs/cgroup/cpu", "/", None),
- ("/docker/01abcd", "/sys/fs/cgroup/cpu", "/docker", None),
- ("/docker/01abcd", "/sys/fs/cgroup/cpu", "/elsewhere", None),
- (
- "/docker/01abcd",
- "/sys/fs/cgroup/cpu",
- "/docker/01abcd-other-dir",
- None,
- ),
- ];
-
- for &(root, mount_point, subsys, expected) in cases.iter() {
- let mnt_info = MountInfo {
- root: root.into(),
- mount_point: mount_point.into(),
- };
- let subsys = Subsys {
- base: subsys.into(),
- };
+ #[test]
+ fn test_cgroup_mount() {
+ let cases = &[
+ ("/", "/sys/fs/cgroup/cpu", "/", Some("/sys/fs/cgroup/cpu")),
+ (
+ "/docker/01abcd",
+ "/sys/fs/cgroup/cpu",
+ "/docker/01abcd",
+ Some("/sys/fs/cgroup/cpu"),
+ ),
+ (
+ "/docker/01abcd",
+ "/sys/fs/cgroup/cpu",
+ "/docker/01abcd/",
+ Some("/sys/fs/cgroup/cpu"),
+ ),
+ (
+ "/docker/01abcd",
+ "/sys/fs/cgroup/cpu",
+ "/docker/01abcd/large",
+ Some("/sys/fs/cgroup/cpu/large"),
+ ),
+ // fails
+ ("/docker/01abcd", "/sys/fs/cgroup/cpu", "/", None),
+ ("/docker/01abcd", "/sys/fs/cgroup/cpu", "/docker", None),
+ ("/docker/01abcd", "/sys/fs/cgroup/cpu", "/elsewhere", None),
+ (
+ "/docker/01abcd",
+ "/sys/fs/cgroup/cpu",
+ "/docker/01abcd-other-dir",
+ None,
+ ),
+ ];
+
+ for &(root, mount_point, subsys, expected) in cases.iter() {
+ let mnt_info = MountInfo {
+ version: CgroupVersion::V1,
+ root: root.into(),
+ mount_point: mount_point.into(),
+ };
+ let subsys = Subsys {
+ version: CgroupVersion::V1,
+ base: subsys.into(),
+ };
+
+ let actual = Cgroup::translate(mnt_info, subsys).map(|c| c.base);
+ let expected = expected.map(PathBuf::from);
+ assert_eq!(actual, expected);
+ }
+ }
- let actual = Cgroup::translate(mnt_info, subsys).map(|c| c.base);
- let expected = expected.map(PathBuf::from);
- assert_eq!(actual, expected);
+ #[test]
+ fn test_cgroup_cpu_quota() {
+ let cgroup = Cgroup::new(CgroupVersion::V1, join!(FIXTURES_CGROUPS, "good"));
+ assert_eq!(cgroup.cpu_quota(), Some(6));
}
- }
- #[test]
- fn test_cgroup_cpu_quota() {
- let cgroup = Cgroup::new(join!(FIXTURES_CGROUPS, "good"));
- assert_eq!(cgroup.cpu_quota(), Some(6));
- }
+ #[test]
+ fn test_cgroup_cpu_quota_divide_by_zero() {
+ let cgroup = Cgroup::new(CgroupVersion::V1, join!(FIXTURES_CGROUPS, "zero-period"));
+ assert!(cgroup.quota_us().is_some());
+ assert_eq!(cgroup.period_us(), Some(0));
+ assert_eq!(cgroup.cpu_quota(), None);
+ }
- #[test]
- fn test_cgroup_cpu_quota_divide_by_zero() {
- let cgroup = Cgroup::new(join!(FIXTURES_CGROUPS, "zero-period"));
- assert!(cgroup.quota_us().is_some());
- assert_eq!(cgroup.period_us(), Some(0));
- assert_eq!(cgroup.cpu_quota(), None);
+ #[test]
+ fn test_cgroup_cpu_quota_ceil() {
+ let cgroup = Cgroup::new(CgroupVersion::V1, join!(FIXTURES_CGROUPS, "ceil"));
+ assert_eq!(cgroup.cpu_quota(), Some(2));
+ }
}
- #[test]
- fn test_cgroup_cpu_quota_ceil() {
- let cgroup = Cgroup::new(join!(FIXTURES_CGROUPS, "ceil"));
- assert_eq!(cgroup.cpu_quota(), Some(2));
+ mod v2 {
+ use super::super::{Cgroup, CgroupVersion, MountInfo, Subsys};
+ use std::path::{Path, PathBuf};
+
+ // `static_in_const` feature is not stable in Rust 1.13.
+ static FIXTURES_PROC: &'static str = "fixtures/cgroups2/proc/cgroups";
+
+ static FIXTURES_CGROUPS: &'static str = "fixtures/cgroups2/cgroups";
+
+ macro_rules! join {
+ ($base:expr, $($path:expr),+) => ({
+ Path::new($base)
+ $(.join($path))+
+ })
+ }
+
+ #[test]
+ fn test_load_mountinfo() {
+ // test only one optional fields
+ let path = join!(FIXTURES_PROC, "mountinfo");
+
+ let mnt_info = MountInfo::load_cpu(path, CgroupVersion::V2).unwrap();
+
+ assert_eq!(mnt_info.root, "/");
+ assert_eq!(mnt_info.mount_point, "/sys/fs/cgroup");
+ }
+
+ #[test]
+ fn test_load_subsys() {
+ let path = join!(FIXTURES_PROC, "cgroup");
+
+ let subsys = Subsys::load_cpu(path).unwrap();
+
+ assert_eq!(subsys.base, "/");
+ assert_eq!(subsys.version, CgroupVersion::V2);
+ }
+
+ #[test]
+ fn test_load_subsys_multi() {
+ let path = join!(FIXTURES_PROC, "cgroup_multi");
+
+ let subsys = Subsys::load_cpu(path).unwrap();
+
+ assert_eq!(subsys.base, "/");
+ assert_eq!(subsys.version, CgroupVersion::V1);
+ }
+
+ #[test]
+ fn test_cgroup_mount() {
+ let cases = &[
+ ("/", "/sys/fs/cgroup/cpu", "/", Some("/sys/fs/cgroup/cpu")),
+ (
+ "/docker/01abcd",
+ "/sys/fs/cgroup/cpu",
+ "/docker/01abcd",
+ Some("/sys/fs/cgroup/cpu"),
+ ),
+ (
+ "/docker/01abcd",
+ "/sys/fs/cgroup/cpu",
+ "/docker/01abcd/",
+ Some("/sys/fs/cgroup/cpu"),
+ ),
+ (
+ "/docker/01abcd",
+ "/sys/fs/cgroup/cpu",
+ "/docker/01abcd/large",
+ Some("/sys/fs/cgroup/cpu/large"),
+ ),
+ // fails
+ ("/docker/01abcd", "/sys/fs/cgroup/cpu", "/", None),
+ ("/docker/01abcd", "/sys/fs/cgroup/cpu", "/docker", None),
+ ("/docker/01abcd", "/sys/fs/cgroup/cpu", "/elsewhere", None),
+ (
+ "/docker/01abcd",
+ "/sys/fs/cgroup/cpu",
+ "/docker/01abcd-other-dir",
+ None,
+ ),
+ ];
+
+ for &(root, mount_point, subsys, expected) in cases.iter() {
+ let mnt_info = MountInfo {
+ version: CgroupVersion::V1,
+ root: root.into(),
+ mount_point: mount_point.into(),
+ };
+ let subsys = Subsys {
+ version: CgroupVersion::V1,
+ base: subsys.into(),
+ };
+
+ let actual = Cgroup::translate(mnt_info, subsys).map(|c| c.base);
+ let expected = expected.map(PathBuf::from);
+ assert_eq!(actual, expected);
+ }
+ }
+
+ #[test]
+ fn test_cgroup_cpu_quota() {
+ let cgroup = Cgroup::new(CgroupVersion::V2, join!(FIXTURES_CGROUPS, "good"));
+ assert_eq!(cgroup.cpu_quota(), Some(6));
+ }
+
+ #[test]
+ fn test_cgroup_cpu_quota_divide_by_zero() {
+ let cgroup = Cgroup::new(CgroupVersion::V2, join!(FIXTURES_CGROUPS, "zero-period"));
+ let period = cgroup.max().map(|max| max.1);
+
+ assert_eq!(period, Some(0));
+ assert_eq!(cgroup.cpu_quota(), None);
+ }
+
+ #[test]
+ fn test_cgroup_cpu_quota_ceil() {
+ let cgroup = Cgroup::new(CgroupVersion::V2, join!(FIXTURES_CGROUPS, "ceil"));
+ assert_eq!(cgroup.cpu_quota(), Some(2));
+ }
}
}
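
The V1 and V2 fixtures above pin down the quota arithmetic: a missing value or a zero period yields no quota, and fractional results round up. Below is a minimal standalone sketch of that calculation; the `cpu_quota` helper and the fixture-style numbers in `main` are illustrative assumptions, not the crate's private `Cgroup` implementation.

```rust
// Standalone sketch of the quota arithmetic exercised by the tests above.
// The numeric values below are illustrative, not the actual fixture contents.
fn cpu_quota(quota_us: Option<u64>, period_us: Option<u64>) -> Option<usize> {
    match (quota_us, period_us) {
        // A zero period would divide by zero, and a missing value means "no limit".
        (_, Some(0)) | (None, _) | (_, None) => None,
        // Round up, so e.g. 1.5 CPUs of quota still reserves 2 workers.
        (Some(q), Some(p)) => Some(((q + p - 1) / p) as usize),
    }
}

fn main() {
    assert_eq!(cpu_quota(Some(600_000), Some(100_000)), Some(6)); // "good"-style case
    assert_eq!(cpu_quota(Some(100_000), Some(0)), None);          // "zero-period" case
    assert_eq!(cpu_quota(Some(150_000), Some(100_000)), Some(2)); // "ceil" case
    println!("quota sketch ok");
}
```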
diff --git a/vendor/object/.cargo-checksum.json b/vendor/object/.cargo-checksum.json
index 7453e6945..4b0e67365 100644
--- a/vendor/object/.cargo-checksum.json
+++ b/vendor/object/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"CHANGELOG.md":"e14387eeab6f5becbfb1fdaadd5056f5316a1b386620afeff1bc0553d3419ae6","Cargo.toml":"b21db34ed7541075cd178edf290ebe328217f94dd0347d425d900d3a1fb5f16a","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"0b74dfa0bcee5c420c6b7f67b4b2658f9ab8388c97b8e733975f2cecbdd668a6","README.md":"a91c65ccbcb9e5bb6344a537a28d43e6f8ff43f7a730493521371d69c7a07045","clippy.toml":"50fd0cdaae995561b1d688c9410fe99335fc7ac3916a400dafd25ff63f8215f7","src/archive.rs":"d6cead723242c26db2967b63385b79ed2008980a8c64b123a5eecffd7ed388fc","src/common.rs":"721281f967576b136bb66d368babc0e0497f61ca201208915f73f22ba9c24852","src/elf.rs":"50116decb5f2a05c5c2b12e287a0d41d6391c34549fd1188ee2b86bbb04c12bf","src/endian.rs":"ceaad1b651627ad9e6218a168f87353ae651f5d76c9f61bc8099dff25007405e","src/lib.rs":"d12d8b0b9ecb80ce0624c818acad1ce5d0a51b8e12960d913c1af31cec71ef50","src/macho.rs":"50f7afc1bba3c59542f55b7b5c7357fb71fef52235f1b568f08f3efb6780aaf5","src/pe.rs":"0e9f47653eb6255a04948a0494d453fd6d416bf7c4a0c43f0cfc4a2b86cc1ac8","src/pod.rs":"d2967732f0052e6cfa18a2dd62c57bc3b640a20eb9a6db9f39836000ceabb399","src/read/any.rs":"1057d642dd06b8d20c953ac1ef4e2c99ace06632283e9497925c48c28d58ea3d","src/read/archive.rs":"479574cff125a74fc5512d75c1531da3bb006005fe544ffd2531a7d4f35a9bb4","src/read/coff/comdat.rs":"36846a11b285ad560dc1f18c67b1659f19fe8b5f11a350fe8b7bc5d27d0afb65","src/read/coff/file.rs":"874b4b357dbcb1a6a29c993e908b4044c9f90b0acd402cc8504ab84c3a036e1d","src/read/coff/mod.rs":"5eed1c0ca7cf044b3173223b06afacc4961a0515ef2478fffa35641f4ee364ee","src/read/coff/relocation.rs":"3b8e1405921eb16b8d38da4639a81be0546dca51c7747c126729d7a15da93a17","src/read/coff/section.rs":"dcb5b697a9371b1de6584603266673badfcd5d7f82b5472ead37555d69449e19","src/read/coff/symbol.rs":"52872aa7f306dc28a21d039895dedf8f06e6ad4ee54ebde7aed9c759616e38d5","src/read/elf/comdat.rs":"d39155e00c10e3f76d6776e2604d2ecd5039929979c179131101887d54a0e494","src/read/elf/compression.rs":"097ff8bdc78d01a1532b11c1c0cae3b35905128c7d98b471de188d46da3ff970","src/read/elf/dynamic.rs":"8f59bd6d352f6810be6b6dc02c2f88229f15aa02a42f8fc09bcf3f284d4b1021","src/read/elf/file.rs":"add48c004fb2e93718a4a0cdd4d12f52a563854e46904e1f3d02c19cda7fb52a","src/read/elf/hash.rs":"82123642ba71d2e56acae5bdbb59253b16ced8f6defdc58f4c37cafb7154a7b0","src/read/elf/mod.rs":"98b5bd46778d3c33e9d19dee2d294f61677dec16b28f22aa73340008c241fe09","src/read/elf/note.rs":"33bf89a85bb7927fd0123cd6c2919f0b9b0f8935db1ae7b446b75ea2717a476d","src/read/elf/relocation.rs":"f710c7a6f8e743f8f4172141e03294399ea4d59f4726650168e3c66544fe2f5f","src/read/elf/section.rs":"2c535c3ccd4d333a4abdfe9d14eeee0afb7b11c536387ba2c150e4ead7fb51bd","src/read/elf/segment.rs":"cdc9740d996bb262d99db3e9a50e3fda2a5802cbd9864ebeae36e5f9f96d58bf","src/read/elf/symbol.rs":"34e0fc849ffe3ae9af6a43954eb817729186313ac5edcbb0eed030b6d451982c","src/read/elf/version.rs":"154510d6868730f2d2fae2e0d9bdb697907ed48ded51c55ba115edd98d819e82","src/read/macho/dyld_cache.rs":"1526f518dfdcc9e024e6c0284ff75018e33413c14ea2ee00c8a99c31ccce6413","src/read/macho/fat.rs":"d27a1052f2e47cd5b798a6359f33c3bfe7f7971b13259f6545118213ace7f5dd","src/read/macho/file.rs":"21fa6d56cc241881ee241d004c1bf4ac0a4b40e3fa0bac2d0da562c7c429e01d","src/read/macho/load_command.rs":"efaf1ab5fe22fddc30af03f9cbd2a076f373cba3aab29329050c27a5a94b581f","src/read/macho/mod.rs":"23b353da3b7e076c68a067776e6a5b346a746116ac42c2c90bafd95ad54a90b3","src/read/macho/relocation.rs":"77fa3e65ccf5884433c96cdc39c457b8a0c430432e5ff9cad22a8becf13f01
83","src/read/macho/section.rs":"78e98624691ef9872cc090187d90e09f9a54b47b3b1f174fd4f57e0640f6ceeb","src/read/macho/segment.rs":"cd3727796b672adba03443fadc4f458e117c3f56c2ebb318e32c408329a40492","src/read/macho/symbol.rs":"a2e50bcc1ebb5ea356a6f08b9083533c62ae3b3af90de18e8c942bf06589e85a","src/read/mod.rs":"d4615cecc02fb051314b16b900ff0d2bff561848ce36a5fd1398d66ba0013d9a","src/read/pe/data_directory.rs":"0b358dd5c9df7db570efc9c69896b88e8d13560a51fd7385f18d9e5ad8ac955b","src/read/pe/export.rs":"07ac5ec7b67d4a09037d8f11eb4426d96515687ee299df2a3d8cd4fd93eb2036","src/read/pe/file.rs":"485528fa444b3c8a20884dbb934422d3c7381f9d0351a5d14ca0813b14c51fcc","src/read/pe/import.rs":"ea20dfc0d462ba20e149bf9408f4ec1d0b202abf1f15536f6d091f0c0e756ac8","src/read/pe/mod.rs":"69832b7f4ccd93b59e08bafcbd0d3226c450d7801ad49ab554b38b660c8997fd","src/read/pe/relocation.rs":"0335c06b6d37df4939c8b88044313e88661ee45e5a57d2eec40143f2fe481838","src/read/pe/resource.rs":"21cc2077ff6f20d854b94134af29062166cc561c7bb1e5e47371e7965221f011","src/read/pe/rich.rs":"abf005004e69a4533132358aa54df73d7cc3a744738518e5b11c6f6f861e9f00","src/read/pe/section.rs":"f936dd73dbb1838cf558483949acdfefce701cdc22c21c96db345062c1ce641d","src/read/read_cache.rs":"939b66cfc11fc022c4e4b78fcad63b375516967da2274da78fb200271d934a53","src/read/read_ref.rs":"5fe6f717c79b07fecac9ee8ba15740c7a9581c36f1e356119e99dd669af6c39c","src/read/traits.rs":"b0e4cf654301843fa4db05a6fb1e22c454eb45da6af99b66e631f2b49bab9e21","src/read/util.rs":"c329a240689b177613049e00faf7928e6cd6aad021542cd708cc06b598c6900e","src/read/wasm.rs":"e5bd4cf1282c877f55401b14bb92beb1a8b7f222e4facd2b8fcf6a8599c1ba7e","src/read/xcoff/comdat.rs":"e7a74ce2c5817f29d184d0be1fc1309ff843a648b1539689561d85d1410d0cd5","src/read/xcoff/file.rs":"17f751578d052cb8f74ee56a4e17b053b06e82e4efbe943907943bc561fb301e","src/read/xcoff/mod.rs":"d0179d3f95797464ca5919563454d1123ce8c35dfc5f40ecd6ca0d002a9824a8","src/read/xcoff/relocation.rs":"ff30373e33bf79f3c690933044762460d9d852d1ec80883ee7205e80c424d849","src/read/xcoff/section.rs":"9bccdcbc0aa26b90a4a0b1b125aee52628e599dce74a097d6668df14e95be1be","src/read/xcoff/segment.rs":"7bee1d20185df21b2e00a581053095a6bc071b0ff003e04c4a3ef881fe990f45","src/read/xcoff/symbol.rs":"bcb7a57a107a145fad85d2c5325113d828e30fa8255023a6234d8b17234461c4","src/write/coff.rs":"52b1b402975fc84a5095050ee5be47097e90daf1a5379f9fc081c07b2b9fa432","src/write/elf/mod.rs":"1bb945edad539b4f19dda5d46c9b86fa4ea3721eedda77ca2595b5519c3e30f2","src/write/elf/object.rs":"e72b159e1a03e7f37d87249d7f43cccd8291cc41e41ae711dda61927f4bf65ed","src/write/elf/writer.rs":"a0bf5bb8bcd9d25510ce14f3a070ad9f9bfed3becc70ee600b2c73bc1e0eccd8","src/write/macho.rs":"cfd3a1ee65c800c53fb6c878882e8ae853b68756cbd7624386f48f92b56ff49f","src/write/mod.rs":"4ee5a5f971a4a4b184169c6b8dc50d79eb89937316b4677a1f526fef4a0ed106","src/write/pe.rs":"6c72185705a3e067c481f2b9f81c64a84e062e67781928e58fd1150314dad8f9","src/write/string.rs":"674c5913d0702cbaebe79d2a7e91f6a04327ac30e370557f02928eee1b0bb0d0","src/write/util.rs":"0e96abed0e8aae33c2efd8b836f29647eac310b58fad4029b68894e9f57bf137","src/xcoff.rs":"3580336207bdfe01631f528678b72b6a13b876716f82ac789620011516c67051","tests/integration.rs":"0fa704827e4da1be38dac2e3820d92f6b20c4d415803b04f67c3516020b1de97","tests/parse_self.rs":"81b44b2dd1de9a5d8c18d9bd8926156e39fb83931837afa8ca344da0d309aeee","tests/read/coff.rs":"11bf5a1b5770a4312e334580975a7cac9d69f1b12a4d75f25aacc931df01c5c7","tests/read/mod.rs":"7833826f169ac3be2b4f274e5fc8cf4a51742bd0010803ff0dc20ea5643a7e61","tests/round_trip/bs
s.rs":"849d69b063fd757fed02219dd81e9d13b82068a2025d2cc5cfd40cf557e31bda","tests/round_trip/coff.rs":"8a25aab7164a5c8aa7a21279f8bae1f4d5f68a8d09c29a4ecd0d0c14564851cc","tests/round_trip/comdat.rs":"a8f729e218fee21e90b9f39b5cfcb4f80bc3ce26d3a297323667e6eb14f882cc","tests/round_trip/common.rs":"ced08ff559ca4d343ceef54bb4c581a3405cd96d6a1628ba43b7aab82070800b","tests/round_trip/elf.rs":"690015fb4d3e79ee6d41c4d3a8e89a6806f1a0c313804707b83e44fceefac472","tests/round_trip/macho.rs":"b23931f506345b26ce3b4908dc2ce02f704603c622d39f5e9e7c8529f2882818","tests/round_trip/mod.rs":"b0942a5e0ffff38c3c12276dfd96ca6b0516b48e86ede979e7da782b9e7530d2","tests/round_trip/section_flags.rs":"0e17639e5f86d576f039a294c274ce8db2e2a8add31a2fffc33a6e93a6d2791e","tests/round_trip/tls.rs":"23a49a1036b9173ece82a3080745930e5925e745280ab38866c9d3c29f463e63"},"package":"8d864c91689fdc196779b98dba0aceac6118594c2df6ee5d943eb6a8df4d107a"} \ No newline at end of file
+{"files":{"CHANGELOG.md":"598939c018d604e242cc44ebe3f192def44ab195fdc638b662e53f2c55435e4f","Cargo.toml":"b829ce9480a5feb1a27027bf6af9e6e09ba40f7aa2c7fd6c226cd8ea674f1646","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"0b74dfa0bcee5c420c6b7f67b4b2658f9ab8388c97b8e733975f2cecbdd668a6","README.md":"a91c65ccbcb9e5bb6344a537a28d43e6f8ff43f7a730493521371d69c7a07045","clippy.toml":"50fd0cdaae995561b1d688c9410fe99335fc7ac3916a400dafd25ff63f8215f7","src/archive.rs":"d6cead723242c26db2967b63385b79ed2008980a8c64b123a5eecffd7ed388fc","src/common.rs":"721281f967576b136bb66d368babc0e0497f61ca201208915f73f22ba9c24852","src/elf.rs":"f179b9de249e5a40d82161a14a176e2d5f6df94cf8b15b3dbab04ac824e9bbfa","src/endian.rs":"ceaad1b651627ad9e6218a168f87353ae651f5d76c9f61bc8099dff25007405e","src/lib.rs":"d12d8b0b9ecb80ce0624c818acad1ce5d0a51b8e12960d913c1af31cec71ef50","src/macho.rs":"50f7afc1bba3c59542f55b7b5c7357fb71fef52235f1b568f08f3efb6780aaf5","src/pe.rs":"0e9f47653eb6255a04948a0494d453fd6d416bf7c4a0c43f0cfc4a2b86cc1ac8","src/pod.rs":"d2967732f0052e6cfa18a2dd62c57bc3b640a20eb9a6db9f39836000ceabb399","src/read/any.rs":"1057d642dd06b8d20c953ac1ef4e2c99ace06632283e9497925c48c28d58ea3d","src/read/archive.rs":"479574cff125a74fc5512d75c1531da3bb006005fe544ffd2531a7d4f35a9bb4","src/read/coff/comdat.rs":"36846a11b285ad560dc1f18c67b1659f19fe8b5f11a350fe8b7bc5d27d0afb65","src/read/coff/file.rs":"874b4b357dbcb1a6a29c993e908b4044c9f90b0acd402cc8504ab84c3a036e1d","src/read/coff/mod.rs":"5eed1c0ca7cf044b3173223b06afacc4961a0515ef2478fffa35641f4ee364ee","src/read/coff/relocation.rs":"3b8e1405921eb16b8d38da4639a81be0546dca51c7747c126729d7a15da93a17","src/read/coff/section.rs":"dcb5b697a9371b1de6584603266673badfcd5d7f82b5472ead37555d69449e19","src/read/coff/symbol.rs":"52872aa7f306dc28a21d039895dedf8f06e6ad4ee54ebde7aed9c759616e38d5","src/read/elf/comdat.rs":"d39155e00c10e3f76d6776e2604d2ecd5039929979c179131101887d54a0e494","src/read/elf/compression.rs":"097ff8bdc78d01a1532b11c1c0cae3b35905128c7d98b471de188d46da3ff970","src/read/elf/dynamic.rs":"8f59bd6d352f6810be6b6dc02c2f88229f15aa02a42f8fc09bcf3f284d4b1021","src/read/elf/file.rs":"add48c004fb2e93718a4a0cdd4d12f52a563854e46904e1f3d02c19cda7fb52a","src/read/elf/hash.rs":"82123642ba71d2e56acae5bdbb59253b16ced8f6defdc58f4c37cafb7154a7b0","src/read/elf/mod.rs":"98b5bd46778d3c33e9d19dee2d294f61677dec16b28f22aa73340008c241fe09","src/read/elf/note.rs":"33bf89a85bb7927fd0123cd6c2919f0b9b0f8935db1ae7b446b75ea2717a476d","src/read/elf/relocation.rs":"f710c7a6f8e743f8f4172141e03294399ea4d59f4726650168e3c66544fe2f5f","src/read/elf/section.rs":"2c535c3ccd4d333a4abdfe9d14eeee0afb7b11c536387ba2c150e4ead7fb51bd","src/read/elf/segment.rs":"cdc9740d996bb262d99db3e9a50e3fda2a5802cbd9864ebeae36e5f9f96d58bf","src/read/elf/symbol.rs":"34e0fc849ffe3ae9af6a43954eb817729186313ac5edcbb0eed030b6d451982c","src/read/elf/version.rs":"154510d6868730f2d2fae2e0d9bdb697907ed48ded51c55ba115edd98d819e82","src/read/macho/dyld_cache.rs":"1526f518dfdcc9e024e6c0284ff75018e33413c14ea2ee00c8a99c31ccce6413","src/read/macho/fat.rs":"d27a1052f2e47cd5b798a6359f33c3bfe7f7971b13259f6545118213ace7f5dd","src/read/macho/file.rs":"21fa6d56cc241881ee241d004c1bf4ac0a4b40e3fa0bac2d0da562c7c429e01d","src/read/macho/load_command.rs":"efaf1ab5fe22fddc30af03f9cbd2a076f373cba3aab29329050c27a5a94b581f","src/read/macho/mod.rs":"23b353da3b7e076c68a067776e6a5b346a746116ac42c2c90bafd95ad54a90b3","src/read/macho/relocation.rs":"77fa3e65ccf5884433c96cdc39c457b8a0c430432e5ff9cad22a8becf13f01
83","src/read/macho/section.rs":"78e98624691ef9872cc090187d90e09f9a54b47b3b1f174fd4f57e0640f6ceeb","src/read/macho/segment.rs":"cd3727796b672adba03443fadc4f458e117c3f56c2ebb318e32c408329a40492","src/read/macho/symbol.rs":"a2e50bcc1ebb5ea356a6f08b9083533c62ae3b3af90de18e8c942bf06589e85a","src/read/mod.rs":"d4615cecc02fb051314b16b900ff0d2bff561848ce36a5fd1398d66ba0013d9a","src/read/pe/data_directory.rs":"0b358dd5c9df7db570efc9c69896b88e8d13560a51fd7385f18d9e5ad8ac955b","src/read/pe/export.rs":"07ac5ec7b67d4a09037d8f11eb4426d96515687ee299df2a3d8cd4fd93eb2036","src/read/pe/file.rs":"485528fa444b3c8a20884dbb934422d3c7381f9d0351a5d14ca0813b14c51fcc","src/read/pe/import.rs":"ea20dfc0d462ba20e149bf9408f4ec1d0b202abf1f15536f6d091f0c0e756ac8","src/read/pe/mod.rs":"69832b7f4ccd93b59e08bafcbd0d3226c450d7801ad49ab554b38b660c8997fd","src/read/pe/relocation.rs":"0335c06b6d37df4939c8b88044313e88661ee45e5a57d2eec40143f2fe481838","src/read/pe/resource.rs":"21cc2077ff6f20d854b94134af29062166cc561c7bb1e5e47371e7965221f011","src/read/pe/rich.rs":"abf005004e69a4533132358aa54df73d7cc3a744738518e5b11c6f6f861e9f00","src/read/pe/section.rs":"f936dd73dbb1838cf558483949acdfefce701cdc22c21c96db345062c1ce641d","src/read/read_cache.rs":"939b66cfc11fc022c4e4b78fcad63b375516967da2274da78fb200271d934a53","src/read/read_ref.rs":"5fe6f717c79b07fecac9ee8ba15740c7a9581c36f1e356119e99dd669af6c39c","src/read/traits.rs":"b0e4cf654301843fa4db05a6fb1e22c454eb45da6af99b66e631f2b49bab9e21","src/read/util.rs":"c329a240689b177613049e00faf7928e6cd6aad021542cd708cc06b598c6900e","src/read/wasm.rs":"e5bd4cf1282c877f55401b14bb92beb1a8b7f222e4facd2b8fcf6a8599c1ba7e","src/read/xcoff/comdat.rs":"e7a74ce2c5817f29d184d0be1fc1309ff843a648b1539689561d85d1410d0cd5","src/read/xcoff/file.rs":"17f751578d052cb8f74ee56a4e17b053b06e82e4efbe943907943bc561fb301e","src/read/xcoff/mod.rs":"d0179d3f95797464ca5919563454d1123ce8c35dfc5f40ecd6ca0d002a9824a8","src/read/xcoff/relocation.rs":"ff30373e33bf79f3c690933044762460d9d852d1ec80883ee7205e80c424d849","src/read/xcoff/section.rs":"9bccdcbc0aa26b90a4a0b1b125aee52628e599dce74a097d6668df14e95be1be","src/read/xcoff/segment.rs":"7bee1d20185df21b2e00a581053095a6bc071b0ff003e04c4a3ef881fe990f45","src/read/xcoff/symbol.rs":"bcb7a57a107a145fad85d2c5325113d828e30fa8255023a6234d8b17234461c4","src/write/coff.rs":"52b1b402975fc84a5095050ee5be47097e90daf1a5379f9fc081c07b2b9fa432","src/write/elf/mod.rs":"1bb945edad539b4f19dda5d46c9b86fa4ea3721eedda77ca2595b5519c3e30f2","src/write/elf/object.rs":"e72b159e1a03e7f37d87249d7f43cccd8291cc41e41ae711dda61927f4bf65ed","src/write/elf/writer.rs":"a0bf5bb8bcd9d25510ce14f3a070ad9f9bfed3becc70ee600b2c73bc1e0eccd8","src/write/macho.rs":"cfd3a1ee65c800c53fb6c878882e8ae853b68756cbd7624386f48f92b56ff49f","src/write/mod.rs":"4ee5a5f971a4a4b184169c6b8dc50d79eb89937316b4677a1f526fef4a0ed106","src/write/pe.rs":"6c72185705a3e067c481f2b9f81c64a84e062e67781928e58fd1150314dad8f9","src/write/string.rs":"674c5913d0702cbaebe79d2a7e91f6a04327ac30e370557f02928eee1b0bb0d0","src/write/util.rs":"0e96abed0e8aae33c2efd8b836f29647eac310b58fad4029b68894e9f57bf137","src/xcoff.rs":"3580336207bdfe01631f528678b72b6a13b876716f82ac789620011516c67051","tests/integration.rs":"0fa704827e4da1be38dac2e3820d92f6b20c4d415803b04f67c3516020b1de97","tests/parse_self.rs":"81b44b2dd1de9a5d8c18d9bd8926156e39fb83931837afa8ca344da0d309aeee","tests/read/coff.rs":"11bf5a1b5770a4312e334580975a7cac9d69f1b12a4d75f25aacc931df01c5c7","tests/read/mod.rs":"7833826f169ac3be2b4f274e5fc8cf4a51742bd0010803ff0dc20ea5643a7e61","tests/round_trip/bs
s.rs":"849d69b063fd757fed02219dd81e9d13b82068a2025d2cc5cfd40cf557e31bda","tests/round_trip/coff.rs":"8a25aab7164a5c8aa7a21279f8bae1f4d5f68a8d09c29a4ecd0d0c14564851cc","tests/round_trip/comdat.rs":"a8f729e218fee21e90b9f39b5cfcb4f80bc3ce26d3a297323667e6eb14f882cc","tests/round_trip/common.rs":"ced08ff559ca4d343ceef54bb4c581a3405cd96d6a1628ba43b7aab82070800b","tests/round_trip/elf.rs":"690015fb4d3e79ee6d41c4d3a8e89a6806f1a0c313804707b83e44fceefac472","tests/round_trip/macho.rs":"b23931f506345b26ce3b4908dc2ce02f704603c622d39f5e9e7c8529f2882818","tests/round_trip/mod.rs":"b0942a5e0ffff38c3c12276dfd96ca6b0516b48e86ede979e7da782b9e7530d2","tests/round_trip/section_flags.rs":"0e17639e5f86d576f039a294c274ce8db2e2a8add31a2fffc33a6e93a6d2791e","tests/round_trip/tls.rs":"23a49a1036b9173ece82a3080745930e5925e745280ab38866c9d3c29f463e63"},"package":"2b8c786513eb403643f2a88c244c2aaa270ef2153f55094587d0c48a3cf22a83"} \ No newline at end of file
diff --git a/vendor/object/CHANGELOG.md b/vendor/object/CHANGELOG.md
index 66d789db1..3865c24d4 100644
--- a/vendor/object/CHANGELOG.md
+++ b/vendor/object/CHANGELOG.md
@@ -2,6 +2,17 @@
--------------------------------------------------------------------------------
+## 0.30.2
+
+Released 2023/01/11.
+
+### Added
+
+* Added more ELF constants for AVR flags and relocations.
+ [#500](https://github.com/gimli-rs/object/pull/500)
+
+--------------------------------------------------------------------------------
+
## 0.30.1
Released 2023/01/04.
diff --git a/vendor/object/Cargo.toml b/vendor/object/Cargo.toml
index 0d90ab271..84efba254 100644
--- a/vendor/object/Cargo.toml
+++ b/vendor/object/Cargo.toml
@@ -12,7 +12,7 @@
[package]
edition = "2018"
name = "object"
-version = "0.30.1"
+version = "0.30.2"
exclude = [
"/.github",
"/testfiles",
diff --git a/vendor/object/src/elf.rs b/vendor/object/src/elf.rs
index ac9742080..edbfd9e3d 100644
--- a/vendor/object/src/elf.rs
+++ b/vendor/object/src/elf.rs
@@ -4238,12 +4238,128 @@ pub const R_AARCH64_TLSDESC: u32 = 1031;
/// STT_GNU_IFUNC relocation.
pub const R_AARCH64_IRELATIVE: u32 = 1032;
+// AVR values for `FileHeader*::e_flags`.
+
+/// Bitmask for `EF_AVR_ARCH_*`.
+pub const EF_AVR_ARCH: u32 = 0x7F;
+
+/// If set, it is assumed that the elf file uses local symbols as reference
+/// for the relocations so that linker relaxation is possible.
+pub const EF_AVR_LINKRELAX_PREPARED: u32 = 0x80;
+
+#[allow(missing_docs)]
+pub const EF_AVR_ARCH_AVR1: u32 = 1;
+#[allow(missing_docs)]
+pub const EF_AVR_ARCH_AVR2: u32 = 2;
+#[allow(missing_docs)]
+pub const EF_AVR_ARCH_AVR25: u32 = 25;
+#[allow(missing_docs)]
+pub const EF_AVR_ARCH_AVR3: u32 = 3;
+#[allow(missing_docs)]
+pub const EF_AVR_ARCH_AVR31: u32 = 31;
+#[allow(missing_docs)]
+pub const EF_AVR_ARCH_AVR35: u32 = 35;
+#[allow(missing_docs)]
+pub const EF_AVR_ARCH_AVR4: u32 = 4;
+#[allow(missing_docs)]
+pub const EF_AVR_ARCH_AVR5: u32 = 5;
+#[allow(missing_docs)]
+pub const EF_AVR_ARCH_AVR51: u32 = 51;
+#[allow(missing_docs)]
+pub const EF_AVR_ARCH_AVR6: u32 = 6;
+#[allow(missing_docs)]
+pub const EF_AVR_ARCH_AVRTINY: u32 = 100;
+#[allow(missing_docs)]
+pub const EF_AVR_ARCH_XMEGA1: u32 = 101;
+#[allow(missing_docs)]
+pub const EF_AVR_ARCH_XMEGA2: u32 = 102;
+#[allow(missing_docs)]
+pub const EF_AVR_ARCH_XMEGA3: u32 = 103;
+#[allow(missing_docs)]
+pub const EF_AVR_ARCH_XMEGA4: u32 = 104;
+#[allow(missing_docs)]
+pub const EF_AVR_ARCH_XMEGA5: u32 = 105;
+#[allow(missing_docs)]
+pub const EF_AVR_ARCH_XMEGA6: u32 = 106;
+#[allow(missing_docs)]
+pub const EF_AVR_ARCH_XMEGA7: u32 = 107;
+
// AVR values for `Rel*::r_type`.
+#[allow(missing_docs)]
+pub const R_AVR_NONE: u32 = 0;
/// Direct 32 bit
pub const R_AVR_32: u32 = 1;
+#[allow(missing_docs)]
+pub const R_AVR_7_PCREL: u32 = 2;
+#[allow(missing_docs)]
+pub const R_AVR_13_PCREL: u32 = 3;
/// Direct 16 bit
pub const R_AVR_16: u32 = 4;
+#[allow(missing_docs)]
+pub const R_AVR_16_PM: u32 = 5;
+#[allow(missing_docs)]
+pub const R_AVR_LO8_LDI: u32 = 6;
+#[allow(missing_docs)]
+pub const R_AVR_HI8_LDI: u32 = 7;
+#[allow(missing_docs)]
+pub const R_AVR_HH8_LDI: u32 = 8;
+#[allow(missing_docs)]
+pub const R_AVR_LO8_LDI_NEG: u32 = 9;
+#[allow(missing_docs)]
+pub const R_AVR_HI8_LDI_NEG: u32 = 10;
+#[allow(missing_docs)]
+pub const R_AVR_HH8_LDI_NEG: u32 = 11;
+#[allow(missing_docs)]
+pub const R_AVR_LO8_LDI_PM: u32 = 12;
+#[allow(missing_docs)]
+pub const R_AVR_HI8_LDI_PM: u32 = 13;
+#[allow(missing_docs)]
+pub const R_AVR_HH8_LDI_PM: u32 = 14;
+#[allow(missing_docs)]
+pub const R_AVR_LO8_LDI_PM_NEG: u32 = 15;
+#[allow(missing_docs)]
+pub const R_AVR_HI8_LDI_PM_NEG: u32 = 16;
+#[allow(missing_docs)]
+pub const R_AVR_HH8_LDI_PM_NEG: u32 = 17;
+#[allow(missing_docs)]
+pub const R_AVR_CALL: u32 = 18;
+#[allow(missing_docs)]
+pub const R_AVR_LDI: u32 = 19;
+#[allow(missing_docs)]
+pub const R_AVR_6: u32 = 20;
+#[allow(missing_docs)]
+pub const R_AVR_6_ADIW: u32 = 21;
+#[allow(missing_docs)]
+pub const R_AVR_MS8_LDI: u32 = 22;
+#[allow(missing_docs)]
+pub const R_AVR_MS8_LDI_NEG: u32 = 23;
+#[allow(missing_docs)]
+pub const R_AVR_LO8_LDI_GS: u32 = 24;
+#[allow(missing_docs)]
+pub const R_AVR_HI8_LDI_GS: u32 = 25;
+#[allow(missing_docs)]
+pub const R_AVR_8: u32 = 26;
+#[allow(missing_docs)]
+pub const R_AVR_8_LO8: u32 = 27;
+#[allow(missing_docs)]
+pub const R_AVR_8_HI8: u32 = 28;
+#[allow(missing_docs)]
+pub const R_AVR_8_HLO8: u32 = 29;
+#[allow(missing_docs)]
+pub const R_AVR_DIFF8: u32 = 30;
+#[allow(missing_docs)]
+pub const R_AVR_DIFF16: u32 = 31;
+#[allow(missing_docs)]
+pub const R_AVR_DIFF32: u32 = 32;
+#[allow(missing_docs)]
+pub const R_AVR_LDS_STS_16: u32 = 33;
+#[allow(missing_docs)]
+pub const R_AVR_PORT6: u32 = 34;
+#[allow(missing_docs)]
+pub const R_AVR_PORT5: u32 = 35;
+#[allow(missing_docs)]
+pub const R_AVR_32_PCREL: u32 = 36;
// MSP430 values for `Rel*::r_type`.
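
A short sketch of how the new AVR constants might be consumed: mask `e_flags` with `EF_AVR_ARCH` to recover the architecture variant and check the link-relaxation bit. The constant values are copied from the diff above (object 0.30.2); the `describe_avr_flags` helper is only an illustration, not an `object` API, and in real code you would import the constants from `object::elf` instead of redefining them.

```rust
// Constants mirrored from the `object::elf` additions above (object 0.30.2).
const EF_AVR_ARCH: u32 = 0x7F;
const EF_AVR_LINKRELAX_PREPARED: u32 = 0x80;
const EF_AVR_ARCH_AVR5: u32 = 5;
const EF_AVR_ARCH_XMEGA7: u32 = 107;
const R_AVR_CALL: u32 = 18;

/// Decode the AVR-specific bits of an ELF `e_flags` word (illustrative helper).
fn describe_avr_flags(e_flags: u32) -> String {
    let arch = match e_flags & EF_AVR_ARCH {
        EF_AVR_ARCH_AVR5 => "avr5",
        EF_AVR_ARCH_XMEGA7 => "xmega7",
        other => return format!("unknown AVR arch value {}", other),
    };
    let relax = if e_flags & EF_AVR_LINKRELAX_PREPARED != 0 {
        ", linker relaxation prepared"
    } else {
        ""
    };
    format!("{}{}", arch, relax)
}

fn main() {
    // e_flags as a compiler might emit them for a typical avr5-class part.
    let e_flags = EF_AVR_ARCH_AVR5 | EF_AVR_LINKRELAX_PREPARED;
    println!("e_flags 0x{:02x}: {}", e_flags, describe_avr_flags(e_flags));
    assert_eq!(R_AVR_CALL, 18); // relocation numbers are now named as well
}
```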
diff --git a/vendor/once_cell/.cargo-checksum.json b/vendor/once_cell/.cargo-checksum.json
index 8fb4b41d7..bb03debe2 100644
--- a/vendor/once_cell/.cargo-checksum.json
+++ b/vendor/once_cell/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"CHANGELOG.md":"f624f016b8fa7e0f3aa46665f9fa7eb9ab46a02743fa14b29b3dad5bb9c57a9e","Cargo.lock":"f21b2f56fe0bd4911048ebeddc572a6ab6be3248a8de5f24f606bc4b96047455","Cargo.toml":"90819aab2f2f2696d640edcd5806293788279adf6b2f22edb04b25b37be82eb4","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"813d262a320611ba874c4b2488256bdb2b4073649616a1471b389d464a704301","bors.toml":"ebd69f714a49dceb8fd10ebadfea6e2767be4732fdef49eddf6239151b4bc78c","examples/bench.rs":"1597a52529f75d6c5ad0b86759a775b1d723dfa810e2016317283b13594219da","examples/bench_acquire.rs":"9f4912ca262194cb55e893c33739c85c2f4868d07905b9dd3238552b6ce8a6e4","examples/bench_vs_lazy_static.rs":"d527294a2e73b53ac5faed8b316dfd1ae2a06adb31384134af21f10ce76333a5","examples/lazy_static.rs":"8bca1b264da21eceb1ccaf30477fc941bc71bedd030f1c6982ed3a7804abfb4f","examples/reentrant_init_deadlocks.rs":"ff84929de27a848e5b155549caa96db5db5f030afca975f8ba3f3da640083001","examples/regex.rs":"4a2e0fb093c7f5bbe0fff8689fc0c670c5334344a1bfda376f5faa98a05d459f","examples/test_synchronization.rs":"88abd5c16275bb2f2d77eaecf369d97681404a77b8edd0021f24bfd377c46be3","src/imp_cs.rs":"888fc76a1f4e55b1ece3ef748b0aa6a47be2d8d928c10f89ae6b4f12330c0e55","src/imp_pl.rs":"cd69042890c25fd3db97a4762abea4b814c961eadaf5d6ed7c7db17a6abd4c5b","src/imp_std.rs":"f13a5bfe08ac02eb0d5a0271cb5be9e8c534a81cddc3253aaca28b69bded8e65","src/lib.rs":"b1f8113fc779d6ea398ddb736fe5b3e3c4a63f23404569e4a08a5003bdd3774f","src/race.rs":"bb89ba6fe9420b8d3a173c1a484dde1b6a65289c5d72eb57cd3b0cca3ac23c04","tests/it.rs":"41f50496463a0036c45ed138f158d221d379e50a91ca452ba8ffe8caa7a59e3a"},"package":"86f0b0d4bf799edbc74508c1e8bf170ff5f41238e5f8225603ca7caaae2b7860"} \ No newline at end of file
+{"files":{"CHANGELOG.md":"ad20651be71bad8624d6fda2999d7766346dd8a286cdab4f1884eb8e6f2ac505","Cargo.lock":"a4f40f03460766a82f9baebfb58afc22970218bd7d4ab257f1c9c554e08bc74a","Cargo.toml":"3681ed9cbb458d0c95932843a5a7f96d84a61c50e8792a0c6950304ebd2f8c84","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"e883909b29dc4d1c44270136fe2cfe7b2df6b416226b13928fdf9f1e15130be7","bors.toml":"ebd69f714a49dceb8fd10ebadfea6e2767be4732fdef49eddf6239151b4bc78c","examples/bench.rs":"1597a52529f75d6c5ad0b86759a775b1d723dfa810e2016317283b13594219da","examples/bench_acquire.rs":"9f4912ca262194cb55e893c33739c85c2f4868d07905b9dd3238552b6ce8a6e4","examples/bench_vs_lazy_static.rs":"d527294a2e73b53ac5faed8b316dfd1ae2a06adb31384134af21f10ce76333a5","examples/lazy_static.rs":"8bca1b264da21eceb1ccaf30477fc941bc71bedd030f1c6982ed3a7804abfb4f","examples/reentrant_init_deadlocks.rs":"ff84929de27a848e5b155549caa96db5db5f030afca975f8ba3f3da640083001","examples/regex.rs":"4a2e0fb093c7f5bbe0fff8689fc0c670c5334344a1bfda376f5faa98a05d459f","examples/test_synchronization.rs":"88abd5c16275bb2f2d77eaecf369d97681404a77b8edd0021f24bfd377c46be3","src/imp_cs.rs":"888fc76a1f4e55b1ece3ef748b0aa6a47be2d8d928c10f89ae6b4f12330c0e55","src/imp_pl.rs":"cd69042890c25fd3db97a4762abea4b814c961eadaf5d6ed7c7db17a6abd4c5b","src/imp_std.rs":"f13a5bfe08ac02eb0d5a0271cb5be9e8c534a81cddc3253aaca28b69bded8e65","src/lib.rs":"242f9f96e7efdeb5cc58d915514ad4ead96bb0616a004625a37f3eda0cb5ec45","src/race.rs":"0aea3dc70cc476a9da4d976e2e33052fef0cce3208e38dff52cfbf10a3a60ffb","tests/it.rs":"41f50496463a0036c45ed138f158d221d379e50a91ca452ba8ffe8caa7a59e3a"},"package":"6f61fba1741ea2b3d6a1e3178721804bb716a68a6aeba1149b5d52e3d464ea66"} \ No newline at end of file
diff --git a/vendor/once_cell/CHANGELOG.md b/vendor/once_cell/CHANGELOG.md
index 5cf6ea6b7..3f3a8623d 100644
--- a/vendor/once_cell/CHANGELOG.md
+++ b/vendor/once_cell/CHANGELOG.md
@@ -4,6 +4,10 @@
-
+## 1.17.0
+
+- Add `race::OnceRef` for storing a `&'a T`.
+
## 1.16.0
- Add `no_std` implementation based on `critical-section`,
diff --git a/vendor/once_cell/Cargo.lock b/vendor/once_cell/Cargo.lock
index b72bbabf5..1f838d5ec 100644
--- a/vendor/once_cell/Cargo.lock
+++ b/vendor/once_cell/Cargo.lock
@@ -74,7 +74,7 @@ checksum = "2f7254b99e31cad77da24b08ebf628882739a608578bb1bcdfc1f9c21260d7c0"
[[package]]
name = "once_cell"
-version = "1.16.0"
+version = "1.17.0"
dependencies = [
"atomic-polyfill",
"critical-section",
diff --git a/vendor/once_cell/Cargo.toml b/vendor/once_cell/Cargo.toml
index 6587f4e52..d2d7f1917 100644
--- a/vendor/once_cell/Cargo.toml
+++ b/vendor/once_cell/Cargo.toml
@@ -13,7 +13,7 @@
edition = "2021"
rust-version = "1.56"
name = "once_cell"
-version = "1.16.0"
+version = "1.17.0"
authors = ["Aleksey Kladov <aleksey.kladov@gmail.com>"]
exclude = [
"*.png",
diff --git a/vendor/once_cell/README.md b/vendor/once_cell/README.md
index de65dbbcc..737f2defd 100644
--- a/vendor/once_cell/README.md
+++ b/vendor/once_cell/README.md
@@ -51,6 +51,8 @@ More patterns and use-cases are in the [docs](https://docs.rs/once_cell/)!
* [lazycell](https://crates.io/crates/lazycell)
* [mitochondria](https://crates.io/crates/mitochondria)
* [lazy_static](https://crates.io/crates/lazy_static)
+* [async_once_cell](https://crates.io/crates/async_once_cell)
+* [generic_once_cell](https://crates.io/crates/generic_once_cell) (bring your own mutex)
The API of `once_cell` is being proposed for inclusion in
[`std`](https://github.com/rust-lang/rfcs/pull/2788).
diff --git a/vendor/once_cell/src/lib.rs b/vendor/once_cell/src/lib.rs
index 41313f736..83149ac4f 100644
--- a/vendor/once_cell/src/lib.rs
+++ b/vendor/once_cell/src/lib.rs
@@ -208,7 +208,6 @@
//! ```
//! use once_cell::sync::OnceCell;
//!
-//! #[derive(Debug)]
//! pub struct LateInit<T> { cell: OnceCell<T> }
//!
//! impl<T> LateInit<T> {
@@ -228,22 +227,24 @@
//! }
//! }
//!
-//! #[derive(Default, Debug)]
+//! #[derive(Default)]
//! struct A<'a> {
//! b: LateInit<&'a B<'a>>,
//! }
//!
-//! #[derive(Default, Debug)]
+//! #[derive(Default)]
//! struct B<'a> {
//! a: LateInit<&'a A<'a>>
//! }
//!
+//!
//! fn build_cycle() {
//! let a = A::default();
//! let b = B::default();
//! a.b.init(&b);
//! b.a.init(&a);
-//! println!("{:?}", a.b.a.b.a);
+//!
+//! let _a = &a.b.a.b.a;
//! }
//! ```
//!
@@ -315,6 +316,10 @@
//!
//! No, but you can use [`async_once_cell`](https://crates.io/crates/async_once_cell) instead.
//!
+//! **Can I bring my own mutex?**
+//!
+//! There is [generic_once_cell](https://crates.io/crates/generic_once_cell) to allow just that.
+//!
//! # Related crates
//!
//! * [double-checked-cell](https://github.com/niklasf/double-checked-cell)
@@ -323,6 +328,7 @@
//! * [mitochondria](https://crates.io/crates/mitochondria)
//! * [lazy_static](https://crates.io/crates/lazy_static)
//! * [async_once_cell](https://crates.io/crates/async_once_cell)
+//! * [generic_once_cell](https://crates.io/crates/generic_once_cell) (bring your own mutex)
//!
//! Most of this crate's functionality is available in `std` in nightly Rust.
//! See the [tracking issue](https://github.com/rust-lang/rust/issues/74465).
diff --git a/vendor/once_cell/src/race.rs b/vendor/once_cell/src/race.rs
index fd255c4c7..dff5847c8 100644
--- a/vendor/once_cell/src/race.rs
+++ b/vendor/once_cell/src/race.rs
@@ -25,6 +25,8 @@ use atomic_polyfill as atomic;
use core::sync::atomic;
use atomic::{AtomicUsize, Ordering};
+use core::cell::UnsafeCell;
+use core::marker::PhantomData;
use core::num::NonZeroUsize;
/// A thread-safe cell which can be written to only once.
@@ -172,6 +174,96 @@ impl OnceBool {
}
}
+/// A thread-safe cell which can be written to only once.
+pub struct OnceRef<'a, T> {
+ inner: OnceNonZeroUsize,
+ ghost: PhantomData<UnsafeCell<&'a T>>,
+}
+
+// TODO: Replace UnsafeCell with SyncUnsafeCell once stabilized
+unsafe impl<'a, T: Sync> Sync for OnceRef<'a, T> {}
+
+impl<'a, T> core::fmt::Debug for OnceRef<'a, T> {
+ fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ write!(f, "OnceRef({:?})", self.inner)
+ }
+}
+
+impl<'a, T> Default for OnceRef<'a, T> {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+
+impl<'a, T> OnceRef<'a, T> {
+ /// Creates a new empty cell.
+ pub const fn new() -> OnceRef<'a, T> {
+ OnceRef { inner: OnceNonZeroUsize::new(), ghost: PhantomData }
+ }
+
+ /// Gets a reference to the underlying value.
+ pub fn get(&self) -> Option<&'a T> {
+ self.inner.get().map(|ptr| unsafe { &*(ptr.get() as *const T) })
+ }
+
+ /// Sets the contents of this cell to `value`.
+ ///
+ /// Returns `Ok(())` if the cell was empty and `Err(value)` if it was
+ /// full.
+ pub fn set(&self, value: &'a T) -> Result<(), ()> {
+ let ptr = NonZeroUsize::new(value as *const T as usize).unwrap();
+ self.inner.set(ptr)
+ }
+
+ /// Gets the contents of the cell, initializing it with `f` if the cell was
+ /// empty.
+ ///
+ /// If several threads concurrently run `get_or_init`, more than one `f` can
+ /// be called. However, all threads will return the same value, produced by
+ /// some `f`.
+ pub fn get_or_init<F>(&self, f: F) -> &'a T
+ where
+ F: FnOnce() -> &'a T,
+ {
+ let f = || NonZeroUsize::new(f() as *const T as usize).unwrap();
+ let ptr = self.inner.get_or_init(f);
+ unsafe { &*(ptr.get() as *const T) }
+ }
+
+ /// Gets the contents of the cell, initializing it with `f` if
+ /// the cell was empty. If the cell was empty and `f` failed, an
+ /// error is returned.
+ ///
+ /// If several threads concurrently run `get_or_init`, more than one `f` can
+ /// be called. However, all threads will return the same value, produced by
+ /// some `f`.
+ pub fn get_or_try_init<F, E>(&self, f: F) -> Result<&'a T, E>
+ where
+ F: FnOnce() -> Result<&'a T, E>,
+ {
+ let f = || f().map(|value| NonZeroUsize::new(value as *const T as usize).unwrap());
+ let ptr = self.inner.get_or_try_init(f)?;
+ unsafe { Ok(&*(ptr.get() as *const T)) }
+ }
+
+ /// ```compile_fail
+ /// use once_cell::race::OnceRef;
+ ///
+ /// let mut l = OnceRef::new();
+ ///
+ /// {
+ /// let y = 2;
+ /// let mut r = OnceRef::new();
+ /// r.set(&y).unwrap();
+ /// core::mem::swap(&mut l, &mut r);
+ /// }
+ ///
+ /// // l now contains a dangling reference to y
+ /// eprintln!("uaf: {}", l.get().unwrap());
+ /// ```
+ fn _dummy() {}
+}
+
#[cfg(feature = "alloc")]
pub use self::once_box::OnceBox;
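
A minimal usage sketch of the `race::OnceRef` cell introduced above, assuming `once_cell = "1.17"` with the `race` module available (as far as I can tell it is pulled in by the default features): the cell stores a `&'a T` exactly once and hands the same reference back to every caller.

```rust
// Sketch only: exercises the new `race::OnceRef` API shown in the diff above.
use once_cell::race::OnceRef;

fn main() {
    let config = String::from("max-threads=8");
    let cell: OnceRef<'_, String> = OnceRef::new();

    // First write wins; the cell now holds `&config` for its whole lifetime.
    cell.set(&config).unwrap();
    assert!(cell.set(&config).is_err()); // a second write is rejected

    // `get` and `get_or_init` both return the stored `&'a T`.
    assert_eq!(cell.get().map(String::as_str), Some("max-threads=8"));
    let same: &String = cell.get_or_init(|| &config);
    assert!(std::ptr::eq(same, &config));

    println!("OnceRef holds: {}", same);
}
```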
diff --git a/vendor/overload/.cargo-checksum.json b/vendor/overload/.cargo-checksum.json
new file mode 100644
index 000000000..dfd877e5b
--- /dev/null
+++ b/vendor/overload/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"Cargo.toml":"7dae2c1f39fcd51345ecd0c84ff5ffd2544087b2fce175d253d388c4e61af61c","LICENSE":"0d1bdcc9f95914fc1709781e65a75b85db7f73fd1c335bf6f7b1aae618df322e","README.md":"163212683c01ec376c6fc1d4b6acb10cc1cb4af622e9ddbd869a48c7ac9c6169","logo.png":"6992807474a15bf663aeeb357b6038cdbaed7f5787a26941d1fc006176c3a6ae","src/assignment.rs":"f4ff0b288ed0d7cd03bd53d3c5f452ffdd36e20d816f6f68762ace1c2609a445","src/binary.rs":"321c474d28d7b8367ba71ad6b8691d023cefce4d2b9417c454e57ada42b618c6","src/lib.rs":"542d5b297d3b9039450dce66e58b89b8d3707ee322230f7e9cc25f367e6338a1","src/unary.rs":"f2f87399a153e225acc4899651ad079bf02b4fe426fb5c94799eb2747b84a870","tests/assignment.rs":"635f4a214ad9e725d2f46d12983a96167ac7bd8af979e69d20bdcd8739d9e3a9","tests/binary.rs":"1acc26022414a6d54f7fe18d85c41433dec2b6adb6615d04a261f9b47c438b88","tests/unary.rs":"4bc733e466c19296bc3738f866b710fc421a04f6dd0180348bd2fddecdc88c51"},"package":"b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"} \ No newline at end of file
diff --git a/vendor/rls-data/Cargo.toml b/vendor/overload/Cargo.toml
index 54d416498..1e0fbc514 100644
--- a/vendor/rls-data/Cargo.toml
+++ b/vendor/overload/Cargo.toml
@@ -12,19 +12,11 @@
[package]
edition = "2018"
-name = "rls-data"
-version = "0.19.1"
-authors = ["Nick Cameron <ncameron@mozilla.com>"]
-description = "Data structures used by the RLS and Rust compiler"
-categories = ["development-tools"]
-license = "Apache-2.0/MIT"
-repository = "https://github.com/rust-lang/rls"
-[dependencies.rls-span]
-version = "0.5.0"
-
-[dependencies.serde]
-version = "1.0"
-
-[features]
-default = ["derive"]
-derive = ["serde/derive", "rls-span/derive"]
+name = "overload"
+version = "0.1.1"
+authors = ["Daniel Salvadori <danaugrs@gmail.com>"]
+description = "Provides a macro to simplify operator overloading."
+keywords = ["operator", "overloading", "macro", "op"]
+categories = ["rust-patterns"]
+license = "MIT"
+repository = "https://github.com/danaugrs/overload"
diff --git a/vendor/overload/LICENSE b/vendor/overload/LICENSE
new file mode 100644
index 000000000..0ed504b3c
--- /dev/null
+++ b/vendor/overload/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2019 Daniel Augusto Rizzi Salvadori
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE. \ No newline at end of file
diff --git a/vendor/overload/README.md b/vendor/overload/README.md
new file mode 100644
index 000000000..c40088ead
--- /dev/null
+++ b/vendor/overload/README.md
@@ -0,0 +1,64 @@
+<p align="center"><img width="460" src="https://github.com/danaugrs/overload/blob/master/logo.png"></p>
+<p align="center">
+ <a href="https://docs.rs/overload"><img src="https://docs.rs/overload/badge.svg"/></a>
+ <a href="https://crates.io/crates/overload"><img src="https://img.shields.io/crates/v/overload.svg"/></a>
+</p>
+
+Provides a macro to simplify operator overloading. See the [documentation](https://docs.rs/overload/) for details and supported operators.
+
+## Example
+
+```rust
+extern crate overload;
+use overload::overload;
+use std::ops; // <- don't forget this or you'll get nasty errors
+
+#[derive(PartialEq, Debug)]
+struct Val {
+ v: i32
+}
+
+overload!((a: ?Val) + (b: ?Val) -> Val { Val { v: a.v + b.v } });
+```
+
+The macro call in the snippet above generates the following code:
+
+```rust
+impl ops::Add<Val> for Val {
+ type Output = Val;
+ fn add(self, b: Val) -> Self::Output {
+ let a = self;
+ Val { v: a.v + b.v }
+ }
+}
+impl ops::Add<&Val> for Val {
+ type Output = Val;
+ fn add(self, b: &Val) -> Self::Output {
+ let a = self;
+ Val { v: a.v + b.v }
+ }
+}
+impl ops::Add<Val> for &Val {
+ type Output = Val;
+ fn add(self, b: Val) -> Self::Output {
+ let a = self;
+ Val { v: a.v + b.v }
+ }
+}
+impl ops::Add<&Val> for &Val {
+ type Output = Val;
+ fn add(self, b: &Val) -> Self::Output {
+ let a = self;
+ Val { v: a.v + b.v }
+ }
+}
+```
+
+We are now able to add `Val`s and `&Val`s in any combination:
+
+```rust
+assert_eq!(Val{v:3} + Val{v:5}, Val{v:8});
+assert_eq!(Val{v:3} + &Val{v:5}, Val{v:8});
+assert_eq!(&Val{v:3} + Val{v:5}, Val{v:8});
+assert_eq!(&Val{v:3} + &Val{v:5}, Val{v:8});
+```
diff --git a/vendor/overload/logo.png b/vendor/overload/logo.png
new file mode 100644
index 000000000..61d33090b
--- /dev/null
+++ b/vendor/overload/logo.png
Binary files differ
diff --git a/vendor/overload/src/assignment.rs b/vendor/overload/src/assignment.rs
new file mode 100644
index 000000000..550f07a96
--- /dev/null
+++ b/vendor/overload/src/assignment.rs
@@ -0,0 +1,27 @@
+#[doc(hidden)]
+#[macro_export(local_inner_macros)]
+macro_rules! _overload_assignment {
+ (+=, $($t:tt)+) => (_overload_assignment_internal!(AddAssign, add_assign, $($t)+););
+ (-=, $($t:tt)+) => (_overload_assignment_internal!(SubAssign, sub_assign, $($t)+););
+ (*=, $($t:tt)+) => (_overload_assignment_internal!(MulAssign, mul_assign, $($t)+););
+ (/=, $($t:tt)+) => (_overload_assignment_internal!(DivAssign, div_assign, $($t)+););
+ (%=, $($t:tt)+) => (_overload_assignment_internal!(RemAssign, rem_assign, $($t)+););
+ (&=, $($t:tt)+) => (_overload_assignment_internal!(BitAndAssign, bitand_assign, $($t)+););
+ (|=, $($t:tt)+) => (_overload_assignment_internal!(BitOrAssign, bitor_assign, $($t)+););
+ (^=, $($t:tt)+) => (_overload_assignment_internal!(BitXorAssign, bitxor_assign, $($t)+););
+ (<<=, $($t:tt)+) => (_overload_assignment_internal!(ShlAssign, shl_assign, $($t)+););
+ (>>=, $($t:tt)+) => (_overload_assignment_internal!(ShrAssign, shr_assign, $($t)+););
+}
+
+#[doc(hidden)]
+#[macro_export(local_inner_macros)]
+macro_rules! _overload_assignment_internal {
+ ($op_trait:ident, $op_fn:ident, $li:ident, $lt:ty, $ri:ident, $rt:ty, $body:block) => (
+ impl ops::$op_trait<$rt> for $lt {
+ fn $op_fn(&mut self, $ri: $rt) {
+ let $li = self;
+ $body
+ }
+ }
+ );
+}
diff --git a/vendor/overload/src/binary.rs b/vendor/overload/src/binary.rs
new file mode 100644
index 000000000..b4c781bff
--- /dev/null
+++ b/vendor/overload/src/binary.rs
@@ -0,0 +1,28 @@
+#[doc(hidden)]
+#[macro_export(local_inner_macros)]
+macro_rules! _overload_binary {
+ (+, $($t:tt)+) => (_overload_binary_internal!(Add, add, $($t)+););
+ (-, $($t:tt)+) => (_overload_binary_internal!(Sub, sub, $($t)+););
+ (*, $($t:tt)+) => (_overload_binary_internal!(Mul, mul, $($t)+););
+ (/, $($t:tt)+) => (_overload_binary_internal!(Div, div, $($t)+););
+ (%, $($t:tt)+) => (_overload_binary_internal!(Rem, rem, $($t)+););
+ (&, $($t:tt)+) => (_overload_binary_internal!(BitAnd, bitand, $($t)+););
+ (|, $($t:tt)+) => (_overload_binary_internal!(BitOr, bitor, $($t)+););
+ (^, $($t:tt)+) => (_overload_binary_internal!(BitXor, bitxor, $($t)+););
+ (<<, $($t:tt)+) => (_overload_binary_internal!(Shl, shl, $($t)+););
+ (>>, $($t:tt)+) => (_overload_binary_internal!(Shr, shr, $($t)+););
+}
+
+#[doc(hidden)]
+#[macro_export(local_inner_macros)]
+macro_rules! _overload_binary_internal {
+ ($op_trait:ident, $op_fn:ident, $li:ident, $lt:ty, $ri:ident, $rt:ty, $out:ty, $body:block) => (
+ impl ops::$op_trait<$rt> for $lt {
+ type Output = $out;
+ fn $op_fn(self, $ri: $rt) -> Self::Output {
+ let $li = self;
+ $body
+ }
+ }
+ );
+}
diff --git a/vendor/overload/src/lib.rs b/vendor/overload/src/lib.rs
new file mode 100644
index 000000000..9364d7a38
--- /dev/null
+++ b/vendor/overload/src/lib.rs
@@ -0,0 +1,257 @@
+//! Provides a macro to simplify operator overloading.
+//!
+//! To use, include the following:
+//! ```
+//! extern crate overload;
+//! use overload::overload;
+//! use std::ops; // <- don't forget this or you'll get nasty errors
+//! ```
+//!
+//! # Introduction
+//!
+//! Suppose we have the following `struct` definition:
+//! ```
+//! #[derive(PartialEq, Debug)]
+//! struct Val {
+//! v: i32
+//! }
+//! ```
+//! We can overload the addition of `Val`s like so:
+//! ```
+//! # extern crate overload;
+//! # use overload::overload;
+//! # use std::ops;
+//! # #[derive(PartialEq, Debug)]
+//! # struct Val {
+//! # v: i32
+//! # }
+//! overload!((a: Val) + (b: Val) -> Val { Val { v: a.v + b.v } });
+//! ```
+//! The macro call above generates the following code:
+//! ```ignore
+//! impl ops::Add<Val> for Val {
+//! type Output = Val;
+//! fn add(self, b: Val) -> Self::Output {
+//! let a = self;
+//! Val { v: a.v + b.v }
+//! }
+//! }
+//! ```
+//! We are now able to add `Val`s:
+//! ```
+//! # extern crate overload;
+//! # use overload::overload;
+//! # use std::ops;
+//! # #[derive(PartialEq, Debug)]
+//! # struct Val {
+//! # v: i32
+//! # }
+//! # overload!((a: Val) + (b: Val) -> Val { Val { v: a.v + b.v } });
+//! assert_eq!(Val{v:3} + Val{v:5}, Val{v:8});
+//! ```
+//!
+//! # Owned and borrowed types
+//!
+//! If we also wanted to overload addition for the borrowed type `&Val` we could write:
+//! ```
+//! # extern crate overload;
+//! # use overload::overload;
+//! # use std::ops;
+//! # #[derive(PartialEq, Debug)]
+//! # struct Val {
+//! # v: i32
+//! # }
+//! overload!((a: &Val) + (b: &Val) -> Val { Val { v: a.v + b.v } });
+//! ```
+//! We might also want to overload addition between the owned and borrowed types:
+//! ```
+//! # extern crate overload;
+//! # use overload::overload;
+//! # use std::ops;
+//! # #[derive(PartialEq, Debug)]
+//! # struct Val {
+//! # v: i32
+//! # }
+//! overload!((a: Val) + (b: &Val) -> Val { Val { v: a.v + b.v } });
+//! overload!((a: &Val) + (b: Val) -> Val { Val { v: a.v + b.v } });
+//! ```
+//! Let's see how we can write these combinations more concisely.
+//!
+//! We can include a `?` in front of a type to indicate that it should stand in for both the owned and borrowed type.
+//!
+//! To overload addition for all four combinations between `Val` and `&Val` we can therefore simply include a `?` in front of both types:
+//! ```
+//! # extern crate overload;
+//! # use overload::overload;
+//! # use std::ops;
+//! # #[derive(PartialEq, Debug)]
+//! # struct Val {
+//! # v: i32
+//! # }
+//! overload!((a: ?Val) + (b: ?Val) -> Val { Val { v: a.v + b.v } });
+//! ```
+//! The macro call above generates the following code:
+//! ```ignore
+//! impl ops::Add<Val> for Val {
+//! type Output = Val;
+//! fn add(self, b: Val) -> Self::Output {
+//! let a = self;
+//! Val { v: a.v + b.v }
+//! }
+//! }
+//!
+//! impl ops::Add<&Val> for Val {
+//! type Output = Val;
+//! fn add(self, b: &Val) -> Self::Output {
+//! let a = self;
+//! Val { v: a.v + b.v }
+//! }
+//! }
+//!
+//! impl ops::Add<Val> for &Val {
+//! type Output = Val;
+//! fn add(self, b: Val) -> Self::Output {
+//! let a = self;
+//! Val { v: a.v + b.v }
+//! }
+//! }
+//!
+//! impl ops::Add<&Val> for &Val {
+//! type Output = Val;
+//! fn add(self, b: &Val) -> Self::Output {
+//! let a = self;
+//! Val { v: a.v + b.v }
+//! }
+//! }
+//! ```
+//! We are now able to add `Val`s and `&Val`s in any combination:
+//! ```
+//! # extern crate overload;
+//! # use overload::overload;
+//! # use std::ops;
+//! # #[derive(PartialEq, Debug)]
+//! # struct Val {
+//! # v: i32
+//! # }
+//! # overload!((a: ?Val) + (b: ?Val) -> Val { Val { v: a.v + b.v } });
+//! assert_eq!(Val{v:3} + Val{v:5}, Val{v:8});
+//! assert_eq!(Val{v:3} + &Val{v:5}, Val{v:8});
+//! assert_eq!(&Val{v:3} + Val{v:5}, Val{v:8});
+//! assert_eq!(&Val{v:3} + &Val{v:5}, Val{v:8});
+//! ```
+//!
+//! # Binary operators
+//!
+//! The general syntax to overload a binary operator between types `<a_type>` and `<b_type>` is:
+//! ```ignore
+//! overload!((<a_ident>: <a_type>) <op> (<b_ident>: <b_type>) -> <out_type> { /*body*/ });
+//! ```
+//! Inside the body you can use `<a_ident>` and `<b_ident>` freely to perform any computation.
+//!
+//! The last line of the body needs to be an expression (i.e. no `;` at the end of the line) of type `<out_type>`.
+//!
+//! | Operator | Example | Trait |
+//! |----------|-----------------------------------------------------------------|--------|
+//! | + | `overload!((a: A) + (b: B) -> C { /*...*/ );` | Add |
+//! | - | `overload!((a: A) - (b: B) -> C { /*...*/ );` | Sub |
+//! | * | `overload!((a: A) * (b: B) -> C { /*...*/ );` | Mul |
+//! | / | `overload!((a: A) / (b: B) -> C { /*...*/ );` | Div |
+//! | % | `overload!((a: A) % (b: B) -> C { /*...*/ );` | Rem |
+//! | & | `overload!((a: A) & (b: B) -> C { /*...*/ );` | BitAnd |
+//! | \| | <code>overload!((a: A) &vert; (b: B) -> C { /\*...*\/ );</code> | BitOr |
+//! | ^ | `overload!((a: A) ^ (b: B) -> C { /*...*/ );` | BitXor |
+//! | << | `overload!((a: A) << (b: B) -> C { /*...*/ );` | Shl |
+//! | >> | `overload!((a: A) >> (b: B) -> C { /*...*/ );` | Shr |
+//!
+//! # Assignment operators
+//!
+//! The general syntax to overload an assignment operator between types `<a_type>` and `<b_type>` is:
+//! ```ignore
+//! overload!((<a_ident>: &mut <a_type>) <op> (<b_ident>: <b_type>) { /*body*/ });
+//! ```
+//! Inside the body you can use `<a_ident>` and `<b_ident>` freely to perform any computation and mutate `<a_ident>` as desired.
+//!
+//! | Operator | Example | Trait |
+//! |----------|------------------------------------------------------------------|--------------|
+//! | += | `overload!((a: &mut A) += (b: B) { /*...*/ );` | AddAssign |
+//! | -= | `overload!((a: &mut A) -= (b: B) { /*...*/ );` | SubAssign |
+//! | *= | `overload!((a: &mut A) *= (b: B) { /*...*/ );` | MulAssign |
+//! | /= | `overload!((a: &mut A) /= (b: B) { /*...*/ );` | DivAssign |
+//! | %= | `overload!((a: &mut A) %= (b: B) { /*...*/ );` | RemAssign |
+//! | &= | `overload!((a: &mut A) &= (b: B) { /*...*/ );` | BitAndAssign |
+//! | \|= | <code>overload!((a: &mut A) &vert;= (b: B) { /\*...*\/ );</code> | BitOrAssign |
+//! | ^= | `overload!((a: &mut A) ^= (b: B) { /*...*/ );` | BitXorAssign |
+//! | <<= | `overload!((a: &mut A) <<= (b: B) { /*...*/ );` | ShlAssign |
+//! | >>= | `overload!((a: &mut A) >>= (b: B) { /*...*/ );` | ShrAssign |
+//!
+//! # Unary operators
+//!
+//! The general syntax to overload a unary operator for type `<a_type>` is:
+//! ```ignore
+//! overload!(<op> (<a_ident>: <a_type>) -> <out_type> { /*body*/ });
+//! ```
+//! Inside the body you can use `<a_ident>` freely to perform any computation.
+//!
+//! The last line of the body needs to be an expression (i.e. no `;` at the end of the line) of type `<out_type>`.
+//!
+//! | Operator | Example | Trait |
+//! |----------|---------------------------------------------------------|-------|
+//! | - | `overload!(- (a: A) -> B { /*...*/ );` | Neg |
+//! | ! | `overload!(! (a: A) -> B { /*...*/ );` | Not |
+//!
+//! # Notes
+//!
+//! Remember that you can only overload operators between one or more types if at least one of the types is defined in the current crate.
+
+#[macro_use]
+mod unary;
+
+#[macro_use]
+mod assignment;
+
+#[macro_use]
+mod binary;
+
+/// Overloads an operator. See the [module level documentation](index.html) for more information.
+#[macro_export(local_inner_macros)]
+macro_rules! overload {
+ // Unary (both owned and borrowed)
+ ($op:tt ($i:ident : ? $t:ty) -> $out:ty $body:block) => (
+ _overload_unary!($op, $i, $t, $out, $body);
+ _overload_unary!($op, $i, &$t, $out, $body);
+ );
+ // Unary (either owned or borrowed)
+ ($op:tt ($i:ident : $t:ty) -> $out:ty $body:block) => (
+ _overload_unary!($op, $i, $t, $out, $body);
+ );
+ // Assignment (both owned and borrowed)
+ (($li:ident : &mut $lt:ty) $op:tt ($ri:ident : ? $rt:ty) $body:block) => (
+ _overload_assignment!($op, $li, $lt, $ri, $rt, $body);
+ _overload_assignment!($op, $li, $lt, $ri, &$rt, $body);
+ );
+ // Assignment (either owned or borrowed)
+ (($li:ident : &mut $lt:ty) $op:tt ($ri:ident : $rt:ty) $body:block) => (
+ _overload_assignment!($op, $li, $lt, $ri, $rt, $body);
+ );
+ // Binary (both - both)
+ (($li:ident : ? $lt:ty) $op:tt ($ri:ident : ? $rt:ty) -> $out:ty $body:block) => (
+ _overload_binary!($op, $li, $lt, $ri, $rt, $out, $body);
+ _overload_binary!($op, $li, $lt, $ri, &$rt, $out, $body);
+ _overload_binary!($op, $li, &$lt, $ri, $rt, $out, $body);
+ _overload_binary!($op, $li, &$lt, $ri, &$rt, $out, $body);
+ );
+ // Binary (both - either)
+ (($li:ident : ? $lt:ty) $op:tt ($ri:ident : $rt:ty) -> $out:ty $body:block) => (
+ _overload_binary!($op, $li, $lt, $ri, $rt, $out, $body);
+ _overload_binary!($op, $li, &$lt, $ri, $rt, $out, $body);
+ );
+ // Binary (either - both)
+ (($li:ident : $lt:ty) $op:tt ($ri:ident : ? $rt:ty) -> $out:ty $body:block) => (
+ _overload_binary!($op, $li, $lt, $ri, $rt, $out, $body);
+ _overload_binary!($op, $li, $lt, $ri, &$rt, $out, $body);
+ );
+ // Binary (either - either)
+ (($li:ident : $lt:ty) $op:tt ($ri:ident : $rt:ty) -> $out:ty $body:block) => (
+ _overload_binary!($op, $li, $lt, $ri, $rt, $out, $body);
+ );
+}
diff --git a/vendor/overload/src/unary.rs b/vendor/overload/src/unary.rs
new file mode 100644
index 000000000..da8f6b590
--- /dev/null
+++ b/vendor/overload/src/unary.rs
@@ -0,0 +1,20 @@
+#[doc(hidden)]
+#[macro_export(local_inner_macros)]
+macro_rules! _overload_unary {
+ (-, $($t:tt)+) => (_overload_unary_internal!(Neg, neg, $($t)+););
+ (!, $($t:tt)+) => (_overload_unary_internal!(Not, not, $($t)+););
+}
+
+#[doc(hidden)]
+#[macro_export(local_inner_macros)]
+macro_rules! _overload_unary_internal {
+ ($op_trait:ident, $op_fn:ident, $i:ident, $t:ty, $out:ty, $body:block) => (
+ impl ops::$op_trait for $t {
+ type Output = $out;
+ fn $op_fn(self) -> Self::Output {
+ let $i = self;
+ $body
+ }
+ }
+ );
+}
diff --git a/vendor/overload/tests/assignment.rs b/vendor/overload/tests/assignment.rs
new file mode 100644
index 000000000..9657b4bd4
--- /dev/null
+++ b/vendor/overload/tests/assignment.rs
@@ -0,0 +1,89 @@
+extern crate overload;
+use overload::overload;
+use std::ops;
+
+#[derive(PartialEq, Debug)]
+struct A(i32);
+
+#[derive(PartialEq, Debug)]
+struct B(i32);
+
+overload!((a: &mut A) += (b: B) { a.0 += b.0; });
+#[test]
+fn add_assign() {
+ let mut a = A(3);
+ a += B(4);
+ assert_eq!(A(3 + 4), a);
+}
+
+overload!((a: &mut A) -= (b: B) { a.0 -= b.0; });
+#[test]
+fn sub_assign() {
+ let mut a = A(3);
+ a -= B(4);
+ assert_eq!(A(3 - 4), a);
+}
+
+overload!((a: &mut A) *= (b: B) { a.0 *= b.0; });
+#[test]
+fn mul_assign() {
+ let mut a = A(3);
+ a *= B(4);
+ assert_eq!(A(3 * 4), a);
+}
+
+overload!((a: &mut A) /= (b: B) { a.0 /= b.0; });
+#[test]
+fn div_assign() {
+ let mut a = A(6);
+ a /= B(3);
+ assert_eq!(A(6 / 3), a);
+}
+
+overload!((a: &mut A) %= (b: B) { a.0 %= b.0; });
+#[test]
+fn rem_assign() {
+ let mut a = A(6);
+ a %= B(4);
+ assert_eq!(A(6 % 4), a);
+}
+
+overload!((a: &mut A) &= (b: B) { a.0 &= b.0; });
+#[test]
+fn bitand_assign() {
+ let mut a = A(6);
+ a &= B(4);
+ assert_eq!(A(6 & 4), a);
+}
+
+overload!((a: &mut A) |= (b: B) { a.0 |= b.0; });
+#[test]
+fn bitor_assign() {
+ let mut a = A(6);
+ a |= B(4);
+ assert_eq!(A(6 | 4), a);
+}
+
+overload!((a: &mut A) ^= (b: B) { a.0 ^= b.0; });
+#[test]
+fn bitxor_assign() {
+ let mut a = A(6);
+ a ^= B(4);
+ assert_eq!(A(6 ^ 4), a);
+}
+
+overload!((a: &mut A) <<= (b: B) { a.0 <<= b.0; });
+#[test]
+fn shl_assign() {
+ let mut a = A(6);
+ a <<= B(4);
+ assert_eq!(A(6 << 4), a);
+}
+
+overload!((a: &mut A) >>= (b: B) { a.0 >>= b.0; });
+#[test]
+fn shr_assign() {
+ let mut a = A(6);
+ a >>= B(4);
+ assert_eq!(A(6 >> 4), a);
+}
diff --git a/vendor/overload/tests/binary.rs b/vendor/overload/tests/binary.rs
new file mode 100644
index 000000000..2fb209ad3
--- /dev/null
+++ b/vendor/overload/tests/binary.rs
@@ -0,0 +1,72 @@
+extern crate overload;
+use overload::overload;
+use std::ops;
+
+#[derive(PartialEq, Debug)]
+struct A(i32);
+
+#[derive(PartialEq, Debug)]
+struct B(i32);
+
+#[derive(PartialEq, Debug)]
+struct C(i32);
+
+overload!((a: A) + (b: B) -> C { C(a.0 + b.0) });
+#[test]
+fn add() {
+ assert_eq!(A(3) + B(4), C(3 + 4));
+}
+
+overload!((a: A) - (b: B) -> C { C(a.0 - b.0) });
+#[test]
+fn sub() {
+ assert_eq!(A(3) - B(4), C(3 - 4));
+}
+
+overload!((a: A) * (b: B) -> C { C(a.0 * b.0) });
+#[test]
+fn mul() {
+ assert_eq!(A(3) * B(4), C(3 * 4));
+}
+
+overload!((a: A) / (b: B) -> C { C(a.0 / b.0) });
+#[test]
+fn div() {
+ assert_eq!(A(6) / B(3), C(6 / 3));
+}
+
+overload!((a: A) % (b: B) -> C { C(a.0 % b.0) });
+#[test]
+fn rem() {
+ assert_eq!(A(6) % B(4), C(6 % 4));
+}
+
+overload!((a: A) & (b: B) -> C { C(a.0 & b.0) });
+#[test]
+fn bitand() {
+ assert_eq!(A(6) & B(4), C(6 & 4));
+}
+
+overload!((a: A) | (b: B) -> C { C(a.0 | b.0) });
+#[test]
+fn bitor() {
+ assert_eq!(A(6) | B(4), C(6 | 4));
+}
+
+overload!((a: A) ^ (b: B) -> C { C(a.0 ^ b.0) });
+#[test]
+fn bitxor() {
+ assert_eq!(A(6) ^ B(4), C(6 ^ 4));
+}
+
+overload!((a: A) << (b: B) -> C { C(a.0 << b.0) });
+#[test]
+fn shl() {
+ assert_eq!(A(6) << B(4), C(6 << 4));
+}
+
+overload!((a: A) >> (b: B) -> C { C(a.0 >> b.0) });
+#[test]
+fn shr() {
+ assert_eq!(A(6) >> B(4), C(6 >> 4));
+}
diff --git a/vendor/overload/tests/unary.rs b/vendor/overload/tests/unary.rs
new file mode 100644
index 000000000..76a3925fe
--- /dev/null
+++ b/vendor/overload/tests/unary.rs
@@ -0,0 +1,21 @@
+extern crate overload;
+use overload::overload;
+use std::ops;
+
+#[derive(PartialEq, Debug)]
+struct A(i32);
+
+#[derive(PartialEq, Debug)]
+struct B(i32);
+
+overload!(- (a: A) -> B { B(-a.0) });
+#[test]
+fn neg() {
+ assert_eq!(-A(3), B(-3));
+}
+
+overload!(! (a: A) -> B { B(!a.0) });
+#[test]
+fn not() {
+ assert_eq!(!A(3), B(!3));
+}
diff --git a/vendor/parking_lot_core-0.8.5/.cargo-checksum.json b/vendor/parking_lot_core-0.8.6/.cargo-checksum.json
index 8eb5c0c8d..0306c5e78 100644
--- a/vendor/parking_lot_core-0.8.5/.cargo-checksum.json
+++ b/vendor/parking_lot_core-0.8.6/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.toml":"fdba14f9aee05b55ba008685e1d3a9050d14a56251608b23cc0970caa6b9fae4","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"c9a75f18b9ab2927829a208fc6aa2cf4e63b8420887ba29cdb265d6619ae82d5","build.rs":"29e629057144d1238dcd8ea70ad6cbb6ec14ca742797af3fa9335710ff5cbaaa","src/lib.rs":"7baf09034aafc28f7dbb1550cdde89221e4eb5dfda51b55aeb652ee8710c715d","src/parking_lot.rs":"58125667bd78399e8753b6bd8acef84f180f369f0bc174c573887176bab9f9d3","src/spinwait.rs":"d568d8a81f9144ec4c4a139dc934d7d04ee1656a4a221eb548742fe7aba09ab1","src/thread_parker/generic.rs":"574aecb3c325012b683eca4135441ec73f44c33cc9955aa05db24d7e4c991cd7","src/thread_parker/linux.rs":"4a2c76b3dc09301ceb73d904460f49d91bc1a2492cc123ee26ca22ece3faae79","src/thread_parker/mod.rs":"9c675b7690bbde62e88d946fad218623d423edccff4e01e8e52b116d815c695c","src/thread_parker/redox.rs":"91ca107c4edffa57e87294cadec3b6010b584fb272c044e2557c925dbcb90f6a","src/thread_parker/sgx.rs":"898ced116fb7b0ac077b5977b5bcac610f1d55beffb613ec73e083b1ef09cc28","src/thread_parker/unix.rs":"02a17ad1241e8547cc7ee096318757b01d328354b639d0f799fca66ff2f07439","src/thread_parker/wasm.rs":"903b7eec240cdbe8a23467f6b41d042d93b35755bd1763be02f9cc55756c4aec","src/thread_parker/wasm_atomic.rs":"cf761157803256b18205e747bc99e30b18d5410c27121fa9595e12cb51bb6bef","src/thread_parker/windows/keyed_event.rs":"fc1cf4e592d814c4c949217d91317ec2afb6048430abebb3cea2e8487b369734","src/thread_parker/windows/mod.rs":"c99a3871e69800452a56928a9e870530b7f48a563a4d3efe6184103147899f0c","src/thread_parker/windows/waitaddress.rs":"8e037df2a5692905e2bc2d4ea955295ab92bcc7e26eea0bb7a4eaac9ce657321","src/util.rs":"285e6133150645525f2ca1ece41f6d35bad4e7c5e08b42b20c99d2a97e04a974","src/word_lock.rs":"9ba49082359c35ad5b4e8d219fede1ffca75225f9ccb971cbba01f20e2ed2738"},"package":"d76e8e1493bcac0d2766c42737f34458f1c8c50c0d23bcb24ea953affb273216"} \ No newline at end of file
+{"files":{"Cargo.toml":"eb7f5eba6c03746da0585ae4abdc3ce74103ac9b9fc6f0d1db02fe5bf786e810","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"c9a75f18b9ab2927829a208fc6aa2cf4e63b8420887ba29cdb265d6619ae82d5","build.rs":"29e629057144d1238dcd8ea70ad6cbb6ec14ca742797af3fa9335710ff5cbaaa","src/lib.rs":"7baf09034aafc28f7dbb1550cdde89221e4eb5dfda51b55aeb652ee8710c715d","src/parking_lot.rs":"58125667bd78399e8753b6bd8acef84f180f369f0bc174c573887176bab9f9d3","src/spinwait.rs":"d568d8a81f9144ec4c4a139dc934d7d04ee1656a4a221eb548742fe7aba09ab1","src/thread_parker/generic.rs":"574aecb3c325012b683eca4135441ec73f44c33cc9955aa05db24d7e4c991cd7","src/thread_parker/linux.rs":"98ee0cc037e2eea6bd92a5460ce8dad62d4c25dae209dfc638e69dba71e29c85","src/thread_parker/mod.rs":"9c675b7690bbde62e88d946fad218623d423edccff4e01e8e52b116d815c695c","src/thread_parker/redox.rs":"91ca107c4edffa57e87294cadec3b6010b584fb272c044e2557c925dbcb90f6a","src/thread_parker/sgx.rs":"898ced116fb7b0ac077b5977b5bcac610f1d55beffb613ec73e083b1ef09cc28","src/thread_parker/unix.rs":"02a17ad1241e8547cc7ee096318757b01d328354b639d0f799fca66ff2f07439","src/thread_parker/wasm.rs":"903b7eec240cdbe8a23467f6b41d042d93b35755bd1763be02f9cc55756c4aec","src/thread_parker/wasm_atomic.rs":"cf761157803256b18205e747bc99e30b18d5410c27121fa9595e12cb51bb6bef","src/thread_parker/windows/keyed_event.rs":"fc1cf4e592d814c4c949217d91317ec2afb6048430abebb3cea2e8487b369734","src/thread_parker/windows/mod.rs":"c99a3871e69800452a56928a9e870530b7f48a563a4d3efe6184103147899f0c","src/thread_parker/windows/waitaddress.rs":"8e037df2a5692905e2bc2d4ea955295ab92bcc7e26eea0bb7a4eaac9ce657321","src/util.rs":"285e6133150645525f2ca1ece41f6d35bad4e7c5e08b42b20c99d2a97e04a974","src/word_lock.rs":"9ba49082359c35ad5b4e8d219fede1ffca75225f9ccb971cbba01f20e2ed2738"},"package":"60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc"} \ No newline at end of file
diff --git a/vendor/parking_lot_core-0.8.5/Cargo.toml b/vendor/parking_lot_core-0.8.6/Cargo.toml
index 7afeb4114..698151644 100644
--- a/vendor/parking_lot_core-0.8.5/Cargo.toml
+++ b/vendor/parking_lot_core-0.8.6/Cargo.toml
@@ -12,13 +12,20 @@
[package]
edition = "2018"
name = "parking_lot_core"
-version = "0.8.5"
+version = "0.8.6"
authors = ["Amanieu d'Antras <amanieu@gmail.com>"]
description = "An advanced API for creating custom synchronization primitives."
-keywords = ["mutex", "condvar", "rwlock", "once", "thread"]
+keywords = [
+ "mutex",
+ "condvar",
+ "rwlock",
+ "once",
+ "thread",
+]
categories = ["concurrency"]
license = "Apache-2.0/MIT"
repository = "https://github.com/Amanieu/parking_lot"
+
[dependencies.backtrace]
version = "0.3.60"
optional = true
@@ -41,12 +48,27 @@ version = "4.0.0"
optional = true
[features]
-deadlock_detection = ["petgraph", "thread-id", "backtrace"]
+deadlock_detection = [
+ "petgraph",
+ "thread-id",
+ "backtrace",
+]
nightly = []
+
[target."cfg(target_os = \"redox\")".dependencies.redox_syscall]
version = "0.2.8"
+
[target."cfg(unix)".dependencies.libc]
version = "0.2.95"
+
[target."cfg(windows)".dependencies.winapi]
version = "0.3.9"
-features = ["winnt", "ntstatus", "minwindef", "winerror", "winbase", "errhandlingapi", "handleapi"]
+features = [
+ "winnt",
+ "ntstatus",
+ "minwindef",
+ "winerror",
+ "winbase",
+ "errhandlingapi",
+ "handleapi",
+]
diff --git a/vendor/parking_lot_core-0.8.6/LICENSE-APACHE b/vendor/parking_lot_core-0.8.6/LICENSE-APACHE
new file mode 100644
index 000000000..16fe87b06
--- /dev/null
+++ b/vendor/parking_lot_core-0.8.6/LICENSE-APACHE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+Copyright [yyyy] [name of copyright owner]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/vendor/parking_lot_core-0.8.5/LICENSE-MIT b/vendor/parking_lot_core-0.8.6/LICENSE-MIT
index 40b8817a4..40b8817a4 100644
--- a/vendor/parking_lot_core-0.8.5/LICENSE-MIT
+++ b/vendor/parking_lot_core-0.8.6/LICENSE-MIT
diff --git a/vendor/parking_lot_core-0.8.5/build.rs b/vendor/parking_lot_core-0.8.6/build.rs
index d29c769a8..d29c769a8 100644
--- a/vendor/parking_lot_core-0.8.5/build.rs
+++ b/vendor/parking_lot_core-0.8.6/build.rs
diff --git a/vendor/parking_lot_core-0.8.5/src/lib.rs b/vendor/parking_lot_core-0.8.6/src/lib.rs
index 27087f476..27087f476 100644
--- a/vendor/parking_lot_core-0.8.5/src/lib.rs
+++ b/vendor/parking_lot_core-0.8.6/src/lib.rs
diff --git a/vendor/parking_lot_core-0.8.5/src/parking_lot.rs b/vendor/parking_lot_core-0.8.6/src/parking_lot.rs
index 519ce9e34..519ce9e34 100644
--- a/vendor/parking_lot_core-0.8.5/src/parking_lot.rs
+++ b/vendor/parking_lot_core-0.8.6/src/parking_lot.rs
diff --git a/vendor/parking_lot_core-0.8.5/src/spinwait.rs b/vendor/parking_lot_core-0.8.6/src/spinwait.rs
index ad0327a3a..ad0327a3a 100644
--- a/vendor/parking_lot_core-0.8.5/src/spinwait.rs
+++ b/vendor/parking_lot_core-0.8.6/src/spinwait.rs
diff --git a/vendor/parking_lot_core-0.8.5/src/thread_parker/generic.rs b/vendor/parking_lot_core-0.8.6/src/thread_parker/generic.rs
index 5236e14ab..5236e14ab 100644
--- a/vendor/parking_lot_core-0.8.5/src/thread_parker/generic.rs
+++ b/vendor/parking_lot_core-0.8.6/src/thread_parker/generic.rs
diff --git a/vendor/parking_lot_core-0.8.5/src/thread_parker/linux.rs b/vendor/parking_lot_core-0.8.6/src/thread_parker/linux.rs
index 766e63b38..b22d0149f 100644
--- a/vendor/parking_lot_core-0.8.5/src/thread_parker/linux.rs
+++ b/vendor/parking_lot_core-0.8.6/src/thread_parker/linux.rs
@@ -80,10 +80,10 @@ impl super::ThreadParkerT for ThreadParker {
self.park();
return true;
}
- let ts = libc::timespec {
- tv_sec: diff.as_secs() as libc::time_t,
- tv_nsec: diff.subsec_nanos() as tv_nsec_t,
- };
+ // SAFETY: libc::timespec is zero initializable.
+ let mut ts: libc::timespec = std::mem::zeroed();
+ ts.tv_sec = diff.as_secs() as libc::time_t;
+ ts.tv_nsec = diff.subsec_nanos() as tv_nsec_t;
self.futex_wait(Some(ts));
}
true
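The only source-code change shown in this 0.8.5 → 0.8.6 bump is the hunk above (repeated for the 0.9.x copy of the crate further down): the `timespec` is now zero-initialized and filled field by field instead of built with a struct literal, presumably so the code still compiles on libc targets where `timespec` carries extra private padding fields. A standalone sketch of the pattern, assuming the `libc` crate and a hypothetical helper name:

    use std::time::Duration;

    // Hypothetical helper showing the zero-init pattern from the hunk above.
    fn timespec_from(diff: Duration) -> libc::timespec {
        // SAFETY: timespec is a plain C struct; the all-zero bit pattern is a valid value.
        let mut ts: libc::timespec = unsafe { std::mem::zeroed() };
        ts.tv_sec = diff.as_secs() as libc::time_t;
        // `as _` because the integer type of tv_nsec differs between targets
        // (the vendored code uses its own tv_nsec_t alias for the same reason).
        ts.tv_nsec = diff.subsec_nanos() as _;
        ts
    }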
diff --git a/vendor/parking_lot_core-0.8.5/src/thread_parker/mod.rs b/vendor/parking_lot_core-0.8.6/src/thread_parker/mod.rs
index a7e4bb69c..a7e4bb69c 100644
--- a/vendor/parking_lot_core-0.8.5/src/thread_parker/mod.rs
+++ b/vendor/parking_lot_core-0.8.6/src/thread_parker/mod.rs
diff --git a/vendor/parking_lot_core-0.8.5/src/thread_parker/redox.rs b/vendor/parking_lot_core-0.8.6/src/thread_parker/redox.rs
index cac06bcfe..cac06bcfe 100644
--- a/vendor/parking_lot_core-0.8.5/src/thread_parker/redox.rs
+++ b/vendor/parking_lot_core-0.8.6/src/thread_parker/redox.rs
diff --git a/vendor/parking_lot_core-0.8.5/src/thread_parker/sgx.rs b/vendor/parking_lot_core-0.8.6/src/thread_parker/sgx.rs
index 341efe2ba..341efe2ba 100644
--- a/vendor/parking_lot_core-0.8.5/src/thread_parker/sgx.rs
+++ b/vendor/parking_lot_core-0.8.6/src/thread_parker/sgx.rs
diff --git a/vendor/parking_lot_core-0.8.5/src/thread_parker/unix.rs b/vendor/parking_lot_core-0.8.6/src/thread_parker/unix.rs
index c2381e6df..c2381e6df 100644
--- a/vendor/parking_lot_core-0.8.5/src/thread_parker/unix.rs
+++ b/vendor/parking_lot_core-0.8.6/src/thread_parker/unix.rs
diff --git a/vendor/parking_lot_core-0.8.5/src/thread_parker/wasm.rs b/vendor/parking_lot_core-0.8.6/src/thread_parker/wasm.rs
index ba4118c01..ba4118c01 100644
--- a/vendor/parking_lot_core-0.8.5/src/thread_parker/wasm.rs
+++ b/vendor/parking_lot_core-0.8.6/src/thread_parker/wasm.rs
diff --git a/vendor/parking_lot_core-0.8.5/src/thread_parker/wasm_atomic.rs b/vendor/parking_lot_core-0.8.6/src/thread_parker/wasm_atomic.rs
index 2128e93cb..2128e93cb 100644
--- a/vendor/parking_lot_core-0.8.5/src/thread_parker/wasm_atomic.rs
+++ b/vendor/parking_lot_core-0.8.6/src/thread_parker/wasm_atomic.rs
diff --git a/vendor/parking_lot_core-0.8.5/src/thread_parker/windows/keyed_event.rs b/vendor/parking_lot_core-0.8.6/src/thread_parker/windows/keyed_event.rs
index 7c371537c..7c371537c 100644
--- a/vendor/parking_lot_core-0.8.5/src/thread_parker/windows/keyed_event.rs
+++ b/vendor/parking_lot_core-0.8.6/src/thread_parker/windows/keyed_event.rs
diff --git a/vendor/parking_lot_core-0.8.5/src/thread_parker/windows/mod.rs b/vendor/parking_lot_core-0.8.6/src/thread_parker/windows/mod.rs
index 76dbb5d49..76dbb5d49 100644
--- a/vendor/parking_lot_core-0.8.5/src/thread_parker/windows/mod.rs
+++ b/vendor/parking_lot_core-0.8.6/src/thread_parker/windows/mod.rs
diff --git a/vendor/parking_lot_core-0.8.5/src/thread_parker/windows/waitaddress.rs b/vendor/parking_lot_core-0.8.6/src/thread_parker/windows/waitaddress.rs
index 862c5c652..862c5c652 100644
--- a/vendor/parking_lot_core-0.8.5/src/thread_parker/windows/waitaddress.rs
+++ b/vendor/parking_lot_core-0.8.6/src/thread_parker/windows/waitaddress.rs
diff --git a/vendor/parking_lot_core-0.8.5/src/util.rs b/vendor/parking_lot_core-0.8.6/src/util.rs
index d7aaa8715..d7aaa8715 100644
--- a/vendor/parking_lot_core-0.8.5/src/util.rs
+++ b/vendor/parking_lot_core-0.8.6/src/util.rs
diff --git a/vendor/parking_lot_core-0.8.5/src/word_lock.rs b/vendor/parking_lot_core-0.8.6/src/word_lock.rs
index 1109401ae..1109401ae 100644
--- a/vendor/parking_lot_core-0.8.5/src/word_lock.rs
+++ b/vendor/parking_lot_core-0.8.6/src/word_lock.rs
diff --git a/vendor/parking_lot_core/.cargo-checksum.json b/vendor/parking_lot_core/.cargo-checksum.json
index 196164748..f1b0944e2 100644
--- a/vendor/parking_lot_core/.cargo-checksum.json
+++ b/vendor/parking_lot_core/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.toml":"894a414fd99c9650a87b880fbc3b14e35e7bb33eb9c318404ec260a7a9b34fb9","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"c9a75f18b9ab2927829a208fc6aa2cf4e63b8420887ba29cdb265d6619ae82d5","build.rs":"29e629057144d1238dcd8ea70ad6cbb6ec14ca742797af3fa9335710ff5cbaaa","src/lib.rs":"def53c1ff1d4a05e6261aa9b75ac089a51ecc15642e1320202e5c6836e2bb732","src/parking_lot.rs":"b08d6c00de6fc342a1b4f0268b056789ad1300aa8a9c4fb7ad4d49514cfab72c","src/spinwait.rs":"0d73980c72e84fd75c73693f0b351e097353b46f4b8aaa67b3dde7c721817bf7","src/thread_parker/generic.rs":"414bd3114b40f7665efe59fa4c5033d2401eafc58b57a9ba5803949d26cc0454","src/thread_parker/linux.rs":"6837304ca62e1774b0d811f9a61f4a7a70b8914a905b69aa4808220fb9063879","src/thread_parker/mod.rs":"afaf652e242d5235d38f5749f3b12dc6def793dee40cd6c820e1dd45a70a5ac8","src/thread_parker/redox.rs":"38fbede41817b6606a5612ee9456940eaf627540a4aa39de8452e355ca1df306","src/thread_parker/sgx.rs":"3b6190eb90fd88d5eee5370213a1de408cce24ded04bfb0f374c3bbf10752187","src/thread_parker/unix.rs":"4a314ea3b0b2a22cd3857bedcd1d15d843365acbc085f0cdc9ae085d600760da","src/thread_parker/wasm.rs":"86e954d009d57d2dfdbca71e5c0f116a99a182af83b7e51ed5b14fc55b2f7b01","src/thread_parker/wasm_atomic.rs":"6fbc09accc9df9d42444cec6f55d3eed2c7078a474485fa269a9f503da2ee1c5","src/thread_parker/windows/keyed_event.rs":"758b475e6931d5cb8715786e8ef3b4e6b0d017fb163ea5d8cc8b6b728454238b","src/thread_parker/windows/mod.rs":"3073e82ecc46d713b2c4296459ead9f3788d29e7b5c9be4b68d29d98b5a803ce","src/thread_parker/windows/waitaddress.rs":"8eee4b444a00e7ad83bdc50eb4b4af36610dfa4690329e8947962fef04cb1000","src/util.rs":"285e6133150645525f2ca1ece41f6d35bad4e7c5e08b42b20c99d2a97e04a974","src/word_lock.rs":"9ba49082359c35ad5b4e8d219fede1ffca75225f9ccb971cbba01f20e2ed2738"},"package":"4dc9e0dc2adc1c69d09143aff38d3d30c5c3f0df0dad82e6d25547af174ebec0"} \ No newline at end of file
+{"files":{"Cargo.toml":"fdae8721cb66a7a3544ae5e2de3784c44c6f00a898c436132524693aba7aa22e","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"c9a75f18b9ab2927829a208fc6aa2cf4e63b8420887ba29cdb265d6619ae82d5","build.rs":"29e629057144d1238dcd8ea70ad6cbb6ec14ca742797af3fa9335710ff5cbaaa","src/lib.rs":"def53c1ff1d4a05e6261aa9b75ac089a51ecc15642e1320202e5c6836e2bb732","src/parking_lot.rs":"b08d6c00de6fc342a1b4f0268b056789ad1300aa8a9c4fb7ad4d49514cfab72c","src/spinwait.rs":"0d73980c72e84fd75c73693f0b351e097353b46f4b8aaa67b3dde7c721817bf7","src/thread_parker/generic.rs":"414bd3114b40f7665efe59fa4c5033d2401eafc58b57a9ba5803949d26cc0454","src/thread_parker/linux.rs":"70bfe6c168fc431965374d921e33b3e04536cb68fde3e807a837aa38a65772b0","src/thread_parker/mod.rs":"afaf652e242d5235d38f5749f3b12dc6def793dee40cd6c820e1dd45a70a5ac8","src/thread_parker/redox.rs":"38fbede41817b6606a5612ee9456940eaf627540a4aa39de8452e355ca1df306","src/thread_parker/sgx.rs":"3b6190eb90fd88d5eee5370213a1de408cce24ded04bfb0f374c3bbf10752187","src/thread_parker/unix.rs":"f1425cf5876d1f9be98979343fb3bcb2a25982bcff80e7399c86ce835d450143","src/thread_parker/wasm.rs":"86e954d009d57d2dfdbca71e5c0f116a99a182af83b7e51ed5b14fc55b2f7b01","src/thread_parker/wasm_atomic.rs":"6fbc09accc9df9d42444cec6f55d3eed2c7078a474485fa269a9f503da2ee1c5","src/thread_parker/windows/keyed_event.rs":"758b475e6931d5cb8715786e8ef3b4e6b0d017fb163ea5d8cc8b6b728454238b","src/thread_parker/windows/mod.rs":"3073e82ecc46d713b2c4296459ead9f3788d29e7b5c9be4b68d29d98b5a803ce","src/thread_parker/windows/waitaddress.rs":"8eee4b444a00e7ad83bdc50eb4b4af36610dfa4690329e8947962fef04cb1000","src/util.rs":"285e6133150645525f2ca1ece41f6d35bad4e7c5e08b42b20c99d2a97e04a974","src/word_lock.rs":"9ba49082359c35ad5b4e8d219fede1ffca75225f9ccb971cbba01f20e2ed2738"},"package":"ba1ef8814b5c993410bb3adfad7a5ed269563e4a2f90c41f5d85be7fb47133bf"} \ No newline at end of file
diff --git a/vendor/parking_lot_core/Cargo.toml b/vendor/parking_lot_core/Cargo.toml
index f4daf6f02..47529ad8d 100644
--- a/vendor/parking_lot_core/Cargo.toml
+++ b/vendor/parking_lot_core/Cargo.toml
@@ -12,7 +12,7 @@
[package]
edition = "2018"
name = "parking_lot_core"
-version = "0.9.4"
+version = "0.9.6"
authors = ["Amanieu d'Antras <amanieu@gmail.com>"]
description = "An advanced API for creating custom synchronization primitives."
keywords = [
diff --git a/vendor/parking_lot_core/src/thread_parker/linux.rs b/vendor/parking_lot_core/src/thread_parker/linux.rs
index 5d4e229ad..92601f62a 100644
--- a/vendor/parking_lot_core/src/thread_parker/linux.rs
+++ b/vendor/parking_lot_core/src/thread_parker/linux.rs
@@ -80,10 +80,10 @@ impl super::ThreadParkerT for ThreadParker {
self.park();
return true;
}
- let ts = libc::timespec {
- tv_sec: diff.as_secs() as libc::time_t,
- tv_nsec: diff.subsec_nanos() as tv_nsec_t,
- };
+ // SAFETY: libc::timespec is zero initializable.
+ let mut ts: libc::timespec = std::mem::zeroed();
+ ts.tv_sec = diff.as_secs() as libc::time_t;
+ ts.tv_nsec = diff.subsec_nanos() as tv_nsec_t;
self.futex_wait(Some(ts));
}
true
diff --git a/vendor/parking_lot_core/src/thread_parker/unix.rs b/vendor/parking_lot_core/src/thread_parker/unix.rs
index 88b6df839..7f2860372 100644
--- a/vendor/parking_lot_core/src/thread_parker/unix.rs
+++ b/vendor/parking_lot_core/src/thread_parker/unix.rs
@@ -5,7 +5,7 @@
// http://opensource.org/licenses/MIT>, at your option. This file may not be
// copied, modified, or distributed except according to those terms.
-#[cfg(any(target_os = "macos", target_os = "ios"))]
+#[cfg(any(target_os = "macos", target_os = "ios", target_os = "watchos"))]
use core::ptr;
use core::{
cell::{Cell, UnsafeCell},
@@ -130,6 +130,7 @@ impl ThreadParker {
#[cfg(any(
target_os = "macos",
target_os = "ios",
+ target_os = "watchos",
target_os = "android",
target_os = "espidf"
))]
@@ -140,6 +141,7 @@ impl ThreadParker {
#[cfg(not(any(
target_os = "macos",
target_os = "ios",
+ target_os = "watchos",
target_os = "android",
target_os = "espidf"
)))]
@@ -193,7 +195,7 @@ impl super::UnparkHandleT for UnparkHandle {
}
// Returns the current time on the clock used by pthread_cond_t as a timespec.
-#[cfg(any(target_os = "macos", target_os = "ios"))]
+#[cfg(any(target_os = "macos", target_os = "ios", target_os = "watchos"))]
#[inline]
fn timespec_now() -> libc::timespec {
let mut now = MaybeUninit::<libc::timeval>::uninit();
@@ -206,7 +208,7 @@ fn timespec_now() -> libc::timespec {
tv_nsec: now.tv_usec as tv_nsec_t * 1000,
}
}
-#[cfg(not(any(target_os = "macos", target_os = "ios")))]
+#[cfg(not(any(target_os = "macos", target_os = "ios", target_os = "watchos")))]
#[inline]
fn timespec_now() -> libc::timespec {
let mut now = MaybeUninit::<libc::timespec>::uninit();
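The unix.rs hunks simply add target_os = "watchos" to every cfg list that already special-cased macOS and iOS; Apple platforms lack `pthread_condattr_setclock`, so they take the `gettimeofday`-based relative-timeout path, and watchOS now joins them. A minimal, illustrative sketch of the same cfg-gating idiom (function name is made up):

    // Illustrative only: the target_os gating pattern used in the hunks above.
    #[cfg(any(target_os = "macos", target_os = "ios", target_os = "watchos"))]
    fn uses_realtime_clock() -> bool {
        true
    }

    #[cfg(not(any(target_os = "macos", target_os = "ios", target_os = "watchos")))]
    fn uses_realtime_clock() -> bool {
        false
    }

    fn main() {
        println!("relative-time fallback path: {}", uses_realtime_clock());
    }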
diff --git a/vendor/proc-macro2/.cargo-checksum.json b/vendor/proc-macro2/.cargo-checksum.json
index b20fcaba3..de2b0c753 100644
--- a/vendor/proc-macro2/.cargo-checksum.json
+++ b/vendor/proc-macro2/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.toml":"070b0704e5cdbac330b9cecee44e488a40b6daf6161215e4457bdfc3e7e9bf94","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"0c17148c1957c3f721d99fc99aedaefee5f2f1ba7e2336a289b02f91609099fb","build.rs":"275f7a9ee0b9eff972124951de544ae17ee3e698a4e89b0f0393b334344f5e30","src/detection.rs":"ed9a5f9a979ab01247d7a68eeb1afa3c13209334c5bfff0f9289cb07e5bb4e8b","src/fallback.rs":"a9e6fa159d6a111a231fa9367d54859103e9d49f6662397baea951b5f3e7e983","src/lib.rs":"81865a868ef697987cafef5eb9512d3109da373456eead2a36c22d44e769c947","src/marker.rs":"344a8394f06a1d43355b514920e7e3c0c6dce507be767e3a590bbe3552edd110","src/parse.rs":"637a9fe6e3e21c36fa411b70674f617743fe0129787c17a559e78f86418d0da4","src/rcvec.rs":"49b6784c6ca5f32573cd8a83758b485d8acbfa126e5fb516ae439e429ef4c144","src/wrapper.rs":"8ea825cdac628570719a258419fcffd1c9d2ca1ca5e2fbbbf283dd9cc6695910","tests/comments.rs":"31115b3a56c83d93eef2fb4c9566bf4543e302560732986161b98aef504785ed","tests/features.rs":"a86deb8644992a4eb64d9fd493eff16f9cf9c5cb6ade3a634ce0c990cf87d559","tests/marker.rs":"cb6d776eba6a238d726b0f531883adf41957e06f2717ee8a069821c81e7081d6","tests/test.rs":"d7f21088314d1df25447fdc0a32feffae26d4d637e3ce68e23c0190060cb5652","tests/test_fmt.rs":"9357769945784354909259084ec8b34d2aa52081dd3967cac6dae3a5e3df3bc0"},"package":"5ea3d908b0e36316caf9e9e2c4625cdde190a7e6f440d794667ed17a1855e725"} \ No newline at end of file
+{"files":{"Cargo.toml":"a4f134ba553220d2d6a3ae778423be3e0223ad8b301a64c0e00ec97c65388de2","LICENSE-APACHE":"62c7a1e35f56406896d7aa7ca52d0cc0d272ac022b5d2796e7d6905db8a3636a","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"32cbd395594db59ecc43d7866cfa2663f3687bb7df631781d60ae83200dae8a8","build.rs":"275f7a9ee0b9eff972124951de544ae17ee3e698a4e89b0f0393b334344f5e30","src/detection.rs":"ed9a5f9a979ab01247d7a68eeb1afa3c13209334c5bfff0f9289cb07e5bb4e8b","src/fallback.rs":"7b581d52bea33e78542c230afb6ae5212b322f6a584244a63ddc28ed32939a12","src/lib.rs":"4a7358479655f388f11f2255783b97ee4caa9d63a59be6f60e50760a7d836b2c","src/location.rs":"f55d2e61f1bb1af65e14ed04c9e91eb1ddbf8430e8c05f2048d1cd538d27368e","src/marker.rs":"344a8394f06a1d43355b514920e7e3c0c6dce507be767e3a590bbe3552edd110","src/parse.rs":"637a9fe6e3e21c36fa411b70674f617743fe0129787c17a559e78f86418d0da4","src/rcvec.rs":"49b6784c6ca5f32573cd8a83758b485d8acbfa126e5fb516ae439e429ef4c144","src/wrapper.rs":"75fd4c805da3d384fb957f6ac76ec33398d45b121e49a3a5e5403301062c6619","tests/comments.rs":"31115b3a56c83d93eef2fb4c9566bf4543e302560732986161b98aef504785ed","tests/features.rs":"a86deb8644992a4eb64d9fd493eff16f9cf9c5cb6ade3a634ce0c990cf87d559","tests/marker.rs":"cb6d776eba6a238d726b0f531883adf41957e06f2717ee8a069821c81e7081d6","tests/test.rs":"d7f21088314d1df25447fdc0a32feffae26d4d637e3ce68e23c0190060cb5652","tests/test_fmt.rs":"9357769945784354909259084ec8b34d2aa52081dd3967cac6dae3a5e3df3bc0"},"package":"6ef7d57beacfaf2d8aee5937dab7b7f28de3cb8b1828479bb5de2a7106f2bae2"} \ No newline at end of file
diff --git a/vendor/proc-macro2/Cargo.toml b/vendor/proc-macro2/Cargo.toml
index 1bda7e36d..7fef345e4 100644
--- a/vendor/proc-macro2/Cargo.toml
+++ b/vendor/proc-macro2/Cargo.toml
@@ -13,7 +13,7 @@
edition = "2018"
rust-version = "1.31"
name = "proc-macro2"
-version = "1.0.47"
+version = "1.0.50"
authors = [
"David Tolnay <dtolnay@gmail.com>",
"Alex Crichton <alex@alexcrichton.com>",
@@ -46,6 +46,9 @@ targets = ["x86_64-unknown-linux-gnu"]
[package.metadata.playground]
features = ["span-locations"]
+[lib]
+doc-scrape-examples = false
+
[dependencies.unicode-ident]
version = "1.0"
diff --git a/vendor/proc-macro2/LICENSE-APACHE b/vendor/proc-macro2/LICENSE-APACHE
index 16fe87b06..1b5ec8b78 100644
--- a/vendor/proc-macro2/LICENSE-APACHE
+++ b/vendor/proc-macro2/LICENSE-APACHE
@@ -174,28 +174,3 @@ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
-
-APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
-Copyright [yyyy] [name of copyright owner]
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
diff --git a/vendor/proc-macro2/LICENSE-MIT b/vendor/proc-macro2/LICENSE-MIT
index 39e0ed660..31aa79387 100644
--- a/vendor/proc-macro2/LICENSE-MIT
+++ b/vendor/proc-macro2/LICENSE-MIT
@@ -1,5 +1,3 @@
-Copyright (c) 2014 Alex Crichton
-
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
diff --git a/vendor/proc-macro2/README.md b/vendor/proc-macro2/README.md
index 70b6c869e..131ba5130 100644
--- a/vendor/proc-macro2/README.md
+++ b/vendor/proc-macro2/README.md
@@ -3,7 +3,7 @@
[<img alt="github" src="https://img.shields.io/badge/github-dtolnay/proc--macro2-8da0cb?style=for-the-badge&labelColor=555555&logo=github" height="20">](https://github.com/dtolnay/proc-macro2)
[<img alt="crates.io" src="https://img.shields.io/crates/v/proc-macro2.svg?style=for-the-badge&color=fc8d62&logo=rust" height="20">](https://crates.io/crates/proc-macro2)
[<img alt="docs.rs" src="https://img.shields.io/badge/docs.rs-proc--macro2-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs" height="20">](https://docs.rs/proc-macro2)
-[<img alt="build status" src="https://img.shields.io/github/workflow/status/dtolnay/proc-macro2/CI/master?style=for-the-badge" height="20">](https://github.com/dtolnay/proc-macro2/actions?query=branch%3Amaster)
+[<img alt="build status" src="https://img.shields.io/github/actions/workflow/status/dtolnay/proc-macro2/ci.yml?branch=master&style=for-the-badge" height="20">](https://github.com/dtolnay/proc-macro2/actions?query=branch%3Amaster)
A wrapper around the procedural macro API of the compiler's `proc_macro` crate.
This library serves two purposes:
diff --git a/vendor/proc-macro2/src/fallback.rs b/vendor/proc-macro2/src/fallback.rs
index fe4f248d3..5a4c350d5 100644
--- a/vendor/proc-macro2/src/fallback.rs
+++ b/vendor/proc-macro2/src/fallback.rs
@@ -1,3 +1,5 @@
+#[cfg(span_locations)]
+use crate::location::LineColumn;
use crate::parse::{self, Cursor};
use crate::rcvec::{RcVec, RcVecBuilder, RcVecIntoIter, RcVecMut};
use crate::{Delimiter, Spacing, TokenTree};
@@ -332,12 +334,6 @@ impl Debug for SourceFile {
}
}
-#[derive(Clone, Copy, Debug, PartialEq, Eq)]
-pub(crate) struct LineColumn {
- pub line: usize,
- pub column: usize,
-}
-
#[cfg(span_locations)]
thread_local! {
static SOURCE_MAP: RefCell<SourceMap> = RefCell::new(SourceMap {
diff --git a/vendor/proc-macro2/src/lib.rs b/vendor/proc-macro2/src/lib.rs
index 3fda02d5c..633333ba8 100644
--- a/vendor/proc-macro2/src/lib.rs
+++ b/vendor/proc-macro2/src/lib.rs
@@ -86,7 +86,7 @@
//! a different thread.
// Proc-macro2 types in rustdoc of other crates get linked to here.
-#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.47")]
+#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.50")]
#![cfg_attr(
any(proc_macro_span, super_unstable),
feature(proc_macro_span, proc_macro_span_shrink)
@@ -139,6 +139,9 @@ use crate::fallback as imp;
#[cfg(wrap_proc_macro)]
mod imp;
+#[cfg(span_locations)]
+mod location;
+
use crate::marker::Marker;
use core::cmp::Ordering;
use core::fmt::{self, Debug, Display};
@@ -150,6 +153,9 @@ use std::error::Error;
#[cfg(procmacro2_semver_exempt)]
use std::path::PathBuf;
+#[cfg(span_locations)]
+pub use crate::location::LineColumn;
+
/// An abstract stream of tokens, or more concretely a sequence of token trees.
///
/// This type provides interfaces for iterating over token trees and for
@@ -356,37 +362,6 @@ impl Debug for SourceFile {
}
}
-/// A line-column pair representing the start or end of a `Span`.
-///
-/// This type is semver exempt and not exposed by default.
-#[cfg(span_locations)]
-#[cfg_attr(doc_cfg, doc(cfg(feature = "span-locations")))]
-#[derive(Copy, Clone, Debug, PartialEq, Eq)]
-pub struct LineColumn {
- /// The 1-indexed line in the source file on which the span starts or ends
- /// (inclusive).
- pub line: usize,
- /// The 0-indexed column (in UTF-8 characters) in the source file on which
- /// the span starts or ends (inclusive).
- pub column: usize,
-}
-
-#[cfg(span_locations)]
-impl Ord for LineColumn {
- fn cmp(&self, other: &Self) -> Ordering {
- self.line
- .cmp(&other.line)
- .then(self.column.cmp(&other.column))
- }
-}
-
-#[cfg(span_locations)]
-impl PartialOrd for LineColumn {
- fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
- Some(self.cmp(other))
- }
-}
-
/// A region of source code, along with macro expansion information.
#[derive(Copy, Clone)]
pub struct Span {
@@ -492,8 +467,7 @@ impl Span {
#[cfg(span_locations)]
#[cfg_attr(doc_cfg, doc(cfg(feature = "span-locations")))]
pub fn start(&self) -> LineColumn {
- let imp::LineColumn { line, column } = self.inner.start();
- LineColumn { line, column }
+ self.inner.start()
}
/// Get the ending line/column in the source file for this span.
@@ -508,8 +482,7 @@ impl Span {
#[cfg(span_locations)]
#[cfg_attr(doc_cfg, doc(cfg(feature = "span-locations")))]
pub fn end(&self) -> LineColumn {
- let imp::LineColumn { line, column } = self.inner.end();
- LineColumn { line, column }
+ self.inner.end()
}
/// Creates an empty span pointing to directly before this span.
diff --git a/vendor/proc-macro2/src/location.rs b/vendor/proc-macro2/src/location.rs
new file mode 100644
index 000000000..463026c27
--- /dev/null
+++ b/vendor/proc-macro2/src/location.rs
@@ -0,0 +1,29 @@
+use core::cmp::Ordering;
+
+/// A line-column pair representing the start or end of a `Span`.
+///
+/// This type is semver exempt and not exposed by default.
+#[cfg_attr(doc_cfg, doc(cfg(feature = "span-locations")))]
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
+pub struct LineColumn {
+ /// The 1-indexed line in the source file on which the span starts or ends
+ /// (inclusive).
+ pub line: usize,
+ /// The 0-indexed column (in UTF-8 characters) in the source file on which
+ /// the span starts or ends (inclusive).
+ pub column: usize,
+}
+
+impl Ord for LineColumn {
+ fn cmp(&self, other: &Self) -> Ordering {
+ self.line
+ .cmp(&other.line)
+ .then(self.column.cmp(&other.column))
+ }
+}
+
+impl PartialOrd for LineColumn {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ Some(self.cmp(other))
+ }
+}
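With this move, `LineColumn` is defined once in `location.rs`, re-exported from the crate root under `span_locations`, and additionally derives `Hash`; the per-backend `fallback::LineColumn` and `wrapper::LineColumn` shims are deleted elsewhere in this change. A small usage sketch, assuming proc-macro2 is built with the `span-locations` feature (which is what enables `Span::start()`/`end()`):

    use proc_macro2::{LineColumn, TokenStream};

    // Earliest start position of any token in the stream; relies on the Ord impl above.
    fn earliest_start(tokens: &TokenStream) -> Option<LineColumn> {
        tokens.clone().into_iter().map(|tt| tt.span().start()).min()
    }

    fn main() {
        let ts: TokenStream = "fn demo() {}".parse().unwrap();
        if let Some(start) = earliest_start(&ts) {
            println!("first token starts at {}:{}", start.line, start.column);
        }
    }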
diff --git a/vendor/proc-macro2/src/wrapper.rs b/vendor/proc-macro2/src/wrapper.rs
index 47d149473..bc800d56d 100644
--- a/vendor/proc-macro2/src/wrapper.rs
+++ b/vendor/proc-macro2/src/wrapper.rs
@@ -1,4 +1,6 @@
use crate::detection::inside_proc_macro;
+#[cfg(span_locations)]
+use crate::location::LineColumn;
use crate::{fallback, Delimiter, Punct, Spacing, TokenTree};
use core::fmt::{self, Debug, Display};
use core::iter::FromIterator;
@@ -389,12 +391,6 @@ impl Debug for SourceFile {
}
}
-#[cfg(any(super_unstable, feature = "span-locations"))]
-pub(crate) struct LineColumn {
- pub line: usize,
- pub column: usize,
-}
-
#[derive(Copy, Clone)]
pub(crate) enum Span {
Compiler(proc_macro::Span),
@@ -471,7 +467,7 @@ impl Span {
}
}
- #[cfg(any(super_unstable, feature = "span-locations"))]
+ #[cfg(span_locations)]
pub fn start(&self) -> LineColumn {
match self {
#[cfg(proc_macro_span)]
@@ -481,14 +477,11 @@ impl Span {
}
#[cfg(not(proc_macro_span))]
Span::Compiler(_) => LineColumn { line: 0, column: 0 },
- Span::Fallback(s) => {
- let fallback::LineColumn { line, column } = s.start();
- LineColumn { line, column }
- }
+ Span::Fallback(s) => s.start(),
}
}
- #[cfg(any(super_unstable, feature = "span-locations"))]
+ #[cfg(span_locations)]
pub fn end(&self) -> LineColumn {
match self {
#[cfg(proc_macro_span)]
@@ -498,10 +491,7 @@ impl Span {
}
#[cfg(not(proc_macro_span))]
Span::Compiler(_) => LineColumn { line: 0, column: 0 },
- Span::Fallback(s) => {
- let fallback::LineColumn { line, column } = s.end();
- LineColumn { line, column }
- }
+ Span::Fallback(s) => s.end(),
}
}
diff --git a/vendor/quote/.cargo-checksum.json b/vendor/quote/.cargo-checksum.json
index dd217b081..990f0d7ad 100644
--- a/vendor/quote/.cargo-checksum.json
+++ b/vendor/quote/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.toml":"f4cf791ed3ccb9a3d5840f63af9c8d6b60453d9cd2451bf71c98f413e639b5ac","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"c9a75f18b9ab2927829a208fc6aa2cf4e63b8420887ba29cdb265d6619ae82d5","README.md":"1aed9e312142718f8e1a37a0a982d8e45398dee835776b0ffa54639efdd75dfb","build.rs":"3733c86ae2733629f873f93c2f45da30164beee8de9ee0833099fac6a05a3e6b","rust-toolchain.toml":"6bbb61302978c736b2da03e4fb40e3beab908f85d533ab46fd541e637b5f3e0f","src/ext.rs":"9881576cac3e476a4bf04f9b601cf9a53b79399fb0ca9634e8b861ac91709843","src/format.rs":"c595015418f35e6992e710441b9999f09b2afe4678b138039d670d100c0bdd86","src/ident_fragment.rs":"66788c5f57681547d936a9bcf51873b658630c76b2e690df4b3158edf573384a","src/lib.rs":"17db85f0808d31a597ea7f56744b031b5bdba548d9474efd874cacfe6541ff60","src/runtime.rs":"79bbb2fe5b18bc3ec9f8f8143bd120b45680a3027c89f37b0a6a6b97bdaadb21","src/spanned.rs":"43ff919f1d2d27dff6b2db409539b1c697e913eb8c3131cf5de45a845752b7b5","src/to_tokens.rs":"99bb6f467289c32af6c1f7af0d45cc6ac7b31e2436774e616770152a49e6ac0f","tests/compiletest.rs":"022a8e400ef813d7ea1875b944549cee5125f6a995dc33e93b48cba3e1b57bd1","tests/test.rs":"29221bc5eb9497a2fcda1b44e9535c645f7c7ca84ffc32e6040e57d94dbda2c8","tests/ui/does-not-have-iter-interpolated-dup.rs":"ad13eea21d4cdd2ab6c082f633392e1ff20fb0d1af5f2177041e0bf7f30da695","tests/ui/does-not-have-iter-interpolated-dup.stderr":"be67a6c99eed689aa08b46afd0ab3ed4e71fde42e5efed41ab05741710f42fe5","tests/ui/does-not-have-iter-interpolated.rs":"83a5b3f240651adcbe4b6e51076d76d653ad439b37442cf4054f1fd3c073f3b7","tests/ui/does-not-have-iter-interpolated.stderr":"ed05bc229abf5a267ea3d194336a3a845a061bd10c1be7020b9351f81e737946","tests/ui/does-not-have-iter-separated.rs":"fe413c48331d5e3a7ae5fef6a5892a90c72f610d54595879eb49d0a94154ba3f","tests/ui/does-not-have-iter-separated.stderr":"873f4db0ec63606d64d46790f3ee24bdb4dd04379b8e57dc5ac1114cc3775fb3","tests/ui/does-not-have-iter.rs":"09dc9499d861b63cebb0848b855b78e2dc9497bfde37ba6339f3625ae009a62f","tests/ui/does-not-have-iter.stderr":"0e3de2635a79cce9226113fa8cb6bdbdc0ffcd487d7537d4dd0dc8222adf4a8a","tests/ui/not-quotable.rs":"5759d0884943417609f28faadc70254a3e2fd3d9bd6ff7297a3fb70a77fafd8a","tests/ui/not-quotable.stderr":"d55354e6a963b5eedce9bc68a1b04bf5b82f966c3c1bb987d1690e74c0c3d69c","tests/ui/not-repeatable.rs":"a4b115c04e4e41049a05f5b69450503fbffeba031218b4189cb931839f7f9a9c","tests/ui/not-repeatable.stderr":"27149da38cc074953f124e995d76e569e0f718a5431c321cb5d639b0065520b3","tests/ui/wrong-type-span.rs":"6195e35ea844c0c52ba1cff5d790c3a371af6915d137d377834ad984229ef9ea","tests/ui/wrong-type-span.stderr":"c986de5cb858272636c9e36ae5f57e5ee13589d4f1a73a050b21824010314f8d"},"package":"bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179"} \ No newline at end of file
+{"files":{"Cargo.toml":"b31678b5e9696b0320493f7120e873490183308fc5afb052dc23a265048b8e16","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"66f3cf08338e47618fd23d810355b075da573815d9c1e158a7f7ab140decc16d","build.rs":"3733c86ae2733629f873f93c2f45da30164beee8de9ee0833099fac6a05a3e6b","rust-toolchain.toml":"6bbb61302978c736b2da03e4fb40e3beab908f85d533ab46fd541e637b5f3e0f","src/ext.rs":"9881576cac3e476a4bf04f9b601cf9a53b79399fb0ca9634e8b861ac91709843","src/format.rs":"c595015418f35e6992e710441b9999f09b2afe4678b138039d670d100c0bdd86","src/ident_fragment.rs":"66788c5f57681547d936a9bcf51873b658630c76b2e690df4b3158edf573384a","src/lib.rs":"5f0dac39c736d01c698745909c93efb7e701aed4493c488a32239a7efd7d4469","src/runtime.rs":"79bbb2fe5b18bc3ec9f8f8143bd120b45680a3027c89f37b0a6a6b97bdaadb21","src/spanned.rs":"43ff919f1d2d27dff6b2db409539b1c697e913eb8c3131cf5de45a845752b7b5","src/to_tokens.rs":"99bb6f467289c32af6c1f7af0d45cc6ac7b31e2436774e616770152a49e6ac0f","tests/compiletest.rs":"022a8e400ef813d7ea1875b944549cee5125f6a995dc33e93b48cba3e1b57bd1","tests/test.rs":"c4967a33fcf7c2effd1979bcb4c03ae797359eeab92c627ab4b609cd8678ff78","tests/ui/does-not-have-iter-interpolated-dup.rs":"ad13eea21d4cdd2ab6c082f633392e1ff20fb0d1af5f2177041e0bf7f30da695","tests/ui/does-not-have-iter-interpolated-dup.stderr":"be67a6c99eed689aa08b46afd0ab3ed4e71fde42e5efed41ab05741710f42fe5","tests/ui/does-not-have-iter-interpolated.rs":"83a5b3f240651adcbe4b6e51076d76d653ad439b37442cf4054f1fd3c073f3b7","tests/ui/does-not-have-iter-interpolated.stderr":"ed05bc229abf5a267ea3d194336a3a845a061bd10c1be7020b9351f81e737946","tests/ui/does-not-have-iter-separated.rs":"fe413c48331d5e3a7ae5fef6a5892a90c72f610d54595879eb49d0a94154ba3f","tests/ui/does-not-have-iter-separated.stderr":"873f4db0ec63606d64d46790f3ee24bdb4dd04379b8e57dc5ac1114cc3775fb3","tests/ui/does-not-have-iter.rs":"09dc9499d861b63cebb0848b855b78e2dc9497bfde37ba6339f3625ae009a62f","tests/ui/does-not-have-iter.stderr":"0e3de2635a79cce9226113fa8cb6bdbdc0ffcd487d7537d4dd0dc8222adf4a8a","tests/ui/not-quotable.rs":"d630ed8e5fe16f125015999d068569cc3fe5dc1033a56e622690ec2c080c13f4","tests/ui/not-quotable.stderr":"4b81ec7bb82ba197ede6d47c1b6e5cacc0999cc8c9e2fa77a46db5e29397153c","tests/ui/not-repeatable.rs":"dbfedcad67b57543aa7d6684b6549db90fbdb74ffebcae42323d31eb88e59c87","tests/ui/not-repeatable.stderr":"a578a6293fef33c54f8e8114bf72a933a1315b45e866e4bcef1e31ce2ce55dcd","tests/ui/wrong-type-span.rs":"6195e35ea844c0c52ba1cff5d790c3a371af6915d137d377834ad984229ef9ea","tests/ui/wrong-type-span.stderr":"c986de5cb858272636c9e36ae5f57e5ee13589d4f1a73a050b21824010314f8d"},"package":"8856d8364d252a14d474036ea1358d63c9e6965c8e5c1885c18f73d70bff9c7b"} \ No newline at end of file
diff --git a/vendor/quote/Cargo.toml b/vendor/quote/Cargo.toml
index b2abe6612..4d6b32670 100644
--- a/vendor/quote/Cargo.toml
+++ b/vendor/quote/Cargo.toml
@@ -13,7 +13,7 @@
edition = "2018"
rust-version = "1.31"
name = "quote"
-version = "1.0.21"
+version = "1.0.23"
authors = ["David Tolnay <dtolnay@gmail.com>"]
autobenches = false
description = "Quasi-quoting macro quote!(...)"
@@ -30,6 +30,9 @@ repository = "https://github.com/dtolnay/quote"
[package.metadata.docs.rs]
targets = ["x86_64-unknown-linux-gnu"]
+[lib]
+doc-scrape-examples = false
+
[dependencies.proc-macro2]
version = "1.0.40"
default-features = false
@@ -38,7 +41,7 @@ default-features = false
version = "1.0"
[dev-dependencies.trybuild]
-version = "1.0.52"
+version = "1.0.66"
features = ["diff"]
[features]
diff --git a/vendor/quote/LICENSE-MIT b/vendor/quote/LICENSE-MIT
index 40b8817a4..31aa79387 100644
--- a/vendor/quote/LICENSE-MIT
+++ b/vendor/quote/LICENSE-MIT
@@ -1,5 +1,3 @@
-Copyright (c) 2016 The Rust Project Developers
-
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
diff --git a/vendor/quote/README.md b/vendor/quote/README.md
index 2c21a50d2..74e99ce42 100644
--- a/vendor/quote/README.md
+++ b/vendor/quote/README.md
@@ -4,7 +4,7 @@ Rust Quasi-Quoting
[<img alt="github" src="https://img.shields.io/badge/github-dtolnay/quote-8da0cb?style=for-the-badge&labelColor=555555&logo=github" height="20">](https://github.com/dtolnay/quote)
[<img alt="crates.io" src="https://img.shields.io/crates/v/quote.svg?style=for-the-badge&color=fc8d62&logo=rust" height="20">](https://crates.io/crates/quote)
[<img alt="docs.rs" src="https://img.shields.io/badge/docs.rs-quote-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs" height="20">](https://docs.rs/quote)
-[<img alt="build status" src="https://img.shields.io/github/workflow/status/dtolnay/quote/CI/master?style=for-the-badge" height="20">](https://github.com/dtolnay/quote/actions?query=branch%3Amaster)
+[<img alt="build status" src="https://img.shields.io/github/actions/workflow/status/dtolnay/quote/ci.yml?branch=master&style=for-the-badge" height="20">](https://github.com/dtolnay/quote/actions?query=branch%3Amaster)
This crate provides the [`quote!`] macro for turning Rust syntax tree data
structures into tokens of source code.
diff --git a/vendor/quote/src/lib.rs b/vendor/quote/src/lib.rs
index 35594827f..adc14c573 100644
--- a/vendor/quote/src/lib.rs
+++ b/vendor/quote/src/lib.rs
@@ -81,7 +81,7 @@
//! ```
// Quote types in rustdoc of other crates get linked to here.
-#![doc(html_root_url = "https://docs.rs/quote/1.0.21")]
+#![doc(html_root_url = "https://docs.rs/quote/1.0.23")]
#![allow(
clippy::doc_markdown,
clippy::missing_errors_doc,
diff --git a/vendor/quote/tests/test.rs b/vendor/quote/tests/test.rs
index 5bfcf4b45..52ec7bcfb 100644
--- a/vendor/quote/tests/test.rs
+++ b/vendor/quote/tests/test.rs
@@ -1,6 +1,5 @@
#![allow(
- clippy::blacklisted_name,
- clippy::let_underscore_drop,
+ clippy::disallowed_names,
clippy::shadow_unrelated,
clippy::unseparated_literal_suffix,
clippy::used_underscore_binding
diff --git a/vendor/quote/tests/ui/not-quotable.rs b/vendor/quote/tests/ui/not-quotable.rs
index f991c1883..220542d84 100644
--- a/vendor/quote/tests/ui/not-quotable.rs
+++ b/vendor/quote/tests/ui/not-quotable.rs
@@ -3,5 +3,5 @@ use std::net::Ipv4Addr;
fn main() {
let ip = Ipv4Addr::LOCALHOST;
- let _ = quote! { #ip };
+ _ = quote! { #ip };
}
diff --git a/vendor/quote/tests/ui/not-quotable.stderr b/vendor/quote/tests/ui/not-quotable.stderr
index 171cddd2b..c323d99b7 100644
--- a/vendor/quote/tests/ui/not-quotable.stderr
+++ b/vendor/quote/tests/ui/not-quotable.stderr
@@ -1,8 +1,11 @@
error[E0277]: the trait bound `Ipv4Addr: ToTokens` is not satisfied
- --> tests/ui/not-quotable.rs:6:13
+ --> tests/ui/not-quotable.rs:6:9
|
-6 | let _ = quote! { #ip };
- | ^^^^^^^^^^^^^^ the trait `ToTokens` is not implemented for `Ipv4Addr`
+6 | _ = quote! { #ip };
+ | ^^^^^^^^^^^^^^
+ | |
+ | the trait `ToTokens` is not implemented for `Ipv4Addr`
+ | required by a bound introduced by this call
|
= help: the following other types implement trait `ToTokens`:
&'a T
diff --git a/vendor/quote/tests/ui/not-repeatable.rs b/vendor/quote/tests/ui/not-repeatable.rs
index a8f0fe773..c1debf5f8 100644
--- a/vendor/quote/tests/ui/not-repeatable.rs
+++ b/vendor/quote/tests/ui/not-repeatable.rs
@@ -4,5 +4,5 @@ struct Ipv4Addr;
fn main() {
let ip = Ipv4Addr;
- let _ = quote! { #(#ip)* };
+ _ = quote! { #(#ip)* };
}
diff --git a/vendor/quote/tests/ui/not-repeatable.stderr b/vendor/quote/tests/ui/not-repeatable.stderr
index f75351e33..264a89f9e 100644
--- a/vendor/quote/tests/ui/not-repeatable.stderr
+++ b/vendor/quote/tests/ui/not-repeatable.stderr
@@ -1,35 +1,35 @@
error[E0599]: the method `quote_into_iter` exists for struct `Ipv4Addr`, but its trait bounds were not satisfied
- --> tests/ui/not-repeatable.rs:7:13
- |
-3 | struct Ipv4Addr;
- | ---------------
- | |
- | method `quote_into_iter` not found for this struct
- | doesn't satisfy `Ipv4Addr: Iterator`
- | doesn't satisfy `Ipv4Addr: ToTokens`
- | doesn't satisfy `Ipv4Addr: ext::RepIteratorExt`
- | doesn't satisfy `Ipv4Addr: ext::RepToTokensExt`
+ --> tests/ui/not-repeatable.rs:7:9
+ |
+3 | struct Ipv4Addr;
+ | ---------------
+ | |
+ | method `quote_into_iter` not found for this struct
+ | doesn't satisfy `Ipv4Addr: Iterator`
+ | doesn't satisfy `Ipv4Addr: ToTokens`
+ | doesn't satisfy `Ipv4Addr: ext::RepIteratorExt`
+ | doesn't satisfy `Ipv4Addr: ext::RepToTokensExt`
...
-7 | let _ = quote! { #(#ip)* };
- | ^^^^^^^^^^^^^^^^^^ method cannot be called on `Ipv4Addr` due to unsatisfied trait bounds
- |
- = note: the following trait bounds were not satisfied:
- `Ipv4Addr: Iterator`
- which is required by `Ipv4Addr: ext::RepIteratorExt`
- `&Ipv4Addr: Iterator`
- which is required by `&Ipv4Addr: ext::RepIteratorExt`
- `Ipv4Addr: ToTokens`
- which is required by `Ipv4Addr: ext::RepToTokensExt`
- `&mut Ipv4Addr: Iterator`
- which is required by `&mut Ipv4Addr: ext::RepIteratorExt`
-note: the following traits must be implemented
- --> $RUST/core/src/iter/traits/iterator.rs
- |
- | pub trait Iterator {
- | ^^^^^^^^^^^^^^^^^^
- |
- ::: src/to_tokens.rs
- |
- | pub trait ToTokens {
- | ^^^^^^^^^^^^^^^^^^
- = note: this error originates in the macro `$crate::quote_bind_into_iter` which comes from the expansion of the macro `quote` (in Nightly builds, run with -Z macro-backtrace for more info)
+7 | _ = quote! { #(#ip)* };
+ | ^^^^^^^^^^^^^^^^^^ method cannot be called on `Ipv4Addr` due to unsatisfied trait bounds
+ |
+ = note: the following trait bounds were not satisfied:
+ `Ipv4Addr: Iterator`
+ which is required by `Ipv4Addr: ext::RepIteratorExt`
+ `&Ipv4Addr: Iterator`
+ which is required by `&Ipv4Addr: ext::RepIteratorExt`
+ `Ipv4Addr: ToTokens`
+ which is required by `Ipv4Addr: ext::RepToTokensExt`
+ `&mut Ipv4Addr: Iterator`
+ which is required by `&mut Ipv4Addr: ext::RepIteratorExt`
+note: the traits `ToTokens` and `Iterator` must be implemented
+ --> src/to_tokens.rs
+ |
+ | pub trait ToTokens {
+ | ^^^^^^^^^^^^^^^^^^
+ |
+ ::: $RUST/core/src/iter/traits/iterator.rs
+ |
+ | pub trait Iterator {
+ | ^^^^^^^^^^^^^^^^^^
+ = note: this error originates in the macro `$crate::quote_bind_into_iter` which comes from the expansion of the macro `quote` (in Nightly builds, run with -Z macro-backtrace for more info)
diff --git a/vendor/rayon/.cargo-checksum.json b/vendor/rayon/.cargo-checksum.json
index 24bf4ae59..df73a32db 100644
--- a/vendor/rayon/.cargo-checksum.json
+++ b/vendor/rayon/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.toml":"29c768a031b0c4c2b8012af25cee42f89d5e6d7bd89ec8cf8df4972420a55cc3","FAQ.md":"e963b051f6295a2944ff5abec368c7da914b913e21049978c2ef0017294abf8f","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"0621878e61f0d0fda054bcbe02df75192c28bde1ecc8289cbd86aeba2dd72720","README.md":"d972f524a7de70888a57ed63625272d4bbc24ecbdffcdb7ba8d3d520e60ab79e","RELEASES.md":"bb8dae2b0c1ec1b6c8ecd142097b4a3f11011397f7d327024936c33678851066","src/array.rs":"edd4442a74c08128284a242a6874686b25d539e8a57886fef82216e5ff31f851","src/collections/binary_heap.rs":"58e2590ff52fa2cfe7b5797fdc37902d0c111cca844aac21a4024add2f28a8db","src/collections/btree_map.rs":"d3094ed5c54620f81a86133f52e74164bae9f0137745eb66e6d31db2e942bcab","src/collections/btree_set.rs":"7de12e388d36cb2f8672bc3b68aed5b44d047c9f5fe262a7260584634299480f","src/collections/hash_map.rs":"cdaf1aa3bedf9bde591fcb323d64bee9366feb35fee6886bedb5fec8d0018564","src/collections/hash_set.rs":"685690108fc20eca2cf346e74e48d6c751f5ceb6028996366d49f1f1ad6051e0","src/collections/linked_list.rs":"bef7f32da49b55db76cfefa0afa4789a6979e08a69427e49bab2874967163fef","src/collections/mod.rs":"3978222720a2df20be7fd0856d8515b6f9fa40f6556662bd3c0726b1392970ca","src/collections/vec_deque.rs":"d14aa4edc858cd3469fff3597fc35ab83c27f3bdf66ede06d1dd25021f383109","src/compile_fail/cannot_collect_filtermap_data.rs":"f76e6727ae8bd0dc8fff5f4a79df3fc598daf5d7371bdf8b2e730fba4ba82719","src/compile_fail/cannot_zip_filtered_data.rs":"3588fcf72e804ea867ea525b6dfa7a3d45fe14e28088e849fa9ddb029acc8a7a","src/compile_fail/cell_par_iter.rs":"ebf8804993c103f9d8228aba0bb2a5f0aadb957de9f1eb59cf546dbace285201","src/compile_fail/mod.rs":"2c346e4b52941fe0ac2667eeb753c0c21a1f998a544bb9d9167f200caba0a7bb","src/compile_fail/must_use.rs":"42ae57ed7cb909fad8d079ce6e3742a92ca7f72a9cc209b149d1d88df568d757","src/compile_fail/no_send_par_iter.rs":"b241446439c659f35060df12ba91590ea4267f373ddc688e4ffc203321db24b3","src/compile_fail/rc_par_iter.rs":"938776b08612e35c85b99962024d54af5a8c87d45a050a76375d16ef6fe4299f","src/delegate.rs":"aad2a11998240fb7dd96bd054b588cac469df3b716b8dffb05239102b6507d05","src/iter/chain.rs":"f82a25577ca36bac93b5a13e75dcd70e8cee381b9baa5993dd645f0714fb9eb6","src/iter/chunks.rs":"9b901441bd8684782d4e50bf24c261bdf3392f576e7ab25b2b83be2fc9361545","src/iter/cloned.rs":"35e1c864f99f7bc93c258c4976d15ccfc1d04df969dc878fd03d35f6799377f7","src/iter/collect/consumer.rs":"5f9728fdf2db728a3ea37849c2fc43c417b221e580860be4dfc6cab526e57f8e","src/iter/collect/mod.rs":"ede28d59713c3291709a842cd54b17760008f6854a3982404eca973bdc3d2f23","src/iter/collect/test.rs":"975875022cf45f9a991040c60f3ed1f9df3805e27f64bbca5b446774793eddef","src/iter/copied.rs":"1a3457f3b0fb020593049b8b1f1be2f7a6684c2fcc66c02c911cb14e0a1943d7","src/iter/empty.rs":"3cb2d05721aab1a4d9e9c5813473e1646116c1ea570e26d9ac81e083688a0b7d","src/iter/enumerate.rs":"3204255723a93b3275cf0f208939de8960b13a9b13b8c2ba6b664e65da21cd87","src/iter/extend.rs":"6b93885d0bce6b1e01d9e220083e421436142b003dbe21f914dcf6b08abe7f10","src/iter/filter.rs":"e48f317ee4d66dea6f19ac2607c6f764d20c023d847b66c6c01826e6a46f96ab","src/iter/filter_map.rs":"2530b726b27fe3a678a34722b1baf8e223a65a0770fa8ed5dca13786ea454580","src/iter/find.rs":"896ddb05b2fa7368462e0ff2a73982ced5f93266c0e0e8c27bb3fc4ec737af21","src/iter/find_first_last/mod.rs":"fa7d7692d81ecdbecb178606ad3b1b00c80e5e4159c57d34929b012b0bea0c82","src/iter/find_first_last/test.rs":"2052eb8b87c5a0a0d49a76c83d9d74f81be18aad52ceb1b06d7e209f4fefba94","src/iter/f
lat_map.rs":"4db54dcda5f1f916497d0162268e6cd554478bc7ea0190709cc7c73941a0e20f","src/iter/flat_map_iter.rs":"81886d1d71f4991963ec9b6d767d9ef391c9e84807d27f6135cd92a451dc1b32","src/iter/flatten.rs":"93407a8f44f4a265f47d7fe568508d6ef922597c4ca4558e8945cf1aacdd6b30","src/iter/flatten_iter.rs":"23cf3ddc25b6cad117932c4bac2e8b945c6b56626730fd3b1787f233495d15e6","src/iter/fold.rs":"874259b408d7f8cdc376d34276d37c1837950c035ff780de8281d1edf65ded9f","src/iter/fold_chunks.rs":"78c267cc341fa71206a587b548480038a7a9bbb29d341b43733515b34e4cec48","src/iter/fold_chunks_with.rs":"fc3a49dd6a2027ec8e0c78e459ed80da1185b365909694bf1395af12b22648a8","src/iter/for_each.rs":"7af0e21ed8479eec65831d1409f61a88d45a31764f2385ec759eda1a46d388b2","src/iter/from_par_iter.rs":"9439b1ae01db2c7545d6416108700dfbafcff3f584e81704cf985ab1d986b660","src/iter/inspect.rs":"d502b6e435a44f34ba49dfe46aa809a1bad3dbaefa048540a98e3026437b1016","src/iter/interleave.rs":"3c6d82fe13b52e2ce5e1b3dba93c47f479ff44d232059cd88523b422c51dea96","src/iter/interleave_shortest.rs":"a2b1f31ea4cb29f4761d3724feddcf5a96e1f21fd623f95c85a8659294a6745a","src/iter/intersperse.rs":"6b5d3d826ed3499ba78f0ff07468d96a0e104c7ee142a2ced6be8143b43241a5","src/iter/len.rs":"1d890dffb912134402c4778b1635af713df23df8bd2a98890209926a936d8407","src/iter/map.rs":"f778f1bd0a71c962375d2ce886b40ac365afed4a032e2b9dd6c385712d3d75eb","src/iter/map_with.rs":"f83950016bb02addecec049fda73663861c80232477a7b89b7e8718291a4b481","src/iter/mod.rs":"e54fbede51a3a93c338a8d37edb47f78895f8f9a3b0760ad92ea5f4c3de09665","src/iter/multizip.rs":"10ec107f6673c9bc6d1d611e11b716c37e8601ab2f4257a460c4bc4962771347","src/iter/noop.rs":"5be6332ddfbb8fdbae1ffdb00983950a8b37a295bcb58e9a265b33806ee504e6","src/iter/once.rs":"fcebffc374dcdd206d13311dcc2e7d7a04da5687658b2f3ec3409f03ed12774b","src/iter/panic_fuse.rs":"2a4d43fa4e717629de7f69eb180f13db90ef95004975cfa20dcfaacc80435015","src/iter/par_bridge.rs":"0066e4f2ebec979cf912f27fdccd2a670bc7f387036c9bd3c8c1402ce3692139","src/iter/plumbing/README.md":"28050be91cbaf1932e65607939722bf9a0563d25d98a4e000c3a5a320488e4aa","src/iter/plumbing/mod.rs":"1156c55a15b651d161f463cb86b2f602f6246a3c7f8a82fb484db12aa6a60091","src/iter/positions.rs":"b7abfb59c12b6cceb5e539e85f9eca836054ae88225e16cfc0ba8e68177c7917","src/iter/product.rs":"da69f4781c2275c4a176432994c3fd80ea1f296afe47b329de61b1d733d990df","src/iter/reduce.rs":"2f5d6e07d7c0de2360505fa5d9198c31fd43ba7e58a6ec40f79edec19319e502","src/iter/repeat.rs":"ed46b17b79d8569f9d67b50585b116ee0293e1d6c17c0dc683c66644f6a58fd5","src/iter/rev.rs":"c4c796d7cb6398e74bef043a080403acccdf70f6a4e74b242e530d1718969b8f","src/iter/skip.rs":"93d54e17316ae15ff3cc30ca7144cb448569b18ea967dd2cd8913ac6f1334390","src/iter/splitter.rs":"0024db04b4430c2a1e8c921cec86af641f612f877f3675b15df0da9122de5f00","src/iter/step_by.rs":"be7abe2c2fba252a1b69c8cf18adfe510036c30f8ee2994404c18ae15dde317e","src/iter/sum.rs":"cf11d996507ceba39524a102559b84289e776f8fe5772114e00ae2112b38c47c","src/iter/take.rs":"e47eeca1249553b0dfaf54cd8f99026be68db76b42f1f29e09c07a98c165c50a","src/iter/test.rs":"dad5c78aa8bbd7d62cf0d76194f3c02d8f12236c5f90897f3f97f884ac1b5171","src/iter/try_fold.rs":"d4f40a00995273b8803031da4a4b139a5f462a174ef1d3c8ba54524b47ab8180","src/iter/try_reduce.rs":"12317a649f777e76e6ae610d205104d7946fbe45804fbf1caa0843118531baed","src/iter/try_reduce_with.rs":"9171563fc22110d7a359f19de7ca66a6823d8f712099d05d01560795781fdeec","src/iter/unzip.rs":"9a16ea1f3b3019a090b7189f6c42c75beb3537bc849bd4c51093120a907cea6b","src/iter/update.rs":"0362185a002cdda0e73b13237017
ddc3d5e72390bba6cb2e2f021e947ed861dc","src/iter/while_some.rs":"a514891d7a07031354b48e377c239ff330d0955f184abc57a69d2a193e7fcb45","src/iter/zip.rs":"4d908f75a10b5e9b68d012bbba289f3e5e9d6a9570ce0f56fc1b4f9d96860499","src/iter/zip_eq.rs":"4c18d8f7a78e197a3268c9ef74d16690f8c960407c18bb63dc6905a2fe2bde62","src/lib.rs":"3607c5d8efce43560fff74dfa66668c1d85fd12f0ffd60aa5c7c545e8c8e77fa","src/math.rs":"040e82a4ba7a6680eb65b65f4d1fc3dc477d902855d8651105b138ae2e71c8e8","src/option.rs":"00979a9bb8f42629f2b956a6cfbd286fc8a41ffbbec85f1b5d0f0da5615dac9c","src/par_either.rs":"afa4b04ba6ea1d37aed2d68eca44d7ba0d1d09ea985c9091540dd8d3c51974f1","src/prelude.rs":"b1af578abff57f9c8a285c39e3793125cf40d9901d0f2a4f0c5f1bb9252a08de","src/private.rs":"152f6d65ce4741616a1dec796b9442f78a018d38bb040f76c4cd85008333a3bb","src/range.rs":"93bd821b851d334a365211e14b60e0fc5052a7ee09e9a26ea7dd4f018c9bf7ae","src/range_inclusive.rs":"1f18ee155ab46e7398203163d49b4dfa87135e578de3d80e2641877c5b037126","src/result.rs":"0656f0000efcea10e571df90247925dfd00a0c2194043fcbc009711fb2f7af02","src/slice/chunks.rs":"2bf07a3522381e7747e40f787727747b62fbe1f1504eac6c383f0608a335ec91","src/slice/mergesort.rs":"effe59ecc40b330c364a3da868182b72b487025d9ba0256079f8a284b85a05ef","src/slice/mod.rs":"d293c9105edc3e51b11f873ac816f28232dd708120901ce3a518e03d5b748bcf","src/slice/quicksort.rs":"355506e39eac03b18cf35a6fa01d8f126267e01736e03c85f22414ebf894a9be","src/slice/rchunks.rs":"23229976840da07e8fff6657ca626810ed380682719e4d1f0693ac08839e1b7c","src/slice/test.rs":"fca5e5e6bb5464c9332d14e7d963098ad9a072801ea508ae9eabf5d278b66bb2","src/split_producer.rs":"2b143e16bc6540792f861e215a86cfea7f4ee500d4faca2476d4165619eac90d","src/str.rs":"fe3fca9218fd17a265619551602001addf80f83187b086b3c65b4edd7857abd2","src/string.rs":"6691dd31264bd93a0528fc584585415c12f56cfb4baebbfd31ea2f1b21550f77","src/vec.rs":"c9d2f99d9a3577d57640f52e33c2d8f62b0637a8ec21bbc4aefa51ce12f7059d","tests/chars.rs":"5a2bef1cd8a7d1740a4e5403830da7bbdd584e97d602a07645c4986ee34c2ad3","tests/clones.rs":"8b1bd752d0b39b0722fa30583962940a68c917b2f7dbe6b479da5a3e7d3c5efb","tests/collect.rs":"27173f932d5c65932ad63c215c561ecf06fcbeb7253a7bb175b819e0a40107aa","tests/cross-pool.rs":"103c121c8684eef0868e3982219e406500009c26d7602167f5a514b1dfd3b4cc","tests/debug.rs":"de22193e15ac078a9952e42d255bce0f30ac7f4f69957aa6b0aada0eac146ca0","tests/drain_vec.rs":"305ca40ba41ed3b459a334386cd742c66586d62aadfb624278aabdf10b872a12","tests/intersperse.rs":"bda4fb2179086e32c77c8293b9bb85d55509c282441837ba1849c2d3aa3186a7","tests/issue671-unzip.rs":"d5eb38d8a6d8f66fdf1c40293abbf58f3ac42b5acfc5dca62b02c7ace5bfc1a4","tests/issue671.rs":"52914cac517074deaedcb81bd76b867f0b99cc7b65c3e01cfe12dc9fe38c0266","tests/iter_panic.rs":"61f0c036a2fa555303bf317ac15b4d25f1316a3beb313c8e2fca7f85228450b4","tests/named-threads.rs":"48dd0fa0de13b4554dfc1fc0adaee093e19df8b1fc601f5b9b4f15b1fc705249","tests/octillion.rs":"65e12a22f5bc6e22316b93c57c82c2556ef8a5afd4d846a6431ce8c0499ce804","tests/producer_split_at.rs":"fbb28da59e85e4c253cc314f35f113ced8db2981ceda9977145af540f9996837","tests/sort-panic-safe.rs":"290a37a1cf7e297e622fc84caa5c7c3d50e83643a3c4f67a05cf2e1f7d6f3fc0","tests/str.rs":"49059384999be57994841652718cc048389380741be5c493423f8bd2206e4453"},"package":"1e060280438193c554f654141c9ea9417886713b7acd75974c85b18a69a88e0b"} \ No newline at end of file
+{"files":{"Cargo.toml":"c992b587eb0ba53f9040697feac44d5bfd4c102f90410407e1c2215ae6602961","FAQ.md":"e963b051f6295a2944ff5abec368c7da914b913e21049978c2ef0017294abf8f","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"0621878e61f0d0fda054bcbe02df75192c28bde1ecc8289cbd86aeba2dd72720","README.md":"d972f524a7de70888a57ed63625272d4bbc24ecbdffcdb7ba8d3d520e60ab79e","RELEASES.md":"b040799ae385b37973176ba7ffea5f0b8a6434f94b99bca10ecf663f7ce80da2","src/array.rs":"edd4442a74c08128284a242a6874686b25d539e8a57886fef82216e5ff31f851","src/collections/binary_heap.rs":"58e2590ff52fa2cfe7b5797fdc37902d0c111cca844aac21a4024add2f28a8db","src/collections/btree_map.rs":"d3094ed5c54620f81a86133f52e74164bae9f0137745eb66e6d31db2e942bcab","src/collections/btree_set.rs":"7de12e388d36cb2f8672bc3b68aed5b44d047c9f5fe262a7260584634299480f","src/collections/hash_map.rs":"cdaf1aa3bedf9bde591fcb323d64bee9366feb35fee6886bedb5fec8d0018564","src/collections/hash_set.rs":"685690108fc20eca2cf346e74e48d6c751f5ceb6028996366d49f1f1ad6051e0","src/collections/linked_list.rs":"bef7f32da49b55db76cfefa0afa4789a6979e08a69427e49bab2874967163fef","src/collections/mod.rs":"3978222720a2df20be7fd0856d8515b6f9fa40f6556662bd3c0726b1392970ca","src/collections/vec_deque.rs":"d14aa4edc858cd3469fff3597fc35ab83c27f3bdf66ede06d1dd25021f383109","src/compile_fail/cannot_collect_filtermap_data.rs":"f76e6727ae8bd0dc8fff5f4a79df3fc598daf5d7371bdf8b2e730fba4ba82719","src/compile_fail/cannot_zip_filtered_data.rs":"3588fcf72e804ea867ea525b6dfa7a3d45fe14e28088e849fa9ddb029acc8a7a","src/compile_fail/cell_par_iter.rs":"ebf8804993c103f9d8228aba0bb2a5f0aadb957de9f1eb59cf546dbace285201","src/compile_fail/mod.rs":"2c346e4b52941fe0ac2667eeb753c0c21a1f998a544bb9d9167f200caba0a7bb","src/compile_fail/must_use.rs":"42ae57ed7cb909fad8d079ce6e3742a92ca7f72a9cc209b149d1d88df568d757","src/compile_fail/no_send_par_iter.rs":"b241446439c659f35060df12ba91590ea4267f373ddc688e4ffc203321db24b3","src/compile_fail/rc_par_iter.rs":"938776b08612e35c85b99962024d54af5a8c87d45a050a76375d16ef6fe4299f","src/delegate.rs":"aad2a11998240fb7dd96bd054b588cac469df3b716b8dffb05239102b6507d05","src/iter/chain.rs":"f82a25577ca36bac93b5a13e75dcd70e8cee381b9baa5993dd645f0714fb9eb6","src/iter/chunks.rs":"9b901441bd8684782d4e50bf24c261bdf3392f576e7ab25b2b83be2fc9361545","src/iter/cloned.rs":"35e1c864f99f7bc93c258c4976d15ccfc1d04df969dc878fd03d35f6799377f7","src/iter/collect/consumer.rs":"5f9728fdf2db728a3ea37849c2fc43c417b221e580860be4dfc6cab526e57f8e","src/iter/collect/mod.rs":"ede28d59713c3291709a842cd54b17760008f6854a3982404eca973bdc3d2f23","src/iter/collect/test.rs":"975875022cf45f9a991040c60f3ed1f9df3805e27f64bbca5b446774793eddef","src/iter/copied.rs":"1a3457f3b0fb020593049b8b1f1be2f7a6684c2fcc66c02c911cb14e0a1943d7","src/iter/empty.rs":"3cb2d05721aab1a4d9e9c5813473e1646116c1ea570e26d9ac81e083688a0b7d","src/iter/enumerate.rs":"3204255723a93b3275cf0f208939de8960b13a9b13b8c2ba6b664e65da21cd87","src/iter/extend.rs":"6b93885d0bce6b1e01d9e220083e421436142b003dbe21f914dcf6b08abe7f10","src/iter/filter.rs":"e48f317ee4d66dea6f19ac2607c6f764d20c023d847b66c6c01826e6a46f96ab","src/iter/filter_map.rs":"2530b726b27fe3a678a34722b1baf8e223a65a0770fa8ed5dca13786ea454580","src/iter/find.rs":"896ddb05b2fa7368462e0ff2a73982ced5f93266c0e0e8c27bb3fc4ec737af21","src/iter/find_first_last/mod.rs":"fa7d7692d81ecdbecb178606ad3b1b00c80e5e4159c57d34929b012b0bea0c82","src/iter/find_first_last/test.rs":"2052eb8b87c5a0a0d49a76c83d9d74f81be18aad52ceb1b06d7e209f4fefba94","src/iter/f
lat_map.rs":"4db54dcda5f1f916497d0162268e6cd554478bc7ea0190709cc7c73941a0e20f","src/iter/flat_map_iter.rs":"81886d1d71f4991963ec9b6d767d9ef391c9e84807d27f6135cd92a451dc1b32","src/iter/flatten.rs":"93407a8f44f4a265f47d7fe568508d6ef922597c4ca4558e8945cf1aacdd6b30","src/iter/flatten_iter.rs":"23cf3ddc25b6cad117932c4bac2e8b945c6b56626730fd3b1787f233495d15e6","src/iter/fold.rs":"874259b408d7f8cdc376d34276d37c1837950c035ff780de8281d1edf65ded9f","src/iter/fold_chunks.rs":"78c267cc341fa71206a587b548480038a7a9bbb29d341b43733515b34e4cec48","src/iter/fold_chunks_with.rs":"fc3a49dd6a2027ec8e0c78e459ed80da1185b365909694bf1395af12b22648a8","src/iter/for_each.rs":"7af0e21ed8479eec65831d1409f61a88d45a31764f2385ec759eda1a46d388b2","src/iter/from_par_iter.rs":"9439b1ae01db2c7545d6416108700dfbafcff3f584e81704cf985ab1d986b660","src/iter/inspect.rs":"d502b6e435a44f34ba49dfe46aa809a1bad3dbaefa048540a98e3026437b1016","src/iter/interleave.rs":"3c6d82fe13b52e2ce5e1b3dba93c47f479ff44d232059cd88523b422c51dea96","src/iter/interleave_shortest.rs":"a2b1f31ea4cb29f4761d3724feddcf5a96e1f21fd623f95c85a8659294a6745a","src/iter/intersperse.rs":"6b5d3d826ed3499ba78f0ff07468d96a0e104c7ee142a2ced6be8143b43241a5","src/iter/len.rs":"1d890dffb912134402c4778b1635af713df23df8bd2a98890209926a936d8407","src/iter/map.rs":"f778f1bd0a71c962375d2ce886b40ac365afed4a032e2b9dd6c385712d3d75eb","src/iter/map_with.rs":"f83950016bb02addecec049fda73663861c80232477a7b89b7e8718291a4b481","src/iter/mod.rs":"e54fbede51a3a93c338a8d37edb47f78895f8f9a3b0760ad92ea5f4c3de09665","src/iter/multizip.rs":"10ec107f6673c9bc6d1d611e11b716c37e8601ab2f4257a460c4bc4962771347","src/iter/noop.rs":"5be6332ddfbb8fdbae1ffdb00983950a8b37a295bcb58e9a265b33806ee504e6","src/iter/once.rs":"fcebffc374dcdd206d13311dcc2e7d7a04da5687658b2f3ec3409f03ed12774b","src/iter/panic_fuse.rs":"2a4d43fa4e717629de7f69eb180f13db90ef95004975cfa20dcfaacc80435015","src/iter/par_bridge.rs":"7b78e0180a95b6a6d429d2923cd48c07f9ae3137c7e5c8706eceaec029f77b48","src/iter/plumbing/README.md":"28050be91cbaf1932e65607939722bf9a0563d25d98a4e000c3a5a320488e4aa","src/iter/plumbing/mod.rs":"1156c55a15b651d161f463cb86b2f602f6246a3c7f8a82fb484db12aa6a60091","src/iter/positions.rs":"b7abfb59c12b6cceb5e539e85f9eca836054ae88225e16cfc0ba8e68177c7917","src/iter/product.rs":"da69f4781c2275c4a176432994c3fd80ea1f296afe47b329de61b1d733d990df","src/iter/reduce.rs":"2f5d6e07d7c0de2360505fa5d9198c31fd43ba7e58a6ec40f79edec19319e502","src/iter/repeat.rs":"ed46b17b79d8569f9d67b50585b116ee0293e1d6c17c0dc683c66644f6a58fd5","src/iter/rev.rs":"c4c796d7cb6398e74bef043a080403acccdf70f6a4e74b242e530d1718969b8f","src/iter/skip.rs":"93d54e17316ae15ff3cc30ca7144cb448569b18ea967dd2cd8913ac6f1334390","src/iter/splitter.rs":"0024db04b4430c2a1e8c921cec86af641f612f877f3675b15df0da9122de5f00","src/iter/step_by.rs":"be7abe2c2fba252a1b69c8cf18adfe510036c30f8ee2994404c18ae15dde317e","src/iter/sum.rs":"cf11d996507ceba39524a102559b84289e776f8fe5772114e00ae2112b38c47c","src/iter/take.rs":"e47eeca1249553b0dfaf54cd8f99026be68db76b42f1f29e09c07a98c165c50a","src/iter/test.rs":"dad5c78aa8bbd7d62cf0d76194f3c02d8f12236c5f90897f3f97f884ac1b5171","src/iter/try_fold.rs":"d4f40a00995273b8803031da4a4b139a5f462a174ef1d3c8ba54524b47ab8180","src/iter/try_reduce.rs":"12317a649f777e76e6ae610d205104d7946fbe45804fbf1caa0843118531baed","src/iter/try_reduce_with.rs":"9171563fc22110d7a359f19de7ca66a6823d8f712099d05d01560795781fdeec","src/iter/unzip.rs":"9a16ea1f3b3019a090b7189f6c42c75beb3537bc849bd4c51093120a907cea6b","src/iter/update.rs":"0362185a002cdda0e73b13237017
ddc3d5e72390bba6cb2e2f021e947ed861dc","src/iter/while_some.rs":"a514891d7a07031354b48e377c239ff330d0955f184abc57a69d2a193e7fcb45","src/iter/zip.rs":"4d908f75a10b5e9b68d012bbba289f3e5e9d6a9570ce0f56fc1b4f9d96860499","src/iter/zip_eq.rs":"4c18d8f7a78e197a3268c9ef74d16690f8c960407c18bb63dc6905a2fe2bde62","src/lib.rs":"3607c5d8efce43560fff74dfa66668c1d85fd12f0ffd60aa5c7c545e8c8e77fa","src/math.rs":"040e82a4ba7a6680eb65b65f4d1fc3dc477d902855d8651105b138ae2e71c8e8","src/option.rs":"00979a9bb8f42629f2b956a6cfbd286fc8a41ffbbec85f1b5d0f0da5615dac9c","src/par_either.rs":"afa4b04ba6ea1d37aed2d68eca44d7ba0d1d09ea985c9091540dd8d3c51974f1","src/prelude.rs":"b1af578abff57f9c8a285c39e3793125cf40d9901d0f2a4f0c5f1bb9252a08de","src/private.rs":"152f6d65ce4741616a1dec796b9442f78a018d38bb040f76c4cd85008333a3bb","src/range.rs":"93bd821b851d334a365211e14b60e0fc5052a7ee09e9a26ea7dd4f018c9bf7ae","src/range_inclusive.rs":"1f18ee155ab46e7398203163d49b4dfa87135e578de3d80e2641877c5b037126","src/result.rs":"0656f0000efcea10e571df90247925dfd00a0c2194043fcbc009711fb2f7af02","src/slice/chunks.rs":"2bf07a3522381e7747e40f787727747b62fbe1f1504eac6c383f0608a335ec91","src/slice/mergesort.rs":"effe59ecc40b330c364a3da868182b72b487025d9ba0256079f8a284b85a05ef","src/slice/mod.rs":"d293c9105edc3e51b11f873ac816f28232dd708120901ce3a518e03d5b748bcf","src/slice/quicksort.rs":"355506e39eac03b18cf35a6fa01d8f126267e01736e03c85f22414ebf894a9be","src/slice/rchunks.rs":"23229976840da07e8fff6657ca626810ed380682719e4d1f0693ac08839e1b7c","src/slice/test.rs":"fca5e5e6bb5464c9332d14e7d963098ad9a072801ea508ae9eabf5d278b66bb2","src/split_producer.rs":"2b143e16bc6540792f861e215a86cfea7f4ee500d4faca2476d4165619eac90d","src/str.rs":"fe3fca9218fd17a265619551602001addf80f83187b086b3c65b4edd7857abd2","src/string.rs":"6691dd31264bd93a0528fc584585415c12f56cfb4baebbfd31ea2f1b21550f77","src/vec.rs":"c9d2f99d9a3577d57640f52e33c2d8f62b0637a8ec21bbc4aefa51ce12f7059d","tests/chars.rs":"5a2bef1cd8a7d1740a4e5403830da7bbdd584e97d602a07645c4986ee34c2ad3","tests/clones.rs":"8b1bd752d0b39b0722fa30583962940a68c917b2f7dbe6b479da5a3e7d3c5efb","tests/collect.rs":"27173f932d5c65932ad63c215c561ecf06fcbeb7253a7bb175b819e0a40107aa","tests/cross-pool.rs":"103c121c8684eef0868e3982219e406500009c26d7602167f5a514b1dfd3b4cc","tests/debug.rs":"de22193e15ac078a9952e42d255bce0f30ac7f4f69957aa6b0aada0eac146ca0","tests/drain_vec.rs":"305ca40ba41ed3b459a334386cd742c66586d62aadfb624278aabdf10b872a12","tests/intersperse.rs":"bda4fb2179086e32c77c8293b9bb85d55509c282441837ba1849c2d3aa3186a7","tests/issue671-unzip.rs":"d5eb38d8a6d8f66fdf1c40293abbf58f3ac42b5acfc5dca62b02c7ace5bfc1a4","tests/issue671.rs":"52914cac517074deaedcb81bd76b867f0b99cc7b65c3e01cfe12dc9fe38c0266","tests/iter_panic.rs":"61f0c036a2fa555303bf317ac15b4d25f1316a3beb313c8e2fca7f85228450b4","tests/named-threads.rs":"48dd0fa0de13b4554dfc1fc0adaee093e19df8b1fc601f5b9b4f15b1fc705249","tests/octillion.rs":"65e12a22f5bc6e22316b93c57c82c2556ef8a5afd4d846a6431ce8c0499ce804","tests/par_bridge_recursion.rs":"b8a3e1e48a14b6a98a797593487b63509c91ac961c603e3b3814110e296ae33d","tests/producer_split_at.rs":"fbb28da59e85e4c253cc314f35f113ced8db2981ceda9977145af540f9996837","tests/sort-panic-safe.rs":"290a37a1cf7e297e622fc84caa5c7c3d50e83643a3c4f67a05cf2e1f7d6f3fc0","tests/str.rs":"49059384999be57994841652718cc048389380741be5c493423f8bd2206e4453"},"package":"6db3a213adf02b3bcfd2d3846bb41cb22857d131789e01df434fb7e7bc0759b7"} \ No newline at end of file
diff --git a/vendor/rayon/Cargo.toml b/vendor/rayon/Cargo.toml
index 8b1075d7f..4a208bc77 100644
--- a/vendor/rayon/Cargo.toml
+++ b/vendor/rayon/Cargo.toml
@@ -13,7 +13,7 @@
edition = "2021"
rust-version = "1.56"
name = "rayon"
-version = "1.6.0"
+version = "1.6.1"
authors = [
"Niko Matsakis <niko@alum.mit.edu>",
"Josh Stone <cuviper@gmail.com>",
@@ -38,9 +38,6 @@ categories = ["concurrency"]
license = "MIT OR Apache-2.0"
repository = "https://github.com/rayon-rs/rayon"
-[dependencies.crossbeam-deque]
-version = "0.8.1"
-
[dependencies.either]
version = "1.0"
default-features = false
diff --git a/vendor/rayon/RELEASES.md b/vendor/rayon/RELEASES.md
index eed3445ee..f6757614c 100644
--- a/vendor/rayon/RELEASES.md
+++ b/vendor/rayon/RELEASES.md
@@ -1,3 +1,15 @@
+# Release rayon 1.6.1 (2022-12-09)
+
+- Simplified `par_bridge` to only pull one item at a time from the iterator,
+ without batching. Threads that are waiting for iterator items will now block
+  appropriately rather than spinning the CPU. (Thanks @njaard!)
+- Added protection against recursion in `par_bridge`, so iterators that also
+ invoke rayon will not cause mutex recursion deadlocks.
+
+# Release rayon-core 1.10.1 (2022-11-18)
+
+- Fixed a race condition with threads going to sleep while a broadcast starts.
+
# Release rayon 1.6.0 / rayon-core 1.10.0 (2022-11-18)
- The minimum supported `rustc` is now 1.56.
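The release note above describes the new `par_bridge` behaviour; the following is an illustrative usage sketch (not part of the vendored diff, assuming a default rayon thread pool): `par_bridge()` turns any `Send` iterator into a `ParallelIterator`, and as of 1.6.1 each worker thread pulls one item at a time from the shared, mutex-guarded source iterator instead of filling a per-thread batch.

```rust
use rayon::prelude::*;

fn main() {
    // A serial iterator whose items are produced lazily, one call to `next()` at a time.
    let words = (0..1_000u64).map(|i| format!("item-{i}"));

    // `par_bridge()` hands the items out to rayon's worker threads; since 1.6.1 each
    // worker takes a single item from the shared iterator per lock acquisition.
    let total_len: usize = words.par_bridge().map(|s| s.len()).sum();

    println!("total length of all items: {total_len}");
}
```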
diff --git a/vendor/rayon/src/iter/par_bridge.rs b/vendor/rayon/src/iter/par_bridge.rs
index 339ac1a32..8398274b3 100644
--- a/vendor/rayon/src/iter/par_bridge.rs
+++ b/vendor/rayon/src/iter/par_bridge.rs
@@ -1,12 +1,9 @@
-use crossbeam_deque::{Steal, Stealer, Worker};
-
use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
-use std::sync::{Mutex, TryLockError};
-use std::thread::yield_now;
+use std::sync::Mutex;
-use crate::current_num_threads;
use crate::iter::plumbing::{bridge_unindexed, Folder, UnindexedConsumer, UnindexedProducer};
use crate::iter::ParallelIterator;
+use crate::{current_num_threads, current_thread_index};
/// Conversion trait to convert an `Iterator` to a `ParallelIterator`.
///
@@ -78,71 +75,46 @@ where
where
C: UnindexedConsumer<Self::Item>,
{
- let split_count = AtomicUsize::new(current_num_threads());
- let worker = Worker::new_fifo();
- let stealer = worker.stealer();
- let done = AtomicBool::new(false);
- let iter = Mutex::new((self.iter, worker));
+ let num_threads = current_num_threads();
+ let threads_started: Vec<_> = (0..num_threads).map(|_| AtomicBool::new(false)).collect();
bridge_unindexed(
- IterParallelProducer {
- split_count: &split_count,
- done: &done,
- iter: &iter,
- items: stealer,
+ &IterParallelProducer {
+ split_count: AtomicUsize::new(num_threads),
+ iter: Mutex::new(self.iter.fuse()),
+ threads_started: &threads_started,
},
consumer,
)
}
}
-struct IterParallelProducer<'a, Iter: Iterator> {
- split_count: &'a AtomicUsize,
- done: &'a AtomicBool,
- iter: &'a Mutex<(Iter, Worker<Iter::Item>)>,
- items: Stealer<Iter::Item>,
-}
-
-// manual clone because T doesn't need to be Clone, but the derive assumes it should be
-impl<'a, Iter: Iterator + 'a> Clone for IterParallelProducer<'a, Iter> {
- fn clone(&self) -> Self {
- IterParallelProducer {
- split_count: self.split_count,
- done: self.done,
- iter: self.iter,
- items: self.items.clone(),
- }
- }
+struct IterParallelProducer<'a, Iter> {
+ split_count: AtomicUsize,
+ iter: Mutex<std::iter::Fuse<Iter>>,
+ threads_started: &'a [AtomicBool],
}
-impl<'a, Iter: Iterator + Send + 'a> UnindexedProducer for IterParallelProducer<'a, Iter>
-where
- Iter::Item: Send,
-{
+impl<Iter: Iterator + Send> UnindexedProducer for &IterParallelProducer<'_, Iter> {
type Item = Iter::Item;
fn split(self) -> (Self, Option<Self>) {
let mut count = self.split_count.load(Ordering::SeqCst);
loop {
- // Check if the iterator is exhausted *and* we've consumed every item from it.
- let done = self.done.load(Ordering::SeqCst) && self.items.is_empty();
-
- match count.checked_sub(1) {
- Some(new_count) if !done => {
- match self.split_count.compare_exchange_weak(
- count,
- new_count,
- Ordering::SeqCst,
- Ordering::SeqCst,
- ) {
- Ok(_) => return (self.clone(), Some(self)),
- Err(last_count) => count = last_count,
- }
- }
- _ => {
- return (self, None);
+ // Check if the iterator is exhausted
+ if let Some(new_count) = count.checked_sub(1) {
+ match self.split_count.compare_exchange_weak(
+ count,
+ new_count,
+ Ordering::SeqCst,
+ Ordering::SeqCst,
+ ) {
+ Ok(_) => return (self, Some(self)),
+ Err(last_count) => count = last_count,
}
+ } else {
+ return (self, None);
}
}
}
@@ -151,66 +123,39 @@ where
where
F: Folder<Self::Item>,
{
+ // Guard against work-stealing-induced recursion, in case `Iter::next()`
+ // calls rayon internally, so we don't deadlock our mutex. We might also
+ // be recursing via `folder` methods, which doesn't present a mutex hazard,
+ // but it's lower overhead for us to just check this once, rather than
+ // updating additional shared state on every mutex lock/unlock.
+ // (If this isn't a rayon thread, then there's no work-stealing anyway...)
+ if let Some(i) = current_thread_index() {
+ // Note: If the number of threads in the pool ever grows dynamically, then
+ // we'll end up sharing flags and may falsely detect recursion -- that's
+ // still fine for overall correctness, just not optimal for parallelism.
+ let thread_started = &self.threads_started[i % self.threads_started.len()];
+ if thread_started.swap(true, Ordering::Relaxed) {
+ // We can't make progress with a nested mutex, so just return and let
+ // the outermost loop continue with the rest of the iterator items.
+ return folder;
+ }
+ }
+
loop {
- match self.items.steal() {
- Steal::Success(it) => {
+ if let Ok(mut iter) = self.iter.lock() {
+ if let Some(it) = iter.next() {
+ drop(iter);
folder = folder.consume(it);
if folder.full() {
return folder;
}
+ } else {
+ return folder;
}
- Steal::Empty => {
- // Don't storm the mutex if we're already done.
- if self.done.load(Ordering::SeqCst) {
- // Someone might have pushed more between our `steal()` and `done.load()`
- if self.items.is_empty() {
- // The iterator is out of items, no use in continuing
- return folder;
- }
- } else {
- // our cache is out of items, time to load more from the iterator
- match self.iter.try_lock() {
- Ok(mut guard) => {
- // Check `done` again in case we raced with the previous lock
- // holder on its way out.
- if self.done.load(Ordering::SeqCst) {
- if self.items.is_empty() {
- return folder;
- }
- continue;
- }
-
- let count = current_num_threads();
- let count = (count * count) * 2;
-
- let (ref mut iter, ref worker) = *guard;
-
- // while worker.len() < count {
- // FIXME the new deque doesn't let us count items. We can just
- // push a number of items, but that doesn't consider active
- // stealers elsewhere.
- for _ in 0..count {
- if let Some(it) = iter.next() {
- worker.push(it);
- } else {
- self.done.store(true, Ordering::SeqCst);
- break;
- }
- }
- }
- Err(TryLockError::WouldBlock) => {
- // someone else has the mutex, just sit tight until it's ready
- yield_now(); //TODO: use a thread-pool-aware yield? (#548)
- }
- Err(TryLockError::Poisoned(_)) => {
- // any panics from other threads will have been caught by the pool,
- // and will be re-thrown when joined - just exit
- return folder;
- }
- }
- }
- }
- Steal::Retry => (),
+ } else {
+ // any panics from other threads will have been caught by the pool,
+ // and will be re-thrown when joined - just exit
+ return folder;
}
}
}
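To make the shape of the rewritten `fold_with` loop concrete, here is a standalone sketch of the same pull-one-item-under-a-mutex pattern. It uses plain `std::thread` scoped threads instead of rayon's pool, so it illustrates the pattern only, not rayon's actual scheduling: each worker locks the shared fused iterator, takes exactly one item, releases the lock before doing any work on it, and stops once the iterator is exhausted.

```rust
use std::sync::Mutex;
use std::thread;

fn main() {
    // Shared, fused source iterator, as in the new `IterParallelProducer`.
    let iter = Mutex::new((0..100u32).map(|i| i * 2).fuse());
    let results = Mutex::new(Vec::new());

    thread::scope(|s| {
        for _ in 0..4 {
            s.spawn(|| loop {
                // Hold the lock only long enough to pull one item, mirroring par_bridge.
                let item = iter.lock().unwrap().next();
                match item {
                    Some(x) => results.lock().unwrap().push(x),
                    None => break, // the fused iterator is exhausted
                }
            });
        }
    });

    assert_eq!(results.into_inner().unwrap().len(), 100);
}
```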
diff --git a/vendor/rayon/tests/par_bridge_recursion.rs b/vendor/rayon/tests/par_bridge_recursion.rs
new file mode 100644
index 000000000..4def0a9e4
--- /dev/null
+++ b/vendor/rayon/tests/par_bridge_recursion.rs
@@ -0,0 +1,30 @@
+use rayon::prelude::*;
+use std::iter::once_with;
+
+const N: usize = 100_000;
+
+#[test]
+fn par_bridge_recursion() {
+ let pool = rayon::ThreadPoolBuilder::new()
+ .num_threads(10)
+ .build()
+ .unwrap();
+
+ let seq: Vec<_> = (0..N).map(|i| (i, i.to_string())).collect();
+
+ pool.broadcast(|_| {
+ let mut par: Vec<_> = (0..N)
+ .into_par_iter()
+ .flat_map(|i| {
+ once_with(move || {
+ // Using rayon within the serial iterator creates an opportunity for
+ // work-stealing to make par_bridge's mutex accidentally recursive.
+ rayon::join(move || i, move || i.to_string())
+ })
+ .par_bridge()
+ })
+ .collect();
+ par.par_sort_unstable();
+ assert_eq!(seq, par);
+ });
+}
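The new test exercises the recursion guard end to end; the guard itself keys off `rayon::current_thread_index()`. A hedged sketch of that idea in isolation, with a hypothetical `already_entered` helper (not rayon's internals): keep one `AtomicBool` per pool thread and treat a second entry on the same worker as re-entrancy.

```rust
use std::sync::atomic::{AtomicBool, Ordering};

/// Hypothetical helper mirroring the idea in `fold_with`: the first call on a given
/// rayon worker thread returns false; a nested call on the same thread returns true.
fn already_entered(flags: &[AtomicBool]) -> bool {
    match rayon::current_thread_index() {
        // Index modulo the flag count, as in the vendored code, in case the pool grew.
        Some(i) => flags[i % flags.len()].swap(true, Ordering::Relaxed),
        // Not a rayon worker thread: no work-stealing, hence no re-entrancy to detect.
        None => false,
    }
}

fn main() {
    let flags: Vec<_> = (0..rayon::current_num_threads())
        .map(|_| AtomicBool::new(false))
        .collect();

    rayon::scope(|s| {
        s.spawn(|_| {
            assert!(!already_entered(&flags)); // first entry on this worker thread
            assert!(already_entered(&flags)); // a nested entry would now be detected
        });
    });
}
```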
diff --git a/vendor/regex-syntax/.cargo-checksum.json b/vendor/regex-syntax/.cargo-checksum.json
index 881437810..9c05f32bc 100644
--- a/vendor/regex-syntax/.cargo-checksum.json
+++ b/vendor/regex-syntax/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.toml":"91aed5795d8faeb9a9f43face557622d92417a17bb453b5f12ee87073040fb7d","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"6485b8ed310d3f0340bf1ad1f47645069ce4069dcc6bb46c7d5c6faf41de1fdb","README.md":"67a3e673a9da6826fd4db5be6902841c821b52b98dc22c300f6e327872392b0a","benches/bench.rs":"d2b6ae5b939abd6093064f144b981b7739d7f474ec0698a1268052fc92406635","src/ast/mod.rs":"91b277a9bb979f85a44a67e39f17f77bde033764eea1f1a93aad1b61f1250089","src/ast/parse.rs":"150b42e944f766fdca70d654dbe32f8a17498432729c78b9eb50b73ae7f91f86","src/ast/print.rs":"d12f2cc75cd62f35623e1eb7a77ab8ac804b971752082700d2c4f550f834b249","src/ast/visitor.rs":"1a7b473147e4f6b89623ef1744a9e87f665bcf160fe08a33ce8e35011811ba71","src/either.rs":"1758e3edd056884eccadd995708d1e374ba9aa65846bd0e13b1aae852607c560","src/error.rs":"b3c5903a8937d2aff229a3ec65d4571d01ec4d9874c9a242ed6562c32702bcbd","src/hir/interval.rs":"e767fed363bebe4bbda0d78b8f07e73f321eaf4f837e2d7bd14a1617387e9a89","src/hir/literal/mod.rs":"ffe9a0aff7827f97bffd29eb2f4ba96627b16953161dce6c50a2f760e76bbd98","src/hir/mod.rs":"7f83c828223a54d236d348e48d5cedf015c904812110b6c38e9d52039c2b1572","src/hir/print.rs":"651b5d9776532a78612a5f9081372a57bad693890639ac19e3128b4defa96662","src/hir/translate.rs":"c7cd9693f73760263fd49a968714d27e7985ebe840211b2d83bca6686b0602a8","src/hir/visitor.rs":"e5bf7f8c09f6155e59c9d676fe25437f7e3700f9bf5d91101d7e246a64c11d5a","src/lib.rs":"a004f65196dd5745b3112e4acc8c467b18495cecac64a58d6608b35de67371cb","src/parser.rs":"0dfb553a152e008b2755f115663e553ed99c4b8e6a4dcbcad1662737534de49d","src/unicode.rs":"2b575c75dcb8fd6becb06f2a8faa33d6f54779708bc6b103070b8acb2b3323bb","src/unicode_tables/LICENSE-UNICODE":"74db5baf44a41b1000312c673544b3374e4198af5605c7f9080a402cec42cfa3","src/unicode_tables/age.rs":"9b36dd7d359d2fa21e6ea9734a37415ba0ba9469e27b8536308f5b79139d4191","src/unicode_tables/case_folding_simple.rs":"52b74161fa1e8f2db66737156e081cece82c1f503dee604f901b2df095beb90d","src/unicode_tables/general_category.rs":"c10beb78cdab6ec14846573bfb7965b5b3a4c114d20352c21a666d8de740a049","src/unicode_tables/grapheme_cluster_break.rs":"10ba739e06f880570eaf90bddd78ec468d939c0be7cd6e39f52cfa68371a9885","src/unicode_tables/mod.rs":"26c837099cd934c8062e24bc9a0aaecf15fe1de03f9c6da3f3e1e5ac3ca24bee","src/unicode_tables/perl_decimal.rs":"2084b101c615ff368a47af72df760003a76e869dfc191e0224cd024bb88165ed","src/unicode_tables/perl_space.rs":"75b9f3c9e302fd0994ee6e9ee21ee0ab36efc513cfd083647aed9854b977a33d","src/unicode_tables/perl_word.rs":"c588d6b29b98c1160452b54e9275d43583bc3454e29aee1c07e6a18389011a9c","src/unicode_tables/property_bool.rs":"319740ac6074b2d4e6e22bf4dde7db2feb569f9b71467c893fb2553d149b9f1d","src/unicode_tables/property_names.rs":"f33b186d7d8ee5342d74ef214f7b2cfbb24345233fa49a2abf2578cae61fbdd3","src/unicode_tables/property_values.rs":"716eb87716a9a4b2a9ef2c2242d6831692564875c8e218ffa758266ca33c88b6","src/unicode_tables/script.rs":"f6020589e33bd3a058468a22ce51391f6e512f9eb88c8ac60635fae7cd641ee2","src/unicode_tables/script_extension.rs":"2ba03d13813161a064b11e9f87b87685fbf29699ef553acc6112606b7ca98169","src/unicode_tables/sentence_break.rs":"bf7635623e4dbe0195789ed8b21f83ce3394ed2c445a3005f929f4f75b2a83c3","src/unicode_tables/word_break.rs":"8f7e261a67d1adb32ea627e43cb61d6566c33a4e229113d911e86941e6997a41","src/utf8.rs":"de854b3bfb3f7dbefc422f6a25935aaeef55ead2c35386c712a1fe9bf81a7b6f","test":"8a9bd1bd9fb389e08288f951319a9bbb0d4c5284a2ba63cbdab7f6afa2c2f76e"},"package":"a3f87b73ce11b161
9a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244"} \ No newline at end of file
+{"files":{"Cargo.toml":"238d0bbc855edbecf9a6a6936efc20bd2759f36bc8fa4d53bdef33a1629a9a0f","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"6485b8ed310d3f0340bf1ad1f47645069ce4069dcc6bb46c7d5c6faf41de1fdb","README.md":"67a3e673a9da6826fd4db5be6902841c821b52b98dc22c300f6e327872392b0a","benches/bench.rs":"d2b6ae5b939abd6093064f144b981b7739d7f474ec0698a1268052fc92406635","src/ast/mod.rs":"91b277a9bb979f85a44a67e39f17f77bde033764eea1f1a93aad1b61f1250089","src/ast/parse.rs":"150b42e944f766fdca70d654dbe32f8a17498432729c78b9eb50b73ae7f91f86","src/ast/print.rs":"d12f2cc75cd62f35623e1eb7a77ab8ac804b971752082700d2c4f550f834b249","src/ast/visitor.rs":"1a7b473147e4f6b89623ef1744a9e87f665bcf160fe08a33ce8e35011811ba71","src/either.rs":"1758e3edd056884eccadd995708d1e374ba9aa65846bd0e13b1aae852607c560","src/error.rs":"b3c5903a8937d2aff229a3ec65d4571d01ec4d9874c9a242ed6562c32702bcbd","src/hir/interval.rs":"e767fed363bebe4bbda0d78b8f07e73f321eaf4f837e2d7bd14a1617387e9a89","src/hir/literal/mod.rs":"ffe9a0aff7827f97bffd29eb2f4ba96627b16953161dce6c50a2f760e76bbd98","src/hir/mod.rs":"7f83c828223a54d236d348e48d5cedf015c904812110b6c38e9d52039c2b1572","src/hir/print.rs":"651b5d9776532a78612a5f9081372a57bad693890639ac19e3128b4defa96662","src/hir/translate.rs":"c7cd9693f73760263fd49a968714d27e7985ebe840211b2d83bca6686b0602a8","src/hir/visitor.rs":"e5bf7f8c09f6155e59c9d676fe25437f7e3700f9bf5d91101d7e246a64c11d5a","src/lib.rs":"a004f65196dd5745b3112e4acc8c467b18495cecac64a58d6608b35de67371cb","src/parser.rs":"0dfb553a152e008b2755f115663e553ed99c4b8e6a4dcbcad1662737534de49d","src/unicode.rs":"2ad48193433fefbede0837bd645f4288f6b39b1facb59dbb7d541bce7bf19109","src/unicode_tables/LICENSE-UNICODE":"74db5baf44a41b1000312c673544b3374e4198af5605c7f9080a402cec42cfa3","src/unicode_tables/age.rs":"2a2599a4e406fbbd0efd16aa6ce385c3f97b87c34820d6686a9f9113a5231c67","src/unicode_tables/case_folding_simple.rs":"9583803d4a10486da372b76979dbd26349b40766229467238eff972c1d78e47b","src/unicode_tables/general_category.rs":"36a93ba1cdeed96a00ff29a5ab5afd2c578a89541bf4dd8b18478146cebda0aa","src/unicode_tables/grapheme_cluster_break.rs":"39c388e9805a8391d3d3e69d74d831ce4fb99aa7e13e52c64dd2bd16d4765301","src/unicode_tables/mod.rs":"26c837099cd934c8062e24bc9a0aaecf15fe1de03f9c6da3f3e1e5ac3ca24bee","src/unicode_tables/perl_decimal.rs":"a98ea4afe71c2947023ae12bd25c46bf4c7de48eeb40979eca5c96ba62cee02e","src/unicode_tables/perl_space.rs":"ea2b3b84b4a48334082dadc6c37d9fcc9c9ded84b40e8f5c9c9314898638967e","src/unicode_tables/perl_word.rs":"6f1156bd6af32151ecffea4abe07a38fa04b1fc1b227ec1a8dac5d5f08d9d74b","src/unicode_tables/property_bool.rs":"0bd64f6e3228eaecf47824e238bdf1f8a9eef113ace6e790a57f045a8106701c","src/unicode_tables/property_names.rs":"5ca25437927eb70c62adf7d038e99a601cfb8a718677fd6de832589664d3c481","src/unicode_tables/property_values.rs":"5b4cc02392d382cf7af60455fc87b9980e97409b62a4b8d6c5843190d2e2d21d","src/unicode_tables/script.rs":"ea1d771b6d0a4b12d143f9bad2ea9342a0887878cbbe3c11262b6eabedaf2dd4","src/unicode_tables/script_extension.rs":"beeb8349703d903ff861beb8401bfd2599e457dc25df872e69d6ad1615f8b5e9","src/unicode_tables/sentence_break.rs":"2befe2a27cc4e8aecb624e310ef9f371462470dd3b2f572cec1f5873a5e30aa9","src/unicode_tables/word_break.rs":"94679177731b515f0c360eff394286a1f99b59527bdbc826cbf51d32f9666187","src/utf8.rs":"de854b3bfb3f7dbefc422f6a25935aaeef55ead2c35386c712a1fe9bf81a7b6f","test":"8a9bd1bd9fb389e08288f951319a9bbb0d4c5284a2ba63cbdab7f6afa2c2f76e"},"package":"456c603be3e8d448
b072f410900c09faf164fbce2d480456f50eea6e25f9c848"} \ No newline at end of file
diff --git a/vendor/regex-syntax/Cargo.toml b/vendor/regex-syntax/Cargo.toml
index 10e6bbf53..8d87f7a42 100644
--- a/vendor/regex-syntax/Cargo.toml
+++ b/vendor/regex-syntax/Cargo.toml
@@ -12,7 +12,7 @@
[package]
edition = "2018"
name = "regex-syntax"
-version = "0.6.27"
+version = "0.6.28"
authors = ["The Rust Project Developers"]
description = "A regular expression parser."
homepage = "https://github.com/rust-lang/regex"
diff --git a/vendor/regex-syntax/src/unicode.rs b/vendor/regex-syntax/src/unicode.rs
index 70d5954b7..8194d7f55 100644
--- a/vendor/regex-syntax/src/unicode.rs
+++ b/vendor/regex-syntax/src/unicode.rs
@@ -605,6 +605,7 @@ fn ages(canonical_age: &str) -> Result<impl Iterator<Item = Range>> {
("V12_1", age::V12_1),
("V13_0", age::V13_0),
("V14_0", age::V14_0),
+ ("V15_0", age::V15_0),
];
assert_eq!(AGES.len(), age::BY_NAME.len(), "ages are out of sync");
diff --git a/vendor/regex-syntax/src/unicode_tables/age.rs b/vendor/regex-syntax/src/unicode_tables/age.rs
index ffdfef316..71f4861e0 100644
--- a/vendor/regex-syntax/src/unicode_tables/age.rs
+++ b/vendor/regex-syntax/src/unicode_tables/age.rs
@@ -1,10 +1,10 @@
// DO NOT EDIT THIS FILE. IT WAS AUTOMATICALLY GENERATED BY:
//
-// ucd-generate age /tmp/ucd --chars
+// ucd-generate age ucd-15.0.0 --chars
//
-// Unicode version: 14.0.0.
+// Unicode version: 15.0.0.
//
-// ucd-generate 0.2.11 is available on crates.io.
+// ucd-generate 0.2.14 is available on crates.io.
pub const BY_NAME: &'static [(&'static str, &'static [(char, char)])] = &[
("V10_0", V10_0),
@@ -13,6 +13,7 @@ pub const BY_NAME: &'static [(&'static str, &'static [(char, char)])] = &[
("V12_1", V12_1),
("V13_0", V13_0),
("V14_0", V14_0),
+ ("V15_0", V15_0),
("V1_1", V1_1),
("V2_0", V2_0),
("V2_1", V2_1),
@@ -346,6 +347,41 @@ pub const V14_0: &'static [(char, char)] = &[
('𫜵', '𫜸'),
];
+pub const V15_0: &'static [(char, char)] = &[
+ ('ೳ', 'ೳ'),
+ ('\u{ece}', '\u{ece}'),
+ ('\u{10efd}', '\u{10eff}'),
+ ('𑈿', '\u{11241}'),
+ ('𑬀', '𑬉'),
+ ('\u{11f00}', '𑼐'),
+ ('𑼒', '\u{11f3a}'),
+ ('𑼾', '𑽙'),
+ ('𓐯', '𓐯'),
+ ('\u{13439}', '\u{13455}'),
+ ('𛄲', '𛄲'),
+ ('𛅕', '𛅕'),
+ ('𝋀', '𝋓'),
+ ('𝼥', '𝼪'),
+ ('𞀰', '𞁭'),
+ ('\u{1e08f}', '\u{1e08f}'),
+ ('𞓐', '𞓹'),
+ ('🛜', '🛜'),
+ ('🝴', '🝶'),
+ ('🝻', '🝿'),
+ ('🟙', '🟙'),
+ ('🩵', '🩷'),
+ ('🪇', '🪈'),
+ ('🪭', '🪯'),
+ ('🪻', '🪽'),
+ ('🪿', '🪿'),
+ ('🫎', '🫏'),
+ ('🫚', '🫛'),
+ ('🫨', '🫨'),
+ ('🫷', '🫸'),
+ ('𫜹', '𫜹'),
+ ('𱍐', '𲎯'),
+];
+
pub const V1_1: &'static [(char, char)] = &[
('\0', 'ǵ'),
('Ǻ', 'ȗ'),
diff --git a/vendor/regex-syntax/src/unicode_tables/case_folding_simple.rs b/vendor/regex-syntax/src/unicode_tables/case_folding_simple.rs
index 766d21b48..23f9364ce 100644
--- a/vendor/regex-syntax/src/unicode_tables/case_folding_simple.rs
+++ b/vendor/regex-syntax/src/unicode_tables/case_folding_simple.rs
@@ -1,10 +1,10 @@
// DO NOT EDIT THIS FILE. IT WAS AUTOMATICALLY GENERATED BY:
//
-// ucd-generate case-folding-simple /tmp/ucd --chars --all-pairs
+// ucd-generate case-folding-simple ucd-15.0.0 --chars --all-pairs
//
-// Unicode version: 14.0.0.
+// Unicode version: 15.0.0.
//
-// ucd-generate 0.2.11 is available on crates.io.
+// ucd-generate 0.2.14 is available on crates.io.
pub const CASE_FOLDING_SIMPLE: &'static [(char, &'static [char])] = &[
('A', &['a']),
diff --git a/vendor/regex-syntax/src/unicode_tables/general_category.rs b/vendor/regex-syntax/src/unicode_tables/general_category.rs
index 8aa6b0078..8fc928912 100644
--- a/vendor/regex-syntax/src/unicode_tables/general_category.rs
+++ b/vendor/regex-syntax/src/unicode_tables/general_category.rs
@@ -1,10 +1,10 @@
// DO NOT EDIT THIS FILE. IT WAS AUTOMATICALLY GENERATED BY:
//
-// ucd-generate general-category /tmp/ucd --chars --exclude surrogate
+// ucd-generate general-category ucd-15.0.0 --chars --exclude surrogate
//
-// Unicode version: 14.0.0.
+// Unicode version: 15.0.0.
//
-// ucd-generate 0.2.11 is available on crates.io.
+// ucd-generate 0.2.14 is available on crates.io.
pub const BY_NAME: &'static [(&'static str, &'static [(char, char)])] = &[
("Cased_Letter", CASED_LETTER),
@@ -188,6 +188,7 @@ pub const CASED_LETTER: &'static [(char, char)] = &[
('𝟄', '𝟋'),
('𝼀', '𝼉'),
('𝼋', '𝼞'),
+ ('𝼥', '𝼪'),
('𞤀', '𞥃'),
];
@@ -383,12 +384,14 @@ pub const DECIMAL_NUMBER: &'static [(char, char)] = &[
('𑱐', '𑱙'),
('𑵐', '𑵙'),
('𑶠', '𑶩'),
+ ('𑽐', '𑽙'),
('𖩠', '𖩩'),
('𖫀', '𖫉'),
('𖭐', '𖭙'),
('𝟎', '𝟿'),
('𞅀', '𞅉'),
('𞋰', '𞋹'),
+ ('𞓰', '𞓹'),
('𞥐', '𞥙'),
('🯰', '🯹'),
];
@@ -431,7 +434,7 @@ pub const FORMAT: &'static [(char, char)] = &[
('\u{fff9}', '\u{fffb}'),
('\u{110bd}', '\u{110bd}'),
('\u{110cd}', '\u{110cd}'),
- ('\u{13430}', '\u{13438}'),
+ ('\u{13430}', '\u{1343f}'),
('\u{1bca0}', '\u{1bca3}'),
('\u{1d173}', '\u{1d17a}'),
('\u{e0001}', '\u{e0001}'),
@@ -922,6 +925,7 @@ pub const LETTER: &'static [(char, char)] = &[
('𑇜', '𑇜'),
('𑈀', '𑈑'),
('𑈓', '𑈫'),
+ ('𑈿', '𑉀'),
('𑊀', '𑊆'),
('𑊈', '𑊈'),
('𑊊', '𑊍'),
@@ -984,11 +988,15 @@ pub const LETTER: &'static [(char, char)] = &[
('𑵪', '𑶉'),
('𑶘', '𑶘'),
('𑻠', '𑻲'),
+ ('𑼂', '𑼂'),
+ ('𑼄', '𑼐'),
+ ('𑼒', '𑼳'),
('𑾰', '𑾰'),
('𒀀', '𒎙'),
('𒒀', '𒕃'),
('𒾐', '𒿰'),
- ('𓀀', '𓐮'),
+ ('𓀀', '𓐯'),
+ ('𓑁', '𓑆'),
('𔐀', '𔙆'),
('𖠀', '𖨸'),
('𖩀', '𖩞'),
@@ -1011,7 +1019,9 @@ pub const LETTER: &'static [(char, char)] = &[
('𚿵', '𚿻'),
('𚿽', '𚿾'),
('𛀀', '𛄢'),
+ ('𛄲', '𛄲'),
('𛅐', '𛅒'),
+ ('𛅕', '𛅕'),
('𛅤', '𛅧'),
('𛅰', '𛋻'),
('𛰀', '𛱪'),
@@ -1049,11 +1059,14 @@ pub const LETTER: &'static [(char, char)] = &[
('𝞪', '𝟂'),
('𝟄', '𝟋'),
('𝼀', '𝼞'),
+ ('𝼥', '𝼪'),
+ ('𞀰', '𞁭'),
('𞄀', '𞄬'),
('𞄷', '𞄽'),
('𞅎', '𞅎'),
('𞊐', '𞊭'),
('𞋀', '𞋫'),
+ ('𞓐', '𞓫'),
('𞟠', '𞟦'),
('𞟨', '𞟫'),
('𞟭', '𞟮'),
@@ -1095,12 +1108,13 @@ pub const LETTER: &'static [(char, char)] = &[
('𞺥', '𞺩'),
('𞺫', '𞺻'),
('𠀀', '𪛟'),
- ('𪜀', '𫜸'),
+ ('𪜀', '𫜹'),
('𫝀', '𫠝'),
('𫠠', '𬺡'),
('𬺰', '𮯠'),
('丽', '𪘀'),
('𰀀', '𱍊'),
+ ('𱍐', '𲎯'),
];
pub const LETTER_NUMBER: &'static [(char, char)] = &[
@@ -1778,6 +1792,7 @@ pub const LOWERCASE_LETTER: &'static [(char, char)] = &[
('𝟋', '𝟋'),
('𝼀', '𝼉'),
('𝼋', '𝼞'),
+ ('𝼥', '𝼪'),
('𞤢', '𞥃'),
];
@@ -1862,6 +1877,7 @@ pub const MARK: &'static [(char, char)] = &[
('ೊ', '\u{ccd}'),
('\u{cd5}', '\u{cd6}'),
('\u{ce2}', '\u{ce3}'),
+ ('ೳ', 'ೳ'),
('\u{d00}', 'ഃ'),
('\u{d3b}', '\u{d3c}'),
('\u{d3e}', '\u{d44}'),
@@ -1880,7 +1896,7 @@ pub const MARK: &'static [(char, char)] = &[
('\u{e47}', '\u{e4e}'),
('\u{eb1}', '\u{eb1}'),
('\u{eb4}', '\u{ebc}'),
- ('\u{ec8}', '\u{ecd}'),
+ ('\u{ec8}', '\u{ece}'),
('\u{f18}', '\u{f19}'),
('\u{f35}', '\u{f35}'),
('\u{f37}', '\u{f37}'),
@@ -1982,6 +1998,7 @@ pub const MARK: &'static [(char, char)] = &[
('\u{10ae5}', '\u{10ae6}'),
('\u{10d24}', '\u{10d27}'),
('\u{10eab}', '\u{10eac}'),
+ ('\u{10efd}', '\u{10eff}'),
('\u{10f46}', '\u{10f50}'),
('\u{10f82}', '\u{10f85}'),
('𑀀', '𑀂'),
@@ -2001,6 +2018,7 @@ pub const MARK: &'static [(char, char)] = &[
('𑇎', '\u{111cf}'),
('𑈬', '\u{11237}'),
('\u{1123e}', '\u{1123e}'),
+ ('\u{11241}', '\u{11241}'),
('\u{112df}', '\u{112ea}'),
('\u{11300}', '𑌃'),
('\u{1133b}', '\u{1133c}'),
@@ -2048,6 +2066,12 @@ pub const MARK: &'static [(char, char)] = &[
('\u{11d90}', '\u{11d91}'),
('𑶓', '\u{11d97}'),
('\u{11ef3}', '𑻶'),
+ ('\u{11f00}', '\u{11f01}'),
+ ('𑼃', '𑼃'),
+ ('𑼴', '\u{11f3a}'),
+ ('𑼾', '\u{11f42}'),
+ ('\u{13440}', '\u{13440}'),
+ ('\u{13447}', '\u{13455}'),
('\u{16af0}', '\u{16af4}'),
('\u{16b30}', '\u{16b36}'),
('\u{16f4f}', '\u{16f4f}'),
@@ -2075,9 +2099,11 @@ pub const MARK: &'static [(char, char)] = &[
('\u{1e01b}', '\u{1e021}'),
('\u{1e023}', '\u{1e024}'),
('\u{1e026}', '\u{1e02a}'),
+ ('\u{1e08f}', '\u{1e08f}'),
('\u{1e130}', '\u{1e136}'),
('\u{1e2ae}', '\u{1e2ae}'),
('\u{1e2ec}', '\u{1e2ef}'),
+ ('\u{1e4ec}', '\u{1e4ef}'),
('\u{1e8d0}', '\u{1e8d6}'),
('\u{1e944}', '\u{1e94a}'),
('\u{e0100}', '\u{e01ef}'),
@@ -2218,7 +2244,9 @@ pub const MODIFIER_LETTER: &'static [(char, char)] = &[
('𚿰', '𚿳'),
('𚿵', '𚿻'),
('𚿽', '𚿾'),
+ ('𞀰', '𞁭'),
('𞄷', '𞄽'),
+ ('𞓫', '𞓫'),
('𞥋', '𞥋'),
];
@@ -2349,7 +2377,7 @@ pub const NONSPACING_MARK: &'static [(char, char)] = &[
('\u{e47}', '\u{e4e}'),
('\u{eb1}', '\u{eb1}'),
('\u{eb4}', '\u{ebc}'),
- ('\u{ec8}', '\u{ecd}'),
+ ('\u{ec8}', '\u{ece}'),
('\u{f18}', '\u{f19}'),
('\u{f35}', '\u{f35}'),
('\u{f37}', '\u{f37}'),
@@ -2480,6 +2508,7 @@ pub const NONSPACING_MARK: &'static [(char, char)] = &[
('\u{10ae5}', '\u{10ae6}'),
('\u{10d24}', '\u{10d27}'),
('\u{10eab}', '\u{10eac}'),
+ ('\u{10efd}', '\u{10eff}'),
('\u{10f46}', '\u{10f50}'),
('\u{10f82}', '\u{10f85}'),
('\u{11001}', '\u{11001}'),
@@ -2502,6 +2531,7 @@ pub const NONSPACING_MARK: &'static [(char, char)] = &[
('\u{11234}', '\u{11234}'),
('\u{11236}', '\u{11237}'),
('\u{1123e}', '\u{1123e}'),
+ ('\u{11241}', '\u{11241}'),
('\u{112df}', '\u{112df}'),
('\u{112e3}', '\u{112ea}'),
('\u{11300}', '\u{11301}'),
@@ -2563,6 +2593,12 @@ pub const NONSPACING_MARK: &'static [(char, char)] = &[
('\u{11d95}', '\u{11d95}'),
('\u{11d97}', '\u{11d97}'),
('\u{11ef3}', '\u{11ef4}'),
+ ('\u{11f00}', '\u{11f01}'),
+ ('\u{11f36}', '\u{11f3a}'),
+ ('\u{11f40}', '\u{11f40}'),
+ ('\u{11f42}', '\u{11f42}'),
+ ('\u{13440}', '\u{13440}'),
+ ('\u{13447}', '\u{13455}'),
('\u{16af0}', '\u{16af4}'),
('\u{16b30}', '\u{16b36}'),
('\u{16f4f}', '\u{16f4f}'),
@@ -2587,9 +2623,11 @@ pub const NONSPACING_MARK: &'static [(char, char)] = &[
('\u{1e01b}', '\u{1e021}'),
('\u{1e023}', '\u{1e024}'),
('\u{1e026}', '\u{1e02a}'),
+ ('\u{1e08f}', '\u{1e08f}'),
('\u{1e130}', '\u{1e136}'),
('\u{1e2ae}', '\u{1e2ae}'),
('\u{1e2ec}', '\u{1e2ef}'),
+ ('\u{1e4ec}', '\u{1e4ef}'),
('\u{1e8d0}', '\u{1e8d6}'),
('\u{1e944}', '\u{1e94a}'),
('\u{e0100}', '\u{e01ef}'),
@@ -2709,6 +2747,7 @@ pub const NUMBER: &'static [(char, char)] = &[
('𑱐', '𑱬'),
('𑵐', '𑵙'),
('𑶠', '𑶩'),
+ ('𑽐', '𑽙'),
('𑿀', '𑿔'),
('𒐀', '𒑮'),
('𖩠', '𖩩'),
@@ -2716,11 +2755,13 @@ pub const NUMBER: &'static [(char, char)] = &[
('𖭐', '𖭙'),
('𖭛', '𖭡'),
('𖺀', '𖺖'),
+ ('𝋀', '𝋓'),
('𝋠', '𝋳'),
('𝍠', '𝍸'),
('𝟎', '𝟿'),
('𞅀', '𞅉'),
('𞋰', '𞋹'),
+ ('𞓰', '𞓹'),
('𞣇', '𞣏'),
('𞥐', '𞥙'),
('𞱱', '𞲫'),
@@ -2941,7 +2982,7 @@ pub const OTHER: &'static [(char, char)] = &[
('\u{cdf}', '\u{cdf}'),
('\u{ce4}', '\u{ce5}'),
('\u{cf0}', '\u{cf0}'),
- ('\u{cf3}', '\u{cff}'),
+ ('\u{cf4}', '\u{cff}'),
('\u{d0d}', '\u{d0d}'),
('\u{d11}', '\u{d11}'),
('\u{d45}', '\u{d45}'),
@@ -2971,7 +3012,7 @@ pub const OTHER: &'static [(char, char)] = &[
('\u{ebe}', '\u{ebf}'),
('\u{ec5}', '\u{ec5}'),
('\u{ec7}', '\u{ec7}'),
- ('\u{ece}', '\u{ecf}'),
+ ('\u{ecf}', '\u{ecf}'),
('\u{eda}', '\u{edb}'),
('\u{ee0}', '\u{eff}'),
('\u{f48}', '\u{f48}'),
@@ -3243,7 +3284,7 @@ pub const OTHER: &'static [(char, char)] = &[
('\u{10e7f}', '\u{10e7f}'),
('\u{10eaa}', '\u{10eaa}'),
('\u{10eae}', '\u{10eaf}'),
- ('\u{10eb2}', '\u{10eff}'),
+ ('\u{10eb2}', '\u{10efc}'),
('\u{10f28}', '\u{10f2f}'),
('\u{10f5a}', '\u{10f6f}'),
('\u{10f8a}', '\u{10faf}'),
@@ -3261,7 +3302,7 @@ pub const OTHER: &'static [(char, char)] = &[
('\u{111e0}', '\u{111e0}'),
('\u{111f5}', '\u{111ff}'),
('\u{11212}', '\u{11212}'),
- ('\u{1123f}', '\u{1127f}'),
+ ('\u{11242}', '\u{1127f}'),
('\u{11287}', '\u{11287}'),
('\u{11289}', '\u{11289}'),
('\u{1128e}', '\u{1128e}'),
@@ -3313,7 +3354,8 @@ pub const OTHER: &'static [(char, char)] = &[
('\u{119e5}', '\u{119ff}'),
('\u{11a48}', '\u{11a4f}'),
('\u{11aa3}', '\u{11aaf}'),
- ('\u{11af9}', '\u{11bff}'),
+ ('\u{11af9}', '\u{11aff}'),
+ ('\u{11b0a}', '\u{11bff}'),
('\u{11c09}', '\u{11c09}'),
('\u{11c37}', '\u{11c37}'),
('\u{11c46}', '\u{11c4f}'),
@@ -3334,7 +3376,10 @@ pub const OTHER: &'static [(char, char)] = &[
('\u{11d92}', '\u{11d92}'),
('\u{11d99}', '\u{11d9f}'),
('\u{11daa}', '\u{11edf}'),
- ('\u{11ef9}', '\u{11faf}'),
+ ('\u{11ef9}', '\u{11eff}'),
+ ('\u{11f11}', '\u{11f11}'),
+ ('\u{11f3b}', '\u{11f3d}'),
+ ('\u{11f5a}', '\u{11faf}'),
('\u{11fb1}', '\u{11fbf}'),
('\u{11ff2}', '\u{11ffe}'),
('\u{1239a}', '\u{123ff}'),
@@ -3342,7 +3387,8 @@ pub const OTHER: &'static [(char, char)] = &[
('\u{12475}', '\u{1247f}'),
('\u{12544}', '\u{12f8f}'),
('\u{12ff3}', '\u{12fff}'),
- ('\u{1342f}', '\u{143ff}'),
+ ('\u{13430}', '\u{1343f}'),
+ ('\u{13456}', '\u{143ff}'),
('\u{14647}', '\u{167ff}'),
('\u{16a39}', '\u{16a3f}'),
('\u{16a5f}', '\u{16a5f}'),
@@ -3368,8 +3414,10 @@ pub const OTHER: &'static [(char, char)] = &[
('\u{1aff4}', '\u{1aff4}'),
('\u{1affc}', '\u{1affc}'),
('\u{1afff}', '\u{1afff}'),
- ('\u{1b123}', '\u{1b14f}'),
- ('\u{1b153}', '\u{1b163}'),
+ ('\u{1b123}', '\u{1b131}'),
+ ('\u{1b133}', '\u{1b14f}'),
+ ('\u{1b153}', '\u{1b154}'),
+ ('\u{1b156}', '\u{1b163}'),
('\u{1b168}', '\u{1b16f}'),
('\u{1b2fc}', '\u{1bbff}'),
('\u{1bc6b}', '\u{1bc6f}'),
@@ -3384,7 +3432,8 @@ pub const OTHER: &'static [(char, char)] = &[
('\u{1d127}', '\u{1d128}'),
('\u{1d173}', '\u{1d17a}'),
('\u{1d1eb}', '\u{1d1ff}'),
- ('\u{1d246}', '\u{1d2df}'),
+ ('\u{1d246}', '\u{1d2bf}'),
+ ('\u{1d2d4}', '\u{1d2df}'),
('\u{1d2f4}', '\u{1d2ff}'),
('\u{1d357}', '\u{1d35f}'),
('\u{1d379}', '\u{1d3ff}'),
@@ -3411,19 +3460,23 @@ pub const OTHER: &'static [(char, char)] = &[
('\u{1da8c}', '\u{1da9a}'),
('\u{1daa0}', '\u{1daa0}'),
('\u{1dab0}', '\u{1deff}'),
- ('\u{1df1f}', '\u{1dfff}'),
+ ('\u{1df1f}', '\u{1df24}'),
+ ('\u{1df2b}', '\u{1dfff}'),
('\u{1e007}', '\u{1e007}'),
('\u{1e019}', '\u{1e01a}'),
('\u{1e022}', '\u{1e022}'),
('\u{1e025}', '\u{1e025}'),
- ('\u{1e02b}', '\u{1e0ff}'),
+ ('\u{1e02b}', '\u{1e02f}'),
+ ('\u{1e06e}', '\u{1e08e}'),
+ ('\u{1e090}', '\u{1e0ff}'),
('\u{1e12d}', '\u{1e12f}'),
('\u{1e13e}', '\u{1e13f}'),
('\u{1e14a}', '\u{1e14d}'),
('\u{1e150}', '\u{1e28f}'),
('\u{1e2af}', '\u{1e2bf}'),
('\u{1e2fa}', '\u{1e2fe}'),
- ('\u{1e300}', '\u{1e7df}'),
+ ('\u{1e300}', '\u{1e4cf}'),
+ ('\u{1e4fa}', '\u{1e7df}'),
('\u{1e7e7}', '\u{1e7e7}'),
('\u{1e7ec}', '\u{1e7ec}'),
('\u{1e7ef}', '\u{1e7ef}'),
@@ -3481,11 +3534,11 @@ pub const OTHER: &'static [(char, char)] = &[
('\u{1f249}', '\u{1f24f}'),
('\u{1f252}', '\u{1f25f}'),
('\u{1f266}', '\u{1f2ff}'),
- ('\u{1f6d8}', '\u{1f6dc}'),
+ ('\u{1f6d8}', '\u{1f6db}'),
('\u{1f6ed}', '\u{1f6ef}'),
('\u{1f6fd}', '\u{1f6ff}'),
- ('\u{1f774}', '\u{1f77f}'),
- ('\u{1f7d9}', '\u{1f7df}'),
+ ('\u{1f777}', '\u{1f77a}'),
+ ('\u{1f7da}', '\u{1f7df}'),
('\u{1f7ec}', '\u{1f7ef}'),
('\u{1f7f1}', '\u{1f7ff}'),
('\u{1f80c}', '\u{1f80f}'),
@@ -3496,25 +3549,24 @@ pub const OTHER: &'static [(char, char)] = &[
('\u{1f8b2}', '\u{1f8ff}'),
('\u{1fa54}', '\u{1fa5f}'),
('\u{1fa6e}', '\u{1fa6f}'),
- ('\u{1fa75}', '\u{1fa77}'),
('\u{1fa7d}', '\u{1fa7f}'),
- ('\u{1fa87}', '\u{1fa8f}'),
- ('\u{1faad}', '\u{1faaf}'),
- ('\u{1fabb}', '\u{1fabf}'),
- ('\u{1fac6}', '\u{1facf}'),
- ('\u{1fada}', '\u{1fadf}'),
- ('\u{1fae8}', '\u{1faef}'),
- ('\u{1faf7}', '\u{1faff}'),
+ ('\u{1fa89}', '\u{1fa8f}'),
+ ('\u{1fabe}', '\u{1fabe}'),
+ ('\u{1fac6}', '\u{1facd}'),
+ ('\u{1fadc}', '\u{1fadf}'),
+ ('\u{1fae9}', '\u{1faef}'),
+ ('\u{1faf9}', '\u{1faff}'),
('\u{1fb93}', '\u{1fb93}'),
('\u{1fbcb}', '\u{1fbef}'),
('\u{1fbfa}', '\u{1ffff}'),
('\u{2a6e0}', '\u{2a6ff}'),
- ('\u{2b739}', '\u{2b73f}'),
+ ('\u{2b73a}', '\u{2b73f}'),
('\u{2b81e}', '\u{2b81f}'),
('\u{2cea2}', '\u{2ceaf}'),
('\u{2ebe1}', '\u{2f7ff}'),
('\u{2fa1e}', '\u{2ffff}'),
- ('\u{3134b}', '\u{e00ff}'),
+ ('\u{3134b}', '\u{3134f}'),
+ ('\u{323b0}', '\u{e00ff}'),
('\u{e01f0}', '\u{10ffff}'),
];
@@ -3883,6 +3935,7 @@ pub const OTHER_LETTER: &'static [(char, char)] = &[
('𑇜', '𑇜'),
('𑈀', '𑈑'),
('𑈓', '𑈫'),
+ ('𑈿', '𑉀'),
('𑊀', '𑊆'),
('𑊈', '𑊈'),
('𑊊', '𑊍'),
@@ -3944,11 +3997,15 @@ pub const OTHER_LETTER: &'static [(char, char)] = &[
('𑵪', '𑶉'),
('𑶘', '𑶘'),
('𑻠', '𑻲'),
+ ('𑼂', '𑼂'),
+ ('𑼄', '𑼐'),
+ ('𑼒', '𑼳'),
('𑾰', '𑾰'),
('𒀀', '𒎙'),
('𒒀', '𒕃'),
('𒾐', '𒿰'),
- ('𓀀', '𓐮'),
+ ('𓀀', '𓐯'),
+ ('𓑁', '𓑆'),
('𔐀', '𔙆'),
('𖠀', '𖨸'),
('𖩀', '𖩞'),
@@ -3963,7 +4020,9 @@ pub const OTHER_LETTER: &'static [(char, char)] = &[
('𘠀', '𘳕'),
('𘴀', '𘴈'),
('𛀀', '𛄢'),
+ ('𛄲', '𛄲'),
('𛅐', '𛅒'),
+ ('𛅕', '𛅕'),
('𛅤', '𛅧'),
('𛅰', '𛋻'),
('𛰀', '𛱪'),
@@ -3975,6 +4034,7 @@ pub const OTHER_LETTER: &'static [(char, char)] = &[
('𞅎', '𞅎'),
('𞊐', '𞊭'),
('𞋀', '𞋫'),
+ ('𞓐', '𞓪'),
('𞟠', '𞟦'),
('𞟨', '𞟫'),
('𞟭', '𞟮'),
@@ -4014,12 +4074,13 @@ pub const OTHER_LETTER: &'static [(char, char)] = &[
('𞺥', '𞺩'),
('𞺫', '𞺻'),
('𠀀', '𪛟'),
- ('𪜀', '𫜸'),
+ ('𪜀', '𫜹'),
('𫝀', '𫠝'),
('𫠠', '𬺡'),
('𬺰', '𮯠'),
('丽', '𪘀'),
('𰀀', '𱍊'),
+ ('𱍐', '𲎯'),
];
pub const OTHER_NUMBER: &'static [(char, char)] = &[
@@ -4085,6 +4146,7 @@ pub const OTHER_NUMBER: &'static [(char, char)] = &[
('𑿀', '𑿔'),
('𖭛', '𖭡'),
('𖺀', '𖺖'),
+ ('𝋀', '𝋓'),
('𝋠', '𝋳'),
('𝍠', '𝍸'),
('𞣇', '𞣏'),
@@ -4267,9 +4329,11 @@ pub const OTHER_PUNCTUATION: &'static [(char, char)] = &[
('𑨿', '𑩆'),
('𑪚', '𑪜'),
('𑪞', '𑪢'),
+ ('𑬀', '𑬉'),
('𑱁', '𑱅'),
('𑱰', '𑱱'),
('𑻷', '𑻸'),
+ ('𑽃', '𑽏'),
('𑿿', '𑿿'),
('𒑰', '𒑴'),
('𒿱', '𒿲'),
@@ -4446,10 +4510,10 @@ pub const OTHER_SYMBOL: &'static [(char, char)] = &[
('🉠', '🉥'),
('🌀', '🏺'),
('🐀', '🛗'),
- ('🛝', '🛬'),
+ ('🛜', '🛬'),
('🛰', '🛼'),
- ('🜀', '🝳'),
- ('🞀', '🟘'),
+ ('🜀', '🝶'),
+ ('🝻', '🟙'),
('🟠', '🟫'),
('🟰', '🟰'),
('🠀', '🠋'),
@@ -4460,15 +4524,13 @@ pub const OTHER_SYMBOL: &'static [(char, char)] = &[
('🢰', '🢱'),
('🤀', '🩓'),
('🩠', '🩭'),
- ('🩰', '🩴'),
- ('🩸', '🩼'),
- ('🪀', '🪆'),
- ('🪐', '🪬'),
- ('🪰', '🪺'),
- ('🫀', '🫅'),
- ('🫐', '🫙'),
- ('🫠', '🫧'),
- ('🫰', '🫶'),
+ ('🩰', '🩼'),
+ ('🪀', '🪈'),
+ ('🪐', '🪽'),
+ ('🪿', '🫅'),
+ ('🫎', '🫛'),
+ ('🫠', '🫨'),
+ ('🫰', '🫸'),
('🬀', '🮒'),
('🮔', '🯊'),
];
@@ -4657,9 +4719,11 @@ pub const PUNCTUATION: &'static [(char, char)] = &[
('𑨿', '𑩆'),
('𑪚', '𑪜'),
('𑪞', '𑪢'),
+ ('𑬀', '𑬉'),
('𑱁', '𑱅'),
('𑱰', '𑱱'),
('𑻷', '𑻸'),
+ ('𑽃', '𑽏'),
('𑿿', '𑿿'),
('𒑰', '𒑴'),
('𒿱', '𒿲'),
@@ -4731,6 +4795,7 @@ pub const SPACING_MARK: &'static [(char, char)] = &[
('ೇ', 'ೈ'),
('ೊ', 'ೋ'),
('\u{cd5}', '\u{cd6}'),
+ ('ೳ', 'ೳ'),
('ം', 'ഃ'),
('\u{d3e}', 'ീ'),
('െ', 'ൈ'),
@@ -4869,6 +4934,10 @@ pub const SPACING_MARK: &'static [(char, char)] = &[
('𑶓', '𑶔'),
('𑶖', '𑶖'),
('𑻵', '𑻶'),
+ ('𑼃', '𑼃'),
+ ('𑼴', '𑼵'),
+ ('𑼾', '𑼿'),
+ ('𑽁', '𑽁'),
('𖽑', '𖾇'),
('𖿰', '𖿱'),
('\u{1d165}', '𝅦'),
@@ -5085,10 +5154,10 @@ pub const SYMBOL: &'static [(char, char)] = &[
('🉐', '🉑'),
('🉠', '🉥'),
('🌀', '🛗'),
- ('🛝', '🛬'),
+ ('🛜', '🛬'),
('🛰', '🛼'),
- ('🜀', '🝳'),
- ('🞀', '🟘'),
+ ('🜀', '🝶'),
+ ('🝻', '🟙'),
('🟠', '🟫'),
('🟰', '🟰'),
('🠀', '🠋'),
@@ -5099,15 +5168,13 @@ pub const SYMBOL: &'static [(char, char)] = &[
('🢰', '🢱'),
('🤀', '🩓'),
('🩠', '🩭'),
- ('🩰', '🩴'),
- ('🩸', '🩼'),
- ('🪀', '🪆'),
- ('🪐', '🪬'),
- ('🪰', '🪺'),
- ('🫀', '🫅'),
- ('🫐', '🫙'),
- ('🫠', '🫧'),
- ('🫰', '🫶'),
+ ('🩰', '🩼'),
+ ('🪀', '🪈'),
+ ('🪐', '🪽'),
+ ('🪿', '🫅'),
+ ('🫎', '🫛'),
+ ('🫠', '🫨'),
+ ('🫰', '🫸'),
('🬀', '🮒'),
('🮔', '🯊'),
];
@@ -5247,7 +5314,7 @@ pub const UNASSIGNED: &'static [(char, char)] = &[
('\u{cdf}', '\u{cdf}'),
('\u{ce4}', '\u{ce5}'),
('\u{cf0}', '\u{cf0}'),
- ('\u{cf3}', '\u{cff}'),
+ ('\u{cf4}', '\u{cff}'),
('\u{d0d}', '\u{d0d}'),
('\u{d11}', '\u{d11}'),
('\u{d45}', '\u{d45}'),
@@ -5277,7 +5344,7 @@ pub const UNASSIGNED: &'static [(char, char)] = &[
('\u{ebe}', '\u{ebf}'),
('\u{ec5}', '\u{ec5}'),
('\u{ec7}', '\u{ec7}'),
- ('\u{ece}', '\u{ecf}'),
+ ('\u{ecf}', '\u{ecf}'),
('\u{eda}', '\u{edb}'),
('\u{ee0}', '\u{eff}'),
('\u{f48}', '\u{f48}'),
@@ -5547,7 +5614,7 @@ pub const UNASSIGNED: &'static [(char, char)] = &[
('\u{10e7f}', '\u{10e7f}'),
('\u{10eaa}', '\u{10eaa}'),
('\u{10eae}', '\u{10eaf}'),
- ('\u{10eb2}', '\u{10eff}'),
+ ('\u{10eb2}', '\u{10efc}'),
('\u{10f28}', '\u{10f2f}'),
('\u{10f5a}', '\u{10f6f}'),
('\u{10f8a}', '\u{10faf}'),
@@ -5565,7 +5632,7 @@ pub const UNASSIGNED: &'static [(char, char)] = &[
('\u{111e0}', '\u{111e0}'),
('\u{111f5}', '\u{111ff}'),
('\u{11212}', '\u{11212}'),
- ('\u{1123f}', '\u{1127f}'),
+ ('\u{11242}', '\u{1127f}'),
('\u{11287}', '\u{11287}'),
('\u{11289}', '\u{11289}'),
('\u{1128e}', '\u{1128e}'),
@@ -5617,7 +5684,8 @@ pub const UNASSIGNED: &'static [(char, char)] = &[
('\u{119e5}', '\u{119ff}'),
('\u{11a48}', '\u{11a4f}'),
('\u{11aa3}', '\u{11aaf}'),
- ('\u{11af9}', '\u{11bff}'),
+ ('\u{11af9}', '\u{11aff}'),
+ ('\u{11b0a}', '\u{11bff}'),
('\u{11c09}', '\u{11c09}'),
('\u{11c37}', '\u{11c37}'),
('\u{11c46}', '\u{11c4f}'),
@@ -5638,7 +5706,10 @@ pub const UNASSIGNED: &'static [(char, char)] = &[
('\u{11d92}', '\u{11d92}'),
('\u{11d99}', '\u{11d9f}'),
('\u{11daa}', '\u{11edf}'),
- ('\u{11ef9}', '\u{11faf}'),
+ ('\u{11ef9}', '\u{11eff}'),
+ ('\u{11f11}', '\u{11f11}'),
+ ('\u{11f3b}', '\u{11f3d}'),
+ ('\u{11f5a}', '\u{11faf}'),
('\u{11fb1}', '\u{11fbf}'),
('\u{11ff2}', '\u{11ffe}'),
('\u{1239a}', '\u{123ff}'),
@@ -5646,8 +5717,7 @@ pub const UNASSIGNED: &'static [(char, char)] = &[
('\u{12475}', '\u{1247f}'),
('\u{12544}', '\u{12f8f}'),
('\u{12ff3}', '\u{12fff}'),
- ('\u{1342f}', '\u{1342f}'),
- ('\u{13439}', '\u{143ff}'),
+ ('\u{13456}', '\u{143ff}'),
('\u{14647}', '\u{167ff}'),
('\u{16a39}', '\u{16a3f}'),
('\u{16a5f}', '\u{16a5f}'),
@@ -5673,8 +5743,10 @@ pub const UNASSIGNED: &'static [(char, char)] = &[
('\u{1aff4}', '\u{1aff4}'),
('\u{1affc}', '\u{1affc}'),
('\u{1afff}', '\u{1afff}'),
- ('\u{1b123}', '\u{1b14f}'),
- ('\u{1b153}', '\u{1b163}'),
+ ('\u{1b123}', '\u{1b131}'),
+ ('\u{1b133}', '\u{1b14f}'),
+ ('\u{1b153}', '\u{1b154}'),
+ ('\u{1b156}', '\u{1b163}'),
('\u{1b168}', '\u{1b16f}'),
('\u{1b2fc}', '\u{1bbff}'),
('\u{1bc6b}', '\u{1bc6f}'),
@@ -5688,7 +5760,8 @@ pub const UNASSIGNED: &'static [(char, char)] = &[
('\u{1d0f6}', '\u{1d0ff}'),
('\u{1d127}', '\u{1d128}'),
('\u{1d1eb}', '\u{1d1ff}'),
- ('\u{1d246}', '\u{1d2df}'),
+ ('\u{1d246}', '\u{1d2bf}'),
+ ('\u{1d2d4}', '\u{1d2df}'),
('\u{1d2f4}', '\u{1d2ff}'),
('\u{1d357}', '\u{1d35f}'),
('\u{1d379}', '\u{1d3ff}'),
@@ -5715,19 +5788,23 @@ pub const UNASSIGNED: &'static [(char, char)] = &[
('\u{1da8c}', '\u{1da9a}'),
('\u{1daa0}', '\u{1daa0}'),
('\u{1dab0}', '\u{1deff}'),
- ('\u{1df1f}', '\u{1dfff}'),
+ ('\u{1df1f}', '\u{1df24}'),
+ ('\u{1df2b}', '\u{1dfff}'),
('\u{1e007}', '\u{1e007}'),
('\u{1e019}', '\u{1e01a}'),
('\u{1e022}', '\u{1e022}'),
('\u{1e025}', '\u{1e025}'),
- ('\u{1e02b}', '\u{1e0ff}'),
+ ('\u{1e02b}', '\u{1e02f}'),
+ ('\u{1e06e}', '\u{1e08e}'),
+ ('\u{1e090}', '\u{1e0ff}'),
('\u{1e12d}', '\u{1e12f}'),
('\u{1e13e}', '\u{1e13f}'),
('\u{1e14a}', '\u{1e14d}'),
('\u{1e150}', '\u{1e28f}'),
('\u{1e2af}', '\u{1e2bf}'),
('\u{1e2fa}', '\u{1e2fe}'),
- ('\u{1e300}', '\u{1e7df}'),
+ ('\u{1e300}', '\u{1e4cf}'),
+ ('\u{1e4fa}', '\u{1e7df}'),
('\u{1e7e7}', '\u{1e7e7}'),
('\u{1e7ec}', '\u{1e7ec}'),
('\u{1e7ef}', '\u{1e7ef}'),
@@ -5785,11 +5862,11 @@ pub const UNASSIGNED: &'static [(char, char)] = &[
('\u{1f249}', '\u{1f24f}'),
('\u{1f252}', '\u{1f25f}'),
('\u{1f266}', '\u{1f2ff}'),
- ('\u{1f6d8}', '\u{1f6dc}'),
+ ('\u{1f6d8}', '\u{1f6db}'),
('\u{1f6ed}', '\u{1f6ef}'),
('\u{1f6fd}', '\u{1f6ff}'),
- ('\u{1f774}', '\u{1f77f}'),
- ('\u{1f7d9}', '\u{1f7df}'),
+ ('\u{1f777}', '\u{1f77a}'),
+ ('\u{1f7da}', '\u{1f7df}'),
('\u{1f7ec}', '\u{1f7ef}'),
('\u{1f7f1}', '\u{1f7ff}'),
('\u{1f80c}', '\u{1f80f}'),
@@ -5800,25 +5877,24 @@ pub const UNASSIGNED: &'static [(char, char)] = &[
('\u{1f8b2}', '\u{1f8ff}'),
('\u{1fa54}', '\u{1fa5f}'),
('\u{1fa6e}', '\u{1fa6f}'),
- ('\u{1fa75}', '\u{1fa77}'),
('\u{1fa7d}', '\u{1fa7f}'),
- ('\u{1fa87}', '\u{1fa8f}'),
- ('\u{1faad}', '\u{1faaf}'),
- ('\u{1fabb}', '\u{1fabf}'),
- ('\u{1fac6}', '\u{1facf}'),
- ('\u{1fada}', '\u{1fadf}'),
- ('\u{1fae8}', '\u{1faef}'),
- ('\u{1faf7}', '\u{1faff}'),
+ ('\u{1fa89}', '\u{1fa8f}'),
+ ('\u{1fabe}', '\u{1fabe}'),
+ ('\u{1fac6}', '\u{1facd}'),
+ ('\u{1fadc}', '\u{1fadf}'),
+ ('\u{1fae9}', '\u{1faef}'),
+ ('\u{1faf9}', '\u{1faff}'),
('\u{1fb93}', '\u{1fb93}'),
('\u{1fbcb}', '\u{1fbef}'),
('\u{1fbfa}', '\u{1ffff}'),
('\u{2a6e0}', '\u{2a6ff}'),
- ('\u{2b739}', '\u{2b73f}'),
+ ('\u{2b73a}', '\u{2b73f}'),
('\u{2b81e}', '\u{2b81f}'),
('\u{2cea2}', '\u{2ceaf}'),
('\u{2ebe1}', '\u{2f7ff}'),
('\u{2fa1e}', '\u{2ffff}'),
- ('\u{3134b}', '\u{e0000}'),
+ ('\u{3134b}', '\u{3134f}'),
+ ('\u{323b0}', '\u{e0000}'),
('\u{e0002}', '\u{e001f}'),
('\u{e0080}', '\u{e00ff}'),
('\u{e01f0}', '\u{effff}'),
diff --git a/vendor/regex-syntax/src/unicode_tables/grapheme_cluster_break.rs b/vendor/regex-syntax/src/unicode_tables/grapheme_cluster_break.rs
index 38cfc73af..294dfbdcc 100644
--- a/vendor/regex-syntax/src/unicode_tables/grapheme_cluster_break.rs
+++ b/vendor/regex-syntax/src/unicode_tables/grapheme_cluster_break.rs
@@ -1,10 +1,10 @@
// DO NOT EDIT THIS FILE. IT WAS AUTOMATICALLY GENERATED BY:
//
-// ucd-generate grapheme-cluster-break /tmp/ucd --chars
+// ucd-generate grapheme-cluster-break ucd-15.0.0 --chars
//
-// Unicode version: 14.0.0.
+// Unicode version: 15.0.0.
//
-// ucd-generate 0.2.11 is available on crates.io.
+// ucd-generate 0.2.14 is available on crates.io.
pub const BY_NAME: &'static [(&'static str, &'static [(char, char)])] = &[
("CR", CR),
@@ -38,7 +38,7 @@ pub const CONTROL: &'static [(char, char)] = &[
('\u{2060}', '\u{206f}'),
('\u{feff}', '\u{feff}'),
('\u{fff0}', '\u{fffb}'),
- ('\u{13430}', '\u{13438}'),
+ ('\u{13430}', '\u{1343f}'),
('\u{1bca0}', '\u{1bca3}'),
('\u{1d173}', '\u{1d17a}'),
('\u{e0000}', '\u{e001f}'),
@@ -149,7 +149,7 @@ pub const EXTEND: &'static [(char, char)] = &[
('\u{e47}', '\u{e4e}'),
('\u{eb1}', '\u{eb1}'),
('\u{eb4}', '\u{ebc}'),
- ('\u{ec8}', '\u{ecd}'),
+ ('\u{ec8}', '\u{ece}'),
('\u{f18}', '\u{f19}'),
('\u{f35}', '\u{f35}'),
('\u{f37}', '\u{f37}'),
@@ -278,6 +278,7 @@ pub const EXTEND: &'static [(char, char)] = &[
('\u{10ae5}', '\u{10ae6}'),
('\u{10d24}', '\u{10d27}'),
('\u{10eab}', '\u{10eac}'),
+ ('\u{10efd}', '\u{10eff}'),
('\u{10f46}', '\u{10f50}'),
('\u{10f82}', '\u{10f85}'),
('\u{11001}', '\u{11001}'),
@@ -300,6 +301,7 @@ pub const EXTEND: &'static [(char, char)] = &[
('\u{11234}', '\u{11234}'),
('\u{11236}', '\u{11237}'),
('\u{1123e}', '\u{1123e}'),
+ ('\u{11241}', '\u{11241}'),
('\u{112df}', '\u{112df}'),
('\u{112e3}', '\u{112ea}'),
('\u{11300}', '\u{11301}'),
@@ -367,6 +369,12 @@ pub const EXTEND: &'static [(char, char)] = &[
('\u{11d95}', '\u{11d95}'),
('\u{11d97}', '\u{11d97}'),
('\u{11ef3}', '\u{11ef4}'),
+ ('\u{11f00}', '\u{11f01}'),
+ ('\u{11f36}', '\u{11f3a}'),
+ ('\u{11f40}', '\u{11f40}'),
+ ('\u{11f42}', '\u{11f42}'),
+ ('\u{13440}', '\u{13440}'),
+ ('\u{13447}', '\u{13455}'),
('\u{16af0}', '\u{16af4}'),
('\u{16b30}', '\u{16b36}'),
('\u{16f4f}', '\u{16f4f}'),
@@ -393,9 +401,11 @@ pub const EXTEND: &'static [(char, char)] = &[
('\u{1e01b}', '\u{1e021}'),
('\u{1e023}', '\u{1e024}'),
('\u{1e026}', '\u{1e02a}'),
+ ('\u{1e08f}', '\u{1e08f}'),
('\u{1e130}', '\u{1e136}'),
('\u{1e2ae}', '\u{1e2ae}'),
('\u{1e2ec}', '\u{1e2ef}'),
+ ('\u{1e4ec}', '\u{1e4ef}'),
('\u{1e8d0}', '\u{1e8d6}'),
('\u{1e944}', '\u{1e94a}'),
('🏻', '🏿'),
@@ -1226,6 +1236,7 @@ pub const PREPEND: &'static [(char, char)] = &[
('𑨺', '𑨺'),
('𑪄', '𑪉'),
('𑵆', '𑵆'),
+ ('𑼂', '𑼂'),
];
pub const REGIONAL_INDICATOR: &'static [(char, char)] = &[('🇦', '🇿')];
@@ -1262,6 +1273,7 @@ pub const SPACINGMARK: &'static [(char, char)] = &[
('ೃ', 'ೄ'),
('ೇ', 'ೈ'),
('ೊ', 'ೋ'),
+ ('ೳ', 'ೳ'),
('ം', 'ഃ'),
('ി', 'ീ'),
('െ', 'ൈ'),
@@ -1387,6 +1399,10 @@ pub const SPACINGMARK: &'static [(char, char)] = &[
('𑶓', '𑶔'),
('𑶖', '𑶖'),
('𑻵', '𑻶'),
+ ('𑼃', '𑼃'),
+ ('𑼴', '𑼵'),
+ ('𑼾', '𑼿'),
+ ('𑽁', '𑽁'),
('𖽑', '𖾇'),
('𖿰', '𖿱'),
('𝅦', '𝅦'),
diff --git a/vendor/regex-syntax/src/unicode_tables/perl_decimal.rs b/vendor/regex-syntax/src/unicode_tables/perl_decimal.rs
index 9a14e4395..4f4c08a12 100644
--- a/vendor/regex-syntax/src/unicode_tables/perl_decimal.rs
+++ b/vendor/regex-syntax/src/unicode_tables/perl_decimal.rs
@@ -1,10 +1,10 @@
// DO NOT EDIT THIS FILE. IT WAS AUTOMATICALLY GENERATED BY:
//
-// ucd-generate general-category /tmp/ucd --chars --include decimalnumber
+// ucd-generate general-category ucd-15.0.0 --chars --include decimalnumber
//
-// Unicode version: 14.0.0.
+// Unicode version: 15.0.0.
//
-// ucd-generate 0.2.11 is available on crates.io.
+// ucd-generate 0.2.14 is available on crates.io.
pub const BY_NAME: &'static [(&'static str, &'static [(char, char)])] =
&[("Decimal_Number", DECIMAL_NUMBER)];
@@ -64,12 +64,14 @@ pub const DECIMAL_NUMBER: &'static [(char, char)] = &[
('𑱐', '𑱙'),
('𑵐', '𑵙'),
('𑶠', '𑶩'),
+ ('𑽐', '𑽙'),
('𖩠', '𖩩'),
('𖫀', '𖫉'),
('𖭐', '𖭙'),
('𝟎', '𝟿'),
('𞅀', '𞅉'),
('𞋰', '𞋹'),
+ ('𞓰', '𞓹'),
('𞥐', '𞥙'),
('🯰', '🯹'),
];
diff --git a/vendor/regex-syntax/src/unicode_tables/perl_space.rs b/vendor/regex-syntax/src/unicode_tables/perl_space.rs
index bb69ce1ba..174169579 100644
--- a/vendor/regex-syntax/src/unicode_tables/perl_space.rs
+++ b/vendor/regex-syntax/src/unicode_tables/perl_space.rs
@@ -1,10 +1,10 @@
// DO NOT EDIT THIS FILE. IT WAS AUTOMATICALLY GENERATED BY:
//
-// ucd-generate property-bool /tmp/ucd --chars --include whitespace
+// ucd-generate property-bool ucd-15.0.0 --chars --include whitespace
//
-// Unicode version: 14.0.0.
+// Unicode version: 15.0.0.
//
-// ucd-generate 0.2.11 is available on crates.io.
+// ucd-generate 0.2.14 is available on crates.io.
pub const BY_NAME: &'static [(&'static str, &'static [(char, char)])] =
&[("White_Space", WHITE_SPACE)];
diff --git a/vendor/regex-syntax/src/unicode_tables/perl_word.rs b/vendor/regex-syntax/src/unicode_tables/perl_word.rs
index 2c8171b2b..c1b66bd9a 100644
--- a/vendor/regex-syntax/src/unicode_tables/perl_word.rs
+++ b/vendor/regex-syntax/src/unicode_tables/perl_word.rs
@@ -1,10 +1,10 @@
// DO NOT EDIT THIS FILE. IT WAS AUTOMATICALLY GENERATED BY:
//
-// ucd-generate perl-word /tmp/ucd --chars
+// ucd-generate perl-word ucd-15.0.0 --chars
//
-// Unicode version: 14.0.0.
+// Unicode version: 15.0.0.
//
-// ucd-generate 0.2.11 is available on crates.io.
+// ucd-generate 0.2.14 is available on crates.io.
pub const PERL_WORD: &'static [(char, char)] = &[
('0', '9'),
@@ -164,7 +164,7 @@ pub const PERL_WORD: &'static [(char, char)] = &[
('ೝ', 'ೞ'),
('ೠ', '\u{ce3}'),
('೦', '೯'),
- ('ೱ', 'ೲ'),
+ ('ೱ', 'ೳ'),
('\u{d00}', 'ഌ'),
('എ', 'ഐ'),
('ഒ', '\u{d44}'),
@@ -197,7 +197,7 @@ pub const PERL_WORD: &'static [(char, char)] = &[
('ວ', 'ຽ'),
('ເ', 'ໄ'),
('ໆ', 'ໆ'),
- ('\u{ec8}', '\u{ecd}'),
+ ('\u{ec8}', '\u{ece}'),
('໐', '໙'),
('ໜ', 'ໟ'),
('ༀ', 'ༀ'),
@@ -511,7 +511,7 @@ pub const PERL_WORD: &'static [(char, char)] = &[
('𐺀', '𐺩'),
('\u{10eab}', '\u{10eac}'),
('𐺰', '𐺱'),
- ('𐼀', '𐼜'),
+ ('\u{10efd}', '𐼜'),
('𐼧', '𐼧'),
('𐼰', '\u{10f50}'),
('𐽰', '\u{10f85}'),
@@ -534,7 +534,7 @@ pub const PERL_WORD: &'static [(char, char)] = &[
('𑇜', '𑇜'),
('𑈀', '𑈑'),
('𑈓', '\u{11237}'),
- ('\u{1123e}', '\u{1123e}'),
+ ('\u{1123e}', '\u{11241}'),
('𑊀', '𑊆'),
('𑊈', '𑊈'),
('𑊊', '𑊍'),
@@ -615,12 +615,17 @@ pub const PERL_WORD: &'static [(char, char)] = &[
('𑶓', '𑶘'),
('𑶠', '𑶩'),
('𑻠', '𑻶'),
+ ('\u{11f00}', '𑼐'),
+ ('𑼒', '\u{11f3a}'),
+ ('𑼾', '\u{11f42}'),
+ ('𑽐', '𑽙'),
('𑾰', '𑾰'),
('𒀀', '𒎙'),
('𒐀', '𒑮'),
('𒒀', '𒕃'),
('𒾐', '𒿰'),
- ('𓀀', '𓐮'),
+ ('𓀀', '𓐯'),
+ ('\u{13440}', '\u{13455}'),
('𔐀', '𔙆'),
('𖠀', '𖨸'),
('𖩀', '𖩞'),
@@ -648,7 +653,9 @@ pub const PERL_WORD: &'static [(char, char)] = &[
('𚿵', '𚿻'),
('𚿽', '𚿾'),
('𛀀', '𛄢'),
+ ('𛄲', '𛄲'),
('𛅐', '𛅒'),
+ ('𛅕', '𛅕'),
('𛅤', '𛅧'),
('𛅰', '𛋻'),
('𛰀', '𛱪'),
@@ -702,17 +709,21 @@ pub const PERL_WORD: &'static [(char, char)] = &[
('\u{1da9b}', '\u{1da9f}'),
('\u{1daa1}', '\u{1daaf}'),
('𝼀', '𝼞'),
+ ('𝼥', '𝼪'),
('\u{1e000}', '\u{1e006}'),
('\u{1e008}', '\u{1e018}'),
('\u{1e01b}', '\u{1e021}'),
('\u{1e023}', '\u{1e024}'),
('\u{1e026}', '\u{1e02a}'),
+ ('𞀰', '𞁭'),
+ ('\u{1e08f}', '\u{1e08f}'),
('𞄀', '𞄬'),
('\u{1e130}', '𞄽'),
('𞅀', '𞅉'),
('𞅎', '𞅎'),
('𞊐', '\u{1e2ae}'),
('𞋀', '𞋹'),
+ ('𞓐', '𞓹'),
('𞟠', '𞟦'),
('𞟨', '𞟫'),
('𞟭', '𞟮'),
@@ -759,11 +770,12 @@ pub const PERL_WORD: &'static [(char, char)] = &[
('🅰', '🆉'),
('🯰', '🯹'),
('𠀀', '𪛟'),
- ('𪜀', '𫜸'),
+ ('𪜀', '𫜹'),
('𫝀', '𫠝'),
('𫠠', '𬺡'),
('𬺰', '𮯠'),
('丽', '𪘀'),
('𰀀', '𱍊'),
+ ('𱍐', '𲎯'),
('\u{e0100}', '\u{e01ef}'),
];
diff --git a/vendor/regex-syntax/src/unicode_tables/property_bool.rs b/vendor/regex-syntax/src/unicode_tables/property_bool.rs
index 8fb211030..a3e84b519 100644
--- a/vendor/regex-syntax/src/unicode_tables/property_bool.rs
+++ b/vendor/regex-syntax/src/unicode_tables/property_bool.rs
@@ -1,10 +1,10 @@
// DO NOT EDIT THIS FILE. IT WAS AUTOMATICALLY GENERATED BY:
//
-// ucd-generate property-bool /tmp/ucd --chars
+// ucd-generate property-bool ucd-15.0.0 --chars
//
-// Unicode version: 14.0.0.
+// Unicode version: 15.0.0.
//
-// ucd-generate 0.2.11 is available on crates.io.
+// ucd-generate 0.2.14 is available on crates.io.
pub const BY_NAME: &'static [(&'static str, &'static [(char, char)])] = &[
("ASCII_Hex_Digit", ASCII_HEX_DIGIT),
@@ -207,8 +207,7 @@ pub const ALPHABETIC: &'static [(char, char)] = &[
('ொ', 'ௌ'),
('ௐ', 'ௐ'),
('\u{bd7}', '\u{bd7}'),
- ('\u{c00}', 'ః'),
- ('అ', 'ఌ'),
+ ('\u{c00}', 'ఌ'),
('ఎ', 'ఐ'),
('ఒ', 'న'),
('ప', 'హ'),
@@ -231,7 +230,7 @@ pub const ALPHABETIC: &'static [(char, char)] = &[
('\u{cd5}', '\u{cd6}'),
('ೝ', 'ೞ'),
('ೠ', '\u{ce3}'),
- ('ೱ', 'ೲ'),
+ ('ೱ', 'ೳ'),
('\u{d00}', 'ഌ'),
('എ', 'ഐ'),
('ഒ', 'ഺ'),
@@ -269,7 +268,7 @@ pub const ALPHABETIC: &'static [(char, char)] = &[
('ༀ', 'ༀ'),
('ཀ', 'ཇ'),
('ཉ', 'ཬ'),
- ('\u{f71}', '\u{f81}'),
+ ('\u{f71}', '\u{f83}'),
('ྈ', '\u{f97}'),
('\u{f99}', '\u{fbc}'),
('က', '\u{1036}'),
@@ -571,7 +570,7 @@ pub const ALPHABETIC: &'static [(char, char)] = &[
('𐿠', '𐿶'),
('𑀀', '\u{11045}'),
('𑁱', '𑁵'),
- ('𑂂', '𑂸'),
+ ('\u{11080}', '𑂸'),
('\u{110c2}', '\u{110c2}'),
('𑃐', '𑃨'),
('\u{11100}', '\u{11132}'),
@@ -586,7 +585,7 @@ pub const ALPHABETIC: &'static [(char, char)] = &[
('𑈀', '𑈑'),
('𑈓', '\u{11234}'),
('\u{11237}', '\u{11237}'),
- ('\u{1123e}', '\u{1123e}'),
+ ('\u{1123e}', '\u{11241}'),
('𑊀', '𑊆'),
('𑊈', '𑊈'),
('𑊊', '𑊍'),
@@ -666,12 +665,16 @@ pub const ALPHABETIC: &'static [(char, char)] = &[
('𑶓', '𑶖'),
('𑶘', '𑶘'),
('𑻠', '𑻶'),
+ ('\u{11f00}', '𑼐'),
+ ('𑼒', '\u{11f3a}'),
+ ('𑼾', '\u{11f40}'),
('𑾰', '𑾰'),
('𒀀', '𒎙'),
('𒐀', '𒑮'),
('𒒀', '𒕃'),
('𒾐', '𒿰'),
- ('𓀀', '𓐮'),
+ ('𓀀', '𓐯'),
+ ('𓑁', '𓑆'),
('𔐀', '𔙆'),
('𖠀', '𖨸'),
('𖩀', '𖩞'),
@@ -695,7 +698,9 @@ pub const ALPHABETIC: &'static [(char, char)] = &[
('𚿵', '𚿻'),
('𚿽', '𚿾'),
('𛀀', '𛄢'),
+ ('𛄲', '𛄲'),
('𛅐', '𛅒'),
+ ('𛅕', '𛅕'),
('𛅤', '𛅧'),
('𛅰', '𛋻'),
('𛰀', '𛱪'),
@@ -734,16 +739,20 @@ pub const ALPHABETIC: &'static [(char, char)] = &[
('𝞪', '𝟂'),
('𝟄', '𝟋'),
('𝼀', '𝼞'),
+ ('𝼥', '𝼪'),
('\u{1e000}', '\u{1e006}'),
('\u{1e008}', '\u{1e018}'),
('\u{1e01b}', '\u{1e021}'),
('\u{1e023}', '\u{1e024}'),
('\u{1e026}', '\u{1e02a}'),
+ ('𞀰', '𞁭'),
+ ('\u{1e08f}', '\u{1e08f}'),
('𞄀', '𞄬'),
('𞄷', '𞄽'),
('𞅎', '𞅎'),
('𞊐', '𞊭'),
('𞋀', '𞋫'),
+ ('𞓐', '𞓫'),
('𞟠', '𞟦'),
('𞟨', '𞟫'),
('𞟭', '𞟮'),
@@ -789,12 +798,13 @@ pub const ALPHABETIC: &'static [(char, char)] = &[
('🅐', '🅩'),
('🅰', '🆉'),
('𠀀', '𪛟'),
- ('𪜀', '𫜸'),
+ ('𪜀', '𫜹'),
('𫝀', '𫠝'),
('𫠠', '𬺡'),
('𬺰', '𮯠'),
('丽', '𪘀'),
('𰀀', '𱍊'),
+ ('𱍐', '𲎯'),
];
pub const BIDI_CONTROL: &'static [(char, char)] = &[
@@ -1035,7 +1045,7 @@ pub const CASE_IGNORABLE: &'static [(char, char)] = &[
('\u{eb1}', '\u{eb1}'),
('\u{eb4}', '\u{ebc}'),
('ໆ', 'ໆ'),
- ('\u{ec8}', '\u{ecd}'),
+ ('\u{ec8}', '\u{ece}'),
('\u{f18}', '\u{f19}'),
('\u{f35}', '\u{f35}'),
('\u{f37}', '\u{f37}'),
@@ -1224,6 +1234,7 @@ pub const CASE_IGNORABLE: &'static [(char, char)] = &[
('\u{10ae5}', '\u{10ae6}'),
('\u{10d24}', '\u{10d27}'),
('\u{10eab}', '\u{10eac}'),
+ ('\u{10efd}', '\u{10eff}'),
('\u{10f46}', '\u{10f50}'),
('\u{10f82}', '\u{10f85}'),
('\u{11001}', '\u{11001}'),
@@ -1248,6 +1259,7 @@ pub const CASE_IGNORABLE: &'static [(char, char)] = &[
('\u{11234}', '\u{11234}'),
('\u{11236}', '\u{11237}'),
('\u{1123e}', '\u{1123e}'),
+ ('\u{11241}', '\u{11241}'),
('\u{112df}', '\u{112df}'),
('\u{112e3}', '\u{112ea}'),
('\u{11300}', '\u{11301}'),
@@ -1309,7 +1321,12 @@ pub const CASE_IGNORABLE: &'static [(char, char)] = &[
('\u{11d95}', '\u{11d95}'),
('\u{11d97}', '\u{11d97}'),
('\u{11ef3}', '\u{11ef4}'),
- ('\u{13430}', '\u{13438}'),
+ ('\u{11f00}', '\u{11f01}'),
+ ('\u{11f36}', '\u{11f3a}'),
+ ('\u{11f40}', '\u{11f40}'),
+ ('\u{11f42}', '\u{11f42}'),
+ ('\u{13430}', '\u{13440}'),
+ ('\u{13447}', '\u{13455}'),
('\u{16af0}', '\u{16af4}'),
('\u{16b30}', '\u{16b36}'),
('𖭀', '𖭃'),
@@ -1340,9 +1357,12 @@ pub const CASE_IGNORABLE: &'static [(char, char)] = &[
('\u{1e01b}', '\u{1e021}'),
('\u{1e023}', '\u{1e024}'),
('\u{1e026}', '\u{1e02a}'),
+ ('𞀰', '𞁭'),
+ ('\u{1e08f}', '\u{1e08f}'),
('\u{1e130}', '𞄽'),
('\u{1e2ae}', '\u{1e2ae}'),
('\u{1e2ec}', '\u{1e2ef}'),
+ ('𞓫', '\u{1e4ef}'),
('\u{1e8d0}', '\u{1e8d6}'),
('\u{1e944}', '𞥋'),
('🏻', '🏿'),
@@ -1383,7 +1403,7 @@ pub const CASED: &'static [(char, char)] = &[
('Ⴧ', 'Ⴧ'),
('Ⴭ', 'Ⴭ'),
('ა', 'ჺ'),
- ('ჽ', 'ჿ'),
+ ('ჼ', 'ჿ'),
('Ꭰ', 'Ᏽ'),
('ᏸ', 'ᏽ'),
('ᲀ', 'ᲈ'),
@@ -1443,10 +1463,10 @@ pub const CASED: &'static [(char, char)] = &[
('Ꟑ', 'ꟑ'),
('ꟓ', 'ꟓ'),
('ꟕ', 'ꟙ'),
- ('Ꟶ', 'ꟶ'),
+ ('ꟲ', 'ꟶ'),
('ꟸ', 'ꟺ'),
('ꬰ', 'ꭚ'),
- ('ꭜ', 'ꭨ'),
+ ('ꭜ', 'ꭩ'),
('ꭰ', 'ꮿ'),
('ff', 'st'),
('ﬓ', 'ﬗ'),
@@ -1503,6 +1523,8 @@ pub const CASED: &'static [(char, char)] = &[
('𝟄', '𝟋'),
('𝼀', '𝼉'),
('𝼋', '𝼞'),
+ ('𝼥', '𝼪'),
+ ('𞀰', '𞁭'),
('𞤀', '𞥃'),
('🄰', '🅉'),
('🅐', '🅩'),
@@ -4336,6 +4358,7 @@ pub const DIACRITIC: &'static [(char, char)] = &[
('𐞲', '𐞺'),
('\u{10ae5}', '\u{10ae6}'),
('𐴢', '\u{10d27}'),
+ ('\u{10efd}', '\u{10eff}'),
('\u{10f46}', '\u{10f50}'),
('\u{10f82}', '\u{10f85}'),
('\u{11046}', '\u{11046}'),
@@ -4369,6 +4392,7 @@ pub const DIACRITIC: &'static [(char, char)] = &[
('\u{11d42}', '\u{11d42}'),
('\u{11d44}', '\u{11d45}'),
('\u{11d97}', '\u{11d97}'),
+ ('\u{13447}', '\u{13455}'),
('\u{16af0}', '\u{16af4}'),
('\u{16b30}', '\u{16b36}'),
('\u{16f8f}', '𖾟'),
@@ -4383,6 +4407,7 @@ pub const DIACRITIC: &'static [(char, char)] = &[
('\u{1d17b}', '\u{1d182}'),
('\u{1d185}', '\u{1d18b}'),
('\u{1d1aa}', '\u{1d1ad}'),
+ ('𞀰', '𞁭'),
('\u{1e130}', '\u{1e136}'),
('\u{1e2ae}', '\u{1e2ae}'),
('\u{1e2ec}', '\u{1e2ef}'),
@@ -4526,7 +4551,7 @@ pub const EMOJI: &'static [(char, char)] = &[
('🚀', '🛅'),
('🛋', '🛒'),
('🛕', '🛗'),
- ('🛝', '🛥'),
+ ('🛜', '🛥'),
('🛩', '🛩'),
('🛫', '🛬'),
('🛰', '🛰'),
@@ -4536,15 +4561,13 @@ pub const EMOJI: &'static [(char, char)] = &[
('🤌', '🤺'),
('🤼', '🥅'),
('🥇', '🧿'),
- ('🩰', '🩴'),
- ('🩸', '🩼'),
- ('🪀', '🪆'),
- ('🪐', '🪬'),
- ('🪰', '🪺'),
- ('🫀', '🫅'),
- ('🫐', '🫙'),
- ('🫠', '🫧'),
- ('🫰', '🫶'),
+ ('🩰', '🩼'),
+ ('🪀', '🪈'),
+ ('🪐', '🪽'),
+ ('🪿', '🫅'),
+ ('🫎', '🫛'),
+ ('🫠', '🫨'),
+ ('🫰', '🫸'),
];
pub const EMOJI_COMPONENT: &'static [(char, char)] = &[
@@ -4602,7 +4625,7 @@ pub const EMOJI_MODIFIER_BASE: &'static [(char, char)] = &[
('🧍', '🧏'),
('🧑', '🧝'),
('🫃', '🫅'),
- ('🫰', '🫶'),
+ ('🫰', '🫸'),
];
pub const EMOJI_PRESENTATION: &'static [(char, char)] = &[
@@ -4672,7 +4695,7 @@ pub const EMOJI_PRESENTATION: &'static [(char, char)] = &[
('🛌', '🛌'),
('🛐', '🛒'),
('🛕', '🛗'),
- ('🛝', '🛟'),
+ ('🛜', '🛟'),
('🛫', '🛬'),
('🛴', '🛼'),
('🟠', '🟫'),
@@ -4680,15 +4703,13 @@ pub const EMOJI_PRESENTATION: &'static [(char, char)] = &[
('🤌', '🤺'),
('🤼', '🥅'),
('🥇', '🧿'),
- ('🩰', '🩴'),
- ('🩸', '🩼'),
- ('🪀', '🪆'),
- ('🪐', '🪬'),
- ('🪰', '🪺'),
- ('🫀', '🫅'),
- ('🫐', '🫙'),
- ('🫠', '🫧'),
- ('🫰', '🫶'),
+ ('🩰', '🩼'),
+ ('🪀', '🪈'),
+ ('🪐', '🪽'),
+ ('🪿', '🫅'),
+ ('🫎', '🫛'),
+ ('🫠', '🫨'),
+ ('🫰', '🫸'),
];
pub const EXTENDED_PICTOGRAPHIC: &'static [(char, char)] = &[
@@ -4759,7 +4780,7 @@ pub const EXTENDED_PICTOGRAPHIC: &'static [(char, char)] = &[
('🐀', '🔽'),
('🕆', '🙏'),
('🚀', '\u{1f6ff}'),
- ('\u{1f774}', '\u{1f77f}'),
+ ('🝴', '🝿'),
('🟕', '\u{1f7ff}'),
('\u{1f80c}', '\u{1f80f}'),
('\u{1f848}', '\u{1f84f}'),
@@ -4958,7 +4979,7 @@ pub const GRAPHEME_BASE: &'static [(char, char)] = &[
('ೝ', 'ೞ'),
('ೠ', 'ೡ'),
('೦', '೯'),
- ('ೱ', 'ೲ'),
+ ('ೱ', 'ೳ'),
('ം', 'ഌ'),
('എ', 'ഐ'),
('ഒ', 'ഺ'),
@@ -5377,6 +5398,7 @@ pub const GRAPHEME_BASE: &'static [(char, char)] = &[
('𑈲', '𑈳'),
('𑈵', '𑈵'),
('𑈸', '𑈽'),
+ ('𑈿', '𑉀'),
('𑊀', '𑊆'),
('𑊈', '𑊈'),
('𑊊', '𑊍'),
@@ -5463,6 +5485,7 @@ pub const GRAPHEME_BASE: &'static [(char, char)] = &[
('𑪗', '𑪗'),
('𑪚', '𑪢'),
('𑪰', '𑫸'),
+ ('𑬀', '𑬉'),
('𑰀', '𑰈'),
('𑰊', '𑰯'),
('𑰾', '𑰾'),
@@ -5486,6 +5509,11 @@ pub const GRAPHEME_BASE: &'static [(char, char)] = &[
('𑶠', '𑶩'),
('𑻠', '𑻲'),
('𑻵', '𑻸'),
+ ('𑼂', '𑼐'),
+ ('𑼒', '𑼵'),
+ ('𑼾', '𑼿'),
+ ('𑽁', '𑽁'),
+ ('𑽃', '𑽙'),
('𑾰', '𑾰'),
('𑿀', '𑿱'),
('𑿿', '𒎙'),
@@ -5493,7 +5521,8 @@ pub const GRAPHEME_BASE: &'static [(char, char)] = &[
('𒑰', '𒑴'),
('𒒀', '𒕃'),
('𒾐', '𒿲'),
- ('𓀀', '𓐮'),
+ ('𓀀', '𓐯'),
+ ('𓑁', '𓑆'),
('𔐀', '𔙆'),
('𖠀', '𖨸'),
('𖩀', '𖩞'),
@@ -5521,7 +5550,9 @@ pub const GRAPHEME_BASE: &'static [(char, char)] = &[
('𚿵', '𚿻'),
('𚿽', '𚿾'),
('𛀀', '𛄢'),
+ ('𛄲', '𛄲'),
('𛅐', '𛅒'),
+ ('𛅕', '𛅕'),
('𛅤', '𛅧'),
('𛅰', '𛋻'),
('𛰀', '𛱪'),
@@ -5541,6 +5572,7 @@ pub const GRAPHEME_BASE: &'static [(char, char)] = &[
('𝆮', '𝇪'),
('𝈀', '𝉁'),
('𝉅', '𝉅'),
+ ('𝋀', '𝋓'),
('𝋠', '𝋳'),
('𝌀', '𝍖'),
('𝍠', '𝍸'),
@@ -5570,6 +5602,8 @@ pub const GRAPHEME_BASE: &'static [(char, char)] = &[
('𝩶', '𝪃'),
('𝪅', '𝪋'),
('𝼀', '𝼞'),
+ ('𝼥', '𝼪'),
+ ('𞀰', '𞁭'),
('𞄀', '𞄬'),
('𞄷', '𞄽'),
('𞅀', '𞅉'),
@@ -5578,6 +5612,8 @@ pub const GRAPHEME_BASE: &'static [(char, char)] = &[
('𞋀', '𞋫'),
('𞋰', '𞋹'),
('𞋿', '𞋿'),
+ ('𞓐', '𞓫'),
+ ('𞓰', '𞓹'),
('𞟠', '𞟦'),
('𞟨', '𞟫'),
('𞟭', '𞟮'),
@@ -5637,10 +5673,10 @@ pub const GRAPHEME_BASE: &'static [(char, char)] = &[
('🉐', '🉑'),
('🉠', '🉥'),
('🌀', '🛗'),
- ('🛝', '🛬'),
+ ('🛜', '🛬'),
('🛰', '🛼'),
- ('🜀', '🝳'),
- ('🞀', '🟘'),
+ ('🜀', '🝶'),
+ ('🝻', '🟙'),
('🟠', '🟫'),
('🟰', '🟰'),
('🠀', '🠋'),
@@ -5651,25 +5687,24 @@ pub const GRAPHEME_BASE: &'static [(char, char)] = &[
('🢰', '🢱'),
('🤀', '🩓'),
('🩠', '🩭'),
- ('🩰', '🩴'),
- ('🩸', '🩼'),
- ('🪀', '🪆'),
- ('🪐', '🪬'),
- ('🪰', '🪺'),
- ('🫀', '🫅'),
- ('🫐', '🫙'),
- ('🫠', '🫧'),
- ('🫰', '🫶'),
+ ('🩰', '🩼'),
+ ('🪀', '🪈'),
+ ('🪐', '🪽'),
+ ('🪿', '🫅'),
+ ('🫎', '🫛'),
+ ('🫠', '🫨'),
+ ('🫰', '🫸'),
('🬀', '🮒'),
('🮔', '🯊'),
('🯰', '🯹'),
('𠀀', '𪛟'),
- ('𪜀', '𫜸'),
+ ('𪜀', '𫜹'),
('𫝀', '𫠝'),
('𫠠', '𬺡'),
('𬺰', '𮯠'),
('丽', '𪘀'),
('𰀀', '𱍊'),
+ ('𱍐', '𲎯'),
];
pub const GRAPHEME_EXTEND: &'static [(char, char)] = &[
@@ -5775,7 +5810,7 @@ pub const GRAPHEME_EXTEND: &'static [(char, char)] = &[
('\u{e47}', '\u{e4e}'),
('\u{eb1}', '\u{eb1}'),
('\u{eb4}', '\u{ebc}'),
- ('\u{ec8}', '\u{ecd}'),
+ ('\u{ec8}', '\u{ece}'),
('\u{f18}', '\u{f19}'),
('\u{f35}', '\u{f35}'),
('\u{f37}', '\u{f37}'),
@@ -5904,6 +5939,7 @@ pub const GRAPHEME_EXTEND: &'static [(char, char)] = &[
('\u{10ae5}', '\u{10ae6}'),
('\u{10d24}', '\u{10d27}'),
('\u{10eab}', '\u{10eac}'),
+ ('\u{10efd}', '\u{10eff}'),
('\u{10f46}', '\u{10f50}'),
('\u{10f82}', '\u{10f85}'),
('\u{11001}', '\u{11001}'),
@@ -5926,6 +5962,7 @@ pub const GRAPHEME_EXTEND: &'static [(char, char)] = &[
('\u{11234}', '\u{11234}'),
('\u{11236}', '\u{11237}'),
('\u{1123e}', '\u{1123e}'),
+ ('\u{11241}', '\u{11241}'),
('\u{112df}', '\u{112df}'),
('\u{112e3}', '\u{112ea}'),
('\u{11300}', '\u{11301}'),
@@ -5993,6 +6030,12 @@ pub const GRAPHEME_EXTEND: &'static [(char, char)] = &[
('\u{11d95}', '\u{11d95}'),
('\u{11d97}', '\u{11d97}'),
('\u{11ef3}', '\u{11ef4}'),
+ ('\u{11f00}', '\u{11f01}'),
+ ('\u{11f36}', '\u{11f3a}'),
+ ('\u{11f40}', '\u{11f40}'),
+ ('\u{11f42}', '\u{11f42}'),
+ ('\u{13440}', '\u{13440}'),
+ ('\u{13447}', '\u{13455}'),
('\u{16af0}', '\u{16af4}'),
('\u{16b30}', '\u{16b36}'),
('\u{16f4f}', '\u{16f4f}'),
@@ -6019,9 +6062,11 @@ pub const GRAPHEME_EXTEND: &'static [(char, char)] = &[
('\u{1e01b}', '\u{1e021}'),
('\u{1e023}', '\u{1e024}'),
('\u{1e026}', '\u{1e02a}'),
+ ('\u{1e08f}', '\u{1e08f}'),
('\u{1e130}', '\u{1e136}'),
('\u{1e2ae}', '\u{1e2ae}'),
('\u{1e2ec}', '\u{1e2ef}'),
+ ('\u{1e4ec}', '\u{1e4ef}'),
('\u{1e8d0}', '\u{1e8d6}'),
('\u{1e944}', '\u{1e94a}'),
('\u{e0020}', '\u{e007f}'),
@@ -6084,6 +6129,7 @@ pub const GRAPHEME_LINK: &'static [(char, char)] = &[
('\u{11c3f}', '\u{11c3f}'),
('\u{11d44}', '\u{11d45}'),
('\u{11d97}', '\u{11d97}'),
+ ('𑽁', '\u{11f42}'),
];
pub const HEX_DIGIT: &'static [(char, char)] = &[
@@ -6272,7 +6318,7 @@ pub const ID_CONTINUE: &'static [(char, char)] = &[
('ೝ', 'ೞ'),
('ೠ', '\u{ce3}'),
('೦', '೯'),
- ('ೱ', 'ೲ'),
+ ('ೱ', 'ೳ'),
('\u{d00}', 'ഌ'),
('എ', 'ഐ'),
('ഒ', '\u{d44}'),
@@ -6305,7 +6351,7 @@ pub const ID_CONTINUE: &'static [(char, char)] = &[
('ວ', 'ຽ'),
('ເ', 'ໄ'),
('ໆ', 'ໆ'),
- ('\u{ec8}', '\u{ecd}'),
+ ('\u{ec8}', '\u{ece}'),
('໐', '໙'),
('ໜ', 'ໟ'),
('ༀ', 'ༀ'),
@@ -6618,7 +6664,7 @@ pub const ID_CONTINUE: &'static [(char, char)] = &[
('𐺀', '𐺩'),
('\u{10eab}', '\u{10eac}'),
('𐺰', '𐺱'),
- ('𐼀', '𐼜'),
+ ('\u{10efd}', '𐼜'),
('𐼧', '𐼧'),
('𐼰', '\u{10f50}'),
('𐽰', '\u{10f85}'),
@@ -6641,7 +6687,7 @@ pub const ID_CONTINUE: &'static [(char, char)] = &[
('𑇜', '𑇜'),
('𑈀', '𑈑'),
('𑈓', '\u{11237}'),
- ('\u{1123e}', '\u{1123e}'),
+ ('\u{1123e}', '\u{11241}'),
('𑊀', '𑊆'),
('𑊈', '𑊈'),
('𑊊', '𑊍'),
@@ -6722,12 +6768,17 @@ pub const ID_CONTINUE: &'static [(char, char)] = &[
('𑶓', '𑶘'),
('𑶠', '𑶩'),
('𑻠', '𑻶'),
+ ('\u{11f00}', '𑼐'),
+ ('𑼒', '\u{11f3a}'),
+ ('𑼾', '\u{11f42}'),
+ ('𑽐', '𑽙'),
('𑾰', '𑾰'),
('𒀀', '𒎙'),
('𒐀', '𒑮'),
('𒒀', '𒕃'),
('𒾐', '𒿰'),
- ('𓀀', '𓐮'),
+ ('𓀀', '𓐯'),
+ ('\u{13440}', '\u{13455}'),
('𔐀', '𔙆'),
('𖠀', '𖨸'),
('𖩀', '𖩞'),
@@ -6755,7 +6806,9 @@ pub const ID_CONTINUE: &'static [(char, char)] = &[
('𚿵', '𚿻'),
('𚿽', '𚿾'),
('𛀀', '𛄢'),
+ ('𛄲', '𛄲'),
('𛅐', '𛅒'),
+ ('𛅕', '𛅕'),
('𛅤', '𛅧'),
('𛅰', '𛋻'),
('𛰀', '𛱪'),
@@ -6809,17 +6862,21 @@ pub const ID_CONTINUE: &'static [(char, char)] = &[
('\u{1da9b}', '\u{1da9f}'),
('\u{1daa1}', '\u{1daaf}'),
('𝼀', '𝼞'),
+ ('𝼥', '𝼪'),
('\u{1e000}', '\u{1e006}'),
('\u{1e008}', '\u{1e018}'),
('\u{1e01b}', '\u{1e021}'),
('\u{1e023}', '\u{1e024}'),
('\u{1e026}', '\u{1e02a}'),
+ ('𞀰', '𞁭'),
+ ('\u{1e08f}', '\u{1e08f}'),
('𞄀', '𞄬'),
('\u{1e130}', '𞄽'),
('𞅀', '𞅉'),
('𞅎', '𞅎'),
('𞊐', '\u{1e2ae}'),
('𞋀', '𞋹'),
+ ('𞓐', '𞓹'),
('𞟠', '𞟦'),
('𞟨', '𞟫'),
('𞟭', '𞟮'),
@@ -6863,12 +6920,13 @@ pub const ID_CONTINUE: &'static [(char, char)] = &[
('𞺫', '𞺻'),
('🯰', '🯹'),
('𠀀', '𪛟'),
- ('𪜀', '𫜸'),
+ ('𪜀', '𫜹'),
('𫝀', '𫠝'),
('𫠠', '𬺡'),
('𬺰', '𮯠'),
('丽', '𪘀'),
('𰀀', '𱍊'),
+ ('𱍐', '𲎯'),
('\u{e0100}', '\u{e01ef}'),
];
@@ -7341,6 +7399,7 @@ pub const ID_START: &'static [(char, char)] = &[
('𑇜', '𑇜'),
('𑈀', '𑈑'),
('𑈓', '𑈫'),
+ ('𑈿', '𑉀'),
('𑊀', '𑊆'),
('𑊈', '𑊈'),
('𑊊', '𑊍'),
@@ -7403,12 +7462,16 @@ pub const ID_START: &'static [(char, char)] = &[
('𑵪', '𑶉'),
('𑶘', '𑶘'),
('𑻠', '𑻲'),
+ ('𑼂', '𑼂'),
+ ('𑼄', '𑼐'),
+ ('𑼒', '𑼳'),
('𑾰', '𑾰'),
('𒀀', '𒎙'),
('𒐀', '𒑮'),
('𒒀', '𒕃'),
('𒾐', '𒿰'),
- ('𓀀', '𓐮'),
+ ('𓀀', '𓐯'),
+ ('𓑁', '𓑆'),
('𔐀', '𔙆'),
('𖠀', '𖨸'),
('𖩀', '𖩞'),
@@ -7431,7 +7494,9 @@ pub const ID_START: &'static [(char, char)] = &[
('𚿵', '𚿻'),
('𚿽', '𚿾'),
('𛀀', '𛄢'),
+ ('𛄲', '𛄲'),
('𛅐', '𛅒'),
+ ('𛅕', '𛅕'),
('𛅤', '𛅧'),
('𛅰', '𛋻'),
('𛰀', '𛱪'),
@@ -7469,11 +7534,14 @@ pub const ID_START: &'static [(char, char)] = &[
('𝞪', '𝟂'),
('𝟄', '𝟋'),
('𝼀', '𝼞'),
+ ('𝼥', '𝼪'),
+ ('𞀰', '𞁭'),
('𞄀', '𞄬'),
('𞄷', '𞄽'),
('𞅎', '𞅎'),
('𞊐', '𞊭'),
('𞋀', '𞋫'),
+ ('𞓐', '𞓫'),
('𞟠', '𞟦'),
('𞟨', '𞟫'),
('𞟭', '𞟮'),
@@ -7515,12 +7583,13 @@ pub const ID_START: &'static [(char, char)] = &[
('𞺥', '𞺩'),
('𞺫', '𞺻'),
('𠀀', '𪛟'),
- ('𪜀', '𫜸'),
+ ('𪜀', '𫜹'),
('𫝀', '𫠝'),
('𫠠', '𬺡'),
('𬺰', '𮯠'),
('丽', '𪘀'),
('𰀀', '𱍊'),
+ ('𱍐', '𲎯'),
];
pub const IDEOGRAPHIC: &'static [(char, char)] = &[
@@ -7537,12 +7606,13 @@ pub const IDEOGRAPHIC: &'static [(char, char)] = &[
('𘴀', '𘴈'),
('𛅰', '𛋻'),
('𠀀', '𪛟'),
- ('𪜀', '𫜸'),
+ ('𪜀', '𫜹'),
('𫝀', '𫠝'),
('𫠠', '𬺡'),
('𬺰', '𮯠'),
('丽', '𪘀'),
('𰀀', '𱍊'),
+ ('𱍐', '𲎯'),
];
pub const JOIN_CONTROL: &'static [(char, char)] = &[('\u{200c}', '\u{200d}')];
@@ -7834,7 +7904,7 @@ pub const LOWERCASE: &'static [(char, char)] = &[
('ԯ', 'ԯ'),
('ՠ', 'ֈ'),
('ა', 'ჺ'),
- ('ჽ', 'ჿ'),
+ ('ჼ', 'ჿ'),
('ᏸ', 'ᏽ'),
('ᲀ', 'ᲈ'),
('ᴀ', 'ᶿ'),
@@ -8174,10 +8244,11 @@ pub const LOWERCASE: &'static [(char, char)] = &[
('ꟕ', 'ꟕ'),
('ꟗ', 'ꟗ'),
('ꟙ', 'ꟙ'),
+ ('ꟲ', 'ꟴ'),
('ꟶ', 'ꟶ'),
('ꟸ', 'ꟺ'),
('ꬰ', 'ꭚ'),
- ('ꭜ', 'ꭨ'),
+ ('ꭜ', 'ꭩ'),
('ꭰ', 'ꮿ'),
('ff', 'st'),
('ﬓ', 'ﬗ'),
@@ -8225,6 +8296,8 @@ pub const LOWERCASE: &'static [(char, char)] = &[
('𝟋', '𝟋'),
('𝼀', '𝼉'),
('𝼋', '𝼞'),
+ ('𝼥', '𝼪'),
+ ('𞀰', '𞁭'),
('𞤢', '𞥃'),
];
@@ -8450,7 +8523,7 @@ pub const OTHER_ALPHABETIC: &'static [(char, char)] = &[
('ெ', 'ை'),
('ொ', 'ௌ'),
('\u{bd7}', '\u{bd7}'),
- ('\u{c00}', 'ః'),
+ ('\u{c00}', '\u{c04}'),
('\u{c3e}', 'ౄ'),
('\u{c46}', '\u{c48}'),
('\u{c4a}', '\u{c4c}'),
@@ -8462,6 +8535,7 @@ pub const OTHER_ALPHABETIC: &'static [(char, char)] = &[
('ೊ', '\u{ccc}'),
('\u{cd5}', '\u{cd6}'),
('\u{ce2}', '\u{ce3}'),
+ ('ೳ', 'ೳ'),
('\u{d00}', 'ഃ'),
('\u{d3e}', '\u{d44}'),
('െ', 'ൈ'),
@@ -8480,7 +8554,7 @@ pub const OTHER_ALPHABETIC: &'static [(char, char)] = &[
('\u{eb4}', '\u{eb9}'),
('\u{ebb}', '\u{ebc}'),
('\u{ecd}', '\u{ecd}'),
- ('\u{f71}', '\u{f81}'),
+ ('\u{f71}', '\u{f83}'),
('\u{f8d}', '\u{f97}'),
('\u{f99}', '\u{fbc}'),
('ါ', '\u{1036}'),
@@ -8553,7 +8627,7 @@ pub const OTHER_ALPHABETIC: &'static [(char, char)] = &[
('𑀀', '𑀂'),
('\u{11038}', '\u{11045}'),
('\u{11073}', '\u{11074}'),
- ('𑂂', '𑂂'),
+ ('\u{11080}', '𑂂'),
('𑂰', '𑂸'),
('\u{110c2}', '\u{110c2}'),
('\u{11100}', '\u{11102}'),
@@ -8565,6 +8639,7 @@ pub const OTHER_ALPHABETIC: &'static [(char, char)] = &[
('𑈬', '\u{11234}'),
('\u{11237}', '\u{11237}'),
('\u{1123e}', '\u{1123e}'),
+ ('\u{11241}', '\u{11241}'),
('\u{112df}', '\u{112e8}'),
('\u{11300}', '𑌃'),
('\u{1133e}', '𑍄'),
@@ -8610,6 +8685,10 @@ pub const OTHER_ALPHABETIC: &'static [(char, char)] = &[
('\u{11d90}', '\u{11d91}'),
('𑶓', '𑶖'),
('\u{11ef3}', '𑻶'),
+ ('\u{11f00}', '\u{11f01}'),
+ ('𑼃', '𑼃'),
+ ('𑼴', '\u{11f3a}'),
+ ('𑼾', '\u{11f40}'),
('\u{16f4f}', '\u{16f4f}'),
('𖽑', '𖾇'),
('\u{16f8f}', '\u{16f92}'),
@@ -8620,6 +8699,7 @@ pub const OTHER_ALPHABETIC: &'static [(char, char)] = &[
('\u{1e01b}', '\u{1e021}'),
('\u{1e023}', '\u{1e024}'),
('\u{1e026}', '\u{1e02a}'),
+ ('\u{1e08f}', '\u{1e08f}'),
('\u{1e947}', '\u{1e947}'),
('🄰', '🅉'),
('🅐', '🅩'),
@@ -8682,6 +8762,7 @@ pub const OTHER_LOWERCASE: &'static [(char, char)] = &[
('ˠ', 'ˤ'),
('\u{345}', '\u{345}'),
('ͺ', 'ͺ'),
+ ('ჼ', 'ჼ'),
('ᴬ', 'ᵪ'),
('ᵸ', 'ᵸ'),
('ᶛ', 'ᶿ'),
@@ -8693,12 +8774,15 @@ pub const OTHER_LOWERCASE: &'static [(char, char)] = &[
('ⱼ', 'ⱽ'),
('ꚜ', 'ꚝ'),
('ꝰ', 'ꝰ'),
+ ('ꟲ', 'ꟴ'),
('ꟸ', 'ꟹ'),
('ꭜ', 'ꭟ'),
+ ('ꭩ', 'ꭩ'),
('𐞀', '𐞀'),
('𐞃', '𐞅'),
('𐞇', '𐞰'),
('𐞲', '𐞺'),
+ ('𞀰', '𞁭'),
];
pub const OTHER_MATH: &'static [(char, char)] = &[
@@ -8984,6 +9068,7 @@ pub const SENTENCE_TERMINAL: &'static [(char, char)] = &[
('𑪛', '𑪜'),
('𑱁', '𑱂'),
('𑻷', '𑻸'),
+ ('𑽃', '𑽄'),
('𖩮', '𖩯'),
('𖫵', '𖫵'),
('𖬷', '𖬸'),
@@ -9026,6 +9111,8 @@ pub const SOFT_DOTTED: &'static [(char, char)] = &[
('𝙞', '𝙟'),
('𝚒', '𝚓'),
('𝼚', '𝼚'),
+ ('𞁌', '𞁍'),
+ ('𞁨', '𞁨'),
];
pub const TERMINAL_PUNCTUATION: &'static [(char, char)] = &[
@@ -9128,6 +9215,7 @@ pub const TERMINAL_PUNCTUATION: &'static [(char, char)] = &[
('𑱁', '𑱃'),
('𑱱', '𑱱'),
('𑻷', '𑻸'),
+ ('𑽃', '𑽄'),
('𒑰', '𒑴'),
('𖩮', '𖩯'),
('𖫵', '𖫵'),
@@ -9149,11 +9237,12 @@ pub const UNIFIED_IDEOGRAPH: &'static [(char, char)] = &[
('﨣', '﨤'),
('﨧', '﨩'),
('𠀀', '𪛟'),
- ('𪜀', '𫜸'),
+ ('𪜀', '𫜹'),
('𫝀', '𫠝'),
('𫠠', '𬺡'),
('𬺰', '𮯠'),
('𰀀', '𱍊'),
+ ('𱍐', '𲎯'),
];
pub const UPPERCASE: &'static [(char, char)] = &[
@@ -9989,7 +10078,7 @@ pub const XID_CONTINUE: &'static [(char, char)] = &[
('ೝ', 'ೞ'),
('ೠ', '\u{ce3}'),
('೦', '೯'),
- ('ೱ', 'ೲ'),
+ ('ೱ', 'ೳ'),
('\u{d00}', 'ഌ'),
('എ', 'ഐ'),
('ഒ', '\u{d44}'),
@@ -10022,7 +10111,7 @@ pub const XID_CONTINUE: &'static [(char, char)] = &[
('ວ', 'ຽ'),
('ເ', 'ໄ'),
('ໆ', 'ໆ'),
- ('\u{ec8}', '\u{ecd}'),
+ ('\u{ec8}', '\u{ece}'),
('໐', '໙'),
('ໜ', 'ໟ'),
('ༀ', 'ༀ'),
@@ -10342,7 +10431,7 @@ pub const XID_CONTINUE: &'static [(char, char)] = &[
('𐺀', '𐺩'),
('\u{10eab}', '\u{10eac}'),
('𐺰', '𐺱'),
- ('𐼀', '𐼜'),
+ ('\u{10efd}', '𐼜'),
('𐼧', '𐼧'),
('𐼰', '\u{10f50}'),
('𐽰', '\u{10f85}'),
@@ -10365,7 +10454,7 @@ pub const XID_CONTINUE: &'static [(char, char)] = &[
('𑇜', '𑇜'),
('𑈀', '𑈑'),
('𑈓', '\u{11237}'),
- ('\u{1123e}', '\u{1123e}'),
+ ('\u{1123e}', '\u{11241}'),
('𑊀', '𑊆'),
('𑊈', '𑊈'),
('𑊊', '𑊍'),
@@ -10446,12 +10535,17 @@ pub const XID_CONTINUE: &'static [(char, char)] = &[
('𑶓', '𑶘'),
('𑶠', '𑶩'),
('𑻠', '𑻶'),
+ ('\u{11f00}', '𑼐'),
+ ('𑼒', '\u{11f3a}'),
+ ('𑼾', '\u{11f42}'),
+ ('𑽐', '𑽙'),
('𑾰', '𑾰'),
('𒀀', '𒎙'),
('𒐀', '𒑮'),
('𒒀', '𒕃'),
('𒾐', '𒿰'),
- ('𓀀', '𓐮'),
+ ('𓀀', '𓐯'),
+ ('\u{13440}', '\u{13455}'),
('𔐀', '𔙆'),
('𖠀', '𖨸'),
('𖩀', '𖩞'),
@@ -10479,7 +10573,9 @@ pub const XID_CONTINUE: &'static [(char, char)] = &[
('𚿵', '𚿻'),
('𚿽', '𚿾'),
('𛀀', '𛄢'),
+ ('𛄲', '𛄲'),
('𛅐', '𛅒'),
+ ('𛅕', '𛅕'),
('𛅤', '𛅧'),
('𛅰', '𛋻'),
('𛰀', '𛱪'),
@@ -10533,17 +10629,21 @@ pub const XID_CONTINUE: &'static [(char, char)] = &[
('\u{1da9b}', '\u{1da9f}'),
('\u{1daa1}', '\u{1daaf}'),
('𝼀', '𝼞'),
+ ('𝼥', '𝼪'),
('\u{1e000}', '\u{1e006}'),
('\u{1e008}', '\u{1e018}'),
('\u{1e01b}', '\u{1e021}'),
('\u{1e023}', '\u{1e024}'),
('\u{1e026}', '\u{1e02a}'),
+ ('𞀰', '𞁭'),
+ ('\u{1e08f}', '\u{1e08f}'),
('𞄀', '𞄬'),
('\u{1e130}', '𞄽'),
('𞅀', '𞅉'),
('𞅎', '𞅎'),
('𞊐', '\u{1e2ae}'),
('𞋀', '𞋹'),
+ ('𞓐', '𞓹'),
('𞟠', '𞟦'),
('𞟨', '𞟫'),
('𞟭', '𞟮'),
@@ -10587,12 +10687,13 @@ pub const XID_CONTINUE: &'static [(char, char)] = &[
('𞺫', '𞺻'),
('🯰', '🯹'),
('𠀀', '𪛟'),
- ('𪜀', '𫜸'),
+ ('𪜀', '𫜹'),
('𫝀', '𫠝'),
('𫠠', '𬺡'),
('𬺰', '𮯠'),
('丽', '𪘀'),
('𰀀', '𱍊'),
+ ('𱍐', '𲎯'),
('\u{e0100}', '\u{e01ef}'),
];
@@ -11072,6 +11173,7 @@ pub const XID_START: &'static [(char, char)] = &[
('𑇜', '𑇜'),
('𑈀', '𑈑'),
('𑈓', '𑈫'),
+ ('𑈿', '𑉀'),
('𑊀', '𑊆'),
('𑊈', '𑊈'),
('𑊊', '𑊍'),
@@ -11134,12 +11236,16 @@ pub const XID_START: &'static [(char, char)] = &[
('𑵪', '𑶉'),
('𑶘', '𑶘'),
('𑻠', '𑻲'),
+ ('𑼂', '𑼂'),
+ ('𑼄', '𑼐'),
+ ('𑼒', '𑼳'),
('𑾰', '𑾰'),
('𒀀', '𒎙'),
('𒐀', '𒑮'),
('𒒀', '𒕃'),
('𒾐', '𒿰'),
- ('𓀀', '𓐮'),
+ ('𓀀', '𓐯'),
+ ('𓑁', '𓑆'),
('𔐀', '𔙆'),
('𖠀', '𖨸'),
('𖩀', '𖩞'),
@@ -11162,7 +11268,9 @@ pub const XID_START: &'static [(char, char)] = &[
('𚿵', '𚿻'),
('𚿽', '𚿾'),
('𛀀', '𛄢'),
+ ('𛄲', '𛄲'),
('𛅐', '𛅒'),
+ ('𛅕', '𛅕'),
('𛅤', '𛅧'),
('𛅰', '𛋻'),
('𛰀', '𛱪'),
@@ -11200,11 +11308,14 @@ pub const XID_START: &'static [(char, char)] = &[
('𝞪', '𝟂'),
('𝟄', '𝟋'),
('𝼀', '𝼞'),
+ ('𝼥', '𝼪'),
+ ('𞀰', '𞁭'),
('𞄀', '𞄬'),
('𞄷', '𞄽'),
('𞅎', '𞅎'),
('𞊐', '𞊭'),
('𞋀', '𞋫'),
+ ('𞓐', '𞓫'),
('𞟠', '𞟦'),
('𞟨', '𞟫'),
('𞟭', '𞟮'),
@@ -11246,10 +11357,11 @@ pub const XID_START: &'static [(char, char)] = &[
('𞺥', '𞺩'),
('𞺫', '𞺻'),
('𠀀', '𪛟'),
- ('𪜀', '𫜸'),
+ ('𪜀', '𫜹'),
('𫝀', '𫠝'),
('𫠠', '𬺡'),
('𬺰', '𮯠'),
('丽', '𪘀'),
('𰀀', '𱍊'),
+ ('𱍐', '𲎯'),
];
diff --git a/vendor/regex-syntax/src/unicode_tables/property_names.rs b/vendor/regex-syntax/src/unicode_tables/property_names.rs
index 3fce1bf03..599a123ae 100644
--- a/vendor/regex-syntax/src/unicode_tables/property_names.rs
+++ b/vendor/regex-syntax/src/unicode_tables/property_names.rs
@@ -1,10 +1,10 @@
// DO NOT EDIT THIS FILE. IT WAS AUTOMATICALLY GENERATED BY:
//
-// ucd-generate property-names /tmp/ucd
+// ucd-generate property-names ucd-15.0.0
//
-// Unicode version: 14.0.0.
+// Unicode version: 15.0.0.
//
-// ucd-generate 0.2.11 is available on crates.io.
+// ucd-generate 0.2.14 is available on crates.io.
pub const PROPERTY_NAMES: &'static [(&'static str, &'static str)] = &[
("age", "Age"),
diff --git a/vendor/regex-syntax/src/unicode_tables/property_values.rs b/vendor/regex-syntax/src/unicode_tables/property_values.rs
index 08a91477b..cb2d32fb7 100644
--- a/vendor/regex-syntax/src/unicode_tables/property_values.rs
+++ b/vendor/regex-syntax/src/unicode_tables/property_values.rs
@@ -1,10 +1,10 @@
// DO NOT EDIT THIS FILE. IT WAS AUTOMATICALLY GENERATED BY:
//
-// ucd-generate property-values /tmp/ucd --include gc,script,scx,age,gcb,wb,sb
+// ucd-generate property-values ucd-15.0.0 --include gc,script,scx,age,gcb,wb,sb
//
-// Unicode version: 14.0.0.
+// Unicode version: 15.0.0.
//
-// ucd-generate 0.2.11 is available on crates.io.
+// ucd-generate 0.2.14 is available on crates.io.
pub const PROPERTY_VALUES: &'static [(
&'static str,
@@ -20,6 +20,7 @@ pub const PROPERTY_VALUES: &'static [(
("12.1", "V12_1"),
("13.0", "V13_0"),
("14.0", "V14_0"),
+ ("15.0", "V15_0"),
("2.0", "V2_0"),
("2.1", "V2_1"),
("3.0", "V3_0"),
@@ -46,6 +47,7 @@ pub const PROPERTY_VALUES: &'static [(
("v121", "V12_1"),
("v130", "V13_0"),
("v140", "V14_0"),
+ ("v150", "V15_0"),
("v20", "V2_0"),
("v21", "V2_1"),
("v30", "V3_0"),
@@ -308,6 +310,7 @@ pub const PROPERTY_VALUES: &'static [(
("kannada", "Kannada"),
("katakana", "Katakana"),
("katakanaorhiragana", "Katakana_Or_Hiragana"),
+ ("kawi", "Kawi"),
("kayahli", "Kayah_Li"),
("khar", "Kharoshthi"),
("kharoshthi", "Kharoshthi"),
@@ -372,6 +375,8 @@ pub const PROPERTY_VALUES: &'static [(
("myanmar", "Myanmar"),
("mymr", "Myanmar"),
("nabataean", "Nabataean"),
+ ("nagm", "Nag_Mundari"),
+ ("nagmundari", "Nag_Mundari"),
("nand", "Nandinagari"),
("nandinagari", "Nandinagari"),
("narb", "Old_North_Arabian"),
@@ -634,6 +639,7 @@ pub const PROPERTY_VALUES: &'static [(
("kannada", "Kannada"),
("katakana", "Katakana"),
("katakanaorhiragana", "Katakana_Or_Hiragana"),
+ ("kawi", "Kawi"),
("kayahli", "Kayah_Li"),
("khar", "Kharoshthi"),
("kharoshthi", "Kharoshthi"),
@@ -698,6 +704,8 @@ pub const PROPERTY_VALUES: &'static [(
("myanmar", "Myanmar"),
("mymr", "Myanmar"),
("nabataean", "Nabataean"),
+ ("nagm", "Nag_Mundari"),
+ ("nagmundari", "Nag_Mundari"),
("nand", "Nandinagari"),
("nandinagari", "Nandinagari"),
("narb", "Old_North_Arabian"),
diff --git a/vendor/regex-syntax/src/unicode_tables/script.rs b/vendor/regex-syntax/src/unicode_tables/script.rs
index 3327b76ae..cc5c400dd 100644
--- a/vendor/regex-syntax/src/unicode_tables/script.rs
+++ b/vendor/regex-syntax/src/unicode_tables/script.rs
@@ -1,10 +1,10 @@
// DO NOT EDIT THIS FILE. IT WAS AUTOMATICALLY GENERATED BY:
//
-// ucd-generate script /tmp/ucd --chars
+// ucd-generate script ucd-15.0.0 --chars
//
-// Unicode version: 14.0.0.
+// Unicode version: 15.0.0.
//
-// ucd-generate 0.2.11 is available on crates.io.
+// ucd-generate 0.2.14 is available on crates.io.
pub const BY_NAME: &'static [(&'static str, &'static [(char, char)])] = &[
("Adlam", ADLAM),
@@ -69,6 +69,7 @@ pub const BY_NAME: &'static [(&'static str, &'static [(char, char)])] = &[
("Kaithi", KAITHI),
("Kannada", KANNADA),
("Katakana", KATAKANA),
+ ("Kawi", KAWI),
("Kayah_Li", KAYAH_LI),
("Kharoshthi", KHAROSHTHI),
("Khitan_Small_Script", KHITAN_SMALL_SCRIPT),
@@ -103,6 +104,7 @@ pub const BY_NAME: &'static [(&'static str, &'static [(char, char)])] = &[
("Multani", MULTANI),
("Myanmar", MYANMAR),
("Nabataean", NABATAEAN),
+ ("Nag_Mundari", NAG_MUNDARI),
("Nandinagari", NANDINAGARI),
("New_Tai_Lue", NEW_TAI_LUE),
("Newa", NEWA),
@@ -202,6 +204,7 @@ pub const ARABIC: &'static [(char, char)] = &[
('ﹰ', 'ﹴ'),
('ﹶ', 'ﻼ'),
('𐹠', '𐹾'),
+ ('\u{10efd}', '\u{10eff}'),
('𞸀', '𞸃'),
('𞸅', '𞸟'),
('𞸡', '𞸢'),
@@ -410,6 +413,7 @@ pub const COMMON: &'static [(char, char)] = &[
('𝆃', '𝆄'),
('𝆌', '𝆩'),
('𝆮', '𝇪'),
+ ('𝋀', '𝋓'),
('𝋠', '𝋳'),
('𝌀', '𝍖'),
('𝍠', '𝍸'),
@@ -450,10 +454,10 @@ pub const COMMON: &'static [(char, char)] = &[
('🉐', '🉑'),
('🉠', '🉥'),
('🌀', '🛗'),
- ('🛝', '🛬'),
+ ('🛜', '🛬'),
('🛰', '🛼'),
- ('🜀', '🝳'),
- ('🞀', '🟘'),
+ ('🜀', '🝶'),
+ ('🝻', '🟙'),
('🟠', '🟫'),
('🟰', '🟰'),
('🠀', '🠋'),
@@ -464,15 +468,13 @@ pub const COMMON: &'static [(char, char)] = &[
('🢰', '🢱'),
('🤀', '🩓'),
('🩠', '🩭'),
- ('🩰', '🩴'),
- ('🩸', '🩼'),
- ('🪀', '🪆'),
- ('🪐', '🪬'),
- ('🪰', '🪺'),
- ('🫀', '🫅'),
- ('🫐', '🫙'),
- ('🫠', '🫧'),
- ('🫰', '🫶'),
+ ('🩰', '🩼'),
+ ('🪀', '🪈'),
+ ('🪐', '🪽'),
+ ('🪿', '🫅'),
+ ('🫎', '🫛'),
+ ('🫠', '🫨'),
+ ('🫰', '🫸'),
('🬀', '🮒'),
('🮔', '🯊'),
('🯰', '🯹'),
@@ -500,6 +502,8 @@ pub const CYRILLIC: &'static [(char, char)] = &[
('\u{2de0}', '\u{2dff}'),
('Ꙁ', '\u{a69f}'),
('\u{fe2e}', '\u{fe2f}'),
+ ('𞀰', '𞁭'),
+ ('\u{1e08f}', '\u{1e08f}'),
];
pub const DESERET: &'static [(char, char)] = &[('𐐀', '𐑏')];
@@ -509,6 +513,7 @@ pub const DEVANAGARI: &'static [(char, char)] = &[
('\u{955}', '\u{963}'),
('०', 'ॿ'),
('\u{a8e0}', '\u{a8ff}'),
+ ('𑬀', '𑬉'),
];
pub const DIVES_AKURU: &'static [(char, char)] = &[
@@ -528,7 +533,7 @@ pub const DUPLOYAN: &'static [(char, char)] =
&[('𛰀', '𛱪'), ('𛱰', '𛱼'), ('𛲀', '𛲈'), ('𛲐', '𛲙'), ('𛲜', '𛲟')];
pub const EGYPTIAN_HIEROGLYPHS: &'static [(char, char)] =
- &[('𓀀', '𓐮'), ('\u{13430}', '\u{13438}')];
+ &[('𓀀', '\u{13455}')];
pub const ELBASAN: &'static [(char, char)] = &[('𐔀', '𐔧')];
@@ -714,12 +719,13 @@ pub const HAN: &'static [(char, char)] = &[
('𖿢', '𖿣'),
('𖿰', '𖿱'),
('𠀀', '𪛟'),
- ('𪜀', '𫜸'),
+ ('𪜀', '𫜹'),
('𫝀', '𫠝'),
('𫠠', '𬺡'),
('𬺰', '𮯠'),
('丽', '𪘀'),
('𰀀', '𱍊'),
+ ('𱍐', '𲎯'),
];
pub const HANGUL: &'static [(char, char)] = &[
@@ -759,8 +765,14 @@ pub const HEBREW: &'static [(char, char)] = &[
('צּ', 'ﭏ'),
];
-pub const HIRAGANA: &'static [(char, char)] =
- &[('ぁ', 'ゖ'), ('ゝ', 'ゟ'), ('𛀁', '𛄟'), ('𛅐', '𛅒'), ('🈀', '🈀')];
+pub const HIRAGANA: &'static [(char, char)] = &[
+ ('ぁ', 'ゖ'),
+ ('ゝ', 'ゟ'),
+ ('𛀁', '𛄟'),
+ ('𛄲', '𛄲'),
+ ('𛅐', '𛅒'),
+ ('🈀', '🈀'),
+];
pub const IMPERIAL_ARAMAIC: &'static [(char, char)] =
&[('𐡀', '𐡕'), ('𐡗', '𐡟')];
@@ -822,7 +834,7 @@ pub const KANNADA: &'static [(char, char)] = &[
('ೝ', 'ೞ'),
('ೠ', '\u{ce3}'),
('೦', '೯'),
- ('ೱ', 'ೲ'),
+ ('ೱ', 'ೳ'),
];
pub const KATAKANA: &'static [(char, char)] = &[
@@ -838,9 +850,13 @@ pub const KATAKANA: &'static [(char, char)] = &[
('𚿽', '𚿾'),
('𛀀', '𛀀'),
('𛄠', '𛄢'),
+ ('𛅕', '𛅕'),
('𛅤', '𛅧'),
];
+pub const KAWI: &'static [(char, char)] =
+ &[('\u{11f00}', '𑼐'), ('𑼒', '\u{11f3a}'), ('𑼾', '𑽙')];
+
pub const KAYAH_LI: &'static [(char, char)] = &[('꤀', '\u{a92d}'), ('꤯', '꤯')];
pub const KHAROSHTHI: &'static [(char, char)] = &[
@@ -860,7 +876,7 @@ pub const KHITAN_SMALL_SCRIPT: &'static [(char, char)] =
pub const KHMER: &'static [(char, char)] =
&[('ក', '\u{17dd}'), ('០', '៩'), ('៰', '៹'), ('᧠', '᧿')];
-pub const KHOJKI: &'static [(char, char)] = &[('𑈀', '𑈑'), ('𑈓', '\u{1123e}')];
+pub const KHOJKI: &'static [(char, char)] = &[('𑈀', '𑈑'), ('𑈓', '\u{11241}')];
pub const KHUDAWADI: &'static [(char, char)] =
&[('𑊰', '\u{112ea}'), ('𑋰', '𑋹')];
@@ -874,7 +890,7 @@ pub const LAO: &'static [(char, char)] = &[
('ວ', 'ຽ'),
('ເ', 'ໄ'),
('ໆ', 'ໆ'),
- ('\u{ec8}', '\u{ecd}'),
+ ('\u{ec8}', '\u{ece}'),
('໐', '໙'),
('ໜ', 'ໟ'),
];
@@ -918,6 +934,7 @@ pub const LATIN: &'static [(char, char)] = &[
('𐞇', '𐞰'),
('𐞲', '𐞺'),
('𝼀', '𝼞'),
+ ('𝼥', '𝼪'),
];
pub const LEPCHA: &'static [(char, char)] =
@@ -1013,6 +1030,8 @@ pub const MYANMAR: &'static [(char, char)] =
pub const NABATAEAN: &'static [(char, char)] = &[('𐢀', '𐢞'), ('𐢧', '𐢯')];
+pub const NAG_MUNDARI: &'static [(char, char)] = &[('𞓐', '𞓹')];
+
pub const NANDINAGARI: &'static [(char, char)] =
&[('𑦠', '𑦧'), ('𑦪', '\u{119d7}'), ('\u{119da}', '𑧤')];
diff --git a/vendor/regex-syntax/src/unicode_tables/script_extension.rs b/vendor/regex-syntax/src/unicode_tables/script_extension.rs
index c970e0104..42625e21b 100644
--- a/vendor/regex-syntax/src/unicode_tables/script_extension.rs
+++ b/vendor/regex-syntax/src/unicode_tables/script_extension.rs
@@ -1,10 +1,10 @@
// DO NOT EDIT THIS FILE. IT WAS AUTOMATICALLY GENERATED BY:
//
-// ucd-generate script-extension /tmp/ucd --chars
+// ucd-generate script-extension ucd-15.0.0 --chars
//
-// Unicode version: 14.0.0.
+// Unicode version: 15.0.0.
//
-// ucd-generate 0.2.11 is available on crates.io.
+// ucd-generate 0.2.14 is available on crates.io.
pub const BY_NAME: &'static [(&'static str, &'static [(char, char)])] = &[
("Adlam", ADLAM),
@@ -69,6 +69,7 @@ pub const BY_NAME: &'static [(&'static str, &'static [(char, char)])] = &[
("Kaithi", KAITHI),
("Kannada", KANNADA),
("Katakana", KATAKANA),
+ ("Kawi", KAWI),
("Kayah_Li", KAYAH_LI),
("Kharoshthi", KHAROSHTHI),
("Khitan_Small_Script", KHITAN_SMALL_SCRIPT),
@@ -103,6 +104,7 @@ pub const BY_NAME: &'static [(&'static str, &'static [(char, char)])] = &[
("Multani", MULTANI),
("Myanmar", MYANMAR),
("Nabataean", NABATAEAN),
+ ("Nag_Mundari", NAG_MUNDARI),
("Nandinagari", NANDINAGARI),
("New_Tai_Lue", NEW_TAI_LUE),
("Newa", NEWA),
@@ -196,6 +198,7 @@ pub const ARABIC: &'static [(char, char)] = &[
('ﹶ', 'ﻼ'),
('\u{102e0}', '𐋻'),
('𐹠', '𐹾'),
+ ('\u{10efd}', '\u{10eff}'),
('𞸀', '𞸃'),
('𞸅', '𞸟'),
('𞸡', '𞸢'),
@@ -404,6 +407,7 @@ pub const COMMON: &'static [(char, char)] = &[
('𝆃', '𝆄'),
('𝆌', '𝆩'),
('𝆮', '𝇪'),
+ ('𝋀', '𝋓'),
('𝋠', '𝋳'),
('𝌀', '𝍖'),
('𝍲', '𝍸'),
@@ -443,10 +447,10 @@ pub const COMMON: &'static [(char, char)] = &[
('🉀', '🉈'),
('🉠', '🉥'),
('🌀', '🛗'),
- ('🛝', '🛬'),
+ ('🛜', '🛬'),
('🛰', '🛼'),
- ('🜀', '🝳'),
- ('🞀', '🟘'),
+ ('🜀', '🝶'),
+ ('🝻', '🟙'),
('🟠', '🟫'),
('🟰', '🟰'),
('🠀', '🠋'),
@@ -457,15 +461,13 @@ pub const COMMON: &'static [(char, char)] = &[
('🢰', '🢱'),
('🤀', '🩓'),
('🩠', '🩭'),
- ('🩰', '🩴'),
- ('🩸', '🩼'),
- ('🪀', '🪆'),
- ('🪐', '🪬'),
- ('🪰', '🪺'),
- ('🫀', '🫅'),
- ('🫐', '🫙'),
- ('🫠', '🫧'),
- ('🫰', '🫶'),
+ ('🩰', '🩼'),
+ ('🪀', '🪈'),
+ ('🪐', '🪽'),
+ ('🪿', '🫅'),
+ ('🫎', '🫛'),
+ ('🫠', '🫨'),
+ ('🫰', '🫸'),
('🬀', '🮒'),
('🮔', '🯊'),
('🯰', '🯹'),
@@ -503,6 +505,8 @@ pub const CYRILLIC: &'static [(char, char)] = &[
('⹃', '⹃'),
('Ꙁ', '\u{a69f}'),
('\u{fe2e}', '\u{fe2f}'),
+ ('𞀰', '𞁭'),
+ ('\u{1e08f}', '\u{1e08f}'),
];
pub const DESERET: &'static [(char, char)] = &[('𐐀', '𐑏')];
@@ -515,6 +519,7 @@ pub const DEVANAGARI: &'static [(char, char)] = &[
('\u{20f0}', '\u{20f0}'),
('꠰', '꠹'),
('\u{a8e0}', '\u{a8ff}'),
+ ('𑬀', '𑬉'),
];
pub const DIVES_AKURU: &'static [(char, char)] = &[
@@ -535,7 +540,7 @@ pub const DUPLOYAN: &'static [(char, char)] =
&[('𛰀', '𛱪'), ('𛱰', '𛱼'), ('𛲀', '𛲈'), ('𛲐', '𛲙'), ('𛲜', '\u{1bca3}')];
pub const EGYPTIAN_HIEROGLYPHS: &'static [(char, char)] =
- &[('𓀀', '𓐮'), ('\u{13430}', '\u{13438}')];
+ &[('𓀀', '\u{13455}')];
pub const ELBASAN: &'static [(char, char)] = &[('𐔀', '𐔧')];
@@ -760,12 +765,13 @@ pub const HAN: &'static [(char, char)] = &[
('𝍠', '𝍱'),
('🉐', '🉑'),
('𠀀', '𪛟'),
- ('𪜀', '𫜸'),
+ ('𪜀', '𫜹'),
('𫝀', '𫠝'),
('𫠠', '𬺡'),
('𬺰', '𮯠'),
('丽', '𪘀'),
('𰀀', '𱍊'),
+ ('𱍐', '𲎯'),
];
pub const HANGUL: &'static [(char, char)] = &[
@@ -834,6 +840,7 @@ pub const HIRAGANA: &'static [(char, char)] = &[
('ー', 'ー'),
('\u{ff9e}', '\u{ff9f}'),
('𛀁', '𛄟'),
+ ('𛄲', '𛄲'),
('𛅐', '𛅒'),
('🈀', '🈀'),
];
@@ -895,7 +902,7 @@ pub const KANNADA: &'static [(char, char)] = &[
('ೝ', 'ೞ'),
('ೠ', '\u{ce3}'),
('೦', '೯'),
- ('ೱ', 'ೲ'),
+ ('ೱ', 'ೳ'),
('\u{1cd0}', '\u{1cd0}'),
('\u{1cd2}', '\u{1cd2}'),
('\u{1cda}', '\u{1cda}'),
@@ -923,9 +930,13 @@ pub const KATAKANA: &'static [(char, char)] = &[
('𚿽', '𚿾'),
('𛀀', '𛀀'),
('𛄠', '𛄢'),
+ ('𛅕', '𛅕'),
('𛅤', '𛅧'),
];
+pub const KAWI: &'static [(char, char)] =
+ &[('\u{11f00}', '𑼐'), ('𑼒', '\u{11f3a}'), ('𑼾', '𑽙')];
+
pub const KAYAH_LI: &'static [(char, char)] = &[('꤀', '꤯')];
pub const KHAROSHTHI: &'static [(char, char)] = &[
@@ -946,7 +957,7 @@ pub const KHMER: &'static [(char, char)] =
&[('ក', '\u{17dd}'), ('០', '៩'), ('៰', '៹'), ('᧠', '᧿')];
pub const KHOJKI: &'static [(char, char)] =
- &[('૦', '૯'), ('꠰', '꠹'), ('𑈀', '𑈑'), ('𑈓', '\u{1123e}')];
+ &[('૦', '૯'), ('꠰', '꠹'), ('𑈀', '𑈑'), ('𑈓', '\u{11241}')];
pub const KHUDAWADI: &'static [(char, char)] =
&[('।', '॥'), ('꠰', '꠹'), ('𑊰', '\u{112ea}'), ('𑋰', '𑋹')];
@@ -960,7 +971,7 @@ pub const LAO: &'static [(char, char)] = &[
('ວ', 'ຽ'),
('ເ', 'ໄ'),
('ໆ', 'ໆ'),
- ('\u{ec8}', '\u{ecd}'),
+ ('\u{ec8}', '\u{ece}'),
('໐', '໙'),
('ໜ', 'ໟ'),
];
@@ -1012,6 +1023,7 @@ pub const LATIN: &'static [(char, char)] = &[
('𐞇', '𐞰'),
('𐞲', '𐞺'),
('𝼀', '𝼞'),
+ ('𝼥', '𝼪'),
];
pub const LEPCHA: &'static [(char, char)] =
@@ -1124,6 +1136,8 @@ pub const MYANMAR: &'static [(char, char)] =
pub const NABATAEAN: &'static [(char, char)] = &[('𐢀', '𐢞'), ('𐢧', '𐢯')];
+pub const NAG_MUNDARI: &'static [(char, char)] = &[('𞓐', '𞓹')];
+
pub const NANDINAGARI: &'static [(char, char)] = &[
('।', '॥'),
('೦', '೯'),
diff --git a/vendor/regex-syntax/src/unicode_tables/sentence_break.rs b/vendor/regex-syntax/src/unicode_tables/sentence_break.rs
index db8ad282b..24348736f 100644
--- a/vendor/regex-syntax/src/unicode_tables/sentence_break.rs
+++ b/vendor/regex-syntax/src/unicode_tables/sentence_break.rs
@@ -1,10 +1,10 @@
// DO NOT EDIT THIS FILE. IT WAS AUTOMATICALLY GENERATED BY:
//
-// ucd-generate sentence-break /tmp/ucd --chars
+// ucd-generate sentence-break ucd-15.0.0 --chars
//
-// Unicode version: 14.0.0.
+// Unicode version: 15.0.0.
//
-// ucd-generate 0.2.11 is available on crates.io.
+// ucd-generate 0.2.14 is available on crates.io.
pub const BY_NAME: &'static [(&'static str, &'static [(char, char)])] = &[
("ATerm", ATERM),
@@ -157,6 +157,7 @@ pub const EXTEND: &'static [(char, char)] = &[
('ೊ', '\u{ccd}'),
('\u{cd5}', '\u{cd6}'),
('\u{ce2}', '\u{ce3}'),
+ ('ೳ', 'ೳ'),
('\u{d00}', 'ഃ'),
('\u{d3b}', '\u{d3c}'),
('\u{d3e}', '\u{d44}'),
@@ -175,7 +176,7 @@ pub const EXTEND: &'static [(char, char)] = &[
('\u{e47}', '\u{e4e}'),
('\u{eb1}', '\u{eb1}'),
('\u{eb4}', '\u{ebc}'),
- ('\u{ec8}', '\u{ecd}'),
+ ('\u{ec8}', '\u{ece}'),
('\u{f18}', '\u{f19}'),
('\u{f35}', '\u{f35}'),
('\u{f37}', '\u{f37}'),
@@ -279,6 +280,7 @@ pub const EXTEND: &'static [(char, char)] = &[
('\u{10ae5}', '\u{10ae6}'),
('\u{10d24}', '\u{10d27}'),
('\u{10eab}', '\u{10eac}'),
+ ('\u{10efd}', '\u{10eff}'),
('\u{10f46}', '\u{10f50}'),
('\u{10f82}', '\u{10f85}'),
('𑀀', '𑀂'),
@@ -298,6 +300,7 @@ pub const EXTEND: &'static [(char, char)] = &[
('𑇎', '\u{111cf}'),
('𑈬', '\u{11237}'),
('\u{1123e}', '\u{1123e}'),
+ ('\u{11241}', '\u{11241}'),
('\u{112df}', '\u{112ea}'),
('\u{11300}', '𑌃'),
('\u{1133b}', '\u{1133c}'),
@@ -345,6 +348,12 @@ pub const EXTEND: &'static [(char, char)] = &[
('\u{11d90}', '\u{11d91}'),
('𑶓', '\u{11d97}'),
('\u{11ef3}', '𑻶'),
+ ('\u{11f00}', '\u{11f01}'),
+ ('𑼃', '𑼃'),
+ ('𑼴', '\u{11f3a}'),
+ ('𑼾', '\u{11f42}'),
+ ('\u{13440}', '\u{13440}'),
+ ('\u{13447}', '\u{13455}'),
('\u{16af0}', '\u{16af4}'),
('\u{16b30}', '\u{16b36}'),
('\u{16f4f}', '\u{16f4f}'),
@@ -372,9 +381,11 @@ pub const EXTEND: &'static [(char, char)] = &[
('\u{1e01b}', '\u{1e021}'),
('\u{1e023}', '\u{1e024}'),
('\u{1e026}', '\u{1e02a}'),
+ ('\u{1e08f}', '\u{1e08f}'),
('\u{1e130}', '\u{1e136}'),
('\u{1e2ae}', '\u{1e2ae}'),
('\u{1e2ec}', '\u{1e2ef}'),
+ ('\u{1e4ec}', '\u{1e4ef}'),
('\u{1e8d0}', '\u{1e8d6}'),
('\u{1e944}', '\u{1e94a}'),
('\u{e0020}', '\u{e007f}'),
@@ -399,7 +410,7 @@ pub const FORMAT: &'static [(char, char)] = &[
('\u{fff9}', '\u{fffb}'),
('\u{110bd}', '\u{110bd}'),
('\u{110cd}', '\u{110cd}'),
- ('\u{13430}', '\u{13438}'),
+ ('\u{13430}', '\u{1343f}'),
('\u{1bca0}', '\u{1bca3}'),
('\u{1d173}', '\u{1d17a}'),
('\u{e0001}', '\u{e0001}'),
@@ -682,6 +693,7 @@ pub const LOWER: &'static [(char, char)] = &[
('ԭ', 'ԭ'),
('ԯ', 'ԯ'),
('ՠ', 'ֈ'),
+ ('ჼ', 'ჼ'),
('ᏸ', 'ᏽ'),
('ᲀ', 'ᲈ'),
('ᴀ', 'ᶿ'),
@@ -1021,10 +1033,11 @@ pub const LOWER: &'static [(char, char)] = &[
('ꟕ', 'ꟕ'),
('ꟗ', 'ꟗ'),
('ꟙ', 'ꟙ'),
+ ('ꟲ', 'ꟴ'),
('ꟶ', 'ꟶ'),
('ꟸ', 'ꟺ'),
('ꬰ', 'ꭚ'),
- ('ꭜ', 'ꭨ'),
+ ('ꭜ', 'ꭩ'),
('ꭰ', 'ꮿ'),
('ff', 'st'),
('ﬓ', 'ﬗ'),
@@ -1072,6 +1085,8 @@ pub const LOWER: &'static [(char, char)] = &[
('𝟋', '𝟋'),
('𝼀', '𝼉'),
('𝼋', '𝼞'),
+ ('𝼥', '𝼪'),
+ ('𞀰', '𞁭'),
('𞤢', '𞥃'),
];
@@ -1131,12 +1146,14 @@ pub const NUMERIC: &'static [(char, char)] = &[
('𑱐', '𑱙'),
('𑵐', '𑵙'),
('𑶠', '𑶩'),
+ ('𑽐', '𑽙'),
('𖩠', '𖩩'),
('𖫀', '𖫉'),
('𖭐', '𖭙'),
('𝟎', '𝟿'),
('𞅀', '𞅉'),
('𞋰', '𞋹'),
+ ('𞓰', '𞓹'),
('𞥐', '𞥙'),
('🯰', '🯹'),
];
@@ -1294,7 +1311,7 @@ pub const OLETTER: &'static [(char, char)] = &[
('ၵ', 'ႁ'),
('ႎ', 'ႎ'),
('ა', 'ჺ'),
- ('ჼ', 'ቈ'),
+ ('ჽ', 'ቈ'),
('ቊ', 'ቍ'),
('ቐ', 'ቖ'),
('ቘ', 'ቘ'),
@@ -1390,7 +1407,6 @@ pub const OLETTER: &'static [(char, char)] = &[
('ꜗ', 'ꜟ'),
('ꞈ', 'ꞈ'),
('ꞏ', 'ꞏ'),
- ('ꟲ', 'ꟴ'),
('ꟷ', 'ꟷ'),
('ꟻ', 'ꠁ'),
('ꠃ', 'ꠅ'),
@@ -1428,7 +1444,6 @@ pub const OLETTER: &'static [(char, char)] = &[
('ꬑ', 'ꬖ'),
('ꬠ', 'ꬦ'),
('ꬨ', 'ꬮ'),
- ('ꭩ', 'ꭩ'),
('ꯀ', 'ꯢ'),
('가', '힣'),
('ힰ', 'ퟆ'),
@@ -1531,6 +1546,7 @@ pub const OLETTER: &'static [(char, char)] = &[
('𑇜', '𑇜'),
('𑈀', '𑈑'),
('𑈓', '𑈫'),
+ ('𑈿', '𑉀'),
('𑊀', '𑊆'),
('𑊈', '𑊈'),
('𑊊', '𑊍'),
@@ -1592,12 +1608,16 @@ pub const OLETTER: &'static [(char, char)] = &[
('𑵪', '𑶉'),
('𑶘', '𑶘'),
('𑻠', '𑻲'),
+ ('𑼂', '𑼂'),
+ ('𑼄', '𑼐'),
+ ('𑼒', '𑼳'),
('𑾰', '𑾰'),
('𒀀', '𒎙'),
('𒐀', '𒑮'),
('𒒀', '𒕃'),
('𒾐', '𒿰'),
- ('𓀀', '𓐮'),
+ ('𓀀', '𓐯'),
+ ('𓑁', '𓑆'),
('𔐀', '𔙆'),
('𖠀', '𖨸'),
('𖩀', '𖩞'),
@@ -1619,7 +1639,9 @@ pub const OLETTER: &'static [(char, char)] = &[
('𚿵', '𚿻'),
('𚿽', '𚿾'),
('𛀀', '𛄢'),
+ ('𛄲', '𛄲'),
('𛅐', '𛅒'),
+ ('𛅕', '𛅕'),
('𛅤', '𛅧'),
('𛅰', '𛋻'),
('𛰀', '𛱪'),
@@ -1632,6 +1654,7 @@ pub const OLETTER: &'static [(char, char)] = &[
('𞅎', '𞅎'),
('𞊐', '𞊭'),
('𞋀', '𞋫'),
+ ('𞓐', '𞓫'),
('𞟠', '𞟦'),
('𞟨', '𞟫'),
('𞟭', '𞟮'),
@@ -1672,12 +1695,13 @@ pub const OLETTER: &'static [(char, char)] = &[
('𞺥', '𞺩'),
('𞺫', '𞺻'),
('𠀀', '𪛟'),
- ('𪜀', '𫜸'),
+ ('𪜀', '𫜹'),
('𫝀', '𫠝'),
('𫠠', '𬺡'),
('𬺰', '𮯠'),
('丽', '𪘀'),
('𰀀', '𱍊'),
+ ('𱍐', '𲎯'),
];
pub const SCONTINUE: &'static [(char, char)] = &[
@@ -1772,6 +1796,7 @@ pub const STERM: &'static [(char, char)] = &[
('𑪛', '𑪜'),
('𑱁', '𑱂'),
('𑻷', '𑻸'),
+ ('𑽃', '𑽄'),
('𖩮', '𖩯'),
('𖫵', '𖫵'),
('𖬷', '𖬸'),
diff --git a/vendor/regex-syntax/src/unicode_tables/word_break.rs b/vendor/regex-syntax/src/unicode_tables/word_break.rs
index 19b2a1c64..c0714956f 100644
--- a/vendor/regex-syntax/src/unicode_tables/word_break.rs
+++ b/vendor/regex-syntax/src/unicode_tables/word_break.rs
@@ -1,10 +1,10 @@
// DO NOT EDIT THIS FILE. IT WAS AUTOMATICALLY GENERATED BY:
//
-// ucd-generate word-break /tmp/ucd --chars
+// ucd-generate word-break ucd-15.0.0 --chars
//
-// Unicode version: 14.0.0.
+// Unicode version: 15.0.0.
//
-// ucd-generate 0.2.11 is available on crates.io.
+// ucd-generate 0.2.14 is available on crates.io.
pub const BY_NAME: &'static [(&'static str, &'static [(char, char)])] = &[
("ALetter", ALETTER),
@@ -434,6 +434,7 @@ pub const ALETTER: &'static [(char, char)] = &[
('𑇜', '𑇜'),
('𑈀', '𑈑'),
('𑈓', '𑈫'),
+ ('𑈿', '𑉀'),
('𑊀', '𑊆'),
('𑊈', '𑊈'),
('𑊊', '𑊍'),
@@ -494,12 +495,16 @@ pub const ALETTER: &'static [(char, char)] = &[
('𑵪', '𑶉'),
('𑶘', '𑶘'),
('𑻠', '𑻲'),
+ ('𑼂', '𑼂'),
+ ('𑼄', '𑼐'),
+ ('𑼒', '𑼳'),
('𑾰', '𑾰'),
('𒀀', '𒎙'),
('𒐀', '𒑮'),
('𒒀', '𒕃'),
('𒾐', '𒿰'),
- ('𓀀', '𓐮'),
+ ('𓀀', '𓐯'),
+ ('𓑁', '𓑆'),
('𔐀', '𔙆'),
('𖠀', '𖨸'),
('𖩀', '𖩞'),
@@ -550,11 +555,14 @@ pub const ALETTER: &'static [(char, char)] = &[
('𝞪', '𝟂'),
('𝟄', '𝟋'),
('𝼀', '𝼞'),
+ ('𝼥', '𝼪'),
+ ('𞀰', '𞁭'),
('𞄀', '𞄬'),
('𞄷', '𞄽'),
('𞅎', '𞅎'),
('𞊐', '𞊭'),
('𞋀', '𞋫'),
+ ('𞓐', '𞓫'),
('𞟠', '𞟦'),
('𞟨', '𞟫'),
('𞟭', '𞟮'),
@@ -685,6 +693,7 @@ pub const EXTEND: &'static [(char, char)] = &[
('ೊ', '\u{ccd}'),
('\u{cd5}', '\u{cd6}'),
('\u{ce2}', '\u{ce3}'),
+ ('ೳ', 'ೳ'),
('\u{d00}', 'ഃ'),
('\u{d3b}', '\u{d3c}'),
('\u{d3e}', '\u{d44}'),
@@ -703,7 +712,7 @@ pub const EXTEND: &'static [(char, char)] = &[
('\u{e47}', '\u{e4e}'),
('\u{eb1}', '\u{eb1}'),
('\u{eb4}', '\u{ebc}'),
- ('\u{ec8}', '\u{ecd}'),
+ ('\u{ec8}', '\u{ece}'),
('\u{f18}', '\u{f19}'),
('\u{f35}', '\u{f35}'),
('\u{f37}', '\u{f37}'),
@@ -807,6 +816,7 @@ pub const EXTEND: &'static [(char, char)] = &[
('\u{10ae5}', '\u{10ae6}'),
('\u{10d24}', '\u{10d27}'),
('\u{10eab}', '\u{10eac}'),
+ ('\u{10efd}', '\u{10eff}'),
('\u{10f46}', '\u{10f50}'),
('\u{10f82}', '\u{10f85}'),
('𑀀', '𑀂'),
@@ -826,6 +836,7 @@ pub const EXTEND: &'static [(char, char)] = &[
('𑇎', '\u{111cf}'),
('𑈬', '\u{11237}'),
('\u{1123e}', '\u{1123e}'),
+ ('\u{11241}', '\u{11241}'),
('\u{112df}', '\u{112ea}'),
('\u{11300}', '𑌃'),
('\u{1133b}', '\u{1133c}'),
@@ -873,6 +884,12 @@ pub const EXTEND: &'static [(char, char)] = &[
('\u{11d90}', '\u{11d91}'),
('𑶓', '\u{11d97}'),
('\u{11ef3}', '𑻶'),
+ ('\u{11f00}', '\u{11f01}'),
+ ('𑼃', '𑼃'),
+ ('𑼴', '\u{11f3a}'),
+ ('𑼾', '\u{11f42}'),
+ ('\u{13440}', '\u{13440}'),
+ ('\u{13447}', '\u{13455}'),
('\u{16af0}', '\u{16af4}'),
('\u{16b30}', '\u{16b36}'),
('\u{16f4f}', '\u{16f4f}'),
@@ -900,9 +917,11 @@ pub const EXTEND: &'static [(char, char)] = &[
('\u{1e01b}', '\u{1e021}'),
('\u{1e023}', '\u{1e024}'),
('\u{1e026}', '\u{1e02a}'),
+ ('\u{1e08f}', '\u{1e08f}'),
('\u{1e130}', '\u{1e136}'),
('\u{1e2ae}', '\u{1e2ae}'),
('\u{1e2ec}', '\u{1e2ef}'),
+ ('\u{1e4ec}', '\u{1e4ef}'),
('\u{1e8d0}', '\u{1e8d6}'),
('\u{1e944}', '\u{1e94a}'),
('🏻', '🏿'),
@@ -937,7 +956,7 @@ pub const FORMAT: &'static [(char, char)] = &[
('\u{fff9}', '\u{fffb}'),
('\u{110bd}', '\u{110bd}'),
('\u{110cd}', '\u{110cd}'),
- ('\u{13430}', '\u{13438}'),
+ ('\u{13430}', '\u{1343f}'),
('\u{1bca0}', '\u{1bca3}'),
('\u{1d173}', '\u{1d17a}'),
('\u{e0001}', '\u{e0001}'),
@@ -970,6 +989,7 @@ pub const KATAKANA: &'static [(char, char)] = &[
('𚿽', '𚿾'),
('𛀀', '𛀀'),
('𛄠', '𛄢'),
+ ('𛅕', '𛅕'),
('𛅤', '𛅧'),
];
@@ -1072,12 +1092,14 @@ pub const NUMERIC: &'static [(char, char)] = &[
('𑱐', '𑱙'),
('𑵐', '𑵙'),
('𑶠', '𑶩'),
+ ('𑽐', '𑽙'),
('𖩠', '𖩩'),
('𖫀', '𖫉'),
('𖭐', '𖭙'),
('𝟎', '𝟿'),
('𞅀', '𞅉'),
('𞋰', '𞋹'),
+ ('𞓰', '𞓹'),
('𞥐', '𞥙'),
('🯰', '🯹'),
];
diff --git a/vendor/regex/.cargo-checksum.json b/vendor/regex/.cargo-checksum.json
index 463368b15..1623d8918 100644
--- a/vendor/regex/.cargo-checksum.json
+++ b/vendor/regex/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"CHANGELOG.md":"1034edbdcb6cbd83b9e9a1dc3dfcdfd6c852112ab402a1cc83734191ea7b3a29","Cargo.lock":"27cdf849031e8c491581b3a436b119e2cd0106fcd94e159fc84751d3337b1684","Cargo.toml":"4a10b9986b5b858cb6ff3a8537492f4b6f7394289b991e182bfc10ad29c3323b","HACKING.md":"17818f7a17723608f6bdbe6388ad0a913d4f96f76a16649aaf4e274b1fa0ea97","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"6485b8ed310d3f0340bf1ad1f47645069ce4069dcc6bb46c7d5c6faf41de1fdb","PERFORMANCE.md":"0d5ef3866386918dfdefb1aa9a28cfe33cb3c8ceeb79f3f8ba5b88253dd95991","README.md":"6125b1c70b9b560412529c54dc6aacdfd39cc82f69b5ad7776fa86e4cb720347","UNICODE.md":"a8a8399540eed000d19420135a527f400247a04572e44d124c786b870f518776","examples/regexdna-input.txt":"156a49710bb3e1ed4bc2bbb0af0f383b747b3d0281453cfff39c296124c598f8","examples/regexdna-output.txt":"35e85b19b70a893d752fd43e54e1e9da08bac43559191cea85b33387c24c4cc1","examples/shootout-regex-dna-bytes.rs":"fa2daedb4e0a05f64f33f4af62fbb0176db998e3676f8637ab684b725367a7b4","examples/shootout-regex-dna-cheat.rs":"1f871a6eaaf8372299fa3c762051112fa89a14235b03f734fc50ebd51ecaee72","examples/shootout-regex-dna-replace.rs":"32ffdf13ac6c4ce3fc32116a048e9cc682aa34cdb8e5beaf565a22addbdcd9ab","examples/shootout-regex-dna-single-cheat.rs":"809f75bf1e1917a53623eb6f1a3ce3b7d2ed98a6a1dbc0bd4853bec49a0c6f94","examples/shootout-regex-dna-single.rs":"1ab14f5703cd4be2e75a2e792e0ba1d322b9e4b14535d396805a4316d577f5bb","examples/shootout-regex-dna.rs":"20ea46ab63f91e3ac6a64e997eadd436a9cbc2f1bdade28e4512052f0e25bc34","rustfmt.toml":"1ca600239a27401c4a43f363cf3f38183a212affc1f31bff3ae93234bbaec228","src/backtrack.rs":"52987d80448f3d7f5d4e3545ddfc09f1f30de7602d9b5489961db4b215a377fd","src/compile.rs":"79a59be2d2db650b5a322e15e9bf1d3227944410bc780fc6089da8f4d2609b77","src/dfa.rs":"10273980d1f08aaff495e11efa240249a2b2c08a4db7c49c8d6759bc65a3b174","src/error.rs":"71c85db839514f26ee024a689061743ea94a34eb7a3291e6c2b69b45a9682d09","src/exec.rs":"21495ab6813598204a444aeea3a0121674081389fd0f07fc3443eb8858b1c677","src/expand.rs":"71220309a3bac797f55129f49e79c03e96efec894ea338c735b78695367e04ca","src/find_byte.rs":"b387247b77e3269f057c3399aefe5a815032c3af918c876f80eb4b282e4eb95e","src/freqs.rs":"255555f3d95b08a5bb3bc2f38d5a06cc100a39c0f0127fe4f50c33afa1cadc65","src/input.rs":"13f49c1bce2fadd04a45b421d374cd0f8b72bef83f7e8fda958962aaccbe799a","src/lib.rs":"de28e1ad68d4b35750667c7fbb47915e6c159ef04b148f16c3507a0a7a682f96","src/literal/imp.rs":"b7f63a861c299bea4baaab17353a420ee339c2cf76d3858c95f39342bd4463e7","src/literal/mod.rs":"533f1d68af088e9485170145e27518368e541a0337fdb44f63249ebf97310300","src/pattern.rs":"993d8b6b4bcea5e02bee3c76e17c356a5a47f8fc53c5555edfd1ebb71c0878bf","src/pikevm.rs":"6c0eaa7e878c945ac4c3c545c98f5706ad04846fc432a5086c8ee78eb030dfa7","src/pool.rs":"942e991ae31ef349bd76efd78b2a712c01166dec965bf93742977ed0870d5a10","src/prog.rs":"bebb3e50745bbc05d6c8240d972ba55a1818c51b1161dc1c21f3fe13c11d4884","src/re_builder.rs":"943344bf6e2fc90902ee04b11b741c32418ac6814b21b7982cc0a3a817713f3e","src/re_bytes.rs":"e2eddc896cea1e878716e77798a8146a67d1d8d9bcf4d053155c1caf3b8f5518","src/re_set.rs":"7921ac4a919b7a5deffe82d099a9ccaf5487aebd890dfb7a661e602c6ad3f1a9","src/re_trait.rs":"d237121b6f6b606836c72305cbcb3bbdbc54d1f6827d19a19cd0fbb4372e0145","src/re_unicode.rs":"ba4d793ff194bfd33a3735e3664c7590f5f166c452e7632a25e4558ffba14e5a","src/sparse.rs":"0da3ddb7972109869248a764dbb10254555f4bb51c375e89fb3fab9cafa47320","src/testdata/LICENSE":"58cf078acc03da3e280a938c2bd9943f
554fc9b6ced89ad93ba35ca436872899","src/testdata/README":"45f869e37f798905c773bfbe0ef19a5fb7e585cbf0b7c21b5b5a784e8cec3c14","src/testdata/basic.dat":"b5b33aa89d48a61cd67cb1fbfd8f70e62c83e30b86256f9f915a5190dd38ff06","src/testdata/nullsubexpr.dat":"496ac0278eec3b6d9170faace14554569032dd3d909618364d9326156de39ecf","src/testdata/repetition.dat":"1f7959063015b284b18a4a2c1c8b416d438a2d6c4b1a362da43406b865f50e69","src/utf8.rs":"f85a356ff5d5b19e417b73ce1dd84581b21d283f6dddd195547c30af9c60bd1a","test":"0d62fdca7da12fc19ea5306b5de1d83e68d9365a029c043d524334da138b0304","tests/api.rs":"7b2a0ef75e99b9776094967bd66e9cdeaa8e11359f5f0a12bd08ef0e8d0c11fc","tests/api_str.rs":"2ae38c04e7e8fac008b609a820d0b1561ba75f39b0edc0987d6d3d06132da77f","tests/bytes.rs":"edc50f526c5fee43df89d639ef18b237e4eb91e9d533bfc43f3cbab7417d38ba","tests/consistent.rs":"d69435154c09478076497216e43081a835ac65147181a4fbddad7bff469605b2","tests/crates_regex.rs":"91a59d470e0700b4bcb3ff735d06799f3107b8ef4875a2e9904607b164be0326","tests/crazy.rs":"c0d56380dff19bdd5d7a3eb731d0e2dc564e169a1b73c81e1879b1e87f5f5f77","tests/flags.rs":"05caace2c81a99d2168037f3a38035d4dffe9f85ef3ebd7ef18b1bc6612f1ea8","tests/fowler.rs":"d78cf914de40b1e125cc92b65ccb444d462586bd07b5e05de4e4a1b5de16aa76","tests/macros.rs":"6db70c16fc90df13e6b30d2b606f8b6dd4dc976697967f6ee001b15aab6d0b19","tests/macros_bytes.rs":"a049f528a93173a1bb176cd46932dce1880679f4a1752e099be920f0e4546fd0","tests/macros_str.rs":"e585b1461374c45a2eca44ca045bc3c1fe984b2b4212e432b0c695b420e708b7","tests/misc.rs":"395f52793fa022e4cdda78675b6a6fba1a3106b4b99c834c39f7801574054bd1","tests/multiline.rs":"1b1a3326ed976437c1357f01d81833ece7ea244f38826246eab55cacd5d0862a","tests/noparse.rs":"12b6be0eff3d80779d33c6459396c74c0f6ebf4ddc9f1d33c3e747ea9e3bf268","tests/regression.rs":"1c965fefb8c7a2b1dfdab3e3fdeebaf47846555c50c8005e5537f96a52a3e252","tests/regression_fuzz.rs":"a504ec563e0d23bd2039493b7b1767fe1f831d7d668f6f4b2ecd124fc7899bcd","tests/replace.rs":"0efa042c0d531911e8ac41ce98a6b60236cbf40954102c59f9f6dea78d9d74dd","tests/searcher.rs":"ce35e47b0a276a7e8c9060c6a0b225ffba163aebc61fbc15555a6897fa0e552c","tests/set.rs":"f1e2af6baeeaed3cc99ed347ff516fe7b2eb0027ef64b891502e1486598eaf8a","tests/shortest_match.rs":"a2c94390c0d61bc24796b4c1288c924e90c8c9c6156fdebb858175177a194a42","tests/suffix_reverse.rs":"b95f89397404871227d9efe6df23b9ded147f183db81597e608f693955c668b5","tests/test_backtrack.rs":"b70c5e5f1241efd76dd9f9dd4a4df8a7b38113bd407d1f5f56867f1176177a59","tests/test_backtrack_bytes.rs":"b8a111d4b4109c8bba7e2afb650572c495a14d357fb1f743c1076fb001f704b5","tests/test_backtrack_utf8bytes.rs":"c0c279785d18beac2b4e178e7bf6c14ed235d65f00ca467cfd9c333d79487649","tests/test_crates_regex.rs":"fd9525c2eef0e2f8cb7f787bc2b721bcd0b5d84f3bca49adfe48d657a99c721a","tests/test_default.rs":"c2dfa0298896f86f1be2abf6b0c347a7ca12f95aeac92bf614dc3b86bdfff269","tests/test_default_bytes.rs":"831d3e6bfb882feb15f700e30304bd34328f888fb4c15c7169371e25024ce9a7","tests/test_nfa.rs":"f119fc43a018249c39c813d57096b0654ff69f337345f2bbd9b0e61cc9137285","tests/test_nfa_bytes.rs":"89eae3bef6a1d0bcea6b5de5be35ad72f613f2ceb8b58fe82a6c6ef2ccdc07d0","tests/test_nfa_utf8bytes.rs":"7d830b4aa401887d7cf098b62fed4cd8017ef8b61f625c7c9a2159a6b4cfeb71","tests/unicode.rs":"1af9db7f09a6b0113b8a64733e06c8415fef720b2fdef227ae398d94332287cd","tests/word_boundary.rs":"7081317ddcec1e82dd4a2090a571c6abf2ff4bbfa8cd10395e1eb3f386157fae","tests/word_boundary_ascii.rs":"cd0be5b5b485de0ba7994b42e2864585556c3d2d8bf5eab05b58931d9aaf4b87","tests/word_boundary_unicod
e.rs":"75dbcc35d3abc0f9795c2ea99e216dc227b0a5b58e9ca5eef767815ff0513921"},"package":"4c4eb3267174b8c6c2f654116623910a0fef09c4753f8dd83db29c48a0df988b"} \ No newline at end of file
+{"files":{"CHANGELOG.md":"47b22859157339150c957dd72be9cf87aee341ebb3711efac5930efb10436368","Cargo.lock":"3445929e595d109e2f37d349ffad3dd6cb76e7203a029cf1955838d0438d68a4","Cargo.toml":"0abdf3ce883520254d94a04dcf831fb6f0b75bfda7bcf9c8500ca9a2d1f8ff44","HACKING.md":"17818f7a17723608f6bdbe6388ad0a913d4f96f76a16649aaf4e274b1fa0ea97","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"6485b8ed310d3f0340bf1ad1f47645069ce4069dcc6bb46c7d5c6faf41de1fdb","PERFORMANCE.md":"0d5ef3866386918dfdefb1aa9a28cfe33cb3c8ceeb79f3f8ba5b88253dd95991","README.md":"f69204a0f446047d8f4d1f3d84b75f235adb5c26477f3a37b671411bc954d14c","UNICODE.md":"a8a8399540eed000d19420135a527f400247a04572e44d124c786b870f518776","examples/regexdna-input.txt":"156a49710bb3e1ed4bc2bbb0af0f383b747b3d0281453cfff39c296124c598f8","examples/regexdna-output.txt":"35e85b19b70a893d752fd43e54e1e9da08bac43559191cea85b33387c24c4cc1","examples/shootout-regex-dna-bytes.rs":"fa2daedb4e0a05f64f33f4af62fbb0176db998e3676f8637ab684b725367a7b4","examples/shootout-regex-dna-cheat.rs":"1f871a6eaaf8372299fa3c762051112fa89a14235b03f734fc50ebd51ecaee72","examples/shootout-regex-dna-replace.rs":"32ffdf13ac6c4ce3fc32116a048e9cc682aa34cdb8e5beaf565a22addbdcd9ab","examples/shootout-regex-dna-single-cheat.rs":"809f75bf1e1917a53623eb6f1a3ce3b7d2ed98a6a1dbc0bd4853bec49a0c6f94","examples/shootout-regex-dna-single.rs":"1ab14f5703cd4be2e75a2e792e0ba1d322b9e4b14535d396805a4316d577f5bb","examples/shootout-regex-dna.rs":"20ea46ab63f91e3ac6a64e997eadd436a9cbc2f1bdade28e4512052f0e25bc34","rustfmt.toml":"1ca600239a27401c4a43f363cf3f38183a212affc1f31bff3ae93234bbaec228","src/backtrack.rs":"52987d80448f3d7f5d4e3545ddfc09f1f30de7602d9b5489961db4b215a377fd","src/compile.rs":"79a59be2d2db650b5a322e15e9bf1d3227944410bc780fc6089da8f4d2609b77","src/dfa.rs":"10273980d1f08aaff495e11efa240249a2b2c08a4db7c49c8d6759bc65a3b174","src/error.rs":"71c85db839514f26ee024a689061743ea94a34eb7a3291e6c2b69b45a9682d09","src/exec.rs":"21495ab6813598204a444aeea3a0121674081389fd0f07fc3443eb8858b1c677","src/expand.rs":"71220309a3bac797f55129f49e79c03e96efec894ea338c735b78695367e04ca","src/find_byte.rs":"b387247b77e3269f057c3399aefe5a815032c3af918c876f80eb4b282e4eb95e","src/freqs.rs":"255555f3d95b08a5bb3bc2f38d5a06cc100a39c0f0127fe4f50c33afa1cadc65","src/input.rs":"13f49c1bce2fadd04a45b421d374cd0f8b72bef83f7e8fda958962aaccbe799a","src/lib.rs":"982fadba415c4c5b93f4d7d4a73a23ec88e2d96daaa03b679d14490ea0f63197","src/literal/imp.rs":"b7f63a861c299bea4baaab17353a420ee339c2cf76d3858c95f39342bd4463e7","src/literal/mod.rs":"533f1d68af088e9485170145e27518368e541a0337fdb44f63249ebf97310300","src/pattern.rs":"993d8b6b4bcea5e02bee3c76e17c356a5a47f8fc53c5555edfd1ebb71c0878bf","src/pikevm.rs":"6c0eaa7e878c945ac4c3c545c98f5706ad04846fc432a5086c8ee78eb030dfa7","src/pool.rs":"942e991ae31ef349bd76efd78b2a712c01166dec965bf93742977ed0870d5a10","src/prog.rs":"bebb3e50745bbc05d6c8240d972ba55a1818c51b1161dc1c21f3fe13c11d4884","src/re_builder.rs":"943344bf6e2fc90902ee04b11b741c32418ac6814b21b7982cc0a3a817713f3e","src/re_bytes.rs":"63ee1db1637a3764addb10e27248129acffaf78bb0a69624add4d9d6f1e97040","src/re_set.rs":"7921ac4a919b7a5deffe82d099a9ccaf5487aebd890dfb7a661e602c6ad3f1a9","src/re_trait.rs":"d237121b6f6b606836c72305cbcb3bbdbc54d1f6827d19a19cd0fbb4372e0145","src/re_unicode.rs":"4ca66d6e835df7c0f570c8cde52667ef90ba1687d5285f12fedef2e38ae925b4","src/sparse.rs":"0da3ddb7972109869248a764dbb10254555f4bb51c375e89fb3fab9cafa47320","src/testdata/LICENSE":"58cf078acc03da3e280a938c2bd9943f
554fc9b6ced89ad93ba35ca436872899","src/testdata/README":"45f869e37f798905c773bfbe0ef19a5fb7e585cbf0b7c21b5b5a784e8cec3c14","src/testdata/basic.dat":"b5b33aa89d48a61cd67cb1fbfd8f70e62c83e30b86256f9f915a5190dd38ff06","src/testdata/nullsubexpr.dat":"496ac0278eec3b6d9170faace14554569032dd3d909618364d9326156de39ecf","src/testdata/repetition.dat":"1f7959063015b284b18a4a2c1c8b416d438a2d6c4b1a362da43406b865f50e69","src/utf8.rs":"f85a356ff5d5b19e417b73ce1dd84581b21d283f6dddd195547c30af9c60bd1a","test":"0d62fdca7da12fc19ea5306b5de1d83e68d9365a029c043d524334da138b0304","tests/api.rs":"7b2a0ef75e99b9776094967bd66e9cdeaa8e11359f5f0a12bd08ef0e8d0c11fc","tests/api_str.rs":"2ae38c04e7e8fac008b609a820d0b1561ba75f39b0edc0987d6d3d06132da77f","tests/bytes.rs":"edc50f526c5fee43df89d639ef18b237e4eb91e9d533bfc43f3cbab7417d38ba","tests/consistent.rs":"d69435154c09478076497216e43081a835ac65147181a4fbddad7bff469605b2","tests/crates_regex.rs":"91a59d470e0700b4bcb3ff735d06799f3107b8ef4875a2e9904607b164be0326","tests/crazy.rs":"c0d56380dff19bdd5d7a3eb731d0e2dc564e169a1b73c81e1879b1e87f5f5f77","tests/flags.rs":"05caace2c81a99d2168037f3a38035d4dffe9f85ef3ebd7ef18b1bc6612f1ea8","tests/fowler.rs":"d78cf914de40b1e125cc92b65ccb444d462586bd07b5e05de4e4a1b5de16aa76","tests/macros.rs":"6db70c16fc90df13e6b30d2b606f8b6dd4dc976697967f6ee001b15aab6d0b19","tests/macros_bytes.rs":"a049f528a93173a1bb176cd46932dce1880679f4a1752e099be920f0e4546fd0","tests/macros_str.rs":"e585b1461374c45a2eca44ca045bc3c1fe984b2b4212e432b0c695b420e708b7","tests/misc.rs":"395f52793fa022e4cdda78675b6a6fba1a3106b4b99c834c39f7801574054bd1","tests/multiline.rs":"1b1a3326ed976437c1357f01d81833ece7ea244f38826246eab55cacd5d0862a","tests/noparse.rs":"12b6be0eff3d80779d33c6459396c74c0f6ebf4ddc9f1d33c3e747ea9e3bf268","tests/regression.rs":"1c965fefb8c7a2b1dfdab3e3fdeebaf47846555c50c8005e5537f96a52a3e252","tests/regression_fuzz.rs":"a504ec563e0d23bd2039493b7b1767fe1f831d7d668f6f4b2ecd124fc7899bcd","tests/replace.rs":"66f97532e40697934e2a77605b9002dfd22c46b6033ccb755e7660d855229f41","tests/searcher.rs":"ce35e47b0a276a7e8c9060c6a0b225ffba163aebc61fbc15555a6897fa0e552c","tests/set.rs":"f1e2af6baeeaed3cc99ed347ff516fe7b2eb0027ef64b891502e1486598eaf8a","tests/shortest_match.rs":"a2c94390c0d61bc24796b4c1288c924e90c8c9c6156fdebb858175177a194a42","tests/suffix_reverse.rs":"b95f89397404871227d9efe6df23b9ded147f183db81597e608f693955c668b5","tests/test_backtrack.rs":"b70c5e5f1241efd76dd9f9dd4a4df8a7b38113bd407d1f5f56867f1176177a59","tests/test_backtrack_bytes.rs":"b8a111d4b4109c8bba7e2afb650572c495a14d357fb1f743c1076fb001f704b5","tests/test_backtrack_utf8bytes.rs":"c0c279785d18beac2b4e178e7bf6c14ed235d65f00ca467cfd9c333d79487649","tests/test_crates_regex.rs":"fd9525c2eef0e2f8cb7f787bc2b721bcd0b5d84f3bca49adfe48d657a99c721a","tests/test_default.rs":"c2dfa0298896f86f1be2abf6b0c347a7ca12f95aeac92bf614dc3b86bdfff269","tests/test_default_bytes.rs":"831d3e6bfb882feb15f700e30304bd34328f888fb4c15c7169371e25024ce9a7","tests/test_nfa.rs":"f119fc43a018249c39c813d57096b0654ff69f337345f2bbd9b0e61cc9137285","tests/test_nfa_bytes.rs":"89eae3bef6a1d0bcea6b5de5be35ad72f613f2ceb8b58fe82a6c6ef2ccdc07d0","tests/test_nfa_utf8bytes.rs":"7d830b4aa401887d7cf098b62fed4cd8017ef8b61f625c7c9a2159a6b4cfeb71","tests/unicode.rs":"1af9db7f09a6b0113b8a64733e06c8415fef720b2fdef227ae398d94332287cd","tests/word_boundary.rs":"7081317ddcec1e82dd4a2090a571c6abf2ff4bbfa8cd10395e1eb3f386157fae","tests/word_boundary_ascii.rs":"cd0be5b5b485de0ba7994b42e2864585556c3d2d8bf5eab05b58931d9aaf4b87","tests/word_boundary_unicod
e.rs":"75dbcc35d3abc0f9795c2ea99e216dc227b0a5b58e9ca5eef767815ff0513921"},"package":"48aaa5748ba571fb95cd2c85c09f629215d3a6ece942baa100950af03a34f733"} \ No newline at end of file
diff --git a/vendor/regex/CHANGELOG.md b/vendor/regex/CHANGELOG.md
index f2d45ed73..466f5a9c9 100644
--- a/vendor/regex/CHANGELOG.md
+++ b/vendor/regex/CHANGELOG.md
@@ -1,3 +1,29 @@
+1.7.1 (2023-01-09)
+==================
+This release was done principally to try and fix the doc.rs rendering for the
+regex crate.
+
+Performance improvements:
+
+* [PERF #930](https://github.com/rust-lang/regex/pull/930):
+ Optimize `replacen`. This also applies to `replace`, but not `replace_all`.
+
+Bug fixes:
+
+* [BUG #945](https://github.com/rust-lang/regex/issues/945):
+ Maybe fix rustdoc rendering by just bumping a new release?
+
+
+1.7.0 (2022-11-05)
+==================
+This release principally includes an upgrade to Unicode 15.
+
+New features:
+
+* [FEATURE #832](https://github.com/rust-lang/regex/issues/916):
+ Upgrade to Unicode 15.
+
+
1.6.0 (2022-07-05)
==================
This release principally includes an upgrade to Unicode 14.
diff --git a/vendor/regex/Cargo.lock b/vendor/regex/Cargo.lock
index fd2c1101d..031b3647e 100644
--- a/vendor/regex/Cargo.lock
+++ b/vendor/regex/Cargo.lock
@@ -4,9 +4,9 @@ version = 3
[[package]]
name = "aho-corasick"
-version = "0.7.18"
+version = "0.7.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f"
+checksum = "cc936419f96fa211c1b9166887b38e5e40b19958e5b895be7c1f93adec7071ac"
dependencies = [
"memchr",
]
@@ -19,9 +19,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "getrandom"
-version = "0.2.7"
+version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4eb1a864a501629691edf6c15a593b7a51eebaa1e8468e9ddc623de7c9b58ec6"
+checksum = "c05aeb6a22b8f62540c194aac980f2115af067bfe15a0734d7277a768d396b31"
dependencies = [
"cfg-if",
"libc",
@@ -36,9 +36,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "libc"
-version = "0.2.126"
+version = "0.2.139"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "349d5a591cd28b49e1d1037471617a32ddcda5731b99419008085f72d5a53836"
+checksum = "201de327520df007757c1f0adce6e827fe8562fbc28bfd9c15571c66ca1f5f79"
[[package]]
name = "memchr"
@@ -66,16 +66,16 @@ dependencies = [
[[package]]
name = "rand_core"
-version = "0.6.3"
+version = "0.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7"
+checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
dependencies = [
"getrandom",
]
[[package]]
name = "regex"
-version = "1.6.0"
+version = "1.7.1"
dependencies = [
"aho-corasick",
"lazy_static",
@@ -87,9 +87,9 @@ dependencies = [
[[package]]
name = "regex-syntax"
-version = "0.6.27"
+version = "0.6.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a3f87b73ce11b1619a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244"
+checksum = "456c603be3e8d448b072f410900c09faf164fbce2d480456f50eea6e25f9c848"
[[package]]
name = "wasi"
diff --git a/vendor/regex/Cargo.toml b/vendor/regex/Cargo.toml
index abe6ac033..4f8673ea9 100644
--- a/vendor/regex/Cargo.toml
+++ b/vendor/regex/Cargo.toml
@@ -12,7 +12,7 @@
[package]
edition = "2018"
name = "regex"
-version = "1.6.0"
+version = "1.7.1"
authors = ["The Rust Project Developers"]
exclude = [
"/scripts/*",
diff --git a/vendor/regex/README.md b/vendor/regex/README.md
index 9acd5bb4a..861417da6 100644
--- a/vendor/regex/README.md
+++ b/vendor/regex/README.md
@@ -23,12 +23,8 @@ can be found on the
### Usage
-Add this to your `Cargo.toml`:
-
-```toml
-[dependencies]
-regex = "1.5"
-```
+To bring this crate into your repository, either add `regex` to your
+`Cargo.toml`, or run `cargo add regex`.
Here's a simple example that matches a date in YYYY-MM-DD format and prints the
year, month and day:
diff --git a/vendor/regex/src/lib.rs b/vendor/regex/src/lib.rs
index 3e3b0a007..6b95739c5 100644
--- a/vendor/regex/src/lib.rs
+++ b/vendor/regex/src/lib.rs
@@ -353,6 +353,9 @@ $ the end of text (or end-of-line with multi-line mode)
\B not a Unicode word boundary
</pre>
+The empty regex is valid and matches the empty string. For example, the empty
+regex matches `abc` at positions `0`, `1`, `2` and `3`.
+
## Grouping and flags
<pre class="rust">
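The doc hunk above states that the empty pattern is valid and matches the empty string at every position. A minimal, hypothetical sketch against the public `regex` API (not part of the vendored diff) of what that behaviour looks like:

```rust
// Sketch only: demonstrates the documented empty-pattern behaviour.
use regex::Regex;

fn main() {
    let re = Regex::new("").unwrap(); // the empty regex is valid
    let starts: Vec<usize> = re.find_iter("abc").map(|m| m.start()).collect();
    assert_eq!(starts, vec![0, 1, 2, 3]); // zero-width matches at 0, 1, 2 and 3
}
```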
diff --git a/vendor/regex/src/re_bytes.rs b/vendor/regex/src/re_bytes.rs
index d71969257..07e9f98ac 100644
--- a/vendor/regex/src/re_bytes.rs
+++ b/vendor/regex/src/re_bytes.rs
@@ -496,12 +496,12 @@ impl Regex {
let mut new = Vec::with_capacity(text.len());
let mut last_match = 0;
for (i, m) in it {
- if limit > 0 && i >= limit {
- break;
- }
new.extend_from_slice(&text[last_match..m.start()]);
new.extend_from_slice(&rep);
last_match = m.end();
+ if limit > 0 && i >= limit - 1 {
+ break;
+ }
}
new.extend_from_slice(&text[last_match..]);
return Cow::Owned(new);
@@ -516,14 +516,14 @@ impl Regex {
let mut new = Vec::with_capacity(text.len());
let mut last_match = 0;
for (i, cap) in it {
- if limit > 0 && i >= limit {
- break;
- }
// unwrap on 0 is OK because captures only reports matches
let m = cap.get(0).unwrap();
new.extend_from_slice(&text[last_match..m.start()]);
rep.replace_append(&cap, &mut new);
last_match = m.end();
+ if limit > 0 && i >= limit - 1 {
+ break;
+ }
}
new.extend_from_slice(&text[last_match..]);
Cow::Owned(new)
diff --git a/vendor/regex/src/re_unicode.rs b/vendor/regex/src/re_unicode.rs
index 60d81a7d9..197510ea0 100644
--- a/vendor/regex/src/re_unicode.rs
+++ b/vendor/regex/src/re_unicode.rs
@@ -554,12 +554,12 @@ impl Regex {
let mut new = String::with_capacity(text.len());
let mut last_match = 0;
for (i, m) in it {
- if limit > 0 && i >= limit {
- break;
- }
new.push_str(&text[last_match..m.start()]);
new.push_str(&rep);
last_match = m.end();
+ if limit > 0 && i >= limit - 1 {
+ break;
+ }
}
new.push_str(&text[last_match..]);
return Cow::Owned(new);
@@ -574,14 +574,14 @@ impl Regex {
let mut new = String::with_capacity(text.len());
let mut last_match = 0;
for (i, cap) in it {
- if limit > 0 && i >= limit {
- break;
- }
// unwrap on 0 is OK because captures only reports matches
let m = cap.get(0).unwrap();
new.push_str(&text[last_match..m.start()]);
rep.replace_append(&cap, &mut new);
last_match = m.end();
+ if limit > 0 && i >= limit - 1 {
+ break;
+ }
}
new.push_str(&text[last_match..]);
Cow::Owned(new)
diff --git a/vendor/regex/tests/replace.rs b/vendor/regex/tests/replace.rs
index 1dc610635..d65be072f 100644
--- a/vendor/regex/tests/replace.rs
+++ b/vendor/regex/tests/replace.rs
@@ -228,3 +228,21 @@ replace!(
bytes!(&std::borrow::Cow::<'_, [u8]>::Owned(vec![b'Z'])),
"age: Z6"
);
+
+#[test]
+fn replacen_no_captures() {
+ let re = regex!(r"[0-9]");
+ assert_eq!(
+ re.replacen(text!("age: 1234"), 2, t!("Z")),
+ text!("age: ZZ34")
+ );
+}
+
+#[test]
+fn replacen_with_captures() {
+ let re = regex!(r"([0-9])");
+ assert_eq!(
+ re.replacen(text!("age: 1234"), 2, t!("${1}Z")),
+ text!("age: 1Z2Z34")
+ );
+}
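The `re_bytes.rs` and `re_unicode.rs` hunks above break out of the loop immediately after the limit-th replacement instead of first searching for one more match only to discard it (the PERF #930 optimization noted in the CHANGELOG); the observable semantics are unchanged. A minimal sketch of those semantics, mirroring the new tests and assuming the public `regex` API (not part of the vendored diff):

```rust
// Sketch only: `replacen` with limit 2 replaces exactly two matches;
// a limit of 0 means "no limit" and behaves like `replace_all`.
use regex::Regex;

fn main() {
    let re = Regex::new(r"[0-9]").unwrap();
    assert_eq!(re.replacen("age: 1234", 2, "Z"), "age: ZZ34");
    assert_eq!(re.replacen("age: 1234", 0, "Z"), "age: ZZZZ");
}
```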
diff --git a/vendor/rls-data/.cargo-checksum.json b/vendor/rls-data/.cargo-checksum.json
deleted file mode 100644
index 523d566f5..000000000
--- a/vendor/rls-data/.cargo-checksum.json
+++ /dev/null
@@ -1 +0,0 @@
-{"files":{"Cargo.toml":"74218651d16ab1b9a87db98ae2e88999a41df06e0e6e06fff7626a37dda4ad27","README.md":"7bbd124ce5419c1a600dc4d10091f3c822a1b9a7ab51713c53f900e34126ecdf","src/config.rs":"89199590ca29eefae416f815dbd320ea85944f41fdc10df76cf6f966006593a5","src/lib.rs":"740b35604810e91a3a76363e878ef819a629abc27ce76a330daffb345ee461b8"},"package":"a58135eb039f3a3279a33779192f0ee78b56f57ae636e25cec83530e41debb99"} \ No newline at end of file
diff --git a/vendor/rls-data/README.md b/vendor/rls-data/README.md
deleted file mode 100644
index c9c2638dc..000000000
--- a/vendor/rls-data/README.md
+++ /dev/null
@@ -1,11 +0,0 @@
-# RLS-data
-
-Data structures used by the RLS and the Rust compiler.
-
-These are used by the save-analysis functionality in the compiler
-(`rustc -Zsave-analysis`). In that use, the compiler translates info in its
-internal data structures to these data structures then serialises them as JSON.
-Clients (such as the RLS) can use this crate when deserialising.
-
-The data can also be passed directly from compiler to client if the compiler is
-used as a library.
diff --git a/vendor/rls-data/src/config.rs b/vendor/rls-data/src/config.rs
deleted file mode 100644
index 0b0f8ebb8..000000000
--- a/vendor/rls-data/src/config.rs
+++ /dev/null
@@ -1,24 +0,0 @@
-#[cfg(feature = "derive")]
-use serde::{Deserialize, Serialize};
-
-/// Used to configure save-analysis.
-#[derive(Debug, Clone, Default)]
-#[cfg_attr(feature = "derive", derive(Serialize, Deserialize))]
-pub struct Config {
- /// File to output save-analysis data to.
- pub output_file: Option<String>,
- /// Include all documentation for items. (If `false`, only includes the
- /// summary (first paragraph) for each item).
- pub full_docs: bool,
- /// If true only includes data for public items in a crate (useful for
- /// library crates).
- pub pub_only: bool,
- /// If true only includes data for items reachable from the crate root.
- pub reachable_only: bool,
- /// True if and only if the analysed crate is part of the standard Rust distro.
- pub distro_crate: bool,
- /// Include signature information.
- pub signatures: bool,
- /// Include experimental borrow data.
- pub borrow_data: bool,
-}
diff --git a/vendor/rls-data/src/lib.rs b/vendor/rls-data/src/lib.rs
deleted file mode 100644
index 8352a5bbc..000000000
--- a/vendor/rls-data/src/lib.rs
+++ /dev/null
@@ -1,272 +0,0 @@
-use rls_span as span;
-
-use std::path::PathBuf;
-
-#[cfg(feature = "derive")]
-use serde::{Deserialize, Serialize};
-
-pub mod config;
-pub use config::Config;
-
-#[derive(Debug, Clone, Default)]
-#[cfg_attr(feature = "derive", derive(Serialize, Deserialize))]
-#[repr(C)]
-pub struct Analysis {
- /// The Config used to generate this analysis data.
- pub config: Config,
- pub version: Option<String>,
- pub compilation: Option<CompilationOptions>,
- pub prelude: Option<CratePreludeData>,
- pub imports: Vec<Import>,
- pub defs: Vec<Def>,
- pub impls: Vec<Impl>,
- pub refs: Vec<Ref>,
- pub macro_refs: Vec<MacroRef>,
- pub relations: Vec<Relation>,
-}
-
-impl Analysis {
- /// Returns an initialized `Analysis` struct with `config` and also
- /// `version` field to Cargo package version.
- pub fn new(config: Config) -> Analysis {
- Analysis {
- config,
- version: option_env!("CARGO_PKG_VERSION").map(ToString::to_string),
- ..Analysis::default()
- }
- }
-}
-
-// DefId::index is a newtype and so the JSON serialisation is ugly. Therefore
-// we use our own Id which is the same, but without the newtype.
-#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
-#[cfg_attr(feature = "derive", derive(Serialize, Deserialize))]
-pub struct Id {
- pub krate: u32,
- pub index: u32,
-}
-
-/// Crate name, along with its disambiguator (128-bit hash) represents a globally
-/// unique crate identifier, which should allow for differentiation between
-/// different crate targets or versions and should point to the same crate when
-/// pulled by different other, dependent crates.
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-#[cfg_attr(feature = "derive", derive(Serialize, Deserialize))]
-pub struct GlobalCrateId {
- pub name: String,
- pub disambiguator: (u64, u64),
-}
-
-#[derive(Debug, Clone)]
-#[cfg_attr(feature = "derive", derive(Serialize, Deserialize))]
-pub struct SpanData {
- pub file_name: PathBuf,
- pub byte_start: u32,
- pub byte_end: u32,
- pub line_start: span::Row<span::OneIndexed>,
- pub line_end: span::Row<span::OneIndexed>,
- // Character offset.
- pub column_start: span::Column<span::OneIndexed>,
- pub column_end: span::Column<span::OneIndexed>,
-}
-
-#[derive(Debug, Clone)]
-#[cfg_attr(feature = "derive", derive(Serialize, Deserialize))]
-pub struct CompilationOptions {
- pub directory: PathBuf,
- pub program: String,
- pub arguments: Vec<String>,
- pub output: PathBuf,
-}
-
-#[derive(Debug, Clone)]
-#[cfg_attr(feature = "derive", derive(Serialize, Deserialize))]
-pub struct CratePreludeData {
- pub crate_id: GlobalCrateId,
- pub crate_root: String,
- pub external_crates: Vec<ExternalCrateData>,
- pub span: SpanData,
-}
-
-/// Data for external crates in the prelude of a crate.
-#[derive(Debug, Clone)]
-#[cfg_attr(feature = "derive", derive(Serialize, Deserialize))]
-pub struct ExternalCrateData {
- /// Source file where the external crate is declared.
- pub file_name: String,
- /// A crate-local crate index of an external crate. Local crate index is
- /// always 0, so these should start from 1 and range should be contiguous,
- /// e.g. from 1 to n for n external crates.
- pub num: u32,
- pub id: GlobalCrateId,
-}
-
-#[derive(Debug, Clone)]
-#[cfg_attr(feature = "derive", derive(Serialize, Deserialize))]
-pub struct Import {
- pub kind: ImportKind,
- pub ref_id: Option<Id>,
- pub span: SpanData,
- pub alias_span: Option<SpanData>,
- pub name: String,
- pub value: String,
- pub parent: Option<Id>,
-}
-
-#[derive(Debug, Clone, Copy, PartialEq, Eq)]
-#[cfg_attr(feature = "derive", derive(Serialize, Deserialize))]
-pub enum ImportKind {
- ExternCrate,
- Use,
- GlobUse,
-}
-
-#[derive(Debug, Clone)]
-#[cfg_attr(feature = "derive", derive(Serialize, Deserialize))]
-pub struct Def {
- pub kind: DefKind,
- pub id: Id,
- pub span: SpanData,
- pub name: String,
- pub qualname: String,
- pub value: String,
- pub parent: Option<Id>,
- pub children: Vec<Id>,
- pub decl_id: Option<Id>,
- pub docs: String,
- pub sig: Option<Signature>,
- pub attributes: Vec<Attribute>,
-}
-
-#[derive(Debug, Clone, Copy, PartialEq, Eq)]
-#[cfg_attr(feature = "derive", derive(Serialize, Deserialize))]
-pub enum DefKind {
- // value = variant names
- Enum,
- // value = enum name + variant name + types
- TupleVariant,
- // value = enum name + name + fields
- StructVariant,
- // value = variant name + types
- Tuple,
- // value = name + fields
- Struct,
- Union,
- // value = signature
- Trait,
- // value = type + generics
- Function,
- ForeignFunction,
- // value = type + generics
- Method,
- // No id, no value.
- Macro,
- // value = file_name
- Mod,
- // value = aliased type
- Type,
- // value = type and init expression (for all variable kinds).
- Local,
- Static,
- ForeignStatic,
- Const,
- Field,
- // no value
- ExternType,
-}
-
-#[derive(Debug, Clone)]
-#[cfg_attr(feature = "derive", derive(Serialize, Deserialize))]
-pub struct Impl {
- pub id: u32,
- pub kind: ImplKind,
- pub span: SpanData,
- pub value: String,
- pub parent: Option<Id>,
- pub children: Vec<Id>,
- pub docs: String,
- pub sig: Option<Signature>,
- pub attributes: Vec<Attribute>,
-}
-
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "derive", derive(Serialize, Deserialize))]
-pub enum ImplKind {
- // impl Foo { ... }
- Inherent,
- // impl Bar for Foo { ... }
- Direct,
- // impl Bar for &Foo { ... }
- Indirect,
- // impl<T: Baz> Bar for T { ... }
- // where Foo: Baz
- Blanket,
- // impl Bar for Baz { ... } or impl Baz { ... }, etc.
- // where Foo: Deref<Target = Baz>
- // Args are name and id of Baz
- Deref(String, Id),
-}
-
-#[derive(Debug, Clone)]
-#[cfg_attr(feature = "derive", derive(Serialize, Deserialize))]
-pub struct Attribute {
- pub value: String,
- pub span: SpanData,
-}
-
-#[derive(Debug, Clone)]
-#[cfg_attr(feature = "derive", derive(Serialize, Deserialize))]
-pub struct Ref {
- pub kind: RefKind,
- pub span: SpanData,
- pub ref_id: Id,
-}
-
-#[derive(Debug, Clone, Copy, PartialEq, Eq)]
-#[cfg_attr(feature = "derive", derive(Serialize, Deserialize))]
-pub enum RefKind {
- Function,
- Mod,
- Type,
- Variable,
-}
-
-#[derive(Debug, Clone)]
-#[cfg_attr(feature = "derive", derive(Serialize, Deserialize))]
-pub struct MacroRef {
- pub span: SpanData,
- pub qualname: String,
- pub callee_span: SpanData,
-}
-
-#[derive(Debug, Clone)]
-#[cfg_attr(feature = "derive", derive(Serialize, Deserialize))]
-pub struct Relation {
- pub span: SpanData,
- pub kind: RelationKind,
- pub from: Id,
- pub to: Id,
-}
-
-#[derive(Debug, Clone, Copy, PartialEq, Eq)]
-#[cfg_attr(feature = "derive", derive(Serialize, Deserialize))]
-pub enum RelationKind {
- Impl { id: u32 },
- SuperTrait,
-}
-
-#[derive(Debug, Clone)]
-#[cfg_attr(feature = "derive", derive(Serialize, Deserialize))]
-pub struct Signature {
- pub text: String,
- pub defs: Vec<SigElement>,
- pub refs: Vec<SigElement>,
-}
-
-#[derive(Debug, Clone)]
-#[cfg_attr(feature = "derive", derive(Serialize, Deserialize))]
-pub struct SigElement {
- pub id: Id,
- pub start: usize,
- pub end: usize,
-}
diff --git a/vendor/rls-span/.cargo-checksum.json b/vendor/rls-span/.cargo-checksum.json
deleted file mode 100644
index c19eba924..000000000
--- a/vendor/rls-span/.cargo-checksum.json
+++ /dev/null
@@ -1 +0,0 @@
-{"files":{"Cargo.toml":"cf0c687986a67306e3bd71081f7386b15a92b9902683d387aa44cce8e4dc61ac","src/compiler.rs":"b313cdc064a940a5abc452ec0b92833b150ff9b4da9fb68e0c9900fea1d97cfc","src/lib.rs":"cbfe46ec5c42dfeb28fca214b26e034cb01e5f5b644abad68c838af7c7209d12"},"package":"f0eea58478fc06e15f71b03236612173a1b81e9770314edecfa664375e3e4c86"} \ No newline at end of file
diff --git a/vendor/rls-span/Cargo.toml b/vendor/rls-span/Cargo.toml
deleted file mode 100644
index ad48306e7..000000000
--- a/vendor/rls-span/Cargo.toml
+++ /dev/null
@@ -1,33 +0,0 @@
-# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
-#
-# When uploading crates to the registry Cargo will automatically
-# "normalize" Cargo.toml files for maximal compatibility
-# with all versions of Cargo and also rewrite `path` dependencies
-# to registry (e.g., crates.io) dependencies
-#
-# If you believe there's an error in this file please file an
-# issue against the rust-lang/cargo repository. If you're
-# editing this file be aware that the upstream Cargo.toml
-# will likely look very different (and much more reasonable)
-
-[package]
-edition = "2018"
-name = "rls-span"
-version = "0.5.3"
-authors = ["Nick Cameron <ncameron@mozilla.com>"]
-description = "Types for identifying code spans/ranges"
-categories = ["development-tools"]
-license = "Apache-2.0/MIT"
-repository = "https://github.com/rust-lang/rls"
-[dependencies.rustc-serialize]
-version = "0.3.24"
-optional = true
-
-[dependencies.serde]
-version = "1.0"
-
-[features]
-default = []
-derive = ["serde/derive"]
-nightly = []
-serialize-rustc = ["rustc-serialize"]
diff --git a/vendor/rls-span/src/compiler.rs b/vendor/rls-span/src/compiler.rs
deleted file mode 100644
index 60151a561..000000000
--- a/vendor/rls-span/src/compiler.rs
+++ /dev/null
@@ -1,78 +0,0 @@
-///! These are the structures emitted by the compiler as part of JSON errors.
-///! The original source can be found at
-///! https://github.com/rust-lang/rust/blob/master/src/librustc_errors/json.rs
-use std::path::PathBuf;
-
-#[cfg(feature = "derive")]
-use serde::Deserialize;
-
-use crate::{Column, OneIndexed, Row, Span};
-
-#[cfg_attr(feature = "derive", derive(Deserialize))]
-#[cfg_attr(feature = "serialize-rustc", derive(RustcDecodable))]
-#[derive(Debug, Clone)]
-pub struct DiagnosticSpan {
- pub file_name: String,
- pub byte_start: u32,
- pub byte_end: u32,
- /// 1-based.
- pub line_start: usize,
- pub line_end: usize,
- /// 1-based, character offset.
- pub column_start: usize,
- pub column_end: usize,
- /// Is this a "primary" span -- meaning the point, or one of the points,
- /// where the error occurred?
- pub is_primary: bool,
- /// Source text from the start of line_start to the end of line_end.
- pub text: Vec<DiagnosticSpanLine>,
- /// Label that should be placed at this location (if any)
- pub label: Option<String>,
- /// If we are suggesting a replacement, this will contain text
- /// that should be sliced in atop this span. You may prefer to
- /// load the fully rendered version from the parent `Diagnostic`,
- /// however.
- pub suggested_replacement: Option<String>,
- /// Macro invocations that created the code at this span, if any.
- pub expansion: Option<Box<DiagnosticSpanMacroExpansion>>,
-}
-
-impl DiagnosticSpan {
- pub fn rls_span(&self) -> Span<OneIndexed> {
- Span::new(
- Row::new(self.line_start as u32),
- Row::new(self.line_end as u32),
- Column::new(self.column_start as u32),
- Column::new(self.column_end as u32),
- PathBuf::from(&self.file_name),
- )
- }
-}
-
-#[cfg_attr(feature = "derive", derive(Deserialize))]
-#[cfg_attr(feature = "serialize-rustc", derive(RustcDecodable))]
-#[derive(Debug, Clone)]
-pub struct DiagnosticSpanLine {
- pub text: String,
-
- /// 1-based, character offset in self.text.
- pub highlight_start: usize,
-
- pub highlight_end: usize,
-}
-
-#[cfg_attr(feature = "derive", derive(Deserialize))]
-#[cfg_attr(feature = "serialize-rustc", derive(RustcDecodable))]
-#[derive(Debug, Clone)]
-pub struct DiagnosticSpanMacroExpansion {
- /// span where macro was applied to generate this code; note that
- /// this may itself derive from a macro (if
- /// `span.expansion.is_some()`)
- pub span: DiagnosticSpan,
-
- /// name of macro that was applied (e.g., "foo!" or "#[derive(Eq)]")
- pub macro_decl_name: String,
-
- /// span where macro was defined (if known)
- pub def_site_span: Option<DiagnosticSpan>,
-}
diff --git a/vendor/rls-span/src/lib.rs b/vendor/rls-span/src/lib.rs
deleted file mode 100644
index 1d997f605..000000000
--- a/vendor/rls-span/src/lib.rs
+++ /dev/null
@@ -1,383 +0,0 @@
-#![cfg_attr(feature = "nightly", feature(step_trait, step_trait_ext))]
-
-use std::marker::PhantomData;
-use std::path::PathBuf;
-
-#[cfg(feature = "nightly")]
-use std::iter::Step;
-
-use serde::{Deserialize, Serialize};
-
-pub mod compiler;
-
-#[derive(Debug, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Column<I: Indexed>(pub u32, PhantomData<I>);
-
-impl<I: Indexed> Column<I> {
- fn new(c: u32) -> Column<I> {
- Column(c, PhantomData)
- }
-}
-
-impl<I: Indexed> Clone for Column<I> {
- fn clone(&self) -> Column<I> {
- *self
- }
-}
-
-impl<I: Indexed> Copy for Column<I> {}
-
-impl<I: Indexed> Serialize for Column<I> {
- fn serialize<S: serde::Serializer>(
- &self,
- s: S,
- ) -> Result<<S as serde::Serializer>::Ok, <S as serde::Serializer>::Error> {
- s.serialize_u32(self.0)
- }
-}
-
-impl<'dt, I: Indexed> Deserialize<'dt> for Column<I> {
- fn deserialize<D: serde::Deserializer<'dt>>(
- d: D,
- ) -> std::result::Result<Self, <D as serde::Deserializer<'dt>>::Error> {
- <u32 as Deserialize>::deserialize(d).map(Column::new)
- }
-}
-
-#[cfg(feature = "serialize-rustc")]
-impl<I: Indexed> rustc_serialize::Decodable for Column<I> {
- fn decode<D: rustc_serialize::Decoder>(d: &mut D) -> Result<Column<I>, D::Error> {
- d.read_u32().map(Column::new)
- }
-}
-
-#[cfg(feature = "serialize-rustc")]
-impl<I: Indexed> rustc_serialize::Encodable for Column<I> {
- fn encode<S: rustc_serialize::Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
- s.emit_u32(self.0)
- }
-}
-
-impl Column<OneIndexed> {
- pub fn new_one_indexed(c: u32) -> Column<OneIndexed> {
- Column(c, PhantomData)
- }
-
- pub fn zero_indexed(self) -> Column<ZeroIndexed> {
- Column(self.0 - 1, PhantomData)
- }
-}
-
-impl Column<ZeroIndexed> {
- pub fn new_zero_indexed(c: u32) -> Column<ZeroIndexed> {
- Column(c, PhantomData)
- }
-
- pub fn one_indexed(self) -> Column<OneIndexed> {
- Column(self.0 + 1, PhantomData)
- }
-}
-
-#[cfg(feature = "nightly")]
-macro_rules! impl_step {
- ($target: ty) => {
- unsafe impl Step for $target {
- fn steps_between(start: &Self, end: &Self) -> Option<usize> {
- Step::steps_between(&start.0, &end.0)
- }
- fn forward_checked(arg: Self, count: usize) -> Option<Self> {
- Step::forward_checked(arg.0, count).map(|x| Self(x, PhantomData))
- }
- fn backward_checked(arg: Self, count: usize) -> Option<Self> {
- Step::backward_checked(arg.0, count).map(|x| Self(x, PhantomData))
- }
- }
- };
-}
-
-#[cfg(feature = "nightly")]
-impl_step!(Column<ZeroIndexed>);
-#[cfg(feature = "nightly")]
-impl_step!(Column<OneIndexed>);
-#[cfg(feature = "nightly")]
-impl_step!(Row<ZeroIndexed>);
-#[cfg(feature = "nightly")]
-impl_step!(Row<OneIndexed>);
-
-#[derive(Debug, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Row<I: Indexed>(pub u32, PhantomData<I>);
-
-impl<I: Indexed> Row<I> {
- fn new(c: u32) -> Row<I> {
- Row(c, PhantomData)
- }
-}
-
-impl<I: Indexed> Clone for Row<I> {
- fn clone(&self) -> Row<I> {
- *self
- }
-}
-
-impl<I: Indexed> Copy for Row<I> {}
-
-impl<I: Indexed> serde::Serialize for Row<I> {
- fn serialize<S: serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
- s.serialize_u32(self.0)
- }
-}
-
-impl<'dt, I: Indexed> serde::Deserialize<'dt> for Row<I> {
- fn deserialize<D: serde::Deserializer<'dt>>(d: D) -> std::result::Result<Self, D::Error> {
- <u32 as Deserialize>::deserialize(d).map(Row::new)
- }
-}
-
-#[cfg(feature = "serialize-rustc")]
-impl<I: Indexed> rustc_serialize::Decodable for Row<I> {
- fn decode<D: rustc_serialize::Decoder>(d: &mut D) -> Result<Row<I>, D::Error> {
- d.read_u32().map(Row::new)
- }
-}
-
-#[cfg(feature = "serialize-rustc")]
-impl<I: Indexed> rustc_serialize::Encodable for Row<I> {
- fn encode<S: rustc_serialize::Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
- s.emit_u32(self.0)
- }
-}
-
-impl Row<OneIndexed> {
- pub fn new_one_indexed(c: u32) -> Row<OneIndexed> {
- Row(c, PhantomData)
- }
-
- pub fn zero_indexed(self) -> Row<ZeroIndexed> {
- Row(self.0 - 1, PhantomData)
- }
-}
-
-impl Row<ZeroIndexed> {
- pub fn new_zero_indexed(c: u32) -> Row<ZeroIndexed> {
- Row(c, PhantomData)
- }
-
- pub fn one_indexed(self) -> Row<OneIndexed> {
- Row(self.0 + 1, PhantomData)
- }
-}
-
-#[cfg_attr(feature = "derive", derive(Serialize, Deserialize))]
-#[cfg_attr(feature = "serialize-rustc", derive(RustcDecodable, RustcEncodable))]
-#[derive(Debug, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Position<I: Indexed> {
- pub row: Row<I>,
- pub col: Column<I>,
-}
-
-impl<I: Indexed> Position<I> {
- pub fn new(row: Row<I>, col: Column<I>) -> Position<I> {
- Position { row, col }
- }
-}
-
-impl<I: Indexed> Clone for Position<I> {
- fn clone(&self) -> Position<I> {
- *self
- }
-}
-
-impl<I: Indexed> Copy for Position<I> {}
-
-impl Position<OneIndexed> {
- pub fn zero_indexed(self) -> Position<ZeroIndexed> {
- Position { row: self.row.zero_indexed(), col: self.col.zero_indexed() }
- }
-}
-
-impl Position<ZeroIndexed> {
- pub fn one_indexed(self) -> Position<OneIndexed> {
- Position { row: self.row.one_indexed(), col: self.col.one_indexed() }
- }
-}
-
-#[cfg_attr(feature = "derive", derive(Serialize, Deserialize))]
-#[cfg_attr(feature = "serialize-rustc", derive(RustcDecodable, RustcEncodable))]
-#[derive(Debug, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Range<I: Indexed> {
- pub row_start: Row<I>,
- pub row_end: Row<I>,
- pub col_start: Column<I>,
- pub col_end: Column<I>,
-}
-
-impl<I: Indexed> Range<I> {
- pub fn new(
- row_start: Row<I>,
- row_end: Row<I>,
- col_start: Column<I>,
- col_end: Column<I>,
- ) -> Range<I> {
- Range { row_start, row_end, col_start, col_end }
- }
-
- pub fn from_positions(start: Position<I>, end: Position<I>) -> Range<I> {
- Range { row_start: start.row, row_end: end.row, col_start: start.col, col_end: end.col }
- }
-
- pub fn start(self) -> Position<I> {
- Position { row: self.row_start, col: self.col_start }
- }
-
- pub fn end(self) -> Position<I> {
- Position { row: self.row_end, col: self.col_end }
- }
-}
-
-impl<I: Indexed> Clone for Range<I> {
- fn clone(&self) -> Range<I> {
- *self
- }
-}
-
-impl<I: Indexed> Copy for Range<I> {}
-
-impl Range<OneIndexed> {
- pub fn zero_indexed(self) -> Range<ZeroIndexed> {
- Range {
- row_start: self.row_start.zero_indexed(),
- row_end: self.row_end.zero_indexed(),
- col_start: self.col_start.zero_indexed(),
- col_end: self.col_end.zero_indexed(),
- }
- }
-}
-
-impl Range<ZeroIndexed> {
- pub fn one_indexed(self) -> Range<OneIndexed> {
- Range {
- row_start: self.row_start.one_indexed(),
- row_end: self.row_end.one_indexed(),
- col_start: self.col_start.one_indexed(),
- col_end: self.col_end.one_indexed(),
- }
- }
-}
-
-#[cfg_attr(feature = "derive", derive(Serialize, Deserialize))]
-#[cfg_attr(feature = "serialize-rustc", derive(RustcDecodable, RustcEncodable))]
-#[derive(Debug, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Location<I: Indexed> {
- pub file: PathBuf,
- pub position: Position<I>,
-}
-
-impl<I: Indexed> Location<I> {
- pub fn new<F: Into<PathBuf>>(row: Row<I>, col: Column<I>, file: F) -> Location<I> {
- Location { position: Position { row, col }, file: file.into() }
- }
-
- pub fn from_position<F: Into<PathBuf>>(position: Position<I>, file: F) -> Location<I> {
- Location { position, file: file.into() }
- }
-}
-
-impl<I: Indexed> Clone for Location<I> {
- fn clone(&self) -> Location<I> {
- Location { position: self.position, file: self.file.clone() }
- }
-}
-
-impl Location<OneIndexed> {
- pub fn zero_indexed(&self) -> Location<ZeroIndexed> {
- Location { position: self.position.zero_indexed(), file: self.file.clone() }
- }
-}
-
-impl Location<ZeroIndexed> {
- pub fn one_indexed(&self) -> Location<OneIndexed> {
- Location { position: self.position.one_indexed(), file: self.file.clone() }
- }
-}
-
-#[cfg_attr(feature = "derive", derive(Serialize, Deserialize))]
-#[cfg_attr(feature = "serialize-rustc", derive(RustcDecodable, RustcEncodable))]
-#[derive(Debug, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Span<I: Indexed> {
- pub file: PathBuf,
- pub range: Range<I>,
-}
-
-impl<I: Indexed> Span<I> {
- pub fn new<F: Into<PathBuf>>(
- row_start: Row<I>,
- row_end: Row<I>,
- col_start: Column<I>,
- col_end: Column<I>,
- file: F,
- ) -> Span<I> {
- Span { range: Range { row_start, row_end, col_start, col_end }, file: file.into() }
- }
-
- pub fn from_range<F: Into<PathBuf>>(range: Range<I>, file: F) -> Span<I> {
- Span { range, file: file.into() }
- }
-
- pub fn from_positions<F: Into<PathBuf>>(
- start: Position<I>,
- end: Position<I>,
- file: F,
- ) -> Span<I> {
- Span { range: Range::from_positions(start, end), file: file.into() }
- }
-}
-
-impl<I: Indexed> Clone for Span<I> {
- fn clone(&self) -> Span<I> {
- Span { range: self.range, file: self.file.clone() }
- }
-}
-
-impl Span<OneIndexed> {
- pub fn zero_indexed(&self) -> Span<ZeroIndexed> {
- Span { range: self.range.zero_indexed(), file: self.file.clone() }
- }
-}
-
-impl Span<ZeroIndexed> {
- pub fn one_indexed(&self) -> Span<OneIndexed> {
- Span { range: self.range.one_indexed(), file: self.file.clone() }
- }
-}
-
-pub trait Indexed {}
-
-#[cfg_attr(feature = "derive", derive(Serialize, Deserialize))]
-#[cfg_attr(feature = "serialize-rustc", derive(RustcDecodable, RustcEncodable))]
-#[derive(Hash, PartialEq, Eq, Debug, PartialOrd, Ord)]
-pub struct ZeroIndexed;
-impl Indexed for ZeroIndexed {}
-
-#[cfg_attr(feature = "derive", derive(Serialize, Deserialize))]
-#[cfg_attr(feature = "serialize-rustc", derive(RustcDecodable, RustcEncodable))]
-#[derive(Hash, PartialEq, Eq, Debug, PartialOrd, Ord)]
-pub struct OneIndexed;
-impl Indexed for OneIndexed {}
-
-#[cfg(feature = "nightly")]
-#[cfg(test)]
-mod test {
- use super::*;
-
- #[test]
- fn iter_row() {
- assert_eq!((Row::new_one_indexed(4)..Row::new_one_indexed(8)).count(), 4);
- assert_eq!(
- &*(Row::new_zero_indexed(0)..=Row::new_zero_indexed(8))
- .filter(|r| r.0 < 3)
- .map(|r| r.0)
- .collect::<Vec<_>>(),
- &[0, 1, 2],
- );
- }
-}
diff --git a/vendor/rustc-ap-rustc_lexer/.cargo-checksum.json b/vendor/rustc-ap-rustc_lexer/.cargo-checksum.json
index b211139e8..3df6fdfb5 100644
--- a/vendor/rustc-ap-rustc_lexer/.cargo-checksum.json
+++ b/vendor/rustc-ap-rustc_lexer/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.toml":"dc430a17d2d107587d485d97cd5f15bc1c5cb55d145435d4d7267223c1fd8d33","src/cursor.rs":"1b58d5746e41eb7c58212885260543a359a6aa825b3bc47d8cb9733a75e90158","src/lib.rs":"41ffa950ae7a03f68a0ef610e61faeadd81f7b3d90da0cc999ad18bf0769dd38","src/tests.rs":"0a8e21db0da8e336b41c145652a4a41d235e1c36d5284a10cf837dba2b471c4d","src/unescape.rs":"913036ef4cecc7735de8c2f576dda584b0ba976ca71723fc1590447676b60ac8","src/unescape/tests.rs":"5f73f809bbb287b116c7db1416f01bf54ccdb78882f5f22a1da42f2f48f63af3"},"package":"f950742ef8a203aa7661aad3ab880438ddeb7f95d4b837c30d65db1a2c5df68e"} \ No newline at end of file
+{"files":{"Cargo.toml":"7f25776fe1328d925567faf1db5afd37b8e4cd135492e0b5aece88059202ba81","src/cursor.rs":"1b58d5746e41eb7c58212885260543a359a6aa825b3bc47d8cb9733a75e90158","src/lib.rs":"630b5b8e8c1527912a854903ff8e4ddc10789904698db61f837cef7fc21f2181","src/tests.rs":"0a8e21db0da8e336b41c145652a4a41d235e1c36d5284a10cf837dba2b471c4d","src/unescape.rs":"913036ef4cecc7735de8c2f576dda584b0ba976ca71723fc1590447676b60ac8","src/unescape/tests.rs":"5f73f809bbb287b116c7db1416f01bf54ccdb78882f5f22a1da42f2f48f63af3"},"package":"8f40f26e7abdcd3b982f36c09a634cc6187988fbf6ec466c91f8d30a12ac0237"} \ No newline at end of file
diff --git a/vendor/rustc-ap-rustc_lexer/Cargo.toml b/vendor/rustc-ap-rustc_lexer/Cargo.toml
index f11e5ce15..fd16ad110 100644
--- a/vendor/rustc-ap-rustc_lexer/Cargo.toml
+++ b/vendor/rustc-ap-rustc_lexer/Cargo.toml
@@ -13,9 +13,9 @@
[package]
edition = "2018"
name = "rustc-ap-rustc_lexer"
-version = "725.0.0"
+version = "727.0.0"
authors = ["The Rust Project Developers"]
-description = "Automatically published version of the package `rustc_lexer` in the rust-lang/rust repository from commit c38111c4fb9c22a36f9a9195d1884052bb670af2 The publishing script for this crate lives at: https://github.com/alexcrichton/rustc-auto-publish\n "
+description = "Automatically published version of the package `rustc_lexer` in the rust-lang/rust repository from commit 9a27044f42ace9eb652781b53f598e25d4e7e918 The publishing script for this crate lives at: https://github.com/alexcrichton/rustc-auto-publish\n "
license = "MIT / Apache-2.0"
repository = "https://github.com/rust-lang/rust"
diff --git a/vendor/rustc-ap-rustc_lexer/src/lib.rs b/vendor/rustc-ap-rustc_lexer/src/lib.rs
index b9781581f..4cb2a6ca5 100644
--- a/vendor/rustc-ap-rustc_lexer/src/lib.rs
+++ b/vendor/rustc-ap-rustc_lexer/src/lib.rs
@@ -66,6 +66,13 @@ pub enum TokenKind {
Ident,
/// "r#ident"
RawIdent,
+ /// An unknown prefix like `foo#`, `foo'`, `foo"`. Note that only the
+ /// prefix (`foo`) is included in the token, not the separator (which is
+ /// lexed as its own distinct token). In Rust 2021 and later, reserved
+ /// prefixes are reported as errors; in earlier editions, they result in a
+ /// (allowed by default) lint, and are treated as regular identifier
+ /// tokens.
+ UnknownPrefix,
/// "12_u8", "1.0e-40", "b"123"". See `LiteralKind` for more details.
Literal { kind: LiteralKind, suffix_start: usize },
/// "'a"
@@ -323,7 +330,7 @@ impl Cursor<'_> {
let kind = RawStr { n_hashes, err };
Literal { kind, suffix_start }
}
- _ => self.ident(),
+ _ => self.ident_or_unknown_prefix(),
},
// Byte literal, byte string literal, raw byte string literal or identifier.
@@ -358,12 +365,12 @@ impl Cursor<'_> {
let kind = RawByteStr { n_hashes, err };
Literal { kind, suffix_start }
}
- _ => self.ident(),
+ _ => self.ident_or_unknown_prefix(),
},
// Identifier (this should be checked after other variant that can
// start as identifier).
- c if is_id_start(c) => self.ident(),
+ c if is_id_start(c) => self.ident_or_unknown_prefix(),
// Numeric literal.
c @ '0'..='9' => {
@@ -487,11 +494,16 @@ impl Cursor<'_> {
RawIdent
}
- fn ident(&mut self) -> TokenKind {
+ fn ident_or_unknown_prefix(&mut self) -> TokenKind {
debug_assert!(is_id_start(self.prev()));
// Start is already eaten, eat the rest of identifier.
self.eat_while(is_id_continue);
- Ident
+ // Known prefixes must have been handled earlier. So if
+ // we see a prefix here, it is definitely an unknown prefix.
+ match self.first() {
+ '#' | '"' | '\'' => UnknownPrefix,
+ _ => Ident,
+ }
}
fn number(&mut self, first_digit: char) -> LiteralKind {
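The new `UnknownPrefix` token kind documented above covers only the identifier part of a prefix such as `foo#`; the separator is still lexed as its own token. A hypothetical sketch, assuming the crate exposes the usual `tokenize` iterator (not part of the vendored diff):

```rust
// Sketch only: with the hunks above, `foo#bar` starts with an UnknownPrefix
// token rather than a plain Ident, followed by the separator token.
use rustc_ap_rustc_lexer::{tokenize, TokenKind};

fn main() {
    let kinds: Vec<TokenKind> = tokenize("foo#bar").map(|t| t.kind).collect();
    assert!(matches!(kinds[0], TokenKind::UnknownPrefix));
}
```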
diff --git a/vendor/ryu/.cargo-checksum.json b/vendor/ryu/.cargo-checksum.json
index f0feb1916..a2dcae08d 100644
--- a/vendor/ryu/.cargo-checksum.json
+++ b/vendor/ryu/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.lock":"2717f9b8d4230094a8c92ce37bb7bf56f5f869da3904b5b8f177b0c44f32389f","Cargo.toml":"a022feb14dc0f3979da30277ecce957cfefe78e6638d27882bf4cd1f8106e15f","LICENSE-APACHE":"c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4","LICENSE-BOOST":"c9bff75738922193e67fa726fa225535870d2aa1059f91452c411736284ad566","README.md":"6f91d4158af93dccaf61aab14ba9db78cce0413a70c8d7e63bf2823c0262be28","benches/bench.rs":"703521c8cb9c6959ee305776a9971d24754b6fff5c1737741be04f956a3692e8","examples/upstream_benchmark.rs":"f702d3598a8fac59134a8058ebf74ba90163b1f23ebbd6c5978a7bd8a888d357","src/buffer/mod.rs":"c5adf9aa037271916e78c61c9fd98e3230a0fed1fca15694d4d57166fa697125","src/common.rs":"cae347e97fc30c50a964f80425e8c3e69ece2b8ab81f9b81b9baa7fcec64a001","src/d2s.rs":"83f821f17fd8d2cf72bcc47cc8c603ab24f2377db6cd0f08638031716f8dc17c","src/d2s_full_table.rs":"9b0186acbc6d65dc55c17e16125be707a2bfb920d22b35d33234b4cc38566a36","src/d2s_intrinsics.rs":"658d00a64ce2aca7f0780a1acc5939167e4a66d836b51c46de1047820992fec1","src/d2s_small_table.rs":"7b25cfbf0793d0662d83f5d92a9f880295652db9979b5acf702b313359996508","src/digit_table.rs":"02351ca54cb8cb3679f635115dd094f32fd91750e9f66103c1ee9ec3db507072","src/f2s.rs":"55320c2301680d8be3a908620cccd9d103b0cd3ad7a7d3378589e274ffc2587b","src/f2s_intrinsics.rs":"97bab98093838e30c60f5135f54f5ccb039ff7d9f35553ac8e74437743ca47e2","src/lib.rs":"b4d5d3d390511b01f81df6321aad564d457cf390e6e2276ad3974564a54527ec","src/parse.rs":"7f8aa7e007caf5dcb03abdc4238157724bb742d0823a3b8a01646fa1f1129154","src/pretty/exponent.rs":"6c9aa1c707c567ae338647056e37557a94e5120781ee9f6f64e9c7071ffb50d0","src/pretty/mantissa.rs":"a3eb97fd8928bfabef4523501f204fc7254e948318d727eff8327b9b06e76242","src/pretty/mod.rs":"169c57b14075295b07fa408963c300cefa94fd0b17e098d524ef46535bd84019","src/s2d.rs":"2f572603eedaa9efbe864105999a1ceac8aa4ff4e1d2fbd96127692460194d16","src/s2f.rs":"411b1e5acdeb3d7a29f4fddfdf9ce77e6395475d1c053a05e31482d49e6bf1ee","tests/common_test.rs":"275184cf366f80c11e5f33c2d53065a073e20d81bf71ca70478c89e47fb8da36","tests/d2s_table_test.rs":"54b3a7d40aa9bec03e9dc555d15fb4512ee16a16398b3098a97819fab50c81f3","tests/d2s_test.rs":"39014777edd6e3231095186174c4ef341fd9c12ecc5510765761713b6cac3bb4","tests/exhaustive.rs":"f475ed9008a2cd86ce95abb577a4b01e9fed23fc16f7e217ccffb3b834005fa0","tests/f2s_test.rs":"10940f005e73a42bb106ff498e7a6cc4665d04d82829fef8dc7d0eb36f574e6f","tests/macros/mod.rs":"8e90a674b3960f9516cb38f4eea0e0981ff902c3b33572ebdb6c5528d3ffa72c","tests/s2d_test.rs":"75c3a1044881718db65e05f25c9f6e1d005392dddb2e8dafb799668bb6a9a5c3","tests/s2f_test.rs":"1ec06646cb65229bfe866ec913901a0d8d736668f30b812fc4b00136a43f5142"},"package":"4501abdff3ae82a1c1b477a17252eb69cee9e66eb915c1abaa4f44d873df9f09"} \ No newline at end of file
+{"files":{"Cargo.lock":"aa8c35b390605eedd4a9c43615ca68a001cfdc5055ac2f09bec221a8f38c67ab","Cargo.toml":"490b9d1a775582ca920bdf5a9b2aa278425e72d240b0464c076b6374ad117137","LICENSE-APACHE":"c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4","LICENSE-BOOST":"c9bff75738922193e67fa726fa225535870d2aa1059f91452c411736284ad566","README.md":"df6a7a024b604ad98dd7603ad261150ef73a94a9de691bd5d2510e12a200021a","benches/bench.rs":"703521c8cb9c6959ee305776a9971d24754b6fff5c1737741be04f956a3692e8","examples/upstream_benchmark.rs":"f702d3598a8fac59134a8058ebf74ba90163b1f23ebbd6c5978a7bd8a888d357","src/buffer/mod.rs":"c5adf9aa037271916e78c61c9fd98e3230a0fed1fca15694d4d57166fa697125","src/common.rs":"cae347e97fc30c50a964f80425e8c3e69ece2b8ab81f9b81b9baa7fcec64a001","src/d2s.rs":"83f821f17fd8d2cf72bcc47cc8c603ab24f2377db6cd0f08638031716f8dc17c","src/d2s_full_table.rs":"9b0186acbc6d65dc55c17e16125be707a2bfb920d22b35d33234b4cc38566a36","src/d2s_intrinsics.rs":"658d00a64ce2aca7f0780a1acc5939167e4a66d836b51c46de1047820992fec1","src/d2s_small_table.rs":"7b25cfbf0793d0662d83f5d92a9f880295652db9979b5acf702b313359996508","src/digit_table.rs":"02351ca54cb8cb3679f635115dd094f32fd91750e9f66103c1ee9ec3db507072","src/f2s.rs":"55320c2301680d8be3a908620cccd9d103b0cd3ad7a7d3378589e274ffc2587b","src/f2s_intrinsics.rs":"97bab98093838e30c60f5135f54f5ccb039ff7d9f35553ac8e74437743ca47e2","src/lib.rs":"fb2239e04c0524db49077adbd6128b622f42239c9b9362289861487c6a530f9a","src/parse.rs":"7f8aa7e007caf5dcb03abdc4238157724bb742d0823a3b8a01646fa1f1129154","src/pretty/exponent.rs":"6c9aa1c707c567ae338647056e37557a94e5120781ee9f6f64e9c7071ffb50d0","src/pretty/mantissa.rs":"5e8d0a6bfdfd04e599a9fc8aefd638e3288651279e870e7cd44820717c3b6438","src/pretty/mod.rs":"731798246d414ca54df739c212f1cb8e05991a0472a7a1c28771e24d7a1cf09b","src/s2d.rs":"2f572603eedaa9efbe864105999a1ceac8aa4ff4e1d2fbd96127692460194d16","src/s2f.rs":"6ae7430fba61f59aa6010d446f5c1043974b6fadb8e4c75ce2ad56f73ee48f4a","tests/common_test.rs":"275184cf366f80c11e5f33c2d53065a073e20d81bf71ca70478c89e47fb8da36","tests/d2s_table_test.rs":"54b3a7d40aa9bec03e9dc555d15fb4512ee16a16398b3098a97819fab50c81f3","tests/d2s_test.rs":"39014777edd6e3231095186174c4ef341fd9c12ecc5510765761713b6cac3bb4","tests/exhaustive.rs":"f475ed9008a2cd86ce95abb577a4b01e9fed23fc16f7e217ccffb3b834005fa0","tests/f2s_test.rs":"10940f005e73a42bb106ff498e7a6cc4665d04d82829fef8dc7d0eb36f574e6f","tests/macros/mod.rs":"8e90a674b3960f9516cb38f4eea0e0981ff902c3b33572ebdb6c5528d3ffa72c","tests/s2d_test.rs":"75c3a1044881718db65e05f25c9f6e1d005392dddb2e8dafb799668bb6a9a5c3","tests/s2f_test.rs":"1ec06646cb65229bfe866ec913901a0d8d736668f30b812fc4b00136a43f5142"},"package":"7b4b9743ed687d4b4bcedf9ff5eaa7398495ae14e61cba0a295704edbc7decde"} \ No newline at end of file
diff --git a/vendor/ryu/Cargo.lock b/vendor/ryu/Cargo.lock
index 0b0ca716a..34a891d55 100644
--- a/vendor/ryu/Cargo.lock
+++ b/vendor/ryu/Cargo.lock
@@ -30,15 +30,15 @@ dependencies = [
[[package]]
name = "libc"
-version = "0.2.126"
+version = "0.2.134"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "349d5a591cd28b49e1d1037471617a32ddcda5731b99419008085f72d5a53836"
+checksum = "329c933548736bc49fd575ee68c89e8be4d260064184389a5b77517cddd99ffb"
[[package]]
name = "no-panic"
-version = "0.1.15"
+version = "0.1.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3b884e0f4a7a80970539a7757902675ce503d98615f5b564564b4bd0236f7f36"
+checksum = "12f10d4b6dcf2138f0fc171f4cc8f49517cc71ac57e29aa061c61aa57ec2dffc"
dependencies = [
"proc-macro2",
"quote",
@@ -63,18 +63,18 @@ checksum = "eb9f9e6e233e5c4a35559a617bf40a4ec447db2e84c20b55a6f83167b7e57872"
[[package]]
name = "proc-macro2"
-version = "1.0.42"
+version = "1.0.46"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c278e965f1d8cf32d6e0e96de3d3e79712178ae67986d9cf9151f51e95aac89b"
+checksum = "94e2ef8dbfc347b10c094890f778ee2e36ca9bb4262e86dc99cd217e35f3470b"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
-version = "1.0.20"
+version = "1.0.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3bcdf212e9776fbcb2d23ab029360416bb1706b1aea2d1a5ba002727cbcab804"
+checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179"
dependencies = [
"proc-macro2",
]
@@ -102,9 +102,9 @@ dependencies = [
[[package]]
name = "rand_core"
-version = "0.6.3"
+version = "0.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7"
+checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
dependencies = [
"getrandom",
]
@@ -120,7 +120,7 @@ dependencies = [
[[package]]
name = "ryu"
-version = "1.0.11"
+version = "1.0.12"
dependencies = [
"no-panic",
"num_cpus",
@@ -130,9 +130,9 @@ dependencies = [
[[package]]
name = "syn"
-version = "1.0.98"
+version = "1.0.102"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c50aef8a904de4c23c788f104b7dddc7d6f79c647c7c8ce4cc8f73eb0ca773dd"
+checksum = "3fcd952facd492f9be3ef0d0b7032a6e442ee9b361d4acc2b1d0c4aaa5f613a1"
dependencies = [
"proc-macro2",
"quote",
@@ -141,9 +141,9 @@ dependencies = [
[[package]]
name = "unicode-ident"
-version = "1.0.2"
+version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "15c61ba63f9235225a22310255a29b806b907c9b8c964bcbd0a2c70f3f2deea7"
+checksum = "dcc811dc4066ac62f84f11307873c4850cb653bfa9b1719cee2bd2204a4bc5dd"
[[package]]
name = "wasi"
diff --git a/vendor/ryu/Cargo.toml b/vendor/ryu/Cargo.toml
index 77181c0f6..0f37ef82b 100644
--- a/vendor/ryu/Cargo.toml
+++ b/vendor/ryu/Cargo.toml
@@ -13,7 +13,7 @@
edition = "2018"
rust-version = "1.36"
name = "ryu"
-version = "1.0.11"
+version = "1.0.12"
authors = ["David Tolnay <dtolnay@gmail.com>"]
exclude = [
"performance.png",
diff --git a/vendor/ryu/README.md b/vendor/ryu/README.md
index 1e2248978..0abd71fe9 100644
--- a/vendor/ryu/README.md
+++ b/vendor/ryu/README.md
@@ -3,7 +3,7 @@
[<img alt="github" src="https://img.shields.io/badge/github-dtolnay/ryu-8da0cb?style=for-the-badge&labelColor=555555&logo=github" height="20">](https://github.com/dtolnay/ryu)
[<img alt="crates.io" src="https://img.shields.io/crates/v/ryu.svg?style=for-the-badge&color=fc8d62&logo=rust" height="20">](https://crates.io/crates/ryu)
[<img alt="docs.rs" src="https://img.shields.io/badge/docs.rs-ryu-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs" height="20">](https://docs.rs/ryu)
-[<img alt="build status" src="https://img.shields.io/github/workflow/status/dtolnay/ryu/CI/master?style=for-the-badge" height="20">](https://github.com/dtolnay/ryu/actions?query=branch%3Amaster)
+[<img alt="build status" src="https://img.shields.io/github/actions/workflow/status/dtolnay/ryu/ci.yml?branch=master&style=for-the-badge" height="20">](https://github.com/dtolnay/ryu/actions?query=branch%3Amaster)
Pure Rust implementation of Ryū, an algorithm to quickly convert floating point
numbers to decimal strings.
diff --git a/vendor/ryu/src/lib.rs b/vendor/ryu/src/lib.rs
index f69638ef1..59e0dcd0c 100644
--- a/vendor/ryu/src/lib.rs
+++ b/vendor/ryu/src/lib.rs
@@ -81,7 +81,7 @@
//! notation.
#![no_std]
-#![doc(html_root_url = "https://docs.rs/ryu/1.0.11")]
+#![doc(html_root_url = "https://docs.rs/ryu/1.0.12")]
#![allow(
clippy::cast_lossless,
clippy::cast_possible_truncation,
diff --git a/vendor/ryu/src/pretty/mantissa.rs b/vendor/ryu/src/pretty/mantissa.rs
index 150c79c12..0149f5cff 100644
--- a/vendor/ryu/src/pretty/mantissa.rs
+++ b/vendor/ryu/src/pretty/mantissa.rs
@@ -43,7 +43,7 @@ pub unsafe fn write_mantissa_long(mut output: u64, mut result: *mut u8) {
#[cfg_attr(feature = "no-panic", inline)]
pub unsafe fn write_mantissa(mut output: u32, mut result: *mut u8) {
while output >= 10_000 {
- let c = (output - 10_000 * (output / 10_000)) as u32;
+ let c = output - 10_000 * (output / 10_000);
output /= 10_000;
let c0 = (c % 100) << 1;
let c1 = (c / 100) << 1;
@@ -60,7 +60,7 @@ pub unsafe fn write_mantissa(mut output: u32, mut result: *mut u8) {
result = result.offset(-4);
}
if output >= 100 {
- let c = ((output % 100) << 1) as u32;
+ let c = (output % 100) << 1;
output /= 100;
ptr::copy_nonoverlapping(
DIGIT_TABLE.as_ptr().offset(c as isize),
@@ -70,7 +70,7 @@ pub unsafe fn write_mantissa(mut output: u32, mut result: *mut u8) {
result = result.offset(-2);
}
if output >= 10 {
- let c = (output << 1) as u32;
+ let c = output << 1;
ptr::copy_nonoverlapping(
DIGIT_TABLE.as_ptr().offset(c as isize),
result.offset(-2),
diff --git a/vendor/ryu/src/pretty/mod.rs b/vendor/ryu/src/pretty/mod.rs
index b196a11b4..da49e863e 100644
--- a/vendor/ryu/src/pretty/mod.rs
+++ b/vendor/ryu/src/pretty/mod.rs
@@ -160,8 +160,7 @@ pub unsafe fn format32(f: f32, result: *mut u8) -> usize {
let bits = f.to_bits();
let sign = ((bits >> (FLOAT_MANTISSA_BITS + FLOAT_EXPONENT_BITS)) & 1) != 0;
let ieee_mantissa = bits & ((1u32 << FLOAT_MANTISSA_BITS) - 1);
- let ieee_exponent =
- ((bits >> FLOAT_MANTISSA_BITS) & ((1u32 << FLOAT_EXPONENT_BITS) - 1)) as u32;
+ let ieee_exponent = (bits >> FLOAT_MANTISSA_BITS) & ((1u32 << FLOAT_EXPONENT_BITS) - 1);
let mut index = 0isize;
if sign {
diff --git a/vendor/ryu/src/s2f.rs b/vendor/ryu/src/s2f.rs
index 37c541784..959352815 100644
--- a/vendor/ryu/src/s2f.rs
+++ b/vendor/ryu/src/s2f.rs
@@ -220,7 +220,7 @@ pub fn s2f(buffer: &[u8]) -> Result<f32, Error> {
// for overflow here.
ieee_e2 += 1;
}
- let ieee = ((((signed_m as u32) << f2s::FLOAT_EXPONENT_BITS) | ieee_e2 as u32)
+ let ieee = ((((signed_m as u32) << f2s::FLOAT_EXPONENT_BITS) | ieee_e2)
<< f2s::FLOAT_MANTISSA_BITS)
| ieee_m2;
Ok(f32::from_bits(ieee))
diff --git a/vendor/scoped-tls/.cargo-checksum.json b/vendor/scoped-tls/.cargo-checksum.json
index a82babc4a..ab1c98519 100644
--- a/vendor/scoped-tls/.cargo-checksum.json
+++ b/vendor/scoped-tls/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.toml":"9b7a3fdb45ac3847229254d53222ed393d5e5426e7d126dc3a1adfeb35b8b438","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"5b42f802520064732b4097d9224cddffb6ad397536347bc996c95f433d893e85","appveyor.yml":"da991211b72fa6f231af7adb84c9fb72f5a9131d1c0a3d47b8ceffe5a82c8542","src/lib.rs":"03cafc877737e72e8bc6fed874c5b1154ec7a0579c5f875f1da66df54b642864"},"package":"ea6a9290e3c9cf0f18145ef7ffa62d68ee0bf5fcd651017e586dc7fd5da448c2"} \ No newline at end of file
+{"files":{"Cargo.toml":"34fd266d7e61276cf49fb2a9ea9887331ef4e72595fed99b7d8735c86cd13631","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"0cf015ca6dd078c0f0e607508a4190c0a1328990069e6303c4709530960aab08","src/lib.rs":"ca804e29dde7dc843ab24f7f70c694eb43fac7c62194d85a58c8062caa0a84f4"},"package":"e1cf6437eb19a8f4a6cc0f7dca544973b0b78843adbfeb3683d1a94a0024a294"} \ No newline at end of file
diff --git a/vendor/scoped-tls/Cargo.toml b/vendor/scoped-tls/Cargo.toml
index 70c386117..08ec7b613 100644
--- a/vendor/scoped-tls/Cargo.toml
+++ b/vendor/scoped-tls/Cargo.toml
@@ -3,18 +3,22 @@
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
-# to registry (e.g. crates.io) dependencies
+# to registry (e.g., crates.io) dependencies.
#
-# If you believe there's an error in this file please file an
-# issue against the rust-lang/cargo repository. If you're
-# editing this file be aware that the upstream Cargo.toml
-# will likely look very different (and much more reasonable)
+# If you are reading this file be aware that the original Cargo.toml
+# will likely look very different (and much more reasonable).
+# See Cargo.toml.orig for the original contents.
[package]
+rust-version = "1.59"
name = "scoped-tls"
-version = "1.0.0"
+version = "1.0.1"
authors = ["Alex Crichton <alex@alexcrichton.com>"]
-description = "Library implementation of the standard library's old `scoped_thread_local!`\nmacro for providing scoped access to thread local storage (TLS) so any type can\nbe stored into TLS.\n"
+description = """
+Library implementation of the standard library's old `scoped_thread_local!`
+macro for providing scoped access to thread local storage (TLS) so any type can
+be stored into TLS.
+"""
homepage = "https://github.com/alexcrichton/scoped-tls"
documentation = "https://docs.rs/scoped-tls"
readme = "README.md"
diff --git a/vendor/scoped-tls/README.md b/vendor/scoped-tls/README.md
index 88bd62401..36e6480e2 100644
--- a/vendor/scoped-tls/README.md
+++ b/vendor/scoped-tls/README.md
@@ -11,7 +11,7 @@ as a library implementation on crates.io.
```toml
# Cargo.toml
[dependencies]
-scoped-tls = "0.1"
+scoped-tls = "1.0"
```
# License
diff --git a/vendor/scoped-tls/appveyor.yml b/vendor/scoped-tls/appveyor.yml
deleted file mode 100644
index 6a1b8dc19..000000000
--- a/vendor/scoped-tls/appveyor.yml
+++ /dev/null
@@ -1,17 +0,0 @@
-environment:
- matrix:
- - TARGET: x86_64-pc-windows-msvc
- - TARGET: i686-pc-windows-msvc
- - TARGET: i686-pc-windows-gnu
-install:
- - ps: Start-FileDownload "https://static.rust-lang.org/dist/rust-nightly-${env:TARGET}.exe"
- - rust-nightly-%TARGET%.exe /VERYSILENT /NORESTART /DIR="C:\Program Files (x86)\Rust"
- - SET PATH=%PATH%;C:\Program Files (x86)\Rust\bin
- - SET PATH=%PATH%;C:\MinGW\bin
- - rustc -V
- - cargo -V
-
-build: false
-
-test_script:
- - cargo test --verbose
diff --git a/vendor/scoped-tls/src/lib.rs b/vendor/scoped-tls/src/lib.rs
index 6fbcf11e3..d4567c2bc 100644
--- a/vendor/scoped-tls/src/lib.rs
+++ b/vendor/scoped-tls/src/lib.rs
@@ -56,8 +56,8 @@ macro_rules! scoped_thread_local {
$(#[$attrs])*
$vis static $name: $crate::ScopedKey<$ty> = $crate::ScopedKey {
inner: {
- thread_local!(static FOO: ::std::cell::Cell<usize> = {
- ::std::cell::Cell::new(0)
+ ::std::thread_local!(static FOO: ::std::cell::Cell<*const ()> = const {
+ ::std::cell::Cell::new(::std::ptr::null())
});
&FOO
},
@@ -75,7 +75,7 @@ macro_rules! scoped_thread_local {
/// their contents.
pub struct ScopedKey<T> {
#[doc(hidden)]
- pub inner: &'static LocalKey<Cell<usize>>,
+ pub inner: &'static LocalKey<Cell<*const ()>>,
#[doc(hidden)]
pub _marker: marker::PhantomData<T>,
}
@@ -86,8 +86,8 @@ impl<T> ScopedKey<T> {
/// Inserts a value into this scoped thread local storage slot for a
/// duration of a closure.
///
- /// While `cb` is running, the value `t` will be returned by `get` unless
- /// this function is called recursively inside of `cb`.
+ /// While `f` is running, the value `t` will be returned by `get` unless
+ /// this function is called recursively inside of `f`.
///
/// Upon return, this function will restore the previous value, if any
/// was available.
@@ -120,8 +120,8 @@ impl<T> ScopedKey<T> {
where F: FnOnce() -> R
{
struct Reset {
- key: &'static LocalKey<Cell<usize>>,
- val: usize,
+ key: &'static LocalKey<Cell<*const ()>>,
+ val: *const (),
}
impl Drop for Reset {
fn drop(&mut self) {
@@ -130,7 +130,7 @@ impl<T> ScopedKey<T> {
}
let prev = self.inner.with(|c| {
let prev = c.get();
- c.set(t as *const T as usize);
+ c.set(t as *const T as *const ());
prev
});
let _reset = Reset { key: self.inner, val: prev };
@@ -165,8 +165,8 @@ impl<T> ScopedKey<T> {
where F: FnOnce(&T) -> R
{
let val = self.inner.with(|c| c.get());
- assert!(val != 0, "cannot access a scoped thread local \
- variable without calling `set` first");
+ assert!(!val.is_null(), "cannot access a scoped thread local \
+ variable without calling `set` first");
unsafe {
f(&*(val as *const T))
}
@@ -174,7 +174,7 @@ impl<T> ScopedKey<T> {
/// Test whether this TLS key has been `set` for the current thread.
pub fn is_set(&'static self) -> bool {
- self.inner.with(|c| c.get() != 0)
+ self.inner.with(|c| !c.get().is_null())
}
}
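Editorial note: the scoped-tls change above is internal (the slot moves from `Cell<usize>` to `Cell<*const ()>` and becomes const-initialized); the public API it preserves looks roughly like the sketch below, which uses the crate's documented `set`/`with`/`is_set` methods with an arbitrary stored type.

```rust
// Minimal usage sketch of scoped-tls's public API, which the internal change
// above (usize -> *const ()) does not alter.
use scoped_tls::scoped_thread_local;

scoped_thread_local!(static CONFIG: String);

fn main() {
    let value = String::from("hello");
    // `set` stores a pointer to `value` for the duration of the closure.
    CONFIG.set(&value, || {
        assert!(CONFIG.is_set());
        // `with` hands out a shared reference to the stored value.
        CONFIG.with(|v| println!("config = {v}"));
    });
    // Outside the closure the slot is cleared again, hence the null check
    // introduced above.
    assert!(!CONFIG.is_set());
}
```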
diff --git a/vendor/semver/.cargo-checksum.json b/vendor/semver/.cargo-checksum.json
index afd692226..3e0993c1a 100644
--- a/vendor/semver/.cargo-checksum.json
+++ b/vendor/semver/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.toml":"2ef1bca69e829c310ba591cf769d26b9599343b5e8277607c810fcc27b08b507","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"83e92a83348171f60d768651091803e69e4cb7ea3190cdfc45918d4637f38e1e","benches/parse.rs":"6531f66f80ce2fc83878f9bf84f94c42e96f1e709466f2b88be8d95a3cec1511","build.rs":"eedfc19afa205955347175916974cdad121b55cb940e40c61931e5e7629f0e65","src/backport.rs":"c1335129f4969ac887dcf4cdee7592d0854163ee3023c14649d1fba2ab5f08ba","src/display.rs":"9ba42f7a6579aa9c7dd72f2380036f5c9664592f3eacd09ea25cef291a3e64e5","src/error.rs":"3bb489f4a29f38d93370e64ae8d6e4e9b451a055cd7d392b6aeacab7eb3e1953","src/eval.rs":"b7e7ec976051b9f87ddf5cfdbaad64654d98d86ae0763f7d88b14eeaeac6013c","src/identifier.rs":"2fcc23896070ed0a658282a5c7053f1439a55c9e2ea927703408fea8321bfbee","src/impls.rs":"79b5a2ac6ca3d4cb46adfb1494756079f53bef780dd81c3a8d3adf86f91395c8","src/lib.rs":"4118a9d0aedfdbfb930fac79f6aa03771b7a3348b7fe2b4d3d02530dc8b6f387","src/parse.rs":"ffbb84081f0f66ec47b752a1e32f1bea5f206ca84f464b99d0497451305a92f8","src/serde.rs":"e2a9b9dc3cd2cccc250eaffad049de418ef791bf8c4a34111a48f068353e0a37","tests/node/mod.rs":"2710d9b8daace2038b66db0f8f4cc522dee938e7cbc42d7739c31995343c32f4","tests/test_autotrait.rs":"070500c32ceee14a8a0110c04a01f98278b24614a0aec8c382dcea3da0343f58","tests/test_identifier.rs":"6c3da46c73df210527b60f1069131b15e2c65eb7b5d11793940d00cf66812f4d","tests/test_version.rs":"09e37c3df162205acf3683d1c760a6001e34e1c709fd4a1a265d82450e340003","tests/test_version_req.rs":"b6eea0258cc3b6d567a9f6c42693a97316345083495236c47e85374fd45f7cf0","tests/util/mod.rs":"db61c2cd86af864d8be4f2a3d5f25c86d7712201cc6ab47b715facf5f7f275b7"},"package":"e25dfac463d778e353db5be2449d1cce89bd6fd23c9f1ea21310ce6e5a1b29c4"} \ No newline at end of file
+{"files":{"Cargo.toml":"84eaac27f969839e684c9cdf124748de0a8f0b0876a1eacc31cfaa105f35540f","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"de1a03443ab8f147676199856a975ec00f3f7334fc5d5d5e056ec8f3fcb61dd5","benches/parse.rs":"6531f66f80ce2fc83878f9bf84f94c42e96f1e709466f2b88be8d95a3cec1511","build.rs":"9a3d42e37b665745044b5d91c6e02dd458152e336a7013654972f4a1a0b562d9","src/backport.rs":"66db55d15d0e2808bffe4cde7cd1d99bda999b26cbe40bb6b5e43b94f9b631d2","src/display.rs":"9ba42f7a6579aa9c7dd72f2380036f5c9664592f3eacd09ea25cef291a3e64e5","src/error.rs":"3bb489f4a29f38d93370e64ae8d6e4e9b451a055cd7d392b6aeacab7eb3e1953","src/eval.rs":"b7e7ec976051b9f87ddf5cfdbaad64654d98d86ae0763f7d88b14eeaeac6013c","src/identifier.rs":"459725383cbd0e2d769aa947decd1f031bdc8732339783ad24eb2b44f0f5d040","src/impls.rs":"79b5a2ac6ca3d4cb46adfb1494756079f53bef780dd81c3a8d3adf86f91395c8","src/lib.rs":"cc912c719047aa679429069a26679f681741b91ff66d847f60ddc519262d588c","src/parse.rs":"ffbb84081f0f66ec47b752a1e32f1bea5f206ca84f464b99d0497451305a92f8","src/serde.rs":"e2a9b9dc3cd2cccc250eaffad049de418ef791bf8c4a34111a48f068353e0a37","tests/node/mod.rs":"2710d9b8daace2038b66db0f8f4cc522dee938e7cbc42d7739c31995343c32f4","tests/test_autotrait.rs":"070500c32ceee14a8a0110c04a01f98278b24614a0aec8c382dcea3da0343f58","tests/test_identifier.rs":"6c3da46c73df210527b60f1069131b15e2c65eb7b5d11793940d00cf66812f4d","tests/test_version.rs":"09e37c3df162205acf3683d1c760a6001e34e1c709fd4a1a265d82450e340003","tests/test_version_req.rs":"b6eea0258cc3b6d567a9f6c42693a97316345083495236c47e85374fd45f7cf0","tests/util/mod.rs":"db61c2cd86af864d8be4f2a3d5f25c86d7712201cc6ab47b715facf5f7f275b7"},"package":"58bc9567378fc7690d6b2addae4e60ac2eeea07becb2c64b9f218b53865cba2a"} \ No newline at end of file
diff --git a/vendor/semver/Cargo.toml b/vendor/semver/Cargo.toml
index 68aa0b972..e0bfea20f 100644
--- a/vendor/semver/Cargo.toml
+++ b/vendor/semver/Cargo.toml
@@ -13,7 +13,7 @@
edition = "2018"
rust-version = "1.31"
name = "semver"
-version = "1.0.14"
+version = "1.0.16"
authors = ["David Tolnay <dtolnay@gmail.com>"]
description = "Parser and evaluator for Cargo's flavor of Semantic Versioning"
documentation = "https://docs.rs/semver"
@@ -31,10 +31,11 @@ targets = ["x86_64-unknown-linux-gnu"]
rustdoc-args = [
"--cfg",
"doc_cfg",
- "--cfg",
- "semver_rustdoc_workaround",
]
+[lib]
+doc-scrape-examples = false
+
[dependencies.serde]
version = "1.0"
optional = true
diff --git a/vendor/semver/README.md b/vendor/semver/README.md
index 84d227ea8..a9a1cb88b 100644
--- a/vendor/semver/README.md
+++ b/vendor/semver/README.md
@@ -4,7 +4,7 @@ semver
[<img alt="github" src="https://img.shields.io/badge/github-dtolnay/semver-8da0cb?style=for-the-badge&labelColor=555555&logo=github" height="20">](https://github.com/dtolnay/semver)
[<img alt="crates.io" src="https://img.shields.io/crates/v/semver.svg?style=for-the-badge&color=fc8d62&logo=rust" height="20">](https://crates.io/crates/semver)
[<img alt="docs.rs" src="https://img.shields.io/badge/docs.rs-semver-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs" height="20">](https://docs.rs/semver)
-[<img alt="build status" src="https://img.shields.io/github/workflow/status/dtolnay/semver/CI/master?style=for-the-badge" height="20">](https://github.com/dtolnay/semver/actions?query=branch%3Amaster)
+[<img alt="build status" src="https://img.shields.io/github/actions/workflow/status/dtolnay/semver/ci.yml?branch=master&style=for-the-badge" height="20">](https://github.com/dtolnay/semver/actions?query=branch%3Amaster)
A parser and evaluator for Cargo's flavor of Semantic Versioning.
diff --git a/vendor/semver/build.rs b/vendor/semver/build.rs
index 2bf4418c1..81ad970d8 100644
--- a/vendor/semver/build.rs
+++ b/vendor/semver/build.rs
@@ -3,6 +3,8 @@ use std::process::Command;
use std::str;
fn main() {
+ println!("cargo:rerun-if-changed=build.rs");
+
let compiler = match rustc_minor_version() {
Some(compiler) => compiler,
None => return,
diff --git a/vendor/semver/src/backport.rs b/vendor/semver/src/backport.rs
index 4b67f56a5..b5e1d02be 100644
--- a/vendor/semver/src/backport.rs
+++ b/vendor/semver/src/backport.rs
@@ -18,43 +18,6 @@ pub(crate) use crate::alloc::vec::Vec;
#[cfg(no_alloc_crate)] // rustc <1.36
pub(crate) mod alloc {
+ pub use std::alloc;
pub use std::vec;
-
- pub mod alloc {
- use std::mem;
- use std::process;
-
- #[derive(Copy, Clone)]
- pub struct Layout {
- size: usize,
- }
-
- impl Layout {
- pub unsafe fn from_size_align_unchecked(size: usize, align: usize) -> Self {
- assert_eq!(align, 2);
- Layout { size }
- }
- }
-
- pub unsafe fn alloc(layout: Layout) -> *mut u8 {
- let len_u16 = (layout.size + 1) / 2;
- let mut vec = Vec::new();
- vec.reserve_exact(len_u16);
- let ptr: *mut u16 = vec.as_mut_ptr();
- mem::forget(vec);
- ptr as *mut u8
- }
-
- pub unsafe fn dealloc(ptr: *mut u8, layout: Layout) {
- let len_u16 = (layout.size + 1) / 2;
- unsafe { Vec::from_raw_parts(ptr as *mut u16, 0, len_u16) };
- }
-
- pub fn handle_alloc_error(_layout: Layout) -> ! {
- // This is unreachable because the alloc implementation above never
- // returns null; Vec::reserve_exact would already have called std's
- // internal handle_alloc_error.
- process::abort();
- }
- }
}
diff --git a/vendor/semver/src/identifier.rs b/vendor/semver/src/identifier.rs
index fbe1df020..0273ae62a 100644
--- a/vendor/semver/src/identifier.rs
+++ b/vendor/semver/src/identifier.rs
@@ -67,11 +67,13 @@
// allows size_of::<Version>() == size_of::<Option<Version>>().
use crate::alloc::alloc::{alloc, dealloc, handle_alloc_error, Layout};
+use core::isize;
use core::mem;
use core::num::{NonZeroU64, NonZeroUsize};
use core::ptr::{self, NonNull};
use core::slice;
use core::str;
+use core::usize;
const PTR_BYTES: usize = mem::size_of::<NonNull<u8>>();
@@ -103,6 +105,7 @@ impl Identifier {
// SAFETY: string must be ASCII and not contain \0 bytes.
pub(crate) unsafe fn new_unchecked(string: &str) -> Self {
let len = string.len();
+ debug_assert!(len <= isize::MAX as usize);
match len as u64 {
0 => Self::empty(),
1..=8 => {
@@ -118,8 +121,21 @@ impl Identifier {
// SAFETY: len is in a range that does not contain 0.
let size = bytes_for_varint(unsafe { NonZeroUsize::new_unchecked(len) }) + len;
let align = 2;
+ // On 32-bit and 16-bit architecture, check for size overflowing
+ // isize::MAX. Making an allocation request bigger than this to
+ // the allocator is considered UB. All allocations (including
+ // static ones) are limited to isize::MAX so we're guaranteed
+ // len <= isize::MAX, and we know bytes_for_varint(len) <= 5
+ // because 128**5 > isize::MAX, which means the only problem
+ // that can arise is when isize::MAX - 5 <= len <= isize::MAX.
+ // This is pretty much guaranteed to be malicious input so we
+ // don't need to care about returning a good error message.
+ if mem::size_of::<usize>() < 8 {
+ let max_alloc = usize::MAX / 2 - align;
+ assert!(size <= max_alloc);
+ }
// SAFETY: align is not zero, align is a power of two, and
- // rounding size up to align does not overflow usize::MAX.
+ // rounding size up to align does not overflow isize::MAX.
let layout = unsafe { Layout::from_size_align_unchecked(size, align) };
// SAFETY: layout's size is nonzero.
let ptr = unsafe { alloc(layout) };
@@ -200,7 +216,7 @@ impl Clone for Identifier {
let size = bytes_for_varint(len) + len.get();
let align = 2;
// SAFETY: align is not zero, align is a power of two, and rounding
- // size up to align does not overflow usize::MAX. This is just
+ // size up to align does not overflow isize::MAX. This is just
// duplicating a previous allocation where all of these guarantees
// were already made.
let layout = unsafe { Layout::from_size_align_unchecked(size, align) };
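Editorial note: the assertion added above guards 32-bit and 16-bit targets against allocation requests whose size, once rounded up to the alignment, would exceed `isize::MAX`. Below is a standalone restatement of the same arithmetic with `align = 2` hard-coded as in the vendored code; it is illustrative only and not part of semver's API.

```rust
// Illustrative re-statement of the guard above. On 64-bit targets the check
// is skipped because len <= isize::MAX and bytes_for_varint(len) <= 5 already
// keep `size` far below the limit.
fn size_fits_allocator(size: usize) -> bool {
    const ALIGN: usize = 2;
    if std::mem::size_of::<usize>() < 8 {
        // usize::MAX / 2 equals isize::MAX, so any size at or below
        // `max_alloc` still stays under isize::MAX after rounding up to ALIGN.
        let max_alloc = usize::MAX / 2 - ALIGN;
        size <= max_alloc
    } else {
        true
    }
}

fn main() {
    assert!(size_fits_allocator(1_024));
}
```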
diff --git a/vendor/semver/src/lib.rs b/vendor/semver/src/lib.rs
index ca4d1119c..32ed96d1c 100644
--- a/vendor/semver/src/lib.rs
+++ b/vendor/semver/src/lib.rs
@@ -60,7 +60,7 @@
//!
//! [Specifying Dependencies]: https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html
-#![doc(html_root_url = "https://docs.rs/semver/1.0.14")]
+#![doc(html_root_url = "https://docs.rs/semver/1.0.16")]
#![cfg_attr(doc_cfg, feature(doc_cfg))]
#![cfg_attr(all(not(feature = "std"), not(no_alloc_crate)), no_std)]
#![cfg_attr(not(no_unsafe_op_in_unsafe_fn_lint), deny(unsafe_op_in_unsafe_fn))]
@@ -497,11 +497,6 @@ impl Comparator {
}
impl Prerelease {
- // Work around https://github.com/rust-lang/rust/issues/97933
- #[cfg(all(doc, semver_rustdoc_workaround))]
- pub const EMPTY: Self = "";
-
- #[cfg(not(all(doc, semver_rustdoc_workaround)))]
pub const EMPTY: Self = Prerelease {
identifier: Identifier::empty(),
};
@@ -520,11 +515,6 @@ impl Prerelease {
}
impl BuildMetadata {
- // Work around https://github.com/rust-lang/rust/issues/97933
- #[cfg(all(doc, semver_rustdoc_workaround))]
- pub const EMPTY: Self = "";
-
- #[cfg(not(all(doc, semver_rustdoc_workaround)))]
pub const EMPTY: Self = BuildMetadata {
identifier: Identifier::empty(),
};
diff --git a/vendor/serde/.cargo-checksum.json b/vendor/serde/.cargo-checksum.json
index 0aefd0d73..812d0d93c 100644
--- a/vendor/serde/.cargo-checksum.json
+++ b/vendor/serde/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.toml":"7726b0fc6476d58de05dd5342dd517da6968c3b774cb2443b09f8c1e0926d347","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"7049b88cb7366be0c8e593f3ffffa313803a5b382f35686c542b4a0da3b291f3","build.rs":"89cfc904243b611a8a2fc1c8724d6f1c2b61c166ca81676b903ddf80b8ff1c10","crates-io.md":"5c42406936cf9af6d4cd7fe0ac730c609e82fd3f15a54549518c72d0ded70c29","src/de/format.rs":"4b466a6a7f0070e884d14457759671c6ad7394fe9603708b7151ef6159258146","src/de/ignored_any.rs":"967184c86707c99b77a1cfb218dfc823f560fae227b6635aee6af19ee82962f5","src/de/impls.rs":"363f9f9400dcaf46176ab5c0a3592996abbed0618fddb51b090a7617a78eb5a0","src/de/mod.rs":"1dbddc2870da5cae0a725c7aacca83f0dabba55895f17849f2d5da254b1ebeb6","src/de/seed.rs":"e8cf0233afe0af5b8fb9e4c94f301c92729c5ba417280af9e2201b732e374a72","src/de/utf8.rs":"f17524ee0af98ec3abcfd7d0b812fbd1033263bd8e2ce2f57c1e1999ce153558","src/de/value.rs":"aa5055923e2c3fd1c1f1abdfb380a1d63d07cf4d602ef62d2df2b7da33dd8c81","src/integer128.rs":"ca49591abde2d8c4f582174533fee28f0fa9139e5d71bf22b25a6b175f8abccc","src/lib.rs":"66d086f0da93675b6d467814d593e03521007f5e319bcfdb1eb35eabb5974fa5","src/macros.rs":"3d695a51f0a07f9f719dcb5620012c21a1b084c06a6283349cabf574ceba8123","src/private/de.rs":"cc6f7fa8d0345db5918bcea549cd302a1159a7e4c5fe3e10027fbe57517ceb49","src/private/doc.rs":"e9801a43c3088fccd5f1fac76416698f948e65b647024aa9da17d673e1e8c217","src/private/mod.rs":"37b204775e572396515477b393ce793b2579de48e5971e6f596ba3723c489fd6","src/private/ser.rs":"087cf1141d1053f932f51b362ed08fd1fec43b4dbe5504bedc0c183fc8ce05e6","src/private/size_hint.rs":"605521227e9ba3100fbb9d5ea7fd5853385097c35015ce6908bd5f1ea20d59ad","src/ser/fmt.rs":"7827ed07fd8897e6324f75625ba0c926a4c4e7ec2914cd067391ce54d942ac7b","src/ser/impls.rs":"97288074fb0ff40f4178359a37879a996c7d0e5d5a7f173b9203f885d90d3ba0","src/ser/impossible.rs":"db17913522c1c27389c5a085113911353b9813c1b116518681362e7c8b692c3a","src/ser/mod.rs":"e5008f26bd6100f52c7223184802e63f4d046651c9db56f68602752cea20745c","src/std_error.rs":"3aac687856c035517fae44ed2906dd4a1e3184bae4bf613adcdeb73f74126c57"},"package":"d193d69bae983fc11a79df82342761dfbf28a99fc8d203dca4c3c1b590948965"} \ No newline at end of file
+{"files":{"Cargo.toml":"e390e019c701323f7a6f3b42dc1242445a0ea6c1188d91b1d3513fcebc7afe1d","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"3c12b60b6c77a201665344a7612b42392e77ddc3e907f5a14e3f3a4bb6c4692e","build.rs":"bf6f7f791517c2d583457e74452c68a7308c5e5106d1a2cebcfa19c55a9c1a42","crates-io.md":"ee22254ee64c3189eef3e707c8d75dc66a8df2a7ee9e518d95238950780ec387","src/de/format.rs":"84f902fd4c3be66e81ac01d5b21cd876113c16f9890ff8bab5faa0d085386294","src/de/ignored_any.rs":"967184c86707c99b77a1cfb218dfc823f560fae227b6635aee6af19ee82962f5","src/de/impls.rs":"2d4d9985b1048f5f6371984c9bc8f273ad685901ba22ad27483dfb7cec65898c","src/de/mod.rs":"71198e80e4c64aa686b5ceb6e8bce10db20845a87a30fa14227ecbe365a046d5","src/de/seed.rs":"e8cf0233afe0af5b8fb9e4c94f301c92729c5ba417280af9e2201b732e374a72","src/de/utf8.rs":"f17524ee0af98ec3abcfd7d0b812fbd1033263bd8e2ce2f57c1e1999ce153558","src/de/value.rs":"aa5055923e2c3fd1c1f1abdfb380a1d63d07cf4d602ef62d2df2b7da33dd8c81","src/integer128.rs":"ca49591abde2d8c4f582174533fee28f0fa9139e5d71bf22b25a6b175f8abccc","src/lib.rs":"a359fe67a2d3a8bfe27219b35992c0ed390a65a409c69db75e3bf7e63fd16dde","src/macros.rs":"3d695a51f0a07f9f719dcb5620012c21a1b084c06a6283349cabf574ceba8123","src/private/de.rs":"a85efe9af4f5629ac7d946af56e20fbc184df6ac40a6cfe47bf3997a95b2ea20","src/private/doc.rs":"e9801a43c3088fccd5f1fac76416698f948e65b647024aa9da17d673e1e8c217","src/private/mod.rs":"37b204775e572396515477b393ce793b2579de48e5971e6f596ba3723c489fd6","src/private/ser.rs":"57fbff98429e870da86edcf61c0831caaa3b708c0c32e3038c4b2179e8dff73e","src/private/size_hint.rs":"605521227e9ba3100fbb9d5ea7fd5853385097c35015ce6908bd5f1ea20d59ad","src/ser/fmt.rs":"7827ed07fd8897e6324f75625ba0c926a4c4e7ec2914cd067391ce54d942ac7b","src/ser/impls.rs":"8cbe2b66ae950cbc5223e41ac82194cccfc2c26300acfe6328e5f20081f23af3","src/ser/impossible.rs":"db17913522c1c27389c5a085113911353b9813c1b116518681362e7c8b692c3a","src/ser/mod.rs":"e1e6c764837c70b6410dcf1949a0dae1b4b4ffce65b87607d3d173b612e9bccf","src/std_error.rs":"3aac687856c035517fae44ed2906dd4a1e3184bae4bf613adcdeb73f74126c57"},"package":"bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb"} \ No newline at end of file
diff --git a/vendor/serde/Cargo.toml b/vendor/serde/Cargo.toml
index a319d423d..93acff3a2 100644
--- a/vendor/serde/Cargo.toml
+++ b/vendor/serde/Cargo.toml
@@ -12,7 +12,7 @@
[package]
rust-version = "1.13"
name = "serde"
-version = "1.0.147"
+version = "1.0.152"
authors = [
"Erick Tryzelaar <erick.tryzelaar@gmail.com>",
"David Tolnay <dtolnay@gmail.com>",
@@ -28,7 +28,7 @@ include = [
]
description = "A generic serialization/deserialization framework"
homepage = "https://serde.rs"
-documentation = "https://docs.serde.rs/serde/"
+documentation = "https://docs.rs/serde"
readme = "crates-io.md"
keywords = [
"serde",
@@ -51,8 +51,11 @@ features = [
[package.metadata.docs.rs]
targets = ["x86_64-unknown-linux-gnu"]
+[lib]
+doc-scrape-examples = false
+
[dependencies.serde_derive]
-version = "=1.0.147"
+version = "=1.0.152"
optional = true
[dev-dependencies.serde_derive]
diff --git a/vendor/serde/README.md b/vendor/serde/README.md
index 79409a3fc..c3f6575ef 100644
--- a/vendor/serde/README.md
+++ b/vendor/serde/README.md
@@ -1,6 +1,6 @@
# Serde &emsp; [![Build Status]][actions] [![Latest Version]][crates.io] [![serde: rustc 1.13+]][Rust 1.13] [![serde_derive: rustc 1.31+]][Rust 1.31]
-[Build Status]: https://img.shields.io/github/workflow/status/serde-rs/serde/CI/master
+[Build Status]: https://img.shields.io/github/actions/workflow/status/serde-rs/serde/ci.yml?branch=master
[actions]: https://github.com/serde-rs/serde/actions?query=branch%3Amaster
[Latest Version]: https://img.shields.io/crates/v/serde.svg
[crates.io]: https://crates.io/crates/serde
@@ -19,7 +19,7 @@ You may be looking for:
- [Data formats supported by Serde](https://serde.rs/#data-formats)
- [Setting up `#[derive(Serialize, Deserialize)]`](https://serde.rs/derive.html)
- [Examples](https://serde.rs/examples.html)
-- [API documentation](https://docs.serde.rs/serde/)
+- [API documentation](https://docs.rs/serde)
- [Release notes](https://github.com/serde-rs/serde/releases)
## Serde in action
diff --git a/vendor/serde/build.rs b/vendor/serde/build.rs
index a1103b520..ccbddd273 100644
--- a/vendor/serde/build.rs
+++ b/vendor/serde/build.rs
@@ -6,6 +6,8 @@ use std::str::{self, FromStr};
// opening a GitHub issue if your build environment requires some way to enable
// these cfgs other than by executing our build script.
fn main() {
+ println!("cargo:rerun-if-changed=build.rs");
+
let minor = match rustc_minor_version() {
Some(minor) => minor,
None => return,
@@ -89,24 +91,28 @@ fn main() {
println!("cargo:rustc-cfg=no_core_try_from");
println!("cargo:rustc-cfg=no_num_nonzero_signed");
println!("cargo:rustc-cfg=no_systemtime_checked_add");
+ println!("cargo:rustc-cfg=no_relaxed_trait_bounds");
}
- // Whitelist of archs that support std::sync::atomic module. Ideally we
- // would use #[cfg(target_has_atomic = "...")] but it is not stable yet.
- // Instead this is based on rustc's compiler/rustc_target/src/spec/*.rs.
- let has_atomic64 = target.starts_with("x86_64")
- || target.starts_with("i686")
- || target.starts_with("aarch64")
- || target.starts_with("powerpc64")
- || target.starts_with("sparc64")
- || target.starts_with("mips64el")
- || target.starts_with("riscv64");
- let has_atomic32 = has_atomic64 || emscripten;
- if minor < 34 || !has_atomic64 {
- println!("cargo:rustc-cfg=no_std_atomic64");
- }
- if minor < 34 || !has_atomic32 {
- println!("cargo:rustc-cfg=no_std_atomic");
+ // Support for #[cfg(target_has_atomic = "...")] stabilized in Rust 1.60.
+ if minor < 60 {
+ println!("cargo:rustc-cfg=no_target_has_atomic");
+ // Allowlist of archs that support std::sync::atomic module. This is
+ // based on rustc's compiler/rustc_target/src/spec/*.rs.
+ let has_atomic64 = target.starts_with("x86_64")
+ || target.starts_with("i686")
+ || target.starts_with("aarch64")
+ || target.starts_with("powerpc64")
+ || target.starts_with("sparc64")
+ || target.starts_with("mips64el")
+ || target.starts_with("riscv64");
+ let has_atomic32 = has_atomic64 || emscripten;
+ if minor < 34 || !has_atomic64 {
+ println!("cargo:rustc-cfg=no_std_atomic64");
+ }
+ if minor < 34 || !has_atomic32 {
+ println!("cargo:rustc-cfg=no_std_atomic");
+ }
}
}
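Editorial note: the build-script change above emits `no_target_has_atomic` only on compilers older than 1.60, where `#[cfg(target_has_atomic = "...")]` was not yet stable. The sketch below is a simplified reduction of that version-gated-cfg pattern; serde's real build.rs also inspects the target triple and emits several other cfgs.

```rust
// Simplified sketch of a version-gated cfg in a build script.
use std::process::Command;

fn main() {
    println!("cargo:rerun-if-changed=build.rs");
    let minor = rustc_minor_version().unwrap_or(0);
    if minor < 60 {
        // Fall back to the arch allowlist: #[cfg(target_has_atomic = "...")]
        // only stabilized in Rust 1.60.
        println!("cargo:rustc-cfg=no_target_has_atomic");
    }
}

fn rustc_minor_version() -> Option<u32> {
    // Cargo sets RUSTC for build scripts; `rustc --version` prints something
    // like "rustc 1.66.0 (abcdef123 2022-12-12)".
    let rustc = std::env::var_os("RUSTC")?;
    let output = Command::new(rustc).arg("--version").output().ok()?;
    let version = String::from_utf8(output.stdout).ok()?;
    version.split('.').nth(1)?.parse().ok()
}
```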
diff --git a/vendor/serde/crates-io.md b/vendor/serde/crates-io.md
index b57bc5fd0..6e0ec280c 100644
--- a/vendor/serde/crates-io.md
+++ b/vendor/serde/crates-io.md
@@ -10,7 +10,7 @@ You may be looking for:
- [Data formats supported by Serde](https://serde.rs/#data-formats)
- [Setting up `#[derive(Serialize, Deserialize)]`](https://serde.rs/derive.html)
- [Examples](https://serde.rs/examples.html)
-- [API documentation](https://docs.serde.rs/serde/)
+- [API documentation](https://docs.rs/serde)
- [Release notes](https://github.com/serde-rs/serde/releases)
## Serde in action
diff --git a/vendor/serde/src/de/format.rs b/vendor/serde/src/de/format.rs
index 58ec0968d..f14580b8d 100644
--- a/vendor/serde/src/de/format.rs
+++ b/vendor/serde/src/de/format.rs
@@ -1,7 +1,7 @@
use lib::fmt::{self, Write};
use lib::str;
-pub struct Buf<'a> {
+pub(super) struct Buf<'a> {
bytes: &'a mut [u8],
offset: usize,
}
diff --git a/vendor/serde/src/de/impls.rs b/vendor/serde/src/de/impls.rs
index c048f7145..a257d9e2f 100644
--- a/vendor/serde/src/de/impls.rs
+++ b/vendor/serde/src/de/impls.rs
@@ -733,7 +733,7 @@ impl<'de> Deserialize<'de> for CString {
macro_rules! forwarded_impl {
(
$(#[doc = $doc:tt])*
- ( $($id: ident),* ), $ty: ty, $func: expr
+ ($($id:ident),*), $ty:ty, $func:expr
) => {
$(#[doc = $doc])*
impl<'de $(, $id : Deserialize<'de>,)*> Deserialize<'de> for $ty {
@@ -860,7 +860,7 @@ impl<'de, T: ?Sized> Deserialize<'de> for PhantomData<T> {
#[cfg(any(feature = "std", feature = "alloc"))]
macro_rules! seq_impl {
(
- $ty:ident < T $(: $tbound1:ident $(+ $tbound2:ident)*)* $(, $typaram:ident : $bound1:ident $(+ $bound2:ident)*)* >,
+ $ty:ident <T $(: $tbound1:ident $(+ $tbound2:ident)*)* $(, $typaram:ident : $bound1:ident $(+ $bound2:ident)*)*>,
$access:ident,
$clear:expr,
$with_capacity:expr,
@@ -1353,7 +1353,7 @@ tuple_impls! {
#[cfg(any(feature = "std", feature = "alloc"))]
macro_rules! map_impl {
(
- $ty:ident < K $(: $kbound1:ident $(+ $kbound2:ident)*)*, V $(, $typaram:ident : $bound1:ident $(+ $bound2:ident)*)* >,
+ $ty:ident <K $(: $kbound1:ident $(+ $kbound2:ident)*)*, V $(, $typaram:ident : $bound1:ident $(+ $bound2:ident)*)*>,
$access:ident,
$with_capacity:expr
) => {
@@ -1440,15 +1440,15 @@ macro_rules! parse_ip_impl {
#[cfg(feature = "std")]
macro_rules! variant_identifier {
(
- $name_kind: ident ( $($variant: ident; $bytes: expr; $index: expr),* )
- $expecting_message: expr,
- $variants_name: ident
+ $name_kind:ident ($($variant:ident; $bytes:expr; $index:expr),*)
+ $expecting_message:expr,
+ $variants_name:ident
) => {
enum $name_kind {
- $( $variant ),*
+ $($variant),*
}
- static $variants_name: &'static [&'static str] = &[ $( stringify!($variant) ),*];
+ static $variants_name: &'static [&'static str] = &[$(stringify!($variant)),*];
impl<'de> Deserialize<'de> for $name_kind {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
@@ -1515,12 +1515,12 @@ macro_rules! variant_identifier {
#[cfg(feature = "std")]
macro_rules! deserialize_enum {
(
- $name: ident $name_kind: ident ( $($variant: ident; $bytes: expr; $index: expr),* )
- $expecting_message: expr,
- $deserializer: expr
+ $name:ident $name_kind:ident ($($variant:ident; $bytes:expr; $index:expr),*)
+ $expecting_message:expr,
+ $deserializer:expr
) => {
- variant_identifier!{
- $name_kind ( $($variant; $bytes; $index),* )
+ variant_identifier! {
+ $name_kind ($($variant; $bytes; $index),*)
$expecting_message,
VARIANTS
}
@@ -2662,8 +2662,9 @@ where
#[cfg(all(feature = "std", not(no_std_atomic)))]
macro_rules! atomic_impl {
- ($($ty:ident)*) => {
+ ($($ty:ident $size:expr)*) => {
$(
+ #[cfg(any(no_target_has_atomic, target_has_atomic = $size))]
impl<'de> Deserialize<'de> for $ty {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
@@ -2678,14 +2679,21 @@ macro_rules! atomic_impl {
#[cfg(all(feature = "std", not(no_std_atomic)))]
atomic_impl! {
- AtomicBool
- AtomicI8 AtomicI16 AtomicI32 AtomicIsize
- AtomicU8 AtomicU16 AtomicU32 AtomicUsize
+ AtomicBool "8"
+ AtomicI8 "8"
+ AtomicI16 "16"
+ AtomicI32 "32"
+ AtomicIsize "ptr"
+ AtomicU8 "8"
+ AtomicU16 "16"
+ AtomicU32 "32"
+ AtomicUsize "ptr"
}
#[cfg(all(feature = "std", not(no_std_atomic64)))]
atomic_impl! {
- AtomicI64 AtomicU64
+ AtomicI64 "64"
+ AtomicU64 "64"
}
#[cfg(feature = "std")]
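Editorial note: the `atomic_impl!` rewrite above attaches a width string to each atomic type so the generated impl can be gated per width. The sketch below shows roughly what one such gated expansion looks like, using a stand-in `Describe` trait instead of serde's `Deserialize`; the `no_target_has_atomic` cfg is assumed to come from a build script as in the diff.

```rust
use std::sync::atomic::{AtomicU32, AtomicU64, Ordering};

trait Describe {
    fn describe(&self) -> String;
}

// Only compiled when the target has 32-bit atomics, or when the compiler is
// too old to report that and the build-script fallback cfg is active.
#[cfg(any(no_target_has_atomic, target_has_atomic = "32"))]
impl Describe for AtomicU32 {
    fn describe(&self) -> String {
        format!("32-bit atomic holding {}", self.load(Ordering::Relaxed))
    }
}

// Same pattern for the 64-bit width, which some targets lack entirely.
#[cfg(any(no_target_has_atomic, target_has_atomic = "64"))]
impl Describe for AtomicU64 {
    fn describe(&self) -> String {
        format!("64-bit atomic holding {}", self.load(Ordering::Relaxed))
    }
}

fn main() {
    let counter = AtomicU32::new(7);
    println!("{}", counter.describe());
}
```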
diff --git a/vendor/serde/src/de/mod.rs b/vendor/serde/src/de/mod.rs
index d9dafbe1e..ca29ec610 100644
--- a/vendor/serde/src/de/mod.rs
+++ b/vendor/serde/src/de/mod.rs
@@ -501,8 +501,8 @@ impl<'a> Display for Expected + 'a {
/// by Serde.
///
/// Serde provides `Deserialize` implementations for many Rust primitive and
-/// standard library types. The complete list is [here][de]. All of these can
-/// be deserialized using Serde out of the box.
+/// standard library types. The complete list is [here][crate::de]. All of these
+/// can be deserialized using Serde out of the box.
///
/// Additionally, Serde provides a procedural macro called `serde_derive` to
/// automatically generate `Deserialize` implementations for structs and enums
@@ -518,7 +518,6 @@ impl<'a> Display for Expected + 'a {
/// `LinkedHashMap<K, V>` type that is deserializable by Serde because the crate
/// provides an implementation of `Deserialize` for it.
///
-/// [de]: https://docs.serde.rs/serde/de/index.html
/// [derive]: https://serde.rs/derive.html
/// [impl-deserialize]: https://serde.rs/impl-deserialize.html
///
diff --git a/vendor/serde/src/lib.rs b/vendor/serde/src/lib.rs
index 02c57ae9d..e4bc7c8f1 100644
--- a/vendor/serde/src/lib.rs
+++ b/vendor/serde/src/lib.rs
@@ -52,16 +52,22 @@
//! - [S-expressions], the textual representation of code and data used by the
//! Lisp language family.
//! - [D-Bus]'s binary wire format.
-//! - [FlexBuffers], the schemaless cousin of Google's FlatBuffers zero-copy serialization format.
+//! - [FlexBuffers], the schemaless cousin of Google's FlatBuffers zero-copy
+//! serialization format.
+//! - [Bencode], a simple binary format used in the BitTorrent protocol.
+//! - [Token streams], for processing Rust procedural macro input.
+//! *(deserialization only)*
//! - [DynamoDB Items], the format used by [rusoto_dynamodb] to transfer data to
//! and from DynamoDB.
+//! - [Hjson], a syntax extension to JSON designed around human reading and
+//! editing. *(deserialization only)*
//!
//! [JSON]: https://github.com/serde-rs/json
//! [Postcard]: https://github.com/jamesmunns/postcard
//! [CBOR]: https://github.com/enarx/ciborium
//! [YAML]: https://github.com/dtolnay/serde-yaml
//! [MessagePack]: https://github.com/3Hren/msgpack-rust
-//! [TOML]: https://github.com/alexcrichton/toml-rs
+//! [TOML]: https://docs.rs/toml
//! [Pickle]: https://github.com/birkenfeld/serde-pickle
//! [RON]: https://github.com/ron-rs/ron
//! [BSON]: https://github.com/mongodb/bson-rust
@@ -75,20 +81,23 @@
//! [S-expressions]: https://github.com/rotty/lexpr-rs
//! [D-Bus]: https://docs.rs/zvariant
//! [FlexBuffers]: https://github.com/google/flatbuffers/tree/master/rust/flexbuffers
+//! [Bencode]: https://github.com/P3KI/bendy
+//! [Token streams]: https://github.com/oxidecomputer/serde_tokenstream
//! [DynamoDB Items]: https://docs.rs/serde_dynamo
//! [rusoto_dynamodb]: https://docs.rs/rusoto_dynamodb
+//! [Hjson]: https://github.com/Canop/deser-hjson
////////////////////////////////////////////////////////////////////////////////
// Serde types in rustdoc of other crates get linked to here.
-#![doc(html_root_url = "https://docs.rs/serde/1.0.147")]
+#![doc(html_root_url = "https://docs.rs/serde/1.0.152")]
// Support using Serde without the standard library!
#![cfg_attr(not(feature = "std"), no_std)]
// Unstable functionality only if the user asks for it. For tracking and
// discussion of these features please refer to this issue:
//
// https://github.com/serde-rs/serde/issues/812
-#![cfg_attr(feature = "unstable", feature(never_type))]
+#![cfg_attr(feature = "unstable", feature(error_in_core, never_type))]
#![allow(unknown_lints, bare_trait_objects, deprecated)]
#![cfg_attr(feature = "cargo-clippy", allow(renamed_and_removed_lints))]
// Ignored clippy and clippy_pedantic lints
@@ -118,7 +127,6 @@
derive_partial_eq_without_eq,
enum_glob_use,
explicit_auto_deref,
- let_underscore_drop,
map_err_ignore,
new_without_default,
result_unit_err,
@@ -237,13 +245,26 @@ mod lib {
#[cfg(not(no_range_inclusive))]
pub use self::core::ops::RangeInclusive;
- #[cfg(all(feature = "std", not(no_std_atomic)))]
+ #[cfg(all(feature = "std", no_target_has_atomic, not(no_std_atomic)))]
pub use std::sync::atomic::{
AtomicBool, AtomicI16, AtomicI32, AtomicI8, AtomicIsize, AtomicU16, AtomicU32, AtomicU8,
AtomicUsize, Ordering,
};
- #[cfg(all(feature = "std", not(no_std_atomic64)))]
+ #[cfg(all(feature = "std", no_target_has_atomic, not(no_std_atomic64)))]
+ pub use std::sync::atomic::{AtomicI64, AtomicU64};
+
+ #[cfg(all(feature = "std", not(no_target_has_atomic)))]
+ pub use std::sync::atomic::Ordering;
+ #[cfg(all(feature = "std", not(no_target_has_atomic), target_has_atomic = "8"))]
+ pub use std::sync::atomic::{AtomicBool, AtomicI8, AtomicU8};
+ #[cfg(all(feature = "std", not(no_target_has_atomic), target_has_atomic = "16"))]
+ pub use std::sync::atomic::{AtomicI16, AtomicU16};
+ #[cfg(all(feature = "std", not(no_target_has_atomic), target_has_atomic = "32"))]
+ pub use std::sync::atomic::{AtomicI32, AtomicU32};
+ #[cfg(all(feature = "std", not(no_target_has_atomic), target_has_atomic = "64"))]
pub use std::sync::atomic::{AtomicI64, AtomicU64};
+ #[cfg(all(feature = "std", not(no_target_has_atomic), target_has_atomic = "ptr"))]
+ pub use std::sync::atomic::{AtomicIsize, AtomicUsize};
#[cfg(any(feature = "std", not(no_core_duration)))]
pub use self::core::time::Duration;
@@ -291,7 +312,7 @@ use self::__private as private;
#[path = "de/seed.rs"]
mod seed;
-#[cfg(not(feature = "std"))]
+#[cfg(not(any(feature = "std", feature = "unstable")))]
mod std_error;
// Re-export #[derive(Serialize, Deserialize)].
diff --git a/vendor/serde/src/private/de.rs b/vendor/serde/src/private/de.rs
index 01e5bf787..e9c693d4d 100644
--- a/vendor/serde/src/private/de.rs
+++ b/vendor/serde/src/private/de.rs
@@ -1810,7 +1810,7 @@ mod content {
V: Visitor<'de>,
E: de::Error,
{
- let map = content.iter().map(|&(ref k, ref v)| {
+ let map = content.iter().map(|(k, v)| {
(
ContentRefDeserializer::new(k),
ContentRefDeserializer::new(v),
@@ -2107,7 +2107,7 @@ mod content {
let (variant, value) = match *self.content {
Content::Map(ref value) => {
let mut iter = value.iter();
- let &(ref variant, ref value) = match iter.next() {
+ let (variant, value) = match iter.next() {
Some(v) => v,
None => {
return Err(de::Error::invalid_value(
@@ -2254,7 +2254,7 @@ mod content {
V: de::Visitor<'de>,
{
match self.value {
- Some(&Content::Seq(ref v)) => {
+ Some(Content::Seq(v)) => {
de::Deserializer::deserialize_any(SeqRefDeserializer::new(v), visitor)
}
Some(other) => Err(de::Error::invalid_type(
@@ -2277,10 +2277,10 @@ mod content {
V: de::Visitor<'de>,
{
match self.value {
- Some(&Content::Map(ref v)) => {
+ Some(Content::Map(v)) => {
de::Deserializer::deserialize_any(MapRefDeserializer::new(v), visitor)
}
- Some(&Content::Seq(ref v)) => {
+ Some(Content::Seq(v)) => {
de::Deserializer::deserialize_any(SeqRefDeserializer::new(v), visitor)
}
Some(other) => Err(de::Error::invalid_type(
@@ -2403,7 +2403,7 @@ mod content {
T: de::DeserializeSeed<'de>,
{
match self.iter.next() {
- Some(&(ref key, ref value)) => {
+ Some((key, value)) => {
self.value = Some(value);
seed.deserialize(ContentRefDeserializer::new(key)).map(Some)
}
@@ -2708,7 +2708,7 @@ where
#[cfg(any(feature = "std", feature = "alloc"))]
macro_rules! forward_to_deserialize_other {
- ($($func:ident ( $($arg:ty),* ))*) => {
+ ($($func:ident ($($arg:ty),*))*) => {
$(
fn $func<V>(self, $(_: $arg,)* _visitor: V) -> Result<V::Value, Self::Error>
where
diff --git a/vendor/serde/src/private/ser.rs b/vendor/serde/src/private/ser.rs
index 293d8a865..528e8c125 100644
--- a/vendor/serde/src/private/ser.rs
+++ b/vendor/serde/src/private/ser.rs
@@ -525,7 +525,7 @@ mod content {
Content::Map(ref entries) => {
use ser::SerializeMap;
let mut map = try!(serializer.serialize_map(Some(entries.len())));
- for &(ref k, ref v) in entries {
+ for (k, v) in entries {
try!(map.serialize_entry(k, v));
}
map.end()
diff --git a/vendor/serde/src/ser/impls.rs b/vendor/serde/src/ser/impls.rs
index 8e8655582..da2677261 100644
--- a/vendor/serde/src/ser/impls.rs
+++ b/vendor/serde/src/ser/impls.rs
@@ -182,9 +182,27 @@ where
}
}
-#[cfg(any(feature = "std", feature = "alloc"))]
+#[cfg(all(any(feature = "std", feature = "alloc"), not(no_relaxed_trait_bounds)))]
macro_rules! seq_impl {
- ($ty:ident < T $(: $tbound1:ident $(+ $tbound2:ident)*)* $(, $typaram:ident : $bound:ident)* >) => {
+ ($ty:ident <T $(: $tbound1:ident $(+ $tbound2:ident)*)* $(, $typaram:ident : $bound:ident)*>) => {
+ impl<T $(, $typaram)*> Serialize for $ty<T $(, $typaram)*>
+ where
+ T: Serialize,
+ {
+ #[inline]
+ fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+ where
+ S: Serializer,
+ {
+ serializer.collect_seq(self)
+ }
+ }
+ }
+}
+
+#[cfg(all(any(feature = "std", feature = "alloc"), no_relaxed_trait_bounds))]
+macro_rules! seq_impl {
+ ($ty:ident <T $(: $tbound1:ident $(+ $tbound2:ident)*)* $(, $typaram:ident : $bound:ident)*>) => {
impl<T $(, $typaram)*> Serialize for $ty<T $(, $typaram)*>
where
T: Serialize $(+ $tbound1 $(+ $tbound2)*)*,
@@ -347,9 +365,28 @@ tuple_impls! {
////////////////////////////////////////////////////////////////////////////////
-#[cfg(any(feature = "std", feature = "alloc"))]
+#[cfg(all(any(feature = "std", feature = "alloc"), not(no_relaxed_trait_bounds)))]
+macro_rules! map_impl {
+ ($ty:ident <K $(: $kbound1:ident $(+ $kbound2:ident)*)*, V $(, $typaram:ident : $bound:ident)*>) => {
+ impl<K, V $(, $typaram)*> Serialize for $ty<K, V $(, $typaram)*>
+ where
+ K: Serialize,
+ V: Serialize,
+ {
+ #[inline]
+ fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+ where
+ S: Serializer,
+ {
+ serializer.collect_map(self)
+ }
+ }
+ }
+}
+
+#[cfg(all(any(feature = "std", feature = "alloc"), no_relaxed_trait_bounds))]
macro_rules! map_impl {
- ($ty:ident < K $(: $kbound1:ident $(+ $kbound2:ident)*)*, V $(, $typaram:ident : $bound:ident)* >) => {
+ ($ty:ident <K $(: $kbound1:ident $(+ $kbound2:ident)*)*, V $(, $typaram:ident : $bound:ident)*>) => {
impl<K, V $(, $typaram)*> Serialize for $ty<K, V $(, $typaram)*>
where
K: Serialize $(+ $kbound1 $(+ $kbound2)*)*,
@@ -465,7 +502,7 @@ where
////////////////////////////////////////////////////////////////////////////////
macro_rules! nonzero_integers {
- ( $( $T: ident, )+ ) => {
+ ($($T:ident,)+) => {
$(
#[cfg(not(no_num_nonzero))]
impl Serialize for num::$T {
@@ -736,8 +773,9 @@ impl Serialize for net::Ipv4Addr {
// Skip over delimiters that we initialized buf with
written += format_u8(*oct, &mut buf[written + 1..]) + 1;
}
- // We've only written ASCII bytes to the buffer, so it is valid UTF-8
- serializer.serialize_str(unsafe { str::from_utf8_unchecked(&buf[..written]) })
+ // Safety: We've only written ASCII bytes to the buffer, so it is valid UTF-8
+ let buf = unsafe { str::from_utf8_unchecked(&buf[..written]) };
+ serializer.serialize_str(buf)
} else {
self.octets().serialize(serializer)
}
@@ -909,8 +947,9 @@ where
#[cfg(all(feature = "std", not(no_std_atomic)))]
macro_rules! atomic_impl {
- ($($ty:ident)*) => {
+ ($($ty:ident $size:expr)*) => {
$(
+ #[cfg(any(no_target_has_atomic, target_has_atomic = $size))]
impl Serialize for $ty {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
@@ -926,12 +965,19 @@ macro_rules! atomic_impl {
#[cfg(all(feature = "std", not(no_std_atomic)))]
atomic_impl! {
- AtomicBool
- AtomicI8 AtomicI16 AtomicI32 AtomicIsize
- AtomicU8 AtomicU16 AtomicU32 AtomicUsize
+ AtomicBool "8"
+ AtomicI8 "8"
+ AtomicI16 "16"
+ AtomicI32 "32"
+ AtomicIsize "ptr"
+ AtomicU8 "8"
+ AtomicU16 "16"
+ AtomicU32 "32"
+ AtomicUsize "ptr"
}
#[cfg(all(feature = "std", not(no_std_atomic64)))]
atomic_impl! {
- AtomicI64 AtomicU64
+ AtomicI64 "64"
+ AtomicU64 "64"
}
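Editorial note: the duplicated `seq_impl!`/`map_impl!` macros above exist so that, on compilers without the `no_relaxed_trait_bounds` cfg, collection impls require only `Serialize` from their element types, dropping the old `Ord`/`Eq + Hash` bounds. The sketch below illustrates what such a relaxed bound looks like with a hypothetical `Encode` trait standing in for serde's `Serialize`.

```rust
use std::collections::BTreeSet;

trait Encode {
    fn encode(&self) -> String;
}

impl Encode for i32 {
    fn encode(&self) -> String {
        self.to_string()
    }
}

// Relaxed bound: only `T: Encode`, no `T: Ord`, mirroring the
// not(no_relaxed_trait_bounds) branch added above. Walking the set never
// needs the ordering bound.
impl<T: Encode> Encode for BTreeSet<T> {
    fn encode(&self) -> String {
        self.iter().map(|t| t.encode()).collect::<Vec<_>>().join(",")
    }
}

fn main() {
    let set: BTreeSet<i32> = [3, 1, 2].into_iter().collect();
    assert_eq!(set.encode(), "1,2,3");
}
```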
diff --git a/vendor/serde/src/ser/mod.rs b/vendor/serde/src/ser/mod.rs
index 9a21363d6..5c45426e8 100644
--- a/vendor/serde/src/ser/mod.rs
+++ b/vendor/serde/src/ser/mod.rs
@@ -115,10 +115,13 @@ mod impossible;
pub use self::impossible::Impossible;
+#[cfg(all(feature = "unstable", not(feature = "std")))]
+#[doc(inline)]
+pub use core::error::Error as StdError;
#[cfg(feature = "std")]
#[doc(no_inline)]
pub use std::error::Error as StdError;
-#[cfg(not(feature = "std"))]
+#[cfg(not(any(feature = "std", feature = "unstable")))]
#[doc(no_inline)]
pub use std_error::Error as StdError;
@@ -191,8 +194,8 @@ declare_error_trait!(Error: Sized + Debug + Display);
/// by Serde.
///
/// Serde provides `Serialize` implementations for many Rust primitive and
-/// standard library types. The complete list is [here][ser]. All of these can
-/// be serialized using Serde out of the box.
+/// standard library types. The complete list is [here][crate::ser]. All of
+/// these can be serialized using Serde out of the box.
///
/// Additionally, Serde provides a procedural macro called [`serde_derive`] to
/// automatically generate `Serialize` implementations for structs and enums in
@@ -212,7 +215,6 @@ declare_error_trait!(Error: Sized + Debug + Display);
/// [`linked-hash-map`]: https://crates.io/crates/linked-hash-map
/// [`serde_derive`]: https://crates.io/crates/serde_derive
/// [derive section of the manual]: https://serde.rs/derive.html
-/// [ser]: https://docs.serde.rs/serde/ser/index.html
pub trait Serialize {
/// Serialize this value into the given Serde serializer.
///
diff --git a/vendor/serde_derive/.cargo-checksum.json b/vendor/serde_derive/.cargo-checksum.json
index 55e1585d8..4f033d7fe 100644
--- a/vendor/serde_derive/.cargo-checksum.json
+++ b/vendor/serde_derive/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.toml":"74849419177cf3d29dc77892c9519708ac38eecd3c2d4e4367de3c171cd2d410","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"7049b88cb7366be0c8e593f3ffffa313803a5b382f35686c542b4a0da3b291f3","build.rs":"c01db20e19c31505b26b9e9a5aff1c5327a7501fc88917f372a9e718edcb50ab","crates-io.md":"5c42406936cf9af6d4cd7fe0ac730c609e82fd3f15a54549518c72d0ded70c29","src/bound.rs":"268b4995a5d0a129dcbd6e32ef11f587bd271df3f6c4f7230ed54bc99f5ce871","src/de.rs":"c5a41016ce15f8176a2d7a8445ba06d2eb8de0863c1fea0dab51c395dd7dccff","src/dummy.rs":"cb154465020973be8ab6079ab8574df46f38fbe028a5561cd6b1a8bfa1a35478","src/fragment.rs":"5548ba65a53d90a296f60c1328a7a7fb040db467f59c2f5210b2fb320457145d","src/internals/ast.rs":"b019865eef92c1ddbb9029423ac22179f132dc655a51c09fb2a42f4aaef172fd","src/internals/attr.rs":"778074380c4e353b77e03aff9edf15fda9e15a0e7ec25cdfc51d79a26636ddef","src/internals/case.rs":"9492f0c5142d7b7e8cd39c86d13a855e5ce4489425adb2b96aed89e1b7851ac0","src/internals/check.rs":"11ea94257d2a2ee2276938a6beb4ae11b74c39225c1e342e6df1e7d2b2924496","src/internals/ctxt.rs":"6fa544ae52914498a62a395818ebdc1b36ac2fb5903c60afb741a864ad559f1c","src/internals/mod.rs":"f32138ff19d57eb00f88ba11f6b015efab2102657804f71ebbf386a3698dad91","src/internals/receiver.rs":"cd125ba4a3dd6250ed4737555c58627bffd630a536cd7223068eed7c10a170d8","src/internals/respan.rs":"899753859c58ce5f532a3ec4584796a52f13ed5a0533191e48c953ba5c1b52ff","src/internals/symbol.rs":"3c9ce461773b7df3bb64d82aa5a0d93052c3bb0e60209db6c0b5c10ee9cfc9cf","src/lib.rs":"82c587dfec75c78b53fc64ab9366f61213f2776cefea256c9eaeb18193c8cbf4","src/pretend.rs":"4aa53bf6c1350fbcfc8c4997f720cde61a8eb3aab73bb8c101b0f0a74901892b","src/ser.rs":"0d99c841f6c7bc9751ab225fe42d1f8b7fe56e36903efcb4ff10bf6e35c390ba","src/try.rs":"b171b0088c23ebf4bfa07ba457881b41ac5e547d55dd16f737ea988d34badf61"},"package":"4f1d362ca8fc9c3e3a7484440752472d68a6caa98f1ab81d99b5dfe517cec852"} \ No newline at end of file
+{"files":{"Cargo.toml":"72ed2b0578c6c4fbbd14ab2062502092990c48f4687a01a4a07d7fdbb6330756","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"3c12b60b6c77a201665344a7612b42392e77ddc3e907f5a14e3f3a4bb6c4692e","build.rs":"d9a0df0c4dd552ff7fd0c3b3828cb1fff4fc4ab15bd98539881929b76b98003b","crates-io.md":"ee22254ee64c3189eef3e707c8d75dc66a8df2a7ee9e518d95238950780ec387","src/bound.rs":"268b4995a5d0a129dcbd6e32ef11f587bd271df3f6c4f7230ed54bc99f5ce871","src/de.rs":"b28c2fcf5214d33ba1dc855b60634db18608a26f39f9f92bbedf62c456fa8d10","src/dummy.rs":"cb154465020973be8ab6079ab8574df46f38fbe028a5561cd6b1a8bfa1a35478","src/fragment.rs":"5548ba65a53d90a296f60c1328a7a7fb040db467f59c2f5210b2fb320457145d","src/internals/ast.rs":"b019865eef92c1ddbb9029423ac22179f132dc655a51c09fb2a42f4aaef172fd","src/internals/attr.rs":"778074380c4e353b77e03aff9edf15fda9e15a0e7ec25cdfc51d79a26636ddef","src/internals/case.rs":"9492f0c5142d7b7e8cd39c86d13a855e5ce4489425adb2b96aed89e1b7851ac0","src/internals/check.rs":"6b84278b034a156784fc56153df3def1660bcfcfde0cd59f8facce1750717c7d","src/internals/ctxt.rs":"6fa544ae52914498a62a395818ebdc1b36ac2fb5903c60afb741a864ad559f1c","src/internals/mod.rs":"f32138ff19d57eb00f88ba11f6b015efab2102657804f71ebbf386a3698dad91","src/internals/receiver.rs":"cd125ba4a3dd6250ed4737555c58627bffd630a536cd7223068eed7c10a170d8","src/internals/respan.rs":"899753859c58ce5f532a3ec4584796a52f13ed5a0533191e48c953ba5c1b52ff","src/internals/symbol.rs":"3c9ce461773b7df3bb64d82aa5a0d93052c3bb0e60209db6c0b5c10ee9cfc9cf","src/lib.rs":"6a80c0114dcf9924cbbbc03f443cfd0d299be9f89ba6c4fdc2867d990aba5063","src/pretend.rs":"4aa53bf6c1350fbcfc8c4997f720cde61a8eb3aab73bb8c101b0f0a74901892b","src/ser.rs":"8f9ffe1d8bcd28bd40e8d94d688547fa1d518cc722d0292f47d951152c406dd9","src/this.rs":"a2c128955324c2994ed7cdc3fe4eeceb7ad8a0f9d071665a8378c85c8df64ce2","src/try.rs":"b171b0088c23ebf4bfa07ba457881b41ac5e547d55dd16f737ea988d34badf61"},"package":"af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e"} \ No newline at end of file
diff --git a/vendor/serde_derive/Cargo.toml b/vendor/serde_derive/Cargo.toml
index dc5abf1b4..6258d9df1 100644
--- a/vendor/serde_derive/Cargo.toml
+++ b/vendor/serde_derive/Cargo.toml
@@ -12,7 +12,7 @@
[package]
rust-version = "1.31"
name = "serde_derive"
-version = "1.0.147"
+version = "1.0.152"
authors = [
"Erick Tryzelaar <erick.tryzelaar@gmail.com>",
"David Tolnay <dtolnay@gmail.com>",
@@ -53,7 +53,7 @@ version = "1.0"
version = "1.0"
[dependencies.syn]
-version = "1.0.90"
+version = "1.0.104"
[dev-dependencies.serde]
version = "1.0"
diff --git a/vendor/serde_derive/README.md b/vendor/serde_derive/README.md
index 79409a3fc..c3f6575ef 100644
--- a/vendor/serde_derive/README.md
+++ b/vendor/serde_derive/README.md
@@ -1,6 +1,6 @@
# Serde &emsp; [![Build Status]][actions] [![Latest Version]][crates.io] [![serde: rustc 1.13+]][Rust 1.13] [![serde_derive: rustc 1.31+]][Rust 1.31]
-[Build Status]: https://img.shields.io/github/workflow/status/serde-rs/serde/CI/master
+[Build Status]: https://img.shields.io/github/actions/workflow/status/serde-rs/serde/ci.yml?branch=master
[actions]: https://github.com/serde-rs/serde/actions?query=branch%3Amaster
[Latest Version]: https://img.shields.io/crates/v/serde.svg
[crates.io]: https://crates.io/crates/serde
@@ -19,7 +19,7 @@ You may be looking for:
- [Data formats supported by Serde](https://serde.rs/#data-formats)
- [Setting up `#[derive(Serialize, Deserialize)]`](https://serde.rs/derive.html)
- [Examples](https://serde.rs/examples.html)
-- [API documentation](https://docs.serde.rs/serde/)
+- [API documentation](https://docs.rs/serde)
- [Release notes](https://github.com/serde-rs/serde/releases)
## Serde in action
diff --git a/vendor/serde_derive/build.rs b/vendor/serde_derive/build.rs
index 2aece0469..1249dab59 100644
--- a/vendor/serde_derive/build.rs
+++ b/vendor/serde_derive/build.rs
@@ -6,6 +6,8 @@ use std::str;
// opening a GitHub issue if your build environment requires some way to enable
// these cfgs other than by executing our build script.
fn main() {
+ println!("cargo:rerun-if-changed=build.rs");
+
let minor = match rustc_minor_version() {
Some(minor) => minor,
None => return,
diff --git a/vendor/serde_derive/crates-io.md b/vendor/serde_derive/crates-io.md
index b57bc5fd0..6e0ec280c 100644
--- a/vendor/serde_derive/crates-io.md
+++ b/vendor/serde_derive/crates-io.md
@@ -10,7 +10,7 @@ You may be looking for:
- [Data formats supported by Serde](https://serde.rs/#data-formats)
- [Setting up `#[derive(Serialize, Deserialize)]`](https://serde.rs/derive.html)
- [Examples](https://serde.rs/examples.html)
-- [API documentation](https://docs.serde.rs/serde/)
+- [API documentation](https://docs.rs/serde)
- [Release notes](https://github.com/serde-rs/serde/releases)
## Serde in action
diff --git a/vendor/serde_derive/src/de.rs b/vendor/serde_derive/src/de.rs
index ff7bc42f4..a703adaf7 100644
--- a/vendor/serde_derive/src/de.rs
+++ b/vendor/serde_derive/src/de.rs
@@ -10,6 +10,7 @@ use fragment::{Expr, Fragment, Match, Stmts};
use internals::ast::{Container, Data, Field, Style, Variant};
use internals::{attr, replace_receiver, ungroup, Ctxt, Derive};
use pretend;
+use this;
use std::collections::BTreeSet;
use std::ptr;
@@ -111,9 +112,13 @@ struct Parameters {
local: syn::Ident,
/// Path to the type the impl is for. Either a single `Ident` for local
- /// types or `some::remote::Ident` for remote types. Does not include
- /// generic parameters.
- this: syn::Path,
+ /// types (does not include generic parameters) or `some::remote::Path` for
+ /// remote types.
+ this_type: syn::Path,
+
+ /// Same as `this_type` but using `::<T>` for generic parameters for use in
+ /// expression position.
+ this_value: syn::Path,
/// Generics including any explicit and inferred bounds for the impl.
generics: syn::Generics,
@@ -133,10 +138,8 @@ struct Parameters {
impl Parameters {
fn new(cont: &Container) -> Self {
let local = cont.ident.clone();
- let this = match cont.attrs.remote() {
- Some(remote) => remote.clone(),
- None => cont.ident.clone().into(),
- };
+ let this_type = this::this_type(cont);
+ let this_value = this::this_value(cont);
let borrowed = borrowed_lifetimes(cont);
let generics = build_generics(cont, &borrowed);
let has_getter = cont.data.has_getter();
@@ -144,7 +147,8 @@ impl Parameters {
Parameters {
local,
- this,
+ this_type,
+ this_value,
generics,
borrowed,
has_getter,
@@ -155,7 +159,7 @@ impl Parameters {
/// Type name to use in error messages and `&'static str` arguments to
/// various Deserializer methods.
fn type_name(&self) -> String {
- self.this.segments.last().unwrap().ident.to_string()
+ self.this_type.segments.last().unwrap().ident.to_string()
}
}
@@ -358,7 +362,7 @@ fn deserialize_transparent(cont: &Container, params: &Parameters) -> Fragment {
Data::Enum(_) => unreachable!(),
};
- let this = &params.this;
+ let this_value = &params.this_value;
let transparent_field = fields.iter().find(|f| f.attrs.transparent()).unwrap();
let path = match transparent_field.attrs.deserialize_with() {
@@ -386,7 +390,7 @@ fn deserialize_transparent(cont: &Container, params: &Parameters) -> Fragment {
quote_block! {
_serde::__private::Result::map(
#path(__deserializer),
- |__transparent| #this { #(#assign),* })
+ |__transparent| #this_value { #(#assign),* })
}
}
@@ -407,7 +411,8 @@ fn deserialize_try_from(type_try_from: &syn::Type) -> Fragment {
}
fn deserialize_unit_struct(params: &Parameters, cattrs: &attr::Container) -> Fragment {
- let this = &params.this;
+ let this_type = &params.this_type;
+ let this_value = &params.this_value;
let type_name = cattrs.name().deserialize_name();
let expecting = format!("unit struct {}", params.type_name());
@@ -417,7 +422,7 @@ fn deserialize_unit_struct(params: &Parameters, cattrs: &attr::Container) -> Fra
struct __Visitor;
impl<'de> _serde::de::Visitor<'de> for __Visitor {
- type Value = #this;
+ type Value = #this_type;
fn expecting(&self, __formatter: &mut _serde::__private::Formatter) -> _serde::__private::fmt::Result {
_serde::__private::Formatter::write_str(__formatter, #expecting)
@@ -428,7 +433,7 @@ fn deserialize_unit_struct(params: &Parameters, cattrs: &attr::Container) -> Fra
where
__E: _serde::de::Error,
{
- _serde::__private::Ok(#this)
+ _serde::__private::Ok(#this_value)
}
}
@@ -443,7 +448,8 @@ fn deserialize_tuple(
cattrs: &attr::Container,
deserializer: Option<TokenStream>,
) -> Fragment {
- let this = &params.this;
+ let this_type = &params.this_type;
+ let this_value = &params.this_value;
let (de_impl_generics, de_ty_generics, ty_generics, where_clause) =
split_with_de_lifetime(params);
let delife = params.borrowed.de_lifetime();
@@ -457,7 +463,7 @@ fn deserialize_tuple(
let local = &params.local;
quote!(#local)
} else {
- quote!(#this)
+ quote!(#this_value)
};
let is_enum = variant_ident.is_some();
@@ -485,7 +491,7 @@ fn deserialize_tuple(
let visitor_expr = quote! {
__Visitor {
- marker: _serde::__private::PhantomData::<#this #ty_generics>,
+ marker: _serde::__private::PhantomData::<#this_type #ty_generics>,
lifetime: _serde::__private::PhantomData,
}
};
@@ -510,12 +516,12 @@ fn deserialize_tuple(
quote_block! {
struct __Visitor #de_impl_generics #where_clause {
- marker: _serde::__private::PhantomData<#this #ty_generics>,
+ marker: _serde::__private::PhantomData<#this_type #ty_generics>,
lifetime: _serde::__private::PhantomData<&#delife ()>,
}
impl #de_impl_generics _serde::de::Visitor<#delife> for __Visitor #de_ty_generics #where_clause {
- type Value = #this #ty_generics;
+ type Value = #this_type #ty_generics;
fn expecting(&self, __formatter: &mut _serde::__private::Formatter) -> _serde::__private::fmt::Result {
_serde::__private::Formatter::write_str(__formatter, #expecting)
@@ -544,7 +550,7 @@ fn deserialize_tuple_in_place(
cattrs: &attr::Container,
deserializer: Option<TokenStream>,
) -> Fragment {
- let this = &params.this;
+ let this_type = &params.this_type;
let (de_impl_generics, de_ty_generics, ty_generics, where_clause) =
split_with_de_lifetime(params);
let delife = params.borrowed.de_lifetime();
@@ -600,7 +606,7 @@ fn deserialize_tuple_in_place(
quote_block! {
struct __Visitor #in_place_impl_generics #where_clause {
- place: &#place_life mut #this #ty_generics,
+ place: &#place_life mut #this_type #ty_generics,
lifetime: _serde::__private::PhantomData<&#delife ()>,
}
@@ -705,9 +711,10 @@ fn deserialize_seq(
};
if params.has_getter {
- let this = &params.this;
+ let this_type = &params.this_type;
+ let (_, ty_generics, _) = params.generics.split_for_impl();
result = quote! {
- _serde::__private::Into::<#this>::into(#result)
+ _serde::__private::Into::<#this_type #ty_generics>::into(#result)
};
}
@@ -801,14 +808,14 @@ fn deserialize_seq_in_place(
}
});
- let this = &params.this;
+ let this_type = &params.this_type;
let (_, ty_generics, _) = params.generics.split_for_impl();
let let_default = match cattrs.default() {
attr::Default::Default => Some(quote!(
- let __default: #this #ty_generics = _serde::__private::Default::default();
+ let __default: #this_type #ty_generics = _serde::__private::Default::default();
)),
attr::Default::Path(path) => Some(quote!(
- let __default: #this #ty_generics = #path();
+ let __default: #this_type #ty_generics = #path();
)),
attr::Default::None => {
// We don't need the default value, to prevent an unused variable warning
@@ -849,9 +856,10 @@ fn deserialize_newtype_struct(
let mut result = quote!(#type_path(__field0));
if params.has_getter {
- let this = &params.this;
+ let this_type = &params.this_type;
+ let (_, ty_generics, _) = params.generics.split_for_impl();
result = quote! {
- _serde::__private::Into::<#this>::into(#result)
+ _serde::__private::Into::<#this_type #ty_generics>::into(#result)
};
}
@@ -901,7 +909,8 @@ fn deserialize_struct(
) -> Fragment {
let is_enum = variant_ident.is_some();
- let this = &params.this;
+ let this_type = &params.this_type;
+ let this_value = &params.this_value;
let (de_impl_generics, de_ty_generics, ty_generics, where_clause) =
split_with_de_lifetime(params);
let delife = params.borrowed.de_lifetime();
@@ -913,7 +922,7 @@ fn deserialize_struct(
let local = &params.local;
quote!(#local)
} else {
- quote!(#this)
+ quote!(#this_value)
};
let type_path = match variant_ident {
@@ -941,7 +950,7 @@ fn deserialize_struct(
let visitor_expr = quote! {
__Visitor {
- marker: _serde::__private::PhantomData::<#this #ty_generics>,
+ marker: _serde::__private::PhantomData::<#this_type #ty_generics>,
lifetime: _serde::__private::PhantomData,
}
};
@@ -993,7 +1002,7 @@ fn deserialize_struct(
let visitor_seed = if is_enum && cattrs.has_flatten() {
Some(quote! {
impl #de_impl_generics _serde::de::DeserializeSeed<#delife> for __Visitor #de_ty_generics #where_clause {
- type Value = #this #ty_generics;
+ type Value = #this_type #ty_generics;
fn deserialize<__D>(self, __deserializer: __D) -> _serde::__private::Result<Self::Value, __D::Error>
where
@@ -1011,12 +1020,12 @@ fn deserialize_struct(
#field_visitor
struct __Visitor #de_impl_generics #where_clause {
- marker: _serde::__private::PhantomData<#this #ty_generics>,
+ marker: _serde::__private::PhantomData<#this_type #ty_generics>,
lifetime: _serde::__private::PhantomData<&#delife ()>,
}
impl #de_impl_generics _serde::de::Visitor<#delife> for __Visitor #de_ty_generics #where_clause {
- type Value = #this #ty_generics;
+ type Value = #this_type #ty_generics;
fn expecting(&self, __formatter: &mut _serde::__private::Formatter) -> _serde::__private::fmt::Result {
_serde::__private::Formatter::write_str(__formatter, #expecting)
@@ -1057,7 +1066,7 @@ fn deserialize_struct_in_place(
return None;
}
- let this = &params.this;
+ let this_type = &params.this_type;
let (de_impl_generics, de_ty_generics, ty_generics, where_clause) =
split_with_de_lifetime(params);
let delife = params.borrowed.de_lifetime();
@@ -1123,7 +1132,7 @@ fn deserialize_struct_in_place(
#field_visitor
struct __Visitor #in_place_impl_generics #where_clause {
- place: &#place_life mut #this #ty_generics,
+ place: &#place_life mut #this_type #ty_generics,
lifetime: _serde::__private::PhantomData<&#delife ()>,
}
@@ -1212,7 +1221,7 @@ fn deserialize_externally_tagged_enum(
variants: &[Variant],
cattrs: &attr::Container,
) -> Fragment {
- let this = &params.this;
+ let this_type = &params.this_type;
let (de_impl_generics, de_ty_generics, ty_generics, where_clause) =
split_with_de_lifetime(params);
let delife = params.borrowed.de_lifetime();
@@ -1266,12 +1275,12 @@ fn deserialize_externally_tagged_enum(
#variant_visitor
struct __Visitor #de_impl_generics #where_clause {
- marker: _serde::__private::PhantomData<#this #ty_generics>,
+ marker: _serde::__private::PhantomData<#this_type #ty_generics>,
lifetime: _serde::__private::PhantomData<&#delife ()>,
}
impl #de_impl_generics _serde::de::Visitor<#delife> for __Visitor #de_ty_generics #where_clause {
- type Value = #this #ty_generics;
+ type Value = #this_type #ty_generics;
fn expecting(&self, __formatter: &mut _serde::__private::Formatter) -> _serde::__private::fmt::Result {
_serde::__private::Formatter::write_str(__formatter, #expecting)
@@ -1292,7 +1301,7 @@ fn deserialize_externally_tagged_enum(
#type_name,
VARIANTS,
__Visitor {
- marker: _serde::__private::PhantomData::<#this #ty_generics>,
+ marker: _serde::__private::PhantomData::<#this_type #ty_generics>,
lifetime: _serde::__private::PhantomData,
},
)
@@ -1354,7 +1363,8 @@ fn deserialize_adjacently_tagged_enum(
tag: &str,
content: &str,
) -> Fragment {
- let this = &params.this;
+ let this_type = &params.this_type;
+ let this_value = &params.this_value;
let (de_impl_generics, de_ty_generics, ty_generics, where_clause) =
split_with_de_lifetime(params);
let delife = params.borrowed.de_lifetime();
@@ -1415,13 +1425,13 @@ fn deserialize_adjacently_tagged_enum(
let arm = match variant.style {
Style::Unit => quote! {
- _serde::__private::Ok(#this::#variant_ident)
+ _serde::__private::Ok(#this_value::#variant_ident)
},
Style::Newtype if variant.attrs.deserialize_with().is_none() => {
let span = variant.original.span();
let func = quote_spanned!(span=> _serde::__private::de::missing_field);
quote! {
- #func(#content).map(#this::#variant_ident)
+ #func(#content).map(#this_value::#variant_ident)
}
}
_ => {
@@ -1513,12 +1523,12 @@ fn deserialize_adjacently_tagged_enum(
struct __Seed #de_impl_generics #where_clause {
field: __Field,
- marker: _serde::__private::PhantomData<#this #ty_generics>,
+ marker: _serde::__private::PhantomData<#this_type #ty_generics>,
lifetime: _serde::__private::PhantomData<&#delife ()>,
}
impl #de_impl_generics _serde::de::DeserializeSeed<#delife> for __Seed #de_ty_generics #where_clause {
- type Value = #this #ty_generics;
+ type Value = #this_type #ty_generics;
fn deserialize<__D>(self, __deserializer: __D) -> _serde::__private::Result<Self::Value, __D::Error>
where
@@ -1531,12 +1541,12 @@ fn deserialize_adjacently_tagged_enum(
}
struct __Visitor #de_impl_generics #where_clause {
- marker: _serde::__private::PhantomData<#this #ty_generics>,
+ marker: _serde::__private::PhantomData<#this_type #ty_generics>,
lifetime: _serde::__private::PhantomData<&#delife ()>,
}
impl #de_impl_generics _serde::de::Visitor<#delife> for __Visitor #de_ty_generics #where_clause {
- type Value = #this #ty_generics;
+ type Value = #this_type #ty_generics;
fn expecting(&self, __formatter: &mut _serde::__private::Formatter) -> _serde::__private::fmt::Result {
_serde::__private::Formatter::write_str(__formatter, #expecting)
@@ -1638,7 +1648,7 @@ fn deserialize_adjacently_tagged_enum(
#type_name,
FIELDS,
__Visitor {
- marker: _serde::__private::PhantomData::<#this #ty_generics>,
+ marker: _serde::__private::PhantomData::<#this_type #ty_generics>,
lifetime: _serde::__private::PhantomData,
},
)
@@ -1707,10 +1717,10 @@ fn deserialize_externally_tagged_variant(
match variant.style {
Style::Unit => {
- let this = &params.this;
+ let this_value = &params.this_value;
quote_block! {
try!(_serde::de::VariantAccess::unit_variant(__variant));
- _serde::__private::Ok(#this::#variant_ident)
+ _serde::__private::Ok(#this_value::#variant_ident)
}
}
Style::Newtype => deserialize_externally_tagged_newtype_variant(
@@ -1749,7 +1759,7 @@ fn deserialize_internally_tagged_variant(
match effective_style(variant) {
Style::Unit => {
- let this = &params.this;
+ let this_value = &params.this_value;
let type_name = params.type_name();
let variant_name = variant.ident.to_string();
let default = variant.fields.get(0).map(|field| {
@@ -1758,7 +1768,7 @@ fn deserialize_internally_tagged_variant(
});
quote_block! {
try!(_serde::Deserializer::deserialize_any(#deserializer, _serde::__private::de::InternallyTaggedUnitVisitor::new(#type_name, #variant_name)));
- _serde::__private::Ok(#this::#variant_ident #default)
+ _serde::__private::Ok(#this_value::#variant_ident #default)
}
}
Style::Newtype => deserialize_untagged_newtype_variant(
@@ -1796,7 +1806,7 @@ fn deserialize_untagged_variant(
match effective_style(variant) {
Style::Unit => {
- let this = &params.this;
+ let this_value = &params.this_value;
let type_name = params.type_name();
let variant_name = variant.ident.to_string();
let default = variant.fields.get(0).map(|field| {
@@ -1808,7 +1818,7 @@ fn deserialize_untagged_variant(
#deserializer,
_serde::__private::de::UntaggedUnitVisitor::new(#type_name, #variant_name)
) {
- _serde::__private::Ok(()) => _serde::__private::Ok(#this::#variant_ident #default),
+ _serde::__private::Ok(()) => _serde::__private::Ok(#this_value::#variant_ident #default),
_serde::__private::Err(__err) => _serde::__private::Err(__err),
}
}
@@ -1843,14 +1853,13 @@ fn deserialize_externally_tagged_newtype_variant(
field: &Field,
cattrs: &attr::Container,
) -> Fragment {
- let this = &params.this;
+ let this_value = &params.this_value;
if field.attrs.skip_deserializing() {
- let this = &params.this;
let default = Expr(expr_is_missing(field, cattrs));
return quote_block! {
try!(_serde::de::VariantAccess::unit_variant(__variant));
- _serde::__private::Ok(#this::#variant_ident(#default))
+ _serde::__private::Ok(#this_value::#variant_ident(#default))
};
}
@@ -1861,7 +1870,7 @@ fn deserialize_externally_tagged_newtype_variant(
let func =
quote_spanned!(span=> _serde::de::VariantAccess::newtype_variant::<#field_ty>);
quote_expr! {
- _serde::__private::Result::map(#func(__variant), #this::#variant_ident)
+ _serde::__private::Result::map(#func(__variant), #this_value::#variant_ident)
}
}
Some(path) => {
@@ -1870,7 +1879,7 @@ fn deserialize_externally_tagged_newtype_variant(
#wrapper
_serde::__private::Result::map(
_serde::de::VariantAccess::newtype_variant::<#wrapper_ty>(__variant),
- |__wrapper| #this::#variant_ident(__wrapper.value))
+ |__wrapper| #this_value::#variant_ident(__wrapper.value))
}
}
}
@@ -1882,20 +1891,20 @@ fn deserialize_untagged_newtype_variant(
field: &Field,
deserializer: &TokenStream,
) -> Fragment {
- let this = &params.this;
+ let this_value = &params.this_value;
let field_ty = field.ty;
match field.attrs.deserialize_with() {
None => {
let span = field.original.span();
let func = quote_spanned!(span=> <#field_ty as _serde::Deserialize>::deserialize);
quote_expr! {
- _serde::__private::Result::map(#func(#deserializer), #this::#variant_ident)
+ _serde::__private::Result::map(#func(#deserializer), #this_value::#variant_ident)
}
}
Some(path) => {
quote_block! {
let __value: _serde::__private::Result<#field_ty, _> = #path(#deserializer);
- _serde::__private::Result::map(__value, #this::#variant_ident)
+ _serde::__private::Result::map(__value, #this_value::#variant_ident)
}
}
}
@@ -1907,7 +1916,7 @@ fn deserialize_generated_identifier(
is_variant: bool,
other_idx: Option<usize>,
) -> Fragment {
- let this = quote!(__Field);
+ let this_value = quote!(__Field);
let field_idents: &Vec<_> = &fields.iter().map(|(_, ident, _)| ident).collect();
let (ignore_variant, fallthrough) = if !is_variant && cattrs.has_flatten() {
@@ -1927,7 +1936,7 @@ fn deserialize_generated_identifier(
};
let visitor_impl = Stmts(deserialize_identifier(
- &this,
+ &this_value,
fields,
is_variant,
fallthrough,
@@ -1982,8 +1991,8 @@ fn deserialize_custom_identifier(
attr::Identifier::No => unreachable!(),
};
- let this = &params.this;
- let this = quote!(#this);
+ let this_type = params.this_type.to_token_stream();
+ let this_value = params.this_value.to_token_stream();
let (ordinary, fallthrough, fallthrough_borrowed) = if let Some(last) = variants.last() {
let last_ident = &last.ident;
@@ -1992,7 +2001,7 @@ fn deserialize_custom_identifier(
// last variant (checked in `check_identifier`), so all preceding
// are ordinary variants.
let ordinary = &variants[..variants.len() - 1];
- let fallthrough = quote!(_serde::__private::Ok(#this::#last_ident));
+ let fallthrough = quote!(_serde::__private::Ok(#this_value::#last_ident));
(ordinary, Some(fallthrough), None)
} else if let Style::Newtype = last.style {
let ordinary = &variants[..variants.len() - 1];
@@ -2002,7 +2011,7 @@ fn deserialize_custom_identifier(
_serde::Deserialize::deserialize(
_serde::__private::de::IdentifierDeserializer::from(#value)
),
- #this::#last_ident)
+ #this_value::#last_ident)
}
};
(
@@ -2050,7 +2059,7 @@ fn deserialize_custom_identifier(
split_with_de_lifetime(params);
let delife = params.borrowed.de_lifetime();
let visitor_impl = Stmts(deserialize_identifier(
- &this,
+ &this_value,
&names_idents,
is_variant,
fallthrough,
@@ -2063,18 +2072,18 @@ fn deserialize_custom_identifier(
#names_const
struct __FieldVisitor #de_impl_generics #where_clause {
- marker: _serde::__private::PhantomData<#this #ty_generics>,
+ marker: _serde::__private::PhantomData<#this_type #ty_generics>,
lifetime: _serde::__private::PhantomData<&#delife ()>,
}
impl #de_impl_generics _serde::de::Visitor<#delife> for __FieldVisitor #de_ty_generics #where_clause {
- type Value = #this #ty_generics;
+ type Value = #this_type #ty_generics;
#visitor_impl
}
let __visitor = __FieldVisitor {
- marker: _serde::__private::PhantomData::<#this #ty_generics>,
+ marker: _serde::__private::PhantomData::<#this_type #ty_generics>,
lifetime: _serde::__private::PhantomData,
};
_serde::Deserializer::deserialize_identifier(__deserializer, __visitor)
@@ -2082,7 +2091,7 @@ fn deserialize_custom_identifier(
}
fn deserialize_identifier(
- this: &TokenStream,
+ this_value: &TokenStream,
fields: &[(String, Ident, Vec<String>)],
is_variant: bool,
fallthrough: Option<TokenStream>,
@@ -2103,11 +2112,11 @@ fn deserialize_identifier(
let constructors: &Vec<_> = &flat_fields
.iter()
- .map(|(_, ident)| quote!(#this::#ident))
+ .map(|(_, ident)| quote!(#this_value::#ident))
.collect();
let main_constructors: &Vec<_> = &fields
.iter()
- .map(|(_, ident, _)| quote!(#this::#ident))
+ .map(|(_, ident, _)| quote!(#this_value::#ident))
.collect();
let expecting = expecting.unwrap_or(if is_variant {
@@ -2621,9 +2630,10 @@ fn deserialize_map(
let mut result = quote!(#struct_path { #(#result),* });
if params.has_getter {
- let this = &params.this;
+ let this_type = &params.this_type;
+ let (_, ty_generics, _) = params.generics.split_for_impl();
result = quote! {
- _serde::__private::Into::<#this>::into(#result)
+ _serde::__private::Into::<#this_type #ty_generics>::into(#result)
};
}
@@ -2803,15 +2813,15 @@ fn deserialize_map_in_place(
}
});
- let this = &params.this;
+ let this_type = &params.this_type;
let (_, _, ty_generics, _) = split_with_de_lifetime(params);
let let_default = match cattrs.default() {
attr::Default::Default => Some(quote!(
- let __default: #this #ty_generics = _serde::__private::Default::default();
+ let __default: #this_type #ty_generics = _serde::__private::Default::default();
)),
attr::Default::Path(path) => Some(quote!(
- let __default: #this #ty_generics = #path();
+ let __default: #this_type #ty_generics = #path();
)),
attr::Default::None => {
// We don't need the default value, to prevent an unused variable warning
@@ -2844,7 +2854,7 @@ fn wrap_deserialize_with(
value_ty: &TokenStream,
deserialize_with: &syn::ExprPath,
) -> (TokenStream, TokenStream) {
- let this = &params.this;
+ let this_type = &params.this_type;
let (de_impl_generics, de_ty_generics, ty_generics, where_clause) =
split_with_de_lifetime(params);
let delife = params.borrowed.de_lifetime();
@@ -2852,7 +2862,7 @@ fn wrap_deserialize_with(
let wrapper = quote! {
struct __DeserializeWith #de_impl_generics #where_clause {
value: #value_ty,
- phantom: _serde::__private::PhantomData<#this #ty_generics>,
+ phantom: _serde::__private::PhantomData<#this_type #ty_generics>,
lifetime: _serde::__private::PhantomData<&#delife ()>,
}
@@ -2903,7 +2913,7 @@ fn unwrap_to_variant_closure(
variant: &Variant,
with_wrapper: bool,
) -> TokenStream {
- let this = &params.this;
+ let this_value = &params.this_value;
let variant_ident = &variant.ident;
let (arg, wrapper) = if with_wrapper {
@@ -2924,23 +2934,23 @@ fn unwrap_to_variant_closure(
Style::Struct if variant.fields.len() == 1 => {
let member = &variant.fields[0].member;
quote! {
- |#arg| #this::#variant_ident { #member: #wrapper }
+ |#arg| #this_value::#variant_ident { #member: #wrapper }
}
}
Style::Struct => {
let members = variant.fields.iter().map(|field| &field.member);
quote! {
- |#arg| #this::#variant_ident { #(#members: #wrapper.#field_access),* }
+ |#arg| #this_value::#variant_ident { #(#members: #wrapper.#field_access),* }
}
}
Style::Tuple => quote! {
- |#arg| #this::#variant_ident(#(#wrapper.#field_access),*)
+ |#arg| #this_value::#variant_ident(#(#wrapper.#field_access),*)
},
Style::Newtype => quote! {
- |#arg| #this::#variant_ident(#wrapper)
+ |#arg| #this_value::#variant_ident(#wrapper)
},
Style::Unit => quote! {
- |#arg| #this::#variant_ident
+ |#arg| #this_value::#variant_ident
},
}
}
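
The hunks above replace `#this` with `#this_value` wherever the generated code names the type in expression position (constructors, enum variants, `Ok(#this_value::#variant_ident)`) and with `#this_type` where it appears in type position (`type Value`, `PhantomData<...>`). A minimal sketch, using a hypothetical type that is not part of the vendored code, of why Rust needs the two spellings:

```rust
// Minimal sketch (hypothetical type, not from the diff): generic arguments are
// written differently in type position and in expression position.
struct Wrapper<T>(T);

fn build() -> Wrapper<u32> {
    // Type position accepts `Wrapper<u32>` as-is (see the return type above),
    // but in expression position the turbofish form `Wrapper::<u32>` is
    // required when the arguments are spelled out explicitly.
    Wrapper::<u32>(42)
}

fn main() {
    let Wrapper(n) = build();
    assert_eq!(n, 42);
}
```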
diff --git a/vendor/serde_derive/src/internals/check.rs b/vendor/serde_derive/src/internals/check.rs
index 0e2484a79..eb1297aa7 100644
--- a/vendor/serde_derive/src/internals/check.rs
+++ b/vendor/serde_derive/src/internals/check.rs
@@ -6,6 +6,7 @@ use syn::{Member, Type};
/// Cross-cutting checks that require looking at more than a single attrs
/// object. Simpler checks should happen when parsing and building the attrs.
pub fn check(cx: &Ctxt, cont: &mut Container, derive: Derive) {
+ check_remote_generic(cx, cont);
check_getter(cx, cont);
check_flatten(cx, cont);
check_identifier(cx, cont);
@@ -16,6 +17,28 @@ pub fn check(cx: &Ctxt, cont: &mut Container, derive: Derive) {
check_from_and_try_from(cx, cont);
}
+/// Remote derive definition type must have either all of the generics of the
+/// remote type:
+///
+/// #[serde(remote = "Generic")]
+/// struct Generic<T> {…}
+///
+/// or none of them, i.e. defining impls for one concrete instantiation of the
+/// remote type only:
+///
+/// #[serde(remote = "Generic<T>")]
+/// struct ConcreteDef {…}
+///
+fn check_remote_generic(cx: &Ctxt, cont: &Container) {
+ if let Some(remote) = cont.attrs.remote() {
+ let local_has_generic = !cont.generics.params.is_empty();
+ let remote_has_generic = !remote.segments.last().unwrap().arguments.is_none();
+ if local_has_generic && remote_has_generic {
+ cx.error_spanned_by(remote, "remove generic parameters from this path");
+ }
+ }
+}
+
/// Getters are only allowed inside structs (not enums) with the `remote`
/// attribute.
fn check_getter(cx: &Ctxt, cont: &Container) {
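
For context, a hedged sketch (type and field names are hypothetical, not taken from this diff) of the two shapes that pass the new `check_remote_generic` check; repeating the generics on both the local definition and the remote path is the combination that triggers the "remove generic parameters from this path" error above.

```rust
use serde::{Deserialize, Serialize};

// Hypothetical remote type with a public field.
mod upstream {
    pub struct Seconds<T> {
        pub count: T,
    }
}

// Either keep all of the remote type's generics on the local definition and
// leave the remote path bare...
#[derive(Serialize, Deserialize)]
#[serde(remote = "upstream::Seconds")]
struct SecondsDef<T> {
    count: T,
}

// ...or drop the local generics and name one concrete instantiation in the path.
#[derive(Serialize, Deserialize)]
#[serde(remote = "upstream::Seconds<u64>")]
struct SecondsU64Def {
    count: u64,
}
```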
diff --git a/vendor/serde_derive/src/lib.rs b/vendor/serde_derive/src/lib.rs
index fc8529e40..410078911 100644
--- a/vendor/serde_derive/src/lib.rs
+++ b/vendor/serde_derive/src/lib.rs
@@ -13,7 +13,7 @@
//!
//! [https://serde.rs/derive.html]: https://serde.rs/derive.html
-#![doc(html_root_url = "https://docs.rs/serde_derive/1.0.147")]
+#![doc(html_root_url = "https://docs.rs/serde_derive/1.0.152")]
#![allow(unknown_lints, bare_trait_objects)]
// Ignored clippy lints
#![allow(
@@ -43,7 +43,6 @@
clippy::enum_glob_use,
clippy::indexing_slicing,
clippy::items_after_statements,
- clippy::let_underscore_drop,
clippy::manual_assert,
clippy::map_err_ignore,
clippy::match_same_arms,
@@ -85,6 +84,7 @@ mod de;
mod dummy;
mod pretend;
mod ser;
+mod this;
mod try;
#[proc_macro_derive(Serialize, attributes(serde))]
diff --git a/vendor/serde_derive/src/ser.rs b/vendor/serde_derive/src/ser.rs
index 529a20d79..43695dd0c 100644
--- a/vendor/serde_derive/src/ser.rs
+++ b/vendor/serde_derive/src/ser.rs
@@ -8,6 +8,7 @@ use fragment::{Fragment, Match, Stmts};
use internals::ast::{Container, Data, Field, Style, Variant};
use internals::{attr, replace_receiver, Ctxt, Derive};
use pretend;
+use this;
pub fn expand_derive_serialize(
input: &mut syn::DeriveInput,
@@ -82,9 +83,13 @@ struct Parameters {
self_var: Ident,
/// Path to the type the impl is for. Either a single `Ident` for local
- /// types or `some::remote::Ident` for remote types. Does not include
- /// generic parameters.
- this: syn::Path,
+ /// types (does not include generic parameters) or `some::remote::Path` for
+ /// remote types.
+ this_type: syn::Path,
+
+ /// Same as `this_type` but using `::<T>` for generic parameters for use in
+ /// expression position.
+ this_value: syn::Path,
/// Generics including any explicit and inferred bounds for the impl.
generics: syn::Generics,
@@ -105,18 +110,15 @@ impl Parameters {
Ident::new("self", Span::call_site())
};
- let this = match cont.attrs.remote() {
- Some(remote) => remote.clone(),
- None => cont.ident.clone().into(),
- };
-
+ let this_type = this::this_type(cont);
+ let this_value = this::this_value(cont);
let is_packed = cont.attrs.is_packed();
-
let generics = build_generics(cont);
Parameters {
self_var,
- this,
+ this_type,
+ this_value,
generics,
is_remote,
is_packed,
@@ -126,7 +128,7 @@ impl Parameters {
/// Type name to use in error messages and `&'static str` arguments to
/// various Serializer methods.
fn type_name(&self) -> String {
- self.this.segments.last().unwrap().ident.to_string()
+ self.this_type.segments.last().unwrap().ident.to_string()
}
}
@@ -427,7 +429,7 @@ fn serialize_variant(
variant_index: u32,
cattrs: &attr::Container,
) -> TokenStream {
- let this = &params.this;
+ let this_value = &params.this_value;
let variant_ident = &variant.ident;
if variant.attrs.skip_serializing() {
@@ -445,32 +447,32 @@ fn serialize_variant(
Style::Struct => quote!({ .. }),
};
quote! {
- #this::#variant_ident #fields_pat => #skipped_err,
+ #this_value::#variant_ident #fields_pat => #skipped_err,
}
} else {
// variant wasn't skipped
let case = match variant.style {
Style::Unit => {
quote! {
- #this::#variant_ident
+ #this_value::#variant_ident
}
}
Style::Newtype => {
quote! {
- #this::#variant_ident(ref __field0)
+ #this_value::#variant_ident(ref __field0)
}
}
Style::Tuple => {
let field_names = (0..variant.fields.len())
.map(|i| Ident::new(&format!("__field{}", i), Span::call_site()));
quote! {
- #this::#variant_ident(#(ref #field_names),*)
+ #this_value::#variant_ident(#(ref #field_names),*)
}
}
Style::Struct => {
let members = variant.fields.iter().map(|f| &f.member);
quote! {
- #this::#variant_ident { #(ref #members),* }
+ #this_value::#variant_ident { #(ref #members),* }
}
}
};
@@ -640,7 +642,7 @@ fn serialize_adjacently_tagged_variant(
tag: &str,
content: &str,
) -> Fragment {
- let this = &params.this;
+ let this_type = &params.this_type;
let type_name = cattrs.name().serialize_name();
let variant_name = variant.attrs.name().serialize_name();
@@ -719,7 +721,7 @@ fn serialize_adjacently_tagged_variant(
quote_block! {
struct __AdjacentlyTagged #wrapper_generics #where_clause {
data: (#(&'__a #fields_ty,)*),
- phantom: _serde::__private::PhantomData<#this #ty_generics>,
+ phantom: _serde::__private::PhantomData<#this_type #ty_generics>,
}
impl #wrapper_impl_generics _serde::Serialize for __AdjacentlyTagged #wrapper_ty_generics #where_clause {
@@ -741,7 +743,7 @@ fn serialize_adjacently_tagged_variant(
try!(_serde::ser::SerializeStruct::serialize_field(
&mut __struct, #content, &__AdjacentlyTagged {
data: (#(#fields_ident,)*),
- phantom: _serde::__private::PhantomData::<#this #ty_generics>,
+ phantom: _serde::__private::PhantomData::<#this_type #ty_generics>,
}));
_serde::ser::SerializeStruct::end(__struct)
}
@@ -866,8 +868,8 @@ enum StructVariant<'a> {
Untagged,
}
-fn serialize_struct_variant<'a>(
- context: StructVariant<'a>,
+fn serialize_struct_variant(
+ context: StructVariant,
params: &Parameters,
fields: &[Field],
name: &str,
@@ -950,8 +952,8 @@ fn serialize_struct_variant<'a>(
}
}
-fn serialize_struct_variant_with_flatten<'a>(
- context: StructVariant<'a>,
+fn serialize_struct_variant_with_flatten(
+ context: StructVariant,
params: &Parameters,
fields: &[Field],
name: &str,
@@ -971,7 +973,7 @@ fn serialize_struct_variant_with_flatten<'a>(
variant_index,
variant_name,
} => {
- let this = &params.this;
+ let this_type = &params.this_type;
let fields_ty = fields.iter().map(|f| &f.ty);
let members = &fields.iter().map(|f| &f.member).collect::<Vec<_>>();
@@ -982,7 +984,7 @@ fn serialize_struct_variant_with_flatten<'a>(
quote_block! {
struct __EnumFlatten #wrapper_generics #where_clause {
data: (#(&'__a #fields_ty,)*),
- phantom: _serde::__private::PhantomData<#this #ty_generics>,
+ phantom: _serde::__private::PhantomData<#this_type #ty_generics>,
}
impl #wrapper_impl_generics _serde::Serialize for __EnumFlatten #wrapper_ty_generics #where_clause {
@@ -1006,7 +1008,7 @@ fn serialize_struct_variant_with_flatten<'a>(
#variant_name,
&__EnumFlatten {
data: (#(#members,)*),
- phantom: _serde::__private::PhantomData::<#this #ty_generics>,
+ phantom: _serde::__private::PhantomData::<#this_type #ty_generics>,
})
}
}
@@ -1192,7 +1194,7 @@ fn wrap_serialize_with(
field_tys: &[&syn::Type],
field_exprs: &[TokenStream],
) -> TokenStream {
- let this = &params.this;
+ let this_type = &params.this_type;
let (_, ty_generics, where_clause) = params.generics.split_for_impl();
let wrapper_generics = if field_exprs.is_empty() {
@@ -1212,7 +1214,7 @@ fn wrap_serialize_with(
quote!({
struct __SerializeWith #wrapper_impl_generics #where_clause {
values: (#(&'__a #field_tys, )*),
- phantom: _serde::__private::PhantomData<#this #ty_generics>,
+ phantom: _serde::__private::PhantomData<#this_type #ty_generics>,
}
impl #wrapper_impl_generics _serde::Serialize for __SerializeWith #wrapper_ty_generics #where_clause {
@@ -1226,7 +1228,7 @@ fn wrap_serialize_with(
&__SerializeWith {
values: (#(#field_exprs, )*),
- phantom: _serde::__private::PhantomData::<#this #ty_generics>,
+ phantom: _serde::__private::PhantomData::<#this_type #ty_generics>,
}
})
}
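
The `__SerializeWith` and `__AdjacentlyTagged` helper structs above carry a `PhantomData` of the target type so the generated wrapper picks up the same generic parameters (and hence the same bounds) as the type being serialized. A minimal shape of that pattern outside the macro, with made-up names:

```rust
use std::marker::PhantomData;

// The PhantomData ties the wrapper to the target type's generics without
// storing a value of that type.
struct SerializeWith<'a, T> {
    values: (&'a str,),
    phantom: PhantomData<T>,
}

fn make<'a, T>(s: &'a str) -> SerializeWith<'a, T> {
    SerializeWith { values: (s,), phantom: PhantomData }
}

fn main() {
    let _w = make::<Vec<u8>>("hello");
}
```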
diff --git a/vendor/serde_derive/src/this.rs b/vendor/serde_derive/src/this.rs
new file mode 100644
index 000000000..32731d089
--- /dev/null
+++ b/vendor/serde_derive/src/this.rs
@@ -0,0 +1,32 @@
+use internals::ast::Container;
+use syn::{Path, PathArguments, Token};
+
+pub fn this_type(cont: &Container) -> Path {
+ if let Some(remote) = cont.attrs.remote() {
+ let mut this = remote.clone();
+ for segment in &mut this.segments {
+ if let PathArguments::AngleBracketed(arguments) = &mut segment.arguments {
+ arguments.colon2_token = None;
+ }
+ }
+ this
+ } else {
+ Path::from(cont.ident.clone())
+ }
+}
+
+pub fn this_value(cont: &Container) -> Path {
+ if let Some(remote) = cont.attrs.remote() {
+ let mut this = remote.clone();
+ for segment in &mut this.segments {
+ if let PathArguments::AngleBracketed(arguments) = &mut segment.arguments {
+ if arguments.colon2_token.is_none() {
+ arguments.colon2_token = Some(Token![::](arguments.lt_token.span));
+ }
+ }
+ }
+ this
+ } else {
+ Path::from(cont.ident.clone())
+ }
+}
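
A standalone sketch (built on syn 1.x and quote, mirroring but not part of the helper above) of the turbofish insertion that `this_value` performs on a remote path:

```rust
use quote::ToTokens;
use syn::{Path, PathArguments};

fn main() {
    // Hypothetical remote path carrying generic arguments.
    let mut path: Path = syn::parse_str("remote_crate::Wrapper<T>").unwrap();
    for segment in &mut path.segments {
        if let PathArguments::AngleBracketed(arguments) = &mut segment.arguments {
            // `Wrapper<T>` only parses in type position; adding the `::` gives
            // `Wrapper::<T>`, which is also valid in expression position.
            arguments.colon2_token.get_or_insert_with(Default::default);
        }
    }
    println!("{}", path.to_token_stream()); // remote_crate :: Wrapper :: < T >
}
```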
diff --git a/vendor/serde_json/.cargo-checksum.json b/vendor/serde_json/.cargo-checksum.json
index 69f778237..023247168 100644
--- a/vendor/serde_json/.cargo-checksum.json
+++ b/vendor/serde_json/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"CONTRIBUTING.md":"f5270cafba66223a7b51ffc0d286075a17bb7cd88762fc80d333d3102629f4d8","Cargo.toml":"6b32727a214b53c295b1c7ae174cee020957e5c7439ef842e94b391797098cba","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"7c6217e41b058880a37e01cf8caf205daa397d33cc2199f9fa1634048bc088c5","build.rs":"20e23e5bfe6fc8155fef0e0da036ebc1f81b34078fe86f929c8247b6f4317b99","src/de.rs":"0048bb57f39c7014a491c044d527598270fe417f1676ec7ba4529fea337746bc","src/error.rs":"abb92a9bf62cb7f47141a733a9fd66ec8c369615d7e6f633d3536fd2a5ac74a2","src/features_check/error.rs":"d7359f864afbfe105a38abea9f563dc423036ebc4c956a5695a4beef144dc7ec","src/features_check/mod.rs":"2209f8d5c46b50c8a3b8dc22338dcaf0135d192e8b05d2f456cbe6a73104e958","src/io/core.rs":"9a4146802391fd202a36bebbf3b14b715ae09d8828cbe8e06a01214c470ebf5c","src/io/mod.rs":"fd1ed5080495cab21117f6f7d3c2c9e3687cad0c69a0cd087b08a145a9e672da","src/iter.rs":"f832c469cd7999d26ba9b76baa69b257a212a7edb3dfdf9b1d1bb35e8da85fa9","src/lexical/algorithm.rs":"4fbeb1994049348d1fc388dd1a29e481f8abb8fe1e28bfebf50f3bbce5fa5fbe","src/lexical/bhcomp.rs":"b7c68d74c0055eb67ec2c1bcf27bbc28bef8f1bbc43db8eb94ba69892230add6","src/lexical/bignum.rs":"4230cde10dc8eae456a713cf90ec4e48dff4b1d0c542621ce7f00f39ade2645e","src/lexical/cached.rs":"0e127398691f8042c19cde209e7f4b0161f0f3150342430145929f711e6fdac8","src/lexical/cached_float80.rs":"0f8f74a22cb7d871322a9893bffd0255ca10bf9dffd13afb2462dd3d7f51805f","src/lexical/digit.rs":"a265b9072194a62a67dfc4df3c86d4213097cf3f82280d025e0012a5a262fd9d","src/lexical/errors.rs":"6bc993febceb7dd96ac1c8c5c53b5f5a30297016c0f813ed8ff8d7938d01534d","src/lexical/exponent.rs":"387e945b97dc7ba48a7091c50d228a0dde3a1c4145703d4ab9c31191a91693b0","src/lexical/float.rs":"fe356213c92a049f4bef2f58bc0e3a26866ca06b8c1d74d0f961c5b883852cad","src/lexical/large_powers.rs":"34537f5c701afce1ec2a1fd3c14950381b2e27c9ad74f002c91f3708e8da9ca5","src/lexical/large_powers32.rs":"d533037c6141e6671102aee490c9cdeaba81e667ddca781b2b99db2c455e4a1a","src/lexical/large_powers64.rs":"745dd7c0cbe499eec027ef586248881011d9df20c7efab7929c1807b59886ba1","src/lexical/math.rs":"27e22b724cdf990cdacd0ccfc3749e6e2eb7529d43ebf6e95b1999560b9e199a","src/lexical/mod.rs":"4b4c5228779c0f135a4cb018700e3bcd495da48b74421a86f6b8b304acdef924","src/lexical/num.rs":"cf705c62612e31d704f43d94a633ea1243c6befad7ef5792e2e881a7fd21e809","src/lexical/parse.rs":"c2bfac4c70a19938ced61e991f4ec606764887cf12bac1a0978b5b5318a56aac","src/lexical/rounding.rs":"697207248ba17b7f4965aedb11d276261ada5b06d9c6265d8fd6246664ff6e3c","src/lexical/shift.rs":"bc1ed053dd63d45ac9c35302f18de9f00d94027f28af4ab749c9248439de832a","src/lexical/small_powers.rs":"4608dd218b8002435db7e1ec79d2d0fef5f47ae257b93353326d52ecc80cccda","src/lib.rs":"3787fa7e66342e654359ae6aa81c0683a1260f819ad0e3a803363407122937cd","src/macros.rs":"c9f23156faec8d5216d72b6a97eebd768efb3f75870a6e2beed824308587b998","src/map.rs":"54e5e8ba63cf12a24f2c0257b1ace12f1935ca6d862e7b657209ee6634bb771f","src/number.rs":"157378ce0c01700b99839b8d5578fa46e4d525e6ec7d49e1920537618f51768f","src/raw.rs":"ee2b4ed085d8488e72c5a19791aae9de569d6c1381e9e64ff71afa03d5cf902a","src/read.rs":"49b4b1d067b6485cbded28fb961666ab5df82c36661af722dbae756efb6b2891","src/ser.rs":"44a57aa347c2a3b234635622b675fa3b2c30b818212f5a5eef02027cf3406a84","src/value/de.rs":"64a86f6c14c2c04abc4b6f06b90bfcbda097b37b7f3e990576801f170d4af1e4","src/value/from.rs":"1968835c63dcd4058850162
e3a8714b5a7e20eeec458fd4c200aa9ef0515f94c","src/value/index.rs":"8a99d8d50f5674181ced22f6e81dc529eaecb01e543e30346e51fe42cb4b8a5f","src/value/mod.rs":"47ff472a2426a135c7acdf59c4c83c7b7ce986269f10ec31c809a2b35152beda","src/value/partial_eq.rs":"95de799d57f7f4310b64a9488c0a7286dee76dba4329cb69a96298a887e58586","src/value/ser.rs":"3dafa51d46fa7d742398863c4f3a0251afc6193dac75e6231655c9c1690dcd54","tests/compiletest.rs":"022a8e400ef813d7ea1875b944549cee5125f6a995dc33e93b48cba3e1b57bd1","tests/debug.rs":"a8451217c1e127ad6e653ef11e0513525ee350e1e37dd575758a8ee9301b28fb","tests/lexical.rs":"383e06283e15e1db7c6b818da3c84a3afa0059f6e9fc994b069919d81346ab91","tests/lexical/algorithm.rs":"da378df9ee24bfa033968d5c94e91b58e52c39bf6c825dec51c3eb7250cc5874","tests/lexical/exponent.rs":"26ea92abc654a6a88a8281552bca2f76ea1fa4c17d66a1dd6defe14f7d89b666","tests/lexical/float.rs":"0440f2d85c993bcccd925096d7f4136bf624ffd66b3c7ee565d158390685eb11","tests/lexical/math.rs":"4874be2103be5fbe8b8015354414df271ffa00fd815546fc077f15fb4d7a5a37","tests/lexical/num.rs":"6e650c40de85ed72ac06b6bf1487ba161f3824e26d827df6cfdf2bbdb8d05a05","tests/lexical/parse.rs":"17c73e0a59d462716d974b8dd23a291eb6efdc3a933248874e5eab7e7209d67f","tests/lexical/rounding.rs":"6c56e39ba534616c1b2146e8efa6eb57aed322e683bf23183cd32a61fae6447e","tests/macros/mod.rs":"93aa1d54af20bc2c55b6ae8db73c1414cda2626eb9fa7bd57b9d613a3c6e6a19","tests/map.rs":"dcc5212242e4e93703c4335d54f5603b0211b33d6fb5ab410bb630cda6d46b09","tests/regression.rs":"86731134bfb9bb693d9a4fc62393027de80a8bf031109ea6c7ea475b1ebdde8d","tests/regression/issue520.rs":"d146be3472db902b48127d65fe83aa9f698143aca9074c83cd1a9d5dd28e3ec3","tests/regression/issue795.rs":"582e2e7c68113f05a4b1d2cb556a2df7cc77f2ce8164a32c5cc58ae68abb60ec","tests/regression/issue845.rs":"8bd64588fc344e119d0e9e5e7604236e7c168c574b0692033f15278e216a6b9c","tests/stream.rs":"c7d91014538ecd8f495b196d40e999ab2745f2e69fa2ff9e52521605dc6ce856","tests/test.rs":"809b07b6990004a389780fc45d7e81539bf95003a3f410873ce924153ae01ae0","tests/ui/missing_colon.rs":"d07e0c34d98eb43465f0a0310f2c0b5d5b0d26d243b352a1c6bbe6ad3b27eda9","tests/ui/missing_colon.stderr":"8dd5c769f36ede610172f69140a3faf603cd4590cb4abc8eaa1b499fe3537ad9","tests/ui/missing_comma.rs":"b8a9662f99c3e6dd2b6417892c37640578ce91d3a8365bf10c1f686a3227aa87","tests/ui/missing_comma.stderr":"c6acd42b41ee78b197c77ff513fc77a9495423bb912b188ce6ea2963b65dd82c","tests/ui/missing_value.rs":"bca25d67127fb88e7c191c7b03af5a4ce8a9abb630f3d2e6a6c1e77e213dc9a4","tests/ui/missing_value.stderr":"15727519f300c64d6968cd99398227f7fbbf660825459a0768f2bf947eadf752","tests/ui/not_found.rs":"d0a7adb309879ff65aee115b52cc33d36f4bad353cf97c4effc34a6128c2bee3","tests/ui/not_found.stderr":"359b751c0c21fab6d460daef4d5f73a265f7769c9b578f98ea3cb6cbf2387643","tests/ui/parse_expr.rs":"32e6d51f528db3d1ab0ed1e24765b865be393565c26f77413c5aa39d601ac563","tests/ui/parse_expr.stderr":"edfbaa14fa52f6fdb319c1e1aeac4f8870258930850e669d56aa94ef59ce4432","tests/ui/parse_key.rs":"18829b2af320d5cf8a0a5cd3aaf84c7e92cc874651c30e45a3acafb76c2d8b93","tests/ui/parse_key.stderr":"fcb44e060b804a4762b7291e128c41d7010ffa8ab820b8828fd13fbe6d405ca6","tests/ui/unexpected_after_array_element.rs":"a343fc3104431720bdfcf330bcc3cfcd98c8dec3e951133b495242478b0b7eb3","tests/ui/unexpected_after_array_element.stderr":"e0547b280bcc006155c481c66b49fbe2df577e9e741b7f75fcf6ab21e9e20969","tests/ui/unexpected_after_map_entry.rs":"6e3bd2def435ca610e346bbc75cdbaf61963eb2ef1885bb5f76781ba1fac37ef","tests/ui/unexpected_after_map_entry.stderr":"57
b7fc4fbff089dd5e5d76f4eba56a3357273c3f4b7ce93eea60891762cc4275","tests/ui/unexpected_colon.rs":"a313cff3fed4be4c33f1eda5d0c5c98147fb835a56d36470d9f367352c1d61ef","tests/ui/unexpected_colon.stderr":"41585758c8764f485e5c98b0cf6f5ad796f5482839f8644189d980ff422316cd","tests/ui/unexpected_comma.rs":"55a8b684bde1ce905837cce719fd457d8898b61cebc27e5b420d05cb6be97256","tests/ui/unexpected_comma.stderr":"847bb88d0db4d8a89b2a339d57eeb2d75af7670f31fcfdc687373a8681cc1653"},"package":"41feea4228a6f1cd09ec7a3593a682276702cd67b5273544757dae23c096f074"} \ No newline at end of file
+{"files":{"CONTRIBUTING.md":"f5270cafba66223a7b51ffc0d286075a17bb7cd88762fc80d333d3102629f4d8","Cargo.toml":"ef162b674028e85093b301f43972c272bc879f5f27f18011fe7ac620c4e9632a","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"96ca6cef9a39d6ba25578b28e5ae10b49fea2d6d4a00f088afc1d4656f087540","build.rs":"0dd97b63a07aa2d4bbb4c7d0e73b521da130769da9f49f28a7b63332781eb3de","src/de.rs":"5dba95fc6a564917289bf1e404d59c74f95772f22ec92cb91d55053b65e93032","src/error.rs":"abb92a9bf62cb7f47141a733a9fd66ec8c369615d7e6f633d3536fd2a5ac74a2","src/features_check/error.rs":"d7359f864afbfe105a38abea9f563dc423036ebc4c956a5695a4beef144dc7ec","src/features_check/mod.rs":"2209f8d5c46b50c8a3b8dc22338dcaf0135d192e8b05d2f456cbe6a73104e958","src/io/core.rs":"9a4146802391fd202a36bebbf3b14b715ae09d8828cbe8e06a01214c470ebf5c","src/io/mod.rs":"fd1ed5080495cab21117f6f7d3c2c9e3687cad0c69a0cd087b08a145a9e672da","src/iter.rs":"f832c469cd7999d26ba9b76baa69b257a212a7edb3dfdf9b1d1bb35e8da85fa9","src/lexical/algorithm.rs":"4fbeb1994049348d1fc388dd1a29e481f8abb8fe1e28bfebf50f3bbce5fa5fbe","src/lexical/bhcomp.rs":"b7c68d74c0055eb67ec2c1bcf27bbc28bef8f1bbc43db8eb94ba69892230add6","src/lexical/bignum.rs":"4230cde10dc8eae456a713cf90ec4e48dff4b1d0c542621ce7f00f39ade2645e","src/lexical/cached.rs":"0e127398691f8042c19cde209e7f4b0161f0f3150342430145929f711e6fdac8","src/lexical/cached_float80.rs":"0f8f74a22cb7d871322a9893bffd0255ca10bf9dffd13afb2462dd3d7f51805f","src/lexical/digit.rs":"a265b9072194a62a67dfc4df3c86d4213097cf3f82280d025e0012a5a262fd9d","src/lexical/errors.rs":"6bc993febceb7dd96ac1c8c5c53b5f5a30297016c0f813ed8ff8d7938d01534d","src/lexical/exponent.rs":"387e945b97dc7ba48a7091c50d228a0dde3a1c4145703d4ab9c31191a91693b0","src/lexical/float.rs":"fe356213c92a049f4bef2f58bc0e3a26866ca06b8c1d74d0f961c5b883852cad","src/lexical/large_powers.rs":"34537f5c701afce1ec2a1fd3c14950381b2e27c9ad74f002c91f3708e8da9ca5","src/lexical/large_powers32.rs":"d533037c6141e6671102aee490c9cdeaba81e667ddca781b2b99db2c455e4a1a","src/lexical/large_powers64.rs":"745dd7c0cbe499eec027ef586248881011d9df20c7efab7929c1807b59886ba1","src/lexical/math.rs":"27e22b724cdf990cdacd0ccfc3749e6e2eb7529d43ebf6e95b1999560b9e199a","src/lexical/mod.rs":"4b4c5228779c0f135a4cb018700e3bcd495da48b74421a86f6b8b304acdef924","src/lexical/num.rs":"cf705c62612e31d704f43d94a633ea1243c6befad7ef5792e2e881a7fd21e809","src/lexical/parse.rs":"c2bfac4c70a19938ced61e991f4ec606764887cf12bac1a0978b5b5318a56aac","src/lexical/rounding.rs":"697207248ba17b7f4965aedb11d276261ada5b06d9c6265d8fd6246664ff6e3c","src/lexical/shift.rs":"bc1ed053dd63d45ac9c35302f18de9f00d94027f28af4ab749c9248439de832a","src/lexical/small_powers.rs":"4608dd218b8002435db7e1ec79d2d0fef5f47ae257b93353326d52ecc80cccda","src/lib.rs":"61e0660b777318b649ada8e40583e668b80796b9a1b2013f77d0bdac3f8ec0eb","src/macros.rs":"c9f23156faec8d5216d72b6a97eebd768efb3f75870a6e2beed824308587b998","src/map.rs":"14fa16650b462ef138bebe1d18cb296b0e1ff404f12f2c212f72ed7c969b3a12","src/number.rs":"425f528c5ec2fcb4dd3c87a633d0b2cd505f7a305a40dffa1f022a643eda938b","src/raw.rs":"ee2b4ed085d8488e72c5a19791aae9de569d6c1381e9e64ff71afa03d5cf902a","src/read.rs":"49b4b1d067b6485cbded28fb961666ab5df82c36661af722dbae756efb6b2891","src/ser.rs":"566ae0b1860861ad70efd17fb0f6ae326e76d60453b6538c783a40d4ce616851","src/value/de.rs":"78f938d960e285f671f3b86ed173d598a815690a14512d6daa94dee43d3ce4cf","src/value/from.rs":"2dcfed837b040447a61eae5
0bd3938106b090f8a292206aea686022767006625","src/value/index.rs":"8a99d8d50f5674181ced22f6e81dc529eaecb01e543e30346e51fe42cb4b8a5f","src/value/mod.rs":"47ff472a2426a135c7acdf59c4c83c7b7ce986269f10ec31c809a2b35152beda","src/value/partial_eq.rs":"95de799d57f7f4310b64a9488c0a7286dee76dba4329cb69a96298a887e58586","src/value/ser.rs":"5f8eeff57e283527e1aabd89709ef8d6e006682be5929cbdb1d7d182fe185166","tests/compiletest.rs":"022a8e400ef813d7ea1875b944549cee5125f6a995dc33e93b48cba3e1b57bd1","tests/debug.rs":"a8451217c1e127ad6e653ef11e0513525ee350e1e37dd575758a8ee9301b28fb","tests/lexical.rs":"383e06283e15e1db7c6b818da3c84a3afa0059f6e9fc994b069919d81346ab91","tests/lexical/algorithm.rs":"da378df9ee24bfa033968d5c94e91b58e52c39bf6c825dec51c3eb7250cc5874","tests/lexical/exponent.rs":"26ea92abc654a6a88a8281552bca2f76ea1fa4c17d66a1dd6defe14f7d89b666","tests/lexical/float.rs":"0440f2d85c993bcccd925096d7f4136bf624ffd66b3c7ee565d158390685eb11","tests/lexical/math.rs":"4874be2103be5fbe8b8015354414df271ffa00fd815546fc077f15fb4d7a5a37","tests/lexical/num.rs":"6e650c40de85ed72ac06b6bf1487ba161f3824e26d827df6cfdf2bbdb8d05a05","tests/lexical/parse.rs":"17c73e0a59d462716d974b8dd23a291eb6efdc3a933248874e5eab7e7209d67f","tests/lexical/rounding.rs":"6c56e39ba534616c1b2146e8efa6eb57aed322e683bf23183cd32a61fae6447e","tests/macros/mod.rs":"93aa1d54af20bc2c55b6ae8db73c1414cda2626eb9fa7bd57b9d613a3c6e6a19","tests/map.rs":"dcc5212242e4e93703c4335d54f5603b0211b33d6fb5ab410bb630cda6d46b09","tests/regression.rs":"86731134bfb9bb693d9a4fc62393027de80a8bf031109ea6c7ea475b1ebdde8d","tests/regression/issue520.rs":"d146be3472db902b48127d65fe83aa9f698143aca9074c83cd1a9d5dd28e3ec3","tests/regression/issue795.rs":"582e2e7c68113f05a4b1d2cb556a2df7cc77f2ce8164a32c5cc58ae68abb60ec","tests/regression/issue845.rs":"8bd64588fc344e119d0e9e5e7604236e7c168c574b0692033f15278e216a6b9c","tests/regression/issue953.rs":"b2cddc761f5ca6639900c173765a8a5868528a896924e5e925db2696469208f7","tests/stream.rs":"c7d91014538ecd8f495b196d40e999ab2745f2e69fa2ff9e52521605dc6ce856","tests/test.rs":"cde57e6a0b2f23ca8b4ef83673955a0ba85da04db7875f47d37cc23773f32d28","tests/ui/missing_colon.rs":"d07e0c34d98eb43465f0a0310f2c0b5d5b0d26d243b352a1c6bbe6ad3b27eda9","tests/ui/missing_colon.stderr":"3732fd8f4e57b84efc07170cda5f9c5b2b17c707e23c1659222b5a46f652a8d8","tests/ui/missing_comma.rs":"b8a9662f99c3e6dd2b6417892c37640578ce91d3a8365bf10c1f686a3227aa87","tests/ui/missing_comma.stderr":"eae626cf93c97abd105066e624ca4e8cb096784413b9d2564cf9414a8492bc4d","tests/ui/missing_value.rs":"bca25d67127fb88e7c191c7b03af5a4ce8a9abb630f3d2e6a6c1e77e213dc9a4","tests/ui/missing_value.stderr":"b0df8add5cf74e5df30eedd3ca347e4862c04a01c54d802ff45392f2032065b1","tests/ui/not_found.rs":"d0a7adb309879ff65aee115b52cc33d36f4bad353cf97c4effc34a6128c2bee3","tests/ui/not_found.stderr":"359b751c0c21fab6d460daef4d5f73a265f7769c9b578f98ea3cb6cbf2387643","tests/ui/parse_expr.rs":"32e6d51f528db3d1ab0ed1e24765b865be393565c26f77413c5aa39d601ac563","tests/ui/parse_expr.stderr":"4fcd0a014fbce31c9266bab8527d6e6b6806a0e21d9e0275ce713137856073ce","tests/ui/parse_key.rs":"18829b2af320d5cf8a0a5cd3aaf84c7e92cc874651c30e45a3acafb76c2d8b93","tests/ui/parse_key.stderr":"fcb44e060b804a4762b7291e128c41d7010ffa8ab820b8828fd13fbe6d405ca6","tests/ui/unexpected_after_array_element.rs":"a343fc3104431720bdfcf330bcc3cfcd98c8dec3e951133b495242478b0b7eb3","tests/ui/unexpected_after_array_element.stderr":"8df615998fa3057bb9ed865981a35cdbb771625337048f0ad3fba7734e607adf","tests/ui/unexpected_after_map_entry.rs":"6e3bd2def435ca61
0e346bbc75cdbaf61963eb2ef1885bb5f76781ba1fac37ef","tests/ui/unexpected_after_map_entry.stderr":"b1985c89075ab48b2158bd1705ed766d37854b3d4620ab257cc8bc319d224f17","tests/ui/unexpected_colon.rs":"a313cff3fed4be4c33f1eda5d0c5c98147fb835a56d36470d9f367352c1d61ef","tests/ui/unexpected_colon.stderr":"b2288742fa6a4a7eb65d2ae899bcfed8795b57bd04958da227d60928a8df26c5","tests/ui/unexpected_comma.rs":"55a8b684bde1ce905837cce719fd457d8898b61cebc27e5b420d05cb6be97256","tests/ui/unexpected_comma.stderr":"4c103ca63ff15e2ca659242cc0eae0612bf050e7580da62f1cf50de8082aa7dc"},"package":"877c235533714907a8c2464236f5c4b2a17262ef1bd71f38f35ea592c8da6883"} \ No newline at end of file
diff --git a/vendor/serde_json/Cargo.toml b/vendor/serde_json/Cargo.toml
index 1e1aebb8e..08c94aacb 100644
--- a/vendor/serde_json/Cargo.toml
+++ b/vendor/serde_json/Cargo.toml
@@ -13,7 +13,7 @@
edition = "2018"
rust-version = "1.36"
name = "serde_json"
-version = "1.0.86"
+version = "1.0.91"
authors = [
"Erick Tryzelaar <erick.tryzelaar@gmail.com>",
"David Tolnay <dtolnay@gmail.com>",
@@ -48,6 +48,9 @@ rustdoc-args = [
[package.metadata.playground]
features = ["raw_value"]
+[lib]
+doc-scrape-examples = false
+
[dependencies.indexmap]
version = "1.5.2"
features = ["std"]
diff --git a/vendor/serde_json/README.md b/vendor/serde_json/README.md
index df58a47e4..50b5d458d 100644
--- a/vendor/serde_json/README.md
+++ b/vendor/serde_json/README.md
@@ -1,7 +1,7 @@
-# Serde JSON &emsp; [![Build Status]][travis] [![Latest Version]][crates.io] [![Rustc Version 1.36+]][rustc]
+# Serde JSON &emsp; [![Build Status]][actions] [![Latest Version]][crates.io] [![Rustc Version 1.36+]][rustc]
-[Build Status]: https://img.shields.io/github/workflow/status/serde-rs/json/CI/master
-[travis]: https://github.com/serde-rs/json/actions?query=branch%3Amaster
+[Build Status]: https://img.shields.io/github/actions/workflow/status/serde-rs/json/ci.yml?branch=master
+[actions]: https://github.com/serde-rs/json/actions?query=branch%3Amaster
[Latest Version]: https://img.shields.io/crates/v/serde_json.svg
[crates.io]: https://crates.io/crates/serde\_json
[Rustc Version 1.36+]: https://img.shields.io/badge/rustc-1.36+-lightgray.svg
@@ -350,8 +350,8 @@ closed without a response after some time.
## No-std support
As long as there is a memory allocator, it is possible to use serde_json without
-the rest of the Rust standard library. This is supported on Rust 1.36+. Disable
-the default "std" feature and enable the "alloc" feature:
+the rest of the Rust standard library. Disable the default "std" feature and
+enable the "alloc" feature:
```toml
[dependencies]
diff --git a/vendor/serde_json/build.rs b/vendor/serde_json/build.rs
index e9ec7d56a..0e12602e4 100644
--- a/vendor/serde_json/build.rs
+++ b/vendor/serde_json/build.rs
@@ -3,6 +3,8 @@ use std::process::Command;
use std::str::{self, FromStr};
fn main() {
+ println!("cargo:rerun-if-changed=build.rs");
+
// Decide ideal limb width for arithmetic in the float parser. Refer to
// src/lexical/math.rs for where this has an effect.
let target_arch = env::var("CARGO_CFG_TARGET_ARCH").unwrap();
diff --git a/vendor/serde_json/src/de.rs b/vendor/serde_json/src/de.rs
index ffd0d48c2..88d0f2624 100644
--- a/vendor/serde_json/src/de.rs
+++ b/vendor/serde_json/src/de.rs
@@ -14,7 +14,7 @@ use core::marker::PhantomData;
use core::result;
use core::str::FromStr;
use serde::de::{self, Expected, Unexpected};
-use serde::{forward_to_deserialize_any, serde_if_integer128};
+use serde::forward_to_deserialize_any;
#[cfg(feature = "arbitrary_precision")]
use crate::number::NumberDeserializer;
@@ -335,31 +335,25 @@ impl<'de, R: Read<'de>> Deserializer<R> {
}
}
- serde_if_integer128! {
- fn scan_integer128(&mut self, buf: &mut String) -> Result<()> {
- match tri!(self.next_char_or_null()) {
- b'0' => {
- buf.push('0');
- // There can be only one leading '0'.
- match tri!(self.peek_or_null()) {
- b'0'..=b'9' => {
- Err(self.peek_error(ErrorCode::InvalidNumber))
- }
- _ => Ok(()),
- }
+ fn scan_integer128(&mut self, buf: &mut String) -> Result<()> {
+ match tri!(self.next_char_or_null()) {
+ b'0' => {
+ buf.push('0');
+ // There can be only one leading '0'.
+ match tri!(self.peek_or_null()) {
+ b'0'..=b'9' => Err(self.peek_error(ErrorCode::InvalidNumber)),
+ _ => Ok(()),
}
- c @ b'1'..=b'9' => {
+ }
+ c @ b'1'..=b'9' => {
+ buf.push(c as char);
+ while let c @ b'0'..=b'9' = tri!(self.peek_or_null()) {
+ self.eat_char();
buf.push(c as char);
- while let c @ b'0'..=b'9' = tri!(self.peek_or_null()) {
- self.eat_char();
- buf.push(c as char);
- }
- Ok(())
- }
- _ => {
- Err(self.error(ErrorCode::InvalidNumber))
}
+ Ok(())
}
+ _ => Err(self.error(ErrorCode::InvalidNumber)),
}
}
@@ -457,30 +451,33 @@ impl<'de, R: Read<'de>> Deserializer<R> {
&mut self,
positive: bool,
mut significand: u64,
- mut exponent: i32,
+ exponent_before_decimal_point: i32,
) -> Result<f64> {
self.eat_char();
+ let mut exponent_after_decimal_point = 0;
while let c @ b'0'..=b'9' = tri!(self.peek_or_null()) {
let digit = (c - b'0') as u64;
if overflow!(significand * 10 + digit, u64::max_value()) {
+ let exponent = exponent_before_decimal_point + exponent_after_decimal_point;
return self.parse_decimal_overflow(positive, significand, exponent);
}
self.eat_char();
significand = significand * 10 + digit;
- exponent -= 1;
+ exponent_after_decimal_point -= 1;
}
// Error if there is not at least one digit after the decimal point.
- if exponent == 0 {
+ if exponent_after_decimal_point == 0 {
match tri!(self.peek()) {
Some(_) => return Err(self.peek_error(ErrorCode::InvalidNumber)),
None => return Err(self.peek_error(ErrorCode::EofWhileParsingValue)),
}
}
+ let exponent = exponent_before_decimal_point + exponent_after_decimal_point;
match tri!(self.peek_or_null()) {
b'e' | b'E' => self.parse_exponent(positive, significand, exponent),
_ => self.f64_from_parts(positive, significand, exponent),
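
The hunk above is bookkeeping only: the exponent handed to `f64_from_parts` (or to `parse_decimal_overflow` when the significand saturates mid-fraction) is now the sum of the part accumulated before the decimal point and the count of digits consumed after it. A small worked sketch of that decomposition, with made-up input:

```rust
// Worked sketch for the made-up input "1234.56":
// value = significand * 10^(exponent_before_decimal_point + exponent_after_decimal_point)
fn main() {
    let significand: u64 = 123456;             // all digits, '.' dropped
    let exponent_before_decimal_point = 0;     // integer part fit into the significand
    let exponent_after_decimal_point = -2;     // two digits were consumed after the '.'
    let exponent = exponent_before_decimal_point + exponent_after_decimal_point;
    let value = significand as f64 * 10f64.powi(exponent);
    assert!((value - 1234.56).abs() < 1e-9);
}
```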
@@ -1437,67 +1434,65 @@ impl<'de, 'a, R: Read<'de>> de::Deserializer<'de> for &'a mut Deserializer<R> {
val
}
- serde_if_integer128! {
- fn deserialize_i128<V>(self, visitor: V) -> Result<V::Value>
- where
- V: de::Visitor<'de>,
- {
- let mut buf = String::new();
-
- match tri!(self.parse_whitespace()) {
- Some(b'-') => {
- self.eat_char();
- buf.push('-');
- }
- Some(_) => {}
- None => {
- return Err(self.peek_error(ErrorCode::EofWhileParsingValue));
- }
- };
+ fn deserialize_i128<V>(self, visitor: V) -> Result<V::Value>
+ where
+ V: de::Visitor<'de>,
+ {
+ let mut buf = String::new();
- tri!(self.scan_integer128(&mut buf));
+ match tri!(self.parse_whitespace()) {
+ Some(b'-') => {
+ self.eat_char();
+ buf.push('-');
+ }
+ Some(_) => {}
+ None => {
+ return Err(self.peek_error(ErrorCode::EofWhileParsingValue));
+ }
+ };
- let value = match buf.parse() {
- Ok(int) => visitor.visit_i128(int),
- Err(_) => {
- return Err(self.error(ErrorCode::NumberOutOfRange));
- }
- };
+ tri!(self.scan_integer128(&mut buf));
- match value {
- Ok(value) => Ok(value),
- Err(err) => Err(self.fix_position(err)),
+ let value = match buf.parse() {
+ Ok(int) => visitor.visit_i128(int),
+ Err(_) => {
+ return Err(self.error(ErrorCode::NumberOutOfRange));
}
+ };
+
+ match value {
+ Ok(value) => Ok(value),
+ Err(err) => Err(self.fix_position(err)),
}
+ }
- fn deserialize_u128<V>(self, visitor: V) -> Result<V::Value>
- where
- V: de::Visitor<'de>,
- {
- match tri!(self.parse_whitespace()) {
- Some(b'-') => {
- return Err(self.peek_error(ErrorCode::NumberOutOfRange));
- }
- Some(_) => {}
- None => {
- return Err(self.peek_error(ErrorCode::EofWhileParsingValue));
- }
+ fn deserialize_u128<V>(self, visitor: V) -> Result<V::Value>
+ where
+ V: de::Visitor<'de>,
+ {
+ match tri!(self.parse_whitespace()) {
+ Some(b'-') => {
+ return Err(self.peek_error(ErrorCode::NumberOutOfRange));
}
+ Some(_) => {}
+ None => {
+ return Err(self.peek_error(ErrorCode::EofWhileParsingValue));
+ }
+ }
- let mut buf = String::new();
- tri!(self.scan_integer128(&mut buf));
-
- let value = match buf.parse() {
- Ok(int) => visitor.visit_u128(int),
- Err(_) => {
- return Err(self.error(ErrorCode::NumberOutOfRange));
- }
- };
+ let mut buf = String::new();
+ tri!(self.scan_integer128(&mut buf));
- match value {
- Ok(value) => Ok(value),
- Err(err) => Err(self.fix_position(err)),
+ let value = match buf.parse() {
+ Ok(int) => visitor.visit_u128(int),
+ Err(_) => {
+ return Err(self.error(ErrorCode::NumberOutOfRange));
}
+ };
+
+ match value {
+ Ok(value) => Ok(value),
+ Err(err) => Err(self.fix_position(err)),
}
}
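
With the `serde_if_integer128!` gate removed, the 128-bit deserialization hooks are always compiled in. A quick usage check, assuming the serde_json crate as updated here:

```rust
fn main() {
    // i128::MIN does not fit in i64 but deserializes unconditionally.
    let n: i128 = serde_json::from_str("-170141183460469231731687303715884105728").unwrap();
    assert_eq!(n, i128::MIN);
}
```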
@@ -2164,15 +2159,12 @@ where
deserialize_integer_key!(deserialize_i16 => visit_i16);
deserialize_integer_key!(deserialize_i32 => visit_i32);
deserialize_integer_key!(deserialize_i64 => visit_i64);
+ deserialize_integer_key!(deserialize_i128 => visit_i128);
deserialize_integer_key!(deserialize_u8 => visit_u8);
deserialize_integer_key!(deserialize_u16 => visit_u16);
deserialize_integer_key!(deserialize_u32 => visit_u32);
deserialize_integer_key!(deserialize_u64 => visit_u64);
-
- serde_if_integer128! {
- deserialize_integer_key!(deserialize_i128 => visit_i128);
- deserialize_integer_key!(deserialize_u128 => visit_u128);
- }
+ deserialize_integer_key!(deserialize_u128 => visit_u128);
#[inline]
fn deserialize_option<V>(self, visitor: V) -> Result<V::Value>
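
The same un-gating applies to the map-key deserializer above, so 128-bit integer keys (which JSON stores as strings of digits) are handled without any cfg involvement. A small sketch:

```rust
use std::collections::BTreeMap;

fn main() {
    let json = r#"{"340282366920938463463374607431768211455": "max"}"#;
    let map: BTreeMap<u128, String> = serde_json::from_str(json).unwrap();
    assert_eq!(map[&u128::MAX], "max");
}
```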
diff --git a/vendor/serde_json/src/lib.rs b/vendor/serde_json/src/lib.rs
index 3a1983c3f..fdd95a121 100644
--- a/vendor/serde_json/src/lib.rs
+++ b/vendor/serde_json/src/lib.rs
@@ -279,8 +279,8 @@
//! # No-std support
//!
//! As long as there is a memory allocator, it is possible to use serde_json
-//! without the rest of the Rust standard library. This is supported on Rust
-//! 1.36+. Disable the default "std" feature and enable the "alloc" feature:
+//! without the rest of the Rust standard library. Disable the default "std"
+//! feature and enable the "alloc" feature:
//!
//! ```toml
//! [dependencies]
@@ -300,7 +300,7 @@
//! [macro]: https://docs.serde.rs/serde_json/macro.json.html
//! [`serde-json-core`]: https://github.com/rust-embedded-community/serde-json-core
-#![doc(html_root_url = "https://docs.rs/serde_json/1.0.86")]
+#![doc(html_root_url = "https://docs.rs/serde_json/1.0.91")]
// Ignored clippy lints
#![allow(
clippy::collapsible_else_if,
diff --git a/vendor/serde_json/src/map.rs b/vendor/serde_json/src/map.rs
index 87cf54566..3e8a3814c 100644
--- a/vendor/serde_json/src/map.rs
+++ b/vendor/serde_json/src/map.rs
@@ -197,9 +197,8 @@ impl Map<String, Value> {
#[inline]
pub fn append(&mut self, other: &mut Self) {
#[cfg(feature = "preserve_order")]
- for (k, v) in mem::replace(&mut other.map, MapImpl::default()) {
- self.map.insert(k, v);
- }
+ self.map
+ .extend(mem::replace(&mut other.map, MapImpl::default()));
#[cfg(not(feature = "preserve_order"))]
self.map.append(&mut other.map);
}
@@ -304,6 +303,11 @@ impl Clone for Map<String, Value> {
map: self.map.clone(),
}
}
+
+ #[inline]
+ fn clone_from(&mut self, source: &Self) {
+ self.map.clone_from(&source.map);
+ }
}
impl PartialEq for Map<String, Value> {
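
The new `clone_from` override forwards to the underlying map's `clone_from`, which may reuse the destination's existing allocation instead of always building a fresh map the way `dst = src.clone()` does. Usage sketch:

```rust
use serde_json::{json, Map, Value};

fn main() {
    let src: Map<String, Value> = json!({"a": 1, "b": 2}).as_object().unwrap().clone();
    let mut dst = Map::new();
    dst.clone_from(&src); // same result as `dst = src.clone()`, possibly reusing dst's storage
    assert_eq!(dst, src);
}
```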
diff --git a/vendor/serde_json/src/number.rs b/vendor/serde_json/src/number.rs
index 3c8f6f1bf..21a76411c 100644
--- a/vendor/serde_json/src/number.rs
+++ b/vendor/serde_json/src/number.rs
@@ -12,10 +12,7 @@ use core::hash::{Hash, Hasher};
use serde::de::{self, Unexpected, Visitor};
#[cfg(feature = "arbitrary_precision")]
use serde::de::{IntoDeserializer, MapAccess};
-use serde::{
- forward_to_deserialize_any, serde_if_integer128, Deserialize, Deserializer, Serialize,
- Serializer,
-};
+use serde::{forward_to_deserialize_any, Deserialize, Deserializer, Serialize, Serializer};
#[cfg(feature = "arbitrary_precision")]
pub(crate) const TOKEN: &str = "$serde_json::private::Number";
@@ -540,18 +537,15 @@ impl<'de> Deserializer<'de> for Number {
deserialize_number!(deserialize_i16 => visit_i16);
deserialize_number!(deserialize_i32 => visit_i32);
deserialize_number!(deserialize_i64 => visit_i64);
+ deserialize_number!(deserialize_i128 => visit_i128);
deserialize_number!(deserialize_u8 => visit_u8);
deserialize_number!(deserialize_u16 => visit_u16);
deserialize_number!(deserialize_u32 => visit_u32);
deserialize_number!(deserialize_u64 => visit_u64);
+ deserialize_number!(deserialize_u128 => visit_u128);
deserialize_number!(deserialize_f32 => visit_f32);
deserialize_number!(deserialize_f64 => visit_f64);
- serde_if_integer128! {
- deserialize_number!(deserialize_i128 => visit_i128);
- deserialize_number!(deserialize_u128 => visit_u128);
- }
-
forward_to_deserialize_any! {
bool char str string bytes byte_buf option unit unit_struct
newtype_struct seq tuple tuple_struct map struct enum identifier
@@ -568,18 +562,15 @@ impl<'de, 'a> Deserializer<'de> for &'a Number {
deserialize_number!(deserialize_i16 => visit_i16);
deserialize_number!(deserialize_i32 => visit_i32);
deserialize_number!(deserialize_i64 => visit_i64);
+ deserialize_number!(deserialize_i128 => visit_i128);
deserialize_number!(deserialize_u8 => visit_u8);
deserialize_number!(deserialize_u16 => visit_u16);
deserialize_number!(deserialize_u32 => visit_u32);
deserialize_number!(deserialize_u64 => visit_u64);
+ deserialize_number!(deserialize_u128 => visit_u128);
deserialize_number!(deserialize_f32 => visit_f32);
deserialize_number!(deserialize_f64 => visit_f64);
- serde_if_integer128! {
- deserialize_number!(deserialize_i128 => visit_i128);
- deserialize_number!(deserialize_u128 => visit_u128);
- }
-
forward_to_deserialize_any! {
bool char str string bytes byte_buf option unit unit_struct
newtype_struct seq tuple tuple_struct map struct enum identifier
@@ -731,21 +722,9 @@ impl_from_unsigned!(u8, u16, u32, u64, usize);
impl_from_signed!(i8, i16, i32, i64, isize);
#[cfg(feature = "arbitrary_precision")]
-serde_if_integer128! {
- impl From<i128> for Number {
- fn from(i: i128) -> Self {
- let n = itoa::Buffer::new().format(i).to_owned();
- Number { n }
- }
- }
-
- impl From<u128> for Number {
- fn from(u: u128) -> Self {
- let n = itoa::Buffer::new().format(u).to_owned();
- Number { n }
- }
- }
-}
+impl_from_unsigned!(u128);
+#[cfg(feature = "arbitrary_precision")]
+impl_from_signed!(i128);
impl Number {
#[cfg(not(feature = "arbitrary_precision"))]
diff --git a/vendor/serde_json/src/ser.rs b/vendor/serde_json/src/ser.rs
index 64cb00e1a..80c2deb0c 100644
--- a/vendor/serde_json/src/ser.rs
+++ b/vendor/serde_json/src/ser.rs
@@ -7,7 +7,6 @@ use alloc::vec::Vec;
use core::fmt::{self, Display};
use core::num::FpCategory;
use serde::ser::{self, Impossible, Serialize};
-use serde::serde_if_integer128;
/// A structure for serializing Rust values into JSON.
#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
@@ -75,137 +74,105 @@ where
#[inline]
fn serialize_bool(self, value: bool) -> Result<()> {
- tri!(self
- .formatter
+ self.formatter
.write_bool(&mut self.writer, value)
- .map_err(Error::io));
- Ok(())
+ .map_err(Error::io)
}
#[inline]
fn serialize_i8(self, value: i8) -> Result<()> {
- tri!(self
- .formatter
+ self.formatter
.write_i8(&mut self.writer, value)
- .map_err(Error::io));
- Ok(())
+ .map_err(Error::io)
}
#[inline]
fn serialize_i16(self, value: i16) -> Result<()> {
- tri!(self
- .formatter
+ self.formatter
.write_i16(&mut self.writer, value)
- .map_err(Error::io));
- Ok(())
+ .map_err(Error::io)
}
#[inline]
fn serialize_i32(self, value: i32) -> Result<()> {
- tri!(self
- .formatter
+ self.formatter
.write_i32(&mut self.writer, value)
- .map_err(Error::io));
- Ok(())
+ .map_err(Error::io)
}
#[inline]
fn serialize_i64(self, value: i64) -> Result<()> {
- tri!(self
- .formatter
+ self.formatter
.write_i64(&mut self.writer, value)
- .map_err(Error::io));
- Ok(())
+ .map_err(Error::io)
}
- serde_if_integer128! {
- fn serialize_i128(self, value: i128) -> Result<()> {
- self.formatter
- .write_number_str(&mut self.writer, &value.to_string())
- .map_err(Error::io)
- }
+ fn serialize_i128(self, value: i128) -> Result<()> {
+ self.formatter
+ .write_i128(&mut self.writer, value)
+ .map_err(Error::io)
}
#[inline]
fn serialize_u8(self, value: u8) -> Result<()> {
- tri!(self
- .formatter
+ self.formatter
.write_u8(&mut self.writer, value)
- .map_err(Error::io));
- Ok(())
+ .map_err(Error::io)
}
#[inline]
fn serialize_u16(self, value: u16) -> Result<()> {
- tri!(self
- .formatter
+ self.formatter
.write_u16(&mut self.writer, value)
- .map_err(Error::io));
- Ok(())
+ .map_err(Error::io)
}
#[inline]
fn serialize_u32(self, value: u32) -> Result<()> {
- tri!(self
- .formatter
+ self.formatter
.write_u32(&mut self.writer, value)
- .map_err(Error::io));
- Ok(())
+ .map_err(Error::io)
}
#[inline]
fn serialize_u64(self, value: u64) -> Result<()> {
- tri!(self
- .formatter
+ self.formatter
.write_u64(&mut self.writer, value)
- .map_err(Error::io));
- Ok(())
+ .map_err(Error::io)
}
- serde_if_integer128! {
- fn serialize_u128(self, value: u128) -> Result<()> {
- self.formatter
- .write_number_str(&mut self.writer, &value.to_string())
- .map_err(Error::io)
- }
+ fn serialize_u128(self, value: u128) -> Result<()> {
+ self.formatter
+ .write_u128(&mut self.writer, value)
+ .map_err(Error::io)
}
#[inline]
fn serialize_f32(self, value: f32) -> Result<()> {
match value.classify() {
- FpCategory::Nan | FpCategory::Infinite => {
- tri!(self
- .formatter
- .write_null(&mut self.writer)
- .map_err(Error::io));
- }
- _ => {
- tri!(self
- .formatter
- .write_f32(&mut self.writer, value)
- .map_err(Error::io));
- }
+ FpCategory::Nan | FpCategory::Infinite => self
+ .formatter
+ .write_null(&mut self.writer)
+ .map_err(Error::io),
+ _ => self
+ .formatter
+ .write_f32(&mut self.writer, value)
+ .map_err(Error::io),
}
- Ok(())
}
#[inline]
fn serialize_f64(self, value: f64) -> Result<()> {
match value.classify() {
- FpCategory::Nan | FpCategory::Infinite => {
- tri!(self
- .formatter
- .write_null(&mut self.writer)
- .map_err(Error::io));
- }
- _ => {
- tri!(self
- .formatter
- .write_f64(&mut self.writer, value)
- .map_err(Error::io));
- }
+ FpCategory::Nan | FpCategory::Infinite => self
+ .formatter
+ .write_null(&mut self.writer)
+ .map_err(Error::io),
+ _ => self
+ .formatter
+ .write_f64(&mut self.writer, value)
+ .map_err(Error::io),
}
- Ok(())
}
#[inline]
@@ -217,8 +184,7 @@ where
#[inline]
fn serialize_str(self, value: &str) -> Result<()> {
- tri!(format_escaped_str(&mut self.writer, &mut self.formatter, value).map_err(Error::io));
- Ok(())
+ format_escaped_str(&mut self.writer, &mut self.formatter, value).map_err(Error::io)
}
#[inline]
@@ -233,11 +199,9 @@ where
#[inline]
fn serialize_unit(self) -> Result<()> {
- tri!(self
- .formatter
+ self.formatter
.write_null(&mut self.writer)
- .map_err(Error::io));
- Ok(())
+ .map_err(Error::io)
}
#[inline]
@@ -297,11 +261,9 @@ where
.formatter
.end_object_value(&mut self.writer)
.map_err(Error::io));
- tri!(self
- .formatter
+ self.formatter
.end_object(&mut self.writer)
- .map_err(Error::io));
- Ok(())
+ .map_err(Error::io)
}
#[inline]
@@ -490,11 +452,9 @@ where
}
}
}
- tri!(self
- .formatter
+ self.formatter
.end_string(&mut self.writer)
- .map_err(Error::io));
- Ok(())
+ .map_err(Error::io)
}
}
@@ -541,11 +501,9 @@ where
.map_err(Error::io));
*state = State::Rest;
tri!(value.serialize(&mut **ser));
- tri!(ser
- .formatter
+ ser.formatter
.end_array_value(&mut ser.writer)
- .map_err(Error::io));
- Ok(())
+ .map_err(Error::io)
}
#[cfg(feature = "arbitrary_precision")]
Compound::Number { .. } => unreachable!(),
@@ -557,13 +515,10 @@ where
#[inline]
fn end(self) -> Result<()> {
match self {
- Compound::Map { ser, state } => {
- match state {
- State::Empty => {}
- _ => tri!(ser.formatter.end_array(&mut ser.writer).map_err(Error::io)),
- }
- Ok(())
- }
+ Compound::Map { ser, state } => match state {
+ State::Empty => Ok(()),
+ _ => ser.formatter.end_array(&mut ser.writer).map_err(Error::io),
+ },
#[cfg(feature = "arbitrary_precision")]
Compound::Number { .. } => unreachable!(),
#[cfg(feature = "raw_value")]
@@ -644,8 +599,7 @@ where
.formatter
.end_object_value(&mut ser.writer)
.map_err(Error::io));
- tri!(ser.formatter.end_object(&mut ser.writer).map_err(Error::io));
- Ok(())
+ ser.formatter.end_object(&mut ser.writer).map_err(Error::io)
}
#[cfg(feature = "arbitrary_precision")]
Compound::Number { .. } => unreachable!(),
@@ -678,11 +632,9 @@ where
tri!(key.serialize(MapKeySerializer { ser: *ser }));
- tri!(ser
- .formatter
+ ser.formatter
.end_object_key(&mut ser.writer)
- .map_err(Error::io));
- Ok(())
+ .map_err(Error::io)
}
#[cfg(feature = "arbitrary_precision")]
Compound::Number { .. } => unreachable!(),
@@ -703,11 +655,9 @@ where
.begin_object_value(&mut ser.writer)
.map_err(Error::io));
tri!(value.serialize(&mut **ser));
- tri!(ser
- .formatter
+ ser.formatter
.end_object_value(&mut ser.writer)
- .map_err(Error::io));
- Ok(())
+ .map_err(Error::io)
}
#[cfg(feature = "arbitrary_precision")]
Compound::Number { .. } => unreachable!(),
@@ -719,13 +669,10 @@ where
#[inline]
fn end(self) -> Result<()> {
match self {
- Compound::Map { ser, state } => {
- match state {
- State::Empty => {}
- _ => tri!(ser.formatter.end_object(&mut ser.writer).map_err(Error::io)),
- }
- Ok(())
- }
+ Compound::Map { ser, state } => match state {
+ State::Empty => Ok(()),
+ _ => ser.formatter.end_object(&mut ser.writer).map_err(Error::io),
+ },
#[cfg(feature = "arbitrary_precision")]
Compound::Number { .. } => unreachable!(),
#[cfg(feature = "raw_value")]
@@ -752,8 +699,7 @@ where
#[cfg(feature = "arbitrary_precision")]
Compound::Number { ser, .. } => {
if key == crate::number::TOKEN {
- tri!(value.serialize(NumberStrEmitter(ser)));
- Ok(())
+ value.serialize(NumberStrEmitter(ser))
} else {
Err(invalid_number())
}
@@ -761,8 +707,7 @@ where
#[cfg(feature = "raw_value")]
Compound::RawValue { ser, .. } => {
if key == crate::raw::TOKEN {
- tri!(value.serialize(RawValueStrEmitter(ser)));
- Ok(())
+ value.serialize(RawValueStrEmitter(ser))
} else {
Err(invalid_raw_value())
}
@@ -816,8 +761,7 @@ where
.formatter
.end_object_value(&mut ser.writer)
.map_err(Error::io));
- tri!(ser.formatter.end_object(&mut ser.writer).map_err(Error::io));
- Ok(())
+ ser.formatter.end_object(&mut ser.writer).map_err(Error::io)
}
#[cfg(feature = "arbitrary_precision")]
Compound::Number { .. } => unreachable!(),
@@ -899,12 +843,10 @@ where
.formatter
.write_i8(&mut self.ser.writer, value)
.map_err(Error::io));
- tri!(self
- .ser
+ self.ser
.formatter
.end_string(&mut self.ser.writer)
- .map_err(Error::io));
- Ok(())
+ .map_err(Error::io)
}
fn serialize_i16(self, value: i16) -> Result<()> {
@@ -918,12 +860,10 @@ where
.formatter
.write_i16(&mut self.ser.writer, value)
.map_err(Error::io));
- tri!(self
- .ser
+ self.ser
.formatter
.end_string(&mut self.ser.writer)
- .map_err(Error::io));
- Ok(())
+ .map_err(Error::io)
}
fn serialize_i32(self, value: i32) -> Result<()> {
@@ -937,12 +877,10 @@ where
.formatter
.write_i32(&mut self.ser.writer, value)
.map_err(Error::io));
- tri!(self
- .ser
+ self.ser
.formatter
.end_string(&mut self.ser.writer)
- .map_err(Error::io));
- Ok(())
+ .map_err(Error::io)
}
fn serialize_i64(self, value: i64) -> Result<()> {
@@ -956,33 +894,27 @@ where
.formatter
.write_i64(&mut self.ser.writer, value)
.map_err(Error::io));
- tri!(self
- .ser
+ self.ser
.formatter
.end_string(&mut self.ser.writer)
- .map_err(Error::io));
- Ok(())
+ .map_err(Error::io)
}
- serde_if_integer128! {
- fn serialize_i128(self, value: i128) -> Result<()> {
- tri!(self
- .ser
- .formatter
- .begin_string(&mut self.ser.writer)
- .map_err(Error::io));
- tri!(self
- .ser
- .formatter
- .write_number_str(&mut self.ser.writer, &value.to_string())
- .map_err(Error::io));
- tri!(self
- .ser
- .formatter
- .end_string(&mut self.ser.writer)
- .map_err(Error::io));
- Ok(())
- }
+ fn serialize_i128(self, value: i128) -> Result<()> {
+ tri!(self
+ .ser
+ .formatter
+ .begin_string(&mut self.ser.writer)
+ .map_err(Error::io));
+ tri!(self
+ .ser
+ .formatter
+ .write_i128(&mut self.ser.writer, value)
+ .map_err(Error::io));
+ self.ser
+ .formatter
+ .end_string(&mut self.ser.writer)
+ .map_err(Error::io)
}
fn serialize_u8(self, value: u8) -> Result<()> {
@@ -996,12 +928,10 @@ where
.formatter
.write_u8(&mut self.ser.writer, value)
.map_err(Error::io));
- tri!(self
- .ser
+ self.ser
.formatter
.end_string(&mut self.ser.writer)
- .map_err(Error::io));
- Ok(())
+ .map_err(Error::io)
}
fn serialize_u16(self, value: u16) -> Result<()> {
@@ -1015,12 +945,10 @@ where
.formatter
.write_u16(&mut self.ser.writer, value)
.map_err(Error::io));
- tri!(self
- .ser
+ self.ser
.formatter
.end_string(&mut self.ser.writer)
- .map_err(Error::io));
- Ok(())
+ .map_err(Error::io)
}
fn serialize_u32(self, value: u32) -> Result<()> {
@@ -1034,12 +962,10 @@ where
.formatter
.write_u32(&mut self.ser.writer, value)
.map_err(Error::io));
- tri!(self
- .ser
+ self.ser
.formatter
.end_string(&mut self.ser.writer)
- .map_err(Error::io));
- Ok(())
+ .map_err(Error::io)
}
fn serialize_u64(self, value: u64) -> Result<()> {
@@ -1053,33 +979,27 @@ where
.formatter
.write_u64(&mut self.ser.writer, value)
.map_err(Error::io));
- tri!(self
- .ser
+ self.ser
.formatter
.end_string(&mut self.ser.writer)
- .map_err(Error::io));
- Ok(())
+ .map_err(Error::io)
}
- serde_if_integer128! {
- fn serialize_u128(self, value: u128) -> Result<()> {
- tri!(self
- .ser
- .formatter
- .begin_string(&mut self.ser.writer)
- .map_err(Error::io));
- tri!(self
- .ser
- .formatter
- .write_number_str(&mut self.ser.writer, &value.to_string())
- .map_err(Error::io));
- tri!(self
- .ser
- .formatter
- .end_string(&mut self.ser.writer)
- .map_err(Error::io));
- Ok(())
- }
+ fn serialize_u128(self, value: u128) -> Result<()> {
+ tri!(self
+ .ser
+ .formatter
+ .begin_string(&mut self.ser.writer)
+ .map_err(Error::io));
+ tri!(self
+ .ser
+ .formatter
+ .write_u128(&mut self.ser.writer, value)
+ .map_err(Error::io));
+ self.ser
+ .formatter
+ .end_string(&mut self.ser.writer)
+ .map_err(Error::io)
}
fn serialize_f32(self, _value: f32) -> Result<()> {
@@ -1218,10 +1138,8 @@ impl<'a, W: io::Write, F: Formatter> ser::Serializer for NumberStrEmitter<'a, W,
Err(invalid_number())
}
- serde_if_integer128! {
- fn serialize_i128(self, _v: i128) -> Result<()> {
- Err(invalid_number())
- }
+ fn serialize_i128(self, _v: i128) -> Result<()> {
+ Err(invalid_number())
}
fn serialize_u8(self, _v: u8) -> Result<()> {
@@ -1240,10 +1158,8 @@ impl<'a, W: io::Write, F: Formatter> ser::Serializer for NumberStrEmitter<'a, W,
Err(invalid_number())
}
- serde_if_integer128! {
- fn serialize_u128(self, _v: u128) -> Result<()> {
- Err(invalid_number())
- }
+ fn serialize_u128(self, _v: u128) -> Result<()> {
+ Err(invalid_number())
}
fn serialize_f32(self, _v: f32) -> Result<()> {
@@ -1399,10 +1315,8 @@ impl<'a, W: io::Write, F: Formatter> ser::Serializer for RawValueStrEmitter<'a,
Err(ser::Error::custom("expected RawValue"))
}
- serde_if_integer128! {
- fn serialize_i128(self, _v: i128) -> Result<()> {
- Err(ser::Error::custom("expected RawValue"))
- }
+ fn serialize_i128(self, _v: i128) -> Result<()> {
+ Err(ser::Error::custom("expected RawValue"))
}
fn serialize_u8(self, _v: u8) -> Result<()> {
@@ -1421,10 +1335,8 @@ impl<'a, W: io::Write, F: Formatter> ser::Serializer for RawValueStrEmitter<'a,
Err(ser::Error::custom("expected RawValue"))
}
- serde_if_integer128! {
- fn serialize_u128(self, _v: u128) -> Result<()> {
- Err(ser::Error::custom("expected RawValue"))
- }
+ fn serialize_u128(self, _v: u128) -> Result<()> {
+ Err(ser::Error::custom("expected RawValue"))
}
fn serialize_f32(self, _v: f32) -> Result<()> {
@@ -1661,6 +1573,17 @@ pub trait Formatter {
writer.write_all(s.as_bytes())
}
+ /// Writes an integer value like `-123` to the specified writer.
+ #[inline]
+ fn write_i128<W>(&mut self, writer: &mut W, value: i128) -> io::Result<()>
+ where
+ W: ?Sized + io::Write,
+ {
+ let mut buffer = itoa::Buffer::new();
+ let s = buffer.format(value);
+ writer.write_all(s.as_bytes())
+ }
+
/// Writes an integer value like `123` to the specified writer.
#[inline]
fn write_u8<W>(&mut self, writer: &mut W, value: u8) -> io::Result<()>
@@ -1705,6 +1628,17 @@ pub trait Formatter {
writer.write_all(s.as_bytes())
}
+ /// Writes an integer value like `123` to the specified writer.
+ #[inline]
+ fn write_u128<W>(&mut self, writer: &mut W, value: u128) -> io::Result<()>
+ where
+ W: ?Sized + io::Write,
+ {
+ let mut buffer = itoa::Buffer::new();
+ let s = buffer.format(value);
+ writer.write_all(s.as_bytes())
+ }
+
/// Writes a floating point value like `-31.26e+12` to the specified writer.
#[inline]
fn write_f32<W>(&mut self, writer: &mut W, value: f32) -> io::Result<()>
@@ -1985,13 +1919,8 @@ impl<'a> Formatter for PrettyFormatter<'a> {
where
W: ?Sized + io::Write,
{
- if first {
- tri!(writer.write_all(b"\n"));
- } else {
- tri!(writer.write_all(b",\n"));
- }
- tri!(indent(writer, self.current_indent, self.indent));
- Ok(())
+ tri!(writer.write_all(if first { b"\n" } else { b",\n" }));
+ indent(writer, self.current_indent, self.indent)
}
#[inline]
@@ -2033,11 +1962,7 @@ impl<'a> Formatter for PrettyFormatter<'a> {
where
W: ?Sized + io::Write,
{
- if first {
- tri!(writer.write_all(b"\n"));
- } else {
- tri!(writer.write_all(b",\n"));
- }
+ tri!(writer.write_all(if first { b"\n" } else { b",\n" }));
indent(writer, self.current_indent, self.indent)
}
@@ -2066,8 +1991,7 @@ where
{
tri!(formatter.begin_string(writer));
tri!(format_escaped_str_contents(writer, formatter, value));
- tri!(formatter.end_string(writer));
- Ok(())
+ formatter.end_string(writer)
}
fn format_escaped_str_contents<W, F>(
@@ -2099,11 +2023,11 @@ where
start = i + 1;
}
- if start != bytes.len() {
- tri!(formatter.write_string_fragment(writer, &value[start..]));
+ if start == bytes.len() {
+ return Ok(());
}
- Ok(())
+ formatter.write_string_fragment(writer, &value[start..])
}
const BB: u8 = b'b'; // \x08
@@ -2152,8 +2076,7 @@ where
T: ?Sized + Serialize,
{
let mut ser = Serializer::new(writer);
- tri!(value.serialize(&mut ser));
- Ok(())
+ value.serialize(&mut ser)
}
/// Serialize the given data structure as pretty-printed JSON into the IO
@@ -2171,8 +2094,7 @@ where
T: ?Sized + Serialize,
{
let mut ser = Serializer::pretty(writer);
- tri!(value.serialize(&mut ser));
- Ok(())
+ value.serialize(&mut ser)
}
/// Serialize the given data structure as a JSON byte vector.
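
The hunks above give `Formatter` default `write_i128`/`write_u128` methods and route `serialize_i128`/`serialize_u128` through them, so 128-bit integers no longer detour through `write_number_str`. A minimal sketch of overriding the new hooks with a custom formatter; the `Quote128` type and its string-quoting behaviour are illustrative only, not part of serde_json:

use std::io::{self, Write};

use serde::Serialize;
use serde_json::ser::{Formatter, Serializer};

// Hypothetical formatter: emit 128-bit integers as quoted strings so that
// JavaScript consumers do not silently lose precision.
struct Quote128;

impl Formatter for Quote128 {
    fn write_i128<W>(&mut self, writer: &mut W, value: i128) -> io::Result<()>
    where
        W: ?Sized + Write,
    {
        write!(writer, "\"{}\"", value)
    }

    fn write_u128<W>(&mut self, writer: &mut W, value: u128) -> io::Result<()>
    where
        W: ?Sized + Write,
    {
        write!(writer, "\"{}\"", value)
    }
}

fn main() -> serde_json::Result<()> {
    let mut out = Vec::new();
    {
        let mut ser = Serializer::with_formatter(&mut out, Quote128);
        u128::MAX.serialize(&mut ser)?;
    }
    assert_eq!(
        String::from_utf8(out).unwrap(),
        "\"340282366920938463463374607431768211455\""
    );
    Ok(())
}
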
diff --git a/vendor/serde_json/src/value/de.rs b/vendor/serde_json/src/value/de.rs
index cc1d38565..9c266d08a 100644
--- a/vendor/serde_json/src/value/de.rs
+++ b/vendor/serde_json/src/value/de.rs
@@ -14,7 +14,7 @@ use serde::de::{
self, Deserialize, DeserializeSeed, EnumAccess, Expected, IntoDeserializer, MapAccess,
SeqAccess, Unexpected, VariantAccess, Visitor,
};
-use serde::{forward_to_deserialize_any, serde_if_integer128};
+use serde::forward_to_deserialize_any;
#[cfg(feature = "arbitrary_precision")]
use crate::number::NumberFromString;
@@ -228,18 +228,15 @@ impl<'de> serde::Deserializer<'de> for Value {
deserialize_number!(deserialize_i16);
deserialize_number!(deserialize_i32);
deserialize_number!(deserialize_i64);
+ deserialize_number!(deserialize_i128);
deserialize_number!(deserialize_u8);
deserialize_number!(deserialize_u16);
deserialize_number!(deserialize_u32);
deserialize_number!(deserialize_u64);
+ deserialize_number!(deserialize_u128);
deserialize_number!(deserialize_f32);
deserialize_number!(deserialize_f64);
- serde_if_integer128! {
- deserialize_number!(deserialize_i128);
- deserialize_number!(deserialize_u128);
- }
-
#[inline]
fn deserialize_option<V>(self, visitor: V) -> Result<V::Value, Error>
where
@@ -724,18 +721,15 @@ impl<'de> serde::Deserializer<'de> for &'de Value {
deserialize_value_ref_number!(deserialize_i16);
deserialize_value_ref_number!(deserialize_i32);
deserialize_value_ref_number!(deserialize_i64);
+ deserialize_number!(deserialize_i128);
deserialize_value_ref_number!(deserialize_u8);
deserialize_value_ref_number!(deserialize_u16);
deserialize_value_ref_number!(deserialize_u32);
deserialize_value_ref_number!(deserialize_u64);
+ deserialize_number!(deserialize_u128);
deserialize_value_ref_number!(deserialize_f32);
deserialize_value_ref_number!(deserialize_f64);
- serde_if_integer128! {
- deserialize_number!(deserialize_i128);
- deserialize_number!(deserialize_u128);
- }
-
fn deserialize_option<V>(self, visitor: V) -> Result<V::Value, Error>
where
V: Visitor<'de>,
@@ -1000,7 +994,7 @@ impl<'de> VariantAccess<'de> for VariantRefDeserializer<'de> {
V: Visitor<'de>,
{
match self.value {
- Some(&Value::Array(ref v)) => {
+ Some(Value::Array(v)) => {
if v.is_empty() {
visitor.visit_unit()
} else {
@@ -1027,7 +1021,7 @@ impl<'de> VariantAccess<'de> for VariantRefDeserializer<'de> {
V: Visitor<'de>,
{
match self.value {
- Some(&Value::Object(ref v)) => visit_object_ref(v, visitor),
+ Some(Value::Object(v)) => visit_object_ref(v, visitor),
Some(other) => Err(serde::de::Error::invalid_type(
other.unexpected(),
&"struct variant",
@@ -1156,15 +1150,12 @@ impl<'de> serde::Deserializer<'de> for MapKeyDeserializer<'de> {
deserialize_integer_key!(deserialize_i16 => visit_i16);
deserialize_integer_key!(deserialize_i32 => visit_i32);
deserialize_integer_key!(deserialize_i64 => visit_i64);
+ deserialize_integer_key!(deserialize_i128 => visit_i128);
deserialize_integer_key!(deserialize_u8 => visit_u8);
deserialize_integer_key!(deserialize_u16 => visit_u16);
deserialize_integer_key!(deserialize_u32 => visit_u32);
deserialize_integer_key!(deserialize_u64 => visit_u64);
-
- serde_if_integer128! {
- deserialize_integer_key!(deserialize_i128 => visit_i128);
- deserialize_integer_key!(deserialize_u128 => visit_u128);
- }
+ deserialize_integer_key!(deserialize_u128 => visit_u128);
#[inline]
fn deserialize_option<V>(self, visitor: V) -> Result<V::Value, Error>
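
With `deserialize_i128`/`deserialize_u128` now generated unconditionally for both `Value` and `&Value`, 128-bit target types deserialize without the old `serde_if_integer128` gate. A small sketch using only the public API touched here:

use serde_json::json;

fn main() -> serde_json::Result<()> {
    // An i64-range number stored in a Value lands in an i128 target.
    let v = json!({ "balance": -9_007_199_254_740_993i64 });
    let balance: i128 = serde_json::from_value(v["balance"].clone())?;
    assert_eq!(balance, -9_007_199_254_740_993);
    Ok(())
}
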
diff --git a/vendor/serde_json/src/value/from.rs b/vendor/serde_json/src/value/from.rs
index 858a6e48a..c5a6a3960 100644
--- a/vendor/serde_json/src/value/from.rs
+++ b/vendor/serde_json/src/value/from.rs
@@ -6,9 +6,6 @@ use alloc::string::{String, ToString};
use alloc::vec::Vec;
use core::iter::FromIterator;
-#[cfg(feature = "arbitrary_precision")]
-use serde::serde_if_integer128;
-
macro_rules! from_integer {
($($ty:ident)*) => {
$(
@@ -27,10 +24,8 @@ from_integer! {
}
#[cfg(feature = "arbitrary_precision")]
-serde_if_integer128! {
- from_integer! {
- i128 u128
- }
+from_integer! {
+ i128 u128
}
impl From<f32> for Value {
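
As in number.rs, the flattened `from_integer! { i128 u128 }` block stays behind `arbitrary_precision`. A sketch assuming that feature is enabled:

use serde_json::Value;

fn main() {
    // Requires features = ["arbitrary_precision"] for From<u128>/From<i128>.
    let big: Value = Value::from(u128::MAX);
    assert_eq!(big.to_string(), u128::MAX.to_string());
}
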
diff --git a/vendor/serde_json/src/value/ser.rs b/vendor/serde_json/src/value/ser.rs
index 37e495f65..892a63d5f 100644
--- a/vendor/serde_json/src/value/ser.rs
+++ b/vendor/serde_json/src/value/ser.rs
@@ -9,9 +9,6 @@ use core::fmt::Display;
use core::result;
use serde::ser::{Impossible, Serialize};
-#[cfg(feature = "arbitrary_precision")]
-use serde::serde_if_integer128;
-
impl Serialize for Value {
#[inline]
fn serialize<S>(&self, serializer: S) -> result::Result<S::Ok, S::Error>
@@ -96,10 +93,8 @@ impl serde::Serializer for Serializer {
}
#[cfg(feature = "arbitrary_precision")]
- serde_if_integer128! {
- fn serialize_i128(self, value: i128) -> Result<Value> {
- Ok(Value::Number(value.into()))
- }
+ fn serialize_i128(self, value: i128) -> Result<Value> {
+ Ok(Value::Number(value.into()))
}
#[inline]
@@ -123,10 +118,8 @@ impl serde::Serializer for Serializer {
}
#[cfg(feature = "arbitrary_precision")]
- serde_if_integer128! {
- fn serialize_u128(self, value: u128) -> Result<Value> {
- Ok(Value::Number(value.into()))
- }
+ fn serialize_u128(self, value: u128) -> Result<Value> {
+ Ok(Value::Number(value.into()))
}
#[inline]
diff --git a/vendor/serde_json/tests/regression/issue953.rs b/vendor/serde_json/tests/regression/issue953.rs
new file mode 100644
index 000000000..771aa5287
--- /dev/null
+++ b/vendor/serde_json/tests/regression/issue953.rs
@@ -0,0 +1,9 @@
+use serde_json::Value;
+
+#[test]
+fn test() {
+ let x1 = serde_json::from_str::<Value>("18446744073709551615.");
+ assert!(x1.is_err());
+ let x2 = serde_json::from_str::<Value>("18446744073709551616.");
+ assert!(x2.is_err());
+}
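
The new regression test asserts that a number whose fraction part has no digits after the decimal point is rejected, whether or not the integer part fits in a u64. A minimal illustration of the same grammar rule with the ordinary parsing API:

use serde_json::Value;

fn main() {
    // A trailing '.' with no fraction digits is invalid JSON...
    assert!(serde_json::from_str::<Value>("1.").is_err());
    // ...while a complete fraction parses fine.
    assert!(serde_json::from_str::<Value>("1.0").is_ok());
}
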
diff --git a/vendor/serde_json/tests/test.rs b/vendor/serde_json/tests/test.rs
index aa5b5caa0..c2050724b 100644
--- a/vendor/serde_json/tests/test.rs
+++ b/vendor/serde_json/tests/test.rs
@@ -6,7 +6,6 @@
clippy::excessive_precision,
clippy::float_cmp,
clippy::items_after_statements,
- clippy::let_underscore_drop,
clippy::shadow_unrelated,
clippy::too_many_lines,
clippy::unreadable_literal,
@@ -1928,7 +1927,7 @@ fn test_deny_float_key() {
// map with float key
let map = treemap!(Float => "x");
- assert!(serde_json::to_value(&map).is_err());
+ assert!(serde_json::to_value(map).is_err());
}
#[test]
diff --git a/vendor/serde_json/tests/ui/missing_colon.stderr b/vendor/serde_json/tests/ui/missing_colon.stderr
index 3cebc4fd3..1515211ad 100644
--- a/vendor/serde_json/tests/ui/missing_colon.stderr
+++ b/vendor/serde_json/tests/ui/missing_colon.stderr
@@ -4,4 +4,9 @@ error: unexpected end of macro invocation
4 | json!({ "a" });
| ^^^^^^^^^^^^^^ missing tokens in macro arguments
|
+note: while trying to match `@`
+ --> src/macros.rs
+ |
+ | (@array [$($elems:expr,)*]) => {
+ | ^
= note: this error originates in the macro `json_internal` which comes from the expansion of the macro `json` (in Nightly builds, run with -Z macro-backtrace for more info)
diff --git a/vendor/serde_json/tests/ui/missing_comma.stderr b/vendor/serde_json/tests/ui/missing_comma.stderr
index bd911d035..bafa0f891 100644
--- a/vendor/serde_json/tests/ui/missing_comma.stderr
+++ b/vendor/serde_json/tests/ui/missing_comma.stderr
@@ -5,3 +5,9 @@ error: no rules expected the token `"2"`
| -^^^ no rules expected this token in macro call
| |
| help: missing comma here
+ |
+note: while trying to match `,`
+ --> src/macros.rs
+ |
+ | ($e:expr , $($tt:tt)*) => {};
+ | ^
diff --git a/vendor/serde_json/tests/ui/missing_value.stderr b/vendor/serde_json/tests/ui/missing_value.stderr
index a1edbc37b..9c9de99ca 100644
--- a/vendor/serde_json/tests/ui/missing_value.stderr
+++ b/vendor/serde_json/tests/ui/missing_value.stderr
@@ -4,4 +4,9 @@ error: unexpected end of macro invocation
4 | json!({ "a" : });
| ^^^^^^^^^^^^^^^^ missing tokens in macro arguments
|
+note: while trying to match `@`
+ --> src/macros.rs
+ |
+ | (@array [$($elems:expr,)*]) => {
+ | ^
= note: this error originates in the macro `json_internal` which comes from the expansion of the macro `json` (in Nightly builds, run with -Z macro-backtrace for more info)
diff --git a/vendor/serde_json/tests/ui/parse_expr.stderr b/vendor/serde_json/tests/ui/parse_expr.stderr
index 6959673d9..cd3e1c94d 100644
--- a/vendor/serde_json/tests/ui/parse_expr.stderr
+++ b/vendor/serde_json/tests/ui/parse_expr.stderr
@@ -3,3 +3,9 @@ error: no rules expected the token `~`
|
4 | json!({ "a" : ~ });
| ^ no rules expected this token in macro call
+ |
+note: while trying to match meta-variable `$e:expr`
+ --> src/macros.rs
+ |
+ | ($e:expr , $($tt:tt)*) => {};
+ | ^^^^^^^
diff --git a/vendor/serde_json/tests/ui/unexpected_after_array_element.stderr b/vendor/serde_json/tests/ui/unexpected_after_array_element.stderr
index f745a212d..ef449f764 100644
--- a/vendor/serde_json/tests/ui/unexpected_after_array_element.stderr
+++ b/vendor/serde_json/tests/ui/unexpected_after_array_element.stderr
@@ -3,3 +3,5 @@ error: no rules expected the token `=>`
|
4 | json!([ true => ]);
| ^^ no rules expected this token in macro call
+ |
+ = note: while trying to match end of macro
diff --git a/vendor/serde_json/tests/ui/unexpected_after_map_entry.stderr b/vendor/serde_json/tests/ui/unexpected_after_map_entry.stderr
index a18c9b4cd..c62d90ba0 100644
--- a/vendor/serde_json/tests/ui/unexpected_after_map_entry.stderr
+++ b/vendor/serde_json/tests/ui/unexpected_after_map_entry.stderr
@@ -3,3 +3,5 @@ error: no rules expected the token `=>`
|
4 | json!({ "k": true => });
| ^^ no rules expected this token in macro call
+ |
+ = note: while trying to match end of macro
diff --git a/vendor/serde_json/tests/ui/unexpected_colon.stderr b/vendor/serde_json/tests/ui/unexpected_colon.stderr
index ed038f608..7e47726bc 100644
--- a/vendor/serde_json/tests/ui/unexpected_colon.stderr
+++ b/vendor/serde_json/tests/ui/unexpected_colon.stderr
@@ -3,3 +3,5 @@ error: no rules expected the token `:`
|
4 | json!({ : true });
| ^ no rules expected this token in macro call
+ |
+ = note: while trying to match end of macro
diff --git a/vendor/serde_json/tests/ui/unexpected_comma.stderr b/vendor/serde_json/tests/ui/unexpected_comma.stderr
index a4309c4e5..552f399a5 100644
--- a/vendor/serde_json/tests/ui/unexpected_comma.stderr
+++ b/vendor/serde_json/tests/ui/unexpected_comma.stderr
@@ -3,3 +3,5 @@ error: no rules expected the token `,`
|
4 | json!({ "a" , "b": true });
| ^ no rules expected this token in macro call
+ |
+ = note: while trying to match end of macro
diff --git a/vendor/serde_repr/.cargo-checksum.json b/vendor/serde_repr/.cargo-checksum.json
index fb30edebf..ac7ba6ea5 100644
--- a/vendor/serde_repr/.cargo-checksum.json
+++ b/vendor/serde_repr/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.toml":"9c18626e7651c83a27fd830650fddfba5db26eecfb706efdf49a33b9dbd49376","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"390805207b02b9766faf9df282ab0a6ee03c64c97a5af07ad516d5c6e095a011","src/lib.rs":"7acb4fb05f93dd98a0f2c83de6da38ed68e882c7b7ea244aa607c1c5113f8509","src/parse.rs":"687cf1436d12c0c26b1c6d06f80e56b4ec0d63a38d71e1f175b981c0fdc10c68","tests/compiletest.rs":"022a8e400ef813d7ea1875b944549cee5125f6a995dc33e93b48cba3e1b57bd1","tests/test.rs":"a4ddb956ee9eafc0b50694075497a2712a347f1a682f9038e2dd654c6605af44","tests/ui/empty_enum.rs":"fe1166f2f92ee213d26a23e57572a99c65c163d446fd8d67e1520bab34f4b859","tests/ui/empty_enum.stderr":"2c8907fc146bb4dcdb926b72de4f823dae4c1c0d8de8a636c512539408f26cc7","tests/ui/missing_repr.rs":"b7ba9341111582cad52e761b82f14778c60352014c4265566e4d4d01ccdcb306","tests/ui/missing_repr.stderr":"dfcb65527963fb8962e1442e10d3c71e141f238b1732483e1739b528ae0eca43","tests/ui/multiple_others.rs":"569658516619719f21e5071873a37125d5390edb77558e4381401f03efda0c83","tests/ui/multiple_others.stderr":"a300f94d2a2049d6718866d17b5673201edc275164b9e8eba25a9ffcd71fd30f","tests/ui/non_unit_variant.rs":"0d8295ae08d882fc3ef4164857240038c5b0674ff0811da9b6ea7343e8bb955c","tests/ui/non_unit_variant.stderr":"18f2900e501b8c64b980445758ca1cb59594dc8d24a1a76abb08a48d8beb3f35","tests/ui/not_enum.rs":"ada7637821c924a6b99175363c820375991be60223f96ca553d304fde2721386","tests/ui/not_enum.stderr":"814525f9a1495225511a02ad4149c9371ea622230b8002564fe83cf52300c728"},"package":"1fe39d9fbb0ebf5eb2c7cb7e2a47e4f462fad1379f1166b8ae49ad9eae89a7ca"} \ No newline at end of file
+{"files":{"Cargo.toml":"eeae9b55dbaecf1fbe4505c7a202da3143336766d2f6bc6880776d3a724884f5","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"eeaecee94e896222307c54e647a2ca546e3f7a778f5a37f9a33300c9a3260324","src/lib.rs":"7acb4fb05f93dd98a0f2c83de6da38ed68e882c7b7ea244aa607c1c5113f8509","src/parse.rs":"687cf1436d12c0c26b1c6d06f80e56b4ec0d63a38d71e1f175b981c0fdc10c68","tests/compiletest.rs":"022a8e400ef813d7ea1875b944549cee5125f6a995dc33e93b48cba3e1b57bd1","tests/test.rs":"a4ddb956ee9eafc0b50694075497a2712a347f1a682f9038e2dd654c6605af44","tests/ui/empty_enum.rs":"fe1166f2f92ee213d26a23e57572a99c65c163d446fd8d67e1520bab34f4b859","tests/ui/empty_enum.stderr":"2c8907fc146bb4dcdb926b72de4f823dae4c1c0d8de8a636c512539408f26cc7","tests/ui/missing_repr.rs":"b7ba9341111582cad52e761b82f14778c60352014c4265566e4d4d01ccdcb306","tests/ui/missing_repr.stderr":"dfcb65527963fb8962e1442e10d3c71e141f238b1732483e1739b528ae0eca43","tests/ui/multiple_others.rs":"569658516619719f21e5071873a37125d5390edb77558e4381401f03efda0c83","tests/ui/multiple_others.stderr":"a300f94d2a2049d6718866d17b5673201edc275164b9e8eba25a9ffcd71fd30f","tests/ui/non_unit_variant.rs":"0d8295ae08d882fc3ef4164857240038c5b0674ff0811da9b6ea7343e8bb955c","tests/ui/non_unit_variant.stderr":"18f2900e501b8c64b980445758ca1cb59594dc8d24a1a76abb08a48d8beb3f35","tests/ui/not_enum.rs":"ada7637821c924a6b99175363c820375991be60223f96ca553d304fde2721386","tests/ui/not_enum.stderr":"814525f9a1495225511a02ad4149c9371ea622230b8002564fe83cf52300c728"},"package":"9a5ec9fa74a20ebbe5d9ac23dac1fc96ba0ecfe9f50f2843b52e537b10fbcb4e"} \ No newline at end of file
diff --git a/vendor/serde_repr/Cargo.toml b/vendor/serde_repr/Cargo.toml
index 6cbdafeae..d1a4916e0 100644
--- a/vendor/serde_repr/Cargo.toml
+++ b/vendor/serde_repr/Cargo.toml
@@ -13,7 +13,7 @@
edition = "2018"
rust-version = "1.31"
name = "serde_repr"
-version = "0.1.9"
+version = "0.1.10"
authors = ["David Tolnay <dtolnay@gmail.com>"]
description = "Derive Serialize and Deserialize that delegates to the underlying repr of a C-like enum."
documentation = "https://docs.rs/serde_repr"
diff --git a/vendor/serde_repr/README.md b/vendor/serde_repr/README.md
index 8c8dee1ea..a932449e5 100644
--- a/vendor/serde_repr/README.md
+++ b/vendor/serde_repr/README.md
@@ -4,7 +4,7 @@ Serde repr derive
[<img alt="github" src="https://img.shields.io/badge/github-dtolnay/serde--repr-8da0cb?style=for-the-badge&labelColor=555555&logo=github" height="20">](https://github.com/dtolnay/serde-repr)
[<img alt="crates.io" src="https://img.shields.io/crates/v/serde_repr.svg?style=for-the-badge&color=fc8d62&logo=rust" height="20">](https://crates.io/crates/serde_repr)
[<img alt="docs.rs" src="https://img.shields.io/badge/docs.rs-serde__repr-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs" height="20">](https://docs.rs/serde_repr)
-[<img alt="build status" src="https://img.shields.io/github/workflow/status/dtolnay/serde-repr/CI/master?style=for-the-badge" height="20">](https://github.com/dtolnay/serde-repr/actions?query=branch%3Amaster)
+[<img alt="build status" src="https://img.shields.io/github/actions/workflow/status/dtolnay/serde-repr/ci.yml?branch=master&style=for-the-badge" height="20">](https://github.com/dtolnay/serde-repr/actions?query=branch%3Amaster)
This crate provides a derive macro to derive Serde's `Serialize` and
`Deserialize` traits in a way that delegates to the underlying repr of a C-like
diff --git a/vendor/snap/.cargo-checksum.json b/vendor/snap/.cargo-checksum.json
index 6fc40889e..ca9b28aad 100644
--- a/vendor/snap/.cargo-checksum.json
+++ b/vendor/snap/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"COPYING":"7ca1297d23644e30bd489193a82a33f324e5fe33f25df4195649b91b883df967","Cargo.lock":"5e2cad11e762a0ae36613c821b6ff8a8eb27001a676eed4a2a0d2f60ef276858","Cargo.toml":"82e6f46a064cad4d4fd5e0d9efcd6ccfa6500809d6b2654538866974e11de79c","README.md":"7519de7d11a582319372ec90bc12c41ed1bc4348a3d8ed10a07fc940c9810549","build.rs":"afae7dd3c45a9a46bcce096e9e0aa9e36ade171e8fd00309969478025b82cac1","examples/compress-escaped.rs":"cef1933dbc5d9b496587c4c1a596f26e326e73799ab9a0ede1c3d76b00676090","examples/compress.rs":"34bad60450e768ed4ac27edf8622abc4d918b17e57b8c286030203e2b3590c24","examples/decompress.rs":"87aa11875ec98f253c8b48ce3a202ac3412063ce2f64387074194151c11fff5e","rustfmt.toml":"1ca600239a27401c4a43f363cf3f38183a212affc1f31bff3ae93234bbaec228","src/bytes.rs":"2eb9783acdf949ef96a9031d2bd43ead65db50a41a60ad98dc117cd61655c7c4","src/compress.rs":"e6f3160be400c2a4869d71210a2558ba8b438b835b632a12b7ff487660db093f","src/crc32.rs":"9f3858bef802640c595705c496f462d0343c4adcff9bdf333a43306fb67db7ae","src/crc32_table.rs":"e999470ca10bb7914887747be838d05d0f5059ae99a8b8c57b0be7de3931e72a","src/decompress.rs":"05c4d0c0922b1ad084cf32f224722f2fe9d63128d6a4da0c44333742d681b80f","src/error.rs":"ba50d0652e50198d6ab42d23788f726d083ba69d8b2459a10a00cc83755dbbd0","src/frame.rs":"4f69a9526a5dd0367631fc7a764bea877f7f0b4c33ee30d4ece62913c42ea48d","src/lib.rs":"1f4dc31068bc60a327ecd632993eaa40feb0b2198affdb1a00a7a9538b23b527","src/raw.rs":"a7048da3a4b76b75e4b3d071ee9959dbf12c5b3a36dabb2f7878513507c80cb9","src/read.rs":"7528c6487d90c4f6170982e5393f56b3a01443b14abac3d4f41d94b7f3beb3f1","src/tag.rs":"9ac94977c5ab3ea687b9441ea3c615f64a23eac11a4bade19cfa3be5968cb718","src/varint.rs":"6d63fca6323c91f193628a41b6e75f5b3443777467e6a5665907bbbba549e6c7","src/write.rs":"086edd9fb1442b6bc8bf8c6a80023eccc4ca627de57ccc62279691c9ff7f0786"},"package":"45456094d1983e2ee2a18fdfebce3189fa451699d0502cb8e3b49dba5ba41451"} \ No newline at end of file
+{"files":{"COPYING":"7ca1297d23644e30bd489193a82a33f324e5fe33f25df4195649b91b883df967","Cargo.lock":"d6e31fa248a50ec25e0cf04bde1f74cadd620705b80dfb4fadd5eb7372cc6397","Cargo.toml":"5fd2ac1dc31bbbf63f2888183f142eafbb940b0dc9eff6b93552475a2e0af316","README.md":"7519de7d11a582319372ec90bc12c41ed1bc4348a3d8ed10a07fc940c9810549","build.rs":"afae7dd3c45a9a46bcce096e9e0aa9e36ade171e8fd00309969478025b82cac1","examples/compress-escaped.rs":"cef1933dbc5d9b496587c4c1a596f26e326e73799ab9a0ede1c3d76b00676090","examples/compress.rs":"34bad60450e768ed4ac27edf8622abc4d918b17e57b8c286030203e2b3590c24","examples/decompress.rs":"87aa11875ec98f253c8b48ce3a202ac3412063ce2f64387074194151c11fff5e","rustfmt.toml":"1ca600239a27401c4a43f363cf3f38183a212affc1f31bff3ae93234bbaec228","src/bytes.rs":"2eb9783acdf949ef96a9031d2bd43ead65db50a41a60ad98dc117cd61655c7c4","src/compress.rs":"e6f3160be400c2a4869d71210a2558ba8b438b835b632a12b7ff487660db093f","src/crc32.rs":"9f3858bef802640c595705c496f462d0343c4adcff9bdf333a43306fb67db7ae","src/crc32_table.rs":"e999470ca10bb7914887747be838d05d0f5059ae99a8b8c57b0be7de3931e72a","src/decompress.rs":"05c4d0c0922b1ad084cf32f224722f2fe9d63128d6a4da0c44333742d681b80f","src/error.rs":"61e474876cb019ccd34981d8fd5f491e6684d3bfe2e444f0752faddcdc5c0491","src/frame.rs":"4f69a9526a5dd0367631fc7a764bea877f7f0b4c33ee30d4ece62913c42ea48d","src/lib.rs":"1f4dc31068bc60a327ecd632993eaa40feb0b2198affdb1a00a7a9538b23b527","src/raw.rs":"a7048da3a4b76b75e4b3d071ee9959dbf12c5b3a36dabb2f7878513507c80cb9","src/read.rs":"500881e24c820f4fb3f63af1c1de3fc65b0e8aeaee136d4dab20fd15d11c466d","src/tag.rs":"9ac94977c5ab3ea687b9441ea3c615f64a23eac11a4bade19cfa3be5968cb718","src/varint.rs":"6d63fca6323c91f193628a41b6e75f5b3443777467e6a5665907bbbba549e6c7","src/write.rs":"086edd9fb1442b6bc8bf8c6a80023eccc4ca627de57ccc62279691c9ff7f0786"},"package":"5e9f0ab6ef7eb7353d9119c170a436d1bf248eea575ac42d19d12f4e34130831"} \ No newline at end of file
diff --git a/vendor/snap/Cargo.lock b/vendor/snap/Cargo.lock
index 8ddce9d43..4b58a3045 100644
--- a/vendor/snap/Cargo.lock
+++ b/vendor/snap/Cargo.lock
@@ -1,14 +1,16 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
+version = 3
+
[[package]]
name = "doc-comment"
-version = "0.3.1"
+version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "923dea538cea0aa3025e8685b20d6ee21ef99c4f77e954a30febbaac5ec73a97"
+checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10"
[[package]]
name = "snap"
-version = "1.0.5"
+version = "1.1.0"
dependencies = [
"doc-comment",
]
diff --git a/vendor/snap/Cargo.toml b/vendor/snap/Cargo.toml
index 8559a1e4b..12ad5ddeb 100644
--- a/vendor/snap/Cargo.toml
+++ b/vendor/snap/Cargo.toml
@@ -3,26 +3,35 @@
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
-# to registry (e.g., crates.io) dependencies
+# to registry (e.g., crates.io) dependencies.
#
-# If you believe there's an error in this file please file an
-# issue against the rust-lang/cargo repository. If you're
-# editing this file be aware that the upstream Cargo.toml
-# will likely look very different (and much more reasonable)
+# If you are reading this file be aware that the original Cargo.toml
+# will likely look very different (and much more reasonable).
+# See Cargo.toml.orig for the original contents.
[package]
edition = "2018"
name = "snap"
-version = "1.0.5"
+version = "1.1.0"
authors = ["Andrew Gallant <jamslam@gmail.com>"]
exclude = ["data/*"]
-description = "A pure Rust implementation of the Snappy compression algorithm. Includes\nstreaming compression and decompression.\n"
+description = """
+A pure Rust implementation of the Snappy compression algorithm. Includes
+streaming compression and decompression.
+"""
homepage = "https://github.com/BurntSushi/rust-snappy"
documentation = "https://docs.rs/snap"
readme = "README.md"
-keywords = ["snappy", "compress", "compression", "decompress", "decompression"]
+keywords = [
+ "snappy",
+ "compress",
+ "compression",
+ "decompress",
+ "decompression",
+]
license = "BSD-3-Clause"
repository = "https://github.com/BurntSushi/rust-snappy"
+
[profile.release]
debug = true
@@ -31,5 +40,6 @@ opt-level = 3
[lib]
bench = false
+
[dev-dependencies.doc-comment]
version = "0.3.1"
diff --git a/vendor/snap/src/error.rs b/vendor/snap/src/error.rs
index 8f0b400a6..99073aa0a 100644
--- a/vendor/snap/src/error.rs
+++ b/vendor/snap/src/error.rs
@@ -29,6 +29,13 @@ impl<W> IntoInnerError<W> {
&self.err
}
+ /// Returns the error which caused the call to `into_inner` to fail.
+ ///
+ /// This error was returned when attempting to flush the internal buffer.
+ pub fn into_error(self) -> io::Error {
+ self.err
+ }
+
/// Returns the underlying writer which generated the error.
///
/// The returned value can be used for error recovery, such as
diff --git a/vendor/snap/src/read.rs b/vendor/snap/src/read.rs
index a924bf91d..b78939cc5 100644
--- a/vendor/snap/src/read.rs
+++ b/vendor/snap/src/read.rs
@@ -94,6 +94,11 @@ impl<R: io::Read> FrameDecoder<R> {
pub fn get_mut(&mut self) -> &mut R {
&mut self.r
}
+
+ /// Gets the underlying reader of this decoder.
+ pub fn into_inner(self) -> R {
+ self.r
+ }
}
impl<R: io::Read> io::Read for FrameDecoder<R> {
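
The two snap additions compose: `FrameDecoder::into_inner` (above) hands back the wrapped reader, and `IntoInnerError::into_error` (in error.rs) converts a failed writer teardown into a plain `io::Error`. A sketch that assumes snap's existing `FrameEncoder`/`FrameDecoder` round-trip API, which is not itself shown in this patch:

use std::io::{Cursor, Read, Write};

use snap::read::FrameDecoder;
use snap::write::FrameEncoder;

fn main() -> std::io::Result<()> {
    // Compress something first so there is a valid frame to decode.
    let mut enc = FrameEncoder::new(Vec::new());
    enc.write_all(b"snappy round trip")?;
    // into_error() (new) turns the IntoInnerError wrapper into the io::Error.
    let compressed = enc.into_inner().map_err(|e| e.into_error())?;

    let mut dec = FrameDecoder::new(Cursor::new(compressed));
    let mut out = String::new();
    dec.read_to_string(&mut out)?;
    assert_eq!(out, "snappy round trip");

    // New in this release: recover ownership of the underlying reader.
    let cursor = dec.into_inner();
    assert!(cursor.position() > 0);
    Ok(())
}
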
diff --git a/vendor/syn/.cargo-checksum.json b/vendor/syn/.cargo-checksum.json
index 8b90dae0f..edd04e5cd 100644
--- a/vendor/syn/.cargo-checksum.json
+++ b/vendor/syn/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.toml":"8366f3b0e0c3a589f43424b1837bb43aa8b4dd224184d355ad38a63bac915210","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"b1546652aefba564455c1ebbf0f276450d4fdb19755e08bfa03c13c8bab241fc","benches/file.rs":"3d737ef3878f6e242b003af9bd539e565f98439a12ee44d9548d84e3fdd7af0c","benches/rust.rs":"11ac9fe898a7bf1bd63e8a8cc9c08bd795b01f0248215cff99afaaf28ce87fab","build.rs":"b815649fd2929d3debd93a58f5da2fb8eba506047a6a5ba538347305828a87b0","src/attr.rs":"234d9cebe2c5e92cd0f5e1117bf5755037e2e905788a337000a65d4bd82b63aa","src/await.rs":"8aa22e3c201cb2bdb6b4817fa00901f308ab06817607aa7b884c58c957705969","src/bigint.rs":"efc7f64959980653d73fe4f8bc2a3a2904dc05f45b02c6dc15cd316fa3d7c338","src/buffer.rs":"2b48296087f096c9630e1e4b03a00ae703407d1b352902b3197370b2f56c62ff","src/custom_keyword.rs":"5c706fc3611e73d16b8c019d7ecb848a86b1ccfcd9e556f80bb6e6a4abe058a8","src/custom_punctuation.rs":"8a666298e774b0d326642f0f73284f6677d0d0a7c9e4a712c9c98d010b4d8a2c","src/data.rs":"75d2c2b5d6a01bf8a6fa2845e41663d8045a78b4b191f1a1bd7c93619d20017a","src/derive.rs":"ee24a202be2d36ccdff576dd9cd765e94b33ef2286946e6725d75b08e777d462","src/discouraged.rs":"6c6a9298f8d24f578da119557bc588f3bd928f7b79fca27d6bdfe3e786dd005f","src/error.rs":"e548cc5b7c6f742ab6c19788755980594c4cb8086f99e6709f1cbc982961102d","src/export.rs":"0cf50d70c32d5fddba8b1193032df62e560237c113df3e86ba26b565cc82838e","src/expr.rs":"5eea3828f3291b0ce5463ed5f0c23fc8a39aeceae68a3247ae02ae467dd35a98","src/ext.rs":"1f648cff1d705a1cea64b32b77482b97a82d2fe0aaf63b40cade91e5c02dc969","src/file.rs":"f86697655222ae294215114f4eae8e6b0b5e2a935d6c479ff8f8f889c4efd2e2","src/gen/clone.rs":"76e89fe155fedf43bc4a252af7e35319b82ce455f584bad8698fdc3f9b7f5d4e","src/gen/debug.rs":"4b05e474e864ce6bf1a5a6ab48ee6c0ecdf41a0d750237990cf2e31963bc1208","src/gen/eq.rs":"79f84836fdcd5cfa352f38055dab7c3246c7757650946c1c701234b11021652a","src/gen/fold.rs":"fcd6a05c8c8e0c36e7ede8593002528b553c8b648fbed452106fd6a8a8c9212a","src/gen/hash.rs":"575e8beae303c1eabda12bf76cbd82672268c502a8ebb8517aab18b40fdbc44e","src/gen/visit.rs":"ced9f6c17d2b3eb3553faab710cb2b3d44d6bca7d1862c8c5da09c3d45debecb","src/gen/visit_mut.rs":"966ea340c53461bf8a1c6bed3c882e4ab8b8907fd18ac35531266f7891ae5f46","src/gen_helper.rs":"ea6c66388365971db6a2fc86cbb208f7eacde77e245bc8623f27a3642a3d7741","src/generics.rs":"46ed41bf116448822ddfefcb62e803fd33264ca8ba672efc0612674d85b6dd11","src/group.rs":"166f0fbb365471ffa3e4f554b72c2b460cbf7e3a1f9bec6c01ef6bbbcd751041","src/ident.rs":"2443e43561abea7eea577b141422258237a663499c839923d8a5ca6fea2470db","src/item.rs":"419c4d6135a7ca7b8f94b5ba038b6af8fcb3939ae807153a19e3c82e9b01e0b7","src/lib.rs":"7875551b227d19f083115d48a83e8c35e3e6d31dbd749bdd03556e2762f7d4fd","src/lifetime.rs":"b18862ef1e690037a4f308ea897debad7bc5038584e3b26c6d8809752ea0e3c2","src/lit.rs":"fc06ddd523f7f9971d8abdb4c8d5d51030ffb3d6810615d5575ae210a7800695","src/lookahead.rs":"e2c2b6d55906421e83dab51463b58bc6dcb582f1bff9303c8b62afefb8d71e5f","src/mac.rs":"004cb89f9697564f6c9ee837e08ead68463ef946fb4c13c6c105adf2ba364b2b","src/macros.rs":"936f503c2fcde602f05220954ecaf87625c6138d0af13d33d56c7b6530110084","src/op.rs":"9d499022902743a6a0a19223b356449a979b90e60552d0446497d72750e646a4","src/parse.rs":"7b2f8caddf25a5734cbcdf7cbf043cbf9afbc07b484966cd59ddfcec9f970fb3","src/parse_macro_input.rs":"a5d16859b782bb6a2754c1066468a2f1ea05b57390caa32175bb84064973be7b","src/parse_quote.rs":"d7d996f13
82c68b5fbfd4b7327ce1d389cd43c3bb3c4f382a35994d0bb79d8ab","src/pat.rs":"b2de04ae6c01df50eab9d1c3908287aca8424adc2007b926c7bcf74d1f64d40a","src/path.rs":"269d5d8b0c21eaf96e1c49bcb1ec2a03175a8adcc103c142e550b3f5e79825d8","src/print.rs":"da6529c1d9d21aaf6c835f66b4e67eacb7cf91a10eb5e9a2143b49bf99b3b5e1","src/punctuated.rs":"f687c23bd3ae512e7412c28ac68030d3bc7a384d1ca8b3da6620e364b0cbbb78","src/reserved.rs":"e70e028bd55cfa43e23cab4ba29e4dc53a3d91eff685ef2b6e57efc2b87a3428","src/sealed.rs":"896a495a5340eec898527f18bd4ddca408ea03ea0ee3af30074ff48deace778d","src/span.rs":"748c51c6feb223c26d3b1701f5bb98aee823666c775c98106cfa24fe29d8cec1","src/spanned.rs":"3ca016a943637653ab98e373dfb826a120f3c159867346fa38a844439944eb39","src/stmt.rs":"601a6914f1e0bf97ae0d31d474a531d195b8c251a4ded11aa8746ac0018d367b","src/thread.rs":"815eca6bd64f4eef7c447f0809e84108f5428ff50225224b373efd8fbb696874","src/token.rs":"5e423a696f80e281c322f37c87577f9fdc28607e9c007e24896a2b12da62d5ad","src/tt.rs":"32402645b6e82ef1e882945721b59b5fb7b0ee337d1972876362ecacef643d0f","src/ty.rs":"7e678749af18fc84ae9220435e467e520de05eea66adeeed3b5d634cd744561c","src/verbatim.rs":"802a97df997432f18cac6e6200ff6ea29fb2474986005e0fcdbc2b65197f87f7","src/whitespace.rs":"e63dd0aa3d34029f17766a8b09c1a6e4479e36c552c8b7023d710a399333aace","tests/common/eq.rs":"953f5db261a3334eba1d37df4247463e9234c7988da04f43028b5273d24bf2da","tests/common/mod.rs":"432ad35577f836a20b517d8c26ed994ac25fe73ef2f461c67688b61b99762015","tests/common/parse.rs":"81580f23583723f7a2a337c4d13ebc021057cd825562fb4e474caa7cc641fed9","tests/debug/gen.rs":"1b7f875344cb04a7dd3df62deac2f410a9d107c097986e68006d87465f5f5306","tests/debug/mod.rs":"3a6bb799f478101f71c84c6f1a854a58afe2f9db43c39017909346ca20262d94","tests/macros/mod.rs":"aff805b35cfd55aef6a1359ff747e4023afcb08d69d86aff4c19465d29dda088","tests/regression.rs":"f962ebf24007f631f7e702e34e142d07581da7c9a36321ac142cafed1a0afc69","tests/regression/issue1108.rs":"adcc55a42239d344da74216ed85fc14153ddd6ca4dec4872d8339604ba78c185","tests/repo/mod.rs":"1ea18f9430e75cabc4b23b826544c2bf2f950b679a04b237a11e17aabc16e2e9","tests/repo/progress.rs":"c08d0314a7f3ecf760d471f27da3cd2a500aeb9f1c8331bffb2aa648f9fabf3f","tests/test_asyncness.rs":"cff01db49d28ab23b0b258bc6c0a5cc4071be4fe7248eef344a5d79d2fb649b7","tests/test_attribute.rs":"0ffd99384e1a52ae17d9fed5c4053e411e8f9018decef07ffa621d1faa7329d8","tests/test_derive_input.rs":"62bb86aaaaf730187a46ff700a8e3b2d1a163039b109b6a483aa44ed2b6806fe","tests/test_expr.rs":"a639728866a063b590430965a4840c01755e398b89be12d8d09b0aa97837ecac","tests/test_generics.rs":"54b7d2afc19aa6e9049585f4c8f7d3f0c29ac3bd11a2c769e9df76f18a4f5ecb","tests/test_grouping.rs":"6276c3c73bba649dec5c97904ad2492879f918bc887a2c425d095c654ca0d925","tests/test_ident.rs":"9eb53d1e21edf23e7c9e14dc74dcc2b2538e9221e19dbcc0a44e3acc2e90f3f6","tests/test_item.rs":"a3642c80066f1e7787becfd0278af90a6b7968d6c1249e25e81663aa454cfb2a","tests/test_iterators.rs":"53ed6078d37550bd6765d2411e3660be401aef8a31a407350cc064a7d08c7c33","tests/test_lit.rs":"19740ea9cd4a980bcab9b0dcaa4b032bb6ebb137fa5e4237140b97da1d9679fa","tests/test_meta.rs":"65d4586d131f6cac66694ca5e936748ec4e7f7423af6d8da509240e6be14800b","tests/test_parse_buffer.rs":"68d857f776396d064fcc0023c37093c2fbf75ee68e8241d4014d00d1423c18e9","tests/test_parse_stream.rs":"bf1db6fab7ac396fa61012faccbe6ffbc9c3d795ed2900be75e91c5b09b0c62f","tests/test_pat.rs":"d4465f4fc3fd5d6e534ba8efabe1e0ed6da89de4ac7c96effa6bfb880c4287cf","tests/test_path.rs":"71092a5ae2c9143b92a8fe15a92d39958b3c28bd4d4275cfb2d22cbdd53
ada07","tests/test_precedence.rs":"736eee861c4c7a3d7d4387d2fb1b5eced1541790d34974f72b0a5532797e73c3","tests/test_receiver.rs":"084eca59984b9a18651da52f2c4407355da3de1335916a12477652999e2d01cc","tests/test_round_trip.rs":"b47662e35be2729f28bacdbbea20f1879c111889430e735a7bcb5f2a5c0b9e5c","tests/test_shebang.rs":"f5772cadad5b56e3112cb16308b779f92bce1c3a48091fc9933deb2276a69331","tests/test_should_parse.rs":"1d3535698a446e2755bfc360676bdb161841a1f454cdef6e7556c6d06a95c89d","tests/test_size.rs":"697906d892ab8186eebdf6bc7696fde7a42376d50bee846ba69f031bdb847e01","tests/test_stmt.rs":"0601fc32131b5501dfcdc4b4248d46bf21e0a98a49eb19439e1a46869dfb30b7","tests/test_token_trees.rs":"43e56a701817e3c3bfd0cae54a457dd7a38ccb3ca19da41e2b995fdf20e6ed18","tests/test_ty.rs":"f71d7f7f1c038aaabea8dd4c03c0d5752c76d570f8b4885a81659825bbb4d576","tests/test_visibility.rs":"7456fcb3a6634db509748aededff9c2d8b242d511a3e5ee3022e40b232892704","tests/zzz_stable.rs":"2a862e59cb446235ed99aec0e6ada8e16d3ecc30229b29d825b7c0bbc2602989"},"package":"3fcd952facd492f9be3ef0d0b7032a6e442ee9b361d4acc2b1d0c4aaa5f613a1"} \ No newline at end of file
+{"files":{"Cargo.toml":"1ff565970239963d56cbfdd20476fd265fcf2d050fc5ed92a298686321985230","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"ea9f2b6340b302b5608d2bedcda7e2d707f3eaebf4cc983c02d55071ead7096f","benches/file.rs":"3d737ef3878f6e242b003af9bd539e565f98439a12ee44d9548d84e3fdd7af0c","benches/rust.rs":"11ac9fe898a7bf1bd63e8a8cc9c08bd795b01f0248215cff99afaaf28ce87fab","build.rs":"b815649fd2929d3debd93a58f5da2fb8eba506047a6a5ba538347305828a87b0","src/attr.rs":"234d9cebe2c5e92cd0f5e1117bf5755037e2e905788a337000a65d4bd82b63aa","src/await.rs":"8aa22e3c201cb2bdb6b4817fa00901f308ab06817607aa7b884c58c957705969","src/bigint.rs":"efc7f64959980653d73fe4f8bc2a3a2904dc05f45b02c6dc15cd316fa3d7c338","src/buffer.rs":"4d15f35273d485261be4f1a765ae03abc1daee9fc9dac5fb4f9b624d6b22cb58","src/custom_keyword.rs":"5c706fc3611e73d16b8c019d7ecb848a86b1ccfcd9e556f80bb6e6a4abe058a8","src/custom_punctuation.rs":"8a666298e774b0d326642f0f73284f6677d0d0a7c9e4a712c9c98d010b4d8a2c","src/data.rs":"75d2c2b5d6a01bf8a6fa2845e41663d8045a78b4b191f1a1bd7c93619d20017a","src/derive.rs":"ee24a202be2d36ccdff576dd9cd765e94b33ef2286946e6725d75b08e777d462","src/discouraged.rs":"6c6a9298f8d24f578da119557bc588f3bd928f7b79fca27d6bdfe3e786dd005f","src/drops.rs":"013385f1dd95663f1afab41abc1e2eea04181998644828935ca564c74d6462ae","src/error.rs":"b30e738fdab7d10e126350e09f7ad907bf4dc14e684b9ed9eeea001c7ee356e0","src/export.rs":"0cf50d70c32d5fddba8b1193032df62e560237c113df3e86ba26b565cc82838e","src/expr.rs":"5eea3828f3291b0ce5463ed5f0c23fc8a39aeceae68a3247ae02ae467dd35a98","src/ext.rs":"1f648cff1d705a1cea64b32b77482b97a82d2fe0aaf63b40cade91e5c02dc969","src/file.rs":"f86697655222ae294215114f4eae8e6b0b5e2a935d6c479ff8f8f889c4efd2e2","src/gen/clone.rs":"76e89fe155fedf43bc4a252af7e35319b82ce455f584bad8698fdc3f9b7f5d4e","src/gen/debug.rs":"4b05e474e864ce6bf1a5a6ab48ee6c0ecdf41a0d750237990cf2e31963bc1208","src/gen/eq.rs":"79f84836fdcd5cfa352f38055dab7c3246c7757650946c1c701234b11021652a","src/gen/fold.rs":"fcd6a05c8c8e0c36e7ede8593002528b553c8b648fbed452106fd6a8a8c9212a","src/gen/hash.rs":"575e8beae303c1eabda12bf76cbd82672268c502a8ebb8517aab18b40fdbc44e","src/gen/visit.rs":"ced9f6c17d2b3eb3553faab710cb2b3d44d6bca7d1862c8c5da09c3d45debecb","src/gen/visit_mut.rs":"966ea340c53461bf8a1c6bed3c882e4ab8b8907fd18ac35531266f7891ae5f46","src/gen_helper.rs":"ea6c66388365971db6a2fc86cbb208f7eacde77e245bc8623f27a3642a3d7741","src/generics.rs":"b81ce0d3ea0f7accef4590d5181cecc4589395865abaea60b0470da727f17340","src/group.rs":"166f0fbb365471ffa3e4f554b72c2b460cbf7e3a1f9bec6c01ef6bbbcd751041","src/ident.rs":"2443e43561abea7eea577b141422258237a663499c839923d8a5ca6fea2470db","src/item.rs":"419c4d6135a7ca7b8f94b5ba038b6af8fcb3939ae807153a19e3c82e9b01e0b7","src/lib.rs":"8c152481907905472fc3e4aae63f82ed78d4d16cf8cc286675727668760c7f2e","src/lifetime.rs":"b18862ef1e690037a4f308ea897debad7bc5038584e3b26c6d8809752ea0e3c2","src/lit.rs":"fc06ddd523f7f9971d8abdb4c8d5d51030ffb3d6810615d5575ae210a7800695","src/lookahead.rs":"e2c2b6d55906421e83dab51463b58bc6dcb582f1bff9303c8b62afefb8d71e5f","src/mac.rs":"004cb89f9697564f6c9ee837e08ead68463ef946fb4c13c6c105adf2ba364b2b","src/macros.rs":"936f503c2fcde602f05220954ecaf87625c6138d0af13d33d56c7b6530110084","src/op.rs":"9d499022902743a6a0a19223b356449a979b90e60552d0446497d72750e646a4","src/parse.rs":"7b2f8caddf25a5734cbcdf7cbf043cbf9afbc07b484966cd59ddfcec9f970fb3","src/parse_macro_input.rs":"a5d16859b782bb6
a2754c1066468a2f1ea05b57390caa32175bb84064973be7b","src/parse_quote.rs":"d7d996f1382c68b5fbfd4b7327ce1d389cd43c3bb3c4f382a35994d0bb79d8ab","src/pat.rs":"b2de04ae6c01df50eab9d1c3908287aca8424adc2007b926c7bcf74d1f64d40a","src/path.rs":"58a4fb3b1ff76d32cfd84a3914f8cadbf55b363c1929222b362b7465385520ac","src/print.rs":"da6529c1d9d21aaf6c835f66b4e67eacb7cf91a10eb5e9a2143b49bf99b3b5e1","src/punctuated.rs":"44c29523dee76605be2531674fe21ed2f1bbd02559aac8b7a49c70af23129ca1","src/reserved.rs":"e70e028bd55cfa43e23cab4ba29e4dc53a3d91eff685ef2b6e57efc2b87a3428","src/sealed.rs":"896a495a5340eec898527f18bd4ddca408ea03ea0ee3af30074ff48deace778d","src/span.rs":"748c51c6feb223c26d3b1701f5bb98aee823666c775c98106cfa24fe29d8cec1","src/spanned.rs":"3ca016a943637653ab98e373dfb826a120f3c159867346fa38a844439944eb39","src/stmt.rs":"601a6914f1e0bf97ae0d31d474a531d195b8c251a4ded11aa8746ac0018d367b","src/thread.rs":"815eca6bd64f4eef7c447f0809e84108f5428ff50225224b373efd8fbb696874","src/token.rs":"5e423a696f80e281c322f37c87577f9fdc28607e9c007e24896a2b12da62d5ad","src/tt.rs":"32402645b6e82ef1e882945721b59b5fb7b0ee337d1972876362ecacef643d0f","src/ty.rs":"9befd22f8c8ac731b7f68008552a1335797a3ef19184190eec0e103e4ebe18a7","src/verbatim.rs":"96d4280e4556a1841b8dcb306bc35a94d18f71dceb63f3c27a4fe7f776191760","src/whitespace.rs":"e63dd0aa3d34029f17766a8b09c1a6e4479e36c552c8b7023d710a399333aace","tests/common/eq.rs":"e930fb0bdcec3e787986b56785b1db580e5a26a5131df2f2b91a6da37069de15","tests/common/mod.rs":"432ad35577f836a20b517d8c26ed994ac25fe73ef2f461c67688b61b99762015","tests/common/parse.rs":"81580f23583723f7a2a337c4d13ebc021057cd825562fb4e474caa7cc641fed9","tests/debug/gen.rs":"1b7f875344cb04a7dd3df62deac2f410a9d107c097986e68006d87465f5f5306","tests/debug/mod.rs":"3a6bb799f478101f71c84c6f1a854a58afe2f9db43c39017909346ca20262d94","tests/macros/mod.rs":"aff805b35cfd55aef6a1359ff747e4023afcb08d69d86aff4c19465d29dda088","tests/regression.rs":"86731134bfb9bb693d9a4fc62393027de80a8bf031109ea6c7ea475b1ebdde8d","tests/regression/issue1108.rs":"adcc55a42239d344da74216ed85fc14153ddd6ca4dec4872d8339604ba78c185","tests/regression/issue1235.rs":"a2266b10c3f7c7af5734817ab0a3e8b309b51e7d177b63f26e67e6b744d280b0","tests/repo/mod.rs":"159c2c4b6416d26ac42ffc35f6cb587c4c1e2b0f24de9aa42b0337a534d7d86d","tests/repo/progress.rs":"c08d0314a7f3ecf760d471f27da3cd2a500aeb9f1c8331bffb2aa648f9fabf3f","tests/test_asyncness.rs":"cff01db49d28ab23b0b258bc6c0a5cc4071be4fe7248eef344a5d79d2fb649b7","tests/test_attribute.rs":"0ffd99384e1a52ae17d9fed5c4053e411e8f9018decef07ffa621d1faa7329d8","tests/test_derive_input.rs":"62bb86aaaaf730187a46ff700a8e3b2d1a163039b109b6a483aa44ed2b6806fe","tests/test_expr.rs":"41eb343829ad36cdea40cd06d45a90765e7fe6f1e47dd550daf1b6096c3a7b44","tests/test_generics.rs":"54b7d2afc19aa6e9049585f4c8f7d3f0c29ac3bd11a2c769e9df76f18a4f5ecb","tests/test_grouping.rs":"6276c3c73bba649dec5c97904ad2492879f918bc887a2c425d095c654ca0d925","tests/test_ident.rs":"9eb53d1e21edf23e7c9e14dc74dcc2b2538e9221e19dbcc0a44e3acc2e90f3f6","tests/test_item.rs":"a3642c80066f1e7787becfd0278af90a6b7968d6c1249e25e81663aa454cfb2a","tests/test_iterators.rs":"9cf6fde17853ce7d5617e1de9ef901c47ca35c0f1c2dd668c0d0604d7b48598c","tests/test_lit.rs":"19740ea9cd4a980bcab9b0dcaa4b032bb6ebb137fa5e4237140b97da1d9679fa","tests/test_meta.rs":"65d4586d131f6cac66694ca5e936748ec4e7f7423af6d8da509240e6be14800b","tests/test_parse_buffer.rs":"68d857f776396d064fcc0023c37093c2fbf75ee68e8241d4014d00d1423c18e9","tests/test_parse_stream.rs":"bf1db6fab7ac396fa61012faccbe6ffbc9c3d795ed2900be75e91
c5b09b0c62f","tests/test_pat.rs":"d4465f4fc3fd5d6e534ba8efabe1e0ed6da89de4ac7c96effa6bfb880c4287cf","tests/test_path.rs":"71092a5ae2c9143b92a8fe15a92d39958b3c28bd4d4275cfb2d22cbdd53ada07","tests/test_precedence.rs":"736eee861c4c7a3d7d4387d2fb1b5eced1541790d34974f72b0a5532797e73c3","tests/test_receiver.rs":"084eca59984b9a18651da52f2c4407355da3de1335916a12477652999e2d01cc","tests/test_round_trip.rs":"c3c415413d5177a728c7cbbfb7ef44aebbc6a2c821dd56695156e9e33636fd57","tests/test_shebang.rs":"f5772cadad5b56e3112cb16308b779f92bce1c3a48091fc9933deb2276a69331","tests/test_should_parse.rs":"1d3535698a446e2755bfc360676bdb161841a1f454cdef6e7556c6d06a95c89d","tests/test_size.rs":"6720d55569808244ab011364c39931f06aa509cd05f98ab908b0670e8501b3c8","tests/test_stmt.rs":"0601fc32131b5501dfcdc4b4248d46bf21e0a98a49eb19439e1a46869dfb30b7","tests/test_token_trees.rs":"43e56a701817e3c3bfd0cae54a457dd7a38ccb3ca19da41e2b995fdf20e6ed18","tests/test_ty.rs":"f71d7f7f1c038aaabea8dd4c03c0d5752c76d570f8b4885a81659825bbb4d576","tests/test_visibility.rs":"7456fcb3a6634db509748aededff9c2d8b242d511a3e5ee3022e40b232892704","tests/zzz_stable.rs":"2a862e59cb446235ed99aec0e6ada8e16d3ecc30229b29d825b7c0bbc2602989"},"package":"1f4064b5b16e03ae50984a5a8ed5d4f8803e6bc1fd170a3cda91a1be4b18e3f5"} \ No newline at end of file
diff --git a/vendor/syn/Cargo.toml b/vendor/syn/Cargo.toml
index 7b1412593..c2a36013e 100644
--- a/vendor/syn/Cargo.toml
+++ b/vendor/syn/Cargo.toml
@@ -13,7 +13,7 @@
edition = "2018"
rust-version = "1.31"
name = "syn"
-version = "1.0.102"
+version = "1.0.107"
authors = ["David Tolnay <dtolnay@gmail.com>"]
include = [
"/benches/**",
@@ -56,6 +56,9 @@ features = [
"extra-traits",
]
+[lib]
+doc-scrape-examples = false
+
[[bench]]
name = "rust"
harness = false
diff --git a/vendor/syn/README.md b/vendor/syn/README.md
index 9c883ae2b..eeef83dd5 100644
--- a/vendor/syn/README.md
+++ b/vendor/syn/README.md
@@ -4,7 +4,7 @@ Parser for Rust source code
[<img alt="github" src="https://img.shields.io/badge/github-dtolnay/syn-8da0cb?style=for-the-badge&labelColor=555555&logo=github" height="20">](https://github.com/dtolnay/syn)
[<img alt="crates.io" src="https://img.shields.io/crates/v/syn.svg?style=for-the-badge&color=fc8d62&logo=rust" height="20">](https://crates.io/crates/syn)
[<img alt="docs.rs" src="https://img.shields.io/badge/docs.rs-syn-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs" height="20">](https://docs.rs/syn)
-[<img alt="build status" src="https://img.shields.io/github/workflow/status/dtolnay/syn/CI/master?style=for-the-badge" height="20">](https://github.com/dtolnay/syn/actions?query=branch%3Amaster)
+[<img alt="build status" src="https://img.shields.io/github/actions/workflow/status/dtolnay/syn/ci.yml?branch=master&style=for-the-badge" height="20">](https://github.com/dtolnay/syn/actions?query=branch%3Amaster)
Syn is a parsing library for parsing a stream of Rust tokens into a syntax tree
of Rust source code.
diff --git a/vendor/syn/src/buffer.rs b/vendor/syn/src/buffer.rs
index 161b614c8..0d5cf30d5 100644
--- a/vendor/syn/src/buffer.rs
+++ b/vendor/syn/src/buffer.rs
@@ -14,6 +14,7 @@
use crate::proc_macro as pm;
use crate::Lifetime;
use proc_macro2::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
+use std::cmp::Ordering;
use std::marker::PhantomData;
/// Internal type which is used instead of `TokenTree` to represent a token tree
@@ -25,7 +26,8 @@ enum Entry {
Ident(Ident),
Punct(Punct),
Literal(Literal),
- End,
+ // End entries contain the offset (negative) to the start of the buffer.
+ End(isize),
}
/// A buffer that can be efficiently traversed multiple times, unlike
@@ -48,10 +50,10 @@ impl TokenBuffer {
TokenTree::Literal(literal) => entries.push(Entry::Literal(literal)),
TokenTree::Group(group) => {
let group_start_index = entries.len();
- entries.push(Entry::End); // we replace this below
+ entries.push(Entry::End(0)); // we replace this below
Self::recursive_new(entries, group.stream());
let group_end_index = entries.len();
- entries.push(Entry::End);
+ entries.push(Entry::End(-(group_end_index as isize)));
let group_end_offset = group_end_index - group_start_index;
entries[group_start_index] = Entry::Group(group, group_end_offset);
}
@@ -77,7 +79,7 @@ impl TokenBuffer {
pub fn new2(stream: TokenStream) -> Self {
let mut entries = Vec::new();
Self::recursive_new(&mut entries, stream);
- entries.push(Entry::End);
+ entries.push(Entry::End(-(entries.len() as isize)));
Self {
entries: entries.into_boxed_slice(),
}
@@ -127,7 +129,7 @@ impl<'a> Cursor<'a> {
// object in global storage.
struct UnsafeSyncEntry(Entry);
unsafe impl Sync for UnsafeSyncEntry {}
- static EMPTY_ENTRY: UnsafeSyncEntry = UnsafeSyncEntry(Entry::End);
+ static EMPTY_ENTRY: UnsafeSyncEntry = UnsafeSyncEntry(Entry::End(0));
Cursor {
ptr: &EMPTY_ENTRY.0,
@@ -144,7 +146,7 @@ impl<'a> Cursor<'a> {
// past it, unless `ptr == scope`, which means that we're at the edge of
// our cursor's scope. We should only have `ptr != scope` at the exit
// from None-delimited groups entered with `ignore_none`.
- while let Entry::End = *ptr {
+ while let Entry::End(_) = *ptr {
if ptr == scope {
break;
}
@@ -292,7 +294,7 @@ impl<'a> Cursor<'a> {
Entry::Literal(literal) => (literal.clone().into(), 1),
Entry::Ident(ident) => (ident.clone().into(), 1),
Entry::Punct(punct) => (punct.clone().into(), 1),
- Entry::End => return None,
+ Entry::End(_) => return None,
};
let rest = unsafe { Cursor::create(self.ptr.add(len), self.scope) };
@@ -307,7 +309,7 @@ impl<'a> Cursor<'a> {
Entry::Literal(literal) => literal.span(),
Entry::Ident(ident) => ident.span(),
Entry::Punct(punct) => punct.span(),
- Entry::End => Span::call_site(),
+ Entry::End(_) => Span::call_site(),
}
}
@@ -317,7 +319,7 @@ impl<'a> Cursor<'a> {
/// This method treats `'lifetimes` as a single token.
pub(crate) fn skip(self) -> Option<Cursor<'a>> {
let len = match self.entry() {
- Entry::End => return None,
+ Entry::End(_) => return None,
// Treat lifetimes as a single tt for the purposes of 'skip'.
Entry::Punct(punct) if punct.as_char() == '\'' && punct.spacing() == Spacing::Joint => {
@@ -347,9 +349,17 @@ impl<'a> Eq for Cursor<'a> {}
impl<'a> PartialEq for Cursor<'a> {
fn eq(&self, other: &Self) -> bool {
- let Cursor { ptr, scope, marker } = self;
- let _ = marker;
- *ptr == other.ptr && *scope == other.scope
+ self.ptr == other.ptr
+ }
+}
+
+impl<'a> PartialOrd for Cursor<'a> {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ if same_buffer(*self, *other) {
+ Some(self.ptr.cmp(&other.ptr))
+ } else {
+ None
+ }
}
}
@@ -357,6 +367,22 @@ pub(crate) fn same_scope(a: Cursor, b: Cursor) -> bool {
a.scope == b.scope
}
+pub(crate) fn same_buffer(a: Cursor, b: Cursor) -> bool {
+ unsafe {
+ match (&*a.scope, &*b.scope) {
+ (Entry::End(a_offset), Entry::End(b_offset)) => {
+ a.scope.offset(*a_offset) == b.scope.offset(*b_offset)
+ }
+ _ => unreachable!(),
+ }
+ }
+}
+
+#[cfg(any(feature = "full", feature = "derive"))]
+pub(crate) fn cmp_assuming_same_buffer(a: Cursor, b: Cursor) -> Ordering {
+ a.ptr.cmp(&b.ptr)
+}
+
pub(crate) fn open_span_of_group(cursor: Cursor) -> Span {
match cursor.entry() {
Entry::Group(group, _) => group.span_open(),
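
In the buffer.rs change above, `Entry::End` now carries the negative distance from its own slot back to index 0 of the flat entry buffer; that is what `same_buffer` (and the new `PartialOrd` for `Cursor`) relies on to decide whether two cursors point into the same `TokenBuffer`. A minimal sketch of that encoding with illustrative types only (indices stand in for syn's raw entry pointers):

    enum Entry {
        Token(char),
        // Stores the negative distance back to index 0 of the buffer,
        // mirroring the `Entry::End(isize)` introduced above.
        End(isize),
    }

    fn buffer_start(entries: &[Entry], end_index: usize) -> usize {
        match entries[end_index] {
            // The entry's own index plus its (negative) offset is the buffer start.
            Entry::End(offset) => (end_index as isize + offset) as usize,
            _ => panic!("expected an End entry"),
        }
    }

    fn main() {
        let entries = [Entry::Token('a'), Entry::Token('b'), Entry::End(-2)];
        // Two cursors belong to the same buffer iff their End scopes resolve to
        // the same start; the lone End entry here resolves back to index 0.
        assert_eq!(buffer_start(&entries, 2), 0);
    }
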
diff --git a/vendor/syn/src/drops.rs b/vendor/syn/src/drops.rs
new file mode 100644
index 000000000..89b42d82e
--- /dev/null
+++ b/vendor/syn/src/drops.rs
@@ -0,0 +1,58 @@
+use std::iter;
+use std::mem::ManuallyDrop;
+use std::ops::{Deref, DerefMut};
+use std::option;
+use std::slice;
+
+#[repr(transparent)]
+pub(crate) struct NoDrop<T: ?Sized>(ManuallyDrop<T>);
+
+impl<T> NoDrop<T> {
+ pub(crate) fn new(value: T) -> Self
+ where
+ T: TrivialDrop,
+ {
+ NoDrop(ManuallyDrop::new(value))
+ }
+}
+
+impl<T: ?Sized> Deref for NoDrop<T> {
+ type Target = T;
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}
+
+impl<T: ?Sized> DerefMut for NoDrop<T> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.0
+ }
+}
+
+pub(crate) trait TrivialDrop {}
+
+impl<T> TrivialDrop for iter::Empty<T> {}
+impl<'a, T> TrivialDrop for slice::Iter<'a, T> {}
+impl<'a, T> TrivialDrop for slice::IterMut<'a, T> {}
+impl<'a, T> TrivialDrop for option::IntoIter<&'a T> {}
+impl<'a, T> TrivialDrop for option::IntoIter<&'a mut T> {}
+
+#[test]
+fn test_needs_drop() {
+ use std::mem::needs_drop;
+
+ struct NeedsDrop;
+
+ impl Drop for NeedsDrop {
+ fn drop(&mut self) {}
+ }
+
+ assert!(needs_drop::<NeedsDrop>());
+
+ // Test each of the types with a handwritten TrivialDrop impl above.
+ assert!(!needs_drop::<iter::Empty<NeedsDrop>>());
+ assert!(!needs_drop::<slice::Iter<NeedsDrop>>());
+ assert!(!needs_drop::<slice::IterMut<NeedsDrop>>());
+ assert!(!needs_drop::<option::IntoIter<&NeedsDrop>>());
+ assert!(!needs_drop::<option::IntoIter<&mut NeedsDrop>>());
+}
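
The new drops.rs module wraps values in `ManuallyDrop` (via `NoDrop`) precisely so that the boxed iterator trait objects used by `Punctuated` carry no drop glue; with nothing to run at drop time, the borrow checker can end the iterator's borrow early, which is what the `may_dangle` test added to tests/test_iterators.rs further down exercises. A minimal illustration of that underlying mechanism, using only the standard library:

    use std::mem::{needs_drop, ManuallyDrop};

    fn main() {
        // String has a destructor and therefore drop glue...
        assert!(needs_drop::<String>());
        // ...but ManuallyDrop suppresses it entirely, which is what lets a
        // borrowing value wrapped this way release its borrows before the end
        // of its scope (the property the may_dangle test checks).
        assert!(!needs_drop::<ManuallyDrop<String>>());
    }
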
diff --git a/vendor/syn/src/error.rs b/vendor/syn/src/error.rs
index 609cc086f..e301367d5 100644
--- a/vendor/syn/src/error.rs
+++ b/vendor/syn/src/error.rs
@@ -134,12 +134,16 @@ impl Error {
/// }
/// ```
pub fn new<T: Display>(span: Span, message: T) -> Self {
- Error {
- messages: vec![ErrorMessage {
- start_span: ThreadBound::new(span),
- end_span: ThreadBound::new(span),
- message: message.to_string(),
- }],
+ return new(span, message.to_string());
+
+ fn new(span: Span, message: String) -> Error {
+ Error {
+ messages: vec![ErrorMessage {
+ start_span: ThreadBound::new(span),
+ end_span: ThreadBound::new(span),
+ message,
+ }],
+ }
}
}
@@ -158,15 +162,19 @@ impl Error {
/// `ParseStream::error`)!
#[cfg(feature = "printing")]
pub fn new_spanned<T: ToTokens, U: Display>(tokens: T, message: U) -> Self {
- let mut iter = tokens.into_token_stream().into_iter();
- let start = iter.next().map_or_else(Span::call_site, |t| t.span());
- let end = iter.last().map_or(start, |t| t.span());
- Error {
- messages: vec![ErrorMessage {
- start_span: ThreadBound::new(start),
- end_span: ThreadBound::new(end),
- message: message.to_string(),
- }],
+ return new_spanned(tokens.into_token_stream(), message.to_string());
+
+ fn new_spanned(tokens: TokenStream, message: String) -> Error {
+ let mut iter = tokens.into_iter();
+ let start = iter.next().map_or_else(Span::call_site, |t| t.span());
+ let end = iter.last().map_or(start, |t| t.span());
+ Error {
+ messages: vec![ErrorMessage {
+ start_span: ThreadBound::new(start),
+ end_span: ThreadBound::new(end),
+ message,
+ }],
+ }
}
}
@@ -288,12 +296,16 @@ pub fn new_at<T: Display>(scope: Span, cursor: Cursor, message: T) -> Error {
#[cfg(all(feature = "parsing", any(feature = "full", feature = "derive")))]
pub fn new2<T: Display>(start: Span, end: Span, message: T) -> Error {
- Error {
- messages: vec![ErrorMessage {
- start_span: ThreadBound::new(start),
- end_span: ThreadBound::new(end),
- message: message.to_string(),
- }],
+ return new2(start, end, message.to_string());
+
+ fn new2(start: Span, end: Span, message: String) -> Error {
+ Error {
+ messages: vec![ErrorMessage {
+ start_span: ThreadBound::new(start),
+ end_span: ThreadBound::new(end),
+ message,
+ }],
+ }
}
}
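
Each error.rs constructor above now converts its `Display` argument once and delegates to a non-generic inner `fn` of the same name, so the code that actually builds the `Error` is compiled a single time rather than once per caller type. A rough sketch of the same outlining pattern with placeholder names (not syn's code):

    use std::fmt::Display;

    pub fn describe<T: Display>(value: T) -> String {
        // Generic shim: do the T-dependent work once, then hand off.
        return inner(value.to_string());

        // Non-generic body: monomorphized exactly once, no matter how many
        // distinct `T`s call `describe`.
        fn inner(value: String) -> String {
            format!("value = {value}")
        }
    }

    fn main() {
        println!("{}", describe(42));
        println!("{}", describe("forty-two"));
    }
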
diff --git a/vendor/syn/src/generics.rs b/vendor/syn/src/generics.rs
index 9c2802f87..6d4fe847e 100644
--- a/vendor/syn/src/generics.rs
+++ b/vendor/syn/src/generics.rs
@@ -828,6 +828,31 @@ pub mod parsing {
}
}
+ impl TypeParamBound {
+ pub(crate) fn parse_multiple(
+ input: ParseStream,
+ allow_plus: bool,
+ ) -> Result<Punctuated<Self, Token![+]>> {
+ let mut bounds = Punctuated::new();
+ loop {
+ bounds.push_value(input.parse()?);
+ if !(allow_plus && input.peek(Token![+])) {
+ break;
+ }
+ bounds.push_punct(input.parse()?);
+ if !(input.peek(Ident::peek_any)
+ || input.peek(Token![::])
+ || input.peek(Token![?])
+ || input.peek(Lifetime)
+ || input.peek(token::Paren))
+ {
+ break;
+ }
+ }
+ Ok(bounds)
+ }
+ }
+
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
impl Parse for TraitBound {
fn parse(input: ParseStream) -> Result<Self> {
diff --git a/vendor/syn/src/lib.rs b/vendor/syn/src/lib.rs
index 81f03e1b5..e47ba28c6 100644
--- a/vendor/syn/src/lib.rs
+++ b/vendor/syn/src/lib.rs
@@ -250,13 +250,14 @@
//! dynamic library libproc_macro from rustc toolchain.
// Syn types in rustdoc of other crates get linked to here.
-#![doc(html_root_url = "https://docs.rs/syn/1.0.102")]
+#![doc(html_root_url = "https://docs.rs/syn/1.0.107")]
#![cfg_attr(doc_cfg, feature(doc_cfg))]
#![allow(non_camel_case_types)]
#![allow(
clippy::bool_to_int_with_if,
clippy::cast_lossless,
clippy::cast_possible_truncation,
+ clippy::cast_possible_wrap,
clippy::cast_ptr_alignment,
clippy::default_trait_access,
clippy::doc_markdown,
@@ -264,8 +265,8 @@
clippy::explicit_auto_deref,
clippy::if_not_else,
clippy::inherent_to_string,
+ clippy::items_after_statements,
clippy::large_enum_variant,
- clippy::let_underscore_drop,
clippy::manual_assert,
clippy::match_on_vec_items,
clippy::match_same_arms,
@@ -428,6 +429,7 @@ pub use crate::path::{
#[cfg(feature = "parsing")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub mod buffer;
+mod drops;
#[cfg(feature = "parsing")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub mod ext;
diff --git a/vendor/syn/src/path.rs b/vendor/syn/src/path.rs
index 742273afd..6cdb43ac5 100644
--- a/vendor/syn/src/path.rs
+++ b/vendor/syn/src/path.rs
@@ -89,9 +89,8 @@ impl PathArguments {
}
}
- #[cfg(feature = "parsing")]
- fn is_none(&self) -> bool {
- match *self {
+ pub fn is_none(&self) -> bool {
+ match self {
PathArguments::None => true,
PathArguments::AngleBracketed(_) | PathArguments::Parenthesized(_) => false,
}
diff --git a/vendor/syn/src/punctuated.rs b/vendor/syn/src/punctuated.rs
index 0fe1078cf..b7d0185e8 100644
--- a/vendor/syn/src/punctuated.rs
+++ b/vendor/syn/src/punctuated.rs
@@ -32,6 +32,7 @@ use std::option;
use std::slice;
use std::vec;
+use crate::drops::{NoDrop, TrivialDrop};
#[cfg(feature = "parsing")]
use crate::parse::{Parse, ParseStream, Result};
#[cfg(feature = "parsing")]
@@ -104,10 +105,10 @@ impl<T, P> Punctuated<T, P> {
/// Returns an iterator over borrowed syntax tree nodes of type `&T`.
pub fn iter(&self) -> Iter<T> {
Iter {
- inner: Box::new(PrivateIter {
+ inner: Box::new(NoDrop::new(PrivateIter {
inner: self.inner.iter(),
last: self.last.as_ref().map(Box::as_ref).into_iter(),
- }),
+ })),
}
}
@@ -115,10 +116,10 @@ impl<T, P> Punctuated<T, P> {
/// `&mut T`.
pub fn iter_mut(&mut self) -> IterMut<T> {
IterMut {
- inner: Box::new(PrivateIterMut {
+ inner: Box::new(NoDrop::new(PrivateIterMut {
inner: self.inner.iter_mut(),
last: self.last.as_mut().map(Box::as_mut).into_iter(),
- }),
+ })),
}
}
@@ -721,13 +722,13 @@ pub struct Iter<'a, T: 'a> {
// The `Item = &'a T` needs to be specified to support rustc 1.31 and older.
// On modern compilers we would be able to write just IterTrait<'a, T> where
// Item can be inferred unambiguously from the supertrait.
- inner: Box<dyn IterTrait<'a, T, Item = &'a T> + 'a>,
+ inner: Box<NoDrop<dyn IterTrait<'a, T, Item = &'a T> + 'a>>,
}
trait IterTrait<'a, T: 'a>:
DoubleEndedIterator<Item = &'a T> + ExactSizeIterator<Item = &'a T>
{
- fn clone_box(&self) -> Box<dyn IterTrait<'a, T, Item = &'a T> + 'a>;
+ fn clone_box(&self) -> Box<NoDrop<dyn IterTrait<'a, T, Item = &'a T> + 'a>>;
}
struct PrivateIter<'a, T: 'a, P: 'a> {
@@ -735,10 +736,17 @@ struct PrivateIter<'a, T: 'a, P: 'a> {
last: option::IntoIter<&'a T>,
}
+impl<'a, T, P> TrivialDrop for PrivateIter<'a, T, P>
+where
+ slice::Iter<'a, (T, P)>: TrivialDrop,
+ option::IntoIter<&'a T>: TrivialDrop,
+{
+}
+
#[cfg(any(feature = "full", feature = "derive"))]
pub(crate) fn empty_punctuated_iter<'a, T>() -> Iter<'a, T> {
Iter {
- inner: Box::new(iter::empty()),
+ inner: Box::new(NoDrop::new(iter::empty())),
}
}
@@ -813,10 +821,14 @@ impl<'a, T, P> Clone for PrivateIter<'a, T, P> {
impl<'a, T, I> IterTrait<'a, T> for I
where
T: 'a,
- I: DoubleEndedIterator<Item = &'a T> + ExactSizeIterator<Item = &'a T> + Clone + 'a,
+ I: DoubleEndedIterator<Item = &'a T>
+ + ExactSizeIterator<Item = &'a T>
+ + Clone
+ + TrivialDrop
+ + 'a,
{
- fn clone_box(&self) -> Box<dyn IterTrait<'a, T, Item = &'a T> + 'a> {
- Box::new(self.clone())
+ fn clone_box(&self) -> Box<NoDrop<dyn IterTrait<'a, T, Item = &'a T> + 'a>> {
+ Box::new(NoDrop::new(self.clone()))
}
}
@@ -826,7 +838,7 @@ where
///
/// [module documentation]: self
pub struct IterMut<'a, T: 'a> {
- inner: Box<dyn IterMutTrait<'a, T, Item = &'a mut T> + 'a>,
+ inner: Box<NoDrop<dyn IterMutTrait<'a, T, Item = &'a mut T> + 'a>>,
}
trait IterMutTrait<'a, T: 'a>:
@@ -839,10 +851,17 @@ struct PrivateIterMut<'a, T: 'a, P: 'a> {
last: option::IntoIter<&'a mut T>,
}
+impl<'a, T, P> TrivialDrop for PrivateIterMut<'a, T, P>
+where
+ slice::IterMut<'a, (T, P)>: TrivialDrop,
+ option::IntoIter<&'a mut T>: TrivialDrop,
+{
+}
+
#[cfg(any(feature = "full", feature = "derive"))]
pub(crate) fn empty_punctuated_iter_mut<'a, T>() -> IterMut<'a, T> {
IterMut {
- inner: Box::new(iter::empty()),
+ inner: Box::new(NoDrop::new(iter::empty())),
}
}
diff --git a/vendor/syn/src/ty.rs b/vendor/syn/src/ty.rs
index 4068be3c7..8c841e2f7 100644
--- a/vendor/syn/src/ty.rs
+++ b/vendor/syn/src/ty.rs
@@ -337,7 +337,7 @@ pub mod parsing {
use crate::ext::IdentExt;
use crate::parse::{Parse, ParseStream, Result};
use crate::path;
- use proc_macro2::{Punct, Spacing, TokenTree};
+ use proc_macro2::{Punct, Spacing, Span, TokenTree};
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
impl Parse for Type {
@@ -546,13 +546,17 @@ pub mod parsing {
|| lookahead.peek(Token![<])
{
let dyn_token: Option<Token![dyn]> = input.parse()?;
- if dyn_token.is_some() {
+ if let Some(dyn_token) = dyn_token {
+ let dyn_span = dyn_token.span;
let star_token: Option<Token![*]> = input.parse()?;
- let bounds = TypeTraitObject::parse_bounds(input, allow_plus)?;
+ let bounds = TypeTraitObject::parse_bounds(dyn_span, input, allow_plus)?;
return Ok(if star_token.is_some() {
Type::Verbatim(verbatim::between(begin, input))
} else {
- Type::TraitObject(TypeTraitObject { dyn_token, bounds })
+ Type::TraitObject(TypeTraitObject {
+ dyn_token: Some(dyn_token),
+ bounds,
+ })
});
}
@@ -896,15 +900,6 @@ pub mod parsing {
}
}
- fn at_least_one_type(bounds: &Punctuated<TypeParamBound, Token![+]>) -> bool {
- for bound in bounds {
- if let TypeParamBound::Trait(_) = *bound {
- return true;
- }
- }
- false
- }
-
impl TypeTraitObject {
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub fn without_plus(input: ParseStream) -> Result<Self> {
@@ -914,35 +909,38 @@ pub mod parsing {
// Only allow multiple trait references if allow_plus is true.
pub(crate) fn parse(input: ParseStream, allow_plus: bool) -> Result<Self> {
- Ok(TypeTraitObject {
- dyn_token: input.parse()?,
- bounds: Self::parse_bounds(input, allow_plus)?,
- })
+ let dyn_token: Option<Token![dyn]> = input.parse()?;
+ let dyn_span = match &dyn_token {
+ Some(token) => token.span,
+ None => input.span(),
+ };
+ let bounds = Self::parse_bounds(dyn_span, input, allow_plus)?;
+ Ok(TypeTraitObject { dyn_token, bounds })
}
fn parse_bounds(
+ dyn_span: Span,
input: ParseStream,
allow_plus: bool,
) -> Result<Punctuated<TypeParamBound, Token![+]>> {
- let mut bounds = Punctuated::new();
- loop {
- bounds.push_value(input.parse()?);
- if !(allow_plus && input.peek(Token![+])) {
- break;
- }
- bounds.push_punct(input.parse()?);
- if !(input.peek(Ident::peek_any)
- || input.peek(Token![::])
- || input.peek(Token![?])
- || input.peek(Lifetime)
- || input.peek(token::Paren))
- {
- break;
+ let bounds = TypeParamBound::parse_multiple(input, allow_plus)?;
+ let mut last_lifetime_span = None;
+ let mut at_least_one_trait = false;
+ for bound in &bounds {
+ match bound {
+ TypeParamBound::Trait(_) => {
+ at_least_one_trait = true;
+ break;
+ }
+ TypeParamBound::Lifetime(lifetime) => {
+ last_lifetime_span = Some(lifetime.ident.span());
+ }
}
}
// Just lifetimes like `'a + 'b` is not a TraitObject.
- if !at_least_one_type(&bounds) {
- return Err(input.error("expected at least one type"));
+ if !at_least_one_trait {
+ let msg = "at least one trait is required for an object type";
+ return Err(error::new2(dyn_span, last_lifetime_span.unwrap(), msg));
}
Ok(bounds)
}
@@ -964,10 +962,30 @@ pub mod parsing {
}
pub(crate) fn parse(input: ParseStream, allow_plus: bool) -> Result<Self> {
- Ok(TypeImplTrait {
- impl_token: input.parse()?,
- bounds: TypeTraitObject::parse_bounds(input, allow_plus)?,
- })
+ let impl_token: Token![impl] = input.parse()?;
+ let bounds = TypeParamBound::parse_multiple(input, allow_plus)?;
+ let mut last_lifetime_span = None;
+ let mut at_least_one_trait = false;
+ for bound in &bounds {
+ match bound {
+ TypeParamBound::Trait(_) => {
+ at_least_one_trait = true;
+ break;
+ }
+ TypeParamBound::Lifetime(lifetime) => {
+ last_lifetime_span = Some(lifetime.ident.span());
+ }
+ }
+ }
+ if !at_least_one_trait {
+ let msg = "at least one trait must be specified";
+ return Err(error::new2(
+ impl_token.span,
+ last_lifetime_span.unwrap(),
+ msg,
+ ));
+ }
+ Ok(TypeImplTrait { impl_token, bounds })
}
}
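
With the ty.rs changes above, a trait object (or `impl Trait`) written with only lifetime bounds now produces an error spanned from the `dyn`/`impl` keyword to the last lifetime instead of a generic end-of-input message. One way to observe it, assuming syn ~1.0.107 with default features as a dependency:

    fn main() {
        // `dyn` followed only by lifetimes is not a valid trait object.
        match syn::parse_str::<syn::Type>("dyn 'a + 'b") {
            // Expected message: "at least one trait is required for an object type"
            Err(err) => println!("{err}"),
            Ok(_) => println!("unexpectedly parsed"),
        }
    }
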
diff --git a/vendor/syn/src/verbatim.rs b/vendor/syn/src/verbatim.rs
index 0686352f7..58cf68d17 100644
--- a/vendor/syn/src/verbatim.rs
+++ b/vendor/syn/src/verbatim.rs
@@ -1,13 +1,31 @@
use crate::parse::{ParseBuffer, ParseStream};
-use proc_macro2::TokenStream;
+use proc_macro2::{Delimiter, TokenStream};
+use std::cmp::Ordering;
use std::iter;
pub fn between<'a>(begin: ParseBuffer<'a>, end: ParseStream<'a>) -> TokenStream {
let end = end.cursor();
let mut cursor = begin.cursor();
+ assert!(crate::buffer::same_buffer(end, cursor));
+
let mut tokens = TokenStream::new();
while cursor != end {
let (tt, next) = cursor.token_tree().unwrap();
+
+ if crate::buffer::cmp_assuming_same_buffer(end, next) == Ordering::Less {
+ // A syntax node can cross the boundary of a None-delimited group
+ // due to such groups being transparent to the parser in most cases.
+ // Any time this occurs the group is known to be semantically
+ // irrelevant. https://github.com/dtolnay/syn/issues/1235
+ if let Some((inside, _span, after)) = cursor.group(Delimiter::None) {
+ assert!(next == after);
+ cursor = inside;
+ continue;
+ } else {
+ panic!("verbatim end must not be inside a delimited group");
+ }
+ }
+
tokens.extend(iter::once(tt));
cursor = next;
}
diff --git a/vendor/syn/tests/common/eq.rs b/vendor/syn/tests/common/eq.rs
index a53146241..41d6d4118 100644
--- a/vendor/syn/tests/common/eq.rs
+++ b/vendor/syn/tests/common/eq.rs
@@ -13,6 +13,8 @@ use rustc_ast::ast::AssocConstraint;
use rustc_ast::ast::AssocConstraintKind;
use rustc_ast::ast::AssocItemKind;
use rustc_ast::ast::Async;
+use rustc_ast::ast::AttrArgs;
+use rustc_ast::ast::AttrArgsEq;
use rustc_ast::ast::AttrId;
use rustc_ast::ast::AttrItem;
use rustc_ast::ast::AttrKind;
@@ -26,10 +28,12 @@ use rustc_ast::ast::BlockCheckMode;
use rustc_ast::ast::BorrowKind;
use rustc_ast::ast::ByRef;
use rustc_ast::ast::CaptureBy;
+use rustc_ast::ast::Closure;
use rustc_ast::ast::ClosureBinder;
use rustc_ast::ast::Const;
use rustc_ast::ast::Crate;
use rustc_ast::ast::Defaultness;
+use rustc_ast::ast::DelimArgs;
use rustc_ast::ast::EnumDef;
use rustc_ast::ast::Expr;
use rustc_ast::ast::ExprField;
@@ -65,19 +69,18 @@ use rustc_ast::ast::Item;
use rustc_ast::ast::ItemKind;
use rustc_ast::ast::Label;
use rustc_ast::ast::Lifetime;
-use rustc_ast::ast::Lit;
use rustc_ast::ast::LitFloatType;
use rustc_ast::ast::LitIntType;
use rustc_ast::ast::LitKind;
use rustc_ast::ast::Local;
use rustc_ast::ast::LocalKind;
-use rustc_ast::ast::MacArgs;
-use rustc_ast::ast::MacArgsEq;
use rustc_ast::ast::MacCall;
use rustc_ast::ast::MacCallStmt;
use rustc_ast::ast::MacDelimiter;
use rustc_ast::ast::MacStmtStyle;
use rustc_ast::ast::MacroDef;
+use rustc_ast::ast::MetaItemLit;
+use rustc_ast::ast::MethodCall;
use rustc_ast::ast::ModKind;
use rustc_ast::ast::ModSpans;
use rustc_ast::ast::Movability;
@@ -128,7 +131,7 @@ use rustc_ast::ast::WhereEqPredicate;
use rustc_ast::ast::WherePredicate;
use rustc_ast::ast::WhereRegionPredicate;
use rustc_ast::ptr::P;
-use rustc_ast::token::{self, CommentKind, Delimiter, Nonterminal, Token, TokenKind};
+use rustc_ast::token::{self, CommentKind, Delimiter, Lit, Nonterminal, Token, TokenKind};
use rustc_ast::tokenstream::{
AttrTokenStream, AttrTokenTree, AttributesData, DelimSpan, LazyAttrTokenStream, Spacing,
TokenStream, TokenTree,
@@ -411,7 +414,9 @@ spanless_eq_struct!(AttributesData; attrs tokens);
spanless_eq_struct!(BareFnTy; unsafety ext generic_params decl decl_span);
spanless_eq_struct!(BindingAnnotation; 0 1);
spanless_eq_struct!(Block; stmts id rules span tokens could_be_bare_literal);
+spanless_eq_struct!(Closure; binder capture_clause asyncness movability fn_decl body !fn_decl_span);
spanless_eq_struct!(Crate; attrs items spans id is_placeholder);
+spanless_eq_struct!(DelimArgs; dspan delim tokens);
spanless_eq_struct!(EnumDef; variants);
spanless_eq_struct!(Expr; id kind span attrs !tokens);
spanless_eq_struct!(ExprField; attrs id span ident expr is_shorthand is_placeholder);
@@ -429,11 +434,13 @@ spanless_eq_struct!(InlineAsmSym; id qself path);
spanless_eq_struct!(Item<K>; attrs id span vis ident kind !tokens);
spanless_eq_struct!(Label; ident);
spanless_eq_struct!(Lifetime; id ident);
-spanless_eq_struct!(Lit; token_lit kind span);
+spanless_eq_struct!(Lit; kind symbol suffix);
spanless_eq_struct!(Local; pat ty kind id span attrs !tokens);
spanless_eq_struct!(MacCall; path args prior_type_ascription);
spanless_eq_struct!(MacCallStmt; mac style attrs tokens);
spanless_eq_struct!(MacroDef; body macro_rules);
+spanless_eq_struct!(MetaItemLit; token_lit kind span);
+spanless_eq_struct!(MethodCall; seg receiver args !span);
spanless_eq_struct!(ModSpans; !inner_span !inject_use_span);
spanless_eq_struct!(MutTy; ty mutbl);
spanless_eq_struct!(NormalAttr; item tokens);
@@ -460,11 +467,12 @@ spanless_eq_struct!(WhereBoundPredicate; span bound_generic_params bounded_ty bo
spanless_eq_struct!(WhereClause; has_where_token predicates span);
spanless_eq_struct!(WhereEqPredicate; span lhs_ty rhs_ty);
spanless_eq_struct!(WhereRegionPredicate; span lifetime bounds);
-spanless_eq_struct!(token::Lit; kind symbol suffix);
spanless_eq_enum!(AngleBracketedArg; Arg(0) Constraint(0));
-spanless_eq_enum!(AssocItemKind; Const(0 1 2) Fn(0) TyAlias(0) MacCall(0));
+spanless_eq_enum!(AssocItemKind; Const(0 1 2) Fn(0) Type(0) MacCall(0));
spanless_eq_enum!(AssocConstraintKind; Equality(term) Bound(bounds));
spanless_eq_enum!(Async; Yes(span closure_id return_impl_trait_id) No);
+spanless_eq_enum!(AttrArgs; Empty Delimited(0) Eq(0 1));
+spanless_eq_enum!(AttrArgsEq; Ast(0) Hir(0));
spanless_eq_enum!(AttrStyle; Outer Inner);
spanless_eq_enum!(AttrTokenTree; Token(0 1) Delimited(0 1 2) Attributes(0));
spanless_eq_enum!(BinOpKind; Add Sub Mul Div Rem And Or BitXor BitAnd BitOr Shl Shr Eq Lt Le Ne Ge Gt);
@@ -492,8 +500,6 @@ spanless_eq_enum!(IsAuto; Yes No);
spanless_eq_enum!(LitFloatType; Suffixed(0) Unsuffixed);
spanless_eq_enum!(LitIntType; Signed(0) Unsigned(0) Unsuffixed);
spanless_eq_enum!(LocalKind; Decl Init(0) InitElse(0 1));
-spanless_eq_enum!(MacArgs; Empty Delimited(0 1 2) Eq(0 1));
-spanless_eq_enum!(MacArgsEq; Ast(0) Hir(0));
spanless_eq_enum!(MacDelimiter; Parenthesis Bracket Brace);
spanless_eq_enum!(MacStmtStyle; Semicolon Braces NoBraces);
spanless_eq_enum!(ModKind; Loaded(0 1 2) Unloaded);
@@ -512,18 +518,18 @@ spanless_eq_enum!(UintTy; Usize U8 U16 U32 U64 U128);
spanless_eq_enum!(UnOp; Deref Not Neg);
spanless_eq_enum!(Unsafe; Yes(0) No);
spanless_eq_enum!(UnsafeSource; CompilerGenerated UserProvided);
-spanless_eq_enum!(UseTreeKind; Simple(0 1 2) Nested(0) Glob);
+spanless_eq_enum!(UseTreeKind; Simple(0) Nested(0) Glob);
spanless_eq_enum!(VariantData; Struct(0 1) Tuple(0 1) Unit(0));
spanless_eq_enum!(VisibilityKind; Public Restricted(path id shorthand) Inherited);
spanless_eq_enum!(WherePredicate; BoundPredicate(0) RegionPredicate(0) EqPredicate(0));
spanless_eq_enum!(ExprKind; Box(0) Array(0) ConstBlock(0) Call(0 1)
- MethodCall(0 1 2 3) Tup(0) Binary(0 1 2) Unary(0 1) Lit(0) Cast(0 1)
- Type(0 1) Let(0 1 2) If(0 1 2) While(0 1 2) ForLoop(0 1 2 3) Loop(0 1)
- Match(0 1) Closure(0 1 2 3 4 5 6) Block(0 1) Async(0 1 2) Await(0)
- TryBlock(0) Assign(0 1 2) AssignOp(0 1 2) Field(0 1) Index(0 1) Underscore
- Range(0 1 2) Path(0 1) AddrOf(0 1 2) Break(0 1) Continue(0) Ret(0)
- InlineAsm(0) MacCall(0) Struct(0) Repeat(0 1) Paren(0) Try(0) Yield(0)
- Yeet(0) Err);
+ MethodCall(0) Tup(0) Binary(0 1 2) Unary(0 1) Lit(0) Cast(0 1) Type(0 1)
+ Let(0 1 2) If(0 1 2) While(0 1 2) ForLoop(0 1 2 3) Loop(0 1 2) Match(0 1)
+ Closure(0) Block(0 1) Async(0 1 2) Await(0) TryBlock(0) Assign(0 1 2)
+ AssignOp(0 1 2) Field(0 1) Index(0 1) Underscore Range(0 1 2) Path(0 1)
+ AddrOf(0 1 2) Break(0 1) Continue(0) Ret(0) InlineAsm(0) MacCall(0)
+ Struct(0) Repeat(0 1) Paren(0) Try(0) Yield(0) Yeet(0) IncludedBytes(0)
+ Err);
spanless_eq_enum!(InlineAsmOperand; In(reg expr) Out(reg late expr)
InOut(reg late expr) SplitInOut(reg late in_expr out_expr) Const(anon_const)
Sym(sym));
@@ -697,8 +703,8 @@ fn is_escaped_literal_token(token: &Token, unescaped: Symbol) -> bool {
Token {
kind: TokenKind::Literal(lit),
span: _,
- } => match Lit::from_token_lit(*lit, DUMMY_SP) {
- Ok(lit) => is_escaped_literal(&lit, unescaped),
+ } => match MetaItemLit::from_token_lit(*lit, DUMMY_SP) {
+ Ok(lit) => is_escaped_literal_meta_item_lit(&lit, unescaped),
Err(_) => false,
},
Token {
@@ -706,7 +712,7 @@ fn is_escaped_literal_token(token: &Token, unescaped: Symbol) -> bool {
span: _,
} => match nonterminal.as_ref() {
Nonterminal::NtExpr(expr) => match &expr.kind {
- ExprKind::Lit(lit) => is_escaped_literal(lit, unescaped),
+ ExprKind::Lit(lit) => is_escaped_lit(lit, unescaped),
_ => false,
},
_ => false,
@@ -715,28 +721,51 @@ fn is_escaped_literal_token(token: &Token, unescaped: Symbol) -> bool {
}
}
-fn is_escaped_literal_macro_arg(arg: &MacArgsEq, unescaped: Symbol) -> bool {
- match arg {
- MacArgsEq::Ast(expr) => match &expr.kind {
- ExprKind::Lit(lit) => is_escaped_literal(lit, unescaped),
+fn is_escaped_literal_attr_args(value: &AttrArgsEq, unescaped: Symbol) -> bool {
+ match value {
+ AttrArgsEq::Ast(expr) => match &expr.kind {
+ ExprKind::Lit(lit) => is_escaped_lit(lit, unescaped),
_ => false,
},
- MacArgsEq::Hir(lit) => is_escaped_literal(lit, unescaped),
+ AttrArgsEq::Hir(lit) => is_escaped_literal_meta_item_lit(lit, unescaped),
}
}
-fn is_escaped_literal(lit: &Lit, unescaped: Symbol) -> bool {
+fn is_escaped_literal_meta_item_lit(lit: &MetaItemLit, unescaped: Symbol) -> bool {
match lit {
- Lit {
+ MetaItemLit {
token_lit:
- token::Lit {
+ Lit {
kind: token::LitKind::Str,
symbol: _,
suffix: None,
},
- kind: LitKind::Str(symbol, StrStyle::Cooked),
+ kind,
span: _,
- } => symbol.as_str().replace('\r', "") == unescaped.as_str().replace('\r', ""),
+ } => is_escaped_lit_kind(kind, unescaped),
+ _ => false,
+ }
+}
+
+fn is_escaped_lit(lit: &Lit, unescaped: Symbol) -> bool {
+ match lit {
+ Lit {
+ kind: token::LitKind::Str,
+ symbol: _,
+ suffix: None,
+ } => match LitKind::from_token_lit(*lit) {
+ Ok(lit_kind) => is_escaped_lit_kind(&lit_kind, unescaped),
+ _ => false,
+ },
+ _ => false,
+ }
+}
+
+fn is_escaped_lit_kind(kind: &LitKind, unescaped: Symbol) -> bool {
+ match kind {
+ LitKind::Str(symbol, StrStyle::Cooked) => {
+ symbol.as_str().replace('\r', "") == unescaped.as_str().replace('\r', "")
+ }
_ => false,
}
}
@@ -765,9 +794,9 @@ impl SpanlessEq for AttrKind {
let path = Path::from_ident(Ident::with_dummy_span(sym::doc));
SpanlessEq::eq(&path, &normal2.item.path)
&& match &normal2.item.args {
- MacArgs::Empty | MacArgs::Delimited(..) => false,
- MacArgs::Eq(_span, token) => {
- is_escaped_literal_macro_arg(token, *unescaped)
+ AttrArgs::Empty | AttrArgs::Delimited(_) => false,
+ AttrArgs::Eq(_span, value) => {
+ is_escaped_literal_attr_args(value, *unescaped)
}
}
}
diff --git a/vendor/syn/tests/regression.rs b/vendor/syn/tests/regression.rs
index 8311a91bf..fb2b25c89 100644
--- a/vendor/syn/tests/regression.rs
+++ b/vendor/syn/tests/regression.rs
@@ -1,5 +1,3 @@
-#![allow(clippy::let_underscore_drop)]
-
mod regression {
automod::dir!("tests/regression");
}
diff --git a/vendor/syn/tests/regression/issue1235.rs b/vendor/syn/tests/regression/issue1235.rs
new file mode 100644
index 000000000..883603066
--- /dev/null
+++ b/vendor/syn/tests/regression/issue1235.rs
@@ -0,0 +1,32 @@
+use proc_macro2::{Delimiter, Group};
+use quote::quote;
+
+#[test]
+fn main() {
+ // Okay. Rustc allows top-level `static` with no value syntactically, but
+ // not semantically. Syn parses as Item::Verbatim.
+ let tokens = quote! {
+ pub static FOO: usize;
+ pub static BAR: usize;
+ };
+ let file = syn::parse2::<syn::File>(tokens).unwrap();
+ println!("{:#?}", file);
+
+ // Okay.
+ let inner = Group::new(
+ Delimiter::None,
+ quote!(static FOO: usize = 0; pub static BAR: usize = 0),
+ );
+ let tokens = quote!(pub #inner;);
+ let file = syn::parse2::<syn::File>(tokens).unwrap();
+ println!("{:#?}", file);
+
+ // Formerly parser crash.
+ let inner = Group::new(
+ Delimiter::None,
+ quote!(static FOO: usize; pub static BAR: usize),
+ );
+ let tokens = quote!(pub #inner;);
+ let file = syn::parse2::<syn::File>(tokens).unwrap();
+ println!("{:#?}", file);
+}
diff --git a/vendor/syn/tests/repo/mod.rs b/vendor/syn/tests/repo/mod.rs
index 4c7be853b..8418b8719 100644
--- a/vendor/syn/tests/repo/mod.rs
+++ b/vendor/syn/tests/repo/mod.rs
@@ -188,7 +188,7 @@ fn download_and_unpack() -> Result<()> {
"https://github.com/rust-lang/rust/archive/{}.tar.gz",
REVISION
);
- let response = reqwest::blocking::get(&url)?.error_for_status()?;
+ let response = reqwest::blocking::get(url)?.error_for_status()?;
let progress = Progress::new(response);
let decoder = GzDecoder::new(progress);
let mut archive = Archive::new(decoder);
diff --git a/vendor/syn/tests/test_expr.rs b/vendor/syn/tests/test_expr.rs
index 8fd911e2a..e5b151fd8 100644
--- a/vendor/syn/tests/test_expr.rs
+++ b/vendor/syn/tests/test_expr.rs
@@ -53,36 +53,37 @@ fn test_await() {
#[rustfmt::skip]
#[test]
fn test_tuple_multi_index() {
+ let expected = snapshot!("tuple.0.0" as Expr, @r###"
+ Expr::Field {
+ base: Expr::Field {
+ base: Expr::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "tuple",
+ arguments: None,
+ },
+ ],
+ },
+ },
+ member: Unnamed(Index {
+ index: 0,
+ }),
+ },
+ member: Unnamed(Index {
+ index: 0,
+ }),
+ }
+ "###);
+
for &input in &[
- "tuple.0.0",
"tuple .0.0",
"tuple. 0.0",
"tuple.0 .0",
"tuple.0. 0",
"tuple . 0 . 0",
] {
- snapshot!(input as Expr, @r###"
- Expr::Field {
- base: Expr::Field {
- base: Expr::Path {
- path: Path {
- segments: [
- PathSegment {
- ident: "tuple",
- arguments: None,
- },
- ],
- },
- },
- member: Unnamed(Index {
- index: 0,
- }),
- },
- member: Unnamed(Index {
- index: 0,
- }),
- }
- "###);
+ assert_eq!(expected, syn::parse_str(input).unwrap());
}
for tokens in vec![
@@ -93,28 +94,7 @@ fn test_tuple_multi_index() {
quote!(tuple.0. 0),
quote!(tuple . 0 . 0),
] {
- snapshot!(tokens as Expr, @r###"
- Expr::Field {
- base: Expr::Field {
- base: Expr::Path {
- path: Path {
- segments: [
- PathSegment {
- ident: "tuple",
- arguments: None,
- },
- ],
- },
- },
- member: Unnamed(Index {
- index: 0,
- }),
- },
- member: Unnamed(Index {
- index: 0,
- }),
- }
- "###);
+ assert_eq!(expected, syn::parse2(tokens).unwrap());
}
}
diff --git a/vendor/syn/tests/test_iterators.rs b/vendor/syn/tests/test_iterators.rs
index 2c8359c15..0ab0fb914 100644
--- a/vendor/syn/tests/test_iterators.rs
+++ b/vendor/syn/tests/test_iterators.rs
@@ -47,3 +47,22 @@ fn iter() {
assert_eq!(p.iter_mut().next_back(), Some(&mut 4));
assert_eq!(p.into_iter().next_back(), Some(4));
}
+
+#[test]
+fn may_dangle() {
+ let p: Punctuated<_, Token![,]> = punctuated!(2, 3, 4);
+ for element in &p {
+ if *element == 2 {
+ drop(p);
+ break;
+ }
+ }
+
+ let mut p: Punctuated<_, Token![,]> = punctuated!(2, 3, 4);
+ for element in &mut p {
+ if *element == 2 {
+ drop(p);
+ break;
+ }
+ }
+}
diff --git a/vendor/syn/tests/test_round_trip.rs b/vendor/syn/tests/test_round_trip.rs
index c7e6e488f..9a5801d44 100644
--- a/vendor/syn/tests/test_round_trip.rs
+++ b/vendor/syn/tests/test_round_trip.rs
@@ -21,8 +21,8 @@ use rustc_ast::ast::{
WhereClause,
};
use rustc_ast::mut_visit::{self, MutVisitor};
-use rustc_error_messages::{DiagnosticMessage, FluentArgs, LazyFallbackBundle};
-use rustc_errors::{Diagnostic, PResult};
+use rustc_error_messages::{DiagnosticMessage, LazyFallbackBundle};
+use rustc_errors::{translation, Diagnostic, PResult};
use rustc_session::parse::ParseSess;
use rustc_span::source_map::FilePathMapping;
use rustc_span::FileName;
@@ -168,10 +168,10 @@ fn translate_message(diagnostic: &Diagnostic) -> String {
}
let message = &diagnostic.message[0].0;
- let args = diagnostic.args().iter().cloned().collect::<FluentArgs>();
+ let args = translation::to_fluent_args(diagnostic.args());
let (identifier, attr) = match message {
- DiagnosticMessage::Str(msg) => return msg.clone(),
+ DiagnosticMessage::Str(msg) | DiagnosticMessage::Eager(msg) => return msg.clone(),
DiagnosticMessage::FluentIdentifier(identifier, attr) => (identifier, attr),
};
diff --git a/vendor/syn/tests/test_size.rs b/vendor/syn/tests/test_size.rs
index 02b0700f0..32c6edaed 100644
--- a/vendor/syn/tests/test_size.rs
+++ b/vendor/syn/tests/test_size.rs
@@ -5,7 +5,7 @@ use syn::{Expr, Item, Lit, Pat, Type};
#[test]
fn test_expr_size() {
- assert_eq!(mem::size_of::<Expr>(), 264);
+ assert_eq!(mem::size_of::<Expr>(), 272);
}
#[test]
@@ -15,7 +15,7 @@ fn test_item_size() {
#[test]
fn test_type_size() {
- assert_eq!(mem::size_of::<Type>(), 280);
+ assert_eq!(mem::size_of::<Type>(), 288);
}
#[test]
diff --git a/vendor/thin-vec/.cargo-checksum.json b/vendor/thin-vec/.cargo-checksum.json
index 33ca74234..9e4702880 100644
--- a/vendor/thin-vec/.cargo-checksum.json
+++ b/vendor/thin-vec/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.toml":"49f429a38e2c6216dfddc1f23af64a3b06cf92217d75cae2a9ac389bc3f76e46","README.md":"9f102f13ccbabe9cdec7a206aa298d65e33dea84da9f08dd17b358ff44fe0286","src/lib.rs":"6a5451d75037e3cb12bde5198266a0db00432e1370cad74b0dda8df8fb64f067"},"package":"ceb05e71730d396f960f8f3901cdb41be2d339b303e9d7d3a07c5ff0536e671b"} \ No newline at end of file
+{"files":{"Cargo.toml":"391230d6db1276baa00856a9ded6ccc426a447d04a23661d7b4461137f398745","README.md":"9f102f13ccbabe9cdec7a206aa298d65e33dea84da9f08dd17b358ff44fe0286","src/lib.rs":"d3367f69119c46ac4ca8bb0a4c86c77606119200aebd56b7a30096c08a22ba40"},"package":"aac81b6fd6beb5884b0cf3321b8117e6e5d47ecb6fc89f414cfdcca8b2fe2dd8"} \ No newline at end of file
diff --git a/vendor/thin-vec/Cargo.toml b/vendor/thin-vec/Cargo.toml
index 8361d94d4..01b84e4ca 100644
--- a/vendor/thin-vec/Cargo.toml
+++ b/vendor/thin-vec/Cargo.toml
@@ -12,7 +12,7 @@
[package]
edition = "2018"
name = "thin-vec"
-version = "0.2.9"
+version = "0.2.12"
authors = ["Aria Beingessner <a.beingessner@gmail.com>"]
description = "A vec that takes up less space on the stack"
homepage = "https://github.com/gankra/thin-vec"
diff --git a/vendor/thin-vec/src/lib.rs b/vendor/thin-vec/src/lib.rs
index a2384c62e..ea24aed2e 100644
--- a/vendor/thin-vec/src/lib.rs
+++ b/vendor/thin-vec/src/lib.rs
@@ -1,30 +1,32 @@
-//! ThinVec is exactly the same as Vec, except that it stores its `len` and `capacity` in the buffer
+#![deny(missing_docs)]
+
+//! `ThinVec` is exactly the same as `Vec`, except that it stores its `len` and `capacity` in the buffer
//! it allocates.
//!
//! This makes the memory footprint of ThinVecs lower; notably in cases where space is reserved for
-//! a non-existence ThinVec<T>. So `Vec<ThinVec<T>>` and `Option<ThinVec<T>>::None` will waste less
+//! a non-existent `ThinVec<T>`. So `Vec<ThinVec<T>>` and `Option<ThinVec<T>>::None` will waste less
//! space. Being pointer-sized also means it can be passed/stored in registers.
//!
-//! Of course, any actually constructed ThinVec will theoretically have a bigger allocation, but
+//! Of course, any actually constructed `ThinVec` will theoretically have a bigger allocation, but
//! the fuzzy nature of allocators means that might not actually be the case.
//!
-//! Properties of Vec that are preserved:
+//! Properties of `Vec` that are preserved:
//! * `ThinVec::new()` doesn't allocate (it points to a statically allocated singleton)
//! * reallocation can be done in place
//! * `size_of::<ThinVec<T>>()` == `size_of::<Option<ThinVec<T>>>()`
//!
-//! Properties of Vec that aren't preserved:
+//! Properties of `Vec` that aren't preserved:
//! * `ThinVec<T>` can't ever be zero-cost roundtripped to a `Box<[T]>`, `String`, or `*mut T`
//! * `from_raw_parts` doesn't exist
-//! * ThinVec currently doesn't bother to not-allocate for Zero Sized Types (e.g. `ThinVec<()>`),
+//! * `ThinVec` currently doesn't bother to not-allocate for Zero Sized Types (e.g. `ThinVec<()>`),
//! but it could be done if someone cared enough to implement it.
//!
//!
//!
//! # Gecko FFI
//!
-//! If you enable the gecko-ffi feature, ThinVec will verbatim bridge with the nsTArray type in
-//! Gecko (Firefox). That is, ThinVec and nsTArray have identical layouts *but not ABIs*,
+//! If you enable the gecko-ffi feature, `ThinVec` will verbatim bridge with the nsTArray type in
+//! Gecko (Firefox). That is, `ThinVec` and nsTArray have identical layouts *but not ABIs*,
//! so nsTArrays/ThinVecs can be natively manipulated by C++ and Rust, and ownership can be
//! transferred across the FFI boundary (**IF YOU ARE CAREFUL, SEE BELOW!!**).
//!
@@ -105,17 +107,17 @@
//! While relocations are generally predictable if you're very careful, **you should avoid using
//! types with significant locations with Rust FFI**.
//!
-//! Specifically, ThinVec will trivially relocate its contents whenever it needs to reallocate its
+//! Specifically, `ThinVec` will trivially relocate its contents whenever it needs to reallocate its
//! buffer to change its capacity. This is the default reallocation strategy for nsTArray, and is
//! suitable for the vast majority of types. Just be aware of this limitation!
//!
//! ## Auto Arrays Are Dangerous
//!
-//! ThinVec has *some* support for handling auto arrays which store their buffer on the stack,
+//! `ThinVec` has *some* support for handling auto arrays which store their buffer on the stack,
//! but this isn't well tested.
//!
//! Regardless of how much support we provide, Rust won't be aware of the buffer's limited lifetime,
-//! so standard auto array safety caveats apply about returning/storing them! ThinVec won't ever
+//! so standard auto array safety caveats apply about returning/storing them! `ThinVec` won't ever
//! produce an auto array on its own, so this is only an issue for transferring an nsTArray into
//! Rust.
//!
@@ -133,7 +135,7 @@
//! defined. Specifically, we must share the symbol for nsTArray's empty singleton. You will get
//! linking errors if that isn't defined.
//!
-//! The gecko-ffi feature also limits ThinVec to the legacy behaviors of nsTArray. Most notably,
+//! The gecko-ffi feature also limits `ThinVec` to the legacy behaviors of nsTArray. Most notably,
//! nsTArray has a maximum capacity of i32::MAX (~2.1 billion items). Probably not an issue.
//! Probably.
//!
@@ -144,6 +146,8 @@
use std::alloc::*;
use std::borrow::*;
use std::cmp::*;
+use std::convert::TryFrom;
+use std::convert::TryInto;
use std::hash::*;
use std::iter::FromIterator;
use std::marker::PhantomData;
@@ -172,7 +176,7 @@ mod impl_details {
mod impl_details {
    // Support for bridging a gecko nsTArray verbatim into a ThinVec.
//
- // ThinVec can't see copy/move/delete implementations
+ // `ThinVec` can't see copy/move/delete implementations
// from C++
//
// The actual layout of an nsTArray is:
@@ -187,7 +191,7 @@ mod impl_details {
//
// Rust doesn't natively support bit-fields, so we manually mask
// and shift the bit. When the "auto" bit is set, the header and buffer
- // are actually on the stack, meaning the ThinVec pointer-to-header
+ // are actually on the stack, meaning the `ThinVec` pointer-to-header
// is essentially an "owned borrow", and therefore dangerous to handle.
// There are no safety guards for this situation.
//
@@ -195,7 +199,7 @@ mod impl_details {
// our capacity u32. On big-endian platforms, it will be the low bit.
// Hence we need some platform-specific CFGs for the necessary masking/shifting.
//
- // ThinVec won't ever construct an auto array. They only happen when
+ // `ThinVec` won't ever construct an auto array. They only happen when
// bridging from C++. This means we don't need to ever set/preserve the bit.
// We just need to be able to read and handle it if it happens to be there.
//
@@ -270,10 +274,13 @@ struct Header {
}
impl Header {
+ #[inline]
+ #[allow(clippy::unnecessary_cast)]
fn len(&self) -> usize {
self._len as usize
}
+ #[inline]
fn set_len(&mut self, len: usize) {
self._len = assert_size(len);
}
@@ -303,6 +310,7 @@ impl Header {
#[cfg(not(feature = "gecko-ffi"))]
impl Header {
+ #[allow(clippy::unnecessary_cast)]
fn cap(&self) -> usize {
self._cap as usize
}
@@ -326,24 +334,40 @@ extern "C" {
static EMPTY_HEADER: Header;
}
-// TODO: overflow checks everywhere
-
// Utils for computing layouts of allocations
+/// Gets the size necessary to allocate a `ThinVec<T>` with the given capacity.
+///
+/// # Panics
+///
+/// This will panic if isize::MAX is overflowed at any point.
fn alloc_size<T>(cap: usize) -> usize {
// Compute "real" header size with pointer math
- let header_size = mem::size_of::<Header>();
- let elem_size = mem::size_of::<T>();
- let padding = padding::<T>();
-
- // TODO: care about isize::MAX overflow?
- let data_size = elem_size.checked_mul(cap).expect("capacity overflow");
+ //
+ // We turn everything into isizes here so that we can catch isize::MAX overflow,
+ // we never want to allow allocations larger than that!
+ let header_size = mem::size_of::<Header>() as isize;
+ let padding = padding::<T>() as isize;
+
+ let data_size = if mem::size_of::<T>() == 0 {
+ // If we're allocating an array for ZSTs we need a header/padding but no actual
+ // space for items, so we don't care about the capacity that was requested!
+ 0
+ } else {
+ let cap: isize = cap.try_into().expect("capacity overflow");
+ let elem_size = mem::size_of::<T>() as isize;
+ elem_size.checked_mul(cap).expect("capacity overflow")
+ };
- data_size
+ let final_size = data_size
.checked_add(header_size + padding)
- .expect("capacity overflow")
+ .expect("capacity overflow");
+
+ // Ok now we can turn it back into a usize (don't need to worry about negatives)
+ final_size as usize
}
+/// Gets the padding necessary for the array of a `ThinVec<T>`
fn padding<T>() -> usize {
let alloc_align = alloc_align::<T>();
let header_size = mem::size_of::<Header>();
@@ -361,14 +385,25 @@ fn padding<T>() -> usize {
}
}
+/// Gets the align necessary to allocate a `ThinVec<T>`
fn alloc_align<T>() -> usize {
max(mem::align_of::<T>(), mem::align_of::<Header>())
}
+/// Gets the layout necessary to allocate a `ThinVec<T>`
+///
+/// # Panics
+///
+/// Panics if the required size overflows `isize::MAX`.
fn layout<T>(cap: usize) -> Layout {
unsafe { Layout::from_size_align_unchecked(alloc_size::<T>(cap), alloc_align::<T>()) }
}
+/// Allocates a header (and array) for a `ThinVec<T>` with the given capacity.
+///
+/// # Panics
+///
+/// Panics if the required size overflows `isize::MAX`.
fn header_with_capacity<T>(cap: usize) -> NonNull<Header> {
debug_assert!(cap > 0);
unsafe {
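
The reworked `alloc_size` above performs the header + padding + cap * size_of::<T>() arithmetic in `isize` so that anything past `isize::MAX` panics with "capacity overflow" rather than wrapping, and it special-cases zero-sized types to a zero-length data segment. A standalone sketch of the same checked computation (a hypothetical helper, not thin-vec's actual function):

    use std::mem;

    fn checked_alloc_size<T>(header_size: usize, padding: usize, cap: usize) -> usize {
        let header = header_size as isize;
        let padding = padding as isize;
        let data = if mem::size_of::<T>() == 0 {
            // ZSTs need a header (and padding) but no element storage.
            0
        } else {
            let cap = isize::try_from(cap).expect("capacity overflow");
            (mem::size_of::<T>() as isize)
                .checked_mul(cap)
                .expect("capacity overflow")
        };
        data.checked_add(header + padding)
            .expect("capacity overflow") as usize
    }

    fn main() {
        // e.g. a 16-byte header, no padding, room for 8 u64s => 16 + 64 bytes.
        assert_eq!(checked_alloc_size::<u64>(16, 0, 8), 80);
    }
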
@@ -439,10 +474,67 @@ macro_rules! thin_vec {
}
impl<T> ThinVec<T> {
+ /// Creates a new empty ThinVec.
+ ///
+ /// This will not allocate.
pub fn new() -> ThinVec<T> {
ThinVec::with_capacity(0)
}
+ /// Constructs a new, empty `ThinVec<T>` with at least the specified capacity.
+ ///
+ /// The vector will be able to hold at least `capacity` elements without
+ /// reallocating. This method is allowed to allocate for more elements than
+ /// `capacity`. If `capacity` is 0, the vector will not allocate.
+ ///
+ /// It is important to note that although the returned vector has the
+ /// minimum *capacity* specified, the vector will have a zero *length*.
+ ///
+ /// If it is important to know the exact allocated capacity of a `ThinVec`,
+ /// always use the [`capacity`] method after construction.
+ ///
+ /// **NOTE**: unlike `Vec`, `ThinVec` **MUST** allocate once to keep track of non-zero
+ /// lengths. As such, we cannot provide the same guarantees about ThinVecs
+ /// of ZSTs not allocating. However the allocation never needs to be resized
+ /// to add more ZSTs, since the underlying array is still length 0.
+ ///
+ /// [Capacity and reallocation]: #capacity-and-reallocation
+ /// [`capacity`]: Vec::capacity
+ ///
+ /// # Panics
+ ///
+ /// Panics if the new capacity exceeds `isize::MAX` bytes.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use thin_vec::ThinVec;
+ ///
+ /// let mut vec = ThinVec::with_capacity(10);
+ ///
+ /// // The vector contains no items, even though it has capacity for more
+ /// assert_eq!(vec.len(), 0);
+ /// assert!(vec.capacity() >= 10);
+ ///
+ /// // These are all done without reallocating...
+ /// for i in 0..10 {
+ /// vec.push(i);
+ /// }
+ /// assert_eq!(vec.len(), 10);
+ /// assert!(vec.capacity() >= 10);
+ ///
+ /// // ...but this may make the vector reallocate
+ /// vec.push(11);
+ /// assert_eq!(vec.len(), 11);
+ /// assert!(vec.capacity() >= 11);
+ ///
+ /// // A vector of a zero-sized type will always over-allocate, since no
+ /// // space is needed to store the actual elements.
+ /// let vec_units = ThinVec::<()>::with_capacity(10);
+ ///
+ /// // Only true **without** the gecko-ffi feature!
+ /// // assert_eq!(vec_units.capacity(), usize::MAX);
+ /// ```
pub fn with_capacity(cap: usize) -> ThinVec<T> {
// `padding` contains ~static assertions against types that are
// incompatible with the current feature flags. We also call it to
@@ -525,16 +617,134 @@ impl<T> ThinVec<T> {
&mut *self.ptr()
}
+ /// Returns the number of elements in the vector, also referred to
+ /// as its 'length'.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use thin_vec::thin_vec;
+ ///
+ /// let a = thin_vec![1, 2, 3];
+ /// assert_eq!(a.len(), 3);
+ /// ```
pub fn len(&self) -> usize {
self.header().len()
}
+
+ /// Returns `true` if the vector contains no elements.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use thin_vec::ThinVec;
+ ///
+ /// let mut v = ThinVec::new();
+ /// assert!(v.is_empty());
+ ///
+ /// v.push(1);
+ /// assert!(!v.is_empty());
+ /// ```
pub fn is_empty(&self) -> bool {
self.len() == 0
}
+
+ /// Returns the number of elements the vector can hold without
+ /// reallocating.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use thin_vec::ThinVec;
+ ///
+ /// let vec: ThinVec<i32> = ThinVec::with_capacity(10);
+ /// assert_eq!(vec.capacity(), 10);
+ /// ```
pub fn capacity(&self) -> usize {
self.header().cap()
}
+ /// Forces the length of the vector to `new_len`.
+ ///
+ /// This is a low-level operation that maintains none of the normal
+ /// invariants of the type. Normally changing the length of a vector
+ /// is done using one of the safe operations instead, such as
+ /// [`truncate`], [`resize`], [`extend`], or [`clear`].
+ ///
+ /// [`truncate`]: ThinVec::truncate
+ /// [`resize`]: ThinVec::resize
+ /// [`extend`]: ThinVec::extend
+ /// [`clear`]: ThinVec::clear
+ ///
+ /// # Safety
+ ///
+ /// - `new_len` must be less than or equal to [`capacity()`].
+ /// - The elements at `old_len..new_len` must be initialized.
+ ///
+ /// [`capacity()`]: ThinVec::capacity
+ ///
+ /// # Examples
+ ///
+ /// This method can be useful for situations in which the vector
+ /// is serving as a buffer for other code, particularly over FFI:
+ ///
+ /// ```no_run
+ /// use thin_vec::ThinVec;
+ ///
+ /// # // This is just a minimal skeleton for the doc example;
+ /// # // don't use this as a starting point for a real library.
+ /// # pub struct StreamWrapper { strm: *mut std::ffi::c_void }
+ /// # const Z_OK: i32 = 0;
+ /// # extern "C" {
+ /// # fn deflateGetDictionary(
+ /// # strm: *mut std::ffi::c_void,
+ /// # dictionary: *mut u8,
+ /// # dictLength: *mut usize,
+ /// # ) -> i32;
+ /// # }
+ /// # impl StreamWrapper {
+ /// pub fn get_dictionary(&self) -> Option<ThinVec<u8>> {
+ /// // Per the FFI method's docs, "32768 bytes is always enough".
+ /// let mut dict = ThinVec::with_capacity(32_768);
+ /// let mut dict_length = 0;
+ /// // SAFETY: When `deflateGetDictionary` returns `Z_OK`, it holds that:
+ /// // 1. `dict_length` elements were initialized.
+ /// // 2. `dict_length` <= the capacity (32_768)
+ /// // which makes `set_len` safe to call.
+ /// unsafe {
+ /// // Make the FFI call...
+ /// let r = deflateGetDictionary(self.strm, dict.as_mut_ptr(), &mut dict_length);
+ /// if r == Z_OK {
+ /// // ...and update the length to what was initialized.
+ /// dict.set_len(dict_length);
+ /// Some(dict)
+ /// } else {
+ /// None
+ /// }
+ /// }
+ /// }
+ /// # }
+ /// ```
+ ///
+ /// While the following example is sound, there is a memory leak since
+ /// the inner vectors were not freed prior to the `set_len` call:
+ ///
+ /// ```no_run
+ /// use thin_vec::thin_vec;
+ ///
+ /// let mut vec = thin_vec![thin_vec![1, 0, 0],
+ /// thin_vec![0, 1, 0],
+ /// thin_vec![0, 0, 1]];
+ /// // SAFETY:
+ /// // 1. `old_len..0` is empty so no elements need to be initialized.
+ /// // 2. `0 <= capacity` always holds whatever `capacity` is.
+ /// unsafe {
+ /// vec.set_len(0);
+ /// }
+ /// ```
+ ///
+ /// Normally, here, one would use [`clear`] instead to correctly drop
+ /// the contents and thus not leak memory.
pub unsafe fn set_len(&mut self, len: usize) {
if self.is_singleton() {
// A prerequisite of `Vec::set_len` is that `new_len` must be
@@ -550,6 +760,21 @@ impl<T> ThinVec<T> {
self.header_mut().set_len(len)
}
+ /// Appends an element to the back of a collection.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the new capacity exceeds `isize::MAX` bytes.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use thin_vec::thin_vec;
+ ///
+ /// let mut vec = thin_vec![1, 2];
+ /// vec.push(3);
+ /// assert_eq!(vec, [1, 2, 3]);
+ /// ```
pub fn push(&mut self, val: T) {
let old_len = self.len();
if old_len == self.capacity() {
@@ -561,6 +786,18 @@ impl<T> ThinVec<T> {
}
}
+ /// Removes the last element from a vector and returns it, or [`None`] if it
+ /// is empty.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use thin_vec::thin_vec;
+ ///
+ /// let mut vec = thin_vec![1, 2, 3];
+ /// assert_eq!(vec.pop(), Some(3));
+ /// assert_eq!(vec, [1, 2]);
+ /// ```
pub fn pop(&mut self) -> Option<T> {
let old_len = self.len();
if old_len == 0 {
@@ -573,6 +810,24 @@ impl<T> ThinVec<T> {
}
}
+ /// Inserts an element at position `index` within the vector, shifting all
+ /// elements after it to the right.
+ ///
+ /// # Panics
+ ///
+ /// Panics if `index > len`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use thin_vec::thin_vec;
+ ///
+ /// let mut vec = thin_vec![1, 2, 3];
+ /// vec.insert(1, 4);
+ /// assert_eq!(vec, [1, 4, 2, 3]);
+ /// vec.insert(4, 5);
+ /// assert_eq!(vec, [1, 4, 2, 3, 5]);
+ /// ```
pub fn insert(&mut self, idx: usize, elem: T) {
let old_len = self.len();
@@ -588,6 +843,29 @@ impl<T> ThinVec<T> {
}
}
+ /// Removes and returns the element at position `index` within the vector,
+ /// shifting all elements after it to the left.
+ ///
+ /// Note: Because this shifts over the remaining elements, it has a
+ /// worst-case performance of *O*(*n*). If you don't need the order of elements
+ /// to be preserved, use [`swap_remove`] instead. If you'd like to remove
+ /// elements from the beginning of the `ThinVec`, consider using `std::collections::VecDeque`.
+ ///
+ /// [`swap_remove`]: ThinVec::swap_remove
+ ///
+ /// # Panics
+ ///
+ /// Panics if `index` is out of bounds.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use thin_vec::thin_vec;
+ ///
+ /// let mut v = thin_vec![1, 2, 3];
+ /// assert_eq!(v.remove(1), 2);
+ /// assert_eq!(v, [1, 3]);
+ /// ```
pub fn remove(&mut self, idx: usize) -> T {
let old_len = self.len();
@@ -602,6 +880,32 @@ impl<T> ThinVec<T> {
}
}
+ /// Removes an element from the vector and returns it.
+ ///
+ /// The removed element is replaced by the last element of the vector.
+ ///
+ /// This does not preserve ordering, but is *O*(1).
+ /// If you need to preserve the element order, use [`remove`] instead.
+ ///
+ /// [`remove`]: ThinVec::remove
+ ///
+ /// # Panics
+ ///
+ /// Panics if `index` is out of bounds.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use thin_vec::thin_vec;
+ ///
+ /// let mut v = thin_vec!["foo", "bar", "baz", "qux"];
+ ///
+ /// assert_eq!(v.swap_remove(1), "bar");
+ /// assert_eq!(v, ["foo", "qux", "baz"]);
+ ///
+ /// assert_eq!(v.swap_remove(0), "foo");
+ /// assert_eq!(v, ["baz", "qux"]);
+ /// ```
pub fn swap_remove(&mut self, idx: usize) -> T {
let old_len = self.len();
@@ -615,6 +919,54 @@ impl<T> ThinVec<T> {
}
}
+ /// Shortens the vector, keeping the first `len` elements and dropping
+ /// the rest.
+ ///
+ /// If `len` is greater than the vector's current length, this has no
+ /// effect.
+ ///
+ /// The [`drain`] method can emulate `truncate`, but causes the excess
+ /// elements to be returned instead of dropped.
+ ///
+ /// Note that this method has no effect on the allocated capacity
+ /// of the vector.
+ ///
+ /// # Examples
+ ///
+ /// Truncating a five element vector to two elements:
+ ///
+ /// ```
+ /// use thin_vec::thin_vec;
+ ///
+ /// let mut vec = thin_vec![1, 2, 3, 4, 5];
+ /// vec.truncate(2);
+ /// assert_eq!(vec, [1, 2]);
+ /// ```
+ ///
+ /// No truncation occurs when `len` is greater than the vector's current
+ /// length:
+ ///
+ /// ```
+ /// use thin_vec::thin_vec;
+ ///
+ /// let mut vec = thin_vec![1, 2, 3];
+ /// vec.truncate(8);
+ /// assert_eq!(vec, [1, 2, 3]);
+ /// ```
+ ///
+ /// Truncating when `len == 0` is equivalent to calling the [`clear`]
+ /// method.
+ ///
+ /// ```
+ /// use thin_vec::thin_vec;
+ ///
+ /// let mut vec = thin_vec![1, 2, 3];
+ /// vec.truncate(0);
+ /// assert_eq!(vec, []);
+ /// ```
+ ///
+ /// [`clear`]: ThinVec::clear
+ /// [`drain`]: ThinVec::drain
pub fn truncate(&mut self, len: usize) {
unsafe {
// drop any extra elements
@@ -628,6 +980,20 @@ impl<T> ThinVec<T> {
}
}
+ /// Clears the vector, removing all values.
+ ///
+ /// Note that this method has no effect on the allocated capacity
+ /// of the vector.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use thin_vec::thin_vec;
+ ///
+ /// let mut v = thin_vec![1, 2, 3];
+ /// v.clear();
+ /// assert!(v.is_empty());
+ /// ```
pub fn clear(&mut self) {
unsafe {
ptr::drop_in_place(&mut self[..]);
@@ -635,10 +1001,34 @@ impl<T> ThinVec<T> {
}
}
+ /// Extracts a slice containing the entire vector.
+ ///
+ /// Equivalent to `&s[..]`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use thin_vec::thin_vec;
+ /// use std::io::{self, Write};
+ /// let buffer = thin_vec![1, 2, 3, 5, 8];
+ /// io::sink().write(buffer.as_slice()).unwrap();
+ /// ```
pub fn as_slice(&self) -> &[T] {
unsafe { slice::from_raw_parts(self.data_raw(), self.len()) }
}
+ /// Extracts a mutable slice of the entire vector.
+ ///
+ /// Equivalent to `&mut s[..]`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use thin_vec::thin_vec;
+ /// use std::io::{self, Read};
+ /// let mut buffer = vec![0; 3];
+ /// io::repeat(0b101).read_exact(buffer.as_mut_slice()).unwrap();
+ /// ```
pub fn as_mut_slice(&mut self) -> &mut [T] {
unsafe { slice::from_raw_parts_mut(self.data_raw(), self.len()) }
}
@@ -751,6 +1141,22 @@ impl<T> ThinVec<T> {
}
}
+ /// Shrinks the capacity of the vector as much as possible.
+ ///
+ /// It will drop down as close as possible to the length but the allocator
+ /// may still inform the vector that there is space for a few more elements.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use thin_vec::ThinVec;
+ ///
+ /// let mut vec = ThinVec::with_capacity(10);
+ /// vec.extend([1, 2, 3]);
+ /// assert_eq!(vec.capacity(), 10);
+ /// vec.shrink_to_fit();
+ /// assert!(vec.capacity() >= 3);
+ /// ```
pub fn shrink_to_fit(&mut self) {
let old_cap = self.capacity();
let new_cap = self.len();
@@ -915,6 +1321,26 @@ impl<T> ThinVec<T> {
}
}
+ /// Splits the collection into two at the given index.
+ ///
+ /// Returns a newly allocated vector containing the elements in the range
+ /// `[at, len)`. After the call, the original vector will be left containing
+ /// the elements `[0, at)` with its previous capacity unchanged.
+ ///
+ /// # Panics
+ ///
+ /// Panics if `at > len`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use thin_vec::thin_vec;
+ ///
+ /// let mut vec = thin_vec![1, 2, 3];
+ /// let vec2 = vec.split_off(1);
+ /// assert_eq!(vec, [1]);
+ /// assert_eq!(vec2, [2, 3]);
+ /// ```
pub fn split_off(&mut self, at: usize) -> ThinVec<T> {
let old_len = self.len();
let new_vec_len = old_len - at;
@@ -933,14 +1359,64 @@ impl<T> ThinVec<T> {
}
}
+ /// Moves all the elements of `other` into `self`, leaving `other` empty.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the new capacity exceeds `isize::MAX` bytes.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use thin_vec::thin_vec;
+ ///
+ /// let mut vec = thin_vec![1, 2, 3];
+ /// let mut vec2 = thin_vec![4, 5, 6];
+ /// vec.append(&mut vec2);
+ /// assert_eq!(vec, [1, 2, 3, 4, 5, 6]);
+ /// assert_eq!(vec2, []);
+ /// ```
pub fn append(&mut self, other: &mut ThinVec<T>) {
self.extend(other.drain(..))
}
+ /// Removes the specified range from the vector in bulk, returning all
+ /// removed elements as an iterator. If the iterator is dropped before
+ /// being fully consumed, it drops the remaining removed elements.
+ ///
+ /// The returned iterator keeps a mutable borrow on the vector to optimize
+ /// its implementation.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the starting point is greater than the end point or if
+ /// the end point is greater than the length of the vector.
+ ///
+ /// # Leaking
+ ///
+ /// If the returned iterator goes out of scope without being dropped (due to
+ /// [`mem::forget`], for example), the vector may have lost and leaked
+ /// elements arbitrarily, including elements outside the range.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use thin_vec::{ThinVec, thin_vec};
+ ///
+ /// let mut v = thin_vec![1, 2, 3];
+ /// let u: ThinVec<_> = v.drain(1..).collect();
+ /// assert_eq!(v, &[1]);
+ /// assert_eq!(u, &[2, 3]);
+ ///
+ /// // A full range clears the vector, like `clear()` does
+ /// v.drain(..);
+ /// assert_eq!(v, &[]);
+ /// ```
pub fn drain<R>(&mut self, range: R) -> Drain<'_, T>
where
R: RangeBounds<usize>,
{
+ // See comments in the Drain struct itself for details on this
let len = self.len();
let start = match range.start_bound() {
Bound::Included(&n) => n,
@@ -971,6 +1447,53 @@ impl<T> ThinVec<T> {
}
}
+ /// Creates a splicing iterator that replaces the specified range in the vector
+ /// with the given `replace_with` iterator and yields the removed items.
+ /// `replace_with` does not need to be the same length as `range`.
+ ///
+ /// `range` is removed even if the iterator is not consumed until the end.
+ ///
+ /// It is unspecified how many elements are removed from the vector
+ /// if the `Splice` value is leaked.
+ ///
+ /// The input iterator `replace_with` is only consumed when the `Splice` value is dropped.
+ ///
+ /// This is optimal if:
+ ///
+ /// * The tail (elements in the vector after `range`) is empty,
+ /// * or `replace_with` yields no more elements than the length of `range`,
+ /// * or the lower bound of its `size_hint()` is exact.
+ ///
+ /// Otherwise, a temporary vector is allocated and the tail is moved twice.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the starting point is greater than the end point or if
+ /// the end point is greater than the length of the vector.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use thin_vec::{ThinVec, thin_vec};
+ ///
+ /// let mut v = thin_vec![1, 2, 3, 4];
+ /// let new = [7, 8, 9];
+ /// let u: ThinVec<_> = v.splice(1..3, new).collect();
+ /// assert_eq!(v, &[1, 7, 8, 9, 4]);
+ /// assert_eq!(u, &[2, 3]);
+ /// ```
+ #[inline]
+ pub fn splice<R, I>(&mut self, range: R, replace_with: I) -> Splice<'_, I::IntoIter>
+ where
+ R: RangeBounds<usize>,
+ I: IntoIterator<Item = T>,
+ {
+ Splice {
+ drain: self.drain(range),
+ replace_with: replace_with.into_iter(),
+ }
+ }
+
/// Resize the buffer and update its capacity, without changing the length.
/// Unsafe because it can cause length to be greater than capacity.
unsafe fn reallocate(&mut self, new_cap: usize) {
@@ -1018,7 +1541,12 @@ impl<T> ThinVec<T> {
#[cfg(feature = "gecko-ffi")]
#[inline]
+ #[allow(unused_unsafe)]
fn is_singleton(&self) -> bool {
+ // NOTE: the tests will complain that this "unsafe" isn't needed, but it *IS*!
+ // In production this refers to an *extern static* which *is* unsafe to reference.
+ // In tests this refers to a local static because we don't have Firefox's codebase
+ // providing the symbol!
unsafe { self.ptr.as_ptr() as *const Header == &EMPTY_HEADER }
}
@@ -1082,6 +1610,27 @@ impl<T: Clone> ThinVec<T> {
}
}
+ /// Clones and appends all elements in a slice to the `ThinVec`.
+ ///
+ /// Iterates over the slice `other`, clones each element, and then appends
+ /// it to this `ThinVec`. The `other` slice is traversed in-order.
+ ///
+ /// Note that this function is the same as [`extend`], except that it is
+ /// specialized to work with slices instead. If and when Rust gets
+ /// specialization this function will likely be deprecated (but still
+ /// available).
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use thin_vec::thin_vec;
+ ///
+ /// let mut vec = thin_vec![1];
+ /// vec.extend_from_slice(&[2, 3, 4]);
+ /// assert_eq!(vec, [1, 2, 3, 4]);
+ /// ```
+ ///
+ /// [`extend`]: ThinVec::extend
pub fn extend_from_slice(&mut self, other: &[T]) {
self.extend(other.iter().cloned())
}
@@ -1415,16 +1964,256 @@ impl<T> FromIterator<T> for ThinVec<T> {
}
}
+impl<T: Clone> From<&[T]> for ThinVec<T> {
+ /// Allocate a `ThinVec<T>` and fill it by cloning `s`'s items.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use thin_vec::{ThinVec, thin_vec};
+ ///
+ /// assert_eq!(ThinVec::from(&[1, 2, 3][..]), thin_vec![1, 2, 3]);
+ /// ```
+ fn from(s: &[T]) -> ThinVec<T> {
+ s.iter().cloned().collect()
+ }
+}
+
+#[cfg(not(no_global_oom_handling))]
+impl<T: Clone> From<&mut [T]> for ThinVec<T> {
+ /// Allocate a `ThinVec<T>` and fill it by cloning `s`'s items.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use thin_vec::{ThinVec, thin_vec};
+ ///
+ /// assert_eq!(ThinVec::from(&mut [1, 2, 3][..]), thin_vec![1, 2, 3]);
+ /// ```
+ fn from(s: &mut [T]) -> ThinVec<T> {
+ s.iter().cloned().collect()
+ }
+}
+
+impl<T, const N: usize> From<[T; N]> for ThinVec<T> {
+ /// Allocate a `ThinVec<T>` and move `s`'s items into it.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use thin_vec::{ThinVec, thin_vec};
+ ///
+ /// assert_eq!(ThinVec::from([1, 2, 3]), thin_vec![1, 2, 3]);
+ /// ```
+ fn from(s: [T; N]) -> ThinVec<T> {
+ std::iter::IntoIterator::into_iter(s).collect()
+ }
+}
+
+impl<T> From<Box<[T]>> for ThinVec<T> {
+ /// Convert a boxed slice into a vector by moving its items into a
+ /// freshly allocated buffer.
+ ///
+ /// **NOTE:** unlike `std`, this must reallocate to change the layout!
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use thin_vec::{ThinVec, thin_vec};
+ ///
+ /// let b: Box<[i32]> = thin_vec![1, 2, 3].into_iter().collect();
+ /// assert_eq!(ThinVec::from(b), thin_vec![1, 2, 3]);
+ /// ```
+ fn from(s: Box<[T]>) -> Self {
+ // Can just lean on the fact that `Box<[T]>` -> `Vec<T>` is Free.
+ Vec::from(s).into_iter().collect()
+ }
+}
+
+impl<T> From<Vec<T>> for ThinVec<T> {
+ /// Convert a `std::Vec` into a `ThinVec`.
+ ///
+ /// **NOTE:** this must reallocate to change the layout!
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use thin_vec::{ThinVec, thin_vec};
+ ///
+ /// let b: Vec<i32> = vec![1, 2, 3];
+ /// assert_eq!(ThinVec::from(b), thin_vec![1, 2, 3]);
+ /// ```
+ fn from(s: Vec<T>) -> Self {
+ s.into_iter().collect()
+ }
+}
+
+impl<T> From<ThinVec<T>> for Vec<T> {
+ /// Convert a `ThinVec` into a `std::Vec`.
+ ///
+ /// **NOTE:** this must reallocate to change the layout!
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use thin_vec::{ThinVec, thin_vec};
+ ///
+ /// let b: ThinVec<i32> = thin_vec![1, 2, 3];
+ /// assert_eq!(Vec::from(b), vec![1, 2, 3]);
+ /// ```
+ fn from(s: ThinVec<T>) -> Self {
+ s.into_iter().collect()
+ }
+}
+
+impl<T> From<ThinVec<T>> for Box<[T]> {
+ /// Convert a vector into a boxed slice.
+ ///
+ /// The items are moved into a newly-allocated buffer with exactly
+ /// the right capacity.
+ ///
+ /// **NOTE:** unlike `std`, this must reallocate to change the layout!
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use thin_vec::{ThinVec, thin_vec};
+ /// assert_eq!(Box::from(thin_vec![1, 2, 3]), thin_vec![1, 2, 3].into_iter().collect());
+ /// ```
+ fn from(v: ThinVec<T>) -> Self {
+ v.into_iter().collect()
+ }
+}
+
+impl From<&str> for ThinVec<u8> {
+ /// Allocate a `ThinVec<u8>` and fill it with a UTF-8 string.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use thin_vec::{ThinVec, thin_vec};
+ ///
+ /// assert_eq!(ThinVec::from("123"), thin_vec![b'1', b'2', b'3']);
+ /// ```
+ fn from(s: &str) -> ThinVec<u8> {
+ From::from(s.as_bytes())
+ }
+}
+
+impl<T, const N: usize> TryFrom<ThinVec<T>> for [T; N] {
+ type Error = ThinVec<T>;
+
+ /// Gets the entire contents of the `ThinVec<T>` as an array,
+ /// if its size exactly matches that of the requested array.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use thin_vec::{ThinVec, thin_vec};
+ /// use std::convert::TryInto;
+ ///
+ /// assert_eq!(thin_vec![1, 2, 3].try_into(), Ok([1, 2, 3]));
+ /// assert_eq!(<ThinVec<i32>>::new().try_into(), Ok([]));
+ /// ```
+ ///
+ /// If the length doesn't match, the input comes back in `Err`:
+ /// ```
+ /// use thin_vec::{ThinVec, thin_vec};
+ /// use std::convert::TryInto;
+ ///
+ /// let r: Result<[i32; 4], _> = (0..10).collect::<ThinVec<_>>().try_into();
+ /// assert_eq!(r, Err(thin_vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9]));
+ /// ```
+ ///
+ /// If you're fine with just getting a prefix of the `ThinVec<T>`,
+ /// you can call [`.truncate(N)`](ThinVec::truncate) first.
+ /// ```
+ /// use thin_vec::{ThinVec, thin_vec};
+ /// use std::convert::TryInto;
+ ///
+ /// let mut v = ThinVec::from("hello world");
+ /// v.sort();
+ /// v.truncate(2);
+ /// let [a, b]: [_; 2] = v.try_into().unwrap();
+ /// assert_eq!(a, b' ');
+ /// assert_eq!(b, b'd');
+ /// ```
+ fn try_from(mut vec: ThinVec<T>) -> Result<[T; N], ThinVec<T>> {
+ if vec.len() != N {
+ return Err(vec);
+ }
+
+ // SAFETY: `.set_len(0)` is always sound.
+ unsafe { vec.set_len(0) };
+
+ // SAFETY: A `ThinVec`'s pointer is always aligned properly, and
+ // the alignment the array needs is the same as that of its items.
+ // We checked earlier that the vector holds exactly `N` items.
+ // The items will not double-drop as the `set_len`
+ // tells the `ThinVec` not to also drop them.
+ let array = unsafe { ptr::read(vec.data_raw() as *const [T; N]) };
+ Ok(array)
+ }
+}
+
+/// An iterator that moves out of a vector.
+///
+/// This `struct` is created by the [`ThinVec::into_iter`][] method
+/// (provided by the [`IntoIterator`] trait).
+///
+/// # Example
+///
+/// ```
+/// use thin_vec::thin_vec;
+///
+/// let v = thin_vec![0, 1, 2];
+/// let iter: thin_vec::IntoIter<_> = v.into_iter();
+/// ```
pub struct IntoIter<T> {
vec: ThinVec<T>,
start: usize,
}
-pub struct Drain<'a, T> {
- iter: IterMut<'a, T>,
- vec: *mut ThinVec<T>,
- end: usize,
- tail: usize,
+impl<T> IntoIter<T> {
+ /// Returns the remaining items of this iterator as a slice.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use thin_vec::thin_vec;
+ ///
+ /// let vec = thin_vec!['a', 'b', 'c'];
+ /// let mut into_iter = vec.into_iter();
+ /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
+ /// let _ = into_iter.next().unwrap();
+ /// assert_eq!(into_iter.as_slice(), &['b', 'c']);
+ /// ```
+ pub fn as_slice(&self) -> &[T] {
+ unsafe { slice::from_raw_parts(self.vec.data_raw().add(self.start), self.len()) }
+ }
+
+ /// Returns the remaining items of this iterator as a mutable slice.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use thin_vec::thin_vec;
+ ///
+ /// let vec = thin_vec!['a', 'b', 'c'];
+ /// let mut into_iter = vec.into_iter();
+ /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
+ /// into_iter.as_mut_slice()[2] = 'z';
+ /// assert_eq!(into_iter.next().unwrap(), 'a');
+ /// assert_eq!(into_iter.next().unwrap(), 'b');
+ /// assert_eq!(into_iter.next().unwrap(), 'z');
+ /// ```
+ pub fn as_mut_slice(&mut self) -> &mut [T] {
+ unsafe { &mut *self.as_raw_mut_slice() }
+ }
+
+ fn as_raw_mut_slice(&mut self) -> *mut [T] {
+ unsafe { ptr::slice_from_raw_parts_mut(self.vec.data_raw().add(self.start), self.len()) }
+ }
}
impl<T> Iterator for IntoIter<T> {
@@ -1452,12 +2241,19 @@ impl<T> DoubleEndedIterator for IntoIter<T> {
if self.start == self.vec.len() {
None
} else {
- // FIXME?: extra bounds check
self.vec.pop()
}
}
}
+impl<T> ExactSizeIterator for IntoIter<T> {}
+
+impl<T> std::iter::FusedIterator for IntoIter<T> {}
+
+// SAFETY: the length calculation is trivial, we're an array! And if it's wrong we're So Screwed.
+#[cfg(feature = "unstable")]
+unsafe impl<T> std::iter::TrustedLen for IntoIter<T> {}
+
impl<T> Drop for IntoIter<T> {
#[inline]
fn drop(&mut self) {
@@ -1477,6 +2273,126 @@ impl<T> Drop for IntoIter<T> {
}
}
+impl<T: fmt::Debug> fmt::Debug for IntoIter<T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
+ }
+}
+
+impl<T> AsRef<[T]> for IntoIter<T> {
+ fn as_ref(&self) -> &[T] {
+ self.as_slice()
+ }
+}
+
+impl<T: Clone> Clone for IntoIter<T> {
+ #[allow(clippy::into_iter_on_ref)]
+ fn clone(&self) -> Self {
+ // Just create a new `ThinVec` from the remaining elements and IntoIter it
+ self.as_slice()
+ .into_iter()
+ .cloned()
+ .collect::<ThinVec<_>>()
+ .into_iter()
+ }
+}
+
+/// A draining iterator for `ThinVec<T>`.
+///
+/// This `struct` is created by [`ThinVec::drain`].
+/// See its documentation for more.
+///
+/// # Example
+///
+/// ```
+/// use thin_vec::thin_vec;
+///
+/// let mut v = thin_vec![0, 1, 2];
+/// let iter: thin_vec::Drain<_> = v.drain(..);
+/// ```
+pub struct Drain<'a, T> {
+ // Ok so ThinVec::drain takes a range of the ThinVec and yields the contents by-value,
+ // then backshifts the array. During iteration the array is in an unsound state
+ // (big deinitialized hole in it), and this is very dangerous.
+ //
+ // Our first line of defense is the borrow checker: we have a mutable borrow, so nothing
+ // can access the ThinVec while we exist. As long as we make sure the ThinVec is in a valid
+ // state again before we release the borrow, everything should be A-OK! We do this cleanup
+ // in our Drop impl.
+ //
+ // Unfortunately, that's unsound, because mem::forget exists and The Leakpocalypse Is Real.
+ // So we can't actually guarantee our destructor runs before our borrow expires. Thankfully
+ // this isn't fatal: we can just set the ThinVec's len to 0 at the start, so if anyone
+ // leaks the Drain, we just leak everything the ThinVec contained out of spite! If they
+ // *don't* leak us then we can properly repair the len in our Drop impl. This is known
+ // as "leak amplification", and is the same approach std uses.
+ //
+ // But we can do slightly better than setting the len to 0! The drain breaks us up into
+ // these parts:
+ //
+ // ```text
+ //
+ // [A, B, C, D, E, F, G, H, _, _]
+ // ____ __________ ____ ____
+ // | | | |
+ // prefix drain tail spare-cap
+ // ```
+ //
+ // As the drain iterator is consumed from both ends (DoubleEnded!), we'll start to look
+ // like this:
+ //
+ // ```text
+ // [A, B, _, _, E, _, G, H, _, _]
+ // ____ __________ ____ ____
+ // | | | |
+ // prefix drain tail spare-cap
+ // ```
+ //
+ // Note that the prefix is always valid and untouched, as such we can set the len
+ // to the prefix when doing leak-amplification. As a bonus, we can use this value
+ // to remember where the drain range starts. At the end we'll look like this
+ // (we exhaust ourselves in our Drop impl):
+ //
+ // ```text
+ // [A, B, _, _, _, _, G, H, _, _]
+ // _____ __________ _____ ____
+ // | | | |
+ // len drain tail spare-cap
+ // ```
+ //
+ // And need to become this:
+ //
+ // ```text
+ // [A, B, G, H, _, _, _, _, _, _]
+ // ___________ ________________
+ // | |
+ // len spare-cap
+ // ```
+ //
+ // All this requires is moving the tail back to the prefix (stored in `len`)
+ // and setting `len` to `len + tail_len` to undo the leak amplification.
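+ //
+ // As a quick illustration of the observable effect (this describes the current
+ // strategy only, not a promise to callers; the `drain` docs just say the contents
+ // are unspecified after a leak), something like the following holds:
+ //
+ // ```text
+ // use thin_vec::thin_vec;
+ //
+ // let mut v = thin_vec![1, 2, 3, 4, 5];
+ // core::mem::forget(v.drain(2..4)); // leak the Drain
+ // assert_eq!(v, [1, 2]);            // only the prefix survives
+ // // 3 and 4 (the drained range) and 5 (the tail) are leaked, never dropped.
+ // ```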
+ /// An iterator over the elements we're removing.
+ ///
+ /// As we go we'll be `read`ing out of the mutable refs yielded by this.
+ /// It's ok to use IterMut here because it promises to only take mutable
+ /// refs to the parts we haven't yielded yet.
+ ///
+ /// A downside of this (and the *mut below) is that it makes this type invariant, when
+ /// technically it could be covariant?
+ iter: IterMut<'a, T>,
+ /// The actual ThinVec, which we need to hold onto to undo the leak amplification
+ /// and backshift the tail into place. This should only be accessed when we're
+ /// completely done with the IterMut in the `drop` impl of this type (or miri will get mad).
+ ///
+ /// Since we set the `len` of this to be before `IterMut`, we can use that `len`
+ /// to retrieve the index of the start of the drain range later.
+ vec: *mut ThinVec<T>,
+ /// The one-past-the-end index of the drain range, or equivalently the start of the tail.
+ end: usize,
+ /// The length of the tail.
+ tail: usize,
+}
+
impl<'a, T> Iterator for Drain<'a, T> {
type Item = T;
fn next(&mut self) -> Option<T> {
@@ -1496,6 +2412,12 @@ impl<'a, T> DoubleEndedIterator for Drain<'a, T> {
impl<'a, T> ExactSizeIterator for Drain<'a, T> {}
+// SAFETY: we need to keep track of this perfectly Or Else anyway!
+#[cfg(feature = "unstable")]
+unsafe impl<T> std::iter::TrustedLen for Drain<'_, T> {}
+
+impl<T> std::iter::FusedIterator for Drain<'_, T> {}
+
impl<'a, T> Drop for Drain<'a, T> {
fn drop(&mut self) {
// Consume the rest of the iterator.
@@ -1517,6 +2439,167 @@ impl<'a, T> Drop for Drain<'a, T> {
}
}
+impl<T: fmt::Debug> fmt::Debug for Drain<'_, T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_tuple("Drain").field(&self.iter.as_slice()).finish()
+ }
+}
+
+impl<'a, T> Drain<'a, T> {
+ /// Returns the remaining items of this iterator as a slice.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use thin_vec::thin_vec;
+ ///
+ /// let mut vec = thin_vec!['a', 'b', 'c'];
+ /// let mut drain = vec.drain(..);
+ /// assert_eq!(drain.as_slice(), &['a', 'b', 'c']);
+ /// let _ = drain.next().unwrap();
+ /// assert_eq!(drain.as_slice(), &['b', 'c']);
+ /// ```
+ #[must_use]
+ pub fn as_slice(&self) -> &[T] {
+ // SAFETY: this is A-OK because the elements that the underlying
+ // iterator still points at are still logically initialized and contiguous.
+ self.iter.as_slice()
+ }
+}
+
+impl<'a, T> AsRef<[T]> for Drain<'a, T> {
+ fn as_ref(&self) -> &[T] {
+ self.as_slice()
+ }
+}
+
+/// A splicing iterator for `ThinVec`.
+///
+/// This struct is created by [`ThinVec::splice`][].
+/// See its documentation for more.
+///
+/// # Example
+///
+/// ```
+/// use thin_vec::thin_vec;
+///
+/// let mut v = thin_vec![0, 1, 2];
+/// let new = [7, 8];
+/// let iter: thin_vec::Splice<_> = v.splice(1.., new);
+/// ```
+#[derive(Debug)]
+pub struct Splice<'a, I: Iterator + 'a> {
+ drain: Drain<'a, I::Item>,
+ replace_with: I,
+}
+
+impl<I: Iterator> Iterator for Splice<'_, I> {
+ type Item = I::Item;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.drain.next()
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.drain.size_hint()
+ }
+}
+
+impl<I: Iterator> DoubleEndedIterator for Splice<'_, I> {
+ fn next_back(&mut self) -> Option<Self::Item> {
+ self.drain.next_back()
+ }
+}
+
+impl<I: Iterator> ExactSizeIterator for Splice<'_, I> {}
+
+impl<I: Iterator> Drop for Splice<'_, I> {
+ fn drop(&mut self) {
+ // Ensure we've fully drained out the range
+ self.drain.by_ref().for_each(drop);
+
+ unsafe {
+ // If there's no tail elements, then the inner ThinVec is already
+ // correct and we can just extend it like normal.
+ if self.drain.tail == 0 {
+ (*self.drain.vec).extend(self.replace_with.by_ref());
+ return;
+ }
+
+ // First fill the range left by drain().
+ if !self.drain.fill(&mut self.replace_with) {
+ return;
+ }
+
+ // There may be more elements. Use the lower bound as an estimate.
+ let (lower_bound, _upper_bound) = self.replace_with.size_hint();
+ if lower_bound > 0 {
+ self.drain.move_tail(lower_bound);
+ if !self.drain.fill(&mut self.replace_with) {
+ return;
+ }
+ }
+
+ // Collect any remaining elements.
+ // This is a zero-length vector which does not allocate if `lower_bound` was exact.
+ let mut collected = self
+ .replace_with
+ .by_ref()
+ .collect::<Vec<I::Item>>()
+ .into_iter();
+ // Now we have an exact count.
+ if collected.len() > 0 {
+ self.drain.move_tail(collected.len());
+ let filled = self.drain.fill(&mut collected);
+ debug_assert!(filled);
+ debug_assert_eq!(collected.len(), 0);
+ }
+ }
+ // Let `Drain::drop` move the tail back if necessary and restore `vec.len`.
+ }
+}
+
+/// Private helper methods for `Splice::drop`
+impl<T> Drain<'_, T> {
+ /// The range from `self.vec.len` to `self.end` contains elements
+ /// that have been moved out.
+ /// Fill that range as much as possible with new elements from the `replace_with` iterator.
+ /// Returns `true` if we filled the entire range. (`replace_with.next()` didn’t return `None`.)
+ unsafe fn fill<I: Iterator<Item = T>>(&mut self, replace_with: &mut I) -> bool {
+ let vec = unsafe { &mut *self.vec };
+ let range_start = vec.len();
+ let range_end = self.end;
+ let range_slice = unsafe {
+ slice::from_raw_parts_mut(vec.data_raw().add(range_start), range_end - range_start)
+ };
+
+ for place in range_slice {
+ if let Some(new_item) = replace_with.next() {
+ unsafe { ptr::write(place, new_item) };
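+ // Bump `len` after every single write (rather than once at the end) so that,
+ // if `replace_with.next()` panics on a later iteration, the elements written so
+ // far are already counted by the vector and are dropped normally instead of leaking.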
+ vec.set_len(vec.len() + 1);
+ } else {
+ return false;
+ }
+ }
+ true
+ }
+
+ /// Makes room for inserting more elements before the tail.
+ unsafe fn move_tail(&mut self, additional: usize) {
+ let vec = unsafe { &mut *self.vec };
+ let len = self.end + self.tail;
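+ // Ensure there is room for everything up to the end of the tail plus `additional`
+ // more slots. `reserve` may reallocate and move the buffer, which is why the copy
+ // below recomputes its pointers from `data_raw()` afterwards.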
+ vec.reserve(len.checked_add(additional).expect("capacity overflow"));
+
+ let new_tail_start = self.end + additional;
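+ // Shift the tail `additional` slots towards the end. The old and new tail ranges
+ // may overlap, so this uses `ptr::copy` (memmove semantics) rather than
+ // `ptr::copy_nonoverlapping`.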
+ unsafe {
+ let src = vec.data_raw().add(self.end);
+ let dst = vec.data_raw().add(new_tail_start);
+ ptr::copy(src, dst, self.tail);
+ }
+ self.end = new_tail_start;
+ }
+}
+
/// Write is implemented for `ThinVec<u8>` by appending to the vector.
/// The vector will grow as needed.
/// This implementation is identical to the one for `Vec<u8>`.
@@ -1755,6 +2838,19 @@ mod tests {
{
let mut v = ThinVec::<i32>::new();
+ assert_eq!(v.splice(.., []).len(), 0);
+
+ for _ in v.splice(.., []) {
+ unreachable!()
+ }
+
+ assert_eq!(v.len(), 0);
+ assert_eq!(v.capacity(), 0);
+ assert_eq!(&v[..], &[]);
+ }
+
+ {
+ let mut v = ThinVec::<i32>::new();
v.truncate(1);
assert_eq!(v.len(), 0);
assert_eq!(v.capacity(), 0);
@@ -2507,70 +3603,76 @@ mod std_tests {
v.drain(5..=5);
}
- /* TODO: implement splice?
- #[test]
- fn test_splice() {
- let mut v = thin_vec![1, 2, 3, 4, 5];
- let a = [10, 11, 12];
- v.splice(2..4, a.iter().cloned());
- assert_eq!(v, &[1, 2, 10, 11, 12, 5]);
- v.splice(1..3, Some(20));
- assert_eq!(v, &[1, 20, 11, 12, 5]);
- }
+ #[test]
+ fn test_splice() {
+ let mut v = thin_vec![1, 2, 3, 4, 5];
+ let a = [10, 11, 12];
+ v.splice(2..4, a.iter().cloned());
+ assert_eq!(v, &[1, 2, 10, 11, 12, 5]);
+ v.splice(1..3, Some(20));
+ assert_eq!(v, &[1, 20, 11, 12, 5]);
+ }
- #[test]
- fn test_splice_inclusive_range() {
- let mut v = thin_vec![1, 2, 3, 4, 5];
- let a = [10, 11, 12];
- let t1: ThinVec<_> = v.splice(2..=3, a.iter().cloned()).collect();
- assert_eq!(v, &[1, 2, 10, 11, 12, 5]);
- assert_eq!(t1, &[3, 4]);
- let t2: ThinVec<_> = v.splice(1..=2, Some(20)).collect();
- assert_eq!(v, &[1, 20, 11, 12, 5]);
- assert_eq!(t2, &[2, 10]);
- }
+ #[test]
+ fn test_splice_inclusive_range() {
+ let mut v = thin_vec![1, 2, 3, 4, 5];
+ let a = [10, 11, 12];
+ let t1: ThinVec<_> = v.splice(2..=3, a.iter().cloned()).collect();
+ assert_eq!(v, &[1, 2, 10, 11, 12, 5]);
+ assert_eq!(t1, &[3, 4]);
+ let t2: ThinVec<_> = v.splice(1..=2, Some(20)).collect();
+ assert_eq!(v, &[1, 20, 11, 12, 5]);
+ assert_eq!(t2, &[2, 10]);
+ }
- #[test]
- #[should_panic]
- fn test_splice_out_of_bounds() {
- let mut v = thin_vec![1, 2, 3, 4, 5];
- let a = [10, 11, 12];
- v.splice(5..6, a.iter().cloned());
- }
+ #[test]
+ #[should_panic]
+ fn test_splice_out_of_bounds() {
+ let mut v = thin_vec![1, 2, 3, 4, 5];
+ let a = [10, 11, 12];
+ v.splice(5..6, a.iter().cloned());
+ }
- #[test]
- #[should_panic]
- fn test_splice_inclusive_out_of_bounds() {
- let mut v = thin_vec![1, 2, 3, 4, 5];
- let a = [10, 11, 12];
- v.splice(5..=5, a.iter().cloned());
- }
+ #[test]
+ #[should_panic]
+ fn test_splice_inclusive_out_of_bounds() {
+ let mut v = thin_vec![1, 2, 3, 4, 5];
+ let a = [10, 11, 12];
+ v.splice(5..=5, a.iter().cloned());
+ }
- #[test]
- fn test_splice_items_zero_sized() {
- let mut vec = thin_vec![(), (), ()];
- let vec2 = thin_vec![];
- let t: ThinVec<_> = vec.splice(1..2, vec2.iter().cloned()).collect();
- assert_eq!(vec, &[(), ()]);
- assert_eq!(t, &[()]);
- }
+ #[test]
+ fn test_splice_items_zero_sized() {
+ let mut vec = thin_vec![(), (), ()];
+ let vec2 = thin_vec![];
+ let t: ThinVec<_> = vec.splice(1..2, vec2.iter().cloned()).collect();
+ assert_eq!(vec, &[(), ()]);
+ assert_eq!(t, &[()]);
+ }
- #[test]
- fn test_splice_unbounded() {
- let mut vec = thin_vec![1, 2, 3, 4, 5];
- let t: ThinVec<_> = vec.splice(.., None).collect();
- assert_eq!(vec, &[]);
- assert_eq!(t, &[1, 2, 3, 4, 5]);
- }
+ #[test]
+ fn test_splice_unbounded() {
+ let mut vec = thin_vec![1, 2, 3, 4, 5];
+ let t: ThinVec<_> = vec.splice(.., None).collect();
+ assert_eq!(vec, &[]);
+ assert_eq!(t, &[1, 2, 3, 4, 5]);
+ }
- #[test]
- fn test_splice_forget() {
- let mut v = thin_vec![1, 2, 3, 4, 5];
- let a = [10, 11, 12];
- ::std::mem::forget(v.splice(2..4, a.iter().cloned()));
- assert_eq!(v, &[1, 2]);
- }
- */
+ #[test]
+ fn test_splice_forget() {
+ let mut v = thin_vec![1, 2, 3, 4, 5];
+ let a = [10, 11, 12];
+ ::std::mem::forget(v.splice(2..4, a.iter().cloned()));
+ assert_eq!(v, &[1, 2]);
+ }
+
+ #[test]
+ fn test_splice_from_empty() {
+ let mut v = thin_vec![];
+ let a = [10, 11, 12];
+ v.splice(.., a.iter().cloned());
+ assert_eq!(v, &[10, 11, 12]);
+ }
/* probs won't ever impl this
#[test]
@@ -2598,81 +3700,59 @@ mod std_tests {
assert_eq!(vec2, [5, 6]);
}
- /* TODO: implement into_iter methods?
- #[test]
- fn test_into_iter_as_slice() {
- let vec = thin_vec!['a', 'b', 'c'];
- let mut into_iter = vec.into_iter();
- assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
- let _ = into_iter.next().unwrap();
- assert_eq!(into_iter.as_slice(), &['b', 'c']);
- let _ = into_iter.next().unwrap();
- let _ = into_iter.next().unwrap();
- assert_eq!(into_iter.as_slice(), &[]);
- }
-
- #[test]
- fn test_into_iter_as_mut_slice() {
- let vec = thin_vec!['a', 'b', 'c'];
- let mut into_iter = vec.into_iter();
- assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
- into_iter.as_mut_slice()[0] = 'x';
- into_iter.as_mut_slice()[1] = 'y';
- assert_eq!(into_iter.next().unwrap(), 'x');
- assert_eq!(into_iter.as_slice(), &['y', 'c']);
- }
-
- #[test]
- fn test_into_iter_debug() {
- let vec = thin_vec!['a', 'b', 'c'];
- let into_iter = vec.into_iter();
- let debug = format!("{:?}", into_iter);
- assert_eq!(debug, "IntoIter(['a', 'b', 'c'])");
- }
+ #[test]
+ fn test_into_iter_as_slice() {
+ let vec = thin_vec!['a', 'b', 'c'];
+ let mut into_iter = vec.into_iter();
+ assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
+ let _ = into_iter.next().unwrap();
+ assert_eq!(into_iter.as_slice(), &['b', 'c']);
+ let _ = into_iter.next().unwrap();
+ let _ = into_iter.next().unwrap();
+ assert_eq!(into_iter.as_slice(), &[]);
+ }
- #[test]
- fn test_into_iter_count() {
- assert_eq!(thin_vec![1, 2, 3].into_iter().count(), 3);
- }
+ #[test]
+ fn test_into_iter_as_mut_slice() {
+ let vec = thin_vec!['a', 'b', 'c'];
+ let mut into_iter = vec.into_iter();
+ assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
+ into_iter.as_mut_slice()[0] = 'x';
+ into_iter.as_mut_slice()[1] = 'y';
+ assert_eq!(into_iter.next().unwrap(), 'x');
+ assert_eq!(into_iter.as_slice(), &['y', 'c']);
+ }
- #[test]
- fn test_into_iter_clone() {
- fn iter_equal<I: Iterator<Item = i32>>(it: I, slice: &[i32]) {
- let v: ThinVec<i32> = it.collect();
- assert_eq!(&v[..], slice);
- }
- let mut it = thin_vec![1, 2, 3].into_iter();
- iter_equal(it.clone(), &[1, 2, 3]);
- assert_eq!(it.next(), Some(1));
- let mut it = it.rev();
- iter_equal(it.clone(), &[3, 2]);
- assert_eq!(it.next(), Some(3));
- iter_equal(it.clone(), &[2]);
- assert_eq!(it.next(), Some(2));
- iter_equal(it.clone(), &[]);
- assert_eq!(it.next(), None);
- }
- */
+ #[test]
+ fn test_into_iter_debug() {
+ let vec = thin_vec!['a', 'b', 'c'];
+ let into_iter = vec.into_iter();
+ let debug = format!("{:?}", into_iter);
+ assert_eq!(debug, "IntoIter(['a', 'b', 'c'])");
+ }
- /* TODO: implement CoW interop?
- #[test]
- fn test_cow_from() {
- let borrowed: &[_] = &["borrowed", "(slice)"];
- let owned = thin_vec!["owned", "(vec)"];
- match (Cow::from(owned.clone()), Cow::from(borrowed)) {
- (Cow::Owned(o), Cow::Borrowed(b)) => assert!(o == owned && b == borrowed),
- _ => panic!("invalid `Cow::from`"),
- }
- }
+ #[test]
+ fn test_into_iter_count() {
+ assert_eq!(thin_vec![1, 2, 3].into_iter().count(), 3);
+ }
- #[test]
- fn test_from_cow() {
- let borrowed: &[_] = &["borrowed", "(slice)"];
- let owned = thin_vec!["owned", "(vec)"];
- assert_eq!(ThinVec::from(Cow::Borrowed(borrowed)), thin_vec!["borrowed", "(slice)"]);
- assert_eq!(ThinVec::from(Cow::Owned(owned)), thin_vec!["owned", "(vec)"]);
+ #[test]
+ fn test_into_iter_clone() {
+ fn iter_equal<I: Iterator<Item = i32>>(it: I, slice: &[i32]) {
+ let v: ThinVec<i32> = it.collect();
+ assert_eq!(&v[..], slice);
}
- */
+ let mut it = thin_vec![1, 2, 3].into_iter();
+ iter_equal(it.clone(), &[1, 2, 3]);
+ assert_eq!(it.next(), Some(1));
+ let mut it = it.rev();
+ iter_equal(it.clone(), &[3, 2]);
+ assert_eq!(it.next(), Some(3));
+ iter_equal(it.clone(), &[2]);
+ assert_eq!(it.next(), Some(2));
+ iter_equal(it.clone(), &[]);
+ assert_eq!(it.next(), None);
+ }
/* TODO: make drain covariant
#[allow(dead_code)]
@@ -2704,22 +3784,21 @@ mod std_tests {
}
*/
- /* TODO: implement higher than 16 alignment
- #[test]
- fn overaligned_allocations() {
- #[repr(align(256))]
- struct Foo(usize);
- let mut v = thin_vec![Foo(273)];
- for i in 0..0x1000 {
- v.reserve_exact(i);
- assert!(v[0].0 == 273);
- assert!(v.as_ptr() as usize & 0xff == 0);
- v.shrink_to_fit();
- assert!(v[0].0 == 273);
- assert!(v.as_ptr() as usize & 0xff == 0);
- }
+ #[test]
+ #[cfg_attr(feature = "gecko-ffi", ignore)]
+ fn overaligned_allocations() {
+ #[repr(align(256))]
+ struct Foo(usize);
+ let mut v = thin_vec![Foo(273)];
+ for i in 0..0x1000 {
+ v.reserve_exact(i);
+ assert!(v[0].0 == 273);
+ assert!(v.as_ptr() as usize & 0xff == 0);
+ v.shrink_to_fit();
+ assert!(v[0].0 == 273);
+ assert!(v.as_ptr() as usize & 0xff == 0);
}
- */
+ }
/* TODO: implement drain_filter?
#[test]
@@ -3175,4 +4254,35 @@ mod std_tests {
vec.set_len(1);
}
}
+
+ #[test]
+ #[should_panic(expected = "capacity overflow")]
+ fn test_capacity_overflow_header_too_big() {
+ let vec: ThinVec<u8> = ThinVec::with_capacity(isize::MAX as usize - 2);
+ assert!(vec.capacity() > 0);
+ }
+ #[test]
+ #[should_panic(expected = "capacity overflow")]
+ fn test_capacity_overflow_cap_too_big() {
+ let vec: ThinVec<u8> = ThinVec::with_capacity(isize::MAX as usize + 1);
+ assert!(vec.capacity() > 0);
+ }
+ #[test]
+ #[should_panic(expected = "capacity overflow")]
+ fn test_capacity_overflow_size_mul1() {
+ let vec: ThinVec<u16> = ThinVec::with_capacity(isize::MAX as usize + 1);
+ assert!(vec.capacity() > 0);
+ }
+ #[test]
+ #[should_panic(expected = "capacity overflow")]
+ fn test_capacity_overflow_size_mul2() {
+ let vec: ThinVec<u16> = ThinVec::with_capacity(isize::MAX as usize / 2 + 1);
+ assert!(vec.capacity() > 0);
+ }
+ #[test]
+ #[should_panic(expected = "capacity overflow")]
+ fn test_capacity_overflow_cap_really_isnt_isize() {
+ let vec: ThinVec<u8> = ThinVec::with_capacity(isize::MAX as usize);
+ assert!(vec.capacity() > 0);
+ }
}
diff --git a/vendor/time-macros/.cargo-checksum.json b/vendor/time-macros/.cargo-checksum.json
deleted file mode 100644
index 98cc1d644..000000000
--- a/vendor/time-macros/.cargo-checksum.json
+++ /dev/null
@@ -1 +0,0 @@
-{"files":{"Cargo.toml":"eb16c06efbfbf2ff5f48260785d4ecefbae6873d9d55c0ba2d388c6762e69b1f","LICENSE-Apache":"b8929fea28678da67251fb2daf9438f67503814211051861612441806d8edb05","LICENSE-MIT":"04620bf27e4a643dd47bf27652320c205acdb776c1f9f24bb8c3bfaba10804c5","src/date.rs":"ffcd3d0998ec67abb43a3f8eccc6104172f5061b855312b89d53bb82fece2460","src/datetime.rs":"5c7f6e07dc2f0dcfcd86216664df53bc008dbc86f346df57a9ff57f52fe43bc6","src/error.rs":"b597f98f425f1628b93ffea19f5f32163aa204e4cd25351bc114853a798e14b0","src/format_description/component.rs":"a05e7549db9bab4f3836f5fd5af18cacbfa6b323d0106b027e21bf438a5885e5","src/format_description/error.rs":"41253d7a02e14597915cf588811a272a90d1ce0f857f7769914e076dd5a66774","src/format_description/mod.rs":"da47af329408e9428753ad98ce433eaf026cfdd6e73e3142b23285251d32d0dd","src/format_description/modifier.rs":"c252c8a7d6608b594a6f715210ff67e804ae2f308025f62c8dd99d707627e4a9","src/format_description/parse.rs":"d65d6e7008030414ce6a860ff37c462c07ed89176a3f1462eeb46468a38fce7e","src/helpers/mod.rs":"54ce8e93512e18ef8761687eaac898a8227852a732f92aa5e80c28e23315bd0c","src/helpers/string.rs":"ba5699a4df344cbd71c4143f642f6bc07591f53978a9800d4b49ca1f461f87d9","src/lib.rs":"f99bded51bb861be5d708a3f756407f5b936a5febb719760c253a15113687e0d","src/offset.rs":"fc9341648e091b4d8f7bec47006c01c21cb038c7ef98bd36a492cf78e7533023","src/quote.rs":"b40251b0ca68e2362aff4297b87a027e48053f1a419113d3d0f7fe089a845a9c","src/serde_format_description.rs":"aa279c8005005fc87c52fa5e8be8ef8fc13ef456a18e3cd5d702ae81194ba4d9","src/time.rs":"3c06562358aed7ef624319c96e3f9c150a069606ab930de98ac379ef16b08100","src/to_tokens.rs":"825150a92396a019fee44f21da0bd257349e276d5e75a23ff86cfc625bef6f10"},"package":"d967f99f534ca7e495c575c62638eebc2898a8c84c119b89e250477bc4ba16b2"} \ No newline at end of file
diff --git a/vendor/time-macros/Cargo.toml b/vendor/time-macros/Cargo.toml
deleted file mode 100644
index c770e23ad..000000000
--- a/vendor/time-macros/Cargo.toml
+++ /dev/null
@@ -1,45 +0,0 @@
-# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
-#
-# When uploading crates to the registry Cargo will automatically
-# "normalize" Cargo.toml files for maximal compatibility
-# with all versions of Cargo and also rewrite `path` dependencies
-# to registry (e.g., crates.io) dependencies.
-#
-# If you are reading this file be aware that the original Cargo.toml
-# will likely look very different (and much more reasonable).
-# See Cargo.toml.orig for the original contents.
-
-[package]
-edition = "2021"
-rust-version = "1.60.0"
-name = "time-macros"
-version = "0.2.6"
-authors = [
- "Jacob Pratt <open-source@jhpratt.dev>",
- "Time contributors",
-]
-description = """
- Procedural macros for the time crate.
- This crate is an implementation detail and should not be relied upon directly.
-"""
-keywords = [
- "date",
- "time",
- "calendar",
- "duration",
-]
-categories = ["date-and-time"]
-license = "MIT OR Apache-2.0"
-repository = "https://github.com/time-rs/time"
-
-[lib]
-proc-macro = true
-
-[dependencies.time-core]
-version = "=0.1.0"
-
-[features]
-formatting = []
-large-dates = []
-parsing = []
-serde = []
diff --git a/vendor/time-macros/src/date.rs b/vendor/time-macros/src/date.rs
deleted file mode 100644
index 574ef8ce6..000000000
--- a/vendor/time-macros/src/date.rs
+++ /dev/null
@@ -1,137 +0,0 @@
-use std::iter::Peekable;
-
-use proc_macro::{token_stream, TokenTree};
-use time_core::util::{days_in_year, weeks_in_year};
-
-use crate::helpers::{
- consume_any_ident, consume_number, consume_punct, days_in_year_month, ymd_to_yo, ywd_to_yo,
-};
-use crate::to_tokens::ToTokenTree;
-use crate::Error;
-
-#[cfg(feature = "large-dates")]
-const MAX_YEAR: i32 = 999_999;
-#[cfg(not(feature = "large-dates"))]
-const MAX_YEAR: i32 = 9_999;
-
-pub(crate) struct Date {
- pub(crate) year: i32,
- pub(crate) ordinal: u16,
-}
-
-pub(crate) fn parse(chars: &mut Peekable<token_stream::IntoIter>) -> Result<Date, Error> {
- let (year_sign_span, year_sign, explicit_sign) = if let Ok(span) = consume_punct('-', chars) {
- (Some(span), -1, true)
- } else if let Ok(span) = consume_punct('+', chars) {
- (Some(span), 1, true)
- } else {
- (None, 1, false)
- };
- let (year_span, mut year) = consume_number::<i32>("year", chars)?;
- year *= year_sign;
- if year.abs() > MAX_YEAR {
- return Err(Error::InvalidComponent {
- name: "year",
- value: year.to_string(),
- span_start: Some(year_sign_span.unwrap_or(year_span)),
- span_end: Some(year_span),
- });
- }
- if !explicit_sign && year.abs() >= 10_000 {
- return Err(Error::Custom {
- message: "years with more than four digits must have an explicit sign".into(),
- span_start: Some(year_sign_span.unwrap_or(year_span)),
- span_end: Some(year_span),
- });
- }
-
- consume_punct('-', chars)?;
-
- // year-week-day
- if let Ok(w_span) = consume_any_ident(&["W"], chars) {
- let (week_span, week) = consume_number::<u8>("week", chars)?;
- consume_punct('-', chars)?;
- let (day_span, day) = consume_number::<u8>("day", chars)?;
-
- if week > weeks_in_year(year) {
- return Err(Error::InvalidComponent {
- name: "week",
- value: week.to_string(),
- span_start: Some(w_span),
- span_end: Some(week_span),
- });
- }
- if day == 0 || day > 7 {
- return Err(Error::InvalidComponent {
- name: "day",
- value: day.to_string(),
- span_start: Some(day_span),
- span_end: Some(day_span),
- });
- }
-
- let (year, ordinal) = ywd_to_yo(year, week, day);
-
- return Ok(Date { year, ordinal });
- }
-
- // We don't yet know whether it's year-month-day or year-ordinal.
- let (month_or_ordinal_span, month_or_ordinal) =
- consume_number::<u16>("month or ordinal", chars)?;
-
- // year-month-day
- #[allow(clippy::branches_sharing_code)] // clarity
- if consume_punct('-', chars).is_ok() {
- let (month_span, month) = (month_or_ordinal_span, month_or_ordinal);
- let (day_span, day) = consume_number::<u8>("day", chars)?;
-
- if month == 0 || month > 12 {
- return Err(Error::InvalidComponent {
- name: "month",
- value: month.to_string(),
- span_start: Some(month_span),
- span_end: Some(month_span),
- });
- }
- let month = month as _;
- if day == 0 || day > days_in_year_month(year, month) {
- return Err(Error::InvalidComponent {
- name: "day",
- value: day.to_string(),
- span_start: Some(day_span),
- span_end: Some(day_span),
- });
- }
-
- let (year, ordinal) = ymd_to_yo(year, month, day);
-
- Ok(Date { year, ordinal })
- }
- // year-ordinal
- else {
- let (ordinal_span, ordinal) = (month_or_ordinal_span, month_or_ordinal);
-
- if ordinal == 0 || ordinal > days_in_year(year) {
- return Err(Error::InvalidComponent {
- name: "ordinal",
- value: ordinal.to_string(),
- span_start: Some(ordinal_span),
- span_end: Some(ordinal_span),
- });
- }
-
- Ok(Date { year, ordinal })
- }
-}
-
-impl ToTokenTree for Date {
- fn into_token_tree(self) -> TokenTree {
- quote_group! {{
- const DATE: ::time::Date = ::time::Date::__from_ordinal_date_unchecked(
- #(self.year),
- #(self.ordinal),
- );
- DATE
- }}
- }
-}
diff --git a/vendor/time-macros/src/datetime.rs b/vendor/time-macros/src/datetime.rs
deleted file mode 100644
index 2d41e9a53..000000000
--- a/vendor/time-macros/src/datetime.rs
+++ /dev/null
@@ -1,57 +0,0 @@
-use std::iter::Peekable;
-
-use proc_macro::{token_stream, Ident, Span, TokenTree};
-
-use crate::date::Date;
-use crate::error::Error;
-use crate::offset::Offset;
-use crate::time::Time;
-use crate::to_tokens::ToTokenTree;
-use crate::{date, offset, time};
-
-pub(crate) struct DateTime {
- date: Date,
- time: Time,
- offset: Option<Offset>,
-}
-
-pub(crate) fn parse(chars: &mut Peekable<token_stream::IntoIter>) -> Result<DateTime, Error> {
- let date = date::parse(chars)?;
- let time = time::parse(chars)?;
- let offset = match offset::parse(chars) {
- Ok(offset) => Some(offset),
- Err(Error::UnexpectedEndOfInput | Error::MissingComponent { name: "sign", .. }) => None,
- Err(err) => return Err(err),
- };
-
- if let Some(token) = chars.peek() {
- return Err(Error::UnexpectedToken {
- tree: token.clone(),
- });
- }
-
- Ok(DateTime { date, time, offset })
-}
-
-impl ToTokenTree for DateTime {
- fn into_token_tree(self) -> TokenTree {
- let (type_name, maybe_offset) = match self.offset {
- Some(offset) => (
- Ident::new("OffsetDateTime", Span::mixed_site()),
- quote!(.assume_offset(#(offset))),
- ),
- None => (
- Ident::new("PrimitiveDateTime", Span::mixed_site()),
- quote!(),
- ),
- };
-
- quote_group! {{
- const DATE_TIME: ::time::#(type_name) = ::time::PrimitiveDateTime::new(
- #(self.date),
- #(self.time),
- ) #S(maybe_offset);
- DATE_TIME
- }}
- }
-}
diff --git a/vendor/time-macros/src/error.rs b/vendor/time-macros/src/error.rs
deleted file mode 100644
index 4de369daf..000000000
--- a/vendor/time-macros/src/error.rs
+++ /dev/null
@@ -1,136 +0,0 @@
-use std::borrow::Cow;
-use std::fmt;
-
-use proc_macro::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
-
-#[cfg(any(feature = "formatting", feature = "parsing"))]
-use crate::format_description::error::InvalidFormatDescription;
-
-trait WithSpan {
- fn with_span(self, span: Span) -> Self;
-}
-
-impl WithSpan for TokenTree {
- fn with_span(mut self, span: Span) -> Self {
- self.set_span(span);
- self
- }
-}
-
-pub(crate) enum Error {
- MissingComponent {
- name: &'static str,
- span_start: Option<Span>,
- span_end: Option<Span>,
- },
- InvalidComponent {
- name: &'static str,
- value: String,
- span_start: Option<Span>,
- span_end: Option<Span>,
- },
- #[cfg(any(feature = "formatting", feature = "parsing"))]
- ExpectedString {
- span_start: Option<Span>,
- span_end: Option<Span>,
- },
- UnexpectedToken {
- tree: TokenTree,
- },
- UnexpectedEndOfInput,
- #[cfg(any(feature = "formatting", feature = "parsing"))]
- InvalidFormatDescription {
- error: InvalidFormatDescription,
- span_start: Option<Span>,
- span_end: Option<Span>,
- },
- Custom {
- message: Cow<'static, str>,
- span_start: Option<Span>,
- span_end: Option<Span>,
- },
-}
-
-impl fmt::Display for Error {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- match self {
- Self::MissingComponent { name, .. } => write!(f, "missing component: {name}"),
- Self::InvalidComponent { name, value, .. } => {
- write!(f, "invalid component: {name} was {value}")
- }
- #[cfg(any(feature = "formatting", feature = "parsing"))]
- Self::ExpectedString { .. } => f.write_str("expected string"),
- Self::UnexpectedToken { tree } => write!(f, "unexpected token: {tree}"),
- Self::UnexpectedEndOfInput => f.write_str("unexpected end of input"),
- #[cfg(any(feature = "formatting", feature = "parsing"))]
- Self::InvalidFormatDescription { error, .. } => error.fmt(f),
- Self::Custom { message, .. } => f.write_str(message),
- }
- }
-}
-
-impl Error {
- fn span_start(&self) -> Span {
- match self {
- Self::MissingComponent { span_start, .. }
- | Self::InvalidComponent { span_start, .. }
- | Self::Custom { span_start, .. } => *span_start,
- #[cfg(any(feature = "formatting", feature = "parsing"))]
- Self::ExpectedString { span_start, .. }
- | Self::InvalidFormatDescription { span_start, .. } => *span_start,
- Self::UnexpectedToken { tree } => Some(tree.span()),
- Self::UnexpectedEndOfInput => Some(Span::mixed_site()),
- }
- .unwrap_or_else(Span::mixed_site)
- }
-
- fn span_end(&self) -> Span {
- match self {
- Self::MissingComponent { span_end, .. }
- | Self::InvalidComponent { span_end, .. }
- | Self::Custom { span_end, .. } => *span_end,
- #[cfg(any(feature = "formatting", feature = "parsing"))]
- Self::ExpectedString { span_end, .. }
- | Self::InvalidFormatDescription { span_end, .. } => *span_end,
- Self::UnexpectedToken { tree, .. } => Some(tree.span()),
- Self::UnexpectedEndOfInput => Some(Span::mixed_site()),
- }
- .unwrap_or_else(|| self.span_start())
- }
-
- pub(crate) fn to_compile_error(&self) -> TokenStream {
- let (start, end) = (self.span_start(), self.span_end());
-
- [
- TokenTree::from(Punct::new(':', Spacing::Joint)).with_span(start),
- TokenTree::from(Punct::new(':', Spacing::Alone)).with_span(start),
- TokenTree::from(Ident::new("core", start)),
- TokenTree::from(Punct::new(':', Spacing::Joint)).with_span(start),
- TokenTree::from(Punct::new(':', Spacing::Alone)).with_span(start),
- TokenTree::from(Ident::new("compile_error", start)),
- TokenTree::from(Punct::new('!', Spacing::Alone)).with_span(start),
- TokenTree::from(Group::new(
- Delimiter::Parenthesis,
- TokenStream::from(
- TokenTree::from(Literal::string(&self.to_string())).with_span(end),
- ),
- ))
- .with_span(end),
- ]
- .iter()
- .cloned()
- .collect()
- }
-
- /// Like `to_compile_error`, but for use in macros that produce items.
- #[cfg(all(feature = "serde", any(feature = "formatting", feature = "parsing")))]
- pub(crate) fn to_compile_error_standalone(&self) -> TokenStream {
- let end = self.span_end();
- self.to_compile_error()
- .into_iter()
- .chain(std::iter::once(
- TokenTree::from(Punct::new(';', Spacing::Alone)).with_span(end),
- ))
- .collect()
- }
-}
diff --git a/vendor/time-macros/src/format_description/component.rs b/vendor/time-macros/src/format_description/component.rs
deleted file mode 100644
index 850da91d2..000000000
--- a/vendor/time-macros/src/format_description/component.rs
+++ /dev/null
@@ -1,168 +0,0 @@
-use proc_macro::{Ident, Span, TokenStream};
-
-use crate::format_description::error::InvalidFormatDescription;
-use crate::format_description::modifier;
-use crate::format_description::modifier::Modifiers;
-use crate::to_tokens::ToTokenStream;
-
-pub(crate) enum Component {
- Day(modifier::Day),
- Month(modifier::Month),
- Ordinal(modifier::Ordinal),
- Weekday(modifier::Weekday),
- WeekNumber(modifier::WeekNumber),
- Year(modifier::Year),
- Hour(modifier::Hour),
- Minute(modifier::Minute),
- Period(modifier::Period),
- Second(modifier::Second),
- Subsecond(modifier::Subsecond),
- OffsetHour(modifier::OffsetHour),
- OffsetMinute(modifier::OffsetMinute),
- OffsetSecond(modifier::OffsetSecond),
-}
-
-impl ToTokenStream for Component {
- fn append_to(self, ts: &mut TokenStream) {
- let mut mts = TokenStream::new();
-
- macro_rules! component_name_and_append {
- ($($name:ident)*) => {
- match self {
- $(Self::$name(modifier) => {
- modifier.append_to(&mut mts);
- stringify!($name)
- })*
- }
- };
- }
-
- let component = component_name_and_append![
- Day
- Month
- Ordinal
- Weekday
- WeekNumber
- Year
- Hour
- Minute
- Period
- Second
- Subsecond
- OffsetHour
- OffsetMinute
- OffsetSecond
- ];
- let component = Ident::new(component, Span::mixed_site());
-
- quote_append! { ts
- ::time::format_description::Component::#(component)(#S(mts))
- }
- }
-}
-
-pub(crate) enum NakedComponent {
- Day,
- Month,
- Ordinal,
- Weekday,
- WeekNumber,
- Year,
- Hour,
- Minute,
- Period,
- Second,
- Subsecond,
- OffsetHour,
- OffsetMinute,
- OffsetSecond,
-}
-
-impl NakedComponent {
- pub(crate) fn parse(
- component_name: &[u8],
- component_index: usize,
- ) -> Result<Self, InvalidFormatDescription> {
- match component_name {
- b"day" => Ok(Self::Day),
- b"month" => Ok(Self::Month),
- b"ordinal" => Ok(Self::Ordinal),
- b"weekday" => Ok(Self::Weekday),
- b"week_number" => Ok(Self::WeekNumber),
- b"year" => Ok(Self::Year),
- b"hour" => Ok(Self::Hour),
- b"minute" => Ok(Self::Minute),
- b"period" => Ok(Self::Period),
- b"second" => Ok(Self::Second),
- b"subsecond" => Ok(Self::Subsecond),
- b"offset_hour" => Ok(Self::OffsetHour),
- b"offset_minute" => Ok(Self::OffsetMinute),
- b"offset_second" => Ok(Self::OffsetSecond),
- b"" => Err(InvalidFormatDescription::MissingComponentName {
- index: component_index,
- }),
- _ => Err(InvalidFormatDescription::InvalidComponentName {
- name: String::from_utf8_lossy(component_name).into_owned(),
- index: component_index,
- }),
- }
- }
-
- pub(crate) fn attach_modifiers(self, modifiers: Modifiers) -> Component {
- match self {
- Self::Day => Component::Day(modifier::Day {
- padding: modifiers.padding.unwrap_or_default(),
- }),
- Self::Month => Component::Month(modifier::Month {
- padding: modifiers.padding.unwrap_or_default(),
- repr: modifiers.month_repr.unwrap_or_default(),
- case_sensitive: modifiers.case_sensitive.unwrap_or(true),
- }),
- Self::Ordinal => Component::Ordinal(modifier::Ordinal {
- padding: modifiers.padding.unwrap_or_default(),
- }),
- Self::Weekday => Component::Weekday(modifier::Weekday {
- repr: modifiers.weekday_repr.unwrap_or_default(),
- one_indexed: modifiers.weekday_is_one_indexed.unwrap_or(true),
- case_sensitive: modifiers.case_sensitive.unwrap_or(true),
- }),
- Self::WeekNumber => Component::WeekNumber(modifier::WeekNumber {
- padding: modifiers.padding.unwrap_or_default(),
- repr: modifiers.week_number_repr.unwrap_or_default(),
- }),
- Self::Year => Component::Year(modifier::Year {
- padding: modifiers.padding.unwrap_or_default(),
- repr: modifiers.year_repr.unwrap_or_default(),
- iso_week_based: modifiers.year_is_iso_week_based.unwrap_or_default(),
- sign_is_mandatory: modifiers.sign_is_mandatory.unwrap_or_default(),
- }),
- Self::Hour => Component::Hour(modifier::Hour {
- padding: modifiers.padding.unwrap_or_default(),
- is_12_hour_clock: modifiers.hour_is_12_hour_clock.unwrap_or_default(),
- }),
- Self::Minute => Component::Minute(modifier::Minute {
- padding: modifiers.padding.unwrap_or_default(),
- }),
- Self::Period => Component::Period(modifier::Period {
- is_uppercase: modifiers.period_is_uppercase.unwrap_or(true),
- case_sensitive: modifiers.case_sensitive.unwrap_or(true),
- }),
- Self::Second => Component::Second(modifier::Second {
- padding: modifiers.padding.unwrap_or_default(),
- }),
- Self::Subsecond => Component::Subsecond(modifier::Subsecond {
- digits: modifiers.subsecond_digits.unwrap_or_default(),
- }),
- Self::OffsetHour => Component::OffsetHour(modifier::OffsetHour {
- sign_is_mandatory: modifiers.sign_is_mandatory.unwrap_or_default(),
- padding: modifiers.padding.unwrap_or_default(),
- }),
- Self::OffsetMinute => Component::OffsetMinute(modifier::OffsetMinute {
- padding: modifiers.padding.unwrap_or_default(),
- }),
- Self::OffsetSecond => Component::OffsetSecond(modifier::OffsetSecond {
- padding: modifiers.padding.unwrap_or_default(),
- }),
- }
- }
-}
diff --git a/vendor/time-macros/src/format_description/error.rs b/vendor/time-macros/src/format_description/error.rs
deleted file mode 100644
index 9aacd7dc9..000000000
--- a/vendor/time-macros/src/format_description/error.rs
+++ /dev/null
@@ -1,29 +0,0 @@
-use std::fmt;
-
-pub(crate) enum InvalidFormatDescription {
- UnclosedOpeningBracket { index: usize },
- InvalidComponentName { name: String, index: usize },
- InvalidModifier { value: String, index: usize },
- MissingComponentName { index: usize },
-}
-
-impl fmt::Display for InvalidFormatDescription {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- #[allow(clippy::enum_glob_use)]
- use InvalidFormatDescription::*;
- match self {
- UnclosedOpeningBracket { index } => {
- write!(f, "unclosed opening bracket at byte index {index}")
- }
- InvalidComponentName { name, index } => {
- write!(f, "invalid component name `{name}` at byte index {index}",)
- }
- InvalidModifier { value, index } => {
- write!(f, "invalid modifier `{value}` at byte index {index}")
- }
- MissingComponentName { index } => {
- write!(f, "missing component name at byte index {index}")
- }
- }
- }
-}
diff --git a/vendor/time-macros/src/format_description/mod.rs b/vendor/time-macros/src/format_description/mod.rs
deleted file mode 100644
index dd32db74d..000000000
--- a/vendor/time-macros/src/format_description/mod.rs
+++ /dev/null
@@ -1,40 +0,0 @@
-mod component;
-pub(crate) mod error;
-pub(crate) mod modifier;
-pub(crate) mod parse;
-
-use proc_macro::{Literal, TokenStream};
-
-pub(crate) use self::component::Component;
-pub(crate) use self::parse::parse;
-use crate::to_tokens::ToTokenStream;
-
-mod helper {
- #[must_use = "This does not modify the original slice."]
- pub(crate) fn consume_whitespace<'a>(bytes: &'a [u8], index: &mut usize) -> &'a [u8] {
- let first_non_whitespace = bytes
- .iter()
- .position(|c| !c.is_ascii_whitespace())
- .unwrap_or(bytes.len());
- *index += first_non_whitespace;
- &bytes[first_non_whitespace..]
- }
-}
-
-#[allow(single_use_lifetimes)] // false positive
-#[allow(variant_size_differences)]
-pub(crate) enum FormatItem<'a> {
- Literal(&'a [u8]),
- Component(Component),
-}
-
-impl ToTokenStream for FormatItem<'_> {
- fn append_to(self, ts: &mut TokenStream) {
- quote_append! { ts
- ::time::format_description::FormatItem::#S(match self {
- FormatItem::Literal(bytes) => quote! { Literal(#(Literal::byte_string(bytes))) },
- FormatItem::Component(component) => quote! { Component(#S(component)) },
- })
- }
- }
-}
diff --git a/vendor/time-macros/src/format_description/modifier.rs b/vendor/time-macros/src/format_description/modifier.rs
deleted file mode 100644
index f4e641a7b..000000000
--- a/vendor/time-macros/src/format_description/modifier.rs
+++ /dev/null
@@ -1,417 +0,0 @@
-use core::mem;
-
-use proc_macro::{Ident, Span, TokenStream, TokenTree};
-
-use crate::format_description::error::InvalidFormatDescription;
-use crate::format_description::helper;
-use crate::to_tokens::{ToTokenStream, ToTokenTree};
-
-macro_rules! to_tokens {
- (
- $(#[$struct_attr:meta])*
- $struct_vis:vis struct $struct_name:ident {$(
- $(#[$field_attr:meta])*
- $field_vis:vis $field_name:ident : $field_ty:ty
- ),+ $(,)?}
- ) => {
- $(#[$struct_attr])*
- $struct_vis struct $struct_name {$(
- $(#[$field_attr])*
- $field_vis $field_name: $field_ty
- ),+}
-
- impl ToTokenTree for $struct_name {
- fn into_token_tree(self) -> TokenTree {
- let mut tokens = TokenStream::new();
- let Self {$($field_name),+} = self;
-
- quote_append! { tokens
- let mut value = ::time::format_description::modifier::$struct_name::default();
- };
- $(
- quote_append!(tokens value.$field_name =);
- $field_name.append_to(&mut tokens);
- quote_append!(tokens ;);
- )+
- quote_append!(tokens value);
-
- proc_macro::TokenTree::Group(proc_macro::Group::new(
- proc_macro::Delimiter::Brace,
- tokens,
- ))
- }
- }
- };
-
- (
- $(#[$enum_attr:meta])*
- $enum_vis:vis enum $enum_name:ident {$(
- $(#[$variant_attr:meta])*
- $variant_name:ident
- ),+ $(,)?}
- ) => {
- $(#[$enum_attr])*
- $enum_vis enum $enum_name {$(
- $(#[$variant_attr])*
- $variant_name
- ),+}
-
- impl ToTokenStream for $enum_name {
- fn append_to(self, ts: &mut TokenStream) {
- quote_append! { ts
- ::time::format_description::modifier::$enum_name::
- };
- let name = match self {
- $(Self::$variant_name => stringify!($variant_name)),+
- };
- ts.extend([TokenTree::Ident(Ident::new(name, Span::mixed_site()))]);
- }
- }
- }
-}
-
-to_tokens! {
- pub(crate) struct Day {
- pub(crate) padding: Padding,
- }
-}
-
-to_tokens! {
- pub(crate) enum MonthRepr {
- Numerical,
- Long,
- Short,
- }
-}
-
-to_tokens! {
- pub(crate) struct Month {
- pub(crate) padding: Padding,
- pub(crate) repr: MonthRepr,
- pub(crate) case_sensitive: bool,
- }
-}
-
-to_tokens! {
- pub(crate) struct Ordinal {
- pub(crate) padding: Padding,
- }
-}
-
-to_tokens! {
- pub(crate) enum WeekdayRepr {
- Short,
- Long,
- Sunday,
- Monday,
- }
-}
-
-to_tokens! {
- pub(crate) struct Weekday {
- pub(crate) repr: WeekdayRepr,
- pub(crate) one_indexed: bool,
- pub(crate) case_sensitive: bool,
- }
-}
-
-to_tokens! {
- pub(crate) enum WeekNumberRepr {
- Iso,
- Sunday,
- Monday,
- }
-}
-
-to_tokens! {
- pub(crate) struct WeekNumber {
- pub(crate) padding: Padding,
- pub(crate) repr: WeekNumberRepr,
- }
-}
-
-to_tokens! {
- pub(crate) enum YearRepr {
- Full,
- LastTwo,
- }
-}
-
-to_tokens! {
- pub(crate) struct Year {
- pub(crate) padding: Padding,
- pub(crate) repr: YearRepr,
- pub(crate) iso_week_based: bool,
- pub(crate) sign_is_mandatory: bool,
- }
-}
-
-to_tokens! {
- pub(crate) struct Hour {
- pub(crate) padding: Padding,
- pub(crate) is_12_hour_clock: bool,
- }
-}
-
-to_tokens! {
- pub(crate) struct Minute {
- pub(crate) padding: Padding,
- }
-}
-
-to_tokens! {
- pub(crate) struct Period {
- pub(crate) is_uppercase: bool,
- pub(crate) case_sensitive: bool,
- }
-}
-
-to_tokens! {
- pub(crate) struct Second {
- pub(crate) padding: Padding,
- }
-}
-
-to_tokens! {
- pub(crate) enum SubsecondDigits {
- One,
- Two,
- Three,
- Four,
- Five,
- Six,
- Seven,
- Eight,
- Nine,
- OneOrMore,
- }
-}
-
-to_tokens! {
- pub(crate) struct Subsecond {
- pub(crate) digits: SubsecondDigits,
- }
-}
-
-to_tokens! {
- pub(crate) struct OffsetHour {
- pub(crate) sign_is_mandatory: bool,
- pub(crate) padding: Padding,
- }
-}
-
-to_tokens! {
- pub(crate) struct OffsetMinute {
- pub(crate) padding: Padding,
- }
-}
-
-to_tokens! {
- pub(crate) struct OffsetSecond {
- pub(crate) padding: Padding,
- }
-}
-
-to_tokens! {
- pub(crate) enum Padding {
- Space,
- Zero,
- None,
- }
-}
-
-macro_rules! impl_default {
- ($($type:ty => $default:expr;)*) => {$(
- impl Default for $type {
- fn default() -> Self {
- $default
- }
- }
- )*};
-}
-
-impl_default! {
- Day => Self { padding: Padding::default() };
- MonthRepr => Self::Numerical;
- Month => Self {
- padding: Padding::default(),
- repr: MonthRepr::default(),
- case_sensitive: true,
- };
- Ordinal => Self { padding: Padding::default() };
- WeekdayRepr => Self::Long;
- Weekday => Self {
- repr: WeekdayRepr::default(),
- one_indexed: true,
- case_sensitive: true,
- };
- WeekNumberRepr => Self::Iso;
- WeekNumber => Self {
- padding: Padding::default(),
- repr: WeekNumberRepr::default(),
- };
- YearRepr => Self::Full;
- Year => Self {
- padding: Padding::default(),
- repr: YearRepr::default(),
- iso_week_based: false,
- sign_is_mandatory: false,
- };
- Hour => Self {
- padding: Padding::default(),
- is_12_hour_clock: false,
- };
- Minute => Self { padding: Padding::default() };
- Period => Self { is_uppercase: true, case_sensitive: true };
- Second => Self { padding: Padding::default() };
- SubsecondDigits => Self::OneOrMore;
- Subsecond => Self { digits: SubsecondDigits::default() };
- OffsetHour => Self {
- sign_is_mandatory: true,
- padding: Padding::default(),
- };
- OffsetMinute => Self { padding: Padding::default() };
- OffsetSecond => Self { padding: Padding::default() };
- Padding => Self::Zero;
-}
-
-#[derive(Default)]
-pub(crate) struct Modifiers {
- pub(crate) padding: Option<Padding>,
- pub(crate) hour_is_12_hour_clock: Option<bool>,
- pub(crate) period_is_uppercase: Option<bool>,
- pub(crate) month_repr: Option<MonthRepr>,
- pub(crate) subsecond_digits: Option<SubsecondDigits>,
- pub(crate) weekday_repr: Option<WeekdayRepr>,
- pub(crate) weekday_is_one_indexed: Option<bool>,
- pub(crate) week_number_repr: Option<WeekNumberRepr>,
- pub(crate) year_repr: Option<YearRepr>,
- pub(crate) year_is_iso_week_based: Option<bool>,
- pub(crate) sign_is_mandatory: Option<bool>,
- pub(crate) case_sensitive: Option<bool>,
-}
-
-impl Modifiers {
- #[allow(clippy::too_many_lines)]
- pub(crate) fn parse(
- component_name: &[u8],
- mut bytes: &[u8],
- index: &mut usize,
- ) -> Result<Self, InvalidFormatDescription> {
- let mut modifiers = Self::default();
-
- while !bytes.is_empty() {
- // Trim any whitespace between modifiers.
- bytes = helper::consume_whitespace(bytes, index);
-
- let modifier;
- if let Some(whitespace_loc) = bytes.iter().position(u8::is_ascii_whitespace) {
- *index += whitespace_loc;
- modifier = &bytes[..whitespace_loc];
- bytes = &bytes[whitespace_loc..];
- } else {
- modifier = mem::take(&mut bytes);
- }
-
- if modifier.is_empty() {
- break;
- }
-
- match (component_name, modifier) {
- (
- b"day" | b"hour" | b"minute" | b"month" | b"offset_hour" | b"offset_minute"
- | b"offset_second" | b"ordinal" | b"second" | b"week_number" | b"year",
- b"padding:space",
- ) => modifiers.padding = Some(Padding::Space),
- (
- b"day" | b"hour" | b"minute" | b"month" | b"offset_hour" | b"offset_minute"
- | b"offset_second" | b"ordinal" | b"second" | b"week_number" | b"year",
- b"padding:zero",
- ) => modifiers.padding = Some(Padding::Zero),
- (
- b"day" | b"hour" | b"minute" | b"month" | b"offset_hour" | b"offset_minute"
- | b"offset_second" | b"ordinal" | b"second" | b"week_number" | b"year",
- b"padding:none",
- ) => modifiers.padding = Some(Padding::None),
- (b"hour", b"repr:24") => modifiers.hour_is_12_hour_clock = Some(false),
- (b"hour", b"repr:12") => modifiers.hour_is_12_hour_clock = Some(true),
- (b"month" | b"period" | b"weekday", b"case_sensitive:true") => {
- modifiers.case_sensitive = Some(true)
- }
- (b"month" | b"period" | b"weekday", b"case_sensitive:false") => {
- modifiers.case_sensitive = Some(false)
- }
- (b"month", b"repr:numerical") => modifiers.month_repr = Some(MonthRepr::Numerical),
- (b"month", b"repr:long") => modifiers.month_repr = Some(MonthRepr::Long),
- (b"month", b"repr:short") => modifiers.month_repr = Some(MonthRepr::Short),
- (b"offset_hour" | b"year", b"sign:automatic") => {
- modifiers.sign_is_mandatory = Some(false);
- }
- (b"offset_hour" | b"year", b"sign:mandatory") => {
- modifiers.sign_is_mandatory = Some(true);
- }
- (b"period", b"case:upper") => modifiers.period_is_uppercase = Some(true),
- (b"period", b"case:lower") => modifiers.period_is_uppercase = Some(false),
- (b"subsecond", b"digits:1") => {
- modifiers.subsecond_digits = Some(SubsecondDigits::One);
- }
- (b"subsecond", b"digits:2") => {
- modifiers.subsecond_digits = Some(SubsecondDigits::Two);
- }
- (b"subsecond", b"digits:3") => {
- modifiers.subsecond_digits = Some(SubsecondDigits::Three);
- }
- (b"subsecond", b"digits:4") => {
- modifiers.subsecond_digits = Some(SubsecondDigits::Four);
- }
- (b"subsecond", b"digits:5") => {
- modifiers.subsecond_digits = Some(SubsecondDigits::Five);
- }
- (b"subsecond", b"digits:6") => {
- modifiers.subsecond_digits = Some(SubsecondDigits::Six);
- }
- (b"subsecond", b"digits:7") => {
- modifiers.subsecond_digits = Some(SubsecondDigits::Seven);
- }
- (b"subsecond", b"digits:8") => {
- modifiers.subsecond_digits = Some(SubsecondDigits::Eight);
- }
- (b"subsecond", b"digits:9") => {
- modifiers.subsecond_digits = Some(SubsecondDigits::Nine);
- }
- (b"subsecond", b"digits:1+") => {
- modifiers.subsecond_digits = Some(SubsecondDigits::OneOrMore);
- }
- (b"weekday", b"repr:short") => modifiers.weekday_repr = Some(WeekdayRepr::Short),
- (b"weekday", b"repr:long") => modifiers.weekday_repr = Some(WeekdayRepr::Long),
- (b"weekday", b"repr:sunday") => modifiers.weekday_repr = Some(WeekdayRepr::Sunday),
- (b"weekday", b"repr:monday") => modifiers.weekday_repr = Some(WeekdayRepr::Monday),
- (b"weekday", b"one_indexed:true") => modifiers.weekday_is_one_indexed = Some(true),
- (b"weekday", b"one_indexed:false") => {
- modifiers.weekday_is_one_indexed = Some(false);
- }
- (b"week_number", b"repr:iso") => {
- modifiers.week_number_repr = Some(WeekNumberRepr::Iso);
- }
- (b"week_number", b"repr:sunday") => {
- modifiers.week_number_repr = Some(WeekNumberRepr::Sunday);
- }
- (b"week_number", b"repr:monday") => {
- modifiers.week_number_repr = Some(WeekNumberRepr::Monday);
- }
- (b"year", b"repr:full") => modifiers.year_repr = Some(YearRepr::Full),
- (b"year", b"repr:last_two") => modifiers.year_repr = Some(YearRepr::LastTwo),
- (b"year", b"base:calendar") => modifiers.year_is_iso_week_based = Some(false),
- (b"year", b"base:iso_week") => modifiers.year_is_iso_week_based = Some(true),
- _ => {
- return Err(InvalidFormatDescription::InvalidModifier {
- value: String::from_utf8_lossy(modifier).into_owned(),
- index: *index,
- });
- }
- }
- }
-
- Ok(modifiers)
- }
-}
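Context for the deletion above: modifier.rs parsed whitespace-separated `key:value` modifier strings (such as `padding:zero` or `repr:12`) attached to each bracketed component. A minimal standalone sketch of that parsing scheme follows; the names `Padding` and `parse_padding` are illustrative only, not the crate's internal API.

// Standalone sketch of the "key:value" modifier parsing scheme used above.
// `Padding` and `parse_padding` are illustrative names, not time-macros items.
#[derive(Debug)]
enum Padding { Space, Zero, None }

fn parse_padding(modifier: &str) -> Option<Padding> {
    match modifier {
        "padding:space" => Some(Padding::Space),
        "padding:zero" => Some(Padding::Zero),
        "padding:none" => Some(Padding::None),
        _ => None, // unknown modifiers are rejected, mirroring InvalidModifier above
    }
}

fn main() {
    // Modifiers are whitespace-separated, as in "[hour repr:12 padding:zero]".
    for m in "repr:12 padding:zero".split_ascii_whitespace() {
        println!("{m} -> {:?}", parse_padding(m));
    }
}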
diff --git a/vendor/time-macros/src/format_description/parse.rs b/vendor/time-macros/src/format_description/parse.rs
deleted file mode 100644
index 19c7bf608..000000000
--- a/vendor/time-macros/src/format_description/parse.rs
+++ /dev/null
@@ -1,84 +0,0 @@
-use proc_macro::Span;
-
-use crate::format_description::component::{Component, NakedComponent};
-use crate::format_description::error::InvalidFormatDescription;
-use crate::format_description::{helper, modifier, FormatItem};
-use crate::Error;
-
-struct ParsedItem<'a> {
- item: FormatItem<'a>,
- remaining: &'a [u8],
-}
-
-fn parse_component(mut s: &[u8], index: &mut usize) -> Result<Component, InvalidFormatDescription> {
- s = helper::consume_whitespace(s, index);
-
- let component_index = *index;
- let whitespace_loc = s
- .iter()
- .position(u8::is_ascii_whitespace)
- .unwrap_or(s.len());
- *index += whitespace_loc;
- let component_name = &s[..whitespace_loc];
- s = &s[whitespace_loc..];
- s = helper::consume_whitespace(s, index);
-
- Ok(NakedComponent::parse(component_name, component_index)?
- .attach_modifiers(modifier::Modifiers::parse(component_name, s, index)?))
-}
-
-fn parse_literal<'a>(s: &'a [u8], index: &mut usize) -> ParsedItem<'a> {
- let loc = s.iter().position(|&c| c == b'[').unwrap_or(s.len());
- *index += loc;
- ParsedItem {
- item: FormatItem::Literal(&s[..loc]),
- remaining: &s[loc..],
- }
-}
-
-fn parse_item<'a>(
- s: &'a [u8],
- index: &mut usize,
-) -> Result<ParsedItem<'a>, InvalidFormatDescription> {
- if let [b'[', b'[', remaining @ ..] = s {
- *index += 2;
- return Ok(ParsedItem {
- item: FormatItem::Literal(b"["),
- remaining,
- });
- };
-
- if s.starts_with(b"[") {
- if let Some(bracket_index) = s.iter().position(|&c| c == b']') {
- *index += 1; // opening bracket
- let ret_val = ParsedItem {
- item: FormatItem::Component(parse_component(&s[1..bracket_index], index)?),
- remaining: &s[bracket_index + 1..],
- };
- *index += 1; // closing bracket
- Ok(ret_val)
- } else {
- Err(InvalidFormatDescription::UnclosedOpeningBracket { index: *index })
- }
- } else {
- Ok(parse_literal(s, index))
- }
-}
-
-pub(crate) fn parse(mut s: &[u8], span: Span) -> Result<Vec<FormatItem<'_>>, Error> {
- let mut compound = Vec::new();
- let mut loc = 0;
-
- while !s.is_empty() {
- let ParsedItem { item, remaining } =
- parse_item(s, &mut loc).map_err(|error| Error::InvalidFormatDescription {
- error,
- span_start: Some(span),
- span_end: Some(span),
- })?;
- s = remaining;
- compound.push(item);
- }
-
- Ok(compound)
-}
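Context for the deletion above: parse.rs split a format string into literal runs and bracketed `[component ...]` items, with `[[` escaping a literal bracket and an unmatched `[` reported as an error. A minimal standalone sketch of that tokenisation follows; `Item` and `tokenize` are illustrative names, not the crate's FormatItem machinery.

// Standalone sketch of the literal/component split performed by the deleted parser.
#[derive(Debug)]
enum Item<'a> { Literal(&'a str), Component(&'a str) }

fn tokenize(mut s: &str) -> Result<Vec<Item<'_>>, String> {
    let mut items = Vec::new();
    while !s.is_empty() {
        if let Some(rest) = s.strip_prefix("[[") {
            items.push(Item::Literal("[")); // "[[" escapes a literal bracket
            s = rest;
        } else if let Some(rest) = s.strip_prefix('[') {
            let end = rest.find(']').ok_or("unclosed opening bracket")?;
            items.push(Item::Component(&rest[..end]));
            s = &rest[end + 1..];
        } else {
            let end = s.find('[').unwrap_or(s.len());
            items.push(Item::Literal(&s[..end]));
            s = &s[end..];
        }
    }
    Ok(items)
}

fn main() {
    println!("{:?}", tokenize("[year]-[month repr:short] [[literal"));
}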
diff --git a/vendor/time-macros/src/helpers/mod.rs b/vendor/time-macros/src/helpers/mod.rs
deleted file mode 100644
index cbf3ba3ed..000000000
--- a/vendor/time-macros/src/helpers/mod.rs
+++ /dev/null
@@ -1,129 +0,0 @@
-#[cfg(any(feature = "formatting", feature = "parsing"))]
-mod string;
-
-use std::iter::Peekable;
-use std::str::FromStr;
-
-#[cfg(any(feature = "formatting", feature = "parsing"))]
-use proc_macro::TokenStream;
-use proc_macro::{token_stream, Span, TokenTree};
-use time_core::util::{days_in_year, is_leap_year};
-
-use crate::Error;
-
-#[cfg(any(feature = "formatting", feature = "parsing"))]
-pub(crate) fn get_string_literal(tokens: TokenStream) -> Result<(Span, Vec<u8>), Error> {
- let mut tokens = tokens.into_iter();
-
- match (tokens.next(), tokens.next()) {
- (Some(TokenTree::Literal(literal)), None) => string::parse(&literal),
- (Some(tree), None) => Err(Error::ExpectedString {
- span_start: Some(tree.span()),
- span_end: Some(tree.span()),
- }),
- (_, Some(tree)) => Err(Error::UnexpectedToken { tree }),
- (None, None) => Err(Error::ExpectedString {
- span_start: None,
- span_end: None,
- }),
- }
-}
-
-pub(crate) fn consume_number<T: FromStr>(
- component_name: &'static str,
- chars: &mut Peekable<token_stream::IntoIter>,
-) -> Result<(Span, T), Error> {
- let (span, digits) = match chars.next() {
- Some(TokenTree::Literal(literal)) => (literal.span(), literal.to_string()),
- Some(tree) => return Err(Error::UnexpectedToken { tree }),
- None => return Err(Error::UnexpectedEndOfInput),
- };
-
- if let Ok(value) = digits.replace('_', "").parse() {
- Ok((span, value))
- } else {
- Err(Error::InvalidComponent {
- name: component_name,
- value: digits,
- span_start: Some(span),
- span_end: Some(span),
- })
- }
-}
-
-pub(crate) fn consume_any_ident(
- idents: &[&str],
- chars: &mut Peekable<token_stream::IntoIter>,
-) -> Result<Span, Error> {
- match chars.peek() {
- Some(TokenTree::Ident(char)) if idents.contains(&char.to_string().as_str()) => {
- let ret = Ok(char.span());
- drop(chars.next());
- ret
- }
- Some(tree) => Err(Error::UnexpectedToken { tree: tree.clone() }),
- None => Err(Error::UnexpectedEndOfInput),
- }
-}
-
-pub(crate) fn consume_punct(
- c: char,
- chars: &mut Peekable<token_stream::IntoIter>,
-) -> Result<Span, Error> {
- match chars.peek() {
- Some(TokenTree::Punct(punct)) if *punct == c => {
- let ret = Ok(punct.span());
- drop(chars.next());
- ret
- }
- Some(tree) => Err(Error::UnexpectedToken { tree: tree.clone() }),
- None => Err(Error::UnexpectedEndOfInput),
- }
-}
-
-fn jan_weekday(year: i32, ordinal: i32) -> u8 {
- macro_rules! div_floor {
- ($a:expr, $b:expr) => {{
- let (_quotient, _remainder) = ($a / $b, $a % $b);
- if (_remainder > 0 && $b < 0) || (_remainder < 0 && $b > 0) {
- _quotient - 1
- } else {
- _quotient
- }
- }};
- }
-
- let adj_year = year - 1;
- ((ordinal + adj_year + div_floor!(adj_year, 4) - div_floor!(adj_year, 100)
- + div_floor!(adj_year, 400)
- + 6)
- .rem_euclid(7)) as _
-}
-
-pub(crate) fn days_in_year_month(year: i32, month: u8) -> u8 {
- [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31][month as usize - 1]
- + (month == 2 && is_leap_year(year)) as u8
-}
-
-pub(crate) fn ywd_to_yo(year: i32, week: u8, iso_weekday_number: u8) -> (i32, u16) {
- let (ordinal, overflow) = (u16::from(week) * 7 + u16::from(iso_weekday_number))
- .overflowing_sub(u16::from(jan_weekday(year, 4)) + 4);
-
- if overflow || ordinal == 0 {
- return (year - 1, (ordinal.wrapping_add(days_in_year(year - 1))));
- }
-
- let days_in_cur_year = days_in_year(year);
- if ordinal > days_in_cur_year {
- (year + 1, ordinal - days_in_cur_year)
- } else {
- (year, ordinal)
- }
-}
-
-pub(crate) fn ymd_to_yo(year: i32, month: u8, day: u8) -> (i32, u16) {
- let ordinal = [0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334][month as usize - 1]
- + (month > 2 && is_leap_year(year)) as u16;
-
- (year, ordinal + u16::from(day))
-}
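Context for the deletion above: the helpers computed ordinal dates using the standard Gregorian leap-year rule. A minimal standalone sketch of the month-length logic follows (the leap-year rule is standard; the function names here shadow the deleted helpers only for illustration).

// Standalone sketch of the month-length calculation used above.
// The leap-year rule is the standard Gregorian one: divisible by 4,
// except centuries unless divisible by 400.
fn is_leap_year(year: i32) -> bool {
    year % 4 == 0 && (year % 100 != 0 || year % 400 == 0)
}

fn days_in_year_month(year: i32, month: u8) -> u8 {
    const DAYS: [u8; 12] = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31];
    DAYS[month as usize - 1] + (month == 2 && is_leap_year(year)) as u8
}

fn main() {
    assert_eq!(days_in_year_month(2000, 2), 29); // 2000 is a leap year
    assert_eq!(days_in_year_month(1900, 2), 28); // 1900 is not
    assert_eq!(days_in_year_month(2023, 12), 31);
    println!("ok");
}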
diff --git a/vendor/time-macros/src/helpers/string.rs b/vendor/time-macros/src/helpers/string.rs
deleted file mode 100644
index fa3780f5e..000000000
--- a/vendor/time-macros/src/helpers/string.rs
+++ /dev/null
@@ -1,188 +0,0 @@
-use std::ops::{Index, RangeFrom};
-
-use proc_macro::Span;
-
-use crate::Error;
-
-pub(crate) fn parse(token: &proc_macro::Literal) -> Result<(Span, Vec<u8>), Error> {
- let span = token.span();
- let repr = token.to_string();
-
- match repr.as_bytes() {
- [b'"', ..] => Ok((span, parse_lit_str_cooked(&repr[1..]))),
- [b'b', b'"', rest @ ..] => Ok((span, parse_lit_byte_str_cooked(rest))),
- [b'r', rest @ ..] | [b'b', b'r', rest @ ..] => Ok((span, parse_lit_str_raw(rest))),
- _ => Err(Error::ExpectedString {
- span_start: Some(span),
- span_end: Some(span),
- }),
- }
-}
-
-fn byte(s: impl AsRef<[u8]>, idx: usize) -> u8 {
- s.as_ref().get(idx).copied().unwrap_or_default()
-}
-
-fn parse_lit_str_cooked(mut s: &str) -> Vec<u8> {
- let mut content = String::new();
- 'outer: loop {
- let ch = match byte(s, 0) {
- b'"' => break,
- b'\\' => {
- let b = byte(s, 1);
- s = &s[2..];
- match b {
- b'x' => {
- let (byte, rest) = backslash_x(s);
- s = rest;
- char::from_u32(u32::from(byte)).expect("byte was just validated")
- }
- b'u' => {
- let (chr, rest) = backslash_u(s);
- s = rest;
- chr
- }
- b'n' => '\n',
- b'r' => '\r',
- b't' => '\t',
- b'\\' => '\\',
- b'0' => '\0',
- b'\'' => '\'',
- b'"' => '"',
- b'\r' | b'\n' => loop {
- let ch = s.chars().next().unwrap_or_default();
- if ch.is_whitespace() {
- s = &s[ch.len_utf8()..];
- } else {
- continue 'outer;
- }
- },
- _ => unreachable!("invalid escape"),
- }
- }
- b'\r' => {
- // bare CR not permitted
- s = &s[2..];
- '\n'
- }
- _ => {
- let ch = s.chars().next().unwrap_or_default();
- s = &s[ch.len_utf8()..];
- ch
- }
- };
- content.push(ch);
- }
-
- content.into_bytes()
-}
-
-fn parse_lit_str_raw(s: &[u8]) -> Vec<u8> {
- let mut pounds = 0;
- while byte(s, pounds) == b'#' {
- pounds += 1;
- }
- let close = s
- .iter()
- .rposition(|&b| b == b'"')
- .expect("had a string without trailing \"");
-
- s[pounds + 1..close].to_owned()
-}
-
-fn parse_lit_byte_str_cooked(mut v: &[u8]) -> Vec<u8> {
- let mut out = Vec::new();
- 'outer: loop {
- let byte = match byte(v, 0) {
- b'"' => break,
- b'\\' => {
- let b = byte(v, 1);
- v = &v[2..];
- match b {
- b'x' => {
- let (byte, rest) = backslash_x(v);
- v = rest;
- byte
- }
- b'n' => b'\n',
- b'r' => b'\r',
- b't' => b'\t',
- b'\\' => b'\\',
- b'0' => b'\0',
- b'\'' => b'\'',
- b'"' => b'"',
- b'\r' | b'\n' => loop {
- let byte = byte(v, 0);
- let ch = char::from_u32(u32::from(byte)).expect("invalid byte");
- if ch.is_whitespace() {
- v = &v[1..];
- } else {
- continue 'outer;
- }
- },
- _ => unreachable!("invalid escape"),
- }
- }
- b'\r' => {
- // bare CR not permitted
- v = &v[2..];
- b'\n'
- }
- b => {
- v = &v[1..];
- b
- }
- };
- out.push(byte);
- }
-
- out
-}
-
-fn backslash_x<S>(s: &S) -> (u8, &S)
-where
- S: Index<RangeFrom<usize>, Output = S> + AsRef<[u8]> + ?Sized,
-{
- let mut ch = 0;
- let b0 = byte(s, 0);
- let b1 = byte(s, 1);
- ch += 0x10 * (b0 - b'0');
- ch += match b1 {
- b'0'..=b'9' => b1 - b'0',
- b'a'..=b'f' => 10 + (b1 - b'a'),
- b'A'..=b'F' => 10 + (b1 - b'A'),
- _ => unreachable!("invalid hex escape"),
- };
- (ch, &s[2..])
-}
-
-fn backslash_u(mut s: &str) -> (char, &str) {
- s = &s[1..];
-
- let mut ch = 0;
- let mut digits = 0;
- loop {
- let b = byte(s, 0);
- let digit = match b {
- b'0'..=b'9' => b - b'0',
- b'a'..=b'f' => 10 + b - b'a',
- b'A'..=b'F' => 10 + b - b'A',
- b'_' if digits > 0 => {
- s = &s[1..];
- continue;
- }
- b'}' if digits != 0 => break,
- _ => unreachable!("invalid unicode escape"),
- };
- ch *= 0x10;
- ch += u32::from(digit);
- digits += 1;
- s = &s[1..];
- }
- s = &s[1..];
-
- (
- char::from_u32(ch).expect("invalid unicode escape passed by compiler"),
- s,
- )
-}
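Context for the deletion above: string.rs re-implemented cooked and raw string-literal parsing, including `\xNN` hex escapes. A minimal standalone sketch of the hex-escape decoding step follows; unlike the deleted code, it returns `None` rather than panicking, since this sketch has no compiler guarantee that the input is a valid escape.

// Standalone sketch of the "\xNN" decoding performed by backslash_x above.
fn decode_hex_escape(s: &[u8]) -> Option<(u8, &[u8])> {
    fn hex_digit(b: u8) -> Option<u8> {
        match b {
            b'0'..=b'9' => Some(b - b'0'),
            b'a'..=b'f' => Some(10 + b - b'a'),
            b'A'..=b'F' => Some(10 + b - b'A'),
            _ => None,
        }
    }
    let hi = hex_digit(*s.first()?)?;
    let lo = hex_digit(*s.get(1)?)?;
    Some((hi * 16 + lo, &s[2..]))
}

fn main() {
    // The two bytes after a "\x" escape form one output byte.
    assert_eq!(decode_hex_escape(b"41rest"), Some((0x41, &b"rest"[..])));
    println!("ok");
}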
diff --git a/vendor/time-macros/src/lib.rs b/vendor/time-macros/src/lib.rs
deleted file mode 100644
index 1afc313ea..000000000
--- a/vendor/time-macros/src/lib.rs
+++ /dev/null
@@ -1,167 +0,0 @@
-#![deny(
- anonymous_parameters,
- clippy::all,
- const_err,
- illegal_floating_point_literal_pattern,
- late_bound_lifetime_arguments,
- path_statements,
- patterns_in_fns_without_body,
- rust_2018_idioms,
- trivial_casts,
- trivial_numeric_casts,
- unreachable_pub,
- unsafe_code,
- unused_extern_crates
-)]
-#![warn(
- clippy::dbg_macro,
- clippy::decimal_literal_representation,
- clippy::get_unwrap,
- clippy::nursery,
- clippy::print_stdout,
- clippy::todo,
- clippy::unimplemented,
- clippy::unnested_or_patterns,
- clippy::unwrap_used,
- clippy::use_debug,
- single_use_lifetimes,
- unused_qualifications,
- variant_size_differences
-)]
-#![allow(
- clippy::missing_const_for_fn, // useless in proc macro
- clippy::redundant_pub_crate, // suggests bad style
- clippy::option_if_let_else, // suggests terrible code
-)]
-
-#[macro_use]
-mod quote;
-
-mod date;
-mod datetime;
-mod error;
-#[cfg(any(feature = "formatting", feature = "parsing"))]
-mod format_description;
-mod helpers;
-mod offset;
-#[cfg(all(feature = "serde", any(feature = "formatting", feature = "parsing")))]
-mod serde_format_description;
-mod time;
-mod to_tokens;
-
-use proc_macro::TokenStream;
-#[cfg(all(feature = "serde", any(feature = "formatting", feature = "parsing")))]
-use proc_macro::TokenTree;
-
-use self::error::Error;
-
-macro_rules! impl_macros {
- ($($name:ident)*) => {$(
- #[proc_macro]
- pub fn $name(input: TokenStream) -> TokenStream {
- use crate::to_tokens::ToTokenTree;
-
- let mut iter = input.into_iter().peekable();
- match $name::parse(&mut iter) {
- Ok(value) => match iter.peek() {
- Some(tree) => Error::UnexpectedToken { tree: tree.clone() }.to_compile_error(),
- None => TokenStream::from(value.into_token_tree()),
- },
- Err(err) => err.to_compile_error(),
- }
- }
- )*};
-}
-
-impl_macros![date datetime offset time];
-
-#[cfg(any(feature = "formatting", feature = "parsing"))]
-#[proc_macro]
-pub fn format_description(input: TokenStream) -> TokenStream {
- (|| {
- let (span, string) = helpers::get_string_literal(input)?;
- let items = format_description::parse(&string, span)?;
-
- Ok(quote! {{
- const DESCRIPTION: &[::time::format_description::FormatItem<'_>] = &[#S(
- items
- .into_iter()
- .map(|item| quote! { #S(item), })
- .collect::<TokenStream>()
- )];
- DESCRIPTION
- }})
- })()
- .unwrap_or_else(|err: Error| err.to_compile_error())
-}
-
-#[cfg(all(feature = "serde", any(feature = "formatting", feature = "parsing")))]
-#[proc_macro]
-pub fn serde_format_description(input: TokenStream) -> TokenStream {
- (|| {
- let mut tokens = input.into_iter().peekable();
- // First, an identifier (the desired module name)
- let mod_name = match tokens.next() {
- Some(TokenTree::Ident(ident)) => Ok(ident),
- Some(tree) => Err(Error::UnexpectedToken { tree }),
- None => Err(Error::UnexpectedEndOfInput),
- }?;
-
- // Followed by a comma
- helpers::consume_punct(',', &mut tokens)?;
-
- // Then, the type to create serde serializers for (e.g., `OffsetDateTime`).
- let formattable = match tokens.next() {
- Some(tree @ TokenTree::Ident(_)) => Ok(tree),
- Some(tree) => Err(Error::UnexpectedToken { tree }),
- None => Err(Error::UnexpectedEndOfInput),
- }?;
-
- // Another comma
- helpers::consume_punct(',', &mut tokens)?;
-
- // We now have two options. The user can either provide a format description as a string or
- // they can provide a path to a format description. If the latter, all remaining tokens are
- // assumed to be part of the path.
- let (format, raw_format_string) = match tokens.peek() {
- // string literal
- Some(TokenTree::Literal(_)) => {
- let (span, format_string) = helpers::get_string_literal(tokens.collect())?;
- let items = format_description::parse(&format_string, span)?;
- let items: TokenStream =
- items.into_iter().map(|item| quote! { #S(item), }).collect();
- let items = quote! { &[#S(items)] };
-
- (
- items,
- Some(String::from_utf8_lossy(&format_string).into_owned()),
- )
- }
- // path
- Some(_) => (
- quote! {{
- // We can't just do `super::path` because the path could be an absolute path. In
- // that case, we'd be generating `super::::path`, which is invalid. Even if we
- // took that into account, it's not possible to know if it's an external crate,
- // which would just require emitting `path` directly. By taking this approach,
- // we can leave it to the compiler to do the actual resolution.
- mod __path_hack {
- pub(super) use super::super::*;
- pub(super) use #S(tokens.collect::<TokenStream>()) as FORMAT;
- }
- __path_hack::FORMAT
- }},
- None,
- ),
- None => return Err(Error::UnexpectedEndOfInput),
- };
-
- Ok(serde_format_description::build(
- mod_name,
- formattable,
- format,
- raw_format_string,
- ))
- })()
- .unwrap_or_else(|err: Error| err.to_compile_error_standalone())
-}
diff --git a/vendor/time-macros/src/offset.rs b/vendor/time-macros/src/offset.rs
deleted file mode 100644
index c2099073f..000000000
--- a/vendor/time-macros/src/offset.rs
+++ /dev/null
@@ -1,95 +0,0 @@
-use std::iter::Peekable;
-
-use proc_macro::{token_stream, Span, TokenTree};
-
-use crate::helpers::{consume_any_ident, consume_number, consume_punct};
-use crate::to_tokens::ToTokenTree;
-use crate::Error;
-
-pub(crate) struct Offset {
- pub(crate) hours: i8,
- pub(crate) minutes: i8,
- pub(crate) seconds: i8,
-}
-
-pub(crate) fn parse(chars: &mut Peekable<token_stream::IntoIter>) -> Result<Offset, Error> {
- if consume_any_ident(&["utc", "UTC"], chars).is_ok() {
- return Ok(Offset {
- hours: 0,
- minutes: 0,
- seconds: 0,
- });
- }
-
- let sign = if consume_punct('+', chars).is_ok() {
- 1
- } else if consume_punct('-', chars).is_ok() {
- -1
- } else if let Some(tree) = chars.next() {
- return Err(Error::UnexpectedToken { tree });
- } else {
- return Err(Error::MissingComponent {
- name: "sign",
- span_start: None,
- span_end: None,
- });
- };
-
- let (hours_span, hours) = consume_number::<i8>("hour", chars)?;
- let (mut minutes_span, mut minutes) = (Span::mixed_site(), 0);
- let (mut seconds_span, mut seconds) = (Span::mixed_site(), 0);
-
- if consume_punct(':', chars).is_ok() {
- let min = consume_number::<i8>("minute", chars)?;
- minutes_span = min.0;
- minutes = min.1;
-
- if consume_punct(':', chars).is_ok() {
- let sec = consume_number::<i8>("second", chars)?;
- seconds_span = sec.0;
- seconds = sec.1;
- }
- }
-
- if hours >= 24 {
- Err(Error::InvalidComponent {
- name: "hour",
- value: hours.to_string(),
- span_start: Some(hours_span),
- span_end: Some(hours_span),
- })
- } else if minutes >= 60 {
- Err(Error::InvalidComponent {
- name: "minute",
- value: minutes.to_string(),
- span_start: Some(minutes_span),
- span_end: Some(minutes_span),
- })
- } else if seconds >= 60 {
- Err(Error::InvalidComponent {
- name: "second",
- value: seconds.to_string(),
- span_start: Some(seconds_span),
- span_end: Some(seconds_span),
- })
- } else {
- Ok(Offset {
- hours: sign * hours,
- minutes: sign * minutes,
- seconds: sign * seconds,
- })
- }
-}
-
-impl ToTokenTree for Offset {
- fn into_token_tree(self) -> TokenTree {
- quote_group! {{
- const OFFSET: ::time::UtcOffset = ::time::UtcOffset::__from_hms_unchecked(
- #(self.hours),
- #(self.minutes),
- #(self.seconds),
- );
- OFFSET
- }}
- }
-}
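Context for the deletion above: offset.rs applied the parsed sign uniformly to the hour, minute, and second components and rejected out-of-range values. A minimal standalone sketch of that validation follows; `build_offset` is an illustrative name, not the macro's API.

// Standalone sketch of the range checks and sign handling in the deleted offset parser.
fn build_offset(sign: i8, hours: i8, minutes: i8, seconds: i8) -> Result<(i8, i8, i8), String> {
    if hours >= 24 {
        Err(format!("invalid hour: {hours}"))
    } else if minutes >= 60 {
        Err(format!("invalid minute: {minutes}"))
    } else if seconds >= 60 {
        Err(format!("invalid second: {seconds}"))
    } else {
        // The sign is applied uniformly to every component, as above.
        Ok((sign * hours, sign * minutes, sign * seconds))
    }
}

fn main() {
    assert_eq!(build_offset(-1, 5, 30, 0), Ok((-5, -30, 0)));
    assert!(build_offset(1, 24, 0, 0).is_err());
    println!("ok");
}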
diff --git a/vendor/time-macros/src/quote.rs b/vendor/time-macros/src/quote.rs
deleted file mode 100644
index 2fe86cc98..000000000
--- a/vendor/time-macros/src/quote.rs
+++ /dev/null
@@ -1,134 +0,0 @@
-macro_rules! quote {
- () => (::proc_macro::TokenStream::new());
- ($($x:tt)*) => {{
- let mut ts = ::proc_macro::TokenStream::new();
- let ts_mut = &mut ts;
- quote_inner!(ts_mut $($x)*);
- ts
- }};
-}
-
-#[cfg(any(feature = "formatting", feature = "parsing"))]
-macro_rules! quote_append {
- ($ts:ident $($x:tt)*) => {{
- quote_inner!($ts $($x)*);
- }};
-}
-
-macro_rules! quote_group {
- ({ $($x:tt)* }) => {
- ::proc_macro::TokenTree::Group(::proc_macro::Group::new(
- ::proc_macro::Delimiter::Brace,
- quote!($($x)*)
- ))
- };
-}
-
-macro_rules! sym {
- ($ts:ident $x:tt $y:tt) => {
- $ts.extend([
- ::proc_macro::TokenTree::from(::proc_macro::Punct::new(
- $x,
- ::proc_macro::Spacing::Joint,
- )),
- ::proc_macro::TokenTree::from(::proc_macro::Punct::new(
- $y,
- ::proc_macro::Spacing::Alone,
- )),
- ]);
- };
- ($ts:ident $x:tt) => {
- $ts.extend([::proc_macro::TokenTree::from(::proc_macro::Punct::new(
- $x,
- ::proc_macro::Spacing::Alone,
- ))]);
- };
-}
-
-macro_rules! quote_inner {
- // Base case
- ($ts:ident) => {};
-
- // Single or double symbols
- ($ts:ident :: $($tail:tt)*) => { sym!($ts ':' ':'); quote_inner!($ts $($tail)*); };
- ($ts:ident .. $($tail:tt)*) => { sym!($ts '.' '.'); quote_inner!($ts $($tail)*); };
- ($ts:ident : $($tail:tt)*) => { sym!($ts ':'); quote_inner!($ts $($tail)*); };
- ($ts:ident = $($tail:tt)*) => { sym!($ts '='); quote_inner!($ts $($tail)*); };
- ($ts:ident ; $($tail:tt)*) => { sym!($ts ';'); quote_inner!($ts $($tail)*); };
- ($ts:ident , $($tail:tt)*) => { sym!($ts ','); quote_inner!($ts $($tail)*); };
- ($ts:ident . $($tail:tt)*) => { sym!($ts '.'); quote_inner!($ts $($tail)*); };
- ($ts:ident & $($tail:tt)*) => { sym!($ts '&'); quote_inner!($ts $($tail)*); };
- ($ts:ident << $($tail:tt)*) => { sym!($ts '<' '<'); quote_inner!($ts $($tail)*); };
- ($ts:ident < $($tail:tt)*) => { sym!($ts '<'); quote_inner!($ts $($tail)*); };
- ($ts:ident >> $($tail:tt)*) => { sym!($ts '>' '>'); quote_inner!($ts $($tail)*); };
- ($ts:ident > $($tail:tt)*) => { sym!($ts '>'); quote_inner!($ts $($tail)*); };
- ($ts:ident -> $($tail:tt)*) => { sym!($ts '-' '>'); quote_inner!($ts $($tail)*); };
- ($ts:ident ? $($tail:tt)*) => { sym!($ts '?'); quote_inner!($ts $($tail)*); };
- ($ts:ident ! $($tail:tt)*) => { sym!($ts '!'); quote_inner!($ts $($tail)*); };
- ($ts:ident | $($tail:tt)*) => { sym!($ts '|'); quote_inner!($ts $($tail)*); };
- ($ts:ident * $($tail:tt)*) => { sym!($ts '*'); quote_inner!($ts $($tail)*); };
-
- // Identifier
- ($ts:ident $i:ident $($tail:tt)*) => {
- $ts.extend([::proc_macro::TokenTree::from(::proc_macro::Ident::new(
- &stringify!($i),
- ::proc_macro::Span::mixed_site(),
- ))]);
- quote_inner!($ts $($tail)*);
- };
-
- // Literal
- ($ts:ident $l:literal $($tail:tt)*) => {
- $ts.extend([::proc_macro::TokenTree::from(::proc_macro::Literal::string(&$l))]);
- quote_inner!($ts $($tail)*);
- };
-
- // Lifetime
- ($ts:ident $l:lifetime $($tail:tt)*) => {
- $ts.extend([
- ::proc_macro::TokenTree::from(
- ::proc_macro::Punct::new('\'', ::proc_macro::Spacing::Joint)
- ),
- ::proc_macro::TokenTree::from(::proc_macro::Ident::new(
- stringify!($l).trim_start_matches(|c| c == '\''),
- ::proc_macro::Span::mixed_site(),
- )),
- ]);
- quote_inner!($ts $($tail)*);
- };
-
- // Groups
- ($ts:ident ($($inner:tt)*) $($tail:tt)*) => {
- $ts.extend([::proc_macro::TokenTree::Group(::proc_macro::Group::new(
- ::proc_macro::Delimiter::Parenthesis,
- quote!($($inner)*)
- ))]);
- quote_inner!($ts $($tail)*);
- };
- ($ts:ident [$($inner:tt)*] $($tail:tt)*) => {
- $ts.extend([::proc_macro::TokenTree::Group(::proc_macro::Group::new(
- ::proc_macro::Delimiter::Bracket,
- quote!($($inner)*)
- ))]);
- quote_inner!($ts $($tail)*);
- };
- ($ts:ident {$($inner:tt)*} $($tail:tt)*) => {
- $ts.extend([::proc_macro::TokenTree::Group(::proc_macro::Group::new(
- ::proc_macro::Delimiter::Brace,
- quote!($($inner)*)
- ))]);
- quote_inner!($ts $($tail)*);
- };
-
- // Interpolated values
- // TokenTree by default
- ($ts:ident #($e:expr) $($tail:tt)*) => {
- $ts.extend([$crate::to_tokens::ToTokenTree::into_token_tree($e)]);
- quote_inner!($ts $($tail)*);
- };
- // Allow a TokenStream by request. It's more expensive, so avoid if possible.
- ($ts:ident #S($e:expr) $($tail:tt)*) => {
- $crate::to_tokens::ToTokenStream::append_to($e, $ts);
- quote_inner!($ts $($tail)*);
- };
-}
diff --git a/vendor/time-macros/src/serde_format_description.rs b/vendor/time-macros/src/serde_format_description.rs
deleted file mode 100644
index c09a4e9e2..000000000
--- a/vendor/time-macros/src/serde_format_description.rs
+++ /dev/null
@@ -1,163 +0,0 @@
-use proc_macro::{Ident, TokenStream, TokenTree};
-
-pub(crate) fn build(
- mod_name: Ident,
- ty: TokenTree,
- format: TokenStream,
- raw_format_string: Option<String>,
-) -> TokenStream {
- let ty_s = &*ty.to_string();
-
- let format_description_display = raw_format_string.unwrap_or_else(|| format.to_string());
-
- let visitor = if cfg!(feature = "parsing") {
- quote! {
- struct Visitor;
- struct OptionVisitor;
-
- impl<'a> ::serde::de::Visitor<'a> for Visitor {
- type Value = __TimeSerdeType;
-
- fn expecting(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
- write!(
- f,
- concat!(
- "a(n) `",
- #(ty_s),
- "` in the format \"{}\"",
- ),
- #(format_description_display.as_str())
- )
- }
-
- fn visit_str<E: ::serde::de::Error>(
- self,
- value: &str
- ) -> Result<__TimeSerdeType, E> {
- __TimeSerdeType::parse(value, &DESCRIPTION).map_err(E::custom)
- }
- }
-
- impl<'a> ::serde::de::Visitor<'a> for OptionVisitor {
- type Value = Option<__TimeSerdeType>;
-
- fn expecting(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
- write!(
- f,
- concat!(
- "an `Option<",
- #(ty_s),
- ">` in the format \"{}\"",
- ),
- #(format_description_display.as_str())
- )
- }
-
- fn visit_some<D: ::serde::de::Deserializer<'a>>(
- self,
- deserializer: D
- ) -> Result<Option<__TimeSerdeType>, D::Error> {
- deserializer
- .deserialize_any(Visitor)
- .map(Some)
- }
-
- fn visit_none<E: ::serde::de::Error>(
- self
- ) -> Result<Option<__TimeSerdeType>, E> {
- Ok(None)
- }
- }
- }
- } else {
- quote!()
- };
-
- let serialize_primary = if cfg!(feature = "formatting") {
- quote! {
- pub fn serialize<S: ::serde::Serializer>(
- datetime: &__TimeSerdeType,
- serializer: S,
- ) -> Result<S::Ok, S::Error> {
- use ::serde::Serialize;
- datetime
- .format(&DESCRIPTION)
- .map_err(::time::error::Format::into_invalid_serde_value::<S>)?
- .serialize(serializer)
- }
- }
- } else {
- quote!()
- };
-
- let deserialize_primary = if cfg!(feature = "parsing") {
- quote! {
- pub fn deserialize<'a, D: ::serde::Deserializer<'a>>(
- deserializer: D
- ) -> Result<__TimeSerdeType, D::Error> {
- use ::serde::Deserialize;
- deserializer.deserialize_any(Visitor)
- }
- }
- } else {
- quote!()
- };
-
- let serialize_option = if cfg!(feature = "formatting") {
- quote! {
- pub fn serialize<S: ::serde::Serializer>(
- option: &Option<__TimeSerdeType>,
- serializer: S,
- ) -> Result<S::Ok, S::Error> {
- use ::serde::Serialize;
- option.map(|datetime| datetime.format(&DESCRIPTION))
- .transpose()
- .map_err(::time::error::Format::into_invalid_serde_value::<S>)?
- .serialize(serializer)
- }
- }
- } else {
- quote!()
- };
-
- let deserialize_option = if cfg!(feature = "parsing") {
- quote! {
- pub fn deserialize<'a, D: ::serde::Deserializer<'a>>(
- deserializer: D
- ) -> Result<Option<__TimeSerdeType>, D::Error> {
- use ::serde::Deserialize;
- deserializer.deserialize_option(OptionVisitor)
- }
- }
- } else {
- quote!()
- };
-
- let deserialize_option_imports = if cfg!(feature = "parsing") {
- quote! {
- use super::{OptionVisitor, Visitor};
- }
- } else {
- quote!()
- };
-
- quote! {
- mod #(mod_name) {
- use ::time::#(ty) as __TimeSerdeType;
-
- const DESCRIPTION: &[::time::format_description::FormatItem<'_>] = #S(format);
-
- #S(visitor)
- #S(serialize_primary)
- #S(deserialize_primary)
-
- pub(super) mod option {
- use super::{DESCRIPTION, __TimeSerdeType};
- #S(deserialize_option_imports)
-
- #S(serialize_option)
- #S(deserialize_option)
- }
- }
- }
-}
diff --git a/vendor/time-macros/src/time.rs b/vendor/time-macros/src/time.rs
deleted file mode 100644
index 719e2051f..000000000
--- a/vendor/time-macros/src/time.rs
+++ /dev/null
@@ -1,118 +0,0 @@
-use std::iter::Peekable;
-
-use proc_macro::{token_stream, Span, TokenTree};
-
-use crate::helpers::{consume_any_ident, consume_number, consume_punct};
-use crate::to_tokens::ToTokenTree;
-use crate::Error;
-
-enum Period {
- Am,
- Pm,
- _24,
-}
-
-pub(crate) struct Time {
- pub(crate) hour: u8,
- pub(crate) minute: u8,
- pub(crate) second: u8,
- pub(crate) nanosecond: u32,
-}
-
-pub(crate) fn parse(chars: &mut Peekable<token_stream::IntoIter>) -> Result<Time, Error> {
- fn consume_period(chars: &mut Peekable<token_stream::IntoIter>) -> (Option<Span>, Period) {
- if let Ok(span) = consume_any_ident(&["am", "AM"], chars) {
- (Some(span), Period::Am)
- } else if let Ok(span) = consume_any_ident(&["pm", "PM"], chars) {
- (Some(span), Period::Pm)
- } else {
- (None, Period::_24)
- }
- }
-
- let (hour_span, hour) = consume_number("hour", chars)?;
-
- let ((minute_span, minute), (second_span, second), (period_span, period)) =
- match consume_period(chars) {
- // Nothing but the 12-hour clock hour and AM/PM
- (period_span @ Some(_), period) => (
- (Span::mixed_site(), 0),
- (Span::mixed_site(), 0.),
- (period_span, period),
- ),
- (None, _) => {
- consume_punct(':', chars)?;
- let (minute_span, minute) = consume_number::<u8>("minute", chars)?;
- let (second_span, second): (_, f64) = if consume_punct(':', chars).is_ok() {
- consume_number("second", chars)?
- } else {
- (Span::mixed_site(), 0.)
- };
- let (period_span, period) = consume_period(chars);
- (
- (minute_span, minute),
- (second_span, second),
- (period_span, period),
- )
- }
- };
-
- let hour = match (hour, period) {
- (0, Period::Am | Period::Pm) => {
- return Err(Error::InvalidComponent {
- name: "hour",
- value: hour.to_string(),
- span_start: Some(hour_span),
- span_end: Some(period_span.unwrap_or(hour_span)),
- });
- }
- (12, Period::Am) => 0,
- (12, Period::Pm) => 12,
- (hour, Period::Am | Period::_24) => hour,
- (hour, Period::Pm) => hour + 12,
- };
-
- if hour >= 24 {
- Err(Error::InvalidComponent {
- name: "hour",
- value: hour.to_string(),
- span_start: Some(hour_span),
- span_end: Some(period_span.unwrap_or(hour_span)),
- })
- } else if minute >= 60 {
- Err(Error::InvalidComponent {
- name: "minute",
- value: minute.to_string(),
- span_start: Some(minute_span),
- span_end: Some(minute_span),
- })
- } else if second >= 60. {
- Err(Error::InvalidComponent {
- name: "second",
- value: second.to_string(),
- span_start: Some(second_span),
- span_end: Some(second_span),
- })
- } else {
- Ok(Time {
- hour,
- minute,
- second: second.trunc() as _,
- nanosecond: (second.fract() * 1_000_000_000.).round() as _,
- })
- }
-}
-
-impl ToTokenTree for Time {
- fn into_token_tree(self) -> TokenTree {
- quote_group! {{
- const TIME: ::time::Time = ::time::Time::__from_hms_nanos_unchecked(
- #(self.hour),
- #(self.minute),
- #(self.second),
- #(self.nanosecond),
- );
- TIME
- }}
- }
-}
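Context for the deletion above: time.rs normalised 12-hour clock input to a 24-hour value before range-checking, mapping 12 AM to 0, keeping 12 PM as 12, and adding 12 to other PM hours. A minimal standalone sketch of that conversion follows; `Period` and `to_24_hour` are illustrative names only.

// Standalone sketch of the 12-hour to 24-hour normalisation in the deleted parser.
enum Period { Am, Pm, H24 }

fn to_24_hour(hour: u8, period: Period) -> Option<u8> {
    let hour = match (hour, period) {
        (0, Period::Am | Period::Pm) => return None, // "0 am" / "0 pm" is rejected
        (12, Period::Am) => 0,
        (12, Period::Pm) => 12,
        (h, Period::Am | Period::H24) => h,
        (h, Period::Pm) => h + 12,
    };
    if hour < 24 { Some(hour) } else { None }
}

fn main() {
    assert_eq!(to_24_hour(12, Period::Am), Some(0));
    assert_eq!(to_24_hour(7, Period::Pm), Some(19));
    assert_eq!(to_24_hour(25, Period::H24), None);
    println!("ok");
}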
diff --git a/vendor/time-macros/src/to_tokens.rs b/vendor/time-macros/src/to_tokens.rs
deleted file mode 100644
index 3a293925c..000000000
--- a/vendor/time-macros/src/to_tokens.rs
+++ /dev/null
@@ -1,68 +0,0 @@
-use proc_macro::{Group, Ident, Literal, Punct, Span, TokenStream, TokenTree};
-
-pub(crate) trait ToTokenStream: Sized {
- fn append_to(self, ts: &mut TokenStream);
-}
-
-pub(crate) trait ToTokenTree: Sized {
- fn into_token_tree(self) -> TokenTree;
-}
-
-impl<T: ToTokenTree> ToTokenStream for T {
- fn append_to(self, ts: &mut TokenStream) {
- ts.extend([self.into_token_tree()])
- }
-}
-
-impl ToTokenTree for bool {
- fn into_token_tree(self) -> TokenTree {
- let lit = if self { "true" } else { "false" };
- TokenTree::Ident(Ident::new(lit, Span::mixed_site()))
- }
-}
-
-impl ToTokenStream for TokenStream {
- fn append_to(self, ts: &mut TokenStream) {
- ts.extend(self)
- }
-}
-
-impl ToTokenTree for TokenTree {
- fn into_token_tree(self) -> TokenTree {
- self
- }
-}
-
-impl ToTokenTree for &str {
- fn into_token_tree(self) -> TokenTree {
- TokenTree::Literal(Literal::string(self))
- }
-}
-
-macro_rules! impl_for_tree_types {
- ($($type:ty)*) => {$(
- impl ToTokenTree for $type {
- fn into_token_tree(self) -> TokenTree {
- TokenTree::from(self)
- }
- }
- )*};
-}
-impl_for_tree_types![Ident Literal Group Punct];
-
-macro_rules! impl_for_int {
- ($($type:ty => $method:ident)*) => {$(
- impl ToTokenTree for $type {
- fn into_token_tree(self) -> TokenTree {
- TokenTree::from(Literal::$method(self))
- }
- }
- )*};
-}
-impl_for_int! {
- i8 => i8_unsuffixed
- u8 => u8_unsuffixed
- u16 => u16_unsuffixed
- i32 => i32_unsuffixed
- u32 => u32_unsuffixed
-}
diff --git a/vendor/tinystr/.cargo-checksum.json b/vendor/tinystr/.cargo-checksum.json
index 36b7cede5..86027edd3 100644
--- a/vendor/tinystr/.cargo-checksum.json
+++ b/vendor/tinystr/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.toml":"279fef44ae26d0bda43cc629cbe22795d696b36635fcb7ca484e74b9d72b9eb4","LICENSE":"4ad7541d66a407234e2c84902124cef325c29f3e966353efdb800bedb8b8da21","README.md":"ce7d4b009ab4878b4b647fa5c01037b4d3e8bf72e44942dbf6cf1ef217d10b4d","benches/common/mod.rs":"7a31f89cb68cec2574287636ac22fe3fc86a66688b8b1e99700a5da692bd485e","benches/construct.rs":"0e0e7c1459dd3efea0c734a999318078b53e18c3389c74a1ff5a226cd3d05cca","benches/overview.rs":"296d19b32a2d52e449140771d89f9c099d19177eb84e1395c942469d51c4c3f8","benches/read.rs":"cbf349393a50eb90e7ba53906f98a689d585242292f867a37acf6842263af4d9","benches/serde.rs":"5c88866d08c07088b82dbd5472e6276c632d11e064417f5d8f2025a5ade867f0","src/ascii.rs":"ca84603237893d515cf4d3cc5bf61470a81c499956b8bdf51239433c0d49785e","src/asciibyte.rs":"fa29de7403c0424c52c2f30bb47002b9abf4bd08b302c411ffe679d3decfb8de","src/databake.rs":"9f29e30e6deec989822cbdf01f5165e098fa544cf7e49ccea3f5de827648fc1e","src/error.rs":"859d03faa3e98d979e0d6b5d232810d42b58f9c6ef69403d442545327053265e","src/int_ops.rs":"c2be314d19dd41cf18fb3589901d7e58ee32fe3f764fb6a66b08a1e005336406","src/lib.rs":"41db27f31650945dbf41b72a21d42fa4de0722b6f0717a45a3569c3dd4f1e148","src/macros.rs":"3fe76e258b0db2896284bcf4f50a4ac35b7efc542649b4c9f13c6e71c5957ae4","src/serde.rs":"0bd6bbe2ee8195aea68dd235d59b94faa3419aaeb8939e3220dd64bd888873f5","src/ule.rs":"139543634949a95405bc49862840b0794db089abed6efe66533858376cae180f","tests/serde.rs":"cf8cee82f731928375888d1b5e7e5e50368d3e16ce372fced230c9b1ee2a7451"},"package":"f8aeafdfd935e4a7fe16a91ab711fa52d54df84f9c8f7ca5837a9d1d902ef4c2"} \ No newline at end of file
+{"files":{"Cargo.toml":"b28bb188b7c3b68f9d9a710921a42f337bd2e07eb7588d983b20724f64d6a8c8","LICENSE":"4ad7541d66a407234e2c84902124cef325c29f3e966353efdb800bedb8b8da21","README.md":"8e79e986c696d6010a578b2872ee4144d86e26d30a409167ff2cf2af551ef231","benches/common/mod.rs":"7a31f89cb68cec2574287636ac22fe3fc86a66688b8b1e99700a5da692bd485e","benches/construct.rs":"0e0e7c1459dd3efea0c734a999318078b53e18c3389c74a1ff5a226cd3d05cca","benches/overview.rs":"296d19b32a2d52e449140771d89f9c099d19177eb84e1395c942469d51c4c3f8","benches/read.rs":"cbf349393a50eb90e7ba53906f98a689d585242292f867a37acf6842263af4d9","benches/serde.rs":"5c88866d08c07088b82dbd5472e6276c632d11e064417f5d8f2025a5ade867f0","src/ascii.rs":"403408b47d813110e840d4db688145c37a17fbcbff173038d9e3743aa712b321","src/asciibyte.rs":"fa29de7403c0424c52c2f30bb47002b9abf4bd08b302c411ffe679d3decfb8de","src/databake.rs":"9f29e30e6deec989822cbdf01f5165e098fa544cf7e49ccea3f5de827648fc1e","src/error.rs":"e0cbc912258d6e56aad148404d7cc3213d89736fa9ebe56c41f6cb0df7b2dd63","src/int_ops.rs":"c2be314d19dd41cf18fb3589901d7e58ee32fe3f764fb6a66b08a1e005336406","src/lib.rs":"7ddbd83bcb9091495de3c4a7eb7ecc25313c54991be8b463d67a7c2e97c076b6","src/macros.rs":"3fe76e258b0db2896284bcf4f50a4ac35b7efc542649b4c9f13c6e71c5957ae4","src/serde.rs":"0bd6bbe2ee8195aea68dd235d59b94faa3419aaeb8939e3220dd64bd888873f5","src/ule.rs":"139543634949a95405bc49862840b0794db089abed6efe66533858376cae180f","tests/serde.rs":"cf8cee82f731928375888d1b5e7e5e50368d3e16ce372fced230c9b1ee2a7451"},"package":"7ac3f5b6856e931e15e07b478e98c8045239829a65f9156d4fa7e7788197a5ef"} \ No newline at end of file
diff --git a/vendor/tinystr/Cargo.toml b/vendor/tinystr/Cargo.toml
index 64682f74e..284ce6d2d 100644
--- a/vendor/tinystr/Cargo.toml
+++ b/vendor/tinystr/Cargo.toml
@@ -12,7 +12,7 @@
[package]
edition = "2021"
name = "tinystr"
-version = "0.7.0"
+version = "0.7.1"
authors = ["The ICU4X Project Developers"]
include = [
"src/**/*",
@@ -36,9 +36,15 @@ license = "Unicode-DFS-2016"
repository = "https://github.com/unicode-org/icu4x"
resolver = "2"
+[package.metadata.workspaces]
+independent = true
+
[package.metadata.docs.rs]
all-features = true
+[package.metadata.cargo-all-features]
+denylist = ["bench"]
+
[[test]]
name = "serde"
required-features = ["serde"]
@@ -66,7 +72,7 @@ required-features = [
]
[dependencies.databake]
-version = "0.1"
+version = "0.1.3"
optional = true
[dependencies.displaydoc]
@@ -80,7 +86,7 @@ optional = true
default-features = false
[dependencies.zerovec]
-version = "0.9"
+version = "0.9.2"
optional = true
[dev-dependencies.bincode]
@@ -111,4 +117,4 @@ package = "tinystr"
alloc = []
bench = []
default = ["alloc"]
-zerovec = ["dep:zerovec"]
+std = []
diff --git a/vendor/tinystr/README.md b/vendor/tinystr/README.md
index 96b3f955f..5f9a8142b 100644
--- a/vendor/tinystr/README.md
+++ b/vendor/tinystr/README.md
@@ -19,8 +19,8 @@ assert_eq!(s1, "tEsT");
assert_eq!(s1.to_ascii_uppercase(), "TEST");
assert_eq!(s1.to_ascii_lowercase(), "test");
assert_eq!(s1.to_ascii_titlecase(), "Test");
-assert_eq!(s1.is_ascii_alphanumeric(), true);
-assert_eq!(s1.is_ascii_numeric(), false);
+assert!(s1.is_ascii_alphanumeric());
+assert!(!s1.is_ascii_numeric());
let s2 = TinyAsciiStr::<8>::try_from_raw(*b"New York")
.expect("Failed to parse.");
@@ -29,7 +29,7 @@ assert_eq!(s2, "New York");
assert_eq!(s2.to_ascii_uppercase(), "NEW YORK");
assert_eq!(s2.to_ascii_lowercase(), "new york");
assert_eq!(s2.to_ascii_titlecase(), "New york");
-assert_eq!(s2.is_ascii_alphanumeric(), false);
+assert!(!s2.is_ascii_alphanumeric());
```
## Details
diff --git a/vendor/tinystr/src/ascii.rs b/vendor/tinystr/src/ascii.rs
index 0be1125e3..f39f39b73 100644
--- a/vendor/tinystr/src/ascii.rs
+++ b/vendor/tinystr/src/ascii.rs
@@ -138,11 +138,6 @@ impl<const N: usize> TinyAsciiStr<N> {
#[inline]
#[must_use]
pub const fn as_bytes(&self) -> &[u8] {
- /// core::slice::from_raw_parts(a, b) = core::mem::transmute((a, b)) hack
- /// ```compile_fail
- /// const unsafe fn canary() { core::slice::from_raw_parts(0 as *const u8, 0); }
- /// ```
- const _: () = ();
// Safe because `self.bytes.as_slice()` pointer-casts to `&[u8]`,
// and changing the length of that slice to self.len() < N is safe.
unsafe { core::mem::transmute((self.bytes.as_slice().as_ptr(), self.len())) }
@@ -668,7 +663,7 @@ mod test {
use rand::seq::SliceRandom;
use rand::SeedableRng;
- const STRINGS: &[&str] = &[
+ const STRINGS: [&str; 26] = [
"Latn",
"laTn",
"windows",
@@ -723,8 +718,8 @@ mod test {
T: core::fmt::Debug + core::cmp::PartialEq,
{
for s in STRINGS
- .iter()
- .map(|s| s.to_string())
+ .into_iter()
+ .map(str::to_owned)
.chain(gen_strings(100, &[3, 4, 5, 8, 12]))
{
let t = match TinyAsciiStr::<N>::from_str(&s) {
@@ -930,7 +925,7 @@ mod test {
.map(|c| c.to_ascii_lowercase())
.collect::<String>()
},
- |t: TinyAsciiStr<N>| TinyAsciiStr::to_ascii_lowercase(t).to_string(),
+ |t: TinyAsciiStr<N>| TinyAsciiStr::to_ascii_lowercase(t).as_str().to_owned(),
)
}
check::<2>();
@@ -954,7 +949,7 @@ mod test {
unsafe { r.as_bytes_mut()[0].make_ascii_uppercase() };
r
},
- |t: TinyAsciiStr<N>| TinyAsciiStr::to_ascii_titlecase(t).to_string(),
+ |t: TinyAsciiStr<N>| TinyAsciiStr::to_ascii_titlecase(t).as_str().to_owned(),
)
}
check::<2>();
@@ -974,7 +969,7 @@ mod test {
.map(|c| c.to_ascii_uppercase())
.collect::<String>()
},
- |t: TinyAsciiStr<N>| TinyAsciiStr::to_ascii_uppercase(t).to_string(),
+ |t: TinyAsciiStr<N>| TinyAsciiStr::to_ascii_uppercase(t).as_str().to_owned(),
)
}
check::<2>();
diff --git a/vendor/tinystr/src/error.rs b/vendor/tinystr/src/error.rs
index 03901431c..7910f8b48 100644
--- a/vendor/tinystr/src/error.rs
+++ b/vendor/tinystr/src/error.rs
@@ -4,6 +4,9 @@
use displaydoc::Display;
+#[cfg(feature = "std")]
+impl std::error::Error for TinyStrError {}
+
#[derive(Display, Debug, PartialEq, Eq)]
#[non_exhaustive]
pub enum TinyStrError {
diff --git a/vendor/tinystr/src/lib.rs b/vendor/tinystr/src/lib.rs
index 96018b8b2..7745da0e5 100644
--- a/vendor/tinystr/src/lib.rs
+++ b/vendor/tinystr/src/lib.rs
@@ -21,8 +21,8 @@
//! assert_eq!(s1.to_ascii_uppercase(), "TEST");
//! assert_eq!(s1.to_ascii_lowercase(), "test");
//! assert_eq!(s1.to_ascii_titlecase(), "Test");
-//! assert_eq!(s1.is_ascii_alphanumeric(), true);
-//! assert_eq!(s1.is_ascii_numeric(), false);
+//! assert!(s1.is_ascii_alphanumeric());
+//! assert!(!s1.is_ascii_numeric());
//!
//! let s2 = TinyAsciiStr::<8>::try_from_raw(*b"New York")
//! .expect("Failed to parse.");
@@ -31,7 +31,7 @@
//! assert_eq!(s2.to_ascii_uppercase(), "NEW YORK");
//! assert_eq!(s2.to_ascii_lowercase(), "new york");
//! assert_eq!(s2.to_ascii_titlecase(), "New york");
-//! assert_eq!(s2.is_ascii_alphanumeric(), false);
+//! assert!(!s2.is_ascii_alphanumeric());
//! ```
//!
//! # Details
@@ -52,7 +52,7 @@
//! [`ICU4X`]: ../icu/index.html
// https://github.com/unicode-org/icu4x/blob/main/docs/process/boilerplate.md#library-annotations
-#![cfg_attr(not(test), no_std)]
+#![cfg_attr(not(any(test, feature = "std")), no_std)]
#![cfg_attr(
not(test),
deny(
diff --git a/vendor/toml/.cargo-checksum.json b/vendor/toml-0.5.9/.cargo-checksum.json
index fc3af5a18..fc3af5a18 100644
--- a/vendor/toml/.cargo-checksum.json
+++ b/vendor/toml-0.5.9/.cargo-checksum.json
diff --git a/vendor/toml/Cargo.lock b/vendor/toml-0.5.9/Cargo.lock
index 1f4de004a..1f4de004a 100644
--- a/vendor/toml/Cargo.lock
+++ b/vendor/toml-0.5.9/Cargo.lock
diff --git a/vendor/toml/Cargo.toml b/vendor/toml-0.5.9/Cargo.toml
index 2e30a900a..2e30a900a 100644
--- a/vendor/toml/Cargo.toml
+++ b/vendor/toml-0.5.9/Cargo.toml
diff --git a/vendor/toml-0.5.9/LICENSE-APACHE b/vendor/toml-0.5.9/LICENSE-APACHE
new file mode 100644
index 000000000..16fe87b06
--- /dev/null
+++ b/vendor/toml-0.5.9/LICENSE-APACHE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+Copyright [yyyy] [name of copyright owner]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/vendor/toml/LICENSE-MIT b/vendor/toml-0.5.9/LICENSE-MIT
index 39e0ed660..39e0ed660 100644
--- a/vendor/toml/LICENSE-MIT
+++ b/vendor/toml-0.5.9/LICENSE-MIT
diff --git a/vendor/toml/README.md b/vendor/toml-0.5.9/README.md
index 21dd1088a..21dd1088a 100644
--- a/vendor/toml/README.md
+++ b/vendor/toml-0.5.9/README.md
diff --git a/vendor/toml/examples/decode.rs b/vendor/toml-0.5.9/examples/decode.rs
index 256069b35..256069b35 100644
--- a/vendor/toml/examples/decode.rs
+++ b/vendor/toml-0.5.9/examples/decode.rs
diff --git a/vendor/toml/examples/enum_external.rs b/vendor/toml-0.5.9/examples/enum_external.rs
index 7de061f61..7de061f61 100644
--- a/vendor/toml/examples/enum_external.rs
+++ b/vendor/toml-0.5.9/examples/enum_external.rs
diff --git a/vendor/toml/examples/toml2json.rs b/vendor/toml-0.5.9/examples/toml2json.rs
index 1b90c9fde..1b90c9fde 100644
--- a/vendor/toml/examples/toml2json.rs
+++ b/vendor/toml-0.5.9/examples/toml2json.rs
diff --git a/vendor/toml/src/datetime.rs b/vendor/toml-0.5.9/src/datetime.rs
index a68b07568..a68b07568 100644
--- a/vendor/toml/src/datetime.rs
+++ b/vendor/toml-0.5.9/src/datetime.rs
diff --git a/vendor/toml/src/de.rs b/vendor/toml-0.5.9/src/de.rs
index 40d88ae7d..40d88ae7d 100644
--- a/vendor/toml/src/de.rs
+++ b/vendor/toml-0.5.9/src/de.rs
diff --git a/vendor/toml/src/lib.rs b/vendor/toml-0.5.9/src/lib.rs
index 00421b118..00421b118 100644
--- a/vendor/toml/src/lib.rs
+++ b/vendor/toml-0.5.9/src/lib.rs
diff --git a/vendor/toml/src/macros.rs b/vendor/toml-0.5.9/src/macros.rs
index 0731afefd..0731afefd 100644
--- a/vendor/toml/src/macros.rs
+++ b/vendor/toml-0.5.9/src/macros.rs
diff --git a/vendor/toml/src/map.rs b/vendor/toml-0.5.9/src/map.rs
index d130a1d54..d130a1d54 100644
--- a/vendor/toml/src/map.rs
+++ b/vendor/toml-0.5.9/src/map.rs
diff --git a/vendor/toml/src/ser.rs b/vendor/toml-0.5.9/src/ser.rs
index 90c5acf61..90c5acf61 100644
--- a/vendor/toml/src/ser.rs
+++ b/vendor/toml-0.5.9/src/ser.rs
diff --git a/vendor/toml/src/spanned.rs b/vendor/toml-0.5.9/src/spanned.rs
index 9ee56ae6d..9ee56ae6d 100644
--- a/vendor/toml/src/spanned.rs
+++ b/vendor/toml-0.5.9/src/spanned.rs
diff --git a/vendor/toml/src/tokens.rs b/vendor/toml-0.5.9/src/tokens.rs
index c2422828e..c2422828e 100644
--- a/vendor/toml/src/tokens.rs
+++ b/vendor/toml-0.5.9/src/tokens.rs
diff --git a/vendor/toml/src/value.rs b/vendor/toml-0.5.9/src/value.rs
index 1a1756d10..1a1756d10 100644
--- a/vendor/toml/src/value.rs
+++ b/vendor/toml-0.5.9/src/value.rs
diff --git a/vendor/toml/tests/enum_external_deserialize.rs b/vendor/toml-0.5.9/tests/enum_external_deserialize.rs
index 6d45b4848..6d45b4848 100644
--- a/vendor/toml/tests/enum_external_deserialize.rs
+++ b/vendor/toml-0.5.9/tests/enum_external_deserialize.rs
diff --git a/vendor/tracing-subscriber-0.3.3/.cargo-checksum.json b/vendor/tracing-subscriber-0.3.3/.cargo-checksum.json
deleted file mode 100644
index 3fd14355e..000000000
--- a/vendor/tracing-subscriber-0.3.3/.cargo-checksum.json
+++ /dev/null
@@ -1 +0,0 @@
-{"files":{"CHANGELOG.md":"fd7a732ca8d5997cf09a6ffbde0dda0667873031884aa3549d6044971dc00a66","Cargo.toml":"4908f04be83f94bc75fbcf4244fa57b8a52c984928a3320550b692c1038d5448","LICENSE":"898b1ae9821e98daf8964c8d6c7f61641f5f5aa78ad500020771c0939ee0dea1","README.md":"803714740b5ad75ac31a0f9dafd10e0d4d62f7c27c2c0e182d2076a313b0649b","benches/enter.rs":"4a94a04e2abd07950ef2f0b646f4dcdf4ff00abf6396edb5a53c8b41b7691b1a","benches/filter.rs":"6374005ffa47fa19880bb95e3e37406f40ea72a02c5136f4d5eb4c663d452b18","benches/filter_log.rs":"612716bdf9a188093e84d014a4847f18157f148f7d64e54150cd5c91ac709a8a","benches/fmt.rs":"5a0ff37967ffef3a221eebb78855d031e2e883a8a67528c8e794cc6f16cbee8a","benches/support/mod.rs":"72bef51154da9c9b3d81300195c1929a818858fa4b4fc2aa07b49ca586f4cd39","src/field/debug.rs":"4ab50198a0b042d92fefa77b5cac0aef7ba6936149fa555f4b6e2036dcd7f2d7","src/field/delimited.rs":"e6b2dcbf9cb1e9b5e862b462f91190adaf8e14f9c2c5d2048ad651f49cfa2007","src/field/display.rs":"9c06a52919dbe9bfd4cf7eec39293240c9facebe052a2fecc2f21184beb5195f","src/field/mod.rs":"35e3dd4ad7b49c99a24e80c6a40a00b3189a114488793657d6d733a90d2e10f6","src/filter/directive.rs":"7ecf87b17afbddadbc385764c2d9c1fda4b020a08d75f741f8e34c7dc475bd74","src/filter/env/directive.rs":"628f9f566ccee924d43d79b287c569abfc32c2fb74e078958aed6cb8285cfb4f","src/filter/env/field.rs":"9f2ceaedf2e2ecefaff863347ef8dfa85cd5f64a0fd09a0f77f64f412c9bb548","src/filter/env/mod.rs":"7f864a5ef0c008c7fe9a21d0a94bb87dd72746e628ff3e68c18b0fd173763918","src/filter/filter_fn.rs":"0debbc4a4b4d2a57b2a06905017ac908bf34b0a64aaf961535fbf6f4d5a700a9","src/filter/layer_filters.rs":"16ff19fed003b913de4f85a03b31864d71ee73c7ce86b07c80da07fe633f682e","src/filter/level.rs":"cc449757aac47caaf19dd5ba4d74c8efbcd7531fcd6c13da0c5f6fdda12cc9ca","src/filter/mod.rs":"8ebfd0dc92415ff27ec552f20919e598842a87186f13f120449053a96e1e3307","src/filter/targets.rs":"5cec882366d7f12de0a88f7daaac8499785ce9e3832619f251876a02ae19a6bf","src/fmt/fmt_layer.rs":"486264d810ab6b28428bd48e00774fc822762fc9f991332a2d94a42b3168f2e6","src/fmt/format/json.rs":"1a38c049e1bf99efaf7db1f1fd26d3a5bb1e768fc1524c95816708e5d39fca35","src/fmt/format/mod.rs":"ff92f7910bed4f3c0c2ce798333d273d6b5b5fc09cc78031f52d13e043986227","src/fmt/format/pretty.rs":"eecf278b15cc60b35c3f6aa5d05452401c7a4a29195357e92318d684fcfe3072","src/fmt/mod.rs":"270ffa0a9e6543a602247fccef276a3012daa558181f24cd9032292edbc8dc2c","src/fmt/time/datetime.rs":"778d4604d800e46b940087394e7b72750738b554e02aea523fa9820ab0768c08","src/fmt/time/mod.rs":"304c9383e5cfc0c42d79f47a10323ed5a98585e018b127924b0925ec067f0739","src/fmt/time/time_crate.rs":"bacec2c8bb31175b85f2fb3ae40155a08b2aab7a04adaf4c88679147dc651c58","src/fmt/writer.rs":"1d7a4e2dddddff1bfd1344f2cecdb6b2b69b015c9869d95987d26e177dffa793","src/layer/context.rs":"2478693e2faffdf2e519b6d37e1c3aa3dd75088185accc2b68ffa8612bf73195","src/layer/layered.rs":"ba918a9b944f2c083cbb75d6d7f99f90083aa0a29cf3f4f1dd78aa034e09ade6","src/layer/mod.rs":"e1804cfe91051020cac63fb1067d196552ebb844b6c6d1d2279b97dbec1c64df","src/layer/tests.rs":"3e974d627c4bc4269cfa10c82c890e596c9d46af8e6bc03c6c117bde1237e948","src/lib.rs":"cb362279b3c6d23645cda6d768707576f460e275d332d5350a036437cd5404e0","src/macros.rs":"e184bffc6b5999c48e365ad08343dca764a5fb711b789beb26bd1d5f1d767726","src/prelude.rs":"088635def33be9a4c4b6ed934dc22540c555e27d62f7625a43aa9c0e525ca467","src/registry/extensions.rs":"7333aefd69c767212a7924c57283442430edccb17092c91e02a7d13b2d312b11","src/registry/mod.rs":"4f0108e75e0f6e239b8eb69fcad052f25e3b887e412e951e0cbec02cf13f05
d5","src/registry/sharded.rs":"972bdd94f43a33ef1f2ebf96ea69ebe4c1d4b0215e69315a3b525783c2025696","src/registry/stack.rs":"9ef333d6a8a28a064e80ff1e376dbb07bc597009010ec332b2dc3ab435d737c2","src/reload.rs":"41fa9a1a28fef626e302a80a68d665492e73ef6d1a2a3c2a7aac5d6c9a0bb496","src/sync.rs":"7f78f3de5b618a999be0e61f936a233975e7769f1ebb55a0e48c3d199e9c45e3","src/util.rs":"55b4e9d63112f9d5a12a287273a9b1212741058384332d3edc024168cacfd627","tests/cached_layer_filters_dont_break_other_layers.rs":"b2084542a014abeff821b30b2b8c21e32bfdcffae53ce5335fb588f557fa4244","tests/duplicate_spans.rs":"48f596bbfabcc6618244afddcf3c3f2e915b9d79284f17bdd0e0616ad29929be","tests/field_filter.rs":"c44d88ab711164a2b1b3a09377284b469f79ddf4651416515a035782c7c64b79","tests/filter.rs":"a43d23e867af779031b6245047092aca57ee26980a8f3faa19036542bcd37f06","tests/filter_log.rs":"e0cd9d394dbfeeb80570a7686bc7f588c5489657980436810711ed8852f86169","tests/fmt_max_level_hint.rs":"d4c6d6f976ae41ab8052fa610a7337ad7150802cbd5634cb30fc45c1f215cfcd","tests/hinted_layer_filters_dont_break_other_layers.rs":"d5ba9cfb6784cf59f007e673ad549dc722d109f6b3d4a69f6aa11b25ca10b469","tests/layer_filter_interests_are_cached.rs":"d036d1c4bc3754e94ebfdda9c841f4858ccec40aba0720f3fbf26c817bfe5a83","tests/layer_filters/boxed.rs":"04db459721a26d6502a2b3fbe42154c5a451021a9374a18c017d10971f44e0c0","tests/layer_filters/downcast_raw.rs":"9b90ead571543cbe14e89b4fe637360d9baf3069f6f656ed3bdf65e7318648f1","tests/layer_filters/filter_scopes.rs":"02611bc58d0d8a67a127eca8cab1b2d9a9901bd2c8a8daad41adf6089b28aee0","tests/layer_filters/main.rs":"0316d611c740e234b78ed9a9dae392fe80472c1e8b004a007ad2dd87d068c67b","tests/layer_filters/targets.rs":"138e3f9ddd68571d94c5aff9d54ee2fbc5f44724c6ee42477a411740ccb79ee6","tests/layer_filters/trees.rs":"4df7b5cf12da44a9255c56e5b80e2b0cf84820230ba916f324c67bc3ee4e4605","tests/multiple_layer_filter_interests_cached.rs":"1ea195f03e58d715228ec1b604f85bda2fc82812d05b2f6370d5edd34a035f32","tests/registry_max_level_hint.rs":"ba386d32b8d13832d7009163241c3d0723488c0393d85647eb9368776251e4fc","tests/registry_with_subscriber.rs":"13b92ed68d9013aefefbc4c73e695c690630e4460634206d214db4c19abb7c0f","tests/reload.rs":"4566386b1b26e6609f5a4bf0e6bef1c2245a591d12417cee189b26dfa14f7f95","tests/same_len_filters.rs":"50c8f5fa1494773410a9f52a56b303534a01a023b186cf2f3131e5e7706eb156","tests/support.rs":"75559505af8018012739d24b3c8743dd079b4d3a8ae28f08b4586a961720aa7b","tests/unhinted_layer_filters_dont_break_other_layers.rs":"519cfef4977e511af938546d4208c645a28248c8ed8666daf180f0ad32f0a261","tests/utils.rs":"2b04ce2d8b56a9062a025900104853e081eae8e3f113f990a915d5f9dea6577b"},"package":"245da694cc7fc4729f3f418b304cb57789f1bed2a78c575407ab8a23f53cb4d3"} \ No newline at end of file
diff --git a/vendor/tracing-subscriber-0.3.3/src/fmt/format/pretty.rs b/vendor/tracing-subscriber-0.3.3/src/fmt/format/pretty.rs
deleted file mode 100644
index 3e47e2d93..000000000
--- a/vendor/tracing-subscriber-0.3.3/src/fmt/format/pretty.rs
+++ /dev/null
@@ -1,415 +0,0 @@
-use super::*;
-use crate::{
- field::{VisitFmt, VisitOutput},
- fmt::fmt_layer::{FmtContext, FormattedFields},
- registry::LookupSpan,
-};
-
-use std::fmt;
-use tracing_core::{
- field::{self, Field},
- Event, Level, Subscriber,
-};
-
-#[cfg(feature = "tracing-log")]
-use tracing_log::NormalizeEvent;
-
-use ansi_term::{Colour, Style};
-
-/// An excessively pretty, human-readable event formatter.
-#[derive(Debug, Clone, Eq, PartialEq)]
-pub struct Pretty {
- display_location: bool,
-}
-
-/// The [visitor] produced by [`Pretty`]'s [`MakeVisitor`] implementation.
-///
-/// [visitor]: field::Visit
-/// [`MakeVisitor`]: crate::field::MakeVisitor
-#[derive(Debug)]
-pub struct PrettyVisitor<'a> {
- writer: Writer<'a>,
- is_empty: bool,
- style: Style,
- result: fmt::Result,
-}
-
-/// An excessively pretty, human-readable [`MakeVisitor`] implementation.
-///
-/// [`MakeVisitor`]: crate::field::MakeVisitor
-#[derive(Debug)]
-pub struct PrettyFields {
- /// A value to override the provided `Writer`'s ANSI formatting
- /// configuration.
- ///
- /// If this is `Some`, we override the `Writer`'s ANSI setting. This is
- /// necessary in order to continue supporting the deprecated
- /// `PrettyFields::with_ansi` method. If it is `None`, we don't override the
- /// ANSI formatting configuration (because the deprecated method was not
- /// called).
- // TODO: when `PrettyFields::with_ansi` is removed, we can get rid
- // of this entirely.
- ansi: Option<bool>,
-}
-
-// === impl Pretty ===
-
-impl Default for Pretty {
- fn default() -> Self {
- Self {
- display_location: true,
- }
- }
-}
-
-impl Pretty {
- fn style_for(level: &Level) -> Style {
- match *level {
- Level::TRACE => Style::new().fg(Colour::Purple),
- Level::DEBUG => Style::new().fg(Colour::Blue),
- Level::INFO => Style::new().fg(Colour::Green),
- Level::WARN => Style::new().fg(Colour::Yellow),
- Level::ERROR => Style::new().fg(Colour::Red),
- }
- }
-
- /// Sets whether the event's source code location is displayed.
- ///
- /// This defaults to `true`.
- pub fn with_source_location(self, display_location: bool) -> Self {
- Self {
- display_location,
- ..self
- }
- }
-}
-
-impl<T> Format<Pretty, T> {
- /// Sets whether or not the source code location from which an event
- /// originated is displayed.
- ///
- /// This defaults to `true`.
- pub fn with_source_location(mut self, display_location: bool) -> Self {
- self.format = self.format.with_source_location(display_location);
- self
- }
-}
-
-impl<C, N, T> FormatEvent<C, N> for Format<Pretty, T>
-where
- C: Subscriber + for<'a> LookupSpan<'a>,
- N: for<'a> FormatFields<'a> + 'static,
- T: FormatTime,
-{
- fn format_event(
- &self,
- ctx: &FmtContext<'_, C, N>,
- mut writer: Writer<'_>,
- event: &Event<'_>,
- ) -> fmt::Result {
- #[cfg(feature = "tracing-log")]
- let normalized_meta = event.normalized_metadata();
- #[cfg(feature = "tracing-log")]
- let meta = normalized_meta.as_ref().unwrap_or_else(|| event.metadata());
- #[cfg(not(feature = "tracing-log"))]
- let meta = event.metadata();
- write!(&mut writer, " ")?;
-
- // if the `Format` struct *also* has an ANSI color configuration,
- // override the writer...the API for configuring ANSI color codes on the
- // `Format` struct is deprecated, but we still need to honor those
- // configurations.
- if let Some(ansi) = self.ansi {
- writer = writer.with_ansi(ansi);
- }
-
- self.format_timestamp(&mut writer)?;
-
- let style = if self.display_level && writer.has_ansi_escapes() {
- Pretty::style_for(meta.level())
- } else {
- Style::new()
- };
-
- if self.display_level {
- write!(
- writer,
- "{} ",
- super::FmtLevel::new(meta.level(), writer.has_ansi_escapes())
- )?;
- }
-
- if self.display_target {
- let target_style = if writer.has_ansi_escapes() {
- style.bold()
- } else {
- style
- };
- write!(
- writer,
- "{}{}{}: ",
- target_style.prefix(),
- meta.target(),
- target_style.infix(style)
- )?;
- }
- let mut v = PrettyVisitor::new(writer.by_ref(), true).with_style(style);
- event.record(&mut v);
- v.finish()?;
- writer.write_char('\n')?;
-
- let dimmed = if writer.has_ansi_escapes() {
- Style::new().dimmed().italic()
- } else {
- Style::new()
- };
- let thread = self.display_thread_name || self.display_thread_id;
- if let (true, Some(file), Some(line)) =
- (self.format.display_location, meta.file(), meta.line())
- {
- write!(
- writer,
- " {} {}:{}{}",
- dimmed.paint("at"),
- file,
- line,
- dimmed.paint(if thread { " " } else { "\n" })
- )?;
- } else if thread {
- write!(writer, " ")?;
- }
-
- if thread {
- write!(writer, "{} ", dimmed.paint("on"))?;
- let thread = std::thread::current();
- if self.display_thread_name {
- if let Some(name) = thread.name() {
- write!(writer, "{}", name)?;
- if self.display_thread_id {
- write!(writer, " ({:?})", thread.id())?;
- }
- } else if !self.display_thread_id {
- write!(writer, " {:?}", thread.id())?;
- }
- } else if self.display_thread_id {
- write!(writer, " {:?}", thread.id())?;
- }
- writer.write_char('\n')?;
- }
-
- let bold = writer.bold();
- let span = event
- .parent()
- .and_then(|id| ctx.span(id))
- .or_else(|| ctx.lookup_current());
-
- let scope = span.into_iter().flat_map(|span| span.scope());
-
- for span in scope {
- let meta = span.metadata();
- if self.display_target {
- write!(
- writer,
- " {} {}::{}",
- dimmed.paint("in"),
- meta.target(),
- bold.paint(meta.name()),
- )?;
- } else {
- write!(
- writer,
- " {} {}",
- dimmed.paint("in"),
- bold.paint(meta.name()),
- )?;
- }
-
- let ext = span.extensions();
- let fields = &ext
- .get::<FormattedFields<N>>()
- .expect("Unable to find FormattedFields in extensions; this is a bug");
- if !fields.is_empty() {
- write!(writer, " {} {}", dimmed.paint("with"), fields)?;
- }
- writer.write_char('\n')?;
- }
-
- writer.write_char('\n')
- }
-}
-
-impl<'writer> FormatFields<'writer> for Pretty {
- fn format_fields<R: RecordFields>(&self, writer: Writer<'writer>, fields: R) -> fmt::Result {
- let mut v = PrettyVisitor::new(writer, false);
- fields.record(&mut v);
- v.finish()
- }
-
- fn add_fields(
- &self,
- current: &'writer mut FormattedFields<Self>,
- fields: &span::Record<'_>,
- ) -> fmt::Result {
- let empty = current.is_empty();
- let writer = current.as_writer();
- let mut v = PrettyVisitor::new(writer, empty);
- fields.record(&mut v);
- v.finish()
- }
-}
-
-// === impl PrettyFields ===
-
-impl Default for PrettyFields {
- fn default() -> Self {
- Self::new()
- }
-}
-
-impl PrettyFields {
- /// Returns a new default [`PrettyFields`] implementation.
- pub fn new() -> Self {
- // By default, don't override the `Writer`'s ANSI colors
- // configuration. We'll only do this if the user calls the
- // deprecated `PrettyFields::with_ansi` method.
- Self { ansi: None }
- }
-
- /// Enable ANSI encoding for formatted fields.
- #[deprecated(
- since = "0.3.3",
- note = "Use `fmt::Subscriber::with_ansi` or `fmt::Layer::with_ansi` instead."
- )]
- pub fn with_ansi(self, ansi: bool) -> Self {
- Self {
- ansi: Some(ansi),
- ..self
- }
- }
-}
-
-impl<'a> MakeVisitor<Writer<'a>> for PrettyFields {
- type Visitor = PrettyVisitor<'a>;
-
- #[inline]
- fn make_visitor(&self, mut target: Writer<'a>) -> Self::Visitor {
- if let Some(ansi) = self.ansi {
- target = target.with_ansi(ansi);
- }
- PrettyVisitor::new(target, true)
- }
-}
-
-// === impl PrettyVisitor ===
-
-impl<'a> PrettyVisitor<'a> {
- /// Returns a new default visitor that formats to the provided `writer`.
- ///
- /// # Arguments
- /// - `writer`: the writer to format to.
- /// - `is_empty`: whether or not any fields have been previously written to
- /// that writer.
- pub fn new(writer: Writer<'a>, is_empty: bool) -> Self {
- Self {
- writer,
- is_empty,
- style: Style::default(),
- result: Ok(()),
- }
- }
-
- pub(crate) fn with_style(self, style: Style) -> Self {
- Self { style, ..self }
- }
-
- fn write_padded(&mut self, value: &impl fmt::Debug) {
- let padding = if self.is_empty {
- self.is_empty = false;
- ""
- } else {
- ", "
- };
- self.result = write!(self.writer, "{}{:?}", padding, value);
- }
-
- fn bold(&self) -> Style {
- if self.writer.has_ansi_escapes() {
- self.style.bold()
- } else {
- Style::new()
- }
- }
-}
-
-impl<'a> field::Visit for PrettyVisitor<'a> {
- fn record_str(&mut self, field: &Field, value: &str) {
- if self.result.is_err() {
- return;
- }
-
- if field.name() == "message" {
- self.record_debug(field, &format_args!("{}", value))
- } else {
- self.record_debug(field, &value)
- }
- }
-
- fn record_error(&mut self, field: &Field, value: &(dyn std::error::Error + 'static)) {
- if let Some(source) = value.source() {
- let bold = self.bold();
- self.record_debug(
- field,
- &format_args!(
- "{}, {}{}.sources{}: {}",
- value,
- bold.prefix(),
- field,
- bold.infix(self.style),
- ErrorSourceList(source),
- ),
- )
- } else {
- self.record_debug(field, &format_args!("{}", value))
- }
- }
-
- fn record_debug(&mut self, field: &Field, value: &dyn fmt::Debug) {
- if self.result.is_err() {
- return;
- }
- let bold = self.bold();
- match field.name() {
- "message" => self.write_padded(&format_args!("{}{:?}", self.style.prefix(), value,)),
- // Skip fields that are actually log metadata that have already been handled
- #[cfg(feature = "tracing-log")]
- name if name.starts_with("log.") => self.result = Ok(()),
- name if name.starts_with("r#") => self.write_padded(&format_args!(
- "{}{}{}: {:?}",
- bold.prefix(),
- &name[2..],
- bold.infix(self.style),
- value
- )),
- name => self.write_padded(&format_args!(
- "{}{}{}: {:?}",
- bold.prefix(),
- name,
- bold.infix(self.style),
- value
- )),
- };
- }
-}
-
-impl<'a> VisitOutput<fmt::Result> for PrettyVisitor<'a> {
- fn finish(mut self) -> fmt::Result {
- write!(&mut self.writer, "{}", self.style.suffix())?;
- self.result
- }
-}
-
-impl<'a> VisitFmt for PrettyVisitor<'a> {
- fn writer(&mut self) -> &mut dyn fmt::Write {
- &mut self.writer
- }
-}
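The `Pretty` formatter deleted above is normally reached through the `fmt` builder rather than constructed by hand. A minimal sketch of that downstream usage, kept outside the vendored diff and assuming the crate's default `fmt`/`ansi` features are enabled:

```rust
use tracing::info;

fn main() {
    // Multi-line, human-readable output; source locations are on by default
    // and can be toggled via `with_source_location`, as defined above.
    tracing_subscriber::fmt()
        .pretty()
        .with_ansi(true) // colours come from the Style/Colour values above
        .init();

    info!(answer = 42, "pretty-printed event");
}
```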
diff --git a/vendor/tracing-subscriber-0.3.3/src/fmt/time/time_crate.rs b/vendor/tracing-subscriber-0.3.3/src/fmt/time/time_crate.rs
deleted file mode 100644
index 64d274365..000000000
--- a/vendor/tracing-subscriber-0.3.3/src/fmt/time/time_crate.rs
+++ /dev/null
@@ -1,276 +0,0 @@
-use crate::fmt::{format::Writer, time::FormatTime, writer::WriteAdaptor};
-use std::fmt;
-use time::{format_description::well_known, formatting::Formattable, OffsetDateTime};
-
-/// Formats the current [local time] using a [formatter] from the [`time` crate].
-///
-/// To format the current [UTC time] instead, use the [`UtcTime`] type.
-///
-/// [local time]: https://docs.rs/time/0.3/time/struct.OffsetDateTime.html#method.now_local
-/// [UTC time]: https://docs.rs/time/0.3/time/struct.OffsetDateTime.html#method.now_utc
-/// [formatter]: https://docs.rs/time/0.3/time/formatting/trait.Formattable.html
-/// [`time` crate]: https://docs.rs/time/0.3/time/
-#[derive(Clone, Debug)]
-#[cfg_attr(docsrs, doc(cfg(all(feature = "time", feature = "local-time"))))]
-#[cfg(feature = "local-time")]
-pub struct LocalTime<F> {
- format: F,
-}
-
-/// Formats the current [UTC time] using a [formatter] from the [`time` crate].
-///
-/// To format the current [local time] instead, use the [`LocalTime`] type.
-///
-/// [local time]: https://docs.rs/time/0.3/time/struct.OffsetDateTime.html#method.now_local
-/// [UTC time]: https://docs.rs/time/0.3/time/struct.OffsetDateTime.html#method.now_utc
-/// [formatter]: https://docs.rs/time/0.3/time/formatting/trait.Formattable.html
-/// [`time` crate]: https://docs.rs/time/0.3/time/
-#[cfg_attr(docsrs, doc(cfg(feature = "time")))]
-#[derive(Clone, Debug)]
-pub struct UtcTime<F> {
- format: F,
-}
-
-// === impl LocalTime ===
-
-#[cfg(feature = "local-time")]
-impl LocalTime<well_known::Rfc3339> {
- /// Returns a formatter that formats the current [local time] in the
- /// [RFC 3339] format (a subset of the [ISO 8601] timestamp format).
- ///
- /// # Examples
- ///
- /// ```
- /// use tracing_subscriber::fmt::{self, time};
- ///
- /// let collector = tracing_subscriber::fmt()
- /// .with_timer(time::LocalTime::rfc_3339());
- /// # drop(collector);
- /// ```
- ///
- /// [local time]: https://docs.rs/time/0.3/time/struct.OffsetDateTime.html#method.now_local
- /// [RFC 3339]: https://datatracker.ietf.org/doc/html/rfc3339
- /// [ISO 8601]: https://en.wikipedia.org/wiki/ISO_8601
- pub fn rfc_3339() -> Self {
- Self::new(well_known::Rfc3339)
- }
-}
-
-#[cfg(feature = "local-time")]
-impl<F: Formattable> LocalTime<F> {
- /// Returns a formatter that formats the current [local time] using the
- /// [`time` crate] with the provided format. The format may be any
- /// type that implements the [`Formattable`] trait.
- ///
- /// Typically, the format will be a format description string, or one of the
- /// `time` crate's [well-known formats].
- ///
- /// If the format description is statically known, then the
- /// [`format_description!`] macro should be used. This is identical to the
- /// [`time::format_description::parse`] method, but runs at compile-time,
- /// throwing an error if the format description is invalid. If the desired format
- /// is not known statically (e.g., a user is providing a format string), then the
- /// [`time::format_description::parse`] method should be used. Note that this
- /// method is fallible.
- ///
- /// See the [`time` book] for details on the format description syntax.
- ///
- /// # Examples
- ///
- /// Using the [`format_description!`] macro:
- ///
- /// ```
- /// use tracing_subscriber::fmt::{self, time::LocalTime};
- /// use time::macros::format_description;
- ///
- /// let timer = LocalTime::new(format_description!("[hour]:[minute]:[second]"));
- /// let collector = tracing_subscriber::fmt()
- /// .with_timer(timer);
- /// # drop(collector);
- /// ```
- ///
- /// Using [`time::format_description::parse`]:
- ///
- /// ```
- /// use tracing_subscriber::fmt::{self, time::LocalTime};
- ///
- /// let time_format = time::format_description::parse("[hour]:[minute]:[second]")
- /// .expect("format string should be valid!");
- /// let timer = LocalTime::new(time_format);
- /// let collector = tracing_subscriber::fmt()
- /// .with_timer(timer);
- /// # drop(collector);
- /// ```
- ///
- /// Using the [`format_description!`] macro requires enabling the `time`
- /// crate's "macros" feature flag.
- ///
- /// Using a [well-known format][well-known formats] (this is equivalent to
- /// [`LocalTime::rfc_3339`]):
- ///
- /// ```
- /// use tracing_subscriber::fmt::{self, time::LocalTime};
- ///
- /// let timer = LocalTime::new(time::format_description::well_known::Rfc3339);
- /// let collector = tracing_subscriber::fmt()
- /// .with_timer(timer);
- /// # drop(collector);
- /// ```
- ///
- /// [local time]: https://docs.rs/time/latest/time/struct.OffsetDateTime.html#method.now_local
- /// [`time` crate]: https://docs.rs/time/0.3/time/
- /// [`Formattable`]: https://docs.rs/time/0.3/time/formatting/trait.Formattable.html
- /// [well-known formats]: https://docs.rs/time/0.3/time/format_description/well_known/index.html
- /// [`format_description!`]: https://docs.rs/time/0.3/time/macros/macro.format_description.html
- /// [`time::format_description::parse`]: https://docs.rs/time/0.3/time/format_description/fn.parse.html
- /// [`time` book]: https://time-rs.github.io/book/api/format-description.html
- pub fn new(format: F) -> Self {
- Self { format }
- }
-}
-
-#[cfg(feature = "local-time")]
-impl<F> FormatTime for LocalTime<F>
-where
- F: Formattable,
-{
- fn format_time(&self, w: &mut Writer<'_>) -> fmt::Result {
- let now = OffsetDateTime::now_local().map_err(|_| fmt::Error)?;
- format_datetime(now, w, &self.format)
- }
-}
-
-#[cfg(feature = "local-time")]
-impl<F> Default for LocalTime<F>
-where
- F: Formattable + Default,
-{
- fn default() -> Self {
- Self::new(F::default())
- }
-}
-
-// === impl UtcTime ===
-
-impl UtcTime<well_known::Rfc3339> {
- /// Returns a formatter that formats the current [UTC time] in the
- /// [RFC 3339] format, which is a subset of the [ISO 8601] timestamp format.
- ///
- /// # Examples
- ///
- /// ```
- /// use tracing_subscriber::fmt::{self, time};
- ///
- /// let collector = tracing_subscriber::fmt()
- /// .with_timer(time::UtcTime::rfc_3339());
- /// # drop(collector);
- /// ```
- ///
- /// [UTC time]: https://docs.rs/time/0.3/time/struct.OffsetDateTime.html#method.now_utc
- /// [RFC 3339]: https://datatracker.ietf.org/doc/html/rfc3339
- /// [ISO 8601]: https://en.wikipedia.org/wiki/ISO_8601
- pub fn rfc_3339() -> Self {
- Self::new(well_known::Rfc3339)
- }
-}
-
-impl<F: Formattable> UtcTime<F> {
- /// Returns a formatter that formats the current [UTC time] using the
- /// [`time` crate], with the provided format. The format may be any
- /// type that implements the [`Formattable`] trait.
- ///
- /// Typically, the format will be a format description string, or one of the
- /// `time` crate's [well-known formats].
- ///
- /// If the format description is statically known, then the
- /// [`format_description!`] macro should be used. This is identical to the
- /// [`time::format_description::parse`] method, but runs at compile-time,
- /// throwing an error if the format description is invalid. If the desired format
- /// is not known statically (e.g., a user is providing a format string), then the
- /// [`time::format_description::parse`] method should be used. Note that this
- /// method is fallible.
- ///
- /// See the [`time` book] for details on the format description syntax.
- ///
- /// # Examples
- ///
- /// Using the [`format_description!`] macro:
- ///
- /// ```
- /// use tracing_subscriber::fmt::{self, time::UtcTime};
- /// use time::macros::format_description;
- ///
- /// let timer = UtcTime::new(format_description!("[hour]:[minute]:[second]"));
- /// let collector = tracing_subscriber::fmt()
- /// .with_timer(timer);
- /// # drop(collector);
- /// ```
- ///
- /// Using the [`format_description!`] macro requires enabling the `time`
- /// crate's "macros" feature flag.
- ///
- /// Using [`time::format_description::parse`]:
- ///
- /// ```
- /// use tracing_subscriber::fmt::{self, time::UtcTime};
- ///
- /// let time_format = time::format_description::parse("[hour]:[minute]:[second]")
- /// .expect("format string should be valid!");
- /// let timer = UtcTime::new(time_format);
- /// let collector = tracing_subscriber::fmt()
- /// .with_timer(timer);
- /// # drop(collector);
- /// ```
- ///
- /// Using a [well-known format][well-known formats] (this is equivalent to
- /// [`UtcTime::rfc_3339`]):
- ///
- /// ```
- /// use tracing_subscriber::fmt::{self, time::UtcTime};
- ///
- /// let timer = UtcTime::new(time::format_description::well_known::Rfc3339);
- /// let collector = tracing_subscriber::fmt()
- /// .with_timer(timer);
- /// # drop(collector);
- /// ```
- ///
- /// [UTC time]: https://docs.rs/time/latest/time/struct.OffsetDateTime.html#method.now_utc
- /// [`time` crate]: https://docs.rs/time/0.3/time/
- /// [`Formattable`]: https://docs.rs/time/0.3/time/formatting/trait.Formattable.html
- /// [well-known formats]: https://docs.rs/time/0.3/time/format_description/well_known/index.html
- /// [`format_description!`]: https://docs.rs/time/0.3/time/macros/macro.format_description.html
- /// [`time::format_description::parse`]: https://docs.rs/time/0.3/time/format_description/fn.parse.html
- /// [`time` book]: https://time-rs.github.io/book/api/format-description.html
- pub fn new(format: F) -> Self {
- Self { format }
- }
-}
-
-impl<F> FormatTime for UtcTime<F>
-where
- F: Formattable,
-{
- fn format_time(&self, w: &mut Writer<'_>) -> fmt::Result {
- format_datetime(OffsetDateTime::now_utc(), w, &self.format)
- }
-}
-
-impl<F> Default for UtcTime<F>
-where
- F: Formattable + Default,
-{
- fn default() -> Self {
- Self::new(F::default())
- }
-}
-
-fn format_datetime(
- now: OffsetDateTime,
- into: &mut Writer<'_>,
- fmt: &impl Formattable,
-) -> fmt::Result {
- let mut into = WriteAdaptor::new(into);
- now.format_into(&mut into, fmt)
- .map_err(|_| fmt::Error)
- .map(|_| ())
-}
diff --git a/vendor/tracing-subscriber-0.3.3/src/reload.rs b/vendor/tracing-subscriber-0.3.3/src/reload.rs
deleted file mode 100644
index b8ec67dfa..000000000
--- a/vendor/tracing-subscriber-0.3.3/src/reload.rs
+++ /dev/null
@@ -1,237 +0,0 @@
-//! Wrapper for a `Layer` to allow it to be dynamically reloaded.
-//!
-//! This module provides a [`Layer` type] which wraps another type implementing
-//! the [`Layer` trait], allowing the wrapped type to be replaced with another
-//! instance of that type at runtime.
-//!
-//! This can be used in cases where a subset of `Subscriber` functionality
-//! should be dynamically reconfigured, such as when filtering directives may
-//! change at runtime. Note that this layer introduces a (relatively small)
-//! amount of overhead, and should thus only be used as needed.
-//!
-//! [`Layer` type]: struct.Layer.html
-//! [`Layer` trait]: ../layer/trait.Layer.html
-use crate::layer;
-use crate::sync::RwLock;
-
-use std::{
- error, fmt,
- marker::PhantomData,
- sync::{Arc, Weak},
-};
-use tracing_core::{
- callsite, span,
- subscriber::{Interest, Subscriber},
- Event, Metadata,
-};
-
-/// Wraps a `Layer`, allowing it to be reloaded dynamically at runtime.
-#[derive(Debug)]
-pub struct Layer<L, S> {
- // TODO(eliza): this once used a `crossbeam_util::ShardedRwLock`. We may
- // eventually wish to replace it with a sharded lock implementation on top
- // of our internal `RwLock` wrapper type. If possible, we should profile
- // this first to determine if it's necessary.
- inner: Arc<RwLock<L>>,
- _s: PhantomData<fn(S)>,
-}
-
-/// Allows reloading the state of an associated `Layer`.
-#[derive(Debug)]
-pub struct Handle<L, S> {
- inner: Weak<RwLock<L>>,
- _s: PhantomData<fn(S)>,
-}
-
-/// Indicates that an error occurred when reloading a layer.
-#[derive(Debug)]
-pub struct Error {
- kind: ErrorKind,
-}
-
-#[derive(Debug)]
-enum ErrorKind {
- SubscriberGone,
- Poisoned,
-}
-
-// ===== impl Layer =====
-
-impl<L, S> crate::Layer<S> for Layer<L, S>
-where
- L: crate::Layer<S> + 'static,
- S: Subscriber,
-{
- fn on_layer(&mut self, subscriber: &mut S) {
- try_lock!(self.inner.write(), else return).on_layer(subscriber);
- }
-
- #[inline]
- fn register_callsite(&self, metadata: &'static Metadata<'static>) -> Interest {
- try_lock!(self.inner.read(), else return Interest::sometimes()).register_callsite(metadata)
- }
-
- #[inline]
- fn enabled(&self, metadata: &Metadata<'_>, ctx: layer::Context<'_, S>) -> bool {
- try_lock!(self.inner.read(), else return false).enabled(metadata, ctx)
- }
-
- #[inline]
- fn on_new_span(&self, attrs: &span::Attributes<'_>, id: &span::Id, ctx: layer::Context<'_, S>) {
- try_lock!(self.inner.read()).on_new_span(attrs, id, ctx)
- }
-
- #[inline]
- fn on_record(&self, span: &span::Id, values: &span::Record<'_>, ctx: layer::Context<'_, S>) {
- try_lock!(self.inner.read()).on_record(span, values, ctx)
- }
-
- #[inline]
- fn on_follows_from(&self, span: &span::Id, follows: &span::Id, ctx: layer::Context<'_, S>) {
- try_lock!(self.inner.read()).on_follows_from(span, follows, ctx)
- }
-
- #[inline]
- fn on_event(&self, event: &Event<'_>, ctx: layer::Context<'_, S>) {
- try_lock!(self.inner.read()).on_event(event, ctx)
- }
-
- #[inline]
- fn on_enter(&self, id: &span::Id, ctx: layer::Context<'_, S>) {
- try_lock!(self.inner.read()).on_enter(id, ctx)
- }
-
- #[inline]
- fn on_exit(&self, id: &span::Id, ctx: layer::Context<'_, S>) {
- try_lock!(self.inner.read()).on_exit(id, ctx)
- }
-
- #[inline]
- fn on_close(&self, id: span::Id, ctx: layer::Context<'_, S>) {
- try_lock!(self.inner.read()).on_close(id, ctx)
- }
-
- #[inline]
- fn on_id_change(&self, old: &span::Id, new: &span::Id, ctx: layer::Context<'_, S>) {
- try_lock!(self.inner.read()).on_id_change(old, new, ctx)
- }
-}
-
-impl<L, S> Layer<L, S>
-where
- L: crate::Layer<S> + 'static,
- S: Subscriber,
-{
- /// Wraps the given `Layer`, returning a `Layer` and a `Handle` that allows
- /// the inner type to be modified at runtime.
- pub fn new(inner: L) -> (Self, Handle<L, S>) {
- let this = Self {
- inner: Arc::new(RwLock::new(inner)),
- _s: PhantomData,
- };
- let handle = this.handle();
- (this, handle)
- }
-
- /// Returns a `Handle` that can be used to reload the wrapped `Layer`.
- pub fn handle(&self) -> Handle<L, S> {
- Handle {
- inner: Arc::downgrade(&self.inner),
- _s: PhantomData,
- }
- }
-}
-
-// ===== impl Handle =====
-
-impl<L, S> Handle<L, S>
-where
- L: crate::Layer<S> + 'static,
- S: Subscriber,
-{
- /// Replace the current layer with the provided `new_layer`.
- pub fn reload(&self, new_layer: impl Into<L>) -> Result<(), Error> {
- self.modify(|layer| {
- *layer = new_layer.into();
- })
- }
-
- /// Invokes a closure with a mutable reference to the current layer,
- /// allowing it to be modified in place.
- pub fn modify(&self, f: impl FnOnce(&mut L)) -> Result<(), Error> {
- let inner = self.inner.upgrade().ok_or(Error {
- kind: ErrorKind::SubscriberGone,
- })?;
-
- let mut lock = try_lock!(inner.write(), else return Err(Error::poisoned()));
- f(&mut *lock);
- // Release the lock before rebuilding the interest cache, as that
- // function will lock the new layer.
- drop(lock);
-
- callsite::rebuild_interest_cache();
- Ok(())
- }
-
- /// Returns a clone of the layer's current value if it still exists.
- /// Otherwise, if the subscriber has been dropped, returns `None`.
- pub fn clone_current(&self) -> Option<L>
- where
- L: Clone,
- {
- self.with_current(L::clone).ok()
- }
-
- /// Invokes a closure with a borrowed reference to the current layer,
- /// returning the result (or an error if the subscriber no longer exists).
- pub fn with_current<T>(&self, f: impl FnOnce(&L) -> T) -> Result<T, Error> {
- let inner = self.inner.upgrade().ok_or(Error {
- kind: ErrorKind::SubscriberGone,
- })?;
- let inner = try_lock!(inner.read(), else return Err(Error::poisoned()));
- Ok(f(&*inner))
- }
-}
-
-impl<L, S> Clone for Handle<L, S> {
- fn clone(&self) -> Self {
- Handle {
- inner: self.inner.clone(),
- _s: PhantomData,
- }
- }
-}
-
-// ===== impl Error =====
-
-impl Error {
- fn poisoned() -> Self {
- Self {
- kind: ErrorKind::Poisoned,
- }
- }
-
- /// Returns `true` if this error occurred because the layer was poisoned by
- /// a panic on another thread.
- pub fn is_poisoned(&self) -> bool {
- matches!(self.kind, ErrorKind::Poisoned)
- }
-
- /// Returns `true` if this error occurred because the `Subscriber`
- /// containing the reloadable layer was dropped.
- pub fn is_dropped(&self) -> bool {
- matches!(self.kind, ErrorKind::SubscriberGone)
- }
-}
-
-impl fmt::Display for Error {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- let msg = match self.kind {
- ErrorKind::SubscriberGone => "subscriber no longer exists",
- ErrorKind::Poisoned => "lock poisoned",
- };
- f.pad(msg)
- }
-}
-
-impl error::Error for Error {}
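The module docs at the top of the deleted `reload.rs` describe the `Layer`/`Handle` pair but carry no end-to-end example. A minimal sketch of the intended usage, assuming `LevelFilter` as the wrapped layer and the crate's `registry` and `fmt` features; this is an illustration, not part of the vendored sources:

```rust
use tracing::info;
use tracing_subscriber::{filter::LevelFilter, prelude::*, reload};

fn main() {
    // Wrap a filter layer so it can be swapped while the program runs.
    let (filter, handle) = reload::Layer::new(LevelFilter::WARN);

    tracing_subscriber::registry()
        .with(filter)
        .with(tracing_subscriber::fmt::layer())
        .init();

    info!("suppressed while the level is WARN");

    // `Handle::modify` replaces the wrapped value and, as shown above,
    // rebuilds the callsite interest cache afterwards.
    handle
        .modify(|f| *f = LevelFilter::INFO)
        .expect("subscriber should still exist");

    info!("enabled after the reload");
}
```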
diff --git a/vendor/tracing-subscriber-0.3.3/tests/filter.rs b/vendor/tracing-subscriber-0.3.3/tests/filter.rs
deleted file mode 100644
index 8386d34d2..000000000
--- a/vendor/tracing-subscriber-0.3.3/tests/filter.rs
+++ /dev/null
@@ -1,187 +0,0 @@
-#![cfg(feature = "env-filter")]
-
-mod support;
-use self::support::*;
-use tracing::{self, subscriber::with_default, Level};
-use tracing_subscriber::{
- filter::{EnvFilter, LevelFilter},
- prelude::*,
-};
-
-#[test]
-fn level_filter_event() {
- let filter: EnvFilter = "info".parse().expect("filter should parse");
- let (subscriber, finished) = subscriber::mock()
- .event(event::mock().at_level(Level::INFO))
- .event(event::mock().at_level(Level::WARN))
- .event(event::mock().at_level(Level::ERROR))
- .done()
- .run_with_handle();
- let subscriber = subscriber.with(filter);
-
- with_default(subscriber, || {
- tracing::trace!("this should be disabled");
- tracing::info!("this shouldn't be");
- tracing::debug!(target: "foo", "this should also be disabled");
- tracing::warn!(target: "foo", "this should be enabled");
- tracing::error!("this should be enabled too");
- });
-
- finished.assert_finished();
-}
-
-#[test]
-fn same_name_spans() {
- let filter: EnvFilter = "[foo{bar}]=trace,[foo{baz}]=trace"
- .parse()
- .expect("filter should parse");
- let (subscriber, finished) = subscriber::mock()
- .new_span(
- span::mock()
- .named("foo")
- .at_level(Level::TRACE)
- .with_field(field::mock("bar")),
- )
- .new_span(
- span::mock()
- .named("foo")
- .at_level(Level::TRACE)
- .with_field(field::mock("baz")),
- )
- .done()
- .run_with_handle();
- let subscriber = subscriber.with(filter);
- with_default(subscriber, || {
- tracing::trace_span!("foo", bar = 1);
- tracing::trace_span!("foo", baz = 1);
- });
-
- finished.assert_finished();
-}
-
-#[test]
-fn level_filter_event_with_target() {
- let filter: EnvFilter = "info,stuff=debug".parse().expect("filter should parse");
- let (subscriber, finished) = subscriber::mock()
- .event(event::mock().at_level(Level::INFO))
- .event(event::mock().at_level(Level::DEBUG).with_target("stuff"))
- .event(event::mock().at_level(Level::WARN).with_target("stuff"))
- .event(event::mock().at_level(Level::ERROR))
- .event(event::mock().at_level(Level::ERROR).with_target("stuff"))
- .done()
- .run_with_handle();
- let subscriber = subscriber.with(filter);
-
- with_default(subscriber, || {
- tracing::trace!("this should be disabled");
- tracing::info!("this shouldn't be");
- tracing::debug!(target: "stuff", "this should be enabled");
- tracing::debug!("but this shouldn't");
- tracing::trace!(target: "stuff", "and neither should this");
- tracing::warn!(target: "stuff", "this should be enabled");
- tracing::error!("this should be enabled too");
- tracing::error!(target: "stuff", "this should be enabled also");
- });
-
- finished.assert_finished();
-}
-
-#[test]
-fn not_order_dependent() {
- // this test reproduces tokio-rs/tracing#623
-
- let filter: EnvFilter = "stuff=debug,info".parse().expect("filter should parse");
- let (subscriber, finished) = subscriber::mock()
- .event(event::mock().at_level(Level::INFO))
- .event(event::mock().at_level(Level::DEBUG).with_target("stuff"))
- .event(event::mock().at_level(Level::WARN).with_target("stuff"))
- .event(event::mock().at_level(Level::ERROR))
- .event(event::mock().at_level(Level::ERROR).with_target("stuff"))
- .done()
- .run_with_handle();
- let subscriber = subscriber.with(filter);
-
- with_default(subscriber, || {
- tracing::trace!("this should be disabled");
- tracing::info!("this shouldn't be");
- tracing::debug!(target: "stuff", "this should be enabled");
- tracing::debug!("but this shouldn't");
- tracing::trace!(target: "stuff", "and neither should this");
- tracing::warn!(target: "stuff", "this should be enabled");
- tracing::error!("this should be enabled too");
- tracing::error!(target: "stuff", "this should be enabled also");
- });
-
- finished.assert_finished();
-}
-
-#[test]
-fn add_directive_enables_event() {
- // this test reproduces tokio-rs/tracing#591
-
- // by default, use info level
- let mut filter = EnvFilter::new(LevelFilter::INFO.to_string());
-
- // overwrite with a more specific directive
- filter = filter.add_directive("hello=trace".parse().expect("directive should parse"));
-
- let (subscriber, finished) = subscriber::mock()
- .event(event::mock().at_level(Level::INFO).with_target("hello"))
- .event(event::mock().at_level(Level::TRACE).with_target("hello"))
- .done()
- .run_with_handle();
- let subscriber = subscriber.with(filter);
-
- with_default(subscriber, || {
- tracing::info!(target: "hello", "hello info");
- tracing::trace!(target: "hello", "hello trace");
- });
-
- finished.assert_finished();
-}
-
-#[test]
-fn span_name_filter_is_dynamic() {
- let filter: EnvFilter = "info,[cool_span]=debug"
- .parse()
- .expect("filter should parse");
- let (subscriber, finished) = subscriber::mock()
- .event(event::mock().at_level(Level::INFO))
- .enter(span::mock().named("cool_span"))
- .event(event::mock().at_level(Level::DEBUG))
- .enter(span::mock().named("uncool_span"))
- .event(event::mock().at_level(Level::WARN))
- .event(event::mock().at_level(Level::DEBUG))
- .exit(span::mock().named("uncool_span"))
- .exit(span::mock().named("cool_span"))
- .enter(span::mock().named("uncool_span"))
- .event(event::mock().at_level(Level::WARN))
- .event(event::mock().at_level(Level::ERROR))
- .exit(span::mock().named("uncool_span"))
- .done()
- .run_with_handle();
- let subscriber = subscriber.with(filter);
-
- with_default(subscriber, || {
- tracing::trace!("this should be disabled");
- tracing::info!("this shouldn't be");
- let cool_span = tracing::info_span!("cool_span");
- let uncool_span = tracing::info_span!("uncool_span");
-
- {
- let _enter = cool_span.enter();
- tracing::debug!("i'm a cool event");
- tracing::trace!("i'm cool, but not cool enough");
- let _enter2 = uncool_span.enter();
- tracing::warn!("warning: extremely cool!");
- tracing::debug!("i'm still cool");
- }
-
- let _enter = uncool_span.enter();
- tracing::warn!("warning: not that cool");
- tracing::trace!("im not cool enough");
- tracing::error!("uncool error");
- });
-
- finished.assert_finished();
-}
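The deleted tests above drive `EnvFilter` by parsing directive strings in place; in an application the same directives usually come from `RUST_LOG`. A small sketch of that pattern, assuming the `env-filter` feature is enabled (illustration only, not part of the vendored sources):

```rust
use tracing_subscriber::EnvFilter;

fn main() {
    // Read "info,stuff=debug"-style directives from RUST_LOG, falling back
    // to plain "info" when the variable is unset or invalid.
    let filter = EnvFilter::try_from_default_env()
        .unwrap_or_else(|_| EnvFilter::new("info"));

    tracing_subscriber::fmt().with_env_filter(filter).init();

    tracing::info!("enabled at the default level");
    tracing::debug!(target: "stuff", "enabled only when RUST_LOG raises `stuff`");
}
```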
diff --git a/vendor/tracing-subscriber-0.3.3/tests/reload.rs b/vendor/tracing-subscriber-0.3.3/tests/reload.rs
deleted file mode 100644
index 5fe422e08..000000000
--- a/vendor/tracing-subscriber-0.3.3/tests/reload.rs
+++ /dev/null
@@ -1,81 +0,0 @@
-#![cfg(feature = "reload")]
-use std::sync::atomic::{AtomicUsize, Ordering};
-use tracing_core::{
- span::{Attributes, Id, Record},
- subscriber::Interest,
- Event, Metadata, Subscriber,
-};
-use tracing_subscriber::{layer, prelude::*, reload::*};
-
-pub struct NopSubscriber;
-
-impl Subscriber for NopSubscriber {
- fn register_callsite(&self, _: &'static Metadata<'static>) -> Interest {
- Interest::never()
- }
-
- fn enabled(&self, _: &Metadata<'_>) -> bool {
- false
- }
-
- fn new_span(&self, _: &Attributes<'_>) -> Id {
- Id::from_u64(1)
- }
-
- fn record(&self, _: &Id, _: &Record<'_>) {}
- fn record_follows_from(&self, _: &Id, _: &Id) {}
- fn event(&self, _: &Event<'_>) {}
- fn enter(&self, _: &Id) {}
- fn exit(&self, _: &Id) {}
-}
-
-#[test]
-fn reload_handle() {
- static FILTER1_CALLS: AtomicUsize = AtomicUsize::new(0);
- static FILTER2_CALLS: AtomicUsize = AtomicUsize::new(0);
-
- enum Filter {
- One,
- Two,
- }
-
- impl<S: Subscriber> tracing_subscriber::Layer<S> for Filter {
- fn register_callsite(&self, m: &Metadata<'_>) -> Interest {
- println!("REGISTER: {:?}", m);
- Interest::sometimes()
- }
-
- fn enabled(&self, m: &Metadata<'_>, _: layer::Context<'_, S>) -> bool {
- println!("ENABLED: {:?}", m);
- match self {
- Filter::One => FILTER1_CALLS.fetch_add(1, Ordering::SeqCst),
- Filter::Two => FILTER2_CALLS.fetch_add(1, Ordering::SeqCst),
- };
- true
- }
- }
- fn event() {
- tracing::trace!("my event");
- }
-
- let (layer, handle) = Layer::new(Filter::One);
-
- let subscriber = tracing_core::dispatcher::Dispatch::new(layer.with_subscriber(NopSubscriber));
-
- tracing_core::dispatcher::with_default(&subscriber, || {
- assert_eq!(FILTER1_CALLS.load(Ordering::SeqCst), 0);
- assert_eq!(FILTER2_CALLS.load(Ordering::SeqCst), 0);
-
- event();
-
- assert_eq!(FILTER1_CALLS.load(Ordering::SeqCst), 1);
- assert_eq!(FILTER2_CALLS.load(Ordering::SeqCst), 0);
-
- handle.reload(Filter::Two).expect("should reload");
-
- event();
-
- assert_eq!(FILTER1_CALLS.load(Ordering::SeqCst), 1);
- assert_eq!(FILTER2_CALLS.load(Ordering::SeqCst), 1);
- })
-}
diff --git a/vendor/tracing-subscriber/.cargo-checksum.json b/vendor/tracing-subscriber/.cargo-checksum.json
new file mode 100644
index 000000000..c4ac8776d
--- /dev/null
+++ b/vendor/tracing-subscriber/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"CHANGELOG.md":"b7eafabf247da01abf02b9b4aab602fd4f66c0da2ce0c8bcf9aa5fb030002180","Cargo.toml":"cb66c58bf1b31f81c8a6e6e394ba5c125a432c280e6559c052bbafc243694005","LICENSE":"898b1ae9821e98daf8964c8d6c7f61641f5f5aa78ad500020771c0939ee0dea1","README.md":"3a06b39f4d699fbc8d8edccea3afdc29b702f79a208abef04411b5c139c607cc","benches/enter.rs":"4a94a04e2abd07950ef2f0b646f4dcdf4ff00abf6396edb5a53c8b41b7691b1a","benches/filter.rs":"6374005ffa47fa19880bb95e3e37406f40ea72a02c5136f4d5eb4c663d452b18","benches/filter_log.rs":"612716bdf9a188093e84d014a4847f18157f148f7d64e54150cd5c91ac709a8a","benches/fmt.rs":"5a0ff37967ffef3a221eebb78855d031e2e883a8a67528c8e794cc6f16cbee8a","benches/support/mod.rs":"72bef51154da9c9b3d81300195c1929a818858fa4b4fc2aa07b49ca586f4cd39","src/field/debug.rs":"4ab50198a0b042d92fefa77b5cac0aef7ba6936149fa555f4b6e2036dcd7f2d7","src/field/delimited.rs":"5e7967637dc3181c097637dcb2a95f35db16583b5fc293b30211db5779ab21ab","src/field/display.rs":"da8cfcb22a39f451f075e2c3a9ce5193c6afe19853cdbd643239657cac5b7e47","src/field/mod.rs":"cb8ab273159f42fc8ebe71c82acc63c962e546328fc4aa9fd5948ce996ef9e05","src/filter/directive.rs":"6341c3a1c8b6d33171647964c35816317c81b03bb098b493f1f1a22222f6ce84","src/filter/env/builder.rs":"57c3706a21e87d2ce73aac305cd55def268c5acb9bfc08f68423c150fd058e76","src/filter/env/directive.rs":"ecd2a7ffb882869f8ea9b0398f5af58ce1797a216b9dc9086c21363d1d500e77","src/filter/env/field.rs":"e1e32a2fc39884c9a5df1d5047128e43f1d0720c0b9daa6bf1e08ca9bcc5f537","src/filter/env/mod.rs":"8403df3f061a1c266b6ab6b30b03c6eb32c1c9354037a2d1eeb36817932e6ea5","src/filter/filter_fn.rs":"0debbc4a4b4d2a57b2a06905017ac908bf34b0a64aaf961535fbf6f4d5a700a9","src/filter/layer_filters/combinator.rs":"695de9d8b0a04df09bea08cc40403e09ff66613c07f72c403f7bc65b89e1fd36","src/filter/layer_filters/mod.rs":"2f23fa79561248255a60d1948423a21bfac5bb8651e6c2ab29d311f4e387a8dc","src/filter/level.rs":"cc449757aac47caaf19dd5ba4d74c8efbcd7531fcd6c13da0c5f6fdda12cc9ca","src/filter/mod.rs":"8ebfd0dc92415ff27ec552f20919e598842a87186f13f120449053a96e1e3307","src/filter/targets.rs":"8fafbbaeb4023f498e64a2831be02fefb825345fbd58065fc7f6129dd70eae4b","src/fmt/fmt_layer.rs":"a596e32e196895866cbd867d52ca13edcdd7651aec971b39f10f9285322823b0","src/fmt/format/json.rs":"554985ed40f7c59787aae87626144241ca973929e33979c54f821b673b71fec9","src/fmt/format/mod.rs":"4e920ab448b1dd4b8c2679261dd337273620fd4f20b6439a4aab341c4d2b08e0","src/fmt/format/pretty.rs":"d4b61d70d1e5b9e01b856acc9db7b23dd27697c587e424f699fb586dd29f73a4","src/fmt/mod.rs":"94239bfefe2bd80722eb4c30f7d10cabe7b9319172a73a7ab5943092e84660fa","src/fmt/time/datetime.rs":"778d4604d800e46b940087394e7b72750738b554e02aea523fa9820ab0768c08","src/fmt/time/mod.rs":"30c97a9d3abd099f52c4c91c7b5f0d29ed9d54d80d1718c6fb74bfd664589de1","src/fmt/time/time_crate.rs":"1bfd59516a583e396afc1770250aa8c06b52f6162a6e7b2cadb860b7eebd9d76","src/fmt/writer.rs":"fa796f0afa3653bf9f666099c65df85d4f74ad1aa412ffc9058b0614632cf12b","src/layer/context.rs":"77137d8b2810c9059ce7838c3b665748bcb9765487d6103b92596e08b0e9e84b","src/layer/layered.rs":"6f08c9662a041652578054ba67b79c457029cc8c29301e8961b0d0e737a3e873","src/layer/mod.rs":"9c84a8260914c8ce7097c101c5be676b64952cf85bc1618d185729443aaabb03","src/layer/tests.rs":"3e974d627c4bc4269cfa10c82c890e596c9d46af8e6bc03c6c117bde1237e948","src/lib.rs":"81ecd4288bdbae864b1627de207779bd674081085d33ee1ff0c23b7df4e7f136","src/macros.rs":"e184bffc6b5999c48e365ad08343dca764a5fb711b789beb26bd1d5f1d767726","src/prelude.rs":"088635def33be9a4c4b6ed934dc22540c555e27d
62f7625a43aa9c0e525ca467","src/registry/extensions.rs":"0418b39287bbc06cc95b8cecd6a25aa808b8e04714d842340ff75db458cafe5b","src/registry/mod.rs":"76627b056ce39d006708a6273b6418d001b688f016f58aa546e7821d1ef7f3bb","src/registry/sharded.rs":"1b18f7eaf05bfb9ce6bcd1572dcf9bac352cc69d8ba4633f9679163546bc1d01","src/registry/stack.rs":"9ef333d6a8a28a064e80ff1e376dbb07bc597009010ec332b2dc3ab435d737c2","src/reload.rs":"c9522d15d5cd2b840d37e2bbf366e55c1372df5c75781fde12c8bd092e9e21d1","src/sync.rs":"7f78f3de5b618a999be0e61f936a233975e7769f1ebb55a0e48c3d199e9c45e3","src/util.rs":"55b4e9d63112f9d5a12a287273a9b1212741058384332d3edc024168cacfd627","tests/cached_layer_filters_dont_break_other_layers.rs":"b2084542a014abeff821b30b2b8c21e32bfdcffae53ce5335fb588f557fa4244","tests/duplicate_spans.rs":"3bf35184fb7d1dc5f33e5098820febbec37ef3ccd06b693d11b5585affb60ff4","tests/env_filter/main.rs":"b2d89ee7aaf94f0563e4e5b025cf43186ec61657b763b7c0ae010ff548635251","tests/env_filter/per_layer.rs":"19e9998922f24ec368fcbcda406f43a95335551c4c1669b509bbfc1ef216432a","tests/event_enabling.rs":"15e301a8ff6c74c454547dad15a47b5f11fc54e539162191f21462b6d5080830","tests/field_filter.rs":"fb8735801ba7ecabb421ca361bd1c846841aee63eecbdd665f9544a1cec70f67","tests/filter_log.rs":"086f1e708a2e7389024d7e36d963947909d94c1975db92f4fc425b5cba2af533","tests/fmt_max_level_hint.rs":"d4c6d6f976ae41ab8052fa610a7337ad7150802cbd5634cb30fc45c1f215cfcd","tests/hinted_layer_filters_dont_break_other_layers.rs":"d5ba9cfb6784cf59f007e673ad549dc722d109f6b3d4a69f6aa11b25ca10b469","tests/layer_filter_interests_are_cached.rs":"d036d1c4bc3754e94ebfdda9c841f4858ccec40aba0720f3fbf26c817bfe5a83","tests/layer_filters/boxed.rs":"04db459721a26d6502a2b3fbe42154c5a451021a9374a18c017d10971f44e0c0","tests/layer_filters/combinators.rs":"cdbfaa37fa5b0439ec2ae8028601d22120ff2a42867a2af8a3b27fc58e70cb6c","tests/layer_filters/downcast_raw.rs":"9b90ead571543cbe14e89b4fe637360d9baf3069f6f656ed3bdf65e7318648f1","tests/layer_filters/filter_scopes.rs":"02611bc58d0d8a67a127eca8cab1b2d9a9901bd2c8a8daad41adf6089b28aee0","tests/layer_filters/main.rs":"e9c3f5af7c65b41cde882d5a11a89bf8221e611f1ad881849546c4caf9a494c0","tests/layer_filters/per_event.rs":"424a027e5332e21e734a1833444352b7fbdeeecdc7a82b57f4efd6429bcfb14f","tests/layer_filters/targets.rs":"138e3f9ddd68571d94c5aff9d54ee2fbc5f44724c6ee42477a411740ccb79ee6","tests/layer_filters/trees.rs":"4df7b5cf12da44a9255c56e5b80e2b0cf84820230ba916f324c67bc3ee4e4605","tests/layer_filters/vec.rs":"eaf2e7fe0a76633cc02bc729513202a5fb169e2bdb5a8042d8c7bd1f7092691d","tests/multiple_layer_filter_interests_cached.rs":"1ea195f03e58d715228ec1b604f85bda2fc82812d05b2f6370d5edd34a035f32","tests/option.rs":"0268ca64fb3068bfa95126a477009611253130f902fc558a4605649945bdae29","tests/registry_max_level_hint.rs":"ba386d32b8d13832d7009163241c3d0723488c0393d85647eb9368776251e4fc","tests/registry_with_subscriber.rs":"61a545e1bf3f75efd0dd18c20bb93e8a1f2e0158b342179a94228c4cbd5bb9cc","tests/reload.rs":"8f169b60ab67bbc171dd7e576236b901293b5baa08ea469765a042375855e0f4","tests/same_len_filters.rs":"eceb745f7f5b6c8737c1860a58e2cf98a048fc486dee4379e94485f41c92c925","tests/support.rs":"d5d8ae7a143bda971e24dcba01137be0efea957d732b43502fd845c3bc952f8b","tests/unhinted_layer_filters_dont_break_other_layers.rs":"519cfef4977e511af938546d4208c645a28248c8ed8666daf180f0ad32f0a261","tests/utils.rs":"2c37d9f39010767190f72cb2b3faa3131985764aa547027197108299a9a6bb9e","tests/vec.rs":"d1176f3e1b0954129792a28282b95084d417143b0cc4e35887b95cee3c675392","tests/vec_subscriber_filter_i
nterests_cached.rs":"115a0f097cd649c570eabe74f82791bbe15b2de32a2eef403575661798aadd82"},"package":"a6176eae26dd70d0c919749377897b54a9276bd7061339665dd68777926b5a70"} \ No newline at end of file
diff --git a/vendor/tracing-subscriber-0.3.3/CHANGELOG.md b/vendor/tracing-subscriber/CHANGELOG.md
index c380ff3b1..f283dc61e 100644
--- a/vendor/tracing-subscriber-0.3.3/CHANGELOG.md
+++ b/vendor/tracing-subscriber/CHANGELOG.md
@@ -1,3 +1,395 @@
+# 0.3.16 (October 6, 2022)
+
+This release of `tracing-subscriber` fixes a regression introduced in
+[v0.3.15][subscriber-0.3.15] where `Option::None`'s `Layer` implementation would
+set the max level hint to `OFF`. In addition, it adds several new APIs,
+including the `Filter::event_enabled` method for filtering events based on
+field values, and the ability to log internal errors that occur when writing a
+log line.
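A short sketch of the error-logging knob named in this entry, under the assumption that `FmtLayer::log_internal_errors` follows the usual builder style (illustration only; see this entry's `Added` list below for the exact methods):

```rust
use tracing_subscriber::prelude::*;

fn main() {
    // Ask the fmt layer to report its own writer failures on stderr
    // instead of silently discarding them (assumed builder-style setter).
    tracing_subscriber::registry()
        .with(tracing_subscriber::fmt::layer().log_internal_errors(true))
        .init();

    tracing::info!("normal output; internal writer errors now surface on stderr");
}
```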
+
+This release also replaces the dependency on the unmaintained [`ansi-term`]
+crate with the [`nu-ansi-term`] crate, resolving an *informational* security
+advisory ([RUSTSEC-2021-0139]) regarding [`ansi-term`]'s maintenance status. This
+increases the minimum supported Rust version (MSRV) to Rust 1.50+, although the
+crate should still compile for the previous MSRV of Rust 1.49+ when the `ansi`
+feature is not enabled.
+
+### Fixed
+
+- **layer**: `Option::None`'s `Layer` impl always setting the `max_level_hint`
+ to `LevelFilter::OFF` ([#2321])
+- Compilation with `-Z minimal-versions` ([#2246])
+- **env-filter**: Clarify that disabled level warnings are emitted by
+ `tracing-subscriber` ([#2285])
+
+### Added
+
+- **fmt**: Log internal errors to `stderr` if writing a log line fails ([#2102])
+- **fmt**: `FmtLayer::log_internal_errors` and
+ `FmtSubscriber::log_internal_errors` methods for configuring whether internal
+ writer errors are printed to `stderr` ([#2102])
+- **fmt**: `#[must_use]` attributes on builders to warn if a `Subscriber` is
+ configured but not set as the default subscriber ([#2239])
+- **filter**: `Filter::event_enabled` method for filtering an event based on its
+ fields ([#2245], [#2251])
+- **filter**: `Targets::default_level` accessor ([#2242])
+
+### Changed
+
+- **ansi**: Replaced dependency on unmaintained `ansi-term` crate with
+  `nu-ansi-term` ([#2287]; fixes informational advisory [RUSTSEC-2021-0139])
+- `tracing-core`: updated to [0.1.30][core-0.1.30]
+- Minimum Supported Rust Version (MSRV) increased to Rust 1.50+ when the
+  `ansi` feature flag is enabled ([#2287])
+
+### Documented
+
+- **fmt**: Correct inaccuracies in `fmt::init` documentation ([#2224])
+- **filter**: Fix incorrect doc link in `filter::Not` combinator ([#2249])
+
+Thanks to new contributors @cgbur, @DesmondWillowbrook, @RalfJung, and
+@poliorcetics, as well as returning contributors @CAD97, @connec, @jswrenn,
+@guswynn, and @bryangarza, for contributing to this release!
+
+[`nu-ansi-term`]: https://github.com/nushell/nu-ansi-term
+[`ansi-term`]: https://github.com/ogham/rust-ansi-term
+[RUSTSEC-2021-0139]: https://rustsec.org/advisories/RUSTSEC-2021-0139.html
+[core-0.1.30]: https://github.com/tokio-rs/tracing/releases/tag/tracing-core-0.1.30
+[subscriber-0.3.15]: https://github.com/tokio-rs/tracing/releases/tag/tracing-subscriber-0.3.15
+[#2321]: https://github.com/tokio-rs/tracing/pull/2321
+[#2246]: https://github.com/tokio-rs/tracing/pull/2246
+[#2285]: https://github.com/tokio-rs/tracing/pull/2285
+[#2102]: https://github.com/tokio-rs/tracing/pull/2102
+[#2239]: https://github.com/tokio-rs/tracing/pull/2239
+[#2245]: https://github.com/tokio-rs/tracing/pull/2245
+[#2251]: https://github.com/tokio-rs/tracing/pull/2251
+[#2287]: https://github.com/tokio-rs/tracing/pull/2287
+[#2224]: https://github.com/tokio-rs/tracing/pull/2224
+[#2249]: https://github.com/tokio-rs/tracing/pull/2249
+
+# 0.3.15 (Jul 20, 2022)
+
+This release fixes a bug where the `reload` layer would fail to pass through
+`max_level_hint` to the underlying layer, potentially breaking filtering.
+
+### Fixed
+
+- **reload**: pass through `max_level_hint` to the inner `Layer` ([#2204])
+
+Thanks to @guswynn for contributing to this release!
+
+[#2204]: https://github.com/tokio-rs/tracing/pull/2204
+
+# 0.3.14 (Jul 1, 2022)
+
+This release fixes multiple filtering bugs in the `Layer` implementations for
+`Option<impl Layer>` and `Vec<impl Layer>`.
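+
+As a quick sketch of why these impls matter: a layer can be installed
+conditionally by wrapping it in an `Option`, and with the fixes below a `None`
+value neither disables all events nor breaks max-level filtering. The
+`DEBUG_LOG` variable is made up for the example:
+
+```rust
+use tracing_subscriber::{fmt, prelude::*};
+
+fn main() {
+    // `Option<impl Layer>` implements `Layer`, so this layer is only added
+    // when the (hypothetical) DEBUG_LOG environment variable is set.
+    let debug_log = std::env::var("DEBUG_LOG")
+        .is_ok()
+        .then(|| fmt::layer().with_file(true).with_line_number(true));
+
+    tracing_subscriber::registry()
+        .with(fmt::layer())
+        .with(debug_log)
+        .init();
+}
+```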
+
+### Fixed
+
+- **layer**: `Layer::event_enabled` implementation for `Option<impl Layer<S>>`
+ returning `false` when the `Option` is `None`, disabling all events globally
+ ([#2193])
+- **layer**: `Layer::max_level_hint` implementation for `Option<impl Layer<S>>`
+ incorrectly disabling max level filtering when the option is `None` ([#2195])
+- **layer**: `Layer::max_level_hint` implementation for `Vec<impl Layer<S>>`
+ returning `LevelFilter::ERROR` rather than `LevelFilter::OFF` when the `Vec`
+ is empty ([#2195])
+
+Thanks to @CAD97 and @guswynn for contributing to this release!
+
+[#2193]: https://github.com/tokio-rs/tracing/pull/2193
+[#2195]: https://github.com/tokio-rs/tracing/pull/2195
+
+# 0.3.13 (Jun 30, 2022) (YANKED)
+
+This release of `tracing-subscriber` fixes a compilation failure due to an
+incorrect `tracing-core` dependency that was introduced in v0.3.12.
+
+### Changed
+
+- **tracing_core**: Updated minimum dependency version to 0.1.28 ([#2190])
+
+[#2190]: https://github.com/tokio-rs/tracing/pull/2190
+
+# 0.3.12 (Jun 29, 2022) (YANKED)
+
+This release of `tracing-subscriber` adds a new `Layer::event_enabled` method,
+which allows `Layer`s to filter events *after* their field values are recorded;
+a `Filter` implementation for `reload::Layer`, to make using `reload` with
+per-layer filtering more ergonomic, and additional inherent method downcasting
+APIs for the `Layered` type. In addition, it includes dependency updates, and
+minor fixes for documentation and feature flagging.
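+
+A minimal sketch of the `reload`-as-`Filter` use case (the structure follows
+the `reload` module documentation; error handling is elided):
+
+```rust
+use tracing_subscriber::{filter::LevelFilter, fmt, prelude::*, reload};
+
+fn main() {
+    // Wrap a level filter in a `reload::Layer` and attach it as a *per-layer*
+    // filter, which works now that `reload::Layer` implements `Filter`.
+    let (level, handle) = reload::Layer::new(LevelFilter::INFO);
+
+    tracing_subscriber::registry()
+        .with(fmt::layer().with_filter(level))
+        .init();
+
+    tracing::debug!("not emitted yet");
+
+    // Later, raise the verbosity without rebuilding the subscriber.
+    handle.reload(LevelFilter::DEBUG).expect("reload failed");
+    tracing::debug!("now emitted");
+}
+```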
+
+### Added
+
+- **layer**: `Layer::event_enabled` method, which can be implemented to filter
+ events based on their field values ([#2008])
+- **reload**: `Filter` implementation for `reload::Layer` ([#2159])
+- **layer**: `Layered::downcast_ref` and `Layered::is` inherent methods
+ ([#2160])
+
+### Changed
+
+- **parking_lot**: Updated dependency on `parking_lot` to 0.13.0 ([#2143])
+- Replaced `lazy_static` dependency with `once_cell` ([#2147])
+
+### Fixed
+
+- Don't enable `tracing-core` features by default ([#2107])
+- Several documentation link and typo fixes ([#2064], [#2068], [#2077], [#2161],
+ [#1088])
+
+Thanks to @ben0x539, @jamesmunns, @georgemp, @james7132, @jswrenn, @CAD97, and
+@guswynn for contributing to this release!
+
+[#2008]: https://github.com/tokio-rs/tracing/pull/2008
+[#2159]: https://github.com/tokio-rs/tracing/pull/2159
+[#2160]: https://github.com/tokio-rs/tracing/pull/2160
+[#2143]: https://github.com/tokio-rs/tracing/pull/2143
+[#2107]: https://github.com/tokio-rs/tracing/pull/2107
+[#2064]: https://github.com/tokio-rs/tracing/pull/2064
+[#2068]: https://github.com/tokio-rs/tracing/pull/2068
+[#2077]: https://github.com/tokio-rs/tracing/pull/2077
+[#2161]: https://github.com/tokio-rs/tracing/pull/2161
+[#1088]: https://github.com/tokio-rs/tracing/pull/1088
+
+# 0.3.11 (Apr 9, 2022)
+
+This is a bugfix release for the `Filter` implementation for `EnvFilter` added
+in [v0.3.10].
+
+### Fixed
+
+- **env-filter**: Added missing `Filter::on_record` callback to `EnvFilter`'s
+ `Filter` impl ([#2058])
+- **env-filter**: Fixed method resolution issues when calling `EnvFilter`
+ methods with both the `Filter` and `Layer` traits in scope ([#2057])
+- **env-filter**: Fixed `EnvFilter::builder().parse()` and other parsing methods
+ returning an error when parsing an empty string ([#2052])
+
+Thanks to new contributor @Ma124 for contributing to this release!
+
+[v0.3.10]: https://github.com/tokio-rs/tracing/releases/tag/tracing-subscriber-0.3.10
+[#2058]: https://github.com/tokio-rs/tracing/pull/2058
+[#2057]: https://github.com/tokio-rs/tracing/pull/2057
+[#2052]: https://github.com/tokio-rs/tracing/pull/2052
+
+# 0.3.10 (Apr 1, 2022)
+
+This release adds several new features, including a `Filter` implementation and
+new builder API for `EnvFilter`, support for using a `Vec<L> where L: Layer` as
+a `Layer`, and a number of smaller API improvements to make working with dynamic
+and reloadable layers easier.
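+
+A sketch of composing a dynamically sized layer stack with the new `Vec`
+implementation and `Layer::boxed` (the `LOG_TO_STDERR` variable is made up for
+the example):
+
+```rust
+use tracing_subscriber::{fmt, prelude::*, Layer, Registry};
+
+fn main() {
+    // Build up a list of type-erased layers at runtime.
+    let mut layers: Vec<Box<dyn Layer<Registry> + Send + Sync>> = Vec::new();
+    layers.push(fmt::layer().boxed());
+    if std::env::var("LOG_TO_STDERR").is_ok() {
+        // A second copy of the output on stderr (illustrative only).
+        layers.push(fmt::layer().with_writer(std::io::stderr).boxed());
+    }
+
+    // `Vec<L>` itself implements `Layer` when `L: Layer`.
+    tracing_subscriber::registry().with(layers).init();
+}
+```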
+
+### Added
+
+- **registry**: Implement `Filter` for `EnvFilter`, allowing it to be used with
+ per-layer filtering ([#1983])
+- **registry**: `Filter::on_new_span`, `Filter::on_enter`,
+ `Filter::on_exit`, `Filter::on_close` and `Filter::on_record` callbacks to
+ allow `Filter`s to track span states internally ([#1973], [#2017], [#2031])
+- **registry**: `Filtered::filter` and `Filtered::filter_mut` accessors
+ ([#1959])
+- **registry**: `Filtered::inner` and `Filtered::inner_mut` accessors to borrow
+ the wrapped `Layer` ([#2034])
+- **layer**: Implement `Layer` for `Vec<L: Layer>`, to allow composing together
+ a dynamically sized list of `Layer`s ([#2027])
+- **layer**: `Layer::boxed` method to make type-erasing `Layer`s easier
+ ([#2026])
+- **fmt**: `fmt::Layer::writer` and `fmt::Layer::writer_mut` accessors ([#2034])
+- **fmt**: `fmt::Layer::set_ansi` method to allow changing the ANSI formatting
+ configuration at runtime ([#2034])
+- **env-filter**: `EnvFilter::builder` to configure a new `EnvFilter` prior to
+ parsing it ([#2035])
+- Several documentation fixes and improvements ([#1972], [#1971], [#2023])
+
+### Fixed
+
+- **fmt**: `fmt::Layer`'s auto traits no longer depend on the `Subscriber` type
+ parameter's auto traits ([#2025])
+- **env-filter**: Fixed missing help text when the `ansi` feature is disabled
+ ([#2029])
+
+Thanks to new contributors @TimoFreiberg and @wagenet, as well as @CAD97 for
+contributing to this release!
+
+[#1983]: https://github.com/tokio-rs/tracing/pull/1983
+[#1973]: https://github.com/tokio-rs/tracing/pull/1973
+[#2017]: https://github.com/tokio-rs/tracing/pull/2017
+[#2031]: https://github.com/tokio-rs/tracing/pull/2031
+[#1959]: https://github.com/tokio-rs/tracing/pull/1959
+[#2034]: https://github.com/tokio-rs/tracing/pull/2034
+[#2027]: https://github.com/tokio-rs/tracing/pull/2027
+[#2026]: https://github.com/tokio-rs/tracing/pull/2026
+[#2035]: https://github.com/tokio-rs/tracing/pull/2035
+[#1972]: https://github.com/tokio-rs/tracing/pull/1972
+[#1971]: https://github.com/tokio-rs/tracing/pull/1971
+[#2023]: https://github.com/tokio-rs/tracing/pull/2023
+[#2025]: https://github.com/tokio-rs/tracing/pull/2025
+[#2029]: https://github.com/tokio-rs/tracing/pull/2029
+
+# 0.3.9 (Feb 17, 2022)
+
+This release updates the minimum supported Rust version (MSRV) to 1.49.0, and
+updates the (optional) dependency on `parking_lot` to v0.12.
+
+### Changed
+
+- Updated minimum supported Rust version (MSRV) to 1.49.0 ([#1913])
+- `parking_lot`: updated to v0.12 ([008339d])
+
+### Added
+
+- **fmt**: Documentation improvements ([#1926], [#1927])
+
+[#1913]: https://github.com/tokio-rs/tracing/pull/1913
+[#1926]: https://github.com/tokio-rs/tracing/pull/1926
+[#1927]: https://github.com/tokio-rs/tracing/pull/1927
+[008339d]: https://github.com/tokio-rs/tracing/commit/008339d1e8750ffe7b4634fc7789bda0c522424f
+
+# 0.3.8 (Feb 4, 2022)
+
+This release adds *experimental* support to the `format::Json` formatter for
+recording structured field values using the [`valuable`] crate. In
+particular, user-defined types which are recorded using their
+[`valuable::Valuable`] implementations will be serialized as JSON objects,
+rather than using their `fmt::Debug` representation. See [this blog post][post]
+for details on `valuable`.
+
+Note that `valuable` support currently requires `--cfg tracing_unstable`. See
+the documentation for details.
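+
+A rough sketch of what this enables, assuming the `valuable` and `json`
+features on `tracing` and `tracing-subscriber`, the `valuable` crate's derive
+macro, and `RUSTFLAGS="--cfg tracing_unstable"`; the exact recording call
+(here, `Valuable::as_value`) should be checked against the `tracing`
+documentation:
+
+```rust
+use valuable::Valuable;
+
+// `Valuable` gives `tracing` structured access to this type's fields.
+#[derive(Valuable)]
+struct Order {
+    id: u64,
+    items: Vec<String>,
+}
+
+fn main() {
+    // JSON formatter (requires the "json" feature).
+    tracing_subscriber::fmt().json().init();
+
+    let order = Order {
+        id: 42,
+        items: vec!["widget".to_string()],
+    };
+
+    // With valuable support enabled, `order` is emitted as a nested JSON
+    // object rather than as a flat `fmt::Debug` string. (Assumes `as_value()`
+    // output is accepted as a field value under `tracing_unstable`.)
+    tracing::info!(order = order.as_value(), "order placed");
+}
+```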
+
+Additionally, this release includes a number of other smaller API improvements.
+
+### Added
+
+- **json**: Experimental support for recording [`valuable`] values as structured
+ JSON ([#1862], [#1901])
+- **filter**: `Targets::would_enable` method for testing if a `Targets` filter
+ would enable a given target ([#1903])
+- **fmt**: `map_event_format`, `map_fmt_fields`, and `map_writer` methods to
+ `fmt::Layer` and `fmt::SubscriberBuilder` ([#1871])
+
+### Changed
+
+- `tracing-core`: updated to [0.1.22][core-0.1.22]
+
+### Fixed
+
+- Set `smallvec` minimal version to 1.2.0, to fix compilation errors with `-Z
+ minimal-versions` ([#1890])
+- Minor documentation fixes ([#1902], [#1893])
+
+Thanks to @guswynn, @glts, and @lilyball for contributing to this release!
+
+[`valuable`]: https://crates.io/crates/valuable
+[`valuable::Valuable`]: https://docs.rs/valuable/latest/valuable/trait.Valuable.html
+[post]: https://tokio.rs/blog/2021-05-valuable
+[core-0.1.22]: https://github.com/tokio-rs/tracing/releases/tag/tracing-core-0.1.22
+[#1862]: https://github.com/tokio-rs/tracing/pull/1862
+[#1901]: https://github.com/tokio-rs/tracing/pull/1901
+[#1903]: https://github.com/tokio-rs/tracing/pull/1903
+[#1871]: https://github.com/tokio-rs/tracing/pull/1871
+[#1890]: https://github.com/tokio-rs/tracing/pull/1890
+[#1902]: https://github.com/tokio-rs/tracing/pull/1902
+[#1893]: https://github.com/tokio-rs/tracing/pull/1893
+
+# 0.3.7 (Jan 25, 2022)
+
+This release adds combinators for combining filters.
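+
+For illustration, a sketch using the new combinators via the `FilterExt`
+extension trait (the target name is made up):
+
+```rust
+use tracing_subscriber::{
+    filter::{filter_fn, FilterExt, LevelFilter},
+    fmt,
+    prelude::*,
+};
+
+fn main() {
+    // Enable INFO and more severe events, *and* exclude a noisy module.
+    let quiet = filter_fn(|meta| !meta.target().starts_with("my_app::noisy"));
+    let filter = LevelFilter::INFO.and(quiet);
+
+    tracing_subscriber::registry()
+        .with(fmt::layer().with_filter(filter))
+        .init();
+}
+```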
+
+Additionally, this release also updates the `thread-local` crate to v1.1.4,
+fixing warnings for the security advisory [RUSTSEC-2022-0006]. Note that
+previous versions of `tracing-subscriber` did not use any of the `thread-local`
+crate's APIs affected by the vulnerability. However, updating the version fixes
+warnings emitted by `cargo audit` and similar tools.
+
+### Added
+
+- **filter**: Added combinators for combining filters ([#1578])
+
+### Fixed
+
+- **registry**: Updated `thread-local` to v1.1.4 ([#1858])
+
+Thanks to new contributor @matze for contributing to this release!
+
+[RUSTSEC-2022-0006]: https://rustsec.org/advisories/RUSTSEC-2022-0006
+[#1578]: https://github.com/tokio-rs/tracing/pull/1578
+[#1858]: https://github.com/tokio-rs/tracing/pull/1858
+
+# 0.3.6 (Jan 14, 2022)
+
+This release adds configuration options to `tracing_subscriber::fmt` to log
+source code locations for events.
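+
+For example (a minimal sketch):
+
+```rust
+fn main() {
+    // Include the source file and line number of each event in the output.
+    tracing_subscriber::fmt()
+        .with_file(true)
+        .with_line_number(true)
+        .init();
+
+    tracing::info!("logged with its source location");
+}
+```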
+
+### Added
+
+- **fmt**: Added `with_file` and `with_line_number`
+ configuration methods to `fmt::Format`, `fmt::SubscriberBuilder`, and
+ `fmt::Layer` ([#1773])
+
+### Fixed
+
+- **fmt**: Removed incorrect leading comma from span fields with the `Pretty`
+ formatter ([#1833])
+
+### Deprecated
+
+- **fmt**: Deprecated `Pretty::with_source_location`, as it can now be replaced
+ by the more general `Format`, `SubscriberBuilder`, and `Layer` methods
+ ([#1773])
+
+Thanks to new contributor @renecouto for contributing to this release!
+
+[#1773]: https://github.com/tokio-rs/tracing/pull/1773
+[#1833]: https://github.com/tokio-rs/tracing/pull/1833
+
+# 0.3.5 (Dec 29, 2021)
+
+This release re-enables `RUST_LOG` filtering in `tracing_subscriber::fmt`'s
+default initialization methods, and adds an `OffsetLocalTime` formatter for
+using local timestamps with the `time` crate.
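+
+A minimal sketch of the restored default behavior (run the program with, e.g.,
+`RUST_LOG=my_app=debug`):
+
+```rust
+fn main() {
+    // Without the "env-filter" feature, `fmt::init()` again honors `RUST_LOG`
+    // through a `Targets` filter.
+    tracing_subscriber::fmt::init();
+
+    tracing::debug!("emitted only when RUST_LOG enables the debug level");
+}
+```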
+
+### Added
+
+- **fmt**: Added `OffsetLocalTime` formatter to `fmt::time` for formatting local
+ timestamps with a fixed offset ([#1772])
+
+### Fixed
+
+- **fmt**: Added a `Targets` filter to `fmt::init()` and `fmt::try_init()` when
+ the "env-filter" feature is disabled, so that `RUST_LOG` is still honored
+ ([#1781])
+
+Thanks to @marienz and @ishitatsuyuki for contributing to this release!
+
+[#1772]: https://github.com/tokio-rs/tracing/pull/1772
+[#1781]: https://github.com/tokio-rs/tracing/pull/1781
+
+# 0.3.4 (Dec 23, 2021)
+
+This release contains bugfixes for the `fmt` module, as well as documentation
+improvements.
+
+### Fixed
+
+- **fmt**: Fixed `fmt` not emitting log lines when timestamp formatting fails
+ ([#1689])
+- **fmt**: Fixed double space before thread IDs with `Pretty` formatter
+ ([#1778])
+- Several documentation improvements ([#1608], [#1699], [#1701])
+
+[#1689]: https://github.com/tokio-rs/tracing/pull/1689
+[#1778]: https://github.com/tokio-rs/tracing/pull/1778
+[#1608]: https://github.com/tokio-rs/tracing/pull/1608
+[#1699]: https://github.com/tokio-rs/tracing/pull/1699
+[#1701]: https://github.com/tokio-rs/tracing/pull/1701
+
+Thanks to new contributors @Swatinem and @rukai for contributing to this
+release!
+
# 0.3.3 (Nov 29, 2021)
This release fixes a pair of regressions in `tracing-subscriber`'s `fmt` module.
diff --git a/vendor/tracing-subscriber-0.3.3/Cargo.toml b/vendor/tracing-subscriber/Cargo.toml
index b5e7ba7db..c65075964 100644
--- a/vendor/tracing-subscriber-0.3.3/Cargo.toml
+++ b/vendor/tracing-subscriber/Cargo.toml
@@ -11,20 +11,39 @@
[package]
edition = "2018"
-rust-version = "1.42.0"
+rust-version = "1.50.0"
name = "tracing-subscriber"
-version = "0.3.3"
-authors = ["Eliza Weisman <eliza@buoyant.io>", "David Barsky <me@davidbarsky.com>", "Tokio Contributors <team@tokio.rs>"]
-description = "Utilities for implementing and composing `tracing` subscribers.\n"
+version = "0.3.16"
+authors = [
+ "Eliza Weisman <eliza@buoyant.io>",
+ "David Barsky <me@davidbarsky.com>",
+ "Tokio Contributors <team@tokio.rs>",
+]
+description = """
+Utilities for implementing and composing `tracing` subscribers.
+"""
homepage = "https://tokio.rs"
readme = "README.md"
-keywords = ["logging", "tracing", "metrics", "subscriber"]
-categories = ["development-tools::debugging", "development-tools::profiling", "asynchronous"]
+keywords = [
+ "logging",
+ "tracing",
+ "metrics",
+ "subscriber",
+]
+categories = [
+ "development-tools::debugging",
+ "development-tools::profiling",
+ "asynchronous",
+]
license = "MIT"
repository = "https://github.com/tokio-rs/tracing"
+
[package.metadata.docs.rs]
all-features = true
-rustdoc-args = ["--cfg", "docsrs"]
+rustdoc-args = [
+ "--cfg",
+ "docsrs",
+]
[[bench]]
name = "filter"
@@ -41,20 +60,21 @@ harness = false
[[bench]]
name = "enter"
harness = false
-[dependencies.ansi_term]
-version = "0.12"
+
+[dependencies.matchers]
+version = "0.1.0"
optional = true
-[dependencies.lazy_static]
-version = "1"
+[dependencies.nu-ansi-term]
+version = "0.46.0"
optional = true
-[dependencies.matchers]
-version = "0.1.0"
+[dependencies.once_cell]
+version = "1.13.0"
optional = true
[dependencies.parking_lot]
-version = ">= 0.7, <= 0.11"
+version = "0.12.1"
optional = true
[dependencies.regex]
@@ -64,53 +84,58 @@ optional = true
default-features = false
[dependencies.serde]
-version = "1.0"
+version = "1.0.140"
optional = true
[dependencies.serde_json]
-version = "1.0"
+version = "1.0.82"
optional = true
[dependencies.sharded-slab]
-version = "0.1.0"
+version = "0.1.4"
optional = true
[dependencies.smallvec]
-version = "1"
+version = "1.9.0"
optional = true
[dependencies.thread_local]
-version = "1.0.1"
+version = "1.1.4"
optional = true
[dependencies.time]
-version = "0.3"
+version = "0.3.2"
features = ["formatting"]
optional = true
[dependencies.tracing]
-version = "0.1"
+version = "0.1.35"
optional = true
default-features = false
[dependencies.tracing-core]
-version = "0.1.20"
+version = "0.1.30"
+default-features = false
[dependencies.tracing-log]
-version = "0.1.2"
-features = ["log-tracer", "std"]
+version = "0.1.3"
+features = [
+ "log-tracer",
+ "std",
+]
optional = true
default-features = false
[dependencies.tracing-serde]
-version = "0.1.2"
+version = "0.1.3"
optional = true
+
[dev-dependencies.criterion]
-version = "0.3"
-default_features = false
+version = "0.3.6"
+default-features = false
[dev-dependencies.log]
-version = "0.4"
+version = "0.4.17"
[dev-dependencies.regex]
version = "1"
@@ -118,33 +143,90 @@ features = ["std"]
default-features = false
[dev-dependencies.time]
-version = "0.3"
-features = ["formatting", "macros"]
+version = "0.3.2"
+features = [
+ "formatting",
+ "macros",
+]
[dev-dependencies.tokio]
-version = "0.2"
-features = ["rt-core", "macros"]
+version = "1"
+features = [
+ "rt",
+ "macros",
+]
[dev-dependencies.tracing]
-version = "0.1"
+version = "0.1.35"
[dev-dependencies.tracing-futures]
-version = "0.2"
-features = ["std-future", "std"]
+version = "0.2.0"
+features = [
+ "std-future",
+ "std",
+]
default-features = false
[dev-dependencies.tracing-log]
-version = "0.1.2"
+version = "0.1.3"
[features]
alloc = []
-ansi = ["fmt", "ansi_term"]
-default = ["smallvec", "fmt", "ansi", "tracing-log", "std"]
-env-filter = ["matchers", "regex", "lazy_static", "tracing", "std"]
-fmt = ["registry", "std"]
-json = ["tracing-serde", "serde", "serde_json"]
+ansi = [
+ "fmt",
+ "nu-ansi-term",
+]
+default = [
+ "smallvec",
+ "fmt",
+ "ansi",
+ "tracing-log",
+ "std",
+]
+env-filter = [
+ "matchers",
+ "regex",
+ "once_cell",
+ "tracing",
+ "std",
+ "thread_local",
+]
+fmt = [
+ "registry",
+ "std",
+]
+json = [
+ "tracing-serde",
+ "serde",
+ "serde_json",
+]
local-time = ["time/local-offset"]
-registry = ["sharded-slab", "thread_local", "std"]
-std = ["alloc", "tracing-core/std"]
+registry = [
+ "sharded-slab",
+ "thread_local",
+ "std",
+]
+std = [
+ "alloc",
+ "tracing-core/std",
+]
+valuable = [
+ "tracing-core/valuable",
+ "valuable_crate",
+ "valuable-serde",
+ "tracing-serde/valuable",
+]
+
+[target."cfg(tracing_unstable)".dependencies.valuable-serde]
+version = "0.1.0"
+optional = true
+default-features = false
+
+[target."cfg(tracing_unstable)".dependencies.valuable_crate]
+version = "0.1.0"
+optional = true
+default-features = false
+package = "valuable"
+
[badges.maintenance]
status = "experimental"
diff --git a/vendor/tracing-subscriber-0.3.3/LICENSE b/vendor/tracing-subscriber/LICENSE
index cdb28b4b5..cdb28b4b5 100644
--- a/vendor/tracing-subscriber-0.3.3/LICENSE
+++ b/vendor/tracing-subscriber/LICENSE
diff --git a/vendor/tracing-subscriber-0.3.3/README.md b/vendor/tracing-subscriber/README.md
index 75c62e8ca..124fb956d 100644
--- a/vendor/tracing-subscriber-0.3.3/README.md
+++ b/vendor/tracing-subscriber/README.md
@@ -21,7 +21,7 @@ Utilities for implementing and composing [`tracing`][tracing] subscribers.
[crates-badge]: https://img.shields.io/crates/v/tracing-subscriber.svg
[crates-url]: https://crates.io/crates/tracing-subscriber
[docs-badge]: https://docs.rs/tracing-subscriber/badge.svg
-[docs-url]: https://docs.rs/tracing-subscriber/0.3.1
+[docs-url]: https://docs.rs/tracing-subscriber/0.3.15
[docs-master-badge]: https://img.shields.io/badge/docs-master-blue
[docs-master-url]: https://tracing-rs.netlify.com/tracing_subscriber
[mit-badge]: https://img.shields.io/badge/license-MIT-blue.svg
@@ -32,14 +32,14 @@ Utilities for implementing and composing [`tracing`][tracing] subscribers.
[discord-url]: https://discord.gg/EeF3cQw
[maint-badge]: https://img.shields.io/badge/maintenance-experimental-blue.svg
-*Compiler support: [requires `rustc` 1.42+][msrv]*
+*Compiler support: [requires `rustc` 1.50+][msrv]*
[msrv]: #supported-rust-versions
## Supported Rust Versions
Tracing is built against the latest stable release. The minimum supported
-version is 1.42. The current Tracing version is not guaranteed to build on Rust
+version is 1.50. The current Tracing version is not guaranteed to build on Rust
versions earlier than the minimum supported version.
Tracing follows the same compiler support policies as the rest of the Tokio
diff --git a/vendor/tracing-subscriber-0.3.3/benches/enter.rs b/vendor/tracing-subscriber/benches/enter.rs
index 49c6e730a..49c6e730a 100644
--- a/vendor/tracing-subscriber-0.3.3/benches/enter.rs
+++ b/vendor/tracing-subscriber/benches/enter.rs
diff --git a/vendor/tracing-subscriber-0.3.3/benches/filter.rs b/vendor/tracing-subscriber/benches/filter.rs
index 91ab9c91d..91ab9c91d 100644
--- a/vendor/tracing-subscriber-0.3.3/benches/filter.rs
+++ b/vendor/tracing-subscriber/benches/filter.rs
diff --git a/vendor/tracing-subscriber-0.3.3/benches/filter_log.rs b/vendor/tracing-subscriber/benches/filter_log.rs
index 4dcf3b4ec..4dcf3b4ec 100644
--- a/vendor/tracing-subscriber-0.3.3/benches/filter_log.rs
+++ b/vendor/tracing-subscriber/benches/filter_log.rs
diff --git a/vendor/tracing-subscriber-0.3.3/benches/fmt.rs b/vendor/tracing-subscriber/benches/fmt.rs
index a039e66d4..a039e66d4 100644
--- a/vendor/tracing-subscriber-0.3.3/benches/fmt.rs
+++ b/vendor/tracing-subscriber/benches/fmt.rs
diff --git a/vendor/tracing-subscriber-0.3.3/benches/support/mod.rs b/vendor/tracing-subscriber/benches/support/mod.rs
index 25e9e7e22..25e9e7e22 100644
--- a/vendor/tracing-subscriber-0.3.3/benches/support/mod.rs
+++ b/vendor/tracing-subscriber/benches/support/mod.rs
diff --git a/vendor/tracing-subscriber-0.3.3/src/field/debug.rs b/vendor/tracing-subscriber/src/field/debug.rs
index cc67d29fe..cc67d29fe 100644
--- a/vendor/tracing-subscriber-0.3.3/src/field/debug.rs
+++ b/vendor/tracing-subscriber/src/field/debug.rs
diff --git a/vendor/tracing-subscriber-0.3.3/src/field/delimited.rs b/vendor/tracing-subscriber/src/field/delimited.rs
index 8c78c4b20..98634cea9 100644
--- a/vendor/tracing-subscriber-0.3.3/src/field/delimited.rs
+++ b/vendor/tracing-subscriber/src/field/delimited.rs
@@ -41,7 +41,7 @@ impl<D, V> Delimited<D, V> {
/// Returns a new [`MakeVisitor`] implementation that wraps `inner` so that
/// it will format each visited field separated by the provided `delimiter`.
///
- /// [`MakeVisitor`]: ../trait.MakeVisitor.html
+ /// [`MakeVisitor`]: super::MakeVisitor
pub fn new(delimiter: D, inner: V) -> Self {
Self { delimiter, inner }
}
@@ -53,7 +53,7 @@ impl<D, V> VisitDelimited<D, V> {
/// Returns a new [`Visit`] implementation that wraps `inner` so that
/// each formatted field is separated by the provided `delimiter`.
///
- /// [`Visit`]: https://docs.rs/tracing-core/0.1.6/tracing_core/field/trait.Visit.html
+ /// [`Visit`]: tracing_core::field::Visit
pub fn new(delimiter: D, inner: V) -> Self {
Self {
delimiter,
diff --git a/vendor/tracing-subscriber-0.3.3/src/field/display.rs b/vendor/tracing-subscriber/src/field/display.rs
index e0bbc55ed..78a039ce1 100644
--- a/vendor/tracing-subscriber-0.3.3/src/field/display.rs
+++ b/vendor/tracing-subscriber/src/field/display.rs
@@ -18,7 +18,7 @@ impl<V> Messages<V> {
/// Returns a new [`MakeVisitor`] implementation that will wrap `inner` so
/// that any strings named `message` are formatted using `fmt::Display`.
///
- /// [`MakeVisitor`]: ../trait.MakeVisitor.html
+ /// [`MakeVisitor`]: super::MakeVisitor
pub fn new(inner: V) -> Self {
Messages(inner)
}
diff --git a/vendor/tracing-subscriber-0.3.3/src/field/mod.rs b/vendor/tracing-subscriber/src/field/mod.rs
index f7d03f2cc..5dfddb362 100644
--- a/vendor/tracing-subscriber-0.3.3/src/field/mod.rs
+++ b/vendor/tracing-subscriber/src/field/mod.rs
@@ -22,7 +22,7 @@ pub mod display;
/// data to, configuration variables that determine the visitor's behavior, or
/// `()` when no input is required to produce a visitor.
///
-/// [visitors]: https://docs.rs/tracing-core/latest/tracing_core/field/trait.Visit.html
+/// [visitors]: tracing_core::field::Visit
pub trait MakeVisitor<T> {
/// The visitor type produced by this `MakeVisitor`.
type Visitor: Visit;
@@ -33,7 +33,7 @@ pub trait MakeVisitor<T> {
/// A [visitor] that produces output once it has visited a set of fields.
///
-/// [visitor]: https://docs.rs/tracing-core/latest/tracing_core/field/trait.Visit.html
+/// [visitor]: tracing_core::field::Visit
pub trait VisitOutput<Out>: Visit {
/// Completes the visitor, returning any output.
///
@@ -82,10 +82,10 @@ pub trait VisitOutput<Out>: Visit {
/// r.record(&mut visitor);
/// }
/// ```
-/// [visitor]: https://docs.rs/tracing-core/latest/tracing_core/field/trait.Visit.html
-/// [attr]: https://docs.rs/tracing-core/latest/tracing_core/span/struct.Attributes.html
-/// [rec]: https://docs.rs/tracing-core/latest/tracing_core/span/struct.Record.html
-/// [event]: https://docs.rs/tracing-core/latest/tracing_core/event/struct.Event.html
+/// [visitor]: tracing_core::field::Visit
+/// [attr]: tracing_core::span::Attributes
+/// [rec]: tracing_core::span::Record
+/// [event]: tracing_core::event::Event
pub trait RecordFields: crate::sealed::Sealed<RecordFieldsMarker> {
/// Record all the fields in `self` with the provided `visitor`.
fn record(&self, visitor: &mut dyn Visit);
@@ -341,7 +341,7 @@ pub(in crate::field) mod test_util {
impl<'a> Visit for DebugVisitor<'a> {
fn record_debug(&mut self, field: &Field, value: &dyn fmt::Debug) {
- write!(&mut self.writer, "{}={:?}", field, value).unwrap();
+ write!(self.writer, "{}={:?}", field, value).unwrap();
}
}
diff --git a/vendor/tracing-subscriber-0.3.3/src/filter/directive.rs b/vendor/tracing-subscriber/src/filter/directive.rs
index dd6b063c4..2ae3f0f24 100644
--- a/vendor/tracing-subscriber-0.3.3/src/filter/directive.rs
+++ b/vendor/tracing-subscriber/src/filter/directive.rs
@@ -5,7 +5,7 @@ use alloc::vec;
use alloc::{string::String, vec::Vec};
use core::{cmp::Ordering, fmt, iter::FromIterator, slice, str::FromStr};
-use tracing_core::Metadata;
+use tracing_core::{Level, Metadata};
/// Indicates that a string could not be parsed as a filtering directive.
#[derive(Debug)]
pub struct ParseError {
@@ -142,6 +142,22 @@ impl DirectiveSet<StaticDirective> {
None => false,
}
}
+
+ /// Same as `enabled` above, but skips `Directive`s with fields.
+ pub(crate) fn target_enabled(&self, target: &str, level: &Level) -> bool {
+ match self.directives_for_target(target).next() {
+ Some(d) => d.level >= *level,
+ None => false,
+ }
+ }
+
+ pub(crate) fn directives_for_target<'a>(
+ &'a self,
+ target: &'a str,
+ ) -> impl Iterator<Item = &'a StaticDirective> + 'a {
+ self.directives()
+ .filter(move |d| d.cares_about_target(target))
+ }
}
// === impl StaticDirective ===
@@ -158,6 +174,22 @@ impl StaticDirective {
level,
}
}
+
+ pub(in crate::filter) fn cares_about_target(&self, to_check: &str) -> bool {
+ // Does this directive have a target filter, and does it match the
+ // metadata's target?
+ if let Some(ref target) = self.target {
+ if !to_check.starts_with(&target[..]) {
+ return false;
+ }
+ }
+
+ if !self.field_names.is_empty() {
+ return false;
+ }
+
+ true
+ }
}
impl Ord for StaticDirective {
diff --git a/vendor/tracing-subscriber/src/filter/env/builder.rs b/vendor/tracing-subscriber/src/filter/env/builder.rs
new file mode 100644
index 000000000..c814707e6
--- /dev/null
+++ b/vendor/tracing-subscriber/src/filter/env/builder.rs
@@ -0,0 +1,325 @@
+use super::{
+ directive::{self, Directive},
+ EnvFilter, FromEnvError,
+};
+use crate::sync::RwLock;
+use std::env;
+use thread_local::ThreadLocal;
+use tracing::level_filters::STATIC_MAX_LEVEL;
+
+/// A [builder] for constructing new [`EnvFilter`]s.
+///
+/// [builder]: https://rust-unofficial.github.io/patterns/patterns/creational/builder.html
+#[derive(Debug, Clone)]
+#[must_use]
+pub struct Builder {
+ regex: bool,
+ env: Option<String>,
+ default_directive: Option<Directive>,
+}
+
+impl Builder {
+ /// Sets whether span field values can be matched with regular expressions.
+ ///
+ /// If this is `true`, field filter directives will be interpreted as
+ /// regular expressions if they are not able to be interpreted as a `bool`,
+ /// `i64`, `u64`, or `f64` literal. If this is `false,` those field values
+ /// will be interpreted as literal [`std::fmt::Debug`] output instead.
+ ///
+ /// By default, regular expressions are enabled.
+ ///
+ /// **Note**: when [`EnvFilter`]s are constructed from untrusted inputs,
+ /// disabling regular expressions is strongly encouraged.
+ pub fn with_regex(self, regex: bool) -> Self {
+ Self { regex, ..self }
+ }
+
+ /// Sets a default [filtering directive] that will be added to the filter if
+ /// the parsed string or environment variable contains no filter directives.
+ ///
+ /// By default, there is no default directive.
+ ///
+ /// # Examples
+ ///
+ /// If [`parse`], [`parse_lossy`], [`from_env`], or [`from_env_lossy`] are
+ /// called with an empty string or environment variable, the default
+ /// directive is used instead:
+ ///
+ /// ```rust
+ /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
+ /// use tracing_subscriber::filter::{EnvFilter, LevelFilter};
+ ///
+ /// let filter = EnvFilter::builder()
+ /// .with_default_directive(LevelFilter::INFO.into())
+ /// .parse("")?;
+ ///
+ /// assert_eq!(format!("{}", filter), "info");
+ /// # Ok(()) }
+ /// ```
+ ///
+ /// Note that the `lossy` variants ([`parse_lossy`] and [`from_env_lossy`])
+ /// will ignore any invalid directives. If all directives in a filter
+ /// string or environment variable are invalid, those methods will also use
+ /// the default directive:
+ ///
+ /// ```rust
+ /// use tracing_subscriber::filter::{EnvFilter, LevelFilter};
+ ///
+ /// let filter = EnvFilter::builder()
+ /// .with_default_directive(LevelFilter::INFO.into())
+ /// .parse_lossy("some_target=fake level,foo::bar=lolwut");
+ ///
+ /// assert_eq!(format!("{}", filter), "info");
+ /// ```
+ ///
+ /// If the string or environment variable contains valid filtering
+ /// directives, the default directive is not used:
+ ///
+ /// ```rust
+ /// use tracing_subscriber::filter::{EnvFilter, LevelFilter};
+ ///
+ /// let filter = EnvFilter::builder()
+ /// .with_default_directive(LevelFilter::INFO.into())
+ /// .parse_lossy("foo=trace");
+ ///
+ /// // The default directive is *not* used:
+ /// assert_eq!(format!("{}", filter), "foo=trace");
+ /// ```
+ ///
+ /// Parsing a more complex default directive from a string:
+ ///
+ /// ```rust
+ /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
+ /// use tracing_subscriber::filter::{EnvFilter, LevelFilter};
+ ///
+ /// let default = "myapp=debug".parse()
+ /// .expect("hard-coded default directive should be valid");
+ ///
+ /// let filter = EnvFilter::builder()
+ /// .with_default_directive(default)
+ /// .parse("")?;
+ ///
+ /// assert_eq!(format!("{}", filter), "myapp=debug");
+ /// # Ok(()) }
+ /// ```
+ ///
+ /// [`parse_lossy`]: Self::parse_lossy
+ /// [`from_env_lossy`]: Self::from_env_lossy
+ /// [`parse`]: Self::parse
+ /// [`from_env`]: Self::from_env
+ pub fn with_default_directive(self, default_directive: Directive) -> Self {
+ Self {
+ default_directive: Some(default_directive),
+ ..self
+ }
+ }
+
+ /// Sets the name of the environment variable used by the [`from_env`],
+ /// [`from_env_lossy`], and [`try_from_env`] methods.
+ ///
+ /// By default, this is the value of [`EnvFilter::DEFAULT_ENV`]
+ /// (`RUST_LOG`).
+ ///
+ /// [`from_env`]: Self::from_env
+ /// [`from_env_lossy`]: Self::from_env_lossy
+ /// [`try_from_env`]: Self::try_from_env
+ pub fn with_env_var(self, var: impl ToString) -> Self {
+ Self {
+ env: Some(var.to_string()),
+ ..self
+ }
+ }
+
+ /// Returns a new [`EnvFilter`] from the directives in the given string,
+ /// *ignoring* any that are invalid.
+ pub fn parse_lossy<S: AsRef<str>>(&self, dirs: S) -> EnvFilter {
+ let directives = dirs
+ .as_ref()
+ .split(',')
+ .filter(|s| !s.is_empty())
+ .filter_map(|s| match Directive::parse(s, self.regex) {
+ Ok(d) => Some(d),
+ Err(err) => {
+ eprintln!("ignoring `{}`: {}", s, err);
+ None
+ }
+ });
+ self.from_directives(directives)
+ }
+
+ /// Returns a new [`EnvFilter`] from the directives in the given string,
+ /// or an error if any are invalid.
+ pub fn parse<S: AsRef<str>>(&self, dirs: S) -> Result<EnvFilter, directive::ParseError> {
+ let dirs = dirs.as_ref();
+ if dirs.is_empty() {
+ return Ok(self.from_directives(std::iter::empty()));
+ }
+ let directives = dirs
+ .split(',')
+ .filter(|s| !s.is_empty())
+ .map(|s| Directive::parse(s, self.regex))
+ .collect::<Result<Vec<_>, _>>()?;
+ Ok(self.from_directives(directives))
+ }
+
+ /// Returns a new [`EnvFilter`] from the directives in the configured
+ /// environment variable, ignoring any directives that are invalid.
+ pub fn from_env_lossy(&self) -> EnvFilter {
+ let var = env::var(self.env_var_name()).unwrap_or_default();
+ self.parse_lossy(var)
+ }
+
+ /// Returns a new [`EnvFilter`] from the directives in the configured
+ /// environment variable, or an error if the variable contains invalid
+ /// directives. An unset environment variable is treated as an empty string.
+ pub fn from_env(&self) -> Result<EnvFilter, FromEnvError> {
+ let var = env::var(self.env_var_name()).unwrap_or_default();
+ self.parse(var).map_err(Into::into)
+ }
+
+ /// Returns a new [`EnvFilter`] from the directives in the configured
+ /// environment variable, or an error if the environment variable is not set
+ /// or contains invalid directives.
+ pub fn try_from_env(&self) -> Result<EnvFilter, FromEnvError> {
+ let var = env::var(self.env_var_name())?;
+ self.parse(var).map_err(Into::into)
+ }
+
+ // TODO(eliza): consider making this a public API?
+ // Clippy doesn't love this naming, because it suggests that `from_` methods
+ // should not take a `Self`...but in this case, it's the `EnvFilter` that is
+ // being constructed "from" the directives, rather than the builder itself.
+ #[allow(clippy::wrong_self_convention)]
+ pub(super) fn from_directives(
+ &self,
+ directives: impl IntoIterator<Item = Directive>,
+ ) -> EnvFilter {
+ use tracing::Level;
+
+ let mut directives: Vec<_> = directives.into_iter().collect();
+ let mut disabled = Vec::new();
+ for directive in &mut directives {
+ if directive.level > STATIC_MAX_LEVEL {
+ disabled.push(directive.clone());
+ }
+ if !self.regex {
+ directive.deregexify();
+ }
+ }
+
+ if !disabled.is_empty() {
+ #[cfg(feature = "nu_ansi_term")]
+ use nu_ansi_term::{Color, Style};
+ // NOTE: We can't use a configured `MakeWriter` because the EnvFilter
+ // has no knowledge of any underlying subscriber or collector, which
+ // may or may not use a `MakeWriter`.
+ let warn = |msg: &str| {
+ #[cfg(not(feature = "nu_ansi_term"))]
+ let msg = format!("warning: {}", msg);
+ #[cfg(feature = "nu_ansi_term")]
+ let msg = {
+ let bold = Style::new().bold();
+ let mut warning = Color::Yellow.paint("warning");
+ warning.style_ref_mut().is_bold = true;
+ format!("{}{} {}", warning, bold.paint(":"), bold.paint(msg))
+ };
+ eprintln!("{}", msg);
+ };
+ let ctx_prefixed = |prefix: &str, msg: &str| {
+ #[cfg(not(feature = "nu_ansi_term"))]
+ let msg = format!("{} {}", prefix, msg);
+ #[cfg(feature = "nu_ansi_term")]
+ let msg = {
+ let mut equal = Color::Fixed(21).paint("="); // dark blue
+ equal.style_ref_mut().is_bold = true;
+ format!(" {} {} {}", equal, Style::new().bold().paint(prefix), msg)
+ };
+ eprintln!("{}", msg);
+ };
+ let ctx_help = |msg| ctx_prefixed("help:", msg);
+ let ctx_note = |msg| ctx_prefixed("note:", msg);
+ let ctx = |msg: &str| {
+ #[cfg(not(feature = "nu_ansi_term"))]
+ let msg = format!("note: {}", msg);
+ #[cfg(feature = "nu_ansi_term")]
+ let msg = {
+ let mut pipe = Color::Fixed(21).paint("|");
+ pipe.style_ref_mut().is_bold = true;
+ format!(" {} {}", pipe, msg)
+ };
+ eprintln!("{}", msg);
+ };
+ warn("some trace filter directives would enable traces that are disabled statically");
+ for directive in disabled {
+ let target = if let Some(target) = &directive.target {
+ format!("the `{}` target", target)
+ } else {
+ "all targets".into()
+ };
+ let level = directive
+ .level
+ .into_level()
+ .expect("=off would not have enabled any filters");
+ ctx(&format!(
+ "`{}` would enable the {} level for {}",
+ directive, level, target
+ ));
+ }
+ ctx_note(&format!("the static max level is `{}`", STATIC_MAX_LEVEL));
+ let help_msg = || {
+ let (feature, filter) = match STATIC_MAX_LEVEL.into_level() {
+ Some(Level::TRACE) => unreachable!(
+ "if the max level is trace, no static filtering features are enabled"
+ ),
+ Some(Level::DEBUG) => ("max_level_debug", Level::TRACE),
+ Some(Level::INFO) => ("max_level_info", Level::DEBUG),
+ Some(Level::WARN) => ("max_level_warn", Level::INFO),
+ Some(Level::ERROR) => ("max_level_error", Level::WARN),
+ None => return ("max_level_off", String::new()),
+ };
+ (feature, format!("{} ", filter))
+ };
+ let (feature, earlier_level) = help_msg();
+ ctx_help(&format!(
+ "to enable {}logging, remove the `{}` feature from the `tracing` crate",
+ earlier_level, feature
+ ));
+ }
+
+ let (dynamics, statics) = Directive::make_tables(directives);
+ let has_dynamics = !dynamics.is_empty();
+
+ let mut filter = EnvFilter {
+ statics,
+ dynamics,
+ has_dynamics,
+ by_id: RwLock::new(Default::default()),
+ by_cs: RwLock::new(Default::default()),
+ scope: ThreadLocal::new(),
+ regex: self.regex,
+ };
+
+ if !has_dynamics && filter.statics.is_empty() {
+ if let Some(ref default) = self.default_directive {
+ filter = filter.add_directive(default.clone());
+ }
+ }
+
+ filter
+ }
+
+ fn env_var_name(&self) -> &str {
+ self.env.as_deref().unwrap_or(EnvFilter::DEFAULT_ENV)
+ }
+}
+
+impl Default for Builder {
+ fn default() -> Self {
+ Self {
+ regex: true,
+ env: None,
+ default_directive: None,
+ }
+ }
+}
diff --git a/vendor/tracing-subscriber-0.3.3/src/filter/env/directive.rs b/vendor/tracing-subscriber/src/filter/env/directive.rs
index 66ca23dc4..f062e6ef9 100644
--- a/vendor/tracing-subscriber-0.3.3/src/filter/env/directive.rs
+++ b/vendor/tracing-subscriber/src/filter/env/directive.rs
@@ -4,14 +4,14 @@ use crate::filter::{
env::{field, FieldMap},
level::LevelFilter,
};
-use lazy_static::lazy_static;
+use once_cell::sync::Lazy;
use regex::Regex;
use std::{cmp::Ordering, fmt, iter::FromIterator, str::FromStr};
use tracing_core::{span, Level, Metadata};
/// A single filtering directive.
// TODO(eliza): add a builder for programmatically constructing directives?
-#[derive(Debug, Eq, PartialEq)]
+#[derive(Clone, Debug, Eq, PartialEq)]
#[cfg_attr(docsrs, doc(cfg(feature = "env-filter")))]
pub struct Directive {
in_span: Option<String>,
@@ -107,80 +107,52 @@ impl Directive {
.collect();
(Dynamics::from_iter(dyns), statics)
}
-}
-
-impl Match for Directive {
- fn cares_about(&self, meta: &Metadata<'_>) -> bool {
- // Does this directive have a target filter, and does it match the
- // metadata's target?
- if let Some(ref target) = self.target {
- if !meta.target().starts_with(&target[..]) {
- return false;
- }
- }
-
- // Do we have a name filter, and does it match the metadata's name?
- // TODO(eliza): put name globbing here?
- if let Some(ref name) = self.in_span {
- if name != meta.name() {
- return false;
- }
- }
- // Does the metadata define all the fields that this directive cares about?
- let fields = meta.fields();
- for field in &self.fields {
- if fields.field(&field.name).is_none() {
- return false;
+ pub(super) fn deregexify(&mut self) {
+ for field in &mut self.fields {
+ field.value = match field.value.take() {
+ Some(field::ValueMatch::Pat(pat)) => {
+ Some(field::ValueMatch::Debug(pat.into_debug_match()))
+ }
+ x => x,
}
}
-
- true
- }
-
- fn level(&self) -> &LevelFilter {
- &self.level
}
-}
-impl FromStr for Directive {
- type Err = ParseError;
- fn from_str(from: &str) -> Result<Self, Self::Err> {
- lazy_static! {
- static ref DIRECTIVE_RE: Regex = Regex::new(
- r"(?x)
- ^(?P<global_level>(?i:trace|debug|info|warn|error|off|[0-5]))$ |
- # ^^^.
- # `note: we match log level names case-insensitively
- ^
- (?: # target name or span name
- (?P<target>[\w:-]+)|(?P<span>\[[^\]]*\])
- ){1,2}
- (?: # level or nothing
- =(?P<level>(?i:trace|debug|info|warn|error|off|[0-5]))?
- # ^^^.
- # `note: we match log level names case-insensitively
- )?
- $
- "
- )
- .unwrap();
- static ref SPAN_PART_RE: Regex =
- Regex::new(r#"(?P<name>[^\]\{]+)?(?:\{(?P<fields>[^\}]*)\})?"#).unwrap();
- static ref FIELD_FILTER_RE: Regex =
- // TODO(eliza): this doesn't _currently_ handle value matchers that include comma
- // characters. We should fix that.
- Regex::new(r#"(?x)
- (
- # field name
- [[:word:]][[[:word:]]\.]*
- # value part (optional)
- (?:=[^,]+)?
- )
- # trailing comma or EOS
- (?:,\s?|$)
- "#).unwrap();
- }
+ pub(super) fn parse(from: &str, regex: bool) -> Result<Self, ParseError> {
+ static DIRECTIVE_RE: Lazy<Regex> = Lazy::new(|| Regex::new(
+ r"(?x)
+ ^(?P<global_level>(?i:trace|debug|info|warn|error|off|[0-5]))$ |
+ # ^^^.
+ # `note: we match log level names case-insensitively
+ ^
+ (?: # target name or span name
+ (?P<target>[\w:-]+)|(?P<span>\[[^\]]*\])
+ ){1,2}
+ (?: # level or nothing
+ =(?P<level>(?i:trace|debug|info|warn|error|off|[0-5]))?
+ # ^^^.
+ # `note: we match log level names case-insensitively
+ )?
+ $
+ "
+ )
+ .unwrap());
+ static SPAN_PART_RE: Lazy<Regex> =
+ Lazy::new(|| Regex::new(r#"(?P<name>[^\]\{]+)?(?:\{(?P<fields>[^\}]*)\})?"#).unwrap());
+ static FIELD_FILTER_RE: Lazy<Regex> =
+ // TODO(eliza): this doesn't _currently_ handle value matchers that include comma
+ // characters. We should fix that.
+ Lazy::new(|| Regex::new(r#"(?x)
+ (
+ # field name
+ [[:word:]][[[:word:]]\.]*
+ # value part (optional)
+ (?:=[^,]+)?
+ )
+ # trailing comma or EOS
+ (?:,\s?|$)
+ "#).unwrap());
let caps = DIRECTIVE_RE.captures(from).ok_or_else(ParseError::new)?;
@@ -214,7 +186,7 @@ impl FromStr for Directive {
.map(|c| {
FIELD_FILTER_RE
.find_iter(c.as_str())
- .map(|c| c.as_str().parse())
+ .map(|c| field::Match::parse(c.as_str(), regex))
.collect::<Result<Vec<_>, _>>()
})
.unwrap_or_else(|| Ok(Vec::new()));
@@ -228,7 +200,7 @@ impl FromStr for Directive {
// Setting the target without the level enables every level for that target
.unwrap_or(LevelFilter::TRACE);
- Ok(Directive {
+ Ok(Self {
level,
target,
in_span,
@@ -237,6 +209,48 @@ impl FromStr for Directive {
}
}
+impl Match for Directive {
+ fn cares_about(&self, meta: &Metadata<'_>) -> bool {
+ // Does this directive have a target filter, and does it match the
+ // metadata's target?
+ if let Some(ref target) = self.target {
+ if !meta.target().starts_with(&target[..]) {
+ return false;
+ }
+ }
+
+ // Do we have a name filter, and does it match the metadata's name?
+ // TODO(eliza): put name globbing here?
+ if let Some(ref name) = self.in_span {
+ if name != meta.name() {
+ return false;
+ }
+ }
+
+ // Does the metadata define all the fields that this directive cares about?
+ let actual_fields = meta.fields();
+ for expected_field in &self.fields {
+ // Does the actual field set (from the metadata) contain this field?
+ if actual_fields.field(&expected_field.name).is_none() {
+ return false;
+ }
+ }
+
+ true
+ }
+
+ fn level(&self) -> &LevelFilter {
+ &self.level
+ }
+}
+
+impl FromStr for Directive {
+ type Err = ParseError;
+ fn from_str(from: &str) -> Result<Self, Self::Err> {
+ Directive::parse(from, true)
+ }
+}
+
impl Default for Directive {
fn default() -> Self {
Directive {
diff --git a/vendor/tracing-subscriber-0.3.3/src/filter/env/field.rs b/vendor/tracing-subscriber/src/filter/env/field.rs
index 970850f92..1394fd04a 100644
--- a/vendor/tracing-subscriber-0.3.3/src/filter/env/field.rs
+++ b/vendor/tracing-subscriber/src/filter/env/field.rs
@@ -2,7 +2,7 @@ use matchers::Pattern;
use std::{
cmp::Ordering,
error::Error,
- fmt,
+ fmt::{self, Write},
str::FromStr,
sync::{
atomic::{AtomicBool, Ordering::*},
@@ -13,7 +13,7 @@ use std::{
use super::{FieldMap, LevelFilter};
use tracing_core::field::{Field, Visit};
-#[derive(Debug, Eq, PartialEq)]
+#[derive(Clone, Debug, Eq, PartialEq)]
pub(crate) struct Match {
pub(crate) name: String, // TODO: allow match patterns for names?
pub(crate) value: Option<ValueMatch>,
@@ -38,11 +38,20 @@ pub(crate) struct MatchVisitor<'a> {
#[derive(Debug, Clone)]
pub(crate) enum ValueMatch {
+ /// Matches a specific `bool` value.
Bool(bool),
+ /// Matches a specific `f64` value.
F64(f64),
+ /// Matches a specific `u64` value.
U64(u64),
+ /// Matches a specific `i64` value.
I64(i64),
+ /// Matches any `NaN` `f64` value.
NaN,
+ /// Matches any field whose `fmt::Debug` output is equal to a fixed string.
+ Debug(MatchDebug),
+ /// Matches any field whose `fmt::Debug` output matches a regular expression
+ /// pattern.
Pat(Box<MatchPattern>),
}
@@ -97,6 +106,9 @@ impl Ord for ValueMatch {
(Pat(this), Pat(that)) => this.cmp(that),
(Pat(_), _) => Ordering::Greater,
+
+ (Debug(this), Debug(that)) => this.cmp(that),
+ (Debug(_), _) => Ordering::Greater,
}
}
}
@@ -107,12 +119,25 @@ impl PartialOrd for ValueMatch {
}
}
+/// Matches a field's `fmt::Debug` output against a regular expression pattern.
+///
+/// This is used for matching all non-literal field value filters when regular
+/// expressions are enabled.
#[derive(Debug, Clone)]
pub(crate) struct MatchPattern {
pub(crate) matcher: Pattern,
pattern: Arc<str>,
}
+/// Matches a field's `fmt::Debug` output against a fixed string pattern.
+///
+/// This is used for matching all non-literal field value filters when regular
+/// expressions are disabled.
+#[derive(Debug, Clone)]
+pub(crate) struct MatchDebug {
+ pattern: Arc<str>,
+}
+
/// Indicates that a field name specified in a filter directive was invalid.
#[derive(Clone, Debug)]
#[cfg_attr(docsrs, doc(cfg(feature = "env-filter")))]
@@ -122,9 +147,17 @@ pub struct BadName {
// === impl Match ===
-impl FromStr for Match {
- type Err = Box<dyn Error + Send + Sync>;
- fn from_str(s: &str) -> Result<Self, Self::Err> {
+impl Match {
+ pub(crate) fn has_value(&self) -> bool {
+ self.value.is_some()
+ }
+
+ // TODO: reference count these strings?
+ pub(crate) fn name(&self) -> String {
+ self.name.clone()
+ }
+
+ pub(crate) fn parse(s: &str, regex: bool) -> Result<Self, Box<dyn Error + Send + Sync>> {
let mut parts = s.split('=');
let name = parts
.next()
@@ -133,22 +166,17 @@ impl FromStr for Match {
})?
// TODO: validate field name
.to_string();
- let value = parts.next().map(ValueMatch::from_str).transpose()?;
+ let value = parts
+ .next()
+ .map(|part| match regex {
+ true => ValueMatch::parse_regex(part),
+ false => Ok(ValueMatch::parse_non_regex(part)),
+ })
+ .transpose()?;
Ok(Match { name, value })
}
}
-impl Match {
- pub(crate) fn has_value(&self) -> bool {
- self.value.is_some()
- }
-
- // TODO: reference count these strings?
- pub(crate) fn name(&self) -> String {
- self.name.clone()
- }
-}
-
impl fmt::Display for Match {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Display::fmt(&self.name, f)?;
@@ -199,9 +227,14 @@ fn value_match_f64(v: f64) -> ValueMatch {
}
}
-impl FromStr for ValueMatch {
- type Err = matchers::Error;
- fn from_str(s: &str) -> Result<Self, Self::Err> {
+impl ValueMatch {
+ /// Parse a `ValueMatch` that will match `fmt::Debug` fields using regular
+ /// expressions.
+ ///
+ /// This returns an error if the string didn't contain a valid `bool`,
+ /// `u64`, `i64`, or `f64` literal, and couldn't be parsed as a regular
+ /// expression.
+ fn parse_regex(s: &str) -> Result<Self, matchers::Error> {
s.parse::<bool>()
.map(ValueMatch::Bool)
.or_else(|_| s.parse::<u64>().map(ValueMatch::U64))
@@ -212,6 +245,21 @@ impl FromStr for ValueMatch {
.map(|p| ValueMatch::Pat(Box::new(p)))
})
}
+
+ /// Parse a `ValueMatch` that will match `fmt::Debug` against a fixed
+ /// string.
+ ///
+ /// This does *not* return an error, because any string that isn't a valid
+ /// `bool`, `u64`, `i64`, or `f64` literal is treated as expected
+ /// `fmt::Debug` output.
+ fn parse_non_regex(s: &str) -> Self {
+ s.parse::<bool>()
+ .map(ValueMatch::Bool)
+ .or_else(|_| s.parse::<u64>().map(ValueMatch::U64))
+ .or_else(|_| s.parse::<i64>().map(ValueMatch::I64))
+ .or_else(|_| s.parse::<f64>().map(value_match_f64))
+ .unwrap_or_else(|_| ValueMatch::Debug(MatchDebug::new(s)))
+ }
}
impl fmt::Display for ValueMatch {
@@ -222,6 +270,7 @@ impl fmt::Display for ValueMatch {
ValueMatch::NaN => fmt::Display::fmt(&std::f64::NAN, f),
ValueMatch::I64(ref inner) => fmt::Display::fmt(inner, f),
ValueMatch::U64(ref inner) => fmt::Display::fmt(inner, f),
+ ValueMatch::Debug(ref inner) => fmt::Display::fmt(inner, f),
ValueMatch::Pat(ref inner) => fmt::Display::fmt(inner, f),
}
}
@@ -264,6 +313,12 @@ impl MatchPattern {
fn debug_matches(&self, d: &impl fmt::Debug) -> bool {
self.matcher.debug_matches(d)
}
+
+ pub(super) fn into_debug_match(self) -> MatchDebug {
+ MatchDebug {
+ pattern: self.pattern,
+ }
+ }
}
impl PartialEq for MatchPattern {
@@ -289,6 +344,102 @@ impl Ord for MatchPattern {
}
}
+// === impl MatchDebug ===
+
+impl MatchDebug {
+ fn new(s: &str) -> Self {
+ Self {
+ pattern: s.to_owned().into(),
+ }
+ }
+
+ #[inline]
+ fn debug_matches(&self, d: &impl fmt::Debug) -> bool {
+ // Naively, we would probably match a value's `fmt::Debug` output by
+ // formatting it to a string, and then checking if the string is equal
+ // to the expected pattern. However, this would require allocating every
+ // time we want to match a field value against a `Debug` matcher, which
+ // can be avoided.
+ //
+ // Instead, we implement `fmt::Write` for a type that, rather than
+ // actually _writing_ the strings to something, matches them against the
+ // expected pattern, and returns an error if the pattern does not match.
+ struct Matcher<'a> {
+ pattern: &'a str,
+ }
+
+ impl fmt::Write for Matcher<'_> {
+ fn write_str(&mut self, s: &str) -> fmt::Result {
+ // If the string is longer than the remaining expected string,
+ // we know it won't match, so bail.
+ if s.len() > self.pattern.len() {
+ return Err(fmt::Error);
+ }
+
+ // If the expected string begins with the string that was
+ // written, we are still potentially a match. Advance the
+ // position in the expected pattern to chop off the matched
+ // output, and continue.
+ if self.pattern.starts_with(s) {
+ self.pattern = &self.pattern[s.len()..];
+ return Ok(());
+ }
+
+ // Otherwise, the expected string doesn't include the string
+ // that was written at the current position, so the `fmt::Debug`
+ // output doesn't match! Return an error signalling that this
+ // doesn't match.
+ Err(fmt::Error)
+ }
+ }
+ let mut matcher = Matcher {
+ pattern: &self.pattern,
+ };
+
+ // Try to "write" the value's `fmt::Debug` output to a `Matcher`. This
+ // returns an error if the `fmt::Debug` implementation wrote any
+ // characters that did not match the expected pattern.
+ write!(matcher, "{:?}", d).is_ok()
+ }
+}
+
+impl fmt::Display for MatchDebug {
+ #[inline]
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Display::fmt(&*self.pattern, f)
+ }
+}
+
+impl AsRef<str> for MatchDebug {
+ #[inline]
+ fn as_ref(&self) -> &str {
+ self.pattern.as_ref()
+ }
+}
+
+impl PartialEq for MatchDebug {
+ #[inline]
+ fn eq(&self, other: &Self) -> bool {
+ self.pattern == other.pattern
+ }
+}
+
+impl Eq for MatchDebug {}
+
+impl PartialOrd for MatchDebug {
+ #[inline]
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ Some(self.pattern.cmp(&other.pattern))
+ }
+}
+
+impl Ord for MatchDebug {
+ #[inline]
+ fn cmp(&self, other: &Self) -> Ordering {
+ self.pattern.cmp(&other.pattern)
+ }
+}
+
// === impl BadName ===
impl Error for BadName {}
@@ -401,6 +552,9 @@ impl<'a> Visit for MatchVisitor<'a> {
Some((ValueMatch::Pat(ref e), ref matched)) if e.str_matches(&value) => {
matched.store(true, Release);
}
+ Some((ValueMatch::Debug(ref e), ref matched)) if e.debug_matches(&value) => {
+ matched.store(true, Release)
+ }
_ => {}
}
}
@@ -410,7 +564,63 @@ impl<'a> Visit for MatchVisitor<'a> {
Some((ValueMatch::Pat(ref e), ref matched)) if e.debug_matches(&value) => {
matched.store(true, Release);
}
+ Some((ValueMatch::Debug(ref e), ref matched)) if e.debug_matches(&value) => {
+ matched.store(true, Release)
+ }
_ => {}
}
}
}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ #[derive(Debug)]
+ #[allow(dead_code)]
+ struct MyStruct {
+ answer: usize,
+ question: &'static str,
+ }
+
+ #[test]
+ fn debug_struct_match() {
+ let my_struct = MyStruct {
+ answer: 42,
+ question: "life, the universe, and everything",
+ };
+
+ let pattern = "MyStruct { answer: 42, question: \"life, the universe, and everything\" }";
+
+ assert_eq!(
+ format!("{:?}", my_struct),
+ pattern,
+ "`MyStruct`'s `Debug` impl doesn't output the expected string"
+ );
+
+ let matcher = MatchDebug {
+ pattern: pattern.into(),
+ };
+ assert!(matcher.debug_matches(&my_struct))
+ }
+
+ #[test]
+ fn debug_struct_not_match() {
+ let my_struct = MyStruct {
+ answer: 42,
+ question: "what shall we have for lunch?",
+ };
+
+ let pattern = "MyStruct { answer: 42, question: \"life, the universe, and everything\" }";
+
+ assert_eq!(
+ format!("{:?}", my_struct),
+ "MyStruct { answer: 42, question: \"what shall we have for lunch?\" }",
+ "`MyStruct`'s `Debug` impl doesn't output the expected string"
+ );
+
+ let matcher = MatchDebug {
+ pattern: pattern.into(),
+ };
+ assert!(!matcher.debug_matches(&my_struct))
+ }
+}
diff --git a/vendor/tracing-subscriber-0.3.3/src/filter/env/mod.rs b/vendor/tracing-subscriber/src/filter/env/mod.rs
index 81fe0e62d..81a9ae2bd 100644
--- a/vendor/tracing-subscriber-0.3.3/src/filter/env/mod.rs
+++ b/vendor/tracing-subscriber/src/filter/env/mod.rs
@@ -4,7 +4,8 @@
// these are publicly re-exported, but the compiler doesn't realize
// that for some reason.
#[allow(unreachable_pub)]
-pub use self::{directive::Directive, field::BadName as BadFieldName};
+pub use self::{builder::Builder, directive::Directive, field::BadName as BadFieldName};
+mod builder;
mod directive;
mod field;
@@ -15,6 +16,7 @@ use crate::{
};
use directive::ParseError;
use std::{cell::RefCell, collections::HashMap, env, error::Error, fmt, str::FromStr};
+use thread_local::ThreadLocal;
use tracing_core::{
callsite,
field::Field,
@@ -26,6 +28,16 @@ use tracing_core::{
/// A [`Layer`] which filters spans and events based on a set of filter
/// directives.
///
+/// `EnvFilter` implements both the [`Layer`](#impl-Layer<S>) and [`Filter`] traits, so it may
+/// be used for both [global filtering][global] and [per-layer filtering][plf],
+/// respectively. See [the documentation on filtering with `Layer`s][filtering]
+/// for details.
+///
+/// The [`Targets`] type implements a similar form of filtering, but without the
+/// ability to dynamically enable events based on the current span context, and
+/// without filtering on field values. When these features are not required,
+/// [`Targets`] provides a lighter-weight alternative to [`EnvFilter`].
+///
/// # Directives
///
/// A filter consists of one or more comma-separated directives which match on [`Span`]s and [`Event`]s.
@@ -52,10 +64,27 @@ use tracing_core::{
/// and will match on any [`Span`] or [`Event`] that has a field with that name.
/// For example: `[span{field=\"value\"}]=debug`, `[{field}]=trace`.
/// - `value` matches on the value of a span's field. If a value is a numeric literal or a bool,
-/// it will match _only_ on that value. Otherwise, this filter acts as a regex on
-/// the `std::fmt::Debug` output from the value.
+/// it will match _only_ on that value. Otherwise, this filter matches the
+/// [`std::fmt::Debug`] output from the value.
/// - `level` sets a maximum verbosity level accepted by this directive.
///
+/// When a field value directive (`[{<FIELD NAME>=<FIELD_VALUE>}]=...`) matches a
+/// value's [`std::fmt::Debug`] output (i.e., the field value in the directive
+/// is not a `bool`, `i64`, `u64`, or `f64` literal), the matched pattern may be
+/// interpreted as either a regular expression or as the precise expected
+/// output of the field's [`std::fmt::Debug`] implementation. By default, these
+/// filters are interpreted as regular expressions, but this can be disabled
+/// using the [`Builder::with_regex`] builder method to use precise matching
+/// instead.
+///
+/// When field value filters are interpreted as regular expressions, the
+/// [`regex-automata` crate's regular expression syntax][re-syntax] is
+/// supported.
+///
+/// **Note**: When filters are constructed from potentially untrusted inputs,
+/// [disabling regular expression matching](Builder::with_regex) is strongly
+/// recommended.
+///
/// ## Usage Notes
///
/// - The portion of the directive which is included within the square brackets is `tracing`-specific.
@@ -72,7 +101,7 @@ use tracing_core::{
/// - A dash in a target will only appear when being specified explicitly:
/// `tracing::info!(target: "target-name", ...);`
///
-/// ## Examples
+/// ## Example Syntax
///
/// - `tokio::net=info` will enable all spans or events that:
/// - have the `tokio::net` target,
@@ -89,10 +118,68 @@ use tracing_core::{
/// - which has a field named `name` with value `bob`,
/// - at _any_ level.
///
-/// The [`Targets`] type implements a similar form of filtering, but without the
-/// ability to dynamically enable events based on the current span context, and
-/// without filtering on field values. When these features are not required,
-/// [`Targets`] provides a lighter-weight alternative to [`EnvFilter`].
+/// # Examples
+///
+/// Parsing an `EnvFilter` from the [default environment
+/// variable](EnvFilter::from_default_env) (`RUST_LOG`):
+///
+/// ```
+/// use tracing_subscriber::{EnvFilter, fmt, prelude::*};
+///
+/// tracing_subscriber::registry()
+/// .with(fmt::layer())
+/// .with(EnvFilter::from_default_env())
+/// .init();
+/// ```
+///
+/// Parsing an `EnvFilter` [from a user-provided environment
+/// variable](EnvFilter::from_env):
+///
+/// ```
+/// use tracing_subscriber::{EnvFilter, fmt, prelude::*};
+///
+/// tracing_subscriber::registry()
+/// .with(fmt::layer())
+/// .with(EnvFilter::from_env("MYAPP_LOG"))
+/// .init();
+/// ```
+///
+/// Using `EnvFilter` as a [per-layer filter][plf] to filter only a single
+/// [`Layer`]:
+///
+/// ```
+/// use tracing_subscriber::{EnvFilter, fmt, prelude::*};
+///
+/// // Parse an `EnvFilter` configuration from the `RUST_LOG`
+/// // environment variable.
+/// let filter = EnvFilter::from_default_env();
+///
+/// // Apply the filter to this layer *only*.
+/// let filtered_layer = fmt::layer().with_filter(filter);
+///
+/// // Some other layer, whose output we don't want to filter.
+/// let unfiltered_layer = // ...
+/// # fmt::layer();
+///
+/// tracing_subscriber::registry()
+/// .with(filtered_layer)
+/// .with(unfiltered_layer)
+/// .init();
+/// ```
+/// # Constructing `EnvFilter`s
+///
+/// An `EnvFilter` can be constructed by parsing a string containing one or more
+/// directives. The [`EnvFilter::new`] constructor parses an `EnvFilter` from a
+/// string, ignoring any invalid directives, while [`EnvFilter::try_new`]
+/// returns an error if invalid directives are encountered. Similarly, the
+/// [`EnvFilter::from_env`] and [`EnvFilter::try_from_env`] constructors parse
+/// an `EnvFilter` from the value of the provided environment variable, with
+/// lossy and strict validation, respectively.
+///
+/// A [builder](EnvFilter::builder) interface is available to set additional
+/// configuration options prior to parsing an `EnvFilter`. See the [`Builder`
+/// type's documentation](Builder) for details on the options that can be
+/// configured using the builder.
///
/// [`Span`]: tracing_core::span
/// [fields]: tracing_core::Field
@@ -100,6 +187,11 @@ use tracing_core::{
/// [`level`]: tracing_core::Level
/// [`Metadata`]: tracing_core::Metadata
/// [`Targets`]: crate::filter::Targets
+/// [`env_logger`]: https://crates.io/crates/env_logger
+/// [`Filter`]: #impl-Filter<S>
+/// [global]: crate::layer#global-filtering
+/// [plf]: crate::layer#per-layer-filtering
+/// [filtering]: crate::layer#filtering-with-layers
#[cfg_attr(docsrs, doc(cfg(all(feature = "env-filter", feature = "std"))))]
#[derive(Debug)]
pub struct EnvFilter {
@@ -108,10 +200,8 @@ pub struct EnvFilter {
has_dynamics: bool,
by_id: RwLock<HashMap<span::Id, directive::SpanMatcher>>,
by_cs: RwLock<HashMap<callsite::Identifier, directive::CallsiteMatcher>>,
-}
-
-thread_local! {
- static SCOPE: RefCell<Vec<LevelFilter>> = RefCell::new(Vec::new());
+ scope: ThreadLocal<RefCell<Vec<LevelFilter>>>,
+ regex: bool,
}
type FieldMap<T> = HashMap<Field, T>;
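The struct change above replaces the process-wide `SCOPE` thread-local with a per-filter `scope: ThreadLocal<RefCell<Vec<LevelFilter>>>` field, so each `EnvFilter` instance now keeps its own per-thread stack of entered span levels. A minimal sketch of the `thread_local::ThreadLocal` pattern used here, assuming the `thread_local` crate is on the dependency list (the `Scope` type and `u8` levels are illustrative only, not part of the diff):

```rust
use std::cell::RefCell;
use thread_local::ThreadLocal;

/// Illustrative stand-in for the per-filter scope stack.
struct Scope {
    // One `RefCell<Vec<u8>>` is lazily created per accessing thread.
    stack: ThreadLocal<RefCell<Vec<u8>>>,
}

impl Scope {
    fn push(&self, level: u8) {
        // `get_or_default` initializes this thread's value on first use.
        self.stack.get_or_default().borrow_mut().push(level);
    }

    fn pop(&self) -> Option<u8> {
        self.stack.get_or_default().borrow_mut().pop()
    }
}

fn main() {
    let scope = Scope { stack: ThreadLocal::new() };
    scope.push(3);
    assert_eq!(scope.pop(), Some(3));
    // Another thread accessing `scope` would see its own, initially empty, stack.
}
```

Unlike a `thread_local!` static, the per-thread state is owned by and dropped with the filter itself, which matters once several `EnvFilter`s can coexist as per-layer filters.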
@@ -134,58 +224,181 @@ impl EnvFilter {
/// `RUST_LOG` is the default environment variable used by
/// [`EnvFilter::from_default_env`] and [`EnvFilter::try_from_default_env`].
///
- /// [`EnvFilter::from_default_env`]: #method.from_default_env
- /// [`EnvFilter::try_from_default_env`]: #method.try_from_default_env
+ /// [`EnvFilter::from_default_env`]: EnvFilter::from_default_env()
+ /// [`EnvFilter::try_from_default_env`]: EnvFilter::try_from_default_env()
pub const DEFAULT_ENV: &'static str = "RUST_LOG";
+ // === constructors, etc ===
+
+ /// Returns a [builder] that can be used to configure a new [`EnvFilter`]
+ /// instance.
+ ///
+ /// The [`Builder`] type is used to set additional configurations, such as
+ /// [whether regular expressions are enabled](Builder::with_regex) or [the
+ /// default directive](Builder::with_default_directive) before parsing an
+ /// [`EnvFilter`] from a string or environment variable.
+ ///
+ /// [builder]: https://rust-unofficial.github.io/patterns/patterns/creational/builder.html
+ pub fn builder() -> Builder {
+ Builder::default()
+ }
+
/// Returns a new `EnvFilter` from the value of the `RUST_LOG` environment
/// variable, ignoring any invalid filter directives.
+ ///
+ /// If the environment variable is empty or not set, or if it contains only
+ /// invalid directives, a default directive enabling the [`ERROR`] level is
+ /// added.
+ ///
+ /// To set additional configuration options prior to parsing the filter, use
+ /// the [`Builder`] type instead.
+ ///
+ /// This function is equivalent to the following:
+ ///
+ /// ```rust
+ /// use tracing_subscriber::filter::{EnvFilter, LevelFilter};
+ ///
+ /// # fn docs() -> EnvFilter {
+ /// EnvFilter::builder()
+ /// .with_default_directive(LevelFilter::ERROR.into())
+ /// .from_env_lossy()
+ /// # }
+ /// ```
+ ///
+ /// [`ERROR`]: tracing::Level::ERROR
pub fn from_default_env() -> Self {
- Self::from_env(Self::DEFAULT_ENV)
+ Self::builder()
+ .with_default_directive(LevelFilter::ERROR.into())
+ .from_env_lossy()
}
/// Returns a new `EnvFilter` from the value of the given environment
/// variable, ignoring any invalid filter directives.
+ ///
+ /// If the environment variable is empty or not set, or if it contains only
+ /// invalid directives, a default directive enabling the [`ERROR`] level is
+ /// added.
+ ///
+ /// To set additional configuration options prior to parsing the filter, use
+ /// the [`Builder`] type instead.
+ ///
+ /// This function is equivalent to the following:
+ ///
+ /// ```rust
+ /// use tracing_subscriber::filter::{EnvFilter, LevelFilter};
+ ///
+ /// # fn docs() -> EnvFilter {
+ /// # let env = "";
+ /// EnvFilter::builder()
+ /// .with_default_directive(LevelFilter::ERROR.into())
+ /// .with_env_var(env)
+ /// .from_env_lossy()
+ /// # }
+ /// ```
+ ///
+ /// [`ERROR`]: tracing::Level::ERROR
pub fn from_env<A: AsRef<str>>(env: A) -> Self {
- env::var(env.as_ref()).map(Self::new).unwrap_or_default()
+ Self::builder()
+ .with_default_directive(LevelFilter::ERROR.into())
+ .with_env_var(env.as_ref())
+ .from_env_lossy()
}
/// Returns a new `EnvFilter` from the directives in the given string,
/// ignoring any that are invalid.
- pub fn new<S: AsRef<str>>(dirs: S) -> Self {
- let directives = dirs.as_ref().split(',').filter_map(|s| match s.parse() {
- Ok(d) => Some(d),
- Err(err) => {
- eprintln!("ignoring `{}`: {}", s, err);
- None
- }
- });
- Self::from_directives(directives)
+ ///
+ /// If the string is empty or contains only invalid directives, a default
+ /// directive enabling the [`ERROR`] level is added.
+ ///
+ /// To set additional configuration options prior to parsing the filter, use
+ /// the [`Builder`] type instead.
+ ///
+ /// This function is equivalent to the following:
+ ///
+ /// ```rust
+ /// use tracing_subscriber::filter::{EnvFilter, LevelFilter};
+ ///
+ /// # fn docs() -> EnvFilter {
+ /// # let directives = "";
+ /// EnvFilter::builder()
+ /// .with_default_directive(LevelFilter::ERROR.into())
+ /// .parse_lossy(directives)
+ /// # }
+ /// ```
+ ///
+ /// [`ERROR`]: tracing::Level::ERROR
+ pub fn new<S: AsRef<str>>(directives: S) -> Self {
+ Self::builder()
+ .with_default_directive(LevelFilter::ERROR.into())
+ .parse_lossy(directives)
}
/// Returns a new `EnvFilter` from the directives in the given string,
/// or an error if any are invalid.
+ ///
+ /// If the string is empty, a default directive enabling the [`ERROR`] level
+ /// is added.
+ ///
+ /// To set additional configuration options prior to parsing the filter, use
+ /// the [`Builder`] type instead.
+ ///
+ /// This function is equivalent to the following:
+ ///
+ /// ```rust
+ /// use tracing_subscriber::filter::{EnvFilter, LevelFilter};
+ ///
+ /// # fn docs() -> Result<EnvFilter, tracing_subscriber::filter::ParseError> {
+ /// # let directives = "";
+ /// EnvFilter::builder()
+ /// .with_default_directive(LevelFilter::ERROR.into())
+ /// .parse(directives)
+ /// # }
+ /// ```
+ ///
+ /// [`ERROR`]: tracing::Level::ERROR
pub fn try_new<S: AsRef<str>>(dirs: S) -> Result<Self, directive::ParseError> {
- let directives = dirs
- .as_ref()
- .split(',')
- .map(|s| s.parse())
- .collect::<Result<Vec<_>, _>>()?;
- Ok(Self::from_directives(directives))
+ Self::builder().parse(dirs)
}
/// Returns a new `EnvFilter` from the value of the `RUST_LOG` environment
- /// variable, or an error if the environment variable contains any invalid
- /// filter directives.
+ /// variable, or an error if the environment variable is unset or contains
+ /// any invalid filter directives.
+ ///
+ /// To set additional configuration options prior to parsing the filter, use
+ /// the [`Builder`] type instead.
+ ///
+ /// This function is equivalent to the following:
+ ///
+ /// ```rust
+ /// use tracing_subscriber::EnvFilter;
+ ///
+ /// # fn docs() -> Result<EnvFilter, tracing_subscriber::filter::FromEnvError> {
+ /// EnvFilter::builder().try_from_env()
+ /// # }
+ /// ```
pub fn try_from_default_env() -> Result<Self, FromEnvError> {
- Self::try_from_env(Self::DEFAULT_ENV)
+ Self::builder().try_from_env()
}
/// Returns a new `EnvFilter` from the value of the given environment
/// variable, or an error if the environment variable is unset or contains
/// any invalid filter directives.
+ ///
+ /// To set additional configuration options prior to parsing the filter, use
+ /// the [`Builder`] type instead.
+ ///
+ /// This function is equivalent to the following:
+ ///
+ /// ```rust
+ /// use tracing_subscriber::EnvFilter;
+ ///
+ /// # fn docs() -> Result<EnvFilter, tracing_subscriber::filter::FromEnvError> {
+ /// # let env = "";
+ /// EnvFilter::builder().with_env_var(env).try_from_env()
+ /// # }
+ /// ```
pub fn try_from_env<A: AsRef<str>>(env: A) -> Result<Self, FromEnvError> {
- env::var(env.as_ref())?.parse().map_err(Into::into)
+ Self::builder().with_env_var(env.as_ref()).try_from_env()
}
/// Add a filtering directive to this `EnvFilter`.
@@ -202,13 +415,13 @@ impl EnvFilter {
/// and events as a previous filter, but sets a different level for those
/// spans and events, the previous directive is overwritten.
///
- /// [`LevelFilter`]: ../filter/struct.LevelFilter.html
- /// [`Level`]: https://docs.rs/tracing-core/latest/tracing_core/struct.Level.html
+ /// [`LevelFilter`]: super::LevelFilter
+ /// [`Level`]: tracing_core::Level
///
/// # Examples
///
/// From [`LevelFilter`]:
- ////
+ ///
/// ```rust
/// use tracing_subscriber::filter::{EnvFilter, LevelFilter};
/// let mut filter = EnvFilter::from_default_env()
@@ -223,9 +436,9 @@ impl EnvFilter {
/// let mut filter = EnvFilter::from_default_env()
/// .add_directive(Level::INFO.into());
/// ```
- ////
+ ///
/// Parsed from a string:
- ////
+ ///
/// ```rust
/// use tracing_subscriber::filter::{EnvFilter, Directive};
///
@@ -236,7 +449,15 @@ impl EnvFilter {
/// # Ok(())
/// # }
/// ```
- pub fn add_directive(mut self, directive: Directive) -> Self {
+    /// In the above example, substitute `my_crate`, `module`, etc. with the
+    /// name your target crate/module is imported with. This might be
+    /// different from the package name in Cargo.toml (`-` is replaced by `_`).
+    /// For example, if the package name in your Cargo.toml is `MY-FANCY-LIB`,
+    /// the corresponding Rust identifier is `MY_FANCY_LIB`.
+ pub fn add_directive(mut self, mut directive: Directive) -> Self {
+ if !self.regex {
+ directive.deregexify();
+ }
if let Some(stat) = directive.to_static() {
self.statics.add(stat)
} else {
@@ -246,165 +467,19 @@ impl EnvFilter {
self
}
- fn from_directives(directives: impl IntoIterator<Item = Directive>) -> Self {
- use tracing::level_filters::STATIC_MAX_LEVEL;
- use tracing::Level;
-
- let directives: Vec<_> = directives.into_iter().collect();
-
- let disabled: Vec<_> = directives
- .iter()
- .filter(|directive| directive.level > STATIC_MAX_LEVEL)
- .collect();
-
- if !disabled.is_empty() {
- #[cfg(feature = "ansi_term")]
- use ansi_term::{Color, Style};
- // NOTE: We can't use a configured `MakeWriter` because the EnvFilter
- // has no knowledge of any underlying subscriber or subscriber, which
- // may or may not use a `MakeWriter`.
- let warn = |msg: &str| {
- #[cfg(not(feature = "ansi_term"))]
- let msg = format!("warning: {}", msg);
- #[cfg(feature = "ansi_term")]
- let msg = {
- let bold = Style::new().bold();
- let mut warning = Color::Yellow.paint("warning");
- warning.style_ref_mut().is_bold = true;
- format!("{}{} {}", warning, bold.paint(":"), bold.paint(msg))
- };
- eprintln!("{}", msg);
- };
- let ctx_prefixed = |prefix: &str, msg: &str| {
- #[cfg(not(feature = "ansi_term"))]
- let msg = format!("note: {}", msg);
- #[cfg(feature = "ansi_term")]
- let msg = {
- let mut equal = Color::Fixed(21).paint("="); // dark blue
- equal.style_ref_mut().is_bold = true;
- format!(" {} {} {}", equal, Style::new().bold().paint(prefix), msg)
- };
- eprintln!("{}", msg);
- };
- let ctx_help = |msg| ctx_prefixed("help:", msg);
- let ctx_note = |msg| ctx_prefixed("note:", msg);
- let ctx = |msg: &str| {
- #[cfg(not(feature = "ansi_term"))]
- let msg = format!("note: {}", msg);
- #[cfg(feature = "ansi_term")]
- let msg = {
- let mut pipe = Color::Fixed(21).paint("|");
- pipe.style_ref_mut().is_bold = true;
- format!(" {} {}", pipe, msg)
- };
- eprintln!("{}", msg);
- };
- warn("some trace filter directives would enable traces that are disabled statically");
- for directive in disabled {
- let target = if let Some(target) = &directive.target {
- format!("the `{}` target", target)
- } else {
- "all targets".into()
- };
- let level = directive
- .level
- .into_level()
- .expect("=off would not have enabled any filters");
- ctx(&format!(
- "`{}` would enable the {} level for {}",
- directive, level, target
- ));
- }
- ctx_note(&format!("the static max level is `{}`", STATIC_MAX_LEVEL));
- let help_msg = || {
- let (feature, filter) = match STATIC_MAX_LEVEL.into_level() {
- Some(Level::TRACE) => unreachable!(
- "if the max level is trace, no static filtering features are enabled"
- ),
- Some(Level::DEBUG) => ("max_level_debug", Level::TRACE),
- Some(Level::INFO) => ("max_level_info", Level::DEBUG),
- Some(Level::WARN) => ("max_level_warn", Level::INFO),
- Some(Level::ERROR) => ("max_level_error", Level::WARN),
- None => return ("max_level_off", String::new()),
- };
- (feature, format!("{} ", filter))
- };
- let (feature, earlier_level) = help_msg();
- ctx_help(&format!(
- "to enable {}logging, remove the `{}` feature",
- earlier_level, feature
- ));
- }
-
- let (dynamics, mut statics) = Directive::make_tables(directives);
- let has_dynamics = !dynamics.is_empty();
-
- if statics.is_empty() && !has_dynamics {
- statics.add(directive::StaticDirective::default());
- }
-
- Self {
- statics,
- dynamics,
- has_dynamics,
- by_id: RwLock::new(HashMap::new()),
- by_cs: RwLock::new(HashMap::new()),
- }
- }
-
- fn cares_about_span(&self, span: &span::Id) -> bool {
- let spans = try_lock!(self.by_id.read(), else return false);
- spans.contains_key(span)
- }
-
- fn base_interest(&self) -> Interest {
- if self.has_dynamics {
- Interest::sometimes()
- } else {
- Interest::never()
- }
- }
-}
-
-impl<S: Subscriber> Layer<S> for EnvFilter {
- fn register_callsite(&self, metadata: &'static Metadata<'static>) -> Interest {
- if self.has_dynamics && metadata.is_span() {
- // If this metadata describes a span, first, check if there is a
- // dynamic filter that should be constructed for it. If so, it
- // should always be enabled, since it influences filtering.
- if let Some(matcher) = self.dynamics.matcher(metadata) {
- let mut by_cs = try_lock!(self.by_cs.write(), else return self.base_interest());
- by_cs.insert(metadata.callsite(), matcher);
- return Interest::always();
- }
- }
-
- // Otherwise, check if any of our static filters enable this metadata.
- if self.statics.enabled(metadata) {
- Interest::always()
- } else {
- self.base_interest()
- }
- }
-
- fn max_level_hint(&self) -> Option<LevelFilter> {
- if self.dynamics.has_value_filters() {
- // If we perform any filtering on span field *values*, we will
- // enable *all* spans, because their field values are not known
- // until recording.
- return Some(LevelFilter::TRACE);
- }
- std::cmp::max(
- self.statics.max_level.into(),
- self.dynamics.max_level.into(),
- )
- }
+ // === filtering methods ===
- fn enabled(&self, metadata: &Metadata<'_>, _: Context<'_, S>) -> bool {
+ /// Returns `true` if this `EnvFilter` would enable the provided `metadata`
+ /// in the current context.
+ ///
+ /// This is equivalent to calling the [`Layer::enabled`] or
+ /// [`Filter::enabled`] methods on `EnvFilter`'s implementations of those
+ /// traits, but it does not require the trait to be in scope.
+ pub fn enabled<S>(&self, metadata: &Metadata<'_>, _: Context<'_, S>) -> bool {
let level = metadata.level();
// is it possible for a dynamic filter directive to enable this event?
- // if not, we can avoid the thread local access + iterating over the
+        // if not, we can avoid the thread-local access + iterating over the
// spans in the current scope.
if self.has_dynamics && self.dynamics.max_level >= *level {
if metadata.is_span() {
@@ -420,14 +495,15 @@ impl<S: Subscriber> Layer<S> for EnvFilter {
}
}
- let enabled_by_scope = SCOPE.with(|scope| {
- for filter in scope.borrow().iter() {
+ let enabled_by_scope = {
+ let scope = self.scope.get_or_default().borrow();
+ for filter in &*scope {
if filter >= level {
return true;
}
}
false
- });
+ };
if enabled_by_scope {
return true;
}
@@ -443,7 +519,33 @@ impl<S: Subscriber> Layer<S> for EnvFilter {
false
}
- fn on_new_span(&self, attrs: &span::Attributes<'_>, id: &span::Id, _: Context<'_, S>) {
+ /// Returns an optional hint of the highest [verbosity level][level] that
+ /// this `EnvFilter` will enable.
+ ///
+ /// This is equivalent to calling the [`Layer::max_level_hint`] or
+ /// [`Filter::max_level_hint`] methods on `EnvFilter`'s implementations of those
+ /// traits, but it does not require the trait to be in scope.
+ ///
+ /// [level]: tracing_core::metadata::Level
+ pub fn max_level_hint(&self) -> Option<LevelFilter> {
+ if self.dynamics.has_value_filters() {
+ // If we perform any filtering on span field *values*, we will
+ // enable *all* spans, because their field values are not known
+ // until recording.
+ return Some(LevelFilter::TRACE);
+ }
+ std::cmp::max(
+ self.statics.max_level.into(),
+ self.dynamics.max_level.into(),
+ )
+ }
+
+ /// Informs the filter that a new span was created.
+ ///
+ /// This is equivalent to calling the [`Layer::on_new_span`] or
+ /// [`Filter::on_new_span`] methods on `EnvFilter`'s implementations of those
+ /// traits, but it does not require the trait to be in scope.
+ pub fn on_new_span<S>(&self, attrs: &span::Attributes<'_>, id: &span::Id, _: Context<'_, S>) {
let by_cs = try_lock!(self.by_cs.read());
if let Some(cs) = by_cs.get(&attrs.metadata().callsite()) {
let span = cs.to_span_match(attrs);
@@ -451,28 +553,37 @@ impl<S: Subscriber> Layer<S> for EnvFilter {
}
}
- fn on_record(&self, id: &span::Id, values: &span::Record<'_>, _: Context<'_, S>) {
- if let Some(span) = try_lock!(self.by_id.read()).get(id) {
- span.record_update(values);
- }
- }
-
- fn on_enter(&self, id: &span::Id, _: Context<'_, S>) {
+ /// Informs the filter that the span with the provided `id` was entered.
+ ///
+ /// This is equivalent to calling the [`Layer::on_enter`] or
+ /// [`Filter::on_enter`] methods on `EnvFilter`'s implementations of those
+ /// traits, but it does not require the trait to be in scope.
+ pub fn on_enter<S>(&self, id: &span::Id, _: Context<'_, S>) {
// XXX: This is where _we_ could push IDs to the stack instead, and use
// that to allow changing the filter while a span is already entered.
// But that might be much less efficient...
if let Some(span) = try_lock!(self.by_id.read()).get(id) {
- SCOPE.with(|scope| scope.borrow_mut().push(span.level()));
+ self.scope.get_or_default().borrow_mut().push(span.level());
}
}
- fn on_exit(&self, id: &span::Id, _: Context<'_, S>) {
+ /// Informs the filter that the span with the provided `id` was exited.
+ ///
+ /// This is equivalent to calling the [`Layer::on_exit`] or
+ /// [`Filter::on_exit`] methods on `EnvFilter`'s implementations of those
+ /// traits, but it does not require the trait to be in scope.
+ pub fn on_exit<S>(&self, id: &span::Id, _: Context<'_, S>) {
if self.cares_about_span(id) {
- SCOPE.with(|scope| scope.borrow_mut().pop());
+ self.scope.get_or_default().borrow_mut().pop();
}
}
- fn on_close(&self, id: span::Id, _: Context<'_, S>) {
+ /// Informs the filter that the span with the provided `id` was closed.
+ ///
+ /// This is equivalent to calling the [`Layer::on_close`] or
+ /// [`Filter::on_close`] methods on `EnvFilter`'s implementations of those
+ /// traits, but it does not require the trait to be in scope.
+ pub fn on_close<S>(&self, id: span::Id, _: Context<'_, S>) {
// If we don't need to acquire a write lock, avoid doing so.
if !self.cares_about_span(&id) {
return;
@@ -481,6 +592,140 @@ impl<S: Subscriber> Layer<S> for EnvFilter {
let mut spans = try_lock!(self.by_id.write());
spans.remove(&id);
}
+
+ /// Informs the filter that the span with the provided `id` recorded the
+ /// provided field `values`.
+ ///
+ /// This is equivalent to calling the [`Layer::on_record`] or
+ /// [`Filter::on_record`] methods on `EnvFilter`'s implementations of those
+ /// traits, but it does not require the trait to be in scope
+ pub fn on_record<S>(&self, id: &span::Id, values: &span::Record<'_>, _: Context<'_, S>) {
+ if let Some(span) = try_lock!(self.by_id.read()).get(id) {
+ span.record_update(values);
+ }
+ }
+
+ fn cares_about_span(&self, span: &span::Id) -> bool {
+ let spans = try_lock!(self.by_id.read(), else return false);
+ spans.contains_key(span)
+ }
+
+ fn base_interest(&self) -> Interest {
+ if self.has_dynamics {
+ Interest::sometimes()
+ } else {
+ Interest::never()
+ }
+ }
+
+ fn register_callsite(&self, metadata: &'static Metadata<'static>) -> Interest {
+ if self.has_dynamics && metadata.is_span() {
+ // If this metadata describes a span, first, check if there is a
+ // dynamic filter that should be constructed for it. If so, it
+ // should always be enabled, since it influences filtering.
+ if let Some(matcher) = self.dynamics.matcher(metadata) {
+ let mut by_cs = try_lock!(self.by_cs.write(), else return self.base_interest());
+ by_cs.insert(metadata.callsite(), matcher);
+ return Interest::always();
+ }
+ }
+
+ // Otherwise, check if any of our static filters enable this metadata.
+ if self.statics.enabled(metadata) {
+ Interest::always()
+ } else {
+ self.base_interest()
+ }
+ }
+}
+
+impl<S: Subscriber> Layer<S> for EnvFilter {
+ #[inline]
+ fn register_callsite(&self, metadata: &'static Metadata<'static>) -> Interest {
+ EnvFilter::register_callsite(self, metadata)
+ }
+
+ #[inline]
+ fn max_level_hint(&self) -> Option<LevelFilter> {
+ EnvFilter::max_level_hint(self)
+ }
+
+ #[inline]
+ fn enabled(&self, metadata: &Metadata<'_>, ctx: Context<'_, S>) -> bool {
+ self.enabled(metadata, ctx)
+ }
+
+ #[inline]
+ fn on_new_span(&self, attrs: &span::Attributes<'_>, id: &span::Id, ctx: Context<'_, S>) {
+ self.on_new_span(attrs, id, ctx)
+ }
+
+ #[inline]
+ fn on_record(&self, id: &span::Id, values: &span::Record<'_>, ctx: Context<'_, S>) {
+ self.on_record(id, values, ctx);
+ }
+
+ #[inline]
+ fn on_enter(&self, id: &span::Id, ctx: Context<'_, S>) {
+ self.on_enter(id, ctx);
+ }
+
+ #[inline]
+ fn on_exit(&self, id: &span::Id, ctx: Context<'_, S>) {
+ self.on_exit(id, ctx);
+ }
+
+ #[inline]
+ fn on_close(&self, id: span::Id, ctx: Context<'_, S>) {
+ self.on_close(id, ctx);
+ }
+}
+
+feature! {
+ #![all(feature = "registry", feature = "std")]
+ use crate::layer::Filter;
+
+ impl<S> Filter<S> for EnvFilter {
+ #[inline]
+ fn enabled(&self, meta: &Metadata<'_>, ctx: &Context<'_, S>) -> bool {
+ self.enabled(meta, ctx.clone())
+ }
+
+ #[inline]
+ fn callsite_enabled(&self, meta: &'static Metadata<'static>) -> Interest {
+ self.register_callsite(meta)
+ }
+
+ #[inline]
+ fn max_level_hint(&self) -> Option<LevelFilter> {
+ EnvFilter::max_level_hint(self)
+ }
+
+ #[inline]
+ fn on_new_span(&self, attrs: &span::Attributes<'_>, id: &span::Id, ctx: Context<'_, S>) {
+ self.on_new_span(attrs, id, ctx)
+ }
+
+ #[inline]
+ fn on_record(&self, id: &span::Id, values: &span::Record<'_>, ctx: Context<'_, S>) {
+ self.on_record(id, values, ctx);
+ }
+
+ #[inline]
+ fn on_enter(&self, id: &span::Id, ctx: Context<'_, S>) {
+ self.on_enter(id, ctx);
+ }
+
+ #[inline]
+ fn on_exit(&self, id: &span::Id, ctx: Context<'_, S>) {
+ self.on_exit(id, ctx);
+ }
+
+ #[inline]
+ fn on_close(&self, id: span::Id, ctx: Context<'_, S>) {
+ self.on_close(id, ctx);
+ }
+ }
}
impl FromStr for EnvFilter {
@@ -502,7 +747,7 @@ where
impl Default for EnvFilter {
fn default() -> Self {
- Self::from_directives(std::iter::empty())
+ Builder::default().from_directives(std::iter::empty())
}
}
@@ -735,4 +980,12 @@ mod tests {
[span2{bar=2 baz=false}],crate2[{quux=\"quuux\"}]=debug",
);
}
+
+ #[test]
+ fn parse_empty_string() {
+ // There is no corresponding test for [`Builder::parse_lossy`] as failed
+    // parsing does not produce any observable side effects. If this test fails,
+ // check that [`Builder::parse_lossy`] is behaving correctly as well.
+ assert!(EnvFilter::builder().parse("").is_ok());
+ }
}
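Stepping back from the individual hunks above, a minimal sketch of the builder path that all of the rewritten constructors now delegate to, assuming the `env-filter` and `fmt` features and the `with_regex(bool)` signature implied by the doc links (the `MYAPP_LOG` variable name is illustrative):

```rust
use tracing_subscriber::{filter::LevelFilter, fmt, prelude::*, EnvFilter};

fn main() {
    // Equivalent in spirit to `EnvFilter::from_env("MYAPP_LOG")`, but with
    // regex matching of field-value directives disabled, as recommended above
    // for untrusted input, and a custom default directive.
    let filter = EnvFilter::builder()
        .with_default_directive(LevelFilter::INFO.into())
        .with_env_var("MYAPP_LOG")
        .with_regex(false)
        .from_env_lossy();

    tracing_subscriber::registry()
        .with(fmt::layer().with_filter(filter))
        .init();

    tracing::info!("enabled by the INFO default directive");
    tracing::trace!("dropped unless MYAPP_LOG opts in");
}
```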
diff --git a/vendor/tracing-subscriber-0.3.3/src/filter/filter_fn.rs b/vendor/tracing-subscriber/src/filter/filter_fn.rs
index 332bf860a..332bf860a 100644
--- a/vendor/tracing-subscriber-0.3.3/src/filter/filter_fn.rs
+++ b/vendor/tracing-subscriber/src/filter/filter_fn.rs
diff --git a/vendor/tracing-subscriber/src/filter/layer_filters/combinator.rs b/vendor/tracing-subscriber/src/filter/layer_filters/combinator.rs
new file mode 100644
index 000000000..3934a1326
--- /dev/null
+++ b/vendor/tracing-subscriber/src/filter/layer_filters/combinator.rs
@@ -0,0 +1,542 @@
+//! Filter combinators
+use crate::layer::{Context, Filter};
+use std::{cmp, fmt, marker::PhantomData};
+use tracing_core::{
+ span::{Attributes, Id, Record},
+ subscriber::Interest,
+ LevelFilter, Metadata,
+};
+
+/// Combines two [`Filter`]s so that spans and events are enabled if and only if
+/// *both* filters return `true`.
+///
+/// This type is typically returned by the [`FilterExt::and`] method. See that
+/// method's documentation for details.
+///
+/// [`Filter`]: crate::layer::Filter
+/// [`FilterExt::and`]: crate::filter::FilterExt::and
+pub struct And<A, B, S> {
+ a: A,
+ b: B,
+ _s: PhantomData<fn(S)>,
+}
+
+/// Combines two [`Filter`]s so that spans and events are enabled if *either* filter
+/// returns `true`.
+///
+/// This type is typically returned by the [`FilterExt::or`] method. See that
+/// method's documentation for details.
+///
+/// [`Filter`]: crate::layer::Filter
+/// [`FilterExt::or`]: crate::filter::FilterExt::or
+pub struct Or<A, B, S> {
+ a: A,
+ b: B,
+ _s: PhantomData<fn(S)>,
+}
+
+/// Inverts the result of a [`Filter`].
+///
+/// If the wrapped filter would enable a span or event, it will be disabled. If
+/// it would disable a span or event, that span or event will be enabled.
+///
+/// This type is typically returned by the [`FilterExt::not`] method. See that
+/// method's documentation for details.
+///
+/// [`Filter`]: crate::layer::Filter
+/// [`FilterExt::not`]: crate::filter::FilterExt::not
+pub struct Not<A, S> {
+ a: A,
+ _s: PhantomData<fn(S)>,
+}
+
+// === impl And ===
+
+impl<A, B, S> And<A, B, S>
+where
+ A: Filter<S>,
+ B: Filter<S>,
+{
+ /// Combines two [`Filter`]s so that spans and events are enabled if and only if
+ /// *both* filters return `true`.
+ ///
+ /// # Examples
+ ///
+ /// Enabling spans or events if they have both a particular target *and* are
+ /// above a certain level:
+ ///
+ /// ```ignore
+ /// use tracing_subscriber::{
+ /// filter::{filter_fn, LevelFilter, combinator::And},
+ /// prelude::*,
+ /// };
+ ///
+ /// // Enables spans and events with targets starting with `interesting_target`:
+ /// let target_filter = filter_fn(|meta| {
+ /// meta.target().starts_with("interesting_target")
+ /// });
+ ///
+ /// // Enables spans and events with levels `INFO` and below:
+ /// let level_filter = LevelFilter::INFO;
+ ///
+ /// // Combine the two filters together so that a span or event is only enabled
+ /// // if *both* filters would enable it:
+ /// let filter = And::new(level_filter, target_filter);
+ ///
+ /// tracing_subscriber::registry()
+ /// .with(tracing_subscriber::fmt::layer().with_filter(filter))
+ /// .init();
+ ///
+ /// // This event will *not* be enabled:
+ /// tracing::info!("an event with an uninteresting target");
+ ///
+ /// // This event *will* be enabled:
+ /// tracing::info!(target: "interesting_target", "a very interesting event");
+ ///
+ /// // This event will *not* be enabled:
+ /// tracing::debug!(target: "interesting_target", "interesting debug event...");
+ /// ```
+ ///
+ /// [`Filter`]: crate::layer::Filter
+ pub(crate) fn new(a: A, b: B) -> Self {
+ Self {
+ a,
+ b,
+ _s: PhantomData,
+ }
+ }
+}
+
+impl<A, B, S> Filter<S> for And<A, B, S>
+where
+ A: Filter<S>,
+ B: Filter<S>,
+{
+ #[inline]
+ fn enabled(&self, meta: &Metadata<'_>, cx: &Context<'_, S>) -> bool {
+ self.a.enabled(meta, cx) && self.b.enabled(meta, cx)
+ }
+
+ fn callsite_enabled(&self, meta: &'static Metadata<'static>) -> Interest {
+ let a = self.a.callsite_enabled(meta);
+ if a.is_never() {
+ return a;
+ }
+
+ let b = self.b.callsite_enabled(meta);
+
+ if !b.is_always() {
+ return b;
+ }
+
+ a
+ }
+
+ fn max_level_hint(&self) -> Option<LevelFilter> {
+ // If either hint is `None`, return `None`. Otherwise, return the most restrictive.
+ cmp::min(self.a.max_level_hint(), self.b.max_level_hint())
+ }
+
+ #[inline]
+ fn event_enabled(&self, event: &tracing_core::Event<'_>, cx: &Context<'_, S>) -> bool {
+ self.a.event_enabled(event, cx) && self.b.event_enabled(event, cx)
+ }
+
+ #[inline]
+ fn on_new_span(&self, attrs: &Attributes<'_>, id: &Id, ctx: Context<'_, S>) {
+ self.a.on_new_span(attrs, id, ctx.clone());
+ self.b.on_new_span(attrs, id, ctx)
+ }
+
+ #[inline]
+ fn on_record(&self, id: &Id, values: &Record<'_>, ctx: Context<'_, S>) {
+ self.a.on_record(id, values, ctx.clone());
+ self.b.on_record(id, values, ctx);
+ }
+
+ #[inline]
+ fn on_enter(&self, id: &Id, ctx: Context<'_, S>) {
+ self.a.on_enter(id, ctx.clone());
+ self.b.on_enter(id, ctx);
+ }
+
+ #[inline]
+ fn on_exit(&self, id: &Id, ctx: Context<'_, S>) {
+ self.a.on_exit(id, ctx.clone());
+ self.b.on_exit(id, ctx);
+ }
+
+ #[inline]
+ fn on_close(&self, id: Id, ctx: Context<'_, S>) {
+ self.a.on_close(id.clone(), ctx.clone());
+ self.b.on_close(id, ctx);
+ }
+}
+
+impl<A, B, S> Clone for And<A, B, S>
+where
+ A: Clone,
+ B: Clone,
+{
+ fn clone(&self) -> Self {
+ Self {
+ a: self.a.clone(),
+ b: self.b.clone(),
+ _s: PhantomData,
+ }
+ }
+}
+
+impl<A, B, S> fmt::Debug for And<A, B, S>
+where
+ A: fmt::Debug,
+ B: fmt::Debug,
+{
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("And")
+ .field("a", &self.a)
+ .field("b", &self.b)
+ .finish()
+ }
+}
+
+// === impl Or ===
+
+impl<A, B, S> Or<A, B, S>
+where
+ A: Filter<S>,
+ B: Filter<S>,
+{
+ /// Combines two [`Filter`]s so that spans and events are enabled if *either* filter
+ /// returns `true`.
+ ///
+ /// # Examples
+ ///
+ /// Enabling spans and events at the `INFO` level and above, and all spans
+ /// and events with a particular target:
+ ///
+ /// ```ignore
+ /// use tracing_subscriber::{
+ /// filter::{filter_fn, LevelFilter, combinator::Or},
+ /// prelude::*,
+ /// };
+ ///
+ /// // Enables spans and events with targets starting with `interesting_target`:
+ /// let target_filter = filter_fn(|meta| {
+ /// meta.target().starts_with("interesting_target")
+ /// });
+ ///
+ /// // Enables spans and events with levels `INFO` and below:
+ /// let level_filter = LevelFilter::INFO;
+ ///
+ /// // Combine the two filters together so that a span or event is enabled
+ /// // if it is at INFO or lower, or if it has a target starting with
+ /// // `interesting_target`.
+ /// let filter = Or::new(level_filter, target_filter);
+ ///
+ /// tracing_subscriber::registry()
+ /// .with(tracing_subscriber::fmt::layer().with_filter(filter))
+ /// .init();
+ ///
+ /// // This event will *not* be enabled:
+ /// tracing::debug!("an uninteresting event");
+ ///
+ /// // This event *will* be enabled:
+ /// tracing::info!("an uninteresting INFO event");
+ ///
+ /// // This event *will* be enabled:
+ /// tracing::info!(target: "interesting_target", "a very interesting event");
+ ///
+ /// // This event *will* be enabled:
+ /// tracing::debug!(target: "interesting_target", "interesting debug event...");
+ /// ```
+ ///
+ /// Enabling a higher level for a particular target by using `Or` in
+ /// conjunction with the [`And`] combinator:
+ ///
+ /// ```ignore
+ /// use tracing_subscriber::{
+ /// filter::{filter_fn, LevelFilter, combinator},
+ /// prelude::*,
+ /// };
+ ///
+ /// // This filter will enable spans and events with targets beginning with
+ /// // `my_crate`:
+ /// let my_crate = filter_fn(|meta| {
+ /// meta.target().starts_with("my_crate")
+ /// });
+ ///
+ /// // Combine the `my_crate` filter with a `LevelFilter` to produce a filter
+ /// // that will enable the `INFO` level and lower for spans and events with
+ /// // `my_crate` targets:
+ /// let filter = combinator::And::new(my_crate, LevelFilter::INFO);
+ ///
+ /// // If a span or event *doesn't* have a target beginning with
+ /// // `my_crate`, enable it if it has the `WARN` level or lower:
+    /// let filter = combinator::Or::new(filter, LevelFilter::WARN);
+ ///
+ /// tracing_subscriber::registry()
+ /// .with(tracing_subscriber::fmt::layer().with_filter(filter))
+ /// .init();
+ /// ```
+ ///
+ /// [`Filter`]: crate::layer::Filter
+ pub(crate) fn new(a: A, b: B) -> Self {
+ Self {
+ a,
+ b,
+ _s: PhantomData,
+ }
+ }
+}
+
+impl<A, B, S> Filter<S> for Or<A, B, S>
+where
+ A: Filter<S>,
+ B: Filter<S>,
+{
+ #[inline]
+ fn enabled(&self, meta: &Metadata<'_>, cx: &Context<'_, S>) -> bool {
+ self.a.enabled(meta, cx) || self.b.enabled(meta, cx)
+ }
+
+ fn callsite_enabled(&self, meta: &'static Metadata<'static>) -> Interest {
+ let a = self.a.callsite_enabled(meta);
+ let b = self.b.callsite_enabled(meta);
+
+ // If either filter will always enable the span or event, return `always`.
+ if a.is_always() || b.is_always() {
+ return Interest::always();
+ }
+
+ // Okay, if either filter will sometimes enable the span or event,
+ // return `sometimes`.
+ if a.is_sometimes() || b.is_sometimes() {
+ return Interest::sometimes();
+ }
+
+ debug_assert!(
+ a.is_never() && b.is_never(),
+ "if neither filter was `always` or `sometimes`, both must be `never` (a={:?}; b={:?})",
+ a,
+ b,
+ );
+ Interest::never()
+ }
+
+ fn max_level_hint(&self) -> Option<LevelFilter> {
+ // If either hint is `None`, return `None`. Otherwise, return the less restrictive.
+ Some(cmp::max(self.a.max_level_hint()?, self.b.max_level_hint()?))
+ }
+
+ #[inline]
+ fn event_enabled(&self, event: &tracing_core::Event<'_>, cx: &Context<'_, S>) -> bool {
+ self.a.event_enabled(event, cx) || self.b.event_enabled(event, cx)
+ }
+
+ #[inline]
+ fn on_new_span(&self, attrs: &Attributes<'_>, id: &Id, ctx: Context<'_, S>) {
+ self.a.on_new_span(attrs, id, ctx.clone());
+ self.b.on_new_span(attrs, id, ctx)
+ }
+
+ #[inline]
+ fn on_record(&self, id: &Id, values: &Record<'_>, ctx: Context<'_, S>) {
+ self.a.on_record(id, values, ctx.clone());
+ self.b.on_record(id, values, ctx);
+ }
+
+ #[inline]
+ fn on_enter(&self, id: &Id, ctx: Context<'_, S>) {
+ self.a.on_enter(id, ctx.clone());
+ self.b.on_enter(id, ctx);
+ }
+
+ #[inline]
+ fn on_exit(&self, id: &Id, ctx: Context<'_, S>) {
+ self.a.on_exit(id, ctx.clone());
+ self.b.on_exit(id, ctx);
+ }
+
+ #[inline]
+ fn on_close(&self, id: Id, ctx: Context<'_, S>) {
+ self.a.on_close(id.clone(), ctx.clone());
+ self.b.on_close(id, ctx);
+ }
+}
+
+impl<A, B, S> Clone for Or<A, B, S>
+where
+ A: Clone,
+ B: Clone,
+{
+ fn clone(&self) -> Self {
+ Self {
+ a: self.a.clone(),
+ b: self.b.clone(),
+ _s: PhantomData,
+ }
+ }
+}
+
+impl<A, B, S> fmt::Debug for Or<A, B, S>
+where
+ A: fmt::Debug,
+ B: fmt::Debug,
+{
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("Or")
+ .field("a", &self.a)
+ .field("b", &self.b)
+ .finish()
+ }
+}
+
+// === impl Not ===
+
+impl<A, S> Not<A, S>
+where
+ A: Filter<S>,
+{
+ /// Inverts the result of a [`Filter`].
+ ///
+ /// If the wrapped filter would enable a span or event, it will be disabled. If
+ /// it would disable a span or event, that span or event will be enabled.
+ ///
+ /// This inverts the values returned by the [`enabled`] and [`callsite_enabled`]
+ /// methods on the wrapped filter; it does *not* invert [`event_enabled`], as
+ /// filters which do not implement filtering on event field values will return
+ /// the default `true` even for events that their [`enabled`] method disables.
+ ///
+ /// Consider a normal filter defined as:
+ ///
+ /// ```ignore (pseudo-code)
+ /// // for spans
+ /// match callsite_enabled() {
+ /// ALWAYS => on_span(),
+ /// SOMETIMES => if enabled() { on_span() },
+ /// NEVER => (),
+ /// }
+ /// // for events
+ /// match callsite_enabled() {
+ /// ALWAYS => on_event(),
+ /// SOMETIMES => if enabled() && event_enabled() { on_event() },
+ /// NEVER => (),
+ /// }
+ /// ```
+ ///
+ /// and an inverted filter defined as:
+ ///
+ /// ```ignore (pseudo-code)
+ /// // for spans
+ /// match callsite_enabled() {
+ /// ALWAYS => (),
+ /// SOMETIMES => if !enabled() { on_span() },
+ /// NEVER => on_span(),
+ /// }
+ /// // for events
+ /// match callsite_enabled() {
+ /// ALWAYS => (),
+ /// SOMETIMES => if !enabled() { on_event() },
+ /// NEVER => on_event(),
+ /// }
+ /// ```
+ ///
+ /// A proper inversion would do `!(enabled() && event_enabled())` (or
+ /// `!enabled() || !event_enabled()`), but because of the implicit `&&`
+ /// relation between `enabled` and `event_enabled`, it is difficult to
+ /// short circuit and not call the wrapped `event_enabled`.
+ ///
+ /// A combinator which remembers the result of `enabled` in order to call
+ /// `event_enabled` only when `enabled() == true` is possible, but requires
+ /// additional thread-local mutable state to support a very niche use case.
+ //
+ // Also, it'd mean the wrapped layer's `enabled()` always gets called and
+ // globally applied to events where it doesn't today, since we can't know
+ // what `event_enabled` will say until we have the event to call it with.
+ ///
+ /// [`Filter`]: crate::layer::Filter
+ /// [`enabled`]: crate::layer::Filter::enabled
+ /// [`event_enabled`]: crate::layer::Filter::event_enabled
+ /// [`callsite_enabled`]: crate::layer::Filter::callsite_enabled
+ pub(crate) fn new(a: A) -> Self {
+ Self { a, _s: PhantomData }
+ }
+}
+
+impl<A, S> Filter<S> for Not<A, S>
+where
+ A: Filter<S>,
+{
+ #[inline]
+ fn enabled(&self, meta: &Metadata<'_>, cx: &Context<'_, S>) -> bool {
+ !self.a.enabled(meta, cx)
+ }
+
+ fn callsite_enabled(&self, meta: &'static Metadata<'static>) -> Interest {
+ match self.a.callsite_enabled(meta) {
+ i if i.is_always() => Interest::never(),
+ i if i.is_never() => Interest::always(),
+ _ => Interest::sometimes(),
+ }
+ }
+
+ fn max_level_hint(&self) -> Option<LevelFilter> {
+ // TODO(eliza): figure this out???
+ None
+ }
+
+ #[inline]
+ fn event_enabled(&self, event: &tracing_core::Event<'_>, cx: &Context<'_, S>) -> bool {
+        // Never disable based on event_enabled; we "disabled" it in `enabled`,
+        // so the `not` has already been applied and filtered this event out.
+ let _ = (event, cx);
+ true
+ }
+
+ #[inline]
+ fn on_new_span(&self, attrs: &Attributes<'_>, id: &Id, ctx: Context<'_, S>) {
+ self.a.on_new_span(attrs, id, ctx);
+ }
+
+ #[inline]
+ fn on_record(&self, id: &Id, values: &Record<'_>, ctx: Context<'_, S>) {
+ self.a.on_record(id, values, ctx.clone());
+ }
+
+ #[inline]
+ fn on_enter(&self, id: &Id, ctx: Context<'_, S>) {
+ self.a.on_enter(id, ctx);
+ }
+
+ #[inline]
+ fn on_exit(&self, id: &Id, ctx: Context<'_, S>) {
+ self.a.on_exit(id, ctx);
+ }
+
+ #[inline]
+ fn on_close(&self, id: Id, ctx: Context<'_, S>) {
+ self.a.on_close(id, ctx);
+ }
+}
+
+impl<A, S> Clone for Not<A, S>
+where
+ A: Clone,
+{
+ fn clone(&self) -> Self {
+ Self {
+ a: self.a.clone(),
+ _s: PhantomData,
+ }
+ }
+}
+
+impl<A, S> fmt::Debug for Not<A, S>
+where
+ A: fmt::Debug,
+{
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_tuple("Not").field(&self.a).finish()
+ }
+}
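Because `And::new`, `Or::new`, and `Not::new` are `pub(crate)`, user code reaches these combinators through the `FilterExt` methods added in the `layer_filters` change below. A minimal sketch combining all three, assuming the `registry` and `std` features that gate per-layer filtering (the target names are illustrative):

```rust
use tracing_subscriber::{
    filter::{filter_fn, FilterExt, LevelFilter},
    prelude::*,
};

fn main() {
    // INFO and below for `my_crate` targets, WARN and below for everything
    // else, but never anything whose target starts with `noisy`.
    let filter = filter_fn(|meta| meta.target().starts_with("my_crate"))
        .and(LevelFilter::INFO)
        .or(LevelFilter::WARN)
        .and(filter_fn(|meta| meta.target().starts_with("noisy")).not());

    tracing_subscriber::registry()
        .with(tracing_subscriber::fmt::layer().with_filter(filter))
        .init();

    tracing::warn!(target: "noisy::worker", "suppressed by the `.not()` filter");
    tracing::info!(target: "my_crate::api", "enabled by the `.and(LevelFilter::INFO)` branch");
}
```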
diff --git a/vendor/tracing-subscriber-0.3.3/src/filter/layer_filters.rs b/vendor/tracing-subscriber/src/filter/layer_filters/mod.rs
index e77fd3751..e50ee6f00 100644
--- a/vendor/tracing-subscriber-0.3.3/src/filter/layer_filters.rs
+++ b/vendor/tracing-subscriber/src/filter/layer_filters/mod.rs
@@ -37,14 +37,16 @@ use std::{
cell::{Cell, RefCell},
fmt,
marker::PhantomData,
+ ops::Deref,
sync::Arc,
thread_local,
};
use tracing_core::{
span,
subscriber::{Interest, Subscriber},
- Event, Metadata,
+ Dispatch, Event, Metadata,
};
+pub mod combinator;
/// A [`Layer`] that wraps an inner [`Layer`] and adds a [`Filter`] which
/// controls what spans and events are enabled for that layer.
@@ -158,7 +160,288 @@ thread_local! {
pub(crate) static FILTERING: FilterState = FilterState::new();
}
+/// Extension trait adding [combinators] for combining [`Filter`].
+///
+/// [combinators]: crate::filter::combinator
+/// [`Filter`]: crate::layer::Filter
+pub trait FilterExt<S>: layer::Filter<S> {
+    /// Combines this [`Filter`] with another [`Filter`] so that spans and
+ /// events are enabled if and only if *both* filters return `true`.
+ ///
+ /// # Examples
+ ///
+ /// Enabling spans or events if they have both a particular target *and* are
+ /// above a certain level:
+ ///
+ /// ```
+ /// use tracing_subscriber::{
+ /// filter::{filter_fn, LevelFilter, FilterExt},
+ /// prelude::*,
+ /// };
+ ///
+ /// // Enables spans and events with targets starting with `interesting_target`:
+ /// let target_filter = filter_fn(|meta| {
+ /// meta.target().starts_with("interesting_target")
+ /// });
+ ///
+ /// // Enables spans and events with levels `INFO` and below:
+ /// let level_filter = LevelFilter::INFO;
+ ///
+ /// // Combine the two filters together, returning a filter that only enables
+ /// // spans and events that *both* filters will enable:
+ /// let filter = target_filter.and(level_filter);
+ ///
+ /// tracing_subscriber::registry()
+ /// .with(tracing_subscriber::fmt::layer().with_filter(filter))
+ /// .init();
+ ///
+ /// // This event will *not* be enabled:
+ /// tracing::info!("an event with an uninteresting target");
+ ///
+ /// // This event *will* be enabled:
+ /// tracing::info!(target: "interesting_target", "a very interesting event");
+ ///
+ /// // This event will *not* be enabled:
+ /// tracing::debug!(target: "interesting_target", "interesting debug event...");
+ /// ```
+ ///
+ /// [`Filter`]: crate::layer::Filter
+ fn and<B>(self, other: B) -> combinator::And<Self, B, S>
+ where
+ Self: Sized,
+ B: layer::Filter<S>,
+ {
+ combinator::And::new(self, other)
+ }
+
+ /// Combines two [`Filter`]s so that spans and events are enabled if *either* filter
+ /// returns `true`.
+ ///
+ /// # Examples
+ ///
+ /// Enabling spans and events at the `INFO` level and above, and all spans
+ /// and events with a particular target:
+ /// ```
+ /// use tracing_subscriber::{
+ /// filter::{filter_fn, LevelFilter, FilterExt},
+ /// prelude::*,
+ /// };
+ ///
+ /// // Enables spans and events with targets starting with `interesting_target`:
+ /// let target_filter = filter_fn(|meta| {
+ /// meta.target().starts_with("interesting_target")
+ /// });
+ ///
+ /// // Enables spans and events with levels `INFO` and below:
+ /// let level_filter = LevelFilter::INFO;
+ ///
+ /// // Combine the two filters together so that a span or event is enabled
+ /// // if it is at INFO or lower, or if it has a target starting with
+ /// // `interesting_target`.
+ /// let filter = level_filter.or(target_filter);
+ ///
+ /// tracing_subscriber::registry()
+ /// .with(tracing_subscriber::fmt::layer().with_filter(filter))
+ /// .init();
+ ///
+ /// // This event will *not* be enabled:
+ /// tracing::debug!("an uninteresting event");
+ ///
+ /// // This event *will* be enabled:
+ /// tracing::info!("an uninteresting INFO event");
+ ///
+ /// // This event *will* be enabled:
+ /// tracing::info!(target: "interesting_target", "a very interesting event");
+ ///
+ /// // This event *will* be enabled:
+ /// tracing::debug!(target: "interesting_target", "interesting debug event...");
+ /// ```
+ ///
+ /// Enabling a higher level for a particular target by using `or` in
+ /// conjunction with the [`and`] combinator:
+ ///
+ /// ```
+ /// use tracing_subscriber::{
+ /// filter::{filter_fn, LevelFilter, FilterExt},
+ /// prelude::*,
+ /// };
+ ///
+ /// // This filter will enable spans and events with targets beginning with
+ /// // `my_crate`:
+ /// let my_crate = filter_fn(|meta| {
+ /// meta.target().starts_with("my_crate")
+ /// });
+ ///
+ /// let filter = my_crate
+ /// // Combine the `my_crate` filter with a `LevelFilter` to produce a
+ /// // filter that will enable the `INFO` level and lower for spans and
+ /// // events with `my_crate` targets:
+ /// .and(LevelFilter::INFO)
+ /// // If a span or event *doesn't* have a target beginning with
+ /// // `my_crate`, enable it if it has the `WARN` level or lower:
+ /// .or(LevelFilter::WARN);
+ ///
+ /// tracing_subscriber::registry()
+ /// .with(tracing_subscriber::fmt::layer().with_filter(filter))
+ /// .init();
+ /// ```
+ ///
+ /// [`Filter`]: crate::layer::Filter
+ /// [`and`]: FilterExt::and
+ fn or<B>(self, other: B) -> combinator::Or<Self, B, S>
+ where
+ Self: Sized,
+ B: layer::Filter<S>,
+ {
+ combinator::Or::new(self, other)
+ }
+
+ /// Inverts `self`, returning a filter that enables spans and events only if
+ /// `self` would *not* enable them.
+ ///
+ /// This inverts the values returned by the [`enabled`] and [`callsite_enabled`]
+ /// methods on the wrapped filter; it does *not* invert [`event_enabled`], as
+ /// filters which do not implement filtering on event field values will return
+ /// the default `true` even for events that their [`enabled`] method disables.
+ ///
+ /// Consider a normal filter defined as:
+ ///
+ /// ```ignore (pseudo-code)
+ /// // for spans
+ /// match callsite_enabled() {
+ /// ALWAYS => on_span(),
+ /// SOMETIMES => if enabled() { on_span() },
+ /// NEVER => (),
+ /// }
+ /// // for events
+ /// match callsite_enabled() {
+ /// ALWAYS => on_event(),
+ /// SOMETIMES => if enabled() && event_enabled() { on_event() },
+ /// NEVER => (),
+ /// }
+ /// ```
+ ///
+ /// and an inverted filter defined as:
+ ///
+ /// ```ignore (pseudo-code)
+ /// // for spans
+ /// match callsite_enabled() {
+ /// ALWAYS => (),
+ /// SOMETIMES => if !enabled() { on_span() },
+ /// NEVER => on_span(),
+ /// }
+ /// // for events
+ /// match callsite_enabled() {
+ /// ALWAYS => (),
+ /// SOMETIMES => if !enabled() { on_event() },
+ /// NEVER => on_event(),
+ /// }
+ /// ```
+ ///
+ /// A proper inversion would do `!(enabled() && event_enabled())` (or
+ /// `!enabled() || !event_enabled()`), but because of the implicit `&&`
+ /// relation between `enabled` and `event_enabled`, it is difficult to
+ /// short circuit and not call the wrapped `event_enabled`.
+ ///
+ /// A combinator which remembers the result of `enabled` in order to call
+ /// `event_enabled` only when `enabled() == true` is possible, but requires
+ /// additional thread-local mutable state to support a very niche use case.
+ //
+ // Also, it'd mean the wrapped layer's `enabled()` always gets called and
+ // globally applied to events where it doesn't today, since we can't know
+ // what `event_enabled` will say until we have the event to call it with.
+ ///
+    /// [`Filter`]: crate::layer::Filter
+    /// [`enabled`]: crate::layer::Filter::enabled
+    /// [`event_enabled`]: crate::layer::Filter::event_enabled
+    /// [`callsite_enabled`]: crate::layer::Filter::callsite_enabled
+ fn not(self) -> combinator::Not<Self, S>
+ where
+ Self: Sized,
+ {
+ combinator::Not::new(self)
+ }
+
+ /// [Boxes] `self`, erasing its concrete type.
+ ///
+ /// This is equivalent to calling [`Box::new`], but in method form, so that
+ /// it can be used when chaining combinator methods.
+ ///
+ /// # Examples
+ ///
+ /// When different combinations of filters are used conditionally, they may
+ /// have different types. For example, the following code won't compile,
+ /// since the `if` and `else` clause produce filters of different types:
+ ///
+ /// ```compile_fail
+ /// use tracing_subscriber::{
+ /// filter::{filter_fn, LevelFilter, FilterExt},
+ /// prelude::*,
+ /// };
+ ///
+ /// let enable_bar_target: bool = // ...
+ /// # false;
+ ///
+ /// let filter = if enable_bar_target {
+ /// filter_fn(|meta| meta.target().starts_with("foo"))
+ /// // If `enable_bar_target` is true, add a `filter_fn` enabling
+ /// // spans and events with the target `bar`:
+ /// .or(filter_fn(|meta| meta.target().starts_with("bar")))
+ /// .and(LevelFilter::INFO)
+ /// } else {
+ /// filter_fn(|meta| meta.target().starts_with("foo"))
+ /// .and(LevelFilter::INFO)
+ /// };
+ ///
+ /// tracing_subscriber::registry()
+ /// .with(tracing_subscriber::fmt::layer().with_filter(filter))
+ /// .init();
+ /// ```
+ ///
+ /// By using `boxed`, the types of the two different branches can be erased,
+ /// so the assignment to the `filter` variable is valid (as both branches
+ /// have the type `Box<dyn Filter<S> + Send + Sync + 'static>`). The
+ /// following code *does* compile:
+ ///
+ /// ```
+ /// use tracing_subscriber::{
+ /// filter::{filter_fn, LevelFilter, FilterExt},
+ /// prelude::*,
+ /// };
+ ///
+ /// let enable_bar_target: bool = // ...
+ /// # false;
+ ///
+ /// let filter = if enable_bar_target {
+ /// filter_fn(|meta| meta.target().starts_with("foo"))
+ /// .or(filter_fn(|meta| meta.target().starts_with("bar")))
+ /// .and(LevelFilter::INFO)
+ /// // Boxing the filter erases its type, so both branches now
+ /// // have the same type.
+ /// .boxed()
+ /// } else {
+ /// filter_fn(|meta| meta.target().starts_with("foo"))
+ /// .and(LevelFilter::INFO)
+ /// .boxed()
+ /// };
+ ///
+ /// tracing_subscriber::registry()
+ /// .with(tracing_subscriber::fmt::layer().with_filter(filter))
+ /// .init();
+ /// ```
+ ///
+ /// [Boxes]: std::boxed
+ /// [`Box::new`]: std::boxed::Box::new
+ fn boxed(self) -> Box<dyn layer::Filter<S> + Send + Sync + 'static>
+ where
+ Self: Sized + Send + Sync + 'static,
+ {
+ Box::new(self)
+ }
+}
+
// === impl Filter ===
+
#[cfg(feature = "registry")]
#[cfg_attr(docsrs, doc(cfg(feature = "registry")))]
impl<S> layer::Filter<S> for LevelFilter {
@@ -179,38 +462,35 @@ impl<S> layer::Filter<S> for LevelFilter {
}
}
-impl<S> layer::Filter<S> for Arc<dyn layer::Filter<S> + Send + Sync + 'static> {
- #[inline]
- fn enabled(&self, meta: &Metadata<'_>, cx: &Context<'_, S>) -> bool {
- (**self).enabled(meta, cx)
- }
+macro_rules! filter_impl_body {
+ () => {
+ #[inline]
+ fn enabled(&self, meta: &Metadata<'_>, cx: &Context<'_, S>) -> bool {
+ self.deref().enabled(meta, cx)
+ }
- #[inline]
- fn callsite_enabled(&self, meta: &'static Metadata<'static>) -> Interest {
- (**self).callsite_enabled(meta)
- }
+ #[inline]
+ fn callsite_enabled(&self, meta: &'static Metadata<'static>) -> Interest {
+ self.deref().callsite_enabled(meta)
+ }
- #[inline]
- fn max_level_hint(&self) -> Option<LevelFilter> {
- (**self).max_level_hint()
- }
+ #[inline]
+ fn max_level_hint(&self) -> Option<LevelFilter> {
+ self.deref().max_level_hint()
+ }
+ };
}
-impl<S> layer::Filter<S> for Box<dyn layer::Filter<S> + Send + Sync + 'static> {
- #[inline]
- fn enabled(&self, meta: &Metadata<'_>, cx: &Context<'_, S>) -> bool {
- (**self).enabled(meta, cx)
- }
-
- #[inline]
- fn callsite_enabled(&self, meta: &'static Metadata<'static>) -> Interest {
- (**self).callsite_enabled(meta)
- }
+#[cfg(feature = "registry")]
+#[cfg_attr(docsrs, doc(cfg(feature = "registry")))]
+impl<S> layer::Filter<S> for Arc<dyn layer::Filter<S> + Send + Sync + 'static> {
+ filter_impl_body!();
+}
- #[inline]
- fn max_level_hint(&self) -> Option<LevelFilter> {
- (**self).max_level_hint()
- }
+#[cfg(feature = "registry")]
+#[cfg_attr(docsrs, doc(cfg(feature = "registry")))]
+impl<S> layer::Filter<S> for Box<dyn layer::Filter<S> + Send + Sync + 'static> {
+ filter_impl_body!();
}
// === impl Filtered ===
@@ -247,6 +527,78 @@ impl<L, F, S> Filtered<L, F, S> {
fn did_enable(&self, f: impl FnOnce()) {
FILTERING.with(|filtering| filtering.did_enable(self.id(), f))
}
+
+ /// Borrows the [`Filter`](crate::layer::Filter) used by this layer.
+ pub fn filter(&self) -> &F {
+ &self.filter
+ }
+
+ /// Mutably borrows the [`Filter`](crate::layer::Filter) used by this layer.
+ ///
+ /// When this layer can be mutably borrowed, this may be used to mutate the filter.
+ /// Generally, this will primarily be used with the
+ /// [`reload::Handle::modify`](crate::reload::Handle::modify) method.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # use tracing::info;
+ /// # use tracing_subscriber::{filter,fmt,reload,Registry,prelude::*};
+ /// # fn main() {
+ /// let filtered_layer = fmt::Layer::default().with_filter(filter::LevelFilter::WARN);
+ /// let (filtered_layer, reload_handle) = reload::Layer::new(filtered_layer);
+ /// #
+ /// # // specifying the Registry type is required
+ /// # let _: &reload::Handle<filter::Filtered<fmt::Layer<Registry>,
+ /// # filter::LevelFilter, Registry>,Registry>
+ /// # = &reload_handle;
+ /// #
+ /// info!("This will be ignored");
+ /// reload_handle.modify(|layer| *layer.filter_mut() = filter::LevelFilter::INFO);
+ /// info!("This will be logged");
+ /// # }
+ /// ```
+ pub fn filter_mut(&mut self) -> &mut F {
+ &mut self.filter
+ }
+
+ /// Borrows the inner [`Layer`] wrapped by this `Filtered` layer.
+ pub fn inner(&self) -> &L {
+ &self.layer
+ }
+
+ /// Mutably borrows the inner [`Layer`] wrapped by this `Filtered` layer.
+ ///
+ /// This method is primarily expected to be used with the
+ /// [`reload::Handle::modify`](crate::reload::Handle::modify) method.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # use tracing::info;
+ /// # use tracing_subscriber::{filter,fmt,reload,Registry,prelude::*};
+ /// # fn non_blocking<T: std::io::Write>(writer: T) -> (fn() -> std::io::Stdout) {
+ /// # std::io::stdout
+ /// # }
+ /// # fn main() {
+ /// let filtered_layer = fmt::layer().with_writer(non_blocking(std::io::stderr())).with_filter(filter::LevelFilter::INFO);
+ /// let (filtered_layer, reload_handle) = reload::Layer::new(filtered_layer);
+ /// #
+ /// # // specifying the Registry type is required
+ /// # let _: &reload::Handle<filter::Filtered<fmt::Layer<Registry, _, _, fn() -> std::io::Stdout>,
+ /// # filter::LevelFilter, Registry>, Registry>
+ /// # = &reload_handle;
+ /// #
+ /// info!("This will be logged to stderr");
+ /// reload_handle.modify(|layer| *layer.inner_mut().writer_mut() = non_blocking(std::io::stdout()));
+ /// info!("This will be logged to stdout");
+ /// # }
+ /// ```
+ ///
+ /// [subscriber]: Subscribe
+ pub fn inner_mut(&mut self) -> &mut L {
+ &mut self.layer
+ }
}
impl<S, L, F> Layer<S> for Filtered<L, F, S>
@@ -255,6 +607,10 @@ where
F: layer::Filter<S> + 'static,
L: Layer<S>,
{
+ fn on_register_dispatch(&self, collector: &Dispatch) {
+ self.layer.on_register_dispatch(collector);
+ }
+
fn on_layer(&mut self, subscriber: &mut S) {
self.id = MagicPlfDowncastMarker(subscriber.register_filter());
self.layer.on_layer(subscriber);
@@ -322,7 +678,9 @@ where
fn on_new_span(&self, attrs: &span::Attributes<'_>, id: &span::Id, cx: Context<'_, S>) {
self.did_enable(|| {
- self.layer.on_new_span(attrs, id, cx.with_filter(self.id()));
+ let cx = cx.with_filter(self.id());
+ self.filter.on_new_span(attrs, id, cx.clone());
+ self.layer.on_new_span(attrs, id, cx);
})
}
@@ -333,6 +691,7 @@ where
fn on_record(&self, span: &span::Id, values: &span::Record<'_>, cx: Context<'_, S>) {
if let Some(cx) = cx.if_enabled_for(span, self.id()) {
+ self.filter.on_record(span, values, cx.clone());
self.layer.on_record(span, values, cx)
}
}
@@ -345,6 +704,22 @@ where
}
}
+ fn event_enabled(&self, event: &Event<'_>, cx: Context<'_, S>) -> bool {
+ let cx = cx.with_filter(self.id());
+ let enabled = FILTERING
+ .with(|filtering| filtering.and(self.id(), || self.filter.event_enabled(event, &cx)));
+
+ if enabled {
+ // If the filter enabled this event, ask the wrapped subscriber if
+ // _it_ wants it --- it might have a global filter.
+ self.layer.event_enabled(event, cx)
+ } else {
+ // Otherwise, return `true`. See the comment in `enabled` for why this
+ // is necessary.
+ true
+ }
+ }
+
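The `event_enabled` hook above gives per-layer filters a second, per-event pass on top of the metadata-based `enabled` check. A sketch of a custom `Filter` using it (assumes the `registry` feature; `NoisyTargets` and the target prefix are hypothetical names):

```rust
use tracing::{Event, Level, Metadata};
use tracing_subscriber::layer::{Context, Filter};

struct NoisyTargets;

impl<S> Filter<S> for NoisyTargets {
    fn enabled(&self, meta: &Metadata<'_>, _cx: &Context<'_, S>) -> bool {
        // First pass: cheap, metadata-only decision (everything up to DEBUG).
        meta.level() <= &Level::DEBUG
    }

    fn event_enabled(&self, event: &Event<'_>, _cx: &Context<'_, S>) -> bool {
        // Second pass: veto individual events even if `enabled` said yes;
        // `Filtered::event_enabled` combines both results.
        !event.metadata().target().starts_with("noisy_dep")
    }
}
```

Such a filter would be attached with `.with_filter(NoisyTargets)` like any other per-layer filter.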
fn on_event(&self, event: &Event<'_>, cx: Context<'_, S>) {
self.did_enable(|| {
self.layer.on_event(event, cx.with_filter(self.id()));
@@ -353,19 +728,22 @@ where
fn on_enter(&self, id: &span::Id, cx: Context<'_, S>) {
if let Some(cx) = cx.if_enabled_for(id, self.id()) {
- self.layer.on_enter(id, cx)
+ self.filter.on_enter(id, cx.clone());
+ self.layer.on_enter(id, cx);
}
}
fn on_exit(&self, id: &span::Id, cx: Context<'_, S>) {
if let Some(cx) = cx.if_enabled_for(id, self.id()) {
- self.layer.on_exit(id, cx)
+ self.filter.on_exit(id, cx.clone());
+ self.layer.on_exit(id, cx);
}
}
fn on_close(&self, id: span::Id, cx: Context<'_, S>) {
if let Some(cx) = cx.if_enabled_for(&id, self.id()) {
- self.layer.on_close(id, cx)
+ self.filter.on_close(id.clone(), cx.clone());
+ self.layer.on_close(id, cx);
}
}
@@ -544,6 +922,10 @@ impl fmt::Binary for FilterId {
}
}
+// === impl FilterExt ===
+
+impl<F, S> FilterExt<S> for F where F: layer::Filter<S> {}
+
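The blanket impl above is what makes the `FilterExt` combinators (`and`, `or`, `not`, `boxed`) available on any per-layer filter. A small sketch combining a level filter with a `filter_fn` (assumes the `registry` and `fmt` features; `my_crate` is a placeholder target):

```rust
use tracing_subscriber::filter::{filter_fn, FilterExt, LevelFilter};
use tracing_subscriber::prelude::*;

fn main() {
    // Enable everything at INFO and above, *or* anything from `my_crate`.
    let filter = LevelFilter::INFO
        .or(filter_fn(|meta| meta.target().starts_with("my_crate")));

    tracing_subscriber::registry()
        .with(tracing_subscriber::fmt::layer().with_filter(filter))
        .init();
}
```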
// === impl FilterMap ===
impl FilterMap {
@@ -709,6 +1091,14 @@ impl FilterState {
}
}
+ /// Run a second filtering pass, e.g. for `Layer::event_enabled`.
+ fn and(&self, filter: FilterId, f: impl FnOnce() -> bool) -> bool {
+ let map = self.enabled.get();
+ let enabled = map.is_enabled(filter) && f();
+ self.enabled.set(map.set(filter, enabled));
+ enabled
+ }
+
/// Clears the current in-progress filter state.
///
/// This resets the [`FilterMap`] and current [`Interest`] as well as
diff --git a/vendor/tracing-subscriber-0.3.3/src/filter/level.rs b/vendor/tracing-subscriber/src/filter/level.rs
index 0fa601260..0fa601260 100644
--- a/vendor/tracing-subscriber-0.3.3/src/filter/level.rs
+++ b/vendor/tracing-subscriber/src/filter/level.rs
diff --git a/vendor/tracing-subscriber-0.3.3/src/filter/mod.rs b/vendor/tracing-subscriber/src/filter/mod.rs
index 000a27195..000a27195 100644
--- a/vendor/tracing-subscriber-0.3.3/src/filter/mod.rs
+++ b/vendor/tracing-subscriber/src/filter/mod.rs
diff --git a/vendor/tracing-subscriber-0.3.3/src/filter/targets.rs b/vendor/tracing-subscriber/src/filter/targets.rs
index e0c7fcf82..e1407114b 100644
--- a/vendor/tracing-subscriber-0.3.3/src/filter/targets.rs
+++ b/vendor/tracing-subscriber/src/filter/targets.rs
@@ -20,7 +20,7 @@ use core::{
slice,
str::FromStr,
};
-use tracing_core::{Interest, Metadata, Subscriber};
+use tracing_core::{Interest, Level, Metadata, Subscriber};
/// A filter that enables or disables spans and events based on their [target]
/// and [level].
@@ -111,7 +111,7 @@ use tracing_core::{Interest, Metadata, Subscriber};
/// by the user at runtime.
///
/// The `Targets` filter can be used as a [per-layer filter][plf] *and* as a
-/// [global filter]:
+/// [global filter][global]:
///
/// ```rust
/// use tracing_subscriber::{
@@ -277,6 +277,62 @@ impl Targets {
self
}
+ /// Returns the default level for this filter, if one is set.
+ ///
+ /// The default level is used to filter any spans or events with targets
+ /// that do not match any of the configured set of prefixes.
+ ///
+ /// The default level can be set for a filter either by using
+ /// [`with_default`](Self::with_default) or when parsing from a filter string that includes a
+ /// level without a target (e.g. `"trace"`).
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use tracing_subscriber::filter::{LevelFilter, Targets};
+ ///
+ /// let filter = Targets::new().with_default(LevelFilter::INFO);
+ /// assert_eq!(filter.default_level(), Some(LevelFilter::INFO));
+ ///
+ /// let filter: Targets = "info".parse().unwrap();
+ /// assert_eq!(filter.default_level(), Some(LevelFilter::INFO));
+ /// ```
+ ///
+ /// The default level is `None` if no default is set:
+ ///
+ /// ```
+ /// use tracing_subscriber::filter::Targets;
+ ///
+ /// let filter = Targets::new();
+ /// assert_eq!(filter.default_level(), None);
+ ///
+ /// let filter: Targets = "my_crate=info".parse().unwrap();
+ /// assert_eq!(filter.default_level(), None);
+ /// ```
+ ///
+ /// Note that an unset default level (`None`) behaves like [`LevelFilter::OFF`] when the filter is
+ /// used. However, the default can also be set to `OFF` explicitly, and distinguishing the two
+ /// cases can be useful, for example when merging multiple `Targets`.
+ ///
+ /// ```
+ /// use tracing_subscriber::filter::{LevelFilter, Targets};
+ ///
+ /// let filter = Targets::new().with_default(LevelFilter::OFF);
+ /// assert_eq!(filter.default_level(), Some(LevelFilter::OFF));
+ ///
+ /// let filter: Targets = "off".parse().unwrap();
+ /// assert_eq!(filter.default_level(), Some(LevelFilter::OFF));
+ /// ```
+ pub fn default_level(&self) -> Option<LevelFilter> {
+ self.0.directives().into_iter().find_map(|d| {
+ if d.target.is_none() {
+ Some(d.level)
+ } else {
+ None
+ }
+ })
+ }
+
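Following up on the note about merging multiple `Targets` above: a hypothetical helper showing why `default_level()` usefully distinguishes an explicit `OFF` from an unset default. The `Targets` iterator yields only per-target directives, so the default has to be carried over by hand:

```rust
use tracing_subscriber::filter::Targets;

/// Hypothetical helper: merge `overrides` into `base`, letting an explicitly
/// set default level in `overrides` win over the one in `base`.
fn merge(mut base: Targets, overrides: Targets) -> Targets {
    // Capture the defaults before `overrides` is consumed below.
    let default = overrides.default_level().or(base.default_level());
    // Append the per-target directives from `overrides` to `base`.
    base.extend(overrides);
    match default {
        Some(level) => base.with_default(level),
        None => base,
    }
}
```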
/// Returns an iterator over the [target]-[`LevelFilter`] pairs in this filter.
///
/// The order of iteration is undefined.
@@ -313,6 +369,35 @@ impl Targets {
Interest::never()
}
}
+
+ /// Returns whether a [target]-[`Level`] pair would be enabled
+ /// by this `Targets`.
+ ///
+ /// This method can be used with [`module_path!`] from `std` as the target
+ /// in order to emulate the behavior of the [`tracing::event!`] and [`tracing::span!`]
+ /// macros.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use tracing_subscriber::filter::{Targets, LevelFilter};
+ /// use tracing_core::Level;
+ ///
+ /// let filter = Targets::new()
+ /// .with_target("my_crate", Level::INFO)
+ /// .with_target("my_crate::interesting_module", Level::DEBUG);
+ ///
+ /// assert!(filter.would_enable("my_crate", &Level::INFO));
+ /// assert!(!filter.would_enable("my_crate::interesting_module", &Level::TRACE));
+ /// ```
+ ///
+ /// [target]: tracing_core::Metadata::target
+ /// [`module_path!`]: std::module_path!
+ pub fn would_enable(&self, target: &str, level: &Level) -> bool {
+ // "Correct" to call because `Targets` only produces `StaticDirective`'s with NO
+ // fields
+ self.0.target_enabled(target, level)
+ }
}
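As the `would_enable` docs mention `module_path!`, here is a tiny sketch of the intended pattern (the function name is illustrative):

```rust
use tracing_core::Level;
use tracing_subscriber::filter::Targets;

/// Would a DEBUG event emitted from this module be enabled by `filter`?
fn debug_enabled_here(filter: &Targets) -> bool {
    filter.would_enable(module_path!(), &Level::DEBUG)
}
```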
impl<T, L> Extend<(T, L)> for Targets
@@ -657,6 +742,21 @@ mod tests {
}
#[test]
+ fn targets_default_level() {
+ let filter = expect_parse("crate1::mod1=error,crate1::mod2,crate2=debug,crate3=off");
+ assert_eq!(filter.default_level(), None);
+
+ let filter = expect_parse("crate1::mod1=error,crate1::mod2,crate2=debug,crate3=off")
+ .with_default(LevelFilter::OFF);
+ assert_eq!(filter.default_level(), Some(LevelFilter::OFF));
+
+ let filter = expect_parse("crate1::mod1=error,crate1::mod2,crate2=debug,crate3=off")
+ .with_default(LevelFilter::OFF)
+ .with_default(LevelFilter::INFO);
+ assert_eq!(filter.default_level(), Some(LevelFilter::INFO));
+ }
+
+ #[test]
// `println!` is only available with `libstd`.
#[cfg(feature = "std")]
fn size_of_filters() {
diff --git a/vendor/tracing-subscriber-0.3.3/src/fmt/fmt_layer.rs b/vendor/tracing-subscriber/src/fmt/fmt_layer.rs
index 0e0d5e0eb..6e4e2ac0b 100644
--- a/vendor/tracing-subscriber-0.3.3/src/fmt/fmt_layer.rs
+++ b/vendor/tracing-subscriber/src/fmt/fmt_layer.rs
@@ -56,7 +56,7 @@ use tracing_core::{
/// # tracing::subscriber::set_global_default(subscriber).unwrap();
/// ```
///
-/// [`Layer`]: ../layer/trait.Layer.html
+/// [`Layer`]: super::layer::Layer
#[cfg_attr(docsrs, doc(cfg(all(feature = "fmt", feature = "std"))))]
#[derive(Debug)]
pub struct Layer<
@@ -70,11 +70,12 @@ pub struct Layer<
fmt_event: E,
fmt_span: format::FmtSpanConfig,
is_ansi: bool,
- _inner: PhantomData<S>,
+ log_internal_errors: bool,
+ _inner: PhantomData<fn(S)>,
}
impl<S> Layer<S> {
- /// Returns a new [`Layer`](struct.Layer.html) with the default configuration.
+ /// Returns a new [`Layer`][self::Layer] with the default configuration.
pub fn new() -> Self {
Self::default()
}
@@ -87,8 +88,8 @@ where
N: for<'writer> FormatFields<'writer> + 'static,
W: for<'writer> MakeWriter<'writer> + 'static,
{
- /// Sets the [event formatter][`FormatEvent`] that the layer will use to
- /// format events.
+ /// Sets the [event formatter][`FormatEvent`] that the layer being built will
+ /// use to format events.
///
/// The event formatter may be any type implementing the [`FormatEvent`]
/// trait, which is implemented for all functions taking a [`FmtContext`], a
@@ -108,7 +109,7 @@ where
/// ```
/// [`FormatEvent`]: format::FormatEvent
/// [`Event`]: tracing::Event
- /// [`Writer`]: crate::format::Writer
+ /// [`Writer`]: format::Writer
pub fn event_format<E2>(self, e: E2) -> Layer<S, N, E2, W>
where
E2: FormatEvent<S, N> + 'static,
@@ -119,6 +120,37 @@ where
fmt_span: self.fmt_span,
make_writer: self.make_writer,
is_ansi: self.is_ansi,
+ log_internal_errors: self.log_internal_errors,
+ _inner: self._inner,
+ }
+ }
+
+ /// Updates the event formatter by applying a function to the existing event formatter.
+ ///
+ /// This sets the event formatter that the layer being built will use to format events.
+ ///
+ /// # Examples
+ ///
+ /// Updating an event formatter:
+ ///
+ /// ```rust
+ /// let layer = tracing_subscriber::fmt::layer()
+ /// .map_event_format(|e| e.compact());
+ /// # // this is necessary for type inference.
+ /// # use tracing_subscriber::Layer as _;
+ /// # let _ = layer.with_subscriber(tracing_subscriber::registry::Registry::default());
+ /// ```
+ pub fn map_event_format<E2>(self, f: impl FnOnce(E) -> E2) -> Layer<S, N, E2, W>
+ where
+ E2: FormatEvent<S, N> + 'static,
+ {
+ Layer {
+ fmt_fields: self.fmt_fields,
+ fmt_event: f(self.fmt_event),
+ fmt_span: self.fmt_span,
+ make_writer: self.make_writer,
+ is_ansi: self.is_ansi,
+ log_internal_errors: self.log_internal_errors,
_inner: self._inner,
}
}
@@ -126,7 +158,7 @@ where
// This needs to be a separate impl block because they place different bounds on the type parameters.
impl<S, N, E, W> Layer<S, N, E, W> {
- /// Sets the [`MakeWriter`] that the [`Layer`] being built will use to write events.
+ /// Sets the [`MakeWriter`] that the layer being built will use to write events.
///
/// # Examples
///
@@ -142,9 +174,6 @@ impl<S, N, E, W> Layer<S, N, E, W> {
/// # use tracing_subscriber::Layer as _;
/// # let _ = layer.with_subscriber(tracing_subscriber::registry::Registry::default());
/// ```
- ///
- /// [`MakeWriter`]: ../fmt/trait.MakeWriter.html
- /// [`Layer`]: ../layer/trait.Layer.html
pub fn with_writer<W2>(self, make_writer: W2) -> Layer<S, N, E, W2>
where
W2: for<'writer> MakeWriter<'writer> + 'static,
@@ -154,12 +183,63 @@ impl<S, N, E, W> Layer<S, N, E, W> {
fmt_event: self.fmt_event,
fmt_span: self.fmt_span,
is_ansi: self.is_ansi,
+ log_internal_errors: self.log_internal_errors,
make_writer,
_inner: self._inner,
}
}
- /// Configures the subscriber to support [`libtest`'s output capturing][capturing] when used in
+ /// Borrows the [writer] for this [`Layer`].
+ ///
+ /// [writer]: MakeWriter
+ pub fn writer(&self) -> &W {
+ &self.make_writer
+ }
+
+ /// Mutably borrows the [writer] for this [`Layer`].
+ ///
+ /// This method is primarily expected to be used with the
+ /// [`reload::Handle::modify`](crate::reload::Handle::modify) method.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # use tracing::info;
+ /// # use tracing_subscriber::{fmt,reload,Registry,prelude::*};
+ /// # fn non_blocking<T: std::io::Write>(writer: T) -> (fn() -> std::io::Stdout) {
+ /// # std::io::stdout
+ /// # }
+ /// # fn main() {
+ /// let layer = fmt::layer().with_writer(non_blocking(std::io::stderr()));
+ /// let (layer, reload_handle) = reload::Layer::new(layer);
+ /// #
+ /// # // specifying the Registry type is required
+ /// # let _: &reload::Handle<fmt::Layer<Registry, _, _, _>, Registry> = &reload_handle;
+ /// #
+ /// info!("This will be logged to stderr");
+ /// reload_handle.modify(|layer| *layer.writer_mut() = non_blocking(std::io::stdout()));
+ /// info!("This will be logged to stdout");
+ /// # }
+ /// ```
+ ///
+ /// [writer]: MakeWriter
+ pub fn writer_mut(&mut self) -> &mut W {
+ &mut self.make_writer
+ }
+
+ /// Sets whether this layer should use ANSI terminal formatting
+ /// escape codes (such as colors).
+ ///
+ /// This method is primarily expected to be used with the
+ /// [`reload::Handle::modify`](crate::reload::Handle::modify) method, for example to
+ /// disable ANSI output when the writer is changed to a destination that does not
+ /// support escape codes (such as a file).
+ #[cfg(feature = "ansi")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "ansi")))]
+ pub fn set_ansi(&mut self, ansi: bool) {
+ self.is_ansi = ansi;
+ }
+
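A sketch of the intended `set_ansi` + `reload` flow (assumes the `ansi`, `fmt`, and `registry` features):

```rust
use tracing_subscriber::{fmt, prelude::*, reload};

fn main() {
    let (fmt_layer, handle) = reload::Layer::new(fmt::layer().with_ansi(true));
    tracing_subscriber::registry().with(fmt_layer).init();

    tracing::info!("colored (on a terminal)");

    // Later, e.g. when output is redirected to a file, turn colors off
    // in place without rebuilding the subscriber.
    handle
        .modify(|layer| layer.set_ansi(false))
        .expect("reload handle should still be valid");

    tracing::info!("plain");
}
```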
+ /// Configures the layer to support [`libtest`'s output capturing][capturing] when used in
/// unit tests.
///
/// See [`TestWriter`] for additional details.
@@ -180,13 +260,14 @@ impl<S, N, E, W> Layer<S, N, E, W> {
/// ```
/// [capturing]:
/// https://doc.rust-lang.org/book/ch11-02-running-tests.html#showing-function-output
- /// [`TestWriter`]: writer/struct.TestWriter.html
+ /// [`TestWriter`]: super::writer::TestWriter
pub fn with_test_writer(self) -> Layer<S, N, E, TestWriter> {
Layer {
fmt_fields: self.fmt_fields,
fmt_event: self.fmt_event,
fmt_span: self.fmt_span,
is_ansi: self.is_ansi,
+ log_internal_errors: self.log_internal_errors,
make_writer: TestWriter::default(),
_inner: self._inner,
}
@@ -201,6 +282,58 @@ impl<S, N, E, W> Layer<S, N, E, W> {
..self
}
}
+
+ /// Sets whether to write errors from [`FormatEvent`] to the writer.
+ /// Defaults to true.
+ ///
+ /// By default, `fmt::Layer` will write any `FormatEvent`-internal errors to
+ /// the writer. These errors are unlikely and will only occur if there is a
+ /// bug in the `FormatEvent` implementation or its dependencies.
+ ///
+ /// If writing to the writer fails, the error message is printed to stderr
+ /// as a fallback.
+ ///
+ /// [`FormatEvent`]: crate::fmt::FormatEvent
+ pub fn log_internal_errors(self, log_internal_errors: bool) -> Self {
+ Self {
+ log_internal_errors,
+ ..self
+ }
+ }
+
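A minimal sketch of opting out of the fallback reporting described above (the layer method is the one added in this hunk; `fmt` and `registry` features assumed):

```rust
use tracing_subscriber::prelude::*;

fn main() {
    // Keep output strictly to what the formatter produces: suppress the
    // "unable to format / unable to write" fallback messages.
    tracing_subscriber::registry()
        .with(tracing_subscriber::fmt::layer().log_internal_errors(false))
        .init();
}
```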
+ /// Updates the [`MakeWriter`] by applying a function to the existing [`MakeWriter`].
+ ///
+ /// This sets the [`MakeWriter`] that the layer being built will use to write events.
+ ///
+ /// # Examples
+ ///
+ /// Redirect output to stderr if level is <= WARN:
+ ///
+ /// ```rust
+ /// use tracing::Level;
+ /// use tracing_subscriber::fmt::{self, writer::MakeWriterExt};
+ ///
+ /// let stderr = std::io::stderr.with_max_level(Level::WARN);
+ /// let layer = fmt::layer()
+ /// .map_writer(move |w| stderr.or_else(w));
+ /// # // this is necessary for type inference.
+ /// # use tracing_subscriber::Layer as _;
+ /// # let _ = layer.with_subscriber(tracing_subscriber::registry::Registry::default());
+ /// ```
+ pub fn map_writer<W2>(self, f: impl FnOnce(W) -> W2) -> Layer<S, N, E, W2>
+ where
+ W2: for<'writer> MakeWriter<'writer> + 'static,
+ {
+ Layer {
+ fmt_fields: self.fmt_fields,
+ fmt_event: self.fmt_event,
+ fmt_span: self.fmt_span,
+ is_ansi: self.is_ansi,
+ log_internal_errors: self.log_internal_errors,
+ make_writer: f(self.make_writer),
+ _inner: self._inner,
+ }
+ }
}
impl<S, N, L, T, W> Layer<S, N, format::Format<L, T>, W>
@@ -228,6 +361,7 @@ where
fmt_span: self.fmt_span,
make_writer: self.make_writer,
is_ansi: self.is_ansi,
+ log_internal_errors: self.log_internal_errors,
_inner: self._inner,
}
}
@@ -240,6 +374,7 @@ where
fmt_span: self.fmt_span.without_time(),
make_writer: self.make_writer,
is_ansi: self.is_ansi,
+ log_internal_errors: self.log_internal_errors,
_inner: self._inner,
}
}
@@ -284,7 +419,7 @@ where
/// `Layer`s added to this subscriber.
///
/// [lifecycle]: https://docs.rs/tracing/latest/tracing/span/index.html#the-span-lifecycle
- /// [time]: #method.without_time
+ /// [time]: Layer::without_time()
pub fn with_span_events(self, kind: FmtSpan) -> Self {
Layer {
fmt_span: self.fmt_span.with_kind(kind),
@@ -299,6 +434,30 @@ where
..self
}
}
+ /// Sets whether or not an event's [source code file path][file] is
+ /// displayed.
+ ///
+ /// [file]: tracing_core::Metadata::file
+ pub fn with_file(self, display_filename: bool) -> Layer<S, N, format::Format<L, T>, W> {
+ Layer {
+ fmt_event: self.fmt_event.with_file(display_filename),
+ ..self
+ }
+ }
+
+ /// Sets whether or not an event's [source code line number][line] is
+ /// displayed.
+ ///
+ /// [line]: tracing_core::Metadata::line
+ pub fn with_line_number(
+ self,
+ display_line_number: bool,
+ ) -> Layer<S, N, format::Format<L, T>, W> {
+ Layer {
+ fmt_event: self.fmt_event.with_line_number(display_line_number),
+ ..self
+ }
+ }
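A sketch of enabling the new source-location fields on a fmt layer (assumes the `fmt` and `registry` features):

```rust
use tracing_subscriber::prelude::*;

fn main() {
    tracing_subscriber::registry()
        .with(
            tracing_subscriber::fmt::layer()
                .with_file(true)
                .with_line_number(true),
        )
        .init();

    tracing::info!("annotated with its source file and line number");
}
```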
/// Sets whether or not an event's level is displayed.
pub fn with_level(self, display_level: bool) -> Layer<S, N, format::Format<L, T>, W> {
@@ -309,9 +468,9 @@ where
}
/// Sets whether or not the [thread ID] of the current thread is displayed
- /// when formatting events
+ /// when formatting events.
///
- /// [thread ID]: https://doc.rust-lang.org/stable/std/thread/struct.ThreadId.html
+ /// [thread ID]: std::thread::ThreadId
pub fn with_thread_ids(self, display_thread_ids: bool) -> Layer<S, N, format::Format<L, T>, W> {
Layer {
fmt_event: self.fmt_event.with_thread_ids(display_thread_ids),
@@ -320,9 +479,9 @@ where
}
/// Sets whether or not the [name] of the current thread is displayed
- /// when formatting events
+ /// when formatting events.
///
- /// [name]: https://doc.rust-lang.org/stable/std/thread/index.html#naming-threads
+ /// [name]: std::thread#naming-threads
pub fn with_thread_names(
self,
display_thread_names: bool,
@@ -333,7 +492,7 @@ where
}
}
- /// Sets the layer being built to use a [less verbose formatter](../fmt/format/struct.Compact.html).
+ /// Sets the layer being built to use a [less verbose formatter][super::format::Compact].
pub fn compact(self) -> Layer<S, N, format::Format<format::Compact, T>, W>
where
N: for<'writer> FormatFields<'writer> + 'static,
@@ -344,6 +503,7 @@ where
fmt_span: self.fmt_span,
make_writer: self.make_writer,
is_ansi: self.is_ansi,
+ log_internal_errors: self.log_internal_errors,
_inner: self._inner,
}
}
@@ -358,11 +518,12 @@ where
fmt_span: self.fmt_span,
make_writer: self.make_writer,
is_ansi: self.is_ansi,
+ log_internal_errors: self.log_internal_errors,
_inner: self._inner,
}
}
- /// Sets the layer being built to use a [JSON formatter](../fmt/format/struct.Json.html).
+ /// Sets the layer being built to use a [JSON formatter][super::format::Json].
///
/// The full format includes fields from all entered spans.
///
@@ -377,7 +538,7 @@ where
/// - [`Layer::flatten_event`] can be used to enable flattening event fields into the root
/// object.
///
- /// [`Layer::flatten_event`]: #method.flatten_event
+ /// [`Layer::flatten_event`]: Layer::flatten_event()
#[cfg(feature = "json")]
#[cfg_attr(docsrs, doc(cfg(feature = "json")))]
pub fn json(self) -> Layer<S, format::JsonFields, format::Format<format::Json, T>, W> {
@@ -388,6 +549,7 @@ where
make_writer: self.make_writer,
// always disable ANSI escapes in JSON mode!
is_ansi: false,
+ log_internal_errors: self.log_internal_errors,
_inner: self._inner,
}
}
@@ -398,7 +560,7 @@ where
impl<S, T, W> Layer<S, format::JsonFields, format::Format<format::Json, T>, W> {
/// Sets the JSON layer being built to flatten event metadata.
///
- /// See [`format::Json`](../fmt/format/struct.Json.html)
+ /// See [`format::Json`][super::format::Json]
pub fn flatten_event(
self,
flatten_event: bool,
@@ -413,7 +575,7 @@ impl<S, T, W> Layer<S, format::JsonFields, format::Format<format::Json, T>, W> {
/// Sets whether or not the formatter will include the current span in
/// formatted events.
///
- /// See [`format::Json`](../fmt/format/struct.Json.html)
+ /// See [`format::Json`][super::format::Json]
pub fn with_current_span(
self,
display_current_span: bool,
@@ -428,7 +590,7 @@ impl<S, T, W> Layer<S, format::JsonFields, format::Format<format::Json, T>, W> {
/// Sets whether or not the formatter will include a list (from root to leaf)
/// of all currently entered spans in formatted events.
///
- /// See [`format::Json`](../fmt/format/struct.Json.html)
+ /// See [`format::Json`][super::format::Json]
pub fn with_span_list(
self,
display_span_list: bool,
@@ -454,6 +616,38 @@ impl<S, N, E, W> Layer<S, N, E, W> {
fmt_span: self.fmt_span,
make_writer: self.make_writer,
is_ansi: self.is_ansi,
+ log_internal_errors: self.log_internal_errors,
+ _inner: self._inner,
+ }
+ }
+
+ /// Updates the field formatter by applying a function to the existing field formatter.
+ ///
+ /// This sets the field formatter that the layer being built will use to record fields.
+ ///
+ /// # Examples
+ ///
+ /// Updating a field formatter:
+ ///
+ /// ```rust
+ /// use tracing_subscriber::field::MakeExt;
+ /// let layer = tracing_subscriber::fmt::layer()
+ /// .map_fmt_fields(|f| f.debug_alt());
+ /// # // this is necessary for type inference.
+ /// # use tracing_subscriber::Layer as _;
+ /// # let _ = layer.with_subscriber(tracing_subscriber::registry::Registry::default());
+ /// ```
+ pub fn map_fmt_fields<N2>(self, f: impl FnOnce(N) -> N2) -> Layer<S, N2, E, W>
+ where
+ N2: for<'writer> FormatFields<'writer> + 'static,
+ {
+ Layer {
+ fmt_event: self.fmt_event,
+ fmt_fields: f(self.fmt_fields),
+ fmt_span: self.fmt_span,
+ make_writer: self.make_writer,
+ is_ansi: self.is_ansi,
+ log_internal_errors: self.log_internal_errors,
_inner: self._inner,
}
}
@@ -467,6 +661,7 @@ impl<S> Default for Layer<S> {
fmt_span: format::FmtSpanConfig::default(),
make_writer: io::stdout,
is_ansi: cfg!(feature = "ansi"),
+ log_internal_errors: false,
_inner: PhantomData,
}
}
@@ -497,7 +692,7 @@ where
/// formatters are in use, each can store its own formatted representation
/// without conflicting.
///
-/// [extensions]: ../registry/struct.Extensions.html
+/// [extensions]: crate::registry::Extensions
#[derive(Default)]
pub struct FormattedFields<E: ?Sized> {
_format_fields: PhantomData<fn(E)>,
@@ -586,6 +781,11 @@ where
{
fields.was_ansi = self.is_ansi;
extensions.insert(fields);
+ } else {
+ eprintln!(
+ "[tracing-subscriber] Unable to format the following event, ignoring: {:?}",
+ attrs
+ );
}
}
@@ -732,7 +932,20 @@ where
.is_ok()
{
let mut writer = self.make_writer.make_writer_for(event.metadata());
- let _ = io::Write::write_all(&mut writer, buf.as_bytes());
+ let res = io::Write::write_all(&mut writer, buf.as_bytes());
+ if self.log_internal_errors {
+ if let Err(e) = res {
+ eprintln!("[tracing-subscriber] Unable to write an event to the Writer for this Subscriber! Error: {}\n", e);
+ }
+ }
+ } else if self.log_internal_errors {
+ let err_msg = format!("Unable to format the following event. Name: {}; Fields: {:?}\n",
+ event.metadata().name(), event.fields());
+ let mut writer = self.make_writer.make_writer_for(event.metadata());
+ let res = io::Write::write_all(&mut writer, err_msg.as_bytes());
+ if let Err(e) = res {
+ eprintln!("[tracing-subscriber] Unable to write an \"event formatting error\" to the Writer for this Subscriber! Error: {}\n", e);
+ }
}
buf.clear();
@@ -821,7 +1034,7 @@ where
/// If this returns `None`, then no span exists for that ID (either it has
/// closed or the ID is invalid).
///
- /// [stored data]: ../registry/struct.SpanRef.html
+ /// [stored data]: crate::registry::SpanRef
#[inline]
pub fn span(&self, id: &Id) -> Option<SpanRef<'_, S>>
where
@@ -844,7 +1057,7 @@ where
///
/// If this returns `None`, then we are not currently within a span.
///
- /// [stored data]: ../registry/struct.SpanRef.html
+ /// [stored data]: crate::registry::SpanRef
#[inline]
pub fn lookup_current(&self) -> Option<SpanRef<'_, S>>
where
@@ -1030,6 +1243,60 @@ mod test {
}
#[test]
+ fn format_error_print_to_stderr() {
+ struct AlwaysError;
+
+ impl std::fmt::Debug for AlwaysError {
+ fn fmt(&self, _f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ Err(std::fmt::Error)
+ }
+ }
+
+ let make_writer = MockMakeWriter::default();
+ let subscriber = crate::fmt::Subscriber::builder()
+ .with_writer(make_writer.clone())
+ .with_level(false)
+ .with_ansi(false)
+ .with_timer(MockTime)
+ .finish();
+
+ with_default(subscriber, || {
+ tracing::info!(?AlwaysError);
+ });
+ let actual = sanitize_timings(make_writer.get_string());
+
+ // Only assert the start because the line number and callsite may change.
+ let expected = concat!("Unable to format the following event. Name: event ", file!(), ":");
+ assert!(actual.as_str().starts_with(expected), "\nactual = {}\nshould start with expected = {}\n", actual, expected);
+ }
+
+ #[test]
+ fn format_error_ignore_if_log_internal_errors_is_false() {
+ struct AlwaysError;
+
+ impl std::fmt::Debug for AlwaysError {
+ fn fmt(&self, _f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ Err(std::fmt::Error)
+ }
+ }
+
+ let make_writer = MockMakeWriter::default();
+ let subscriber = crate::fmt::Subscriber::builder()
+ .with_writer(make_writer.clone())
+ .with_level(false)
+ .with_ansi(false)
+ .with_timer(MockTime)
+ .log_internal_errors(false)
+ .finish();
+
+ with_default(subscriber, || {
+ tracing::info!(?AlwaysError);
+ });
+ let actual = sanitize_timings(make_writer.get_string());
+ assert_eq!("", actual.as_str());
+ }
+
+ #[test]
fn synthesize_span_none() {
let make_writer = MockMakeWriter::default();
let subscriber = crate::fmt::Subscriber::builder()
diff --git a/vendor/tracing-subscriber-0.3.3/src/fmt/format/json.rs b/vendor/tracing-subscriber/src/fmt/format/json.rs
index cc86f03c7..c2f4d3755 100644
--- a/vendor/tracing-subscriber-0.3.3/src/fmt/format/json.rs
+++ b/vendor/tracing-subscriber/src/fmt/format/json.rs
@@ -23,25 +23,42 @@ use tracing_serde::AsSerde;
#[cfg(feature = "tracing-log")]
use tracing_log::NormalizeEvent;
-/// Marker for `Format` that indicates that the verbose JSON log format should be used.
+/// Marker for [`Format`] that indicates that the newline-delimited JSON log
+/// format should be used.
///
-/// The full format includes fields from all entered spans.
+/// This formatter is intended for production use with systems where structured
+/// logs are consumed as JSON by analysis and viewing tools. The JSON output is
+/// not optimized for human readability; instead, it should be pretty-printed
+/// using external JSON tools such as `jq`, or using a JSON log viewer.
///
/// # Example Output
///
-/// ```json
-/// {
-/// "timestamp":"Feb 20 11:28:15.096",
-/// "level":"INFO",
-/// "fields":{"message":"some message","key":"value"}
-/// "target":"mycrate",
-/// "span":{name":"leaf"},
-/// "spans":[{"name":"root"},{"name":"leaf"}],
-/// }
-/// ```
+/// <pre><font color="#4E9A06"><b>:;</b></font> <font color="#4E9A06">cargo</font> run --example fmt-json
+/// <font color="#4E9A06"><b> Finished</b></font> dev [unoptimized + debuginfo] target(s) in 0.08s
+/// <font color="#4E9A06"><b> Running</b></font> `target/debug/examples/fmt-json`
+/// {&quot;timestamp&quot;:&quot;2022-02-15T18:47:10.821315Z&quot;,&quot;level&quot;:&quot;INFO&quot;,&quot;fields&quot;:{&quot;message&quot;:&quot;preparing to shave yaks&quot;,&quot;number_of_yaks&quot;:3},&quot;target&quot;:&quot;fmt_json&quot;}
+/// {&quot;timestamp&quot;:&quot;2022-02-15T18:47:10.821422Z&quot;,&quot;level&quot;:&quot;INFO&quot;,&quot;fields&quot;:{&quot;message&quot;:&quot;shaving yaks&quot;},&quot;target&quot;:&quot;fmt_json::yak_shave&quot;,&quot;spans&quot;:[{&quot;yaks&quot;:3,&quot;name&quot;:&quot;shaving_yaks&quot;}]}
+/// {&quot;timestamp&quot;:&quot;2022-02-15T18:47:10.821495Z&quot;,&quot;level&quot;:&quot;TRACE&quot;,&quot;fields&quot;:{&quot;message&quot;:&quot;hello! I&apos;m gonna shave a yak&quot;,&quot;excitement&quot;:&quot;yay!&quot;},&quot;target&quot;:&quot;fmt_json::yak_shave&quot;,&quot;spans&quot;:[{&quot;yaks&quot;:3,&quot;name&quot;:&quot;shaving_yaks&quot;},{&quot;yak&quot;:1,&quot;name&quot;:&quot;shave&quot;}]}
+/// {&quot;timestamp&quot;:&quot;2022-02-15T18:47:10.821546Z&quot;,&quot;level&quot;:&quot;TRACE&quot;,&quot;fields&quot;:{&quot;message&quot;:&quot;yak shaved successfully&quot;},&quot;target&quot;:&quot;fmt_json::yak_shave&quot;,&quot;spans&quot;:[{&quot;yaks&quot;:3,&quot;name&quot;:&quot;shaving_yaks&quot;},{&quot;yak&quot;:1,&quot;name&quot;:&quot;shave&quot;}]}
+/// {&quot;timestamp&quot;:&quot;2022-02-15T18:47:10.821598Z&quot;,&quot;level&quot;:&quot;DEBUG&quot;,&quot;fields&quot;:{&quot;yak&quot;:1,&quot;shaved&quot;:true},&quot;target&quot;:&quot;yak_events&quot;,&quot;spans&quot;:[{&quot;yaks&quot;:3,&quot;name&quot;:&quot;shaving_yaks&quot;}]}
+/// {&quot;timestamp&quot;:&quot;2022-02-15T18:47:10.821637Z&quot;,&quot;level&quot;:&quot;TRACE&quot;,&quot;fields&quot;:{&quot;yaks_shaved&quot;:1},&quot;target&quot;:&quot;fmt_json::yak_shave&quot;,&quot;spans&quot;:[{&quot;yaks&quot;:3,&quot;name&quot;:&quot;shaving_yaks&quot;}]}
+/// {&quot;timestamp&quot;:&quot;2022-02-15T18:47:10.821684Z&quot;,&quot;level&quot;:&quot;TRACE&quot;,&quot;fields&quot;:{&quot;message&quot;:&quot;hello! I&apos;m gonna shave a yak&quot;,&quot;excitement&quot;:&quot;yay!&quot;},&quot;target&quot;:&quot;fmt_json::yak_shave&quot;,&quot;spans&quot;:[{&quot;yaks&quot;:3,&quot;name&quot;:&quot;shaving_yaks&quot;},{&quot;yak&quot;:2,&quot;name&quot;:&quot;shave&quot;}]}
+/// {&quot;timestamp&quot;:&quot;2022-02-15T18:47:10.821727Z&quot;,&quot;level&quot;:&quot;TRACE&quot;,&quot;fields&quot;:{&quot;message&quot;:&quot;yak shaved successfully&quot;},&quot;target&quot;:&quot;fmt_json::yak_shave&quot;,&quot;spans&quot;:[{&quot;yaks&quot;:3,&quot;name&quot;:&quot;shaving_yaks&quot;},{&quot;yak&quot;:2,&quot;name&quot;:&quot;shave&quot;}]}
+/// {&quot;timestamp&quot;:&quot;2022-02-15T18:47:10.821773Z&quot;,&quot;level&quot;:&quot;DEBUG&quot;,&quot;fields&quot;:{&quot;yak&quot;:2,&quot;shaved&quot;:true},&quot;target&quot;:&quot;yak_events&quot;,&quot;spans&quot;:[{&quot;yaks&quot;:3,&quot;name&quot;:&quot;shaving_yaks&quot;}]}
+/// {&quot;timestamp&quot;:&quot;2022-02-15T18:47:10.821806Z&quot;,&quot;level&quot;:&quot;TRACE&quot;,&quot;fields&quot;:{&quot;yaks_shaved&quot;:2},&quot;target&quot;:&quot;fmt_json::yak_shave&quot;,&quot;spans&quot;:[{&quot;yaks&quot;:3,&quot;name&quot;:&quot;shaving_yaks&quot;}]}
+/// {&quot;timestamp&quot;:&quot;2022-02-15T18:47:10.821909Z&quot;,&quot;level&quot;:&quot;TRACE&quot;,&quot;fields&quot;:{&quot;message&quot;:&quot;hello! I&apos;m gonna shave a yak&quot;,&quot;excitement&quot;:&quot;yay!&quot;},&quot;target&quot;:&quot;fmt_json::yak_shave&quot;,&quot;spans&quot;:[{&quot;yaks&quot;:3,&quot;name&quot;:&quot;shaving_yaks&quot;},{&quot;yak&quot;:3,&quot;name&quot;:&quot;shave&quot;}]}
+/// {&quot;timestamp&quot;:&quot;2022-02-15T18:47:10.821956Z&quot;,&quot;level&quot;:&quot;WARN&quot;,&quot;fields&quot;:{&quot;message&quot;:&quot;could not locate yak&quot;},&quot;target&quot;:&quot;fmt_json::yak_shave&quot;,&quot;spans&quot;:[{&quot;yaks&quot;:3,&quot;name&quot;:&quot;shaving_yaks&quot;},{&quot;yak&quot;:3,&quot;name&quot;:&quot;shave&quot;}]}
+/// {&quot;timestamp&quot;:&quot;2022-02-15T18:47:10.822006Z&quot;,&quot;level&quot;:&quot;DEBUG&quot;,&quot;fields&quot;:{&quot;yak&quot;:3,&quot;shaved&quot;:false},&quot;target&quot;:&quot;yak_events&quot;,&quot;spans&quot;:[{&quot;yaks&quot;:3,&quot;name&quot;:&quot;shaving_yaks&quot;}]}
+/// {&quot;timestamp&quot;:&quot;2022-02-15T18:47:10.822041Z&quot;,&quot;level&quot;:&quot;ERROR&quot;,&quot;fields&quot;:{&quot;message&quot;:&quot;failed to shave yak&quot;,&quot;yak&quot;:3,&quot;error&quot;:&quot;missing yak&quot;},&quot;target&quot;:&quot;fmt_json::yak_shave&quot;,&quot;spans&quot;:[{&quot;yaks&quot;:3,&quot;name&quot;:&quot;shaving_yaks&quot;}]}
+/// {&quot;timestamp&quot;:&quot;2022-02-15T18:47:10.822079Z&quot;,&quot;level&quot;:&quot;TRACE&quot;,&quot;fields&quot;:{&quot;yaks_shaved&quot;:2},&quot;target&quot;:&quot;fmt_json::yak_shave&quot;,&quot;spans&quot;:[{&quot;yaks&quot;:3,&quot;name&quot;:&quot;shaving_yaks&quot;}]}
+/// {&quot;timestamp&quot;:&quot;2022-02-15T18:47:10.822117Z&quot;,&quot;level&quot;:&quot;INFO&quot;,&quot;fields&quot;:{&quot;message&quot;:&quot;yak shaving completed&quot;,&quot;all_yaks_shaved&quot;:false},&quot;target&quot;:&quot;fmt_json&quot;}
+/// </pre>
///
/// # Options
///
+/// This formatter exposes additional options to configure the structure of the
+/// output JSON objects:
+///
/// - [`Json::flatten_event`] can be used to enable flattening event fields into
/// the root
/// - [`Json::with_current_span`] can be used to control logging of the current
@@ -52,9 +69,23 @@ use tracing_log::NormalizeEvent;
/// By default, event fields are not flattened, and both current span and span
/// list are logged.
///
-/// [`Json::flatten_event`]: #method.flatten_event
-/// [`Json::with_current_span`]: #method.with_current_span
-/// [`Json::with_span_list`]: #method.with_span_list
+/// # Valuable Support
+///
+/// Experimental support is available for using the [`valuable`] crate to record
+/// user-defined values as structured JSON. When the ["valuable" unstable
+/// feature][unstable] is enabled, types implementing [`valuable::Valuable`] will
+/// be recorded as structured JSON, rather than
+/// using their [`std::fmt::Debug`] implementations.
+///
+/// **Note**: This is an experimental feature. [Unstable features][unstable]
+/// must be enabled in order to use `valuable` support.
+///
+/// [`Json::flatten_event`]: Json::flatten_event()
+/// [`Json::with_current_span`]: Json::with_current_span()
+/// [`Json::with_span_list`]: Json::with_span_list()
+/// [`valuable`]: https://crates.io/crates/valuable
+/// [unstable]: crate#unstable-features
+/// [`valuable::Valuable`]: https://docs.rs/valuable/latest/valuable/trait.Valuable.html
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub struct Json {
pub(crate) flatten_event: bool,
@@ -243,6 +274,18 @@ where
serializer.serialize_entry("target", meta.target())?;
}
+ if self.display_filename {
+ if let Some(filename) = meta.file() {
+ serializer.serialize_entry("filename", filename)?;
+ }
+ }
+
+ if self.display_line_number {
+ if let Some(line_number) = meta.line() {
+ serializer.serialize_entry("line_number", &line_number)?;
+ }
+ }
+
if self.format.display_current_span {
if let Some(ref span) = current_span {
serializer
@@ -298,7 +341,6 @@ impl Default for Json {
/// The JSON [`FormatFields`] implementation.
///
-/// [`FormatFields`]: trait.FormatFields.html
#[derive(Debug)]
pub struct JsonFields {
// reserve the ability to add fields to this without causing a breaking
@@ -309,7 +351,6 @@ pub struct JsonFields {
impl JsonFields {
/// Returns a new JSON [`FormatFields`] implementation.
///
- /// [`FormatFields`]: trait.FormatFields.html
pub fn new() -> Self {
Self { _private: () }
}
@@ -378,9 +419,8 @@ impl<'a> FormatFields<'a> for JsonFields {
/// The [visitor] produced by [`JsonFields`]'s [`MakeVisitor`] implementation.
///
-/// [visitor]: ../../field/trait.Visit.html
-/// [`JsonFields`]: struct.JsonFields.html
-/// [`MakeVisitor`]: ../../field/trait.MakeVisitor.html
+/// [visitor]: crate::field::Visit
+/// [`MakeVisitor`]: crate::field::MakeVisitor
pub struct JsonVisitor<'a> {
values: BTreeMap<&'a str, serde_json::Value>,
writer: &'a mut dyn Write,
@@ -435,6 +475,26 @@ impl<'a> crate::field::VisitOutput<fmt::Result> for JsonVisitor<'a> {
}
impl<'a> field::Visit for JsonVisitor<'a> {
+ #[cfg(all(tracing_unstable, feature = "valuable"))]
+ fn record_value(&mut self, field: &Field, value: valuable_crate::Value<'_>) {
+ let value = match serde_json::to_value(valuable_serde::Serializable::new(value)) {
+ Ok(value) => value,
+ Err(_e) => {
+ #[cfg(debug_assertions)]
+ unreachable!(
+ "`valuable::Valuable` implementations should always serialize \
+ successfully, but an error occurred: {}",
+ _e,
+ );
+
+ #[cfg(not(debug_assertions))]
+ return;
+ }
+ };
+
+ self.values.insert(field.name(), value);
+ }
+
/// Visit a double precision floating point value.
fn record_f64(&mut self, field: &Field, value: f64) {
self.values
@@ -488,6 +548,7 @@ mod test {
use tracing::{self, subscriber::with_default};
use std::fmt;
+ use std::path::Path;
struct MockTime;
impl FormatTime for MockTime {
@@ -516,6 +577,50 @@ mod test {
}
#[test]
+ fn json_filename() {
+ let current_path = Path::new("tracing-subscriber")
+ .join("src")
+ .join("fmt")
+ .join("format")
+ .join("json.rs")
+ .to_str()
+ .expect("path must be valid unicode")
+ // escape windows backslashes
+ .replace('\\', "\\\\");
+ let expected =
+ &format!("{}{}{}",
+ "{\"timestamp\":\"fake time\",\"level\":\"INFO\",\"span\":{\"answer\":42,\"name\":\"json_span\",\"number\":3},\"spans\":[{\"answer\":42,\"name\":\"json_span\",\"number\":3}],\"target\":\"tracing_subscriber::fmt::format::json::test\",\"filename\":\"",
+ current_path,
+ "\",\"fields\":{\"message\":\"some json test\"}}\n");
+ let subscriber = subscriber()
+ .flatten_event(false)
+ .with_current_span(true)
+ .with_file(true)
+ .with_span_list(true);
+ test_json(expected, subscriber, || {
+ let span = tracing::span!(tracing::Level::INFO, "json_span", answer = 42, number = 3);
+ let _guard = span.enter();
+ tracing::info!("some json test");
+ });
+ }
+
+ #[test]
+ fn json_line_number() {
+ let expected =
+ "{\"timestamp\":\"fake time\",\"level\":\"INFO\",\"span\":{\"answer\":42,\"name\":\"json_span\",\"number\":3},\"spans\":[{\"answer\":42,\"name\":\"json_span\",\"number\":3}],\"target\":\"tracing_subscriber::fmt::format::json::test\",\"line_number\":42,\"fields\":{\"message\":\"some json test\"}}\n";
+ let subscriber = subscriber()
+ .flatten_event(false)
+ .with_current_span(true)
+ .with_line_number(true)
+ .with_span_list(true);
+ test_json_with_line_number(expected, subscriber, || {
+ let span = tracing::span!(tracing::Level::INFO, "json_span", answer = 42, number = 3);
+ let _guard = span.enter();
+ tracing::info!("some json test");
+ });
+ }
+
+ #[test]
fn json_flattened_event() {
let expected =
"{\"timestamp\":\"fake time\",\"level\":\"INFO\",\"span\":{\"answer\":42,\"name\":\"json_span\",\"number\":3},\"spans\":[{\"answer\":42,\"name\":\"json_span\",\"number\":3}],\"target\":\"tracing_subscriber::fmt::format::json::test\",\"message\":\"some json test\"}\n";
@@ -747,4 +852,34 @@ mod test {
serde_json::from_str(actual).unwrap()
);
}
+
+ fn test_json_with_line_number<T>(
+ expected: &str,
+ builder: crate::fmt::SubscriberBuilder<JsonFields, Format<Json>>,
+ producer: impl FnOnce() -> T,
+ ) {
+ let make_writer = MockMakeWriter::default();
+ let subscriber = builder
+ .with_writer(make_writer.clone())
+ .with_timer(MockTime)
+ .finish();
+
+ with_default(subscriber, producer);
+
+ let buf = make_writer.buf();
+ let actual = std::str::from_utf8(&buf[..]).unwrap();
+ let mut expected =
+ serde_json::from_str::<std::collections::HashMap<&str, serde_json::Value>>(expected)
+ .unwrap();
+ let expect_line_number = expected.remove("line_number").is_some();
+ let mut actual: std::collections::HashMap<&str, serde_json::Value> =
+ serde_json::from_str(actual).unwrap();
+ let line_number = actual.remove("line_number");
+ if expect_line_number {
+ assert_eq!(line_number.map(|x| x.is_number()), Some(true));
+ } else {
+ assert!(line_number.is_none());
+ }
+ assert_eq!(actual, expected);
+ }
}
diff --git a/vendor/tracing-subscriber-0.3.3/src/fmt/format/mod.rs b/vendor/tracing-subscriber/src/fmt/format/mod.rs
index 9001e102e..b8a482e55 100644
--- a/vendor/tracing-subscriber-0.3.3/src/fmt/format/mod.rs
+++ b/vendor/tracing-subscriber/src/fmt/format/mod.rs
@@ -1,4 +1,33 @@
//! Formatters for logging `tracing` events.
+//!
+//! This module provides several formatter implementations, as well as utilities
+//! for implementing custom formatters.
+//!
+//! # Formatters
+//! This module provides a number of formatter implementations:
+//!
+//! * [`Full`]: The default formatter. This emits human-readable,
+//! single-line logs for each event that occurs, with the current span context
+//! displayed before the formatted representation of the event. See
+//! [here](Full#example-output) for sample output.
+//!
+//! * [`Compact`]: A variant of the default formatter, optimized for
+//! short line lengths. Fields from the current span context are appended to
+//! the fields of the formatted event, and span names are not shown; the
+//! verbosity level is abbreviated to a single character. See
+//! [here](Compact#example-output) for sample output.
+//!
+//! * [`Pretty`]: Emits excessively pretty, multi-line logs, optimized
+//! for human readability. This is primarily intended to be used in local
+//! development and debugging, or for command-line applications, where
+//! automated analysis and compact storage of logs is less of a priority than
+//! readability and visual appeal. See [here](Pretty#example-output)
+//! for sample output.
+//!
+//! * [`Json`]: Outputs newline-delimited JSON logs. This is intended
+//! for production use with systems where structured logs are consumed as JSON
+//! by analysis and viewing tools. The JSON output is not optimized for human
+//! readability. See [here](Json#example-output) for sample output.
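For orientation, a sketch of selecting each of the formatters listed above through the builder (the `ansi` feature is needed for `pretty()` and the `json` feature for `json()`; none of these subscribers is installed here):

```rust
fn main() {
    let _full = tracing_subscriber::fmt().finish();
    let _compact = tracing_subscriber::fmt().compact().finish();
    let _pretty = tracing_subscriber::fmt().pretty().finish();
    let _json = tracing_subscriber::fmt().json().finish();
}
```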
use super::time::{FormatTime, SystemTime};
use crate::{
field::{MakeOutput, MakeVisitor, RecordFields, VisitFmt, VisitOutput},
@@ -17,7 +46,7 @@ use tracing_core::{
use tracing_log::NormalizeEvent;
#[cfg(feature = "ansi")]
-use ansi_term::{Colour, Style};
+use nu_ansi_term::{Color, Style};
#[cfg(feature = "json")]
mod json;
@@ -72,7 +101,7 @@ pub use pretty::*;
/// does not support ANSI escape codes (such as a log file), and they should
/// not be emitted.
///
-/// Crates like [`ansi_term`] and [`owo-colors`] can be used to add ANSI
+/// Crates like [`nu_ansi_term`] and [`owo-colors`] can be used to add ANSI
/// escape codes to formatted output.
///
/// * The actual [`Event`] to be formatted.
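Since the docs above now point at `nu_ansi_term` for coloring custom formatter output, here is a sketch of a `FormatEvent` implementation that respects `Writer::has_ansi_escapes` (a simplified illustration, not the crate's own formatter; the type name and the green color choice are arbitrary):

```rust
use std::fmt::{self, Write as _};

use nu_ansi_term::Color;
use tracing::{Event, Subscriber};
use tracing_subscriber::fmt::{format::Writer, FmtContext, FormatEvent, FormatFields};
use tracing_subscriber::registry::LookupSpan;

struct LevelColored;

impl<S, N> FormatEvent<S, N> for LevelColored
where
    S: Subscriber + for<'a> LookupSpan<'a>,
    N: for<'a> FormatFields<'a> + 'static,
{
    fn format_event(
        &self,
        ctx: &FmtContext<'_, S, N>,
        mut writer: Writer<'_>,
        event: &Event<'_>,
    ) -> fmt::Result {
        let level = *event.metadata().level();
        // Only emit escape codes when the destination supports them.
        if writer.has_ansi_escapes() {
            write!(writer, "{} ", Color::Green.paint(level.to_string()))?;
        } else {
            write!(writer, "{} ", level)?;
        }
        // Delegate field formatting to the configured `FormatFields`.
        ctx.format_fields(writer.by_ref(), event)?;
        writeln!(writer)
    }
}
```

It would be installed with `fmt::layer().event_format(LevelColored)`.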
@@ -153,13 +182,14 @@ pub use pretty::*;
/// DEBUG yak_shaving::shaver: some-span{field-on-span=foo}: started shaving yak
/// ```
///
+/// [`layer::Context`]: crate::layer::Context
/// [`fmt::Layer`]: super::Layer
/// [`fmt::Subscriber`]: super::Subscriber
/// [`Event`]: tracing::Event
/// [implements `FormatFields`]: super::FmtContext#impl-FormatFields<'writer>
/// [ANSI terminal escape codes]: https://en.wikipedia.org/wiki/ANSI_escape_code
/// [`Writer::has_ansi_escapes`]: Writer::has_ansi_escapes
-/// [`ansi_term`]: https://crates.io/crates/ansi_term
+/// [`nu_ansi_term`]: https://crates.io/crates/nu_ansi_term
/// [`owo-colors`]: https://crates.io/crates/owo-colors
/// [default formatter]: Full
pub trait FormatEvent<S, N>
@@ -197,8 +227,8 @@ where
/// time a span or event with fields is recorded, the subscriber will format
/// those fields with its associated `FormatFields` implementation.
///
-/// [set of fields]: ../field/trait.RecordFields.html
-/// [`FmtSubscriber`]: ../fmt/struct.Subscriber.html
+/// [set of fields]: crate::field::RecordFields
+/// [`FmtSubscriber`]: super::Subscriber
pub trait FormatFields<'writer> {
/// Format the provided `fields` to the provided [`Writer`], returning a result.
fn format_fields<R: RecordFields>(&self, writer: Writer<'writer>, fields: R) -> fmt::Result;
@@ -251,7 +281,6 @@ pub fn json() -> Format<Json> {
/// Returns a [`FormatFields`] implementation that formats fields using the
/// provided function or closure.
///
-/// [`FormatFields`]: trait.FormatFields.html
pub fn debug_fn<F>(f: F) -> FieldFn<F>
where
F: Fn(&mut Writer<'_>, &Field, &dyn fmt::Debug) -> fmt::Result + Clone,
@@ -272,6 +301,8 @@ where
///
/// Additionally, a `Writer` may expose additional `tracing`-specific
/// information to the formatter implementation.
+///
+/// [fields]: tracing_core::field
pub struct Writer<'writer> {
writer: &'writer mut dyn fmt::Write,
// TODO(eliza): add ANSI support
@@ -281,28 +312,76 @@ pub struct Writer<'writer> {
/// A [`FormatFields`] implementation that formats fields by calling a function
/// or closure.
///
-/// [`FormatFields`]: trait.FormatFields.html
#[derive(Debug, Clone)]
pub struct FieldFn<F>(F);
/// The [visitor] produced by [`FieldFn`]'s [`MakeVisitor`] implementation.
///
-/// [visitor]: ../../field/trait.Visit.html
-/// [`FieldFn`]: struct.FieldFn.html
-/// [`MakeVisitor`]: ../../field/trait.MakeVisitor.html
+/// [visitor]: super::super::field::Visit
+/// [`MakeVisitor`]: super::super::field::MakeVisitor
pub struct FieldFnVisitor<'a, F> {
f: F,
writer: Writer<'a>,
result: fmt::Result,
}
-/// Marker for `Format` that indicates that the compact log format should be used.
+/// Marker for [`Format`] that indicates that the compact log format should be used.
+///
+/// The compact format includes fields from all currently entered spans, after
+/// the event's fields. Span names are listed in order before fields are
+/// displayed.
+///
+/// # Example Output
///
-/// The compact format only includes the fields from the most recently entered span.
+/// <pre><font color="#4E9A06"><b>:;</b></font> <font color="#4E9A06">cargo</font> run --example fmt-compact
+/// <font color="#4E9A06"><b> Finished</b></font> dev [unoptimized + debuginfo] target(s) in 0.08s
+/// <font color="#4E9A06"><b> Running</b></font> `target/debug/examples/fmt-compact`
+/// <font color="#AAAAAA">2022-02-17T19:51:05.809287Z </font><font color="#4E9A06"> INFO</font> <b>fmt_compact</b><font color="#AAAAAA">: preparing to shave yaks </font><i>number_of_yaks</i><font color="#AAAAAA">=3</font>
+/// <font color="#AAAAAA">2022-02-17T19:51:05.809367Z </font><font color="#4E9A06"> INFO</font> <b>shaving_yaks</b>: <b>fmt_compact::yak_shave</b><font color="#AAAAAA">: shaving yaks </font><font color="#AAAAAA"><i>yaks</i></font><font color="#AAAAAA">=3</font>
+/// <font color="#AAAAAA">2022-02-17T19:51:05.809414Z </font><font color="#75507B">TRACE</font> <b>shaving_yaks</b>:<b>shave</b>: <b>fmt_compact::yak_shave</b><font color="#AAAAAA">: hello! I&apos;m gonna shave a yak </font><i>excitement</i><font color="#AAAAAA">=&quot;yay!&quot; </font><font color="#AAAAAA"><i>yaks</i></font><font color="#AAAAAA">=3 </font><font color="#AAAAAA"><i>yak</i></font><font color="#AAAAAA">=1</font>
+/// <font color="#AAAAAA">2022-02-17T19:51:05.809443Z </font><font color="#75507B">TRACE</font> <b>shaving_yaks</b>:<b>shave</b>: <b>fmt_compact::yak_shave</b><font color="#AAAAAA">: yak shaved successfully </font><font color="#AAAAAA"><i>yaks</i></font><font color="#AAAAAA">=3 </font><font color="#AAAAAA"><i>yak</i></font><font color="#AAAAAA">=1</font>
+/// <font color="#AAAAAA">2022-02-17T19:51:05.809477Z </font><font color="#3465A4">DEBUG</font> <b>shaving_yaks</b>: <b>yak_events</b><font color="#AAAAAA">: </font><i>yak</i><font color="#AAAAAA">=1 </font><i>shaved</i><font color="#AAAAAA">=true </font><font color="#AAAAAA"><i>yaks</i></font><font color="#AAAAAA">=3</font>
+/// <font color="#AAAAAA">2022-02-17T19:51:05.809500Z </font><font color="#75507B">TRACE</font> <b>shaving_yaks</b>: <b>fmt_compact::yak_shave</b><font color="#AAAAAA">: </font><i>yaks_shaved</i><font color="#AAAAAA">=1 </font><font color="#AAAAAA"><i>yaks</i></font><font color="#AAAAAA">=3</font>
+/// <font color="#AAAAAA">2022-02-17T19:51:05.809531Z </font><font color="#75507B">TRACE</font> <b>shaving_yaks</b>:<b>shave</b>: <b>fmt_compact::yak_shave</b><font color="#AAAAAA">: hello! I&apos;m gonna shave a yak </font><i>excitement</i><font color="#AAAAAA">=&quot;yay!&quot; </font><font color="#AAAAAA"><i>yaks</i></font><font color="#AAAAAA">=3 </font><font color="#AAAAAA"><i>yak</i></font><font color="#AAAAAA">=2</font>
+/// <font color="#AAAAAA">2022-02-17T19:51:05.809554Z </font><font color="#75507B">TRACE</font> <b>shaving_yaks</b>:<b>shave</b>: <b>fmt_compact::yak_shave</b><font color="#AAAAAA">: yak shaved successfully </font><font color="#AAAAAA"><i>yaks</i></font><font color="#AAAAAA">=3 </font><font color="#AAAAAA"><i>yak</i></font><font color="#AAAAAA">=2</font>
+/// <font color="#AAAAAA">2022-02-17T19:51:05.809581Z </font><font color="#3465A4">DEBUG</font> <b>shaving_yaks</b>: <b>yak_events</b><font color="#AAAAAA">: </font><i>yak</i><font color="#AAAAAA">=2 </font><i>shaved</i><font color="#AAAAAA">=true </font><font color="#AAAAAA"><i>yaks</i></font><font color="#AAAAAA">=3</font>
+/// <font color="#AAAAAA">2022-02-17T19:51:05.809606Z </font><font color="#75507B">TRACE</font> <b>shaving_yaks</b>: <b>fmt_compact::yak_shave</b><font color="#AAAAAA">: </font><i>yaks_shaved</i><font color="#AAAAAA">=2 </font><font color="#AAAAAA"><i>yaks</i></font><font color="#AAAAAA">=3</font>
+/// <font color="#AAAAAA">2022-02-17T19:51:05.809635Z </font><font color="#75507B">TRACE</font> <b>shaving_yaks</b>:<b>shave</b>: <b>fmt_compact::yak_shave</b><font color="#AAAAAA">: hello! I&apos;m gonna shave a yak </font><i>excitement</i><font color="#AAAAAA">=&quot;yay!&quot; </font><font color="#AAAAAA"><i>yaks</i></font><font color="#AAAAAA">=3 </font><font color="#AAAAAA"><i>yak</i></font><font color="#AAAAAA">=3</font>
+/// <font color="#AAAAAA">2022-02-17T19:51:05.809664Z </font><font color="#C4A000"> WARN</font> <b>shaving_yaks</b>:<b>shave</b>: <b>fmt_compact::yak_shave</b><font color="#AAAAAA">: could not locate yak </font><font color="#AAAAAA"><i>yaks</i></font><font color="#AAAAAA">=3 </font><font color="#AAAAAA"><i>yak</i></font><font color="#AAAAAA">=3</font>
+/// <font color="#AAAAAA">2022-02-17T19:51:05.809693Z </font><font color="#3465A4">DEBUG</font> <b>shaving_yaks</b>: <b>yak_events</b><font color="#AAAAAA">: </font><i>yak</i><font color="#AAAAAA">=3 </font><i>shaved</i><font color="#AAAAAA">=false </font><font color="#AAAAAA"><i>yaks</i></font><font color="#AAAAAA">=3</font>
+/// <font color="#AAAAAA">2022-02-17T19:51:05.809717Z </font><font color="#CC0000">ERROR</font> <b>shaving_yaks</b>: <b>fmt_compact::yak_shave</b><font color="#AAAAAA">: failed to shave yak </font><i>yak</i><font color="#AAAAAA">=3 </font><i>error</i><font color="#AAAAAA">=missing yak </font><i>error.sources</i><font color="#AAAAAA">=[out of space, out of cash] </font><font color="#AAAAAA"><i>yaks</i></font><font color="#AAAAAA">=3</font>
+/// <font color="#AAAAAA">2022-02-17T19:51:05.809743Z </font><font color="#75507B">TRACE</font> <b>shaving_yaks</b>: <b>fmt_compact::yak_shave</b><font color="#AAAAAA">: </font><i>yaks_shaved</i><font color="#AAAAAA">=2 </font><font color="#AAAAAA"><i>yaks</i></font><font color="#AAAAAA">=3</font>
+/// <font color="#AAAAAA">2022-02-17T19:51:05.809768Z </font><font color="#4E9A06"> INFO</font> <b>fmt_compact</b><font color="#AAAAAA">: yak shaving completed </font><i>all_yaks_shaved</i><font color="#AAAAAA">=false</font>
+///
+/// </pre>
#[derive(Default, Debug, Copy, Clone, Eq, PartialEq)]
pub struct Compact;
-/// Marker for `Format` that indicates that the verbose log format should be used.
+/// Marker for [`Format`] that indicates that the default log format should be used.
+///
+/// This formatter shows the span context before printing event data. Spans are
+/// displayed including their names and fields.
///
-/// The full format includes fields from all entered spans.
+/// # Example Output
+///
+/// <pre><font color="#4E9A06"><b>:;</b></font> <font color="#4E9A06">cargo</font> run --example fmt
+/// <font color="#4E9A06"><b> Finished</b></font> dev [unoptimized + debuginfo] target(s) in 0.08s
+/// <font color="#4E9A06"><b> Running</b></font> `target/debug/examples/fmt`
+/// <font color="#AAAAAA">2022-02-15T18:40:14.289898Z </font><font color="#4E9A06"> INFO</font> fmt: preparing to shave yaks <i>number_of_yaks</i><font color="#AAAAAA">=3</font>
+/// <font color="#AAAAAA">2022-02-15T18:40:14.289974Z </font><font color="#4E9A06"> INFO</font> <b>shaving_yaks{</b><i>yaks</i><font color="#AAAAAA">=3</font><b>}</b><font color="#AAAAAA">: fmt::yak_shave: shaving yaks</font>
+/// <font color="#AAAAAA">2022-02-15T18:40:14.290011Z </font><font color="#75507B">TRACE</font> <b>shaving_yaks{</b><i>yaks</i><font color="#AAAAAA">=3</font><b>}</b><font color="#AAAAAA">:</font><b>shave{</b><i>yak</i><font color="#AAAAAA">=1</font><b>}</b><font color="#AAAAAA">: fmt::yak_shave: hello! I&apos;m gonna shave a yak </font><i>excitement</i><font color="#AAAAAA">=&quot;yay!&quot;</font>
+/// <font color="#AAAAAA">2022-02-15T18:40:14.290038Z </font><font color="#75507B">TRACE</font> <b>shaving_yaks{</b><i>yaks</i><font color="#AAAAAA">=3</font><b>}</b><font color="#AAAAAA">:</font><b>shave{</b><i>yak</i><font color="#AAAAAA">=1</font><b>}</b><font color="#AAAAAA">: fmt::yak_shave: yak shaved successfully</font>
+/// <font color="#AAAAAA">2022-02-15T18:40:14.290070Z </font><font color="#3465A4">DEBUG</font> <b>shaving_yaks{</b><i>yaks</i><font color="#AAAAAA">=3</font><b>}</b><font color="#AAAAAA">: yak_events: </font><i>yak</i><font color="#AAAAAA">=1 </font><i>shaved</i><font color="#AAAAAA">=true</font>
+/// <font color="#AAAAAA">2022-02-15T18:40:14.290089Z </font><font color="#75507B">TRACE</font> <b>shaving_yaks{</b><i>yaks</i><font color="#AAAAAA">=3</font><b>}</b><font color="#AAAAAA">: fmt::yak_shave: </font><i>yaks_shaved</i><font color="#AAAAAA">=1</font>
+/// <font color="#AAAAAA">2022-02-15T18:40:14.290114Z </font><font color="#75507B">TRACE</font> <b>shaving_yaks{</b><i>yaks</i><font color="#AAAAAA">=3</font><b>}</b><font color="#AAAAAA">:</font><b>shave{</b><i>yak</i><font color="#AAAAAA">=2</font><b>}</b><font color="#AAAAAA">: fmt::yak_shave: hello! I&apos;m gonna shave a yak </font><i>excitement</i><font color="#AAAAAA">=&quot;yay!&quot;</font>
+/// <font color="#AAAAAA">2022-02-15T18:40:14.290134Z </font><font color="#75507B">TRACE</font> <b>shaving_yaks{</b><i>yaks</i><font color="#AAAAAA">=3</font><b>}</b><font color="#AAAAAA">:</font><b>shave{</b><i>yak</i><font color="#AAAAAA">=2</font><b>}</b><font color="#AAAAAA">: fmt::yak_shave: yak shaved successfully</font>
+/// <font color="#AAAAAA">2022-02-15T18:40:14.290157Z </font><font color="#3465A4">DEBUG</font> <b>shaving_yaks{</b><i>yaks</i><font color="#AAAAAA">=3</font><b>}</b><font color="#AAAAAA">: yak_events: </font><i>yak</i><font color="#AAAAAA">=2 </font><i>shaved</i><font color="#AAAAAA">=true</font>
+/// <font color="#AAAAAA">2022-02-15T18:40:14.290174Z </font><font color="#75507B">TRACE</font> <b>shaving_yaks{</b><i>yaks</i><font color="#AAAAAA">=3</font><b>}</b><font color="#AAAAAA">: fmt::yak_shave: </font><i>yaks_shaved</i><font color="#AAAAAA">=2</font>
+/// <font color="#AAAAAA">2022-02-15T18:40:14.290198Z </font><font color="#75507B">TRACE</font> <b>shaving_yaks{</b><i>yaks</i><font color="#AAAAAA">=3</font><b>}</b><font color="#AAAAAA">:</font><b>shave{</b><i>yak</i><font color="#AAAAAA">=3</font><b>}</b><font color="#AAAAAA">: fmt::yak_shave: hello! I&apos;m gonna shave a yak </font><i>excitement</i><font color="#AAAAAA">=&quot;yay!&quot;</font>
+/// <font color="#AAAAAA">2022-02-15T18:40:14.290222Z </font><font color="#C4A000"> WARN</font> <b>shaving_yaks{</b><i>yaks</i><font color="#AAAAAA">=3</font><b>}</b><font color="#AAAAAA">:</font><b>shave{</b><i>yak</i><font color="#AAAAAA">=3</font><b>}</b><font color="#AAAAAA">: fmt::yak_shave: could not locate yak</font>
+/// <font color="#AAAAAA">2022-02-15T18:40:14.290247Z </font><font color="#3465A4">DEBUG</font> <b>shaving_yaks{</b><i>yaks</i><font color="#AAAAAA">=3</font><b>}</b><font color="#AAAAAA">: yak_events: </font><i>yak</i><font color="#AAAAAA">=3 </font><i>shaved</i><font color="#AAAAAA">=false</font>
+/// <font color="#AAAAAA">2022-02-15T18:40:14.290268Z </font><font color="#CC0000">ERROR</font> <b>shaving_yaks{</b><i>yaks</i><font color="#AAAAAA">=3</font><b>}</b><font color="#AAAAAA">: fmt::yak_shave: failed to shave yak </font><i>yak</i><font color="#AAAAAA">=3 </font><i>error</i><font color="#AAAAAA">=missing yak </font><i>error.sources</i><font color="#AAAAAA">=[out of space, out of cash]</font>
+/// <font color="#AAAAAA">2022-02-15T18:40:14.290287Z </font><font color="#75507B">TRACE</font> <b>shaving_yaks{</b><i>yaks</i><font color="#AAAAAA">=3</font><b>}</b><font color="#AAAAAA">: fmt::yak_shave: </font><i>yaks_shaved</i><font color="#AAAAAA">=2</font>
+/// <font color="#AAAAAA">2022-02-15T18:40:14.290309Z </font><font color="#4E9A06"> INFO</font> fmt: yak shaving completed. <i>all_yaks_shaved</i><font color="#AAAAAA">=false</font>
+/// </pre>
#[derive(Default, Debug, Copy, Clone, Eq, PartialEq)]
pub struct Full;
@@ -311,8 +390,11 @@ pub struct Full;
/// You will usually want to use this as the `FormatEvent` for a `FmtSubscriber`.
///
/// The default logging format, [`Full`] includes all fields in each event and its containing
-/// spans. The [`Compact`] logging format includes only the fields from the most-recently-entered
-/// span.
+/// spans. The [`Compact`] logging format is intended to produce shorter log
+/// lines; it displays each event's fields, along with fields from the current
+/// span context, but other information is abbreviated. The [`Pretty`] logging
+/// format is an extra-verbose, multi-line human-readable logging format
+/// intended for use in development.
#[derive(Debug, Clone)]
pub struct Format<F = Full, T = SystemTime> {
format: F,
@@ -323,6 +405,8 @@ pub struct Format<F = Full, T = SystemTime> {
pub(crate) display_level: bool,
pub(crate) display_thread_id: bool,
pub(crate) display_thread_name: bool,
+ pub(crate) display_filename: bool,
+ pub(crate) display_line_number: bool,
}
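
The documentation above contrasts the `Full`, `Compact`, and `Pretty` formatters. As a rough sketch (not part of this patch, and assuming the usual `tracing`/`tracing-subscriber` dependencies with the default `fmt` feature), the builder methods below switch between them:

```rust
fn main() {
    // `Full` is the default; `.compact()` and `.pretty()` select the other
    // formatters, and `.json()` is available behind the "json" feature.
    tracing_subscriber::fmt()
        .compact()
        .init();

    tracing::info!(answer = 42, "rendered by the compact formatter");
}
```
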
// === impl Writer ===
@@ -499,6 +583,8 @@ impl Default for Format<Full, SystemTime> {
display_level: true,
display_thread_id: false,
display_thread_name: false,
+ display_filename: false,
+ display_line_number: false,
}
}
}
@@ -517,6 +603,8 @@ impl<F, T> Format<F, T> {
display_level: self.display_level,
display_thread_id: self.display_thread_id,
display_thread_name: self.display_thread_name,
+ display_filename: self.display_filename,
+ display_line_number: self.display_line_number,
}
}
@@ -554,6 +642,8 @@ impl<F, T> Format<F, T> {
display_level: self.display_level,
display_thread_id: self.display_thread_id,
display_thread_name: self.display_thread_name,
+ display_filename: true,
+ display_line_number: true,
}
}
@@ -571,8 +661,6 @@ impl<F, T> Format<F, T> {
///
/// - [`Format::flatten_event`] can be used to enable flattening event fields into the root
/// object.
- ///
- /// [`Format::flatten_event`]: #method.flatten_event
#[cfg(feature = "json")]
#[cfg_attr(docsrs, doc(cfg(feature = "json")))]
pub fn json(self) -> Format<Json, T> {
@@ -585,6 +673,8 @@ impl<F, T> Format<F, T> {
display_level: self.display_level,
display_thread_id: self.display_thread_id,
display_thread_name: self.display_thread_name,
+ display_filename: self.display_filename,
+ display_line_number: self.display_line_number,
}
}
@@ -612,6 +702,8 @@ impl<F, T> Format<F, T> {
display_level: self.display_level,
display_thread_id: self.display_thread_id,
display_thread_name: self.display_thread_name,
+ display_filename: self.display_filename,
+ display_line_number: self.display_line_number,
}
}
@@ -626,6 +718,8 @@ impl<F, T> Format<F, T> {
display_level: self.display_level,
display_thread_id: self.display_thread_id,
display_thread_name: self.display_thread_name,
+ display_filename: self.display_filename,
+ display_line_number: self.display_line_number,
}
}
@@ -654,9 +748,9 @@ impl<F, T> Format<F, T> {
}
/// Sets whether or not the [thread ID] of the current thread is displayed
- /// when formatting events
+ /// when formatting events.
///
- /// [thread ID]: https://doc.rust-lang.org/stable/std/thread/struct.ThreadId.html
+ /// [thread ID]: std::thread::ThreadId
pub fn with_thread_ids(self, display_thread_id: bool) -> Format<F, T> {
Format {
display_thread_id,
@@ -665,9 +759,9 @@ impl<F, T> Format<F, T> {
}
/// Sets whether or not the [name] of the current thread is displayed
- /// when formatting events
+ /// when formatting events.
///
- /// [name]: https://doc.rust-lang.org/stable/std/thread/index.html#naming-threads
+ /// [name]: std::thread#naming-threads
pub fn with_thread_names(self, display_thread_name: bool) -> Format<F, T> {
Format {
display_thread_name,
@@ -675,6 +769,38 @@ impl<F, T> Format<F, T> {
}
}
+ /// Sets whether or not an event's [source code file path][file] is
+ /// displayed.
+ ///
+ /// [file]: tracing_core::Metadata::file
+ pub fn with_file(self, display_filename: bool) -> Format<F, T> {
+ Format {
+ display_filename,
+ ..self
+ }
+ }
+
+ /// Sets whether or not an event's [source code line number][line] is
+ /// displayed.
+ ///
+ /// [line]: tracing_core::Metadata::line
+ pub fn with_line_number(self, display_line_number: bool) -> Format<F, T> {
+ Format {
+ display_line_number,
+ ..self
+ }
+ }
+
+ /// Sets whether or not the source code location from which an event
+ /// originated is displayed.
+ ///
+ /// This is equivalent to calling [`Format::with_file`] and
+ /// [`Format::with_line_number`] with the same value.
+ pub fn with_source_location(self, display_location: bool) -> Self {
+ self.with_line_number(display_location)
+ .with_file(display_location)
+ }
+
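
As a hedged sketch of the new `Format` options (assuming the usual `tracing`/`tracing-subscriber` dependencies), `with_source_location` is simply the combination of the two setters above:

```rust
fn main() {
    // Shorthand for `.with_file(true).with_line_number(true)`.
    let event_format = tracing_subscriber::fmt::format().with_source_location(true);

    tracing_subscriber::fmt()
        .event_format(event_format)
        .init();

    tracing::info!("this event is annotated with its source file and line number");
}
```
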
#[inline]
fn format_timestamp(&self, writer: &mut Writer<'_>) -> fmt::Result
where
@@ -692,14 +818,24 @@ impl<F, T> Format<F, T> {
if writer.has_ansi_escapes() {
let style = Style::new().dimmed();
write!(writer, "{}", style.prefix())?;
- self.timer.format_time(writer)?;
+
+ // If getting the timestamp failed, don't bail --- only bail on
+ // formatting errors.
+ if self.timer.format_time(writer).is_err() {
+ writer.write_str("<unknown time>")?;
+ }
+
write!(writer, "{} ", style.suffix())?;
return Ok(());
}
}
// Otherwise, just format the timestamp without ANSI formatting.
- self.timer.format_time(writer)?;
+ // If getting the timestamp failed, don't bail --- only bail on
+ // formatting errors.
+ if self.timer.format_time(writer).is_err() {
+ writer.write_str("<unknown time>")?;
+ }
writer.write_char(' ')
}
}
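
The fallback above means a failing timer no longer aborts event formatting. A minimal sketch of the observable behavior, using an invented `FlakyTimer` type (the trait signature matches the `Writer`-based `FormatTime` used in this module):

```rust
use std::fmt;
use tracing_subscriber::fmt::{format::Writer, time::FormatTime};

// Hypothetical timer whose clock source is unavailable.
struct FlakyTimer;

impl FormatTime for FlakyTimer {
    fn format_time(&self, _writer: &mut Writer<'_>) -> fmt::Result {
        Err(fmt::Error)
    }
}

fn main() {
    tracing_subscriber::fmt().with_timer(FlakyTimer).init();
    // The event is still written; its timestamp renders as `<unknown time>`.
    tracing::info!("emitted despite the timer error");
}
```
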
@@ -714,7 +850,7 @@ impl<T> Format<Json, T> {
/// ```ignore,json
/// {"timestamp":"Feb 20 11:28:15.096","level":"INFO","target":"mycrate", "message":"some message", "key": "value"}
/// ```
- /// See [`Json`](../format/struct.Json.html).
+ /// See [`Json`][super::format::Json].
#[cfg(feature = "json")]
#[cfg_attr(docsrs, doc(cfg(feature = "json")))]
pub fn flatten_event(mut self, flatten_event: bool) -> Format<Json, T> {
@@ -725,7 +861,7 @@ impl<T> Format<Json, T> {
/// Sets whether or not the formatter will include the current span in
/// formatted events.
///
- /// See [`format::Json`](../fmt/format/struct.Json.html)
+ /// See [`format::Json`][Json]
#[cfg(feature = "json")]
#[cfg_attr(docsrs, doc(cfg(feature = "json")))]
pub fn with_current_span(mut self, display_current_span: bool) -> Format<Json, T> {
@@ -736,7 +872,7 @@ impl<T> Format<Json, T> {
/// Sets whether or not the formatter will include a list (from root to
/// leaf) of all currently entered spans in formatted events.
///
- /// See [`format::Json`](../fmt/format/struct.Json.html)
+ /// See [`format::Json`][Json]
#[cfg(feature = "json")]
#[cfg_attr(docsrs, doc(cfg(feature = "json")))]
pub fn with_span_list(mut self, display_span_list: bool) -> Format<Json, T> {
@@ -840,6 +976,34 @@ where
)?;
}
+ let line_number = if self.display_line_number {
+ meta.line()
+ } else {
+ None
+ };
+
+ if self.display_filename {
+ if let Some(filename) = meta.file() {
+ write!(
+ writer,
+ "{}{}{}",
+ dimmed.paint(filename),
+ dimmed.paint(":"),
+ if line_number.is_some() { "" } else { " " }
+ )?;
+ }
+ }
+
+ if let Some(line_number) = line_number {
+ write!(
+ writer,
+ "{}{}:{} ",
+ dimmed.prefix(),
+ line_number,
+ dimmed.suffix()
+ )?;
+ }
+
ctx.format_fields(writer.by_ref(), event)?;
writeln!(writer)
}
@@ -918,23 +1082,49 @@ where
};
write!(writer, "{}", fmt_ctx)?;
+ let bold = writer.bold();
+ let dimmed = writer.dimmed();
+
+ let mut needs_space = false;
if self.display_target {
- write!(
- writer,
- "{}{} ",
- writer.bold().paint(meta.target()),
- writer.dimmed().paint(":")
- )?;
+ write!(writer, "{}{}", bold.paint(meta.target()), dimmed.paint(":"))?;
+ needs_space = true;
+ }
+
+ if self.display_filename {
+ if let Some(filename) = meta.file() {
+ if self.display_target {
+ writer.write_char(' ')?;
+ }
+ write!(writer, "{}{}", bold.paint(filename), dimmed.paint(":"))?;
+ needs_space = true;
+ }
+ }
+
+ if self.display_line_number {
+ if let Some(line_number) = meta.line() {
+ write!(
+ writer,
+ "{}{}{}{}",
+ bold.prefix(),
+ line_number,
+ bold.suffix(),
+ dimmed.paint(":")
+ )?;
+ needs_space = true;
+ }
+ }
+
+ if needs_space {
+ writer.write_char(' ')?;
}
ctx.format_fields(writer.by_ref(), event)?;
- let dimmed = writer.dimmed();
for span in ctx
.event_scope()
.into_iter()
- .map(crate::registry::Scope::from_root)
- .flatten()
+ .flat_map(crate::registry::Scope::from_root)
{
let exts = span.extensions();
if let Some(fields) = exts.get::<FormattedFields<N>>() {
@@ -962,7 +1152,6 @@ where
/// The default [`FormatFields`] implementation.
///
-/// [`FormatFields`]: trait.FormatFields.html
#[derive(Debug)]
pub struct DefaultFields {
// reserve the ability to add fields to this without causing a breaking
@@ -984,7 +1173,6 @@ pub struct DefaultVisitor<'a> {
impl DefaultFields {
/// Returns a new default [`FormatFields`] implementation.
///
- /// [`FormatFields`]: trait.FormatFields.html
pub fn new() -> Self {
Self { _private: () }
}
@@ -1201,6 +1389,14 @@ impl Style {
fn paint(&self, d: impl fmt::Display) -> impl fmt::Display {
d
}
+
+ fn prefix(&self) -> impl fmt::Display {
+ ""
+ }
+
+ fn suffix(&self) -> impl fmt::Display {
+ ""
+ }
}
struct FmtThreadName<'a> {
@@ -1288,11 +1484,11 @@ impl<'a> fmt::Display for FmtLevel<'a> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
if self.ansi {
match *self.level {
- Level::TRACE => write!(f, "{}", Colour::Purple.paint(TRACE_STR)),
- Level::DEBUG => write!(f, "{}", Colour::Blue.paint(DEBUG_STR)),
- Level::INFO => write!(f, "{}", Colour::Green.paint(INFO_STR)),
- Level::WARN => write!(f, "{}", Colour::Yellow.paint(WARN_STR)),
- Level::ERROR => write!(f, "{}", Colour::Red.paint(ERROR_STR)),
+ Level::TRACE => write!(f, "{}", Color::Purple.paint(TRACE_STR)),
+ Level::DEBUG => write!(f, "{}", Color::Blue.paint(DEBUG_STR)),
+ Level::INFO => write!(f, "{}", Color::Green.paint(INFO_STR)),
+ Level::WARN => write!(f, "{}", Color::Yellow.paint(WARN_STR)),
+ Level::ERROR => write!(f, "{}", Color::Red.paint(ERROR_STR)),
}
} else {
match *self.level {
@@ -1366,7 +1562,7 @@ impl<'a, F> fmt::Debug for FieldFnVisitor<'a, F> {
/// Configures what points in the span lifecycle are logged as events.
///
-/// See also [`with_span_events`](../struct.SubscriberBuilder.html#method.with_span_events).
+/// See also [`with_span_events`](super::SubscriberBuilder::with_span_events).
#[derive(Clone, Eq, PartialEq, Ord, PartialOrd)]
pub struct FmtSpan(u8);
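
As a brief usage sketch for `FmtSpan` (assuming the usual `tracing`/`tracing-subscriber` dependencies), span lifecycle events are enabled through the builder:

```rust
use tracing_subscriber::fmt::format::FmtSpan;

fn main() {
    tracing_subscriber::fmt()
        .with_span_events(FmtSpan::NEW | FmtSpan::CLOSE)
        .init();

    // One event is emitted when the span is created and another, including
    // timing information, when it closes.
    let _span = tracing::info_span!("request", id = 1).entered();
}
```
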
@@ -1528,7 +1724,9 @@ pub(super) mod test {
};
use super::*;
- use std::fmt;
+
+ use regex::Regex;
+ use std::{fmt, path::Path};
pub(crate) struct MockTime;
impl FormatTime for MockTime {
@@ -1550,7 +1748,7 @@ pub(super) mod test {
.with_thread_names(false);
#[cfg(feature = "ansi")]
let subscriber = subscriber.with_ansi(false);
- run_test(subscriber, make_writer, "hello\n")
+ assert_info_hello(subscriber, make_writer, "hello\n")
}
fn test_ansi<T>(
@@ -1598,6 +1796,124 @@ pub(super) mod test {
run_test(subscriber, make_writer, expected);
}
+ #[test]
+ fn with_line_number_and_file_name() {
+ let make_writer = MockMakeWriter::default();
+ let subscriber = crate::fmt::Subscriber::builder()
+ .with_writer(make_writer.clone())
+ .with_file(true)
+ .with_line_number(true)
+ .with_level(false)
+ .with_ansi(false)
+ .with_timer(MockTime);
+
+ let expected = Regex::new(&format!(
+ "^fake time tracing_subscriber::fmt::format::test: {}:[0-9]+: hello\n$",
+ current_path()
+ // if we're on Windows, the path might contain backslashes, which
+                // have to be escaped before compiling the regex.
+ .replace('\\', "\\\\")
+ ))
+ .unwrap();
+ let _default = set_default(&subscriber.into());
+ tracing::info!("hello");
+ let res = make_writer.get_string();
+ assert!(expected.is_match(&res));
+ }
+
+ #[test]
+ fn with_line_number() {
+ let make_writer = MockMakeWriter::default();
+ let subscriber = crate::fmt::Subscriber::builder()
+ .with_writer(make_writer.clone())
+ .with_line_number(true)
+ .with_level(false)
+ .with_ansi(false)
+ .with_timer(MockTime);
+
+ let expected =
+ Regex::new("^fake time tracing_subscriber::fmt::format::test: [0-9]+: hello\n$")
+ .unwrap();
+ let _default = set_default(&subscriber.into());
+ tracing::info!("hello");
+ let res = make_writer.get_string();
+ assert!(expected.is_match(&res));
+ }
+
+ #[test]
+ fn with_filename() {
+ let make_writer = MockMakeWriter::default();
+ let subscriber = crate::fmt::Subscriber::builder()
+ .with_writer(make_writer.clone())
+ .with_file(true)
+ .with_level(false)
+ .with_ansi(false)
+ .with_timer(MockTime);
+ let expected = &format!(
+ "fake time tracing_subscriber::fmt::format::test: {}: hello\n",
+ current_path(),
+ );
+ assert_info_hello(subscriber, make_writer, expected);
+ }
+
+ #[test]
+ fn with_thread_ids() {
+ let make_writer = MockMakeWriter::default();
+ let subscriber = crate::fmt::Subscriber::builder()
+ .with_writer(make_writer.clone())
+ .with_thread_ids(true)
+ .with_ansi(false)
+ .with_timer(MockTime);
+ let expected =
+ "fake time INFO ThreadId(NUMERIC) tracing_subscriber::fmt::format::test: hello\n";
+
+ assert_info_hello_ignore_numeric(subscriber, make_writer, expected);
+ }
+
+ #[test]
+ fn pretty_default() {
+ let make_writer = MockMakeWriter::default();
+ let subscriber = crate::fmt::Subscriber::builder()
+ .pretty()
+ .with_writer(make_writer.clone())
+ .with_ansi(false)
+ .with_timer(MockTime);
+ let expected = format!(
+ r#" fake time INFO tracing_subscriber::fmt::format::test: hello
+ at {}:NUMERIC
+
+"#,
+ file!()
+ );
+
+ assert_info_hello_ignore_numeric(subscriber, make_writer, &expected)
+ }
+
+ fn assert_info_hello(subscriber: impl Into<Dispatch>, buf: MockMakeWriter, expected: &str) {
+ let _default = set_default(&subscriber.into());
+ tracing::info!("hello");
+ let result = buf.get_string();
+
+ assert_eq!(expected, result)
+ }
+
+    // Numeric characters in the output are usually non-deterministic (they
+    // typically represent a thread ID or a line number), so this assertion
+    // replaces them with a placeholder before comparing.
+ fn assert_info_hello_ignore_numeric(
+ subscriber: impl Into<Dispatch>,
+ buf: MockMakeWriter,
+ expected: &str,
+ ) {
+ let _default = set_default(&subscriber.into());
+ tracing::info!("hello");
+
+ let regex = Regex::new("[0-9]+").unwrap();
+ let result = buf.get_string();
+ let result_cleaned = regex.replace_all(&result, "NUMERIC");
+
+ assert_eq!(expected, result_cleaned)
+ }
+
fn test_overridden_parents<T>(
expected: &str,
builder: crate::fmt::SubscriberBuilder<DefaultFields, Format<T>>,
@@ -1606,14 +1922,14 @@ pub(super) mod test {
T: Send + Sync + 'static,
{
let make_writer = MockMakeWriter::default();
- let collector = builder
+ let subscriber = builder
.with_writer(make_writer.clone())
.with_level(false)
.with_ansi(false)
.with_timer(MockTime)
.finish();
- with_default(collector, || {
+ with_default(subscriber, || {
let span1 = tracing::info_span!("span1", span = 1);
let span2 = tracing::info_span!(parent: &span1, "span2", span = 2);
tracing::info!(parent: &span2, "hello");
@@ -1659,6 +1975,21 @@ pub(super) mod test {
mod default {
use super::*;
+
+ #[test]
+ fn with_thread_ids() {
+ let make_writer = MockMakeWriter::default();
+ let subscriber = crate::fmt::Subscriber::builder()
+ .with_writer(make_writer.clone())
+ .with_thread_ids(true)
+ .with_ansi(false)
+ .with_timer(MockTime);
+ let expected =
+ "fake time INFO ThreadId(NUMERIC) tracing_subscriber::fmt::format::test: hello\n";
+
+ assert_info_hello_ignore_numeric(subscriber, make_writer, expected);
+ }
+
#[cfg(feature = "ansi")]
#[test]
fn with_ansi_true() {
@@ -1748,6 +2079,26 @@ pub(super) mod test {
}
}
+ mod pretty {
+ use super::*;
+
+ #[test]
+ fn pretty_default() {
+ let make_writer = MockMakeWriter::default();
+ let subscriber = crate::fmt::Subscriber::builder()
+ .pretty()
+ .with_writer(make_writer.clone())
+ .with_ansi(false)
+ .with_timer(MockTime);
+ let expected = format!(
+ " fake time INFO tracing_subscriber::fmt::format::test: hello\n at {}:NUMERIC\n\n",
+ file!()
+ );
+
+ assert_info_hello_ignore_numeric(subscriber, make_writer, &expected)
+ }
+ }
+
#[test]
fn format_nanos() {
fn fmt(t: u64) -> String {
@@ -1795,4 +2146,16 @@ pub(super) mod test {
assert!(!f.contains(FmtSpan::EXIT));
assert!(f.contains(FmtSpan::CLOSE));
}
+
+    /// Returns the crate-relative path of this test module's source file.
+ fn current_path() -> String {
+ Path::new("tracing-subscriber")
+ .join("src")
+ .join("fmt")
+ .join("format")
+ .join("mod.rs")
+ .to_str()
+ .expect("path must not contain invalid unicode")
+ .to_owned()
+ }
}
diff --git a/vendor/tracing-subscriber/src/fmt/format/pretty.rs b/vendor/tracing-subscriber/src/fmt/format/pretty.rs
new file mode 100644
index 000000000..12071de92
--- /dev/null
+++ b/vendor/tracing-subscriber/src/fmt/format/pretty.rs
@@ -0,0 +1,511 @@
+use super::*;
+use crate::{
+ field::{VisitFmt, VisitOutput},
+ fmt::fmt_layer::{FmtContext, FormattedFields},
+ registry::LookupSpan,
+};
+
+use std::fmt;
+use tracing_core::{
+ field::{self, Field},
+ Event, Level, Subscriber,
+};
+
+#[cfg(feature = "tracing-log")]
+use tracing_log::NormalizeEvent;
+
+use nu_ansi_term::{Color, Style};
+
+/// An excessively pretty, human-readable event formatter.
+///
+/// Unlike the [`Full`], [`Compact`], and [`Json`] formatters, this is a
+/// multi-line output format. Each individual event may output multiple lines of
+/// text.
+///
+/// # Example Output
+///
+/// <pre><font color="#4E9A06"><b>:;</b></font> <font color="#4E9A06">cargo</font> run --example fmt-pretty
+/// <font color="#4E9A06"><b> Finished</b></font> dev [unoptimized + debuginfo] target(s) in 0.08s
+/// <font color="#4E9A06"><b> Running</b></font> `target/debug/examples/fmt-pretty`
+/// 2022-02-15T18:44:24.535324Z <font color="#4E9A06"> INFO</font> <font color="#4E9A06"><b>fmt_pretty</b></font><font color="#4E9A06">: preparing to shave yaks, </font><font color="#4E9A06"><b>number_of_yaks</b></font><font color="#4E9A06">: 3</font>
+/// <font color="#AAAAAA"><i>at</i></font> examples/examples/fmt-pretty.rs:16 <font color="#AAAAAA"><i>on</i></font> main
+///
+/// 2022-02-15T18:44:24.535403Z <font color="#4E9A06"> INFO</font> <font color="#4E9A06"><b>fmt_pretty::yak_shave</b></font><font color="#4E9A06">: shaving yaks</font>
+/// <font color="#AAAAAA"><i>at</i></font> examples/examples/fmt/yak_shave.rs:41 <font color="#AAAAAA"><i>on</i></font> main
+/// <font color="#AAAAAA"><i>in</i></font> fmt_pretty::yak_shave::<b>shaving_yaks</b> <font color="#AAAAAA"><i>with</i></font> <b>yaks</b>: 3
+///
+/// 2022-02-15T18:44:24.535442Z <font color="#75507B">TRACE</font> <font color="#75507B"><b>fmt_pretty::yak_shave</b></font><font color="#75507B">: hello! I&apos;m gonna shave a yak, </font><font color="#75507B"><b>excitement</b></font><font color="#75507B">: &quot;yay!&quot;</font>
+/// <font color="#AAAAAA"><i>at</i></font> examples/examples/fmt/yak_shave.rs:16 <font color="#AAAAAA"><i>on</i></font> main
+/// <font color="#AAAAAA"><i>in</i></font> fmt_pretty::yak_shave::<b>shave</b> <font color="#AAAAAA"><i>with</i></font> <b>yak</b>: 1
+/// <font color="#AAAAAA"><i>in</i></font> fmt_pretty::yak_shave::<b>shaving_yaks</b> <font color="#AAAAAA"><i>with</i></font> <b>yaks</b>: 3
+///
+/// 2022-02-15T18:44:24.535469Z <font color="#75507B">TRACE</font> <font color="#75507B"><b>fmt_pretty::yak_shave</b></font><font color="#75507B">: yak shaved successfully</font>
+/// <font color="#AAAAAA"><i>at</i></font> examples/examples/fmt/yak_shave.rs:25 <font color="#AAAAAA"><i>on</i></font> main
+/// <font color="#AAAAAA"><i>in</i></font> fmt_pretty::yak_shave::<b>shave</b> <font color="#AAAAAA"><i>with</i></font> <b>yak</b>: 1
+/// <font color="#AAAAAA"><i>in</i></font> fmt_pretty::yak_shave::<b>shaving_yaks</b> <font color="#AAAAAA"><i>with</i></font> <b>yaks</b>: 3
+///
+/// 2022-02-15T18:44:24.535502Z <font color="#3465A4">DEBUG</font> <font color="#3465A4"><b>yak_events</b></font><font color="#3465A4">: </font><font color="#3465A4"><b>yak</b></font><font color="#3465A4">: 1, </font><font color="#3465A4"><b>shaved</b></font><font color="#3465A4">: true</font>
+/// <font color="#AAAAAA"><i>at</i></font> examples/examples/fmt/yak_shave.rs:46 <font color="#AAAAAA"><i>on</i></font> main
+/// <font color="#AAAAAA"><i>in</i></font> fmt_pretty::yak_shave::<b>shaving_yaks</b> <font color="#AAAAAA"><i>with</i></font> <b>yaks</b>: 3
+///
+/// 2022-02-15T18:44:24.535524Z <font color="#75507B">TRACE</font> <font color="#75507B"><b>fmt_pretty::yak_shave</b></font><font color="#75507B">: </font><font color="#75507B"><b>yaks_shaved</b></font><font color="#75507B">: 1</font>
+/// <font color="#AAAAAA"><i>at</i></font> examples/examples/fmt/yak_shave.rs:55 <font color="#AAAAAA"><i>on</i></font> main
+/// <font color="#AAAAAA"><i>in</i></font> fmt_pretty::yak_shave::<b>shaving_yaks</b> <font color="#AAAAAA"><i>with</i></font> <b>yaks</b>: 3
+///
+/// 2022-02-15T18:44:24.535551Z <font color="#75507B">TRACE</font> <font color="#75507B"><b>fmt_pretty::yak_shave</b></font><font color="#75507B">: hello! I&apos;m gonna shave a yak, </font><font color="#75507B"><b>excitement</b></font><font color="#75507B">: &quot;yay!&quot;</font>
+/// <font color="#AAAAAA"><i>at</i></font> examples/examples/fmt/yak_shave.rs:16 <font color="#AAAAAA"><i>on</i></font> main
+/// <font color="#AAAAAA"><i>in</i></font> fmt_pretty::yak_shave::<b>shave</b> <font color="#AAAAAA"><i>with</i></font> <b>yak</b>: 2
+/// <font color="#AAAAAA"><i>in</i></font> fmt_pretty::yak_shave::<b>shaving_yaks</b> <font color="#AAAAAA"><i>with</i></font> <b>yaks</b>: 3
+///
+/// 2022-02-15T18:44:24.535573Z <font color="#75507B">TRACE</font> <font color="#75507B"><b>fmt_pretty::yak_shave</b></font><font color="#75507B">: yak shaved successfully</font>
+/// <font color="#AAAAAA"><i>at</i></font> examples/examples/fmt/yak_shave.rs:25 <font color="#AAAAAA"><i>on</i></font> main
+/// <font color="#AAAAAA"><i>in</i></font> fmt_pretty::yak_shave::<b>shave</b> <font color="#AAAAAA"><i>with</i></font> <b>yak</b>: 2
+/// <font color="#AAAAAA"><i>in</i></font> fmt_pretty::yak_shave::<b>shaving_yaks</b> <font color="#AAAAAA"><i>with</i></font> <b>yaks</b>: 3
+///
+/// 2022-02-15T18:44:24.535600Z <font color="#3465A4">DEBUG</font> <font color="#3465A4"><b>yak_events</b></font><font color="#3465A4">: </font><font color="#3465A4"><b>yak</b></font><font color="#3465A4">: 2, </font><font color="#3465A4"><b>shaved</b></font><font color="#3465A4">: true</font>
+/// <font color="#AAAAAA"><i>at</i></font> examples/examples/fmt/yak_shave.rs:46 <font color="#AAAAAA"><i>on</i></font> main
+/// <font color="#AAAAAA"><i>in</i></font> fmt_pretty::yak_shave::<b>shaving_yaks</b> <font color="#AAAAAA"><i>with</i></font> <b>yaks</b>: 3
+///
+/// 2022-02-15T18:44:24.535618Z <font color="#75507B">TRACE</font> <font color="#75507B"><b>fmt_pretty::yak_shave</b></font><font color="#75507B">: </font><font color="#75507B"><b>yaks_shaved</b></font><font color="#75507B">: 2</font>
+/// <font color="#AAAAAA"><i>at</i></font> examples/examples/fmt/yak_shave.rs:55 <font color="#AAAAAA"><i>on</i></font> main
+/// <font color="#AAAAAA"><i>in</i></font> fmt_pretty::yak_shave::<b>shaving_yaks</b> <font color="#AAAAAA"><i>with</i></font> <b>yaks</b>: 3
+///
+/// 2022-02-15T18:44:24.535644Z <font color="#75507B">TRACE</font> <font color="#75507B"><b>fmt_pretty::yak_shave</b></font><font color="#75507B">: hello! I&apos;m gonna shave a yak, </font><font color="#75507B"><b>excitement</b></font><font color="#75507B">: &quot;yay!&quot;</font>
+/// <font color="#AAAAAA"><i>at</i></font> examples/examples/fmt/yak_shave.rs:16 <font color="#AAAAAA"><i>on</i></font> main
+/// <font color="#AAAAAA"><i>in</i></font> fmt_pretty::yak_shave::<b>shave</b> <font color="#AAAAAA"><i>with</i></font> <b>yak</b>: 3
+/// <font color="#AAAAAA"><i>in</i></font> fmt_pretty::yak_shave::<b>shaving_yaks</b> <font color="#AAAAAA"><i>with</i></font> <b>yaks</b>: 3
+///
+/// 2022-02-15T18:44:24.535670Z <font color="#C4A000"> WARN</font> <font color="#C4A000"><b>fmt_pretty::yak_shave</b></font><font color="#C4A000">: could not locate yak</font>
+/// <font color="#AAAAAA"><i>at</i></font> examples/examples/fmt/yak_shave.rs:18 <font color="#AAAAAA"><i>on</i></font> main
+/// <font color="#AAAAAA"><i>in</i></font> fmt_pretty::yak_shave::<b>shave</b> <font color="#AAAAAA"><i>with</i></font> <b>yak</b>: 3
+/// <font color="#AAAAAA"><i>in</i></font> fmt_pretty::yak_shave::<b>shaving_yaks</b> <font color="#AAAAAA"><i>with</i></font> <b>yaks</b>: 3
+///
+/// 2022-02-15T18:44:24.535698Z <font color="#3465A4">DEBUG</font> <font color="#3465A4"><b>yak_events</b></font><font color="#3465A4">: </font><font color="#3465A4"><b>yak</b></font><font color="#3465A4">: 3, </font><font color="#3465A4"><b>shaved</b></font><font color="#3465A4">: false</font>
+/// <font color="#AAAAAA"><i>at</i></font> examples/examples/fmt/yak_shave.rs:46 <font color="#AAAAAA"><i>on</i></font> main
+/// <font color="#AAAAAA"><i>in</i></font> fmt_pretty::yak_shave::<b>shaving_yaks</b> <font color="#AAAAAA"><i>with</i></font> <b>yaks</b>: 3
+///
+/// 2022-02-15T18:44:24.535720Z <font color="#CC0000">ERROR</font> <font color="#CC0000"><b>fmt_pretty::yak_shave</b></font><font color="#CC0000">: failed to shave yak, </font><font color="#CC0000"><b>yak</b></font><font color="#CC0000">: 3, </font><font color="#CC0000"><b>error</b></font><font color="#CC0000">: missing yak, </font><font color="#CC0000"><b>error.sources</b></font><font color="#CC0000">: [out of space, out of cash]</font>
+/// <font color="#AAAAAA"><i>at</i></font> examples/examples/fmt/yak_shave.rs:51 <font color="#AAAAAA"><i>on</i></font> main
+/// <font color="#AAAAAA"><i>in</i></font> fmt_pretty::yak_shave::<b>shaving_yaks</b> <font color="#AAAAAA"><i>with</i></font> <b>yaks</b>: 3
+///
+/// 2022-02-15T18:44:24.535742Z <font color="#75507B">TRACE</font> <font color="#75507B"><b>fmt_pretty::yak_shave</b></font><font color="#75507B">: </font><font color="#75507B"><b>yaks_shaved</b></font><font color="#75507B">: 2</font>
+/// <font color="#AAAAAA"><i>at</i></font> examples/examples/fmt/yak_shave.rs:55 <font color="#AAAAAA"><i>on</i></font> main
+/// <font color="#AAAAAA"><i>in</i></font> fmt_pretty::yak_shave::<b>shaving_yaks</b> <font color="#AAAAAA"><i>with</i></font> <b>yaks</b>: 3
+///
+/// 2022-02-15T18:44:24.535765Z <font color="#4E9A06"> INFO</font> <font color="#4E9A06"><b>fmt_pretty</b></font><font color="#4E9A06">: yak shaving completed, </font><font color="#4E9A06"><b>all_yaks_shaved</b></font><font color="#4E9A06">: false</font>
+/// <font color="#AAAAAA"><i>at</i></font> examples/examples/fmt-pretty.rs:19 <font color="#AAAAAA"><i>on</i></font> main
+/// </pre>
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct Pretty {
+ display_location: bool,
+}
+
+/// The [visitor] produced by [`Pretty`]'s [`MakeVisitor`] implementation.
+///
+/// [visitor]: field::Visit
+/// [`MakeVisitor`]: crate::field::MakeVisitor
+#[derive(Debug)]
+pub struct PrettyVisitor<'a> {
+ writer: Writer<'a>,
+ is_empty: bool,
+ style: Style,
+ result: fmt::Result,
+}
+
+/// An excessively pretty, human-readable [`MakeVisitor`] implementation.
+///
+/// [`MakeVisitor`]: crate::field::MakeVisitor
+#[derive(Debug)]
+pub struct PrettyFields {
+ /// A value to override the provided `Writer`'s ANSI formatting
+ /// configuration.
+ ///
+ /// If this is `Some`, we override the `Writer`'s ANSI setting. This is
+ /// necessary in order to continue supporting the deprecated
+ /// `PrettyFields::with_ansi` method. If it is `None`, we don't override the
+ /// ANSI formatting configuration (because the deprecated method was not
+ /// called).
+ // TODO: when `PrettyFields::with_ansi` is removed, we can get rid
+ // of this entirely.
+ ansi: Option<bool>,
+}
+
+// === impl Pretty ===
+
+impl Default for Pretty {
+ fn default() -> Self {
+ Self {
+ display_location: true,
+ }
+ }
+}
+
+impl Pretty {
+ fn style_for(level: &Level) -> Style {
+ match *level {
+ Level::TRACE => Style::new().fg(Color::Purple),
+ Level::DEBUG => Style::new().fg(Color::Blue),
+ Level::INFO => Style::new().fg(Color::Green),
+ Level::WARN => Style::new().fg(Color::Yellow),
+ Level::ERROR => Style::new().fg(Color::Red),
+ }
+ }
+
+ /// Sets whether the event's source code location is displayed.
+ ///
+ /// This defaults to `true`.
+ #[deprecated(
+ since = "0.3.6",
+ note = "all formatters now support configurable source locations. Use `Format::with_source_location` instead."
+ )]
+ pub fn with_source_location(self, display_location: bool) -> Self {
+ Self {
+ display_location,
+ ..self
+ }
+ }
+}
+
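
A hedged migration sketch for the deprecation above: rather than calling `Pretty::with_source_location`, the source location is configured on the builder (or via `Format::with_source_location`):

```rust
fn main() {
    tracing_subscriber::fmt()
        .pretty()
        // Replaces the deprecated `Pretty::with_source_location(false)`.
        .with_file(false)
        .with_line_number(false)
        .init();

    tracing::info!("pretty output without the `at <file>:<line>` line");
}
```
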
+impl<C, N, T> FormatEvent<C, N> for Format<Pretty, T>
+where
+ C: Subscriber + for<'a> LookupSpan<'a>,
+ N: for<'a> FormatFields<'a> + 'static,
+ T: FormatTime,
+{
+ fn format_event(
+ &self,
+ ctx: &FmtContext<'_, C, N>,
+ mut writer: Writer<'_>,
+ event: &Event<'_>,
+ ) -> fmt::Result {
+ #[cfg(feature = "tracing-log")]
+ let normalized_meta = event.normalized_metadata();
+ #[cfg(feature = "tracing-log")]
+ let meta = normalized_meta.as_ref().unwrap_or_else(|| event.metadata());
+ #[cfg(not(feature = "tracing-log"))]
+ let meta = event.metadata();
+ write!(&mut writer, " ")?;
+
+ // if the `Format` struct *also* has an ANSI color configuration,
+ // override the writer...the API for configuring ANSI color codes on the
+ // `Format` struct is deprecated, but we still need to honor those
+ // configurations.
+ if let Some(ansi) = self.ansi {
+ writer = writer.with_ansi(ansi);
+ }
+
+ self.format_timestamp(&mut writer)?;
+
+ let style = if self.display_level && writer.has_ansi_escapes() {
+ Pretty::style_for(meta.level())
+ } else {
+ Style::new()
+ };
+
+ if self.display_level {
+ write!(
+ writer,
+ "{} ",
+ super::FmtLevel::new(meta.level(), writer.has_ansi_escapes())
+ )?;
+ }
+
+ if self.display_target {
+ let target_style = if writer.has_ansi_escapes() {
+ style.bold()
+ } else {
+ style
+ };
+ write!(
+ writer,
+ "{}{}{}:",
+ target_style.prefix(),
+ meta.target(),
+ target_style.infix(style)
+ )?;
+ }
+ let line_number = if self.display_line_number {
+ meta.line()
+ } else {
+ None
+ };
+
+ // If the file name is disabled, format the line number right after the
+ // target. Otherwise, if we also display the file, it'll go on a
+ // separate line.
+ if let (Some(line_number), false, true) = (
+ line_number,
+ self.display_filename,
+ self.format.display_location,
+ ) {
+ write!(
+ writer,
+ "{}{}{}:",
+ style.prefix(),
+ line_number,
+ style.infix(style)
+ )?;
+ }
+
+ writer.write_char(' ')?;
+
+ let mut v = PrettyVisitor::new(writer.by_ref(), true).with_style(style);
+ event.record(&mut v);
+ v.finish()?;
+ writer.write_char('\n')?;
+
+ let dimmed = if writer.has_ansi_escapes() {
+ Style::new().dimmed().italic()
+ } else {
+ Style::new()
+ };
+ let thread = self.display_thread_name || self.display_thread_id;
+
+ if let (Some(file), true, true) = (
+ meta.file(),
+ self.format.display_location,
+ self.display_filename,
+ ) {
+ write!(writer, " {} {}", dimmed.paint("at"), file,)?;
+
+ if let Some(line) = line_number {
+ write!(writer, ":{}", line)?;
+ }
+ writer.write_char(if thread { ' ' } else { '\n' })?;
+ } else if thread {
+ write!(writer, " ")?;
+ };
+
+ if thread {
+ write!(writer, "{} ", dimmed.paint("on"))?;
+ let thread = std::thread::current();
+ if self.display_thread_name {
+ if let Some(name) = thread.name() {
+ write!(writer, "{}", name)?;
+ if self.display_thread_id {
+ writer.write_char(' ')?;
+ }
+ }
+ }
+ if self.display_thread_id {
+ write!(writer, "{:?}", thread.id())?;
+ }
+ writer.write_char('\n')?;
+ }
+
+ let bold = writer.bold();
+ let span = event
+ .parent()
+ .and_then(|id| ctx.span(id))
+ .or_else(|| ctx.lookup_current());
+
+ let scope = span.into_iter().flat_map(|span| span.scope());
+
+ for span in scope {
+ let meta = span.metadata();
+ if self.display_target {
+ write!(
+ writer,
+ " {} {}::{}",
+ dimmed.paint("in"),
+ meta.target(),
+ bold.paint(meta.name()),
+ )?;
+ } else {
+ write!(
+ writer,
+ " {} {}",
+ dimmed.paint("in"),
+ bold.paint(meta.name()),
+ )?;
+ }
+
+ let ext = span.extensions();
+ let fields = &ext
+ .get::<FormattedFields<N>>()
+ .expect("Unable to find FormattedFields in extensions; this is a bug");
+ if !fields.is_empty() {
+ write!(writer, " {} {}", dimmed.paint("with"), fields)?;
+ }
+ writer.write_char('\n')?;
+ }
+
+ writer.write_char('\n')
+ }
+}
+
+impl<'writer> FormatFields<'writer> for Pretty {
+ fn format_fields<R: RecordFields>(&self, writer: Writer<'writer>, fields: R) -> fmt::Result {
+ let mut v = PrettyVisitor::new(writer, true);
+ fields.record(&mut v);
+ v.finish()
+ }
+
+ fn add_fields(
+ &self,
+ current: &'writer mut FormattedFields<Self>,
+ fields: &span::Record<'_>,
+ ) -> fmt::Result {
+ let empty = current.is_empty();
+ let writer = current.as_writer();
+ let mut v = PrettyVisitor::new(writer, empty);
+ fields.record(&mut v);
+ v.finish()
+ }
+}
+
+// === impl PrettyFields ===
+
+impl Default for PrettyFields {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+
+impl PrettyFields {
+ /// Returns a new default [`PrettyFields`] implementation.
+ pub fn new() -> Self {
+ // By default, don't override the `Writer`'s ANSI colors
+ // configuration. We'll only do this if the user calls the
+ // deprecated `PrettyFields::with_ansi` method.
+ Self { ansi: None }
+ }
+
+ /// Enable ANSI encoding for formatted fields.
+ #[deprecated(
+ since = "0.3.3",
+ note = "Use `fmt::Subscriber::with_ansi` or `fmt::Layer::with_ansi` instead."
+ )]
+ pub fn with_ansi(self, ansi: bool) -> Self {
+ Self {
+ ansi: Some(ansi),
+ ..self
+ }
+ }
+}
+
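
A sketch of the replacement suggested by the deprecation note on `PrettyFields::with_ansi` (assuming the default `ansi` feature is enabled): configure ANSI output on the subscriber or layer instead.

```rust
fn main() {
    tracing_subscriber::fmt()
        .pretty()
        .with_ansi(false) // replaces `PrettyFields::with_ansi(false)`
        .init();

    tracing::info!("pretty output without ANSI color codes");
}
```
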
+impl<'a> MakeVisitor<Writer<'a>> for PrettyFields {
+ type Visitor = PrettyVisitor<'a>;
+
+ #[inline]
+ fn make_visitor(&self, mut target: Writer<'a>) -> Self::Visitor {
+ if let Some(ansi) = self.ansi {
+ target = target.with_ansi(ansi);
+ }
+ PrettyVisitor::new(target, true)
+ }
+}
+
+// === impl PrettyVisitor ===
+
+impl<'a> PrettyVisitor<'a> {
+ /// Returns a new default visitor that formats to the provided `writer`.
+ ///
+ /// # Arguments
+ /// - `writer`: the writer to format to.
+ /// - `is_empty`: whether or not any fields have been previously written to
+ /// that writer.
+ pub fn new(writer: Writer<'a>, is_empty: bool) -> Self {
+ Self {
+ writer,
+ is_empty,
+ style: Style::default(),
+ result: Ok(()),
+ }
+ }
+
+ pub(crate) fn with_style(self, style: Style) -> Self {
+ Self { style, ..self }
+ }
+
+ fn write_padded(&mut self, value: &impl fmt::Debug) {
+ let padding = if self.is_empty {
+ self.is_empty = false;
+ ""
+ } else {
+ ", "
+ };
+ self.result = write!(self.writer, "{}{:?}", padding, value);
+ }
+
+ fn bold(&self) -> Style {
+ if self.writer.has_ansi_escapes() {
+ self.style.bold()
+ } else {
+ Style::new()
+ }
+ }
+}
+
+impl<'a> field::Visit for PrettyVisitor<'a> {
+ fn record_str(&mut self, field: &Field, value: &str) {
+ if self.result.is_err() {
+ return;
+ }
+
+ if field.name() == "message" {
+ self.record_debug(field, &format_args!("{}", value))
+ } else {
+ self.record_debug(field, &value)
+ }
+ }
+
+ fn record_error(&mut self, field: &Field, value: &(dyn std::error::Error + 'static)) {
+ if let Some(source) = value.source() {
+ let bold = self.bold();
+ self.record_debug(
+ field,
+ &format_args!(
+ "{}, {}{}.sources{}: {}",
+ value,
+ bold.prefix(),
+ field,
+ bold.infix(self.style),
+ ErrorSourceList(source),
+ ),
+ )
+ } else {
+ self.record_debug(field, &format_args!("{}", value))
+ }
+ }
+
+ fn record_debug(&mut self, field: &Field, value: &dyn fmt::Debug) {
+ if self.result.is_err() {
+ return;
+ }
+ let bold = self.bold();
+ match field.name() {
+ "message" => self.write_padded(&format_args!("{}{:?}", self.style.prefix(), value,)),
+ // Skip fields that are actually log metadata that have already been handled
+ #[cfg(feature = "tracing-log")]
+ name if name.starts_with("log.") => self.result = Ok(()),
+ name if name.starts_with("r#") => self.write_padded(&format_args!(
+ "{}{}{}: {:?}",
+ bold.prefix(),
+ &name[2..],
+ bold.infix(self.style),
+ value
+ )),
+ name => self.write_padded(&format_args!(
+ "{}{}{}: {:?}",
+ bold.prefix(),
+ name,
+ bold.infix(self.style),
+ value
+ )),
+ };
+ }
+}
+
+impl<'a> VisitOutput<fmt::Result> for PrettyVisitor<'a> {
+ fn finish(mut self) -> fmt::Result {
+ write!(&mut self.writer, "{}", self.style.suffix())?;
+ self.result
+ }
+}
+
+impl<'a> VisitFmt for PrettyVisitor<'a> {
+ fn writer(&mut self) -> &mut dyn fmt::Write {
+ &mut self.writer
+ }
+}
diff --git a/vendor/tracing-subscriber-0.3.3/src/fmt/mod.rs b/vendor/tracing-subscriber/src/fmt/mod.rs
index d5deb8f0c..025e17504 100644
--- a/vendor/tracing-subscriber-0.3.3/src/fmt/mod.rs
+++ b/vendor/tracing-subscriber/src/fmt/mod.rs
@@ -13,10 +13,12 @@
//!
//! ```toml
//! [dependencies]
-//! tracing-subscriber = "0.2"
+//! tracing-subscriber = "0.3"
//! ```
//!
-//! *Compiler support: requires rustc 1.39+*
+//! *Compiler support: [requires `rustc` 1.49+][msrv]*
+//!
+//! [msrv]: super#supported-rust-versions
//!
//! Add the following to your executable to initialize the default subscriber:
//! ```rust
@@ -68,132 +70,25 @@
//!
//! * [`format::Full`]: The default formatter. This emits human-readable,
//! single-line logs for each event that occurs, with the current span context
-//! displayed before the formatted representation of the event.
+//! displayed before the formatted representation of the event. See
+//! [here](format::Full#example-output) for sample output.
//!
-//! For example:
-//! <pre><font color="#4E9A06"><b> Finished</b></font> dev [unoptimized + debuginfo] target(s) in 1.59s
-//! <font color="#4E9A06"><b> Running</b></font> `target/debug/examples/fmt`
-//! <font color="#AAAAAA">Oct 24 12:55:47.814 </font><font color="#4E9A06"> INFO</font> fmt: preparing to shave yaks number_of_yaks=3
-//! <font color="#AAAAAA">Oct 24 12:55:47.814 </font><font color="#4E9A06"> INFO</font> <b>shaving_yaks{</b>yaks=3<b>}</b>: fmt::yak_shave: shaving yaks
-//! <font color="#AAAAAA">Oct 24 12:55:47.814 </font><font color="#75507B">TRACE</font> <b>shaving_yaks{</b>yaks=3<b>}</b>:<b>shave{</b>yak=1<b>}</b>: fmt::yak_shave: hello! I&apos;m gonna shave a yak excitement=&quot;yay!&quot;
-//! <font color="#AAAAAA">Oct 24 12:55:47.814 </font><font color="#75507B">TRACE</font> <b>shaving_yaks{</b>yaks=3<b>}</b>:<b>shave{</b>yak=1<b>}</b>: fmt::yak_shave: yak shaved successfully
-//! <font color="#AAAAAA">Oct 24 12:55:47.814 </font><font color="#3465A4">DEBUG</font> <b>shaving_yaks{</b>yaks=3<b>}</b>: yak_events: yak=1 shaved=true
-//! <font color="#AAAAAA">Oct 24 12:55:47.814 </font><font color="#75507B">TRACE</font> <b>shaving_yaks{</b>yaks=3<b>}</b>: fmt::yak_shave: yaks_shaved=1
-//! <font color="#AAAAAA">Oct 24 12:55:47.815 </font><font color="#75507B">TRACE</font> <b>shaving_yaks{</b>yaks=3<b>}</b>:<b>shave{</b>yak=2<b>}</b>: fmt::yak_shave: hello! I&apos;m gonna shave a yak excitement=&quot;yay!&quot;
-//! <font color="#AAAAAA">Oct 24 12:55:47.815 </font><font color="#75507B">TRACE</font> <b>shaving_yaks{</b>yaks=3<b>}</b>:<b>shave{</b>yak=2<b>}</b>: fmt::yak_shave: yak shaved successfully
-//! <font color="#AAAAAA">Oct 24 12:55:47.815 </font><font color="#3465A4">DEBUG</font> <b>shaving_yaks{</b>yaks=3<b>}</b>: yak_events: yak=2 shaved=true
-//! <font color="#AAAAAA">Oct 24 12:55:47.815 </font><font color="#75507B">TRACE</font> <b>shaving_yaks{</b>yaks=3<b>}</b>: fmt::yak_shave: yaks_shaved=2
-//! <font color="#AAAAAA">Oct 24 12:55:47.815 </font><font color="#75507B">TRACE</font> <b>shaving_yaks{</b>yaks=3<b>}</b>:<b>shave{</b>yak=3<b>}</b>: fmt::yak_shave: hello! I&apos;m gonna shave a yak excitement=&quot;yay!&quot;
-//! <font color="#AAAAAA">Oct 24 12:55:47.815 </font><font color="#C4A000"> WARN</font> <b>shaving_yaks{</b>yaks=3<b>}</b>:<b>shave{</b>yak=3<b>}</b>: fmt::yak_shave: could not locate yak
-//! <font color="#AAAAAA">Oct 24 12:55:47.815 </font><font color="#3465A4">DEBUG</font> <b>shaving_yaks{</b>yaks=3<b>}</b>: yak_events: yak=3 shaved=false
-//! <font color="#AAAAAA">Oct 24 12:55:47.815 </font><font color="#CC0000">ERROR</font> <b>shaving_yaks{</b>yaks=3<b>}</b>: fmt::yak_shave: failed to shave yak yak=3 error=missing yak
-//! <font color="#AAAAAA">Oct 24 12:55:47.815 </font><font color="#75507B">TRACE</font> <b>shaving_yaks{</b>yaks=3<b>}</b>: fmt::yak_shave: yaks_shaved=2
-//! <font color="#AAAAAA">Oct 24 12:55:47.815 </font><font color="#4E9A06"> INFO</font> fmt: yak shaving completed all_yaks_shaved=false
-//! </pre>
+//! * [`format::Compact`]: A variant of the default formatter, optimized for
+//! short line lengths. Fields from the current span context are appended to
+//! the fields of the formatted event. See
+//! [here](format::Compact#example-output) for sample output.
//!
//! * [`format::Pretty`]: Emits excessively pretty, multi-line logs, optimized
//! for human readability. This is primarily intended to be used in local
//! development and debugging, or for command-line applications, where
//! automated analysis and compact storage of logs is less of a priority than
-//! readability and visual appeal.
-//!
-//! For example:
-//! <pre><font color="#4E9A06"><b> Finished</b></font> dev [unoptimized + debuginfo] target(s) in 1.61s
-//! <font color="#4E9A06"><b> Running</b></font> `target/debug/examples/fmt-pretty`
-//! Oct 24 12:57:29.386 <font color="#4E9A06"><b>fmt_pretty</b></font><font color="#4E9A06">: preparing to shave yaks, </font><font color="#4E9A06"><b>number_of_yaks</b></font><font color="#4E9A06">: 3</font>
-//! <font color="#AAAAAA"><i>at</i></font> examples/examples/fmt-pretty.rs:16<font color="#AAAAAA"><i> on</i></font> main
-//!
-//! Oct 24 12:57:29.386 <font color="#4E9A06"><b>fmt_pretty::yak_shave</b></font><font color="#4E9A06">: shaving yaks</font>
-//! <font color="#AAAAAA"><i>at</i></font> examples/examples/fmt/yak_shave.rs:38<font color="#AAAAAA"><i> on</i></font> main
-//! <font color="#AAAAAA"><i>in</i></font> fmt_pretty::yak_shave::<b>shaving_yaks</b> <font color="#AAAAAA"><i>with</i></font> <b>yaks</b>: 3
-//!
-//! Oct 24 12:57:29.387 <font color="#75507B"><b>fmt_pretty::yak_shave</b></font><font color="#75507B">: hello! I&apos;m gonna shave a yak, </font><font color="#75507B"><b>excitement</b></font><font color="#75507B">: &quot;yay!&quot;</font>
-//! <font color="#AAAAAA"><i>at</i></font> examples/examples/fmt/yak_shave.rs:14<font color="#AAAAAA"><i> on</i></font> main
-//! <font color="#AAAAAA"><i>in</i></font> fmt_pretty::yak_shave::<b>shave</b> <font color="#AAAAAA"><i>with</i></font> <b>yak</b>: 1
-//! <font color="#AAAAAA"><i>in</i></font> fmt_pretty::yak_shave::<b>shaving_yaks</b> <font color="#AAAAAA"><i>with</i></font> <b>yaks</b>: 3
-//!
-//! Oct 24 12:57:29.387 <font color="#75507B"><b>fmt_pretty::yak_shave</b></font><font color="#75507B">: yak shaved successfully</font>
-//! <font color="#AAAAAA"><i>at</i></font> examples/examples/fmt/yak_shave.rs:22<font color="#AAAAAA"><i> on</i></font> main
-//! <font color="#AAAAAA"><i>in</i></font> fmt_pretty::yak_shave::<b>shave</b> <font color="#AAAAAA"><i>with</i></font> <b>yak</b>: 1
-//! <font color="#AAAAAA"><i>in</i></font> fmt_pretty::yak_shave::<b>shaving_yaks</b> <font color="#AAAAAA"><i>with</i></font> <b>yaks</b>: 3
-//!
-//! Oct 24 12:57:29.387 <font color="#3465A4"><b>yak_events</b></font><font color="#3465A4">: </font><font color="#3465A4"><b>yak</b></font><font color="#3465A4">: 1, </font><font color="#3465A4"><b>shaved</b></font><font color="#3465A4">: true</font>
-//! <font color="#AAAAAA"><i>at</i></font> examples/examples/fmt/yak_shave.rs:43<font color="#AAAAAA"><i> on</i></font> main
-//! <font color="#AAAAAA"><i>in</i></font> fmt_pretty::yak_shave::<b>shaving_yaks</b> <font color="#AAAAAA"><i>with</i></font> <b>yaks</b>: 3
-//!
-//! Oct 24 12:57:29.387 <font color="#75507B"><b>fmt_pretty::yak_shave</b></font><font color="#75507B">: </font><font color="#75507B"><b>yaks_shaved</b></font><font color="#75507B">: 1</font>
-//! <font color="#AAAAAA"><i>at</i></font> examples/examples/fmt/yak_shave.rs:52<font color="#AAAAAA"><i> on</i></font> main
-//! <font color="#AAAAAA"><i>in</i></font> fmt_pretty::yak_shave::<b>shaving_yaks</b> <font color="#AAAAAA"><i>with</i></font> <b>yaks</b>: 3
-//!
-//! Oct 24 12:57:29.387 <font color="#75507B"><b>fmt_pretty::yak_shave</b></font><font color="#75507B">: hello! I&apos;m gonna shave a yak, </font><font color="#75507B"><b>excitement</b></font><font color="#75507B">: &quot;yay!&quot;</font>
-//! <font color="#AAAAAA"><i>at</i></font> examples/examples/fmt/yak_shave.rs:14<font color="#AAAAAA"><i> on</i></font> main
-//! <font color="#AAAAAA"><i>in</i></font> fmt_pretty::yak_shave::<b>shave</b> <font color="#AAAAAA"><i>with</i></font> <b>yak</b>: 2
-//! <font color="#AAAAAA"><i>in</i></font> fmt_pretty::yak_shave::<b>shaving_yaks</b> <font color="#AAAAAA"><i>with</i></font> <b>yaks</b>: 3
-//!
-//! Oct 24 12:57:29.387 <font color="#75507B"><b>fmt_pretty::yak_shave</b></font><font color="#75507B">: yak shaved successfully</font>
-//! <font color="#AAAAAA"><i>at</i></font> examples/examples/fmt/yak_shave.rs:22<font color="#AAAAAA"><i> on</i></font> main
-//! <font color="#AAAAAA"><i>in</i></font> fmt_pretty::yak_shave::<b>shave</b> <font color="#AAAAAA"><i>with</i></font> <b>yak</b>: 2
-//! <font color="#AAAAAA"><i>in</i></font> fmt_pretty::yak_shave::<b>shaving_yaks</b> <font color="#AAAAAA"><i>with</i></font> <b>yaks</b>: 3
-//!
-//! Oct 24 12:57:29.387 <font color="#3465A4"><b>yak_events</b></font><font color="#3465A4">: </font><font color="#3465A4"><b>yak</b></font><font color="#3465A4">: 2, </font><font color="#3465A4"><b>shaved</b></font><font color="#3465A4">: true</font>
-//! <font color="#AAAAAA"><i>at</i></font> examples/examples/fmt/yak_shave.rs:43<font color="#AAAAAA"><i> on</i></font> main
-//! <font color="#AAAAAA"><i>in</i></font> fmt_pretty::yak_shave::<b>shaving_yaks</b> <font color="#AAAAAA"><i>with</i></font> <b>yaks</b>: 3
-//!
-//! Oct 24 12:57:29.387 <font color="#75507B"><b>fmt_pretty::yak_shave</b></font><font color="#75507B">: </font><font color="#75507B"><b>yaks_shaved</b></font><font color="#75507B">: 2</font>
-//! <font color="#AAAAAA"><i>at</i></font> examples/examples/fmt/yak_shave.rs:52<font color="#AAAAAA"><i> on</i></font> main
-//! <font color="#AAAAAA"><i>in</i></font> fmt_pretty::yak_shave::<b>shaving_yaks</b> <font color="#AAAAAA"><i>with</i></font> <b>yaks</b>: 3
-//!
-//! Oct 24 12:57:29.387 <font color="#75507B"><b>fmt_pretty::yak_shave</b></font><font color="#75507B">: hello! I&apos;m gonna shave a yak, </font><font color="#75507B"><b>excitement</b></font><font color="#75507B">: &quot;yay!&quot;</font>
-//! <font color="#AAAAAA"><i>at</i></font> examples/examples/fmt/yak_shave.rs:14<font color="#AAAAAA"><i> on</i></font> main
-//! <font color="#AAAAAA"><i>in</i></font> fmt_pretty::yak_shave::<b>shave</b> <font color="#AAAAAA"><i>with</i></font> <b>yak</b>: 3
-//! <font color="#AAAAAA"><i>in</i></font> fmt_pretty::yak_shave::<b>shaving_yaks</b> <font color="#AAAAAA"><i>with</i></font> <b>yaks</b>: 3
-//!
-//! Oct 24 12:57:29.387 <font color="#C4A000"><b>fmt_pretty::yak_shave</b></font><font color="#C4A000">: could not locate yak</font>
-//! <font color="#AAAAAA"><i>at</i></font> examples/examples/fmt/yak_shave.rs:16<font color="#AAAAAA"><i> on</i></font> main
-//! <font color="#AAAAAA"><i>in</i></font> fmt_pretty::yak_shave::<b>shave</b> <font color="#AAAAAA"><i>with</i></font> <b>yak</b>: 3
-//! <font color="#AAAAAA"><i>in</i></font> fmt_pretty::yak_shave::<b>shaving_yaks</b> <font color="#AAAAAA"><i>with</i></font> <b>yaks</b>: 3
-//!
-//! Oct 24 12:57:29.387 <font color="#3465A4"><b>yak_events</b></font><font color="#3465A4">: </font><font color="#3465A4"><b>yak</b></font><font color="#3465A4">: 3, </font><font color="#3465A4"><b>shaved</b></font><font color="#3465A4">: false</font>
-//! <font color="#AAAAAA"><i>at</i></font> examples/examples/fmt/yak_shave.rs:43<font color="#AAAAAA"><i> on</i></font> main
-//! <font color="#AAAAAA"><i>in</i></font> fmt_pretty::yak_shave::<b>shaving_yaks</b> <font color="#AAAAAA"><i>with</i></font> <b>yaks</b>: 3
-//!
-//! Oct 24 12:57:29.387 <font color="#CC0000"><b>fmt_pretty::yak_shave</b></font><font color="#CC0000">: failed to shave yak, </font><font color="#CC0000"><b>yak</b></font><font color="#CC0000">: 3, </font><font color="#CC0000"><b>error</b></font><font color="#CC0000">: missing yak</font>
-//! <font color="#AAAAAA"><i>at</i></font> examples/examples/fmt/yak_shave.rs:48<font color="#AAAAAA"><i> on</i></font> main
-//! <font color="#AAAAAA"><i>in</i></font> fmt_pretty::yak_shave::<b>shaving_yaks</b> <font color="#AAAAAA"><i>with</i></font> <b>yaks</b>: 3
-//!
-//! Oct 24 12:57:29.387 <font color="#75507B"><b>fmt_pretty::yak_shave</b></font><font color="#75507B">: </font><font color="#75507B"><b>yaks_shaved</b></font><font color="#75507B">: 2</font>
-//! <font color="#AAAAAA"><i>at</i></font> examples/examples/fmt/yak_shave.rs:52<font color="#AAAAAA"><i> on</i></font> main
-//! <font color="#AAAAAA"><i>in</i></font> fmt_pretty::yak_shave::<b>shaving_yaks</b> <font color="#AAAAAA"><i>with</i></font> <b>yaks</b>: 3
-//!
-//! Oct 24 12:57:29.387 <font color="#4E9A06"><b>fmt_pretty</b></font><font color="#4E9A06">: yak shaving completed, </font><font color="#4E9A06"><b>all_yaks_shaved</b></font><font color="#4E9A06">: false</font>
-//! <font color="#AAAAAA"><i>at</i></font> examples/examples/fmt-pretty.rs:19<font color="#AAAAAA"><i> on</i></font> main
-//! </pre>
+//! readability and visual appeal. See [here](format::Pretty#example-output)
+//! for sample output.
//!
//! * [`format::Json`]: Outputs newline-delimited JSON logs. This is intended
//! for production use with systems where structured logs are consumed as JSON
-//! by analysis and viewing tools. The JSON output, as seen below, is *not*
-//! optimized for human readability.
-//!
-//! For example:
-//! <pre><font color="#4E9A06"><b> Finished</b></font> dev [unoptimized + debuginfo] target(s) in 1.58s
-//! <font color="#4E9A06"><b> Running</b></font> `target/debug/examples/fmt-json`
-//! {&quot;timestamp&quot;:&quot;Oct 24 13:00:00.873&quot;,&quot;level&quot;:&quot;INFO&quot;,&quot;fields&quot;:{&quot;message&quot;:&quot;preparing to shave yaks&quot;,&quot;number_of_yaks&quot;:3},&quot;target&quot;:&quot;fmt_json&quot;}
-//! {&quot;timestamp&quot;:&quot;Oct 24 13:00:00.874&quot;,&quot;level&quot;:&quot;INFO&quot;,&quot;fields&quot;:{&quot;message&quot;:&quot;shaving yaks&quot;},&quot;target&quot;:&quot;fmt_json::yak_shave&quot;,&quot;spans&quot;:[{&quot;yaks&quot;:3,&quot;name&quot;:&quot;shaving_yaks&quot;}]}
-//! {&quot;timestamp&quot;:&quot;Oct 24 13:00:00.874&quot;,&quot;level&quot;:&quot;TRACE&quot;,&quot;fields&quot;:{&quot;message&quot;:&quot;hello! I&apos;m gonna shave a yak&quot;,&quot;excitement&quot;:&quot;yay!&quot;},&quot;target&quot;:&quot;fmt_json::yak_shave&quot;,&quot;spans&quot;:[{&quot;yaks&quot;:3,&quot;name&quot;:&quot;shaving_yaks&quot;},{&quot;yak&quot;:&quot;1&quot;,&quot;name&quot;:&quot;shave&quot;}]}
-//! {&quot;timestamp&quot;:&quot;Oct 24 13:00:00.874&quot;,&quot;level&quot;:&quot;TRACE&quot;,&quot;fields&quot;:{&quot;message&quot;:&quot;yak shaved successfully&quot;},&quot;target&quot;:&quot;fmt_json::yak_shave&quot;,&quot;spans&quot;:[{&quot;yaks&quot;:3,&quot;name&quot;:&quot;shaving_yaks&quot;},{&quot;yak&quot;:&quot;1&quot;,&quot;name&quot;:&quot;shave&quot;}]}
-//! {&quot;timestamp&quot;:&quot;Oct 24 13:00:00.874&quot;,&quot;level&quot;:&quot;DEBUG&quot;,&quot;fields&quot;:{&quot;yak&quot;:1,&quot;shaved&quot;:true},&quot;target&quot;:&quot;yak_events&quot;,&quot;spans&quot;:[{&quot;yaks&quot;:3,&quot;name&quot;:&quot;shaving_yaks&quot;}]}
-//! {&quot;timestamp&quot;:&quot;Oct 24 13:00:00.874&quot;,&quot;level&quot;:&quot;TRACE&quot;,&quot;fields&quot;:{&quot;yaks_shaved&quot;:1},&quot;target&quot;:&quot;fmt_json::yak_shave&quot;,&quot;spans&quot;:[{&quot;yaks&quot;:3,&quot;name&quot;:&quot;shaving_yaks&quot;}]}
-//! {&quot;timestamp&quot;:&quot;Oct 24 13:00:00.874&quot;,&quot;level&quot;:&quot;TRACE&quot;,&quot;fields&quot;:{&quot;message&quot;:&quot;hello! I&apos;m gonna shave a yak&quot;,&quot;excitement&quot;:&quot;yay!&quot;},&quot;target&quot;:&quot;fmt_json::yak_shave&quot;,&quot;spans&quot;:[{&quot;yaks&quot;:3,&quot;name&quot;:&quot;shaving_yaks&quot;},{&quot;yak&quot;:&quot;2&quot;,&quot;name&quot;:&quot;shave&quot;}]}
-//! {&quot;timestamp&quot;:&quot;Oct 24 13:00:00.874&quot;,&quot;level&quot;:&quot;TRACE&quot;,&quot;fields&quot;:{&quot;message&quot;:&quot;yak shaved successfully&quot;},&quot;target&quot;:&quot;fmt_json::yak_shave&quot;,&quot;spans&quot;:[{&quot;yaks&quot;:3,&quot;name&quot;:&quot;shaving_yaks&quot;},{&quot;yak&quot;:&quot;2&quot;,&quot;name&quot;:&quot;shave&quot;}]}
-//! {&quot;timestamp&quot;:&quot;Oct 24 13:00:00.874&quot;,&quot;level&quot;:&quot;DEBUG&quot;,&quot;fields&quot;:{&quot;yak&quot;:2,&quot;shaved&quot;:true},&quot;target&quot;:&quot;yak_events&quot;,&quot;spans&quot;:[{&quot;yaks&quot;:3,&quot;name&quot;:&quot;shaving_yaks&quot;}]}
-//! {&quot;timestamp&quot;:&quot;Oct 24 13:00:00.874&quot;,&quot;level&quot;:&quot;TRACE&quot;,&quot;fields&quot;:{&quot;yaks_shaved&quot;:2},&quot;target&quot;:&quot;fmt_json::yak_shave&quot;,&quot;spans&quot;:[{&quot;yaks&quot;:3,&quot;name&quot;:&quot;shaving_yaks&quot;}]}
-//! {&quot;timestamp&quot;:&quot;Oct 24 13:00:00.874&quot;,&quot;level&quot;:&quot;TRACE&quot;,&quot;fields&quot;:{&quot;message&quot;:&quot;hello! I&apos;m gonna shave a yak&quot;,&quot;excitement&quot;:&quot;yay!&quot;},&quot;target&quot;:&quot;fmt_json::yak_shave&quot;,&quot;spans&quot;:[{&quot;yaks&quot;:3,&quot;name&quot;:&quot;shaving_yaks&quot;},{&quot;yak&quot;:&quot;3&quot;,&quot;name&quot;:&quot;shave&quot;}]}
-//! {&quot;timestamp&quot;:&quot;Oct 24 13:00:00.875&quot;,&quot;level&quot;:&quot;WARN&quot;,&quot;fields&quot;:{&quot;message&quot;:&quot;could not locate yak&quot;},&quot;target&quot;:&quot;fmt_json::yak_shave&quot;,&quot;spans&quot;:[{&quot;yaks&quot;:3,&quot;name&quot;:&quot;shaving_yaks&quot;},{&quot;yak&quot;:&quot;3&quot;,&quot;name&quot;:&quot;shave&quot;}]}
-//! {&quot;timestamp&quot;:&quot;Oct 24 13:00:00.875&quot;,&quot;level&quot;:&quot;DEBUG&quot;,&quot;fields&quot;:{&quot;yak&quot;:3,&quot;shaved&quot;:false},&quot;target&quot;:&quot;yak_events&quot;,&quot;spans&quot;:[{&quot;yaks&quot;:3,&quot;name&quot;:&quot;shaving_yaks&quot;}]}
-//! {&quot;timestamp&quot;:&quot;Oct 24 13:00:00.875&quot;,&quot;level&quot;:&quot;ERROR&quot;,&quot;fields&quot;:{&quot;message&quot;:&quot;failed to shave yak&quot;,&quot;yak&quot;:3,&quot;error&quot;:&quot;missing yak&quot;},&quot;target&quot;:&quot;fmt_json::yak_shave&quot;,&quot;spans&quot;:[{&quot;yaks&quot;:3,&quot;name&quot;:&quot;shaving_yaks&quot;}]}
-//! {&quot;timestamp&quot;:&quot;Oct 24 13:00:00.875&quot;,&quot;level&quot;:&quot;TRACE&quot;,&quot;fields&quot;:{&quot;yaks_shaved&quot;:2},&quot;target&quot;:&quot;fmt_json::yak_shave&quot;,&quot;spans&quot;:[{&quot;yaks&quot;:3,&quot;name&quot;:&quot;shaving_yaks&quot;}]}
-//! {&quot;timestamp&quot;:&quot;Oct 24 13:00:00.875&quot;,&quot;level&quot;:&quot;INFO&quot;,&quot;fields&quot;:{&quot;message&quot;:&quot;yak shaving completed&quot;,&quot;all_yaks_shaved&quot;:false},&quot;target&quot;:&quot;fmt_json&quot;}
-//! </pre>
+//! by analysis and viewing tools. The JSON output is not optimized for human
+//! readability. See [here](format::Json#example-output) for sample output.
//!
//! ### Customizing Formatters
//!
@@ -221,7 +116,7 @@
//! .with_thread_names(true) // include the name of the current thread
//! .compact(); // use the `Compact` formatting style.
//!
-//! // Create a `fmt` collector that uses our custom event format, and set it
+//! // Create a `fmt` subscriber that uses our custom event format, and set it
//! // as the default.
//! tracing_subscriber::fmt()
//! .event_format(format)
@@ -268,7 +163,7 @@
//!
//! ### Composing Layers
//!
-//! Composing an [`EnvFilter`] `Layer` and a [format `Layer`](../fmt/struct.Layer.html):
+//! Composing an [`EnvFilter`] `Layer` and a [format `Layer`][super::fmt::Layer]:
//!
//! ```rust
//! use tracing_subscriber::{fmt, EnvFilter};
@@ -286,11 +181,10 @@
//! .init();
//! ```
//!
-//! [`EnvFilter`]: ../filter/struct.EnvFilter.html
+//! [`EnvFilter`]: super::filter::EnvFilter
//! [`env_logger`]: https://docs.rs/env_logger/
-//! [`filter`]: ../filter/index.html
-//! [`SubscriberBuilder`]: ./struct.SubscriberBuilder.html
-//! [`FmtSubscriber`]: ./struct.Subscriber.html
+//! [`filter`]: super::filter
+//! [`FmtSubscriber`]: Subscriber
//! [`Subscriber`]:
//! https://docs.rs/tracing/latest/tracing/trait.Subscriber.html
//! [`tracing`]: https://crates.io/crates/tracing
@@ -309,6 +203,7 @@ pub mod writer;
pub use fmt_layer::{FmtContext, FormattedFields, Layer};
use crate::layer::Layer as _;
+use crate::util::SubscriberInitExt;
use crate::{
filter::LevelFilter,
layer,
@@ -348,6 +243,7 @@ pub type Formatter<
/// Configures and constructs `Subscriber`s.
#[cfg_attr(docsrs, doc(cfg(all(feature = "fmt", feature = "std"))))]
#[derive(Debug)]
+#[must_use]
pub struct SubscriberBuilder<
N = format::DefaultFields,
E = format::Format<format::Full>,
@@ -417,7 +313,7 @@ pub struct SubscriberBuilder<
/// ```
///
/// [formatting subscriber]: Subscriber
-/// [`SubscriberBuilder::default()`]: SubscriberBuilder::default()
+/// [`SubscriberBuilder::default()`]: SubscriberBuilder::default
/// [`init`]: SubscriberBuilder::init()
/// [`try_init`]: SubscriberBuilder::try_init()
/// [`finish`]: SubscriberBuilder::finish()
@@ -429,10 +325,11 @@ pub fn fmt() -> SubscriberBuilder {
/// Returns a new [formatting layer] that can be [composed] with other layers to
/// construct a [`Subscriber`].
///
-/// This is a shorthand for the equivalent [`Layer::default`] function.
+/// This is a shorthand for the equivalent [`Layer::default()`] function.
///
/// [formatting layer]: Layer
/// [composed]: crate::layer
+/// [`Layer::default()`]: Layer::default
#[cfg_attr(docsrs, doc(cfg(all(feature = "fmt", feature = "std"))))]
pub fn layer<S>() -> Layer<S> {
Layer::default()
@@ -444,8 +341,8 @@ impl Subscriber {
///
/// This can be overridden with the [`SubscriberBuilder::with_max_level`] method.
///
- /// [verbosity level]: https://docs.rs/tracing-core/0.1.5/tracing_core/struct.Level.html
- /// [`SubscriberBuilder::with_max_level`]: struct.SubscriberBuilder.html#method.with_max_level
+ /// [verbosity level]: tracing_core::Level
+ /// [`SubscriberBuilder::with_max_level`]: SubscriberBuilder::with_max_level
pub const DEFAULT_MAX_LEVEL: LevelFilter = LevelFilter::INFO;
/// Returns a new `SubscriberBuilder` for configuring a format subscriber.
@@ -502,6 +399,11 @@ where
}
#[inline]
+ fn event_enabled(&self, event: &Event<'_>) -> bool {
+ self.inner.event_enabled(event)
+ }
+
+ #[inline]
fn event(&self, event: &Event<'_>) {
self.inner.event(event);
}
@@ -565,6 +467,7 @@ impl Default for SubscriberBuilder {
filter: Subscriber::DEFAULT_MAX_LEVEL,
inner: Default::default(),
}
+ .log_internal_errors(true)
}
}
@@ -701,7 +604,7 @@ where
/// `Layer`s added to this subscriber.
///
/// [lifecycle]: https://docs.rs/tracing/latest/tracing/span/index.html#the-span-lifecycle
- /// [time]: #method.without_time
+ /// [time]: SubscriberBuilder::without_time()
pub fn with_span_events(self, kind: format::FmtSpan) -> Self {
SubscriberBuilder {
inner: self.inner.with_span_events(kind),
@@ -719,6 +622,27 @@ where
}
}
+ /// Sets whether to write errors from [`FormatEvent`] to the writer.
+ /// Defaults to true.
+ ///
+ /// By default, `fmt::Layer` will write any `FormatEvent`-internal errors to
+ /// the writer. These errors are unlikely and will only occur if there is a
+ /// bug in the `FormatEvent` implementation or its dependencies.
+ ///
+ /// If writing to the writer fails, the error message is printed to stderr
+ /// as a fallback.
+ ///
+ /// [`FormatEvent`]: crate::fmt::FormatEvent
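+    ///
+    /// # Examples
+    ///
+    /// A minimal sketch that disables writing these internal errors entirely:
+    ///
+    /// ```rust
+    /// let subscriber = tracing_subscriber::fmt()
+    ///     .log_internal_errors(false)
+    ///     .finish();
+    /// # drop(subscriber);
+    /// ```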
+ pub fn log_internal_errors(
+ self,
+ log_internal_errors: bool,
+ ) -> SubscriberBuilder<N, format::Format<L, T>, F, W> {
+ SubscriberBuilder {
+ inner: self.inner.log_internal_errors(log_internal_errors),
+ ..self
+ }
+ }
+
/// Sets whether or not an event's target is displayed.
pub fn with_target(
self,
@@ -730,6 +654,34 @@ where
}
}
+ /// Sets whether or not an event's [source code file path][file] is
+ /// displayed.
+ ///
+ /// [file]: tracing_core::Metadata::file
+ pub fn with_file(
+ self,
+ display_filename: bool,
+ ) -> SubscriberBuilder<N, format::Format<L, T>, F, W> {
+ SubscriberBuilder {
+ inner: self.inner.with_file(display_filename),
+ ..self
+ }
+ }
+
+ /// Sets whether or not an event's [source code line number][line] is
+ /// displayed.
+ ///
+ /// [line]: tracing_core::Metadata::line
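+    ///
+    /// For example, a sketch that enables both file names and line numbers:
+    ///
+    /// ```rust
+    /// let subscriber = tracing_subscriber::fmt()
+    ///     .with_file(true)
+    ///     .with_line_number(true)
+    ///     .finish();
+    /// # drop(subscriber);
+    /// ```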
+ pub fn with_line_number(
+ self,
+ display_line_number: bool,
+ ) -> SubscriberBuilder<N, format::Format<L, T>, F, W> {
+ SubscriberBuilder {
+ inner: self.inner.with_line_number(display_line_number),
+ ..self
+ }
+ }
+
/// Sets whether or not an event's level is displayed.
pub fn with_level(
self,
@@ -742,9 +694,9 @@ where
}
/// Sets whether or not the [name] of the current thread is displayed
- /// when formatting events
+ /// when formatting events.
///
- /// [name]: https://doc.rust-lang.org/stable/std/thread/index.html#naming-threads
+ /// [name]: std::thread#naming-threads
pub fn with_thread_names(
self,
display_thread_names: bool,
@@ -756,9 +708,9 @@ where
}
/// Sets whether or not the [thread ID] of the current thread is displayed
- /// when formatting events
+ /// when formatting events.
///
- /// [thread ID]: https://doc.rust-lang.org/stable/std/thread/struct.ThreadId.html
+ /// [thread ID]: std::thread::ThreadId
pub fn with_thread_ids(
self,
display_thread_ids: bool,
@@ -796,7 +748,7 @@ where
/// Sets the subscriber being built to use a JSON formatter.
///
- /// See [`format::Json`](../fmt/format/struct.Json.html)
+ /// See [`format::Json`][super::fmt::format::Json]
#[cfg(feature = "json")]
#[cfg_attr(docsrs, doc(cfg(feature = "json")))]
pub fn json(
@@ -817,7 +769,7 @@ where
impl<T, F, W> SubscriberBuilder<format::JsonFields, format::Format<format::Json, T>, F, W> {
/// Sets the json subscriber being built to flatten event metadata.
///
- /// See [`format::Json`](../fmt/format/struct.Json.html)
+ /// See [`format::Json`][super::fmt::format::Json]
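+    ///
+    /// For example, a sketch that enables flattening on the JSON builder:
+    ///
+    /// ```rust
+    /// let subscriber = tracing_subscriber::fmt()
+    ///     .json()
+    ///     .flatten_event(true)
+    ///     .finish();
+    /// # drop(subscriber);
+    /// ```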
pub fn flatten_event(
self,
flatten_event: bool,
@@ -831,7 +783,7 @@ impl<T, F, W> SubscriberBuilder<format::JsonFields, format::Format<format::Json,
/// Sets whether or not the JSON subscriber being built will include the current span
/// in formatted events.
///
- /// See [`format::Json`](../fmt/format/struct.Json.html)
+ /// See [`format::Json`][super::fmt::format::Json]
pub fn with_current_span(
self,
display_current_span: bool,
@@ -845,7 +797,7 @@ impl<T, F, W> SubscriberBuilder<format::JsonFields, format::Format<format::Json,
/// Sets whether or not the JSON subscriber being built will include a list (from
/// root to leaf) of all currently entered spans in formatted events.
///
- /// See [`format::Json`](../fmt/format/struct.Json.html)
+ /// See [`format::Json`][super::fmt::format::Json]
pub fn with_span_list(
self,
display_span_list: bool,
@@ -891,7 +843,7 @@ where
}
impl<N, E, F, W> SubscriberBuilder<N, E, F, W> {
- /// Sets the Visitor that the subscriber being built will use to record
+ /// Sets the field formatter that the subscriber being built will use to record
/// fields.
///
/// For example:
@@ -967,8 +919,8 @@ impl<N, E, F, W> SubscriberBuilder<N, E, F, W> {
/// .try_init()?;
/// # Ok(())}
/// ```
- /// [`EnvFilter`]: ../filter/struct.EnvFilter.html
- /// [`with_max_level`]: #method.with_max_level
+ /// [`EnvFilter`]: super::filter::EnvFilter
+ /// [`with_max_level`]: SubscriberBuilder::with_max_level()
#[cfg(feature = "env-filter")]
#[cfg_attr(docsrs, doc(cfg(feature = "env-filter")))]
pub fn with_env_filter(
@@ -989,7 +941,7 @@ impl<N, E, F, W> SubscriberBuilder<N, E, F, W> {
/// subscriber.
///
/// If the max level has already been set, or a [`EnvFilter`] was added by
- /// [`with_filter`], this replaces that configuration with the new
+ /// [`with_env_filter`], this replaces that configuration with the new
/// maximum level.
///
/// # Examples
@@ -1011,9 +963,9 @@ impl<N, E, F, W> SubscriberBuilder<N, E, F, W> {
/// .with_max_level(LevelFilter::OFF)
/// .finish();
/// ```
- /// [verbosity level]: https://docs.rs/tracing-core/0.1.5/tracing_core/struct.Level.html
- /// [`EnvFilter`]: ../filter/struct.EnvFilter.html
- /// [`with_filter`]: #method.with_filter
+ /// [verbosity level]: tracing_core::Level
+ /// [`EnvFilter`]: struct@crate::filter::EnvFilter
+ /// [`with_env_filter`]: fn@Self::with_env_filter
pub fn with_max_level(
self,
filter: impl Into<LevelFilter>,
@@ -1025,8 +977,26 @@ impl<N, E, F, W> SubscriberBuilder<N, E, F, W> {
}
}
- /// Sets the function that the subscriber being built should use to format
- /// events that occur.
+ /// Sets the [event formatter][`FormatEvent`] that the subscriber being built
+ /// will use to format events that occur.
+ ///
+ /// The event formatter may be any type implementing the [`FormatEvent`]
+ /// trait, which is implemented for all functions taking a [`FmtContext`], a
+ /// [`Writer`], and an [`Event`].
+ ///
+ /// # Examples
+ ///
+ /// Setting a type implementing [`FormatEvent`] as the formatter:
+ ///
+ /// ```rust
+ /// use tracing_subscriber::fmt::format;
+ ///
+ /// let subscriber = tracing_subscriber::fmt()
+ /// .event_format(format().compact())
+ /// .finish();
+ /// ```
+ ///
+ /// [`Writer`]: struct@self::format::Writer
pub fn event_format<E2>(self, fmt_event: E2) -> SubscriberBuilder<N, E2, F, W>
where
E2: FormatEvent<Registry, N> + 'static,
@@ -1053,8 +1023,6 @@ impl<N, E, F, W> SubscriberBuilder<N, E, F, W> {
/// .with_writer(io::stderr)
/// .init();
/// ```
- ///
- /// [`MakeWriter`]: trait.MakeWriter.html
pub fn with_writer<W2>(self, make_writer: W2) -> SubscriberBuilder<N, E, F, W2>
where
W2: for<'writer> MakeWriter<'writer> + 'static,
@@ -1088,13 +1056,89 @@ impl<N, E, F, W> SubscriberBuilder<N, E, F, W> {
///
/// [capturing]:
/// https://doc.rust-lang.org/book/ch11-02-running-tests.html#showing-function-output
- /// [`TestWriter`]: writer/struct.TestWriter.html
+ /// [`TestWriter`]: writer::TestWriter
pub fn with_test_writer(self) -> SubscriberBuilder<N, E, F, TestWriter> {
SubscriberBuilder {
filter: self.filter,
inner: self.inner.with_writer(TestWriter::default()),
}
}
+
+ /// Updates the event formatter by applying a function to the existing event formatter.
+ ///
+    /// This sets the event formatter that the subscriber being built will use to format events.
+ ///
+ /// # Examples
+ ///
+ /// Updating an event formatter:
+ ///
+ /// ```rust
+ /// let subscriber = tracing_subscriber::fmt()
+ /// .map_event_format(|e| e.compact())
+ /// .finish();
+ /// ```
+ pub fn map_event_format<E2>(self, f: impl FnOnce(E) -> E2) -> SubscriberBuilder<N, E2, F, W>
+ where
+ E2: FormatEvent<Registry, N> + 'static,
+ N: for<'writer> FormatFields<'writer> + 'static,
+ W: for<'writer> MakeWriter<'writer> + 'static,
+ {
+ SubscriberBuilder {
+ filter: self.filter,
+ inner: self.inner.map_event_format(f),
+ }
+ }
+
+ /// Updates the field formatter by applying a function to the existing field formatter.
+ ///
+ /// This sets the field formatter that the subscriber being built will use to record fields.
+ ///
+ /// # Examples
+ ///
+ /// Updating a field formatter:
+ ///
+ /// ```rust
+ /// use tracing_subscriber::field::MakeExt;
+ /// let subscriber = tracing_subscriber::fmt()
+ /// .map_fmt_fields(|f| f.debug_alt())
+ /// .finish();
+ /// ```
+ pub fn map_fmt_fields<N2>(self, f: impl FnOnce(N) -> N2) -> SubscriberBuilder<N2, E, F, W>
+ where
+ N2: for<'writer> FormatFields<'writer> + 'static,
+ {
+ SubscriberBuilder {
+ filter: self.filter,
+ inner: self.inner.map_fmt_fields(f),
+ }
+ }
+
+ /// Updates the [`MakeWriter`] by applying a function to the existing [`MakeWriter`].
+ ///
+ /// This sets the [`MakeWriter`] that the subscriber being built will use to write events.
+ ///
+ /// # Examples
+ ///
+ /// Redirect output to stderr if level is <= WARN:
+ ///
+ /// ```rust
+ /// use tracing::Level;
+ /// use tracing_subscriber::fmt::{self, writer::MakeWriterExt};
+ ///
+ /// let stderr = std::io::stderr.with_max_level(Level::WARN);
+ /// let layer = tracing_subscriber::fmt()
+ /// .map_writer(move |w| stderr.or_else(w))
+ /// .finish();
+ /// ```
+ pub fn map_writer<W2>(self, f: impl FnOnce(W) -> W2) -> SubscriberBuilder<N, E, F, W2>
+ where
+ W2: for<'writer> MakeWriter<'writer> + 'static,
+ {
+ SubscriberBuilder {
+ filter: self.filter,
+ inner: self.inner.map_writer(f),
+ }
+ }
}
/// Install a global tracing subscriber that listens for events and
@@ -1121,35 +1165,69 @@ impl<N, E, F, W> SubscriberBuilder<N, E, F, W> {
///
/// [`LogTracer`]:
/// https://docs.rs/tracing-log/0.1.0/tracing_log/struct.LogTracer.html
-/// [`RUST_LOG` environment variable]:
-/// ../filter/struct.EnvFilter.html#associatedconstant.DEFAULT_ENV
+/// [`RUST_LOG` environment variable]: crate::filter::EnvFilter::DEFAULT_ENV
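+///
+/// # Examples
+///
+/// A minimal sketch that handles the error instead of panicking:
+///
+/// ```rust
+/// if let Err(error) = tracing_subscriber::fmt::try_init() {
+///     eprintln!("failed to set the global default subscriber: {}", error);
+/// }
+/// ```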
pub fn try_init() -> Result<(), Box<dyn Error + Send + Sync + 'static>> {
let builder = Subscriber::builder();
#[cfg(feature = "env-filter")]
let builder = builder.with_env_filter(crate::EnvFilter::from_default_env());
- builder.try_init()
+ // If `env-filter` is disabled, remove the default max level filter from the
+ // subscriber; it will be added to the `Targets` filter instead if no filter
+ // is set in `RUST_LOG`.
+ // Replacing the default `LevelFilter` with an `EnvFilter` would imply this,
+ // but we can't replace the builder's filter with a `Targets` filter yet.
+ #[cfg(not(feature = "env-filter"))]
+ let builder = builder.with_max_level(LevelFilter::TRACE);
+
+ let subscriber = builder.finish();
+ #[cfg(not(feature = "env-filter"))]
+ let subscriber = {
+ use crate::{filter::Targets, layer::SubscriberExt};
+ use std::{env, str::FromStr};
+ let targets = match env::var("RUST_LOG") {
+ Ok(var) => Targets::from_str(&var)
+ .map_err(|e| {
+ eprintln!("Ignoring `RUST_LOG={:?}`: {}", var, e);
+ })
+ .unwrap_or_default(),
+ Err(env::VarError::NotPresent) => {
+ Targets::new().with_default(Subscriber::DEFAULT_MAX_LEVEL)
+ }
+ Err(e) => {
+ eprintln!("Ignoring `RUST_LOG`: {}", e);
+ Targets::new().with_default(Subscriber::DEFAULT_MAX_LEVEL)
+ }
+ };
+ subscriber.with(targets)
+ };
+
+ subscriber.try_init().map_err(Into::into)
}
/// Install a global tracing subscriber that listens for events and
/// filters based on the value of the [`RUST_LOG` environment variable].
///
+/// The configuration of the subscriber initialized by this function
+/// depends on what [feature flags](crate#feature-flags) are enabled.
+///
/// If the `tracing-log` feature is enabled, this will also install
/// the LogTracer to convert `Log` records into `tracing` `Event`s.
///
-/// This is shorthand for
+/// If the `env-filter` feature is enabled, this is shorthand for
///
/// ```rust
-/// tracing_subscriber::fmt().init()
+/// # use tracing_subscriber::EnvFilter;
+/// tracing_subscriber::fmt()
+/// .with_env_filter(EnvFilter::from_default_env())
+/// .init();
/// ```
///
/// # Panics
/// Panics if the initialization was unsuccessful, likely because a
/// global subscriber was already installed by another call to `try_init`.
///
-/// [`RUST_LOG` environment variable]:
-/// ../filter/struct.EnvFilter.html#associatedconstant.DEFAULT_ENV
+/// [`RUST_LOG` environment variable]: crate::filter::EnvFilter::DEFAULT_ENV
pub fn init() {
try_init().expect("Unable to install global subscriber")
}
diff --git a/vendor/tracing-subscriber-0.3.3/src/fmt/time/datetime.rs b/vendor/tracing-subscriber/src/fmt/time/datetime.rs
index 531331687..531331687 100644
--- a/vendor/tracing-subscriber-0.3.3/src/fmt/time/datetime.rs
+++ b/vendor/tracing-subscriber/src/fmt/time/datetime.rs
diff --git a/vendor/tracing-subscriber-0.3.3/src/fmt/time/mod.rs b/vendor/tracing-subscriber/src/fmt/time/mod.rs
index 621df16e4..e5b7c83b0 100644
--- a/vendor/tracing-subscriber-0.3.3/src/fmt/time/mod.rs
+++ b/vendor/tracing-subscriber/src/fmt/time/mod.rs
@@ -12,9 +12,13 @@ mod time_crate;
pub use time_crate::UtcTime;
#[cfg(feature = "local-time")]
-#[cfg_attr(docsrs, doc(cfg(feature = "local-time")))]
+#[cfg_attr(docsrs, doc(cfg(unsound_local_offset, feature = "local-time")))]
pub use time_crate::LocalTime;
+#[cfg(feature = "time")]
+#[cfg_attr(docsrs, doc(cfg(feature = "time")))]
+pub use time_crate::OffsetTime;
+
/// A type that can measure and format the current time.
///
/// This trait is used by `Format` to include a timestamp with each `Event` when it is logged.
@@ -26,7 +30,7 @@ pub use time_crate::LocalTime;
///
/// The full list of provided implementations can be found in [`time`].
///
-/// [`time`]: ./index.html
+/// [`time`]: self
pub trait FormatTime {
/// Measure and write out the current time.
///
diff --git a/vendor/tracing-subscriber/src/fmt/time/time_crate.rs b/vendor/tracing-subscriber/src/fmt/time/time_crate.rs
new file mode 100644
index 000000000..60d57fd0b
--- /dev/null
+++ b/vendor/tracing-subscriber/src/fmt/time/time_crate.rs
@@ -0,0 +1,470 @@
+use crate::fmt::{format::Writer, time::FormatTime, writer::WriteAdaptor};
+use std::fmt;
+use time::{format_description::well_known, formatting::Formattable, OffsetDateTime, UtcOffset};
+
+/// Formats the current [local time] using a [formatter] from the [`time` crate].
+///
+/// To format the current [UTC time] instead, use the [`UtcTime`] type.
+///
+/// <div class="example-wrap" style="display:inline-block">
+/// <pre class="compile_fail" style="white-space:normal;font:inherit;">
+/// <strong>Warning</strong>: The <a href = "https://docs.rs/time/0.3/time/"><code>time</code>
+/// crate</a> must be compiled with <code>--cfg unsound_local_offset</code> in order to use
+/// local timestamps. When this cfg is not enabled, local timestamps cannot be recorded, and
+/// events will be logged without timestamps.
+///
+/// Alternatively, [`OffsetTime`] can log with a local offset if it is initialized early.
+///
+/// See the <a href="https://docs.rs/time/0.3.4/time/#feature-flags"><code>time</code>
+/// documentation</a> for more details.
+/// </pre></div>
+///
+/// [local time]: time::OffsetDateTime::now_local
+/// [UTC time]: time::OffsetDateTime::now_utc
+/// [formatter]: time::formatting::Formattable
+/// [`time` crate]: time
+#[derive(Clone, Debug)]
+#[cfg_attr(
+ docsrs,
+ doc(cfg(all(unsound_local_offset, feature = "time", feature = "local-time")))
+)]
+#[cfg(feature = "local-time")]
+pub struct LocalTime<F> {
+ format: F,
+}
+
+/// Formats the current [UTC time] using a [formatter] from the [`time` crate].
+///
+/// To format the current [local time] instead, use the [`LocalTime`] type.
+///
+/// [local time]: time::OffsetDateTime::now_local
+/// [UTC time]: time::OffsetDateTime::now_utc
+/// [formatter]: time::formatting::Formattable
+/// [`time` crate]: time
+#[cfg_attr(docsrs, doc(cfg(feature = "time")))]
+#[derive(Clone, Debug)]
+pub struct UtcTime<F> {
+ format: F,
+}
+
+/// Formats the current time using a fixed offset and a [formatter] from the [`time` crate].
+///
+/// This is typically used as an alternative to [`LocalTime`]. `LocalTime` determines the offset
+/// every time it formats a message, which may be unsound or fail. With `OffsetTime`, the offset is
+/// determined once, so the (possibly fallible) offset lookup can happen while the program is
+/// still single-threaded and any errors can be handled up front. However, this also means the
+/// offset cannot change while the program is running (it will not change across DST transitions).
+///
+/// [formatter]: time::formatting::Formattable
+/// [`time` crate]: time
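+///
+/// # Examples
+///
+/// A minimal sketch using a fixed UTC offset rather than the local offset:
+///
+/// ```
+/// use tracing_subscriber::fmt::time::OffsetTime;
+/// use time::{format_description::well_known::Rfc3339, UtcOffset};
+///
+/// let timer = OffsetTime::new(UtcOffset::UTC, Rfc3339);
+/// let collector = tracing_subscriber::fmt()
+///     .with_timer(timer);
+/// # drop(collector);
+/// ```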
+#[derive(Clone, Debug)]
+#[cfg_attr(docsrs, doc(cfg(feature = "time")))]
+pub struct OffsetTime<F> {
+ offset: time::UtcOffset,
+ format: F,
+}
+
+// === impl LocalTime ===
+
+#[cfg(feature = "local-time")]
+impl LocalTime<well_known::Rfc3339> {
+ /// Returns a formatter that formats the current [local time] in the
+ /// [RFC 3339] format (a subset of the [ISO 8601] timestamp format).
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use tracing_subscriber::fmt::{self, time};
+ ///
+ /// let collector = tracing_subscriber::fmt()
+ /// .with_timer(time::LocalTime::rfc_3339());
+ /// # drop(collector);
+ /// ```
+ ///
+ /// [local time]: time::OffsetDateTime::now_local
+ /// [RFC 3339]: https://datatracker.ietf.org/doc/html/rfc3339
+ /// [ISO 8601]: https://en.wikipedia.org/wiki/ISO_8601
+ pub fn rfc_3339() -> Self {
+ Self::new(well_known::Rfc3339)
+ }
+}
+
+#[cfg(feature = "local-time")]
+impl<F: Formattable> LocalTime<F> {
+ /// Returns a formatter that formats the current [local time] using the
+    /// [`time` crate] with the provided format. The format may be any
+ /// type that implements the [`Formattable`] trait.
+ ///
+ ///
+ /// <div class="example-wrap" style="display:inline-block">
+ /// <pre class="compile_fail" style="white-space:normal;font:inherit;">
+ /// <strong>Warning</strong>: The <a href = "https://docs.rs/time/0.3/time/">
+ /// <code>time</code> crate</a> must be compiled with <code>--cfg
+ /// unsound_local_offset</code> in order to use local timestamps. When this
+ /// cfg is not enabled, local timestamps cannot be recorded, and
+ /// events will be logged without timestamps.
+ ///
+ /// See the <a href="https://docs.rs/time/0.3.4/time/#feature-flags">
+ /// <code>time</code> documentation</a> for more details.
+ /// </pre></div>
+ ///
+ /// Typically, the format will be a format description string, or one of the
+ /// `time` crate's [well-known formats].
+ ///
+ /// If the format description is statically known, then the
+ /// [`format_description!`] macro should be used. This is identical to the
+ /// [`time::format_description::parse`] method, but runs at compile-time,
+ /// throwing an error if the format description is invalid. If the desired format
+ /// is not known statically (e.g., a user is providing a format string), then the
+ /// [`time::format_description::parse`] method should be used. Note that this
+ /// method is fallible.
+ ///
+ /// See the [`time` book] for details on the format description syntax.
+ ///
+ /// # Examples
+ ///
+ /// Using the [`format_description!`] macro:
+ ///
+ /// ```
+ /// use tracing_subscriber::fmt::{self, time::LocalTime};
+ /// use time::macros::format_description;
+ ///
+ /// let timer = LocalTime::new(format_description!("[hour]:[minute]:[second]"));
+ /// let collector = tracing_subscriber::fmt()
+ /// .with_timer(timer);
+ /// # drop(collector);
+ /// ```
+ ///
+ /// Using [`time::format_description::parse`]:
+ ///
+ /// ```
+ /// use tracing_subscriber::fmt::{self, time::LocalTime};
+ ///
+ /// let time_format = time::format_description::parse("[hour]:[minute]:[second]")
+ /// .expect("format string should be valid!");
+ /// let timer = LocalTime::new(time_format);
+ /// let collector = tracing_subscriber::fmt()
+ /// .with_timer(timer);
+ /// # drop(collector);
+ /// ```
+ ///
+ /// Using the [`format_description!`] macro requires enabling the `time`
+ /// crate's "macros" feature flag.
+ ///
+ /// Using a [well-known format][well-known formats] (this is equivalent to
+ /// [`LocalTime::rfc_3339`]):
+ ///
+ /// ```
+ /// use tracing_subscriber::fmt::{self, time::LocalTime};
+ ///
+ /// let timer = LocalTime::new(time::format_description::well_known::Rfc3339);
+ /// let collector = tracing_subscriber::fmt()
+ /// .with_timer(timer);
+ /// # drop(collector);
+ /// ```
+ ///
+ /// [local time]: time::OffsetDateTime::now_local()
+ /// [`time` crate]: time
+ /// [`Formattable`]: time::formatting::Formattable
+ /// [well-known formats]: time::format_description::well_known
+ /// [`format_description!`]: time::macros::format_description!
+ /// [`time::format_description::parse`]: time::format_description::parse()
+ /// [`time` book]: https://time-rs.github.io/book/api/format-description.html
+ pub fn new(format: F) -> Self {
+ Self { format }
+ }
+}
+
+#[cfg(feature = "local-time")]
+impl<F> FormatTime for LocalTime<F>
+where
+ F: Formattable,
+{
+ fn format_time(&self, w: &mut Writer<'_>) -> fmt::Result {
+ let now = OffsetDateTime::now_local().map_err(|_| fmt::Error)?;
+ format_datetime(now, w, &self.format)
+ }
+}
+
+#[cfg(feature = "local-time")]
+impl<F> Default for LocalTime<F>
+where
+ F: Formattable + Default,
+{
+ fn default() -> Self {
+ Self::new(F::default())
+ }
+}
+
+// === impl UtcTime ===
+
+impl UtcTime<well_known::Rfc3339> {
+ /// Returns a formatter that formats the current [UTC time] in the
+ /// [RFC 3339] format, which is a subset of the [ISO 8601] timestamp format.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use tracing_subscriber::fmt::{self, time};
+ ///
+ /// let collector = tracing_subscriber::fmt()
+ /// .with_timer(time::UtcTime::rfc_3339());
+ /// # drop(collector);
+ /// ```
+ ///
+ /// [local time]: time::OffsetDateTime::now_utc
+ /// [RFC 3339]: https://datatracker.ietf.org/doc/html/rfc3339
+ /// [ISO 8601]: https://en.wikipedia.org/wiki/ISO_8601
+ pub fn rfc_3339() -> Self {
+ Self::new(well_known::Rfc3339)
+ }
+}
+
+impl<F: Formattable> UtcTime<F> {
+ /// Returns a formatter that formats the current [UTC time] using the
+    /// [`time` crate], with the provided format. The format may be any
+ /// type that implements the [`Formattable`] trait.
+ ///
+ /// Typically, the format will be a format description string, or one of the
+ /// `time` crate's [well-known formats].
+ ///
+ /// If the format description is statically known, then the
+ /// [`format_description!`] macro should be used. This is identical to the
+ /// [`time::format_description::parse`] method, but runs at compile-time,
+    /// throwing an error if the format description is invalid. If the desired format
+ /// is not known statically (e.g., a user is providing a format string), then the
+ /// [`time::format_description::parse`] method should be used. Note that this
+ /// method is fallible.
+ ///
+ /// See the [`time` book] for details on the format description syntax.
+ ///
+ /// # Examples
+ ///
+ /// Using the [`format_description!`] macro:
+ ///
+ /// ```
+ /// use tracing_subscriber::fmt::{self, time::UtcTime};
+ /// use time::macros::format_description;
+ ///
+ /// let timer = UtcTime::new(format_description!("[hour]:[minute]:[second]"));
+ /// let collector = tracing_subscriber::fmt()
+ /// .with_timer(timer);
+ /// # drop(collector);
+ /// ```
+ ///
+ /// Using the [`format_description!`] macro requires enabling the `time`
+ /// crate's "macros" feature flag.
+ ///
+ /// Using [`time::format_description::parse`]:
+ ///
+ /// ```
+ /// use tracing_subscriber::fmt::{self, time::UtcTime};
+ ///
+ /// let time_format = time::format_description::parse("[hour]:[minute]:[second]")
+ /// .expect("format string should be valid!");
+ /// let timer = UtcTime::new(time_format);
+ /// let collector = tracing_subscriber::fmt()
+ /// .with_timer(timer);
+ /// # drop(collector);
+ /// ```
+ ///
+ /// Using a [well-known format][well-known formats] (this is equivalent to
+ /// [`UtcTime::rfc_3339`]):
+ ///
+ /// ```
+ /// use tracing_subscriber::fmt::{self, time::UtcTime};
+ ///
+ /// let timer = UtcTime::new(time::format_description::well_known::Rfc3339);
+ /// let collector = tracing_subscriber::fmt()
+ /// .with_timer(timer);
+ /// # drop(collector);
+ /// ```
+ ///
+ /// [UTC time]: time::OffsetDateTime::now_utc()
+ /// [`time` crate]: time
+ /// [`Formattable`]: time::formatting::Formattable
+ /// [well-known formats]: time::format_description::well_known
+ /// [`format_description!`]: time::macros::format_description!
+ /// [`time::format_description::parse`]: time::format_description::parse
+ /// [`time` book]: https://time-rs.github.io/book/api/format-description.html
+ pub fn new(format: F) -> Self {
+ Self { format }
+ }
+}
+
+impl<F> FormatTime for UtcTime<F>
+where
+ F: Formattable,
+{
+ fn format_time(&self, w: &mut Writer<'_>) -> fmt::Result {
+ format_datetime(OffsetDateTime::now_utc(), w, &self.format)
+ }
+}
+
+impl<F> Default for UtcTime<F>
+where
+ F: Formattable + Default,
+{
+ fn default() -> Self {
+ Self::new(F::default())
+ }
+}
+
+// === impl OffsetTime ===
+
+#[cfg(feature = "local-time")]
+impl OffsetTime<well_known::Rfc3339> {
+ /// Returns a formatter that formats the current time using the [local time offset] in the [RFC
+ /// 3339] format (a subset of the [ISO 8601] timestamp format).
+ ///
+ /// Returns an error if the local time offset cannot be determined. This typically occurs in
+ /// multithreaded programs. To avoid this problem, initialize `OffsetTime` before forking
+ /// threads. When using Tokio, this means initializing `OffsetTime` before the Tokio runtime.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use tracing_subscriber::fmt::{self, time};
+ ///
+ /// let collector = tracing_subscriber::fmt()
+ /// .with_timer(time::OffsetTime::local_rfc_3339().expect("could not get local offset!"));
+ /// # drop(collector);
+ /// ```
+ ///
+ /// Using `OffsetTime` with Tokio:
+ ///
+ /// ```
+ /// use tracing_subscriber::fmt::time::OffsetTime;
+ ///
+ /// #[tokio::main]
+ /// async fn run() {
+ /// tracing::info!("runtime initialized");
+ ///
+ /// // At this point the Tokio runtime is initialized, and we can use both Tokio and Tracing
+ /// // normally.
+ /// }
+ ///
+ /// fn main() {
+ /// // Because we need to get the local offset before Tokio spawns any threads, our `main`
+ /// // function cannot use `tokio::main`.
+ /// tracing_subscriber::fmt()
+ /// .with_timer(OffsetTime::local_rfc_3339().expect("could not get local time offset"))
+ /// .init();
+ ///
+    ///     // Even though `run` is written as an `async fn`, we can call it as a
+    ///     // synchronous function because we used `#[tokio::main]` on it.
+ /// run();
+ /// }
+ /// ```
+ ///
+ /// [local time offset]: time::UtcOffset::current_local_offset
+ /// [RFC 3339]: https://datatracker.ietf.org/doc/html/rfc3339
+ /// [ISO 8601]: https://en.wikipedia.org/wiki/ISO_8601
+ pub fn local_rfc_3339() -> Result<Self, time::error::IndeterminateOffset> {
+ Ok(Self::new(
+ UtcOffset::current_local_offset()?,
+ well_known::Rfc3339,
+ ))
+ }
+}
+
+impl<F: time::formatting::Formattable> OffsetTime<F> {
+    /// Returns a formatter that formats the current time using the [`time` crate] with the
+ /// provided format and [timezone offset]. The format may be any type that implements the
+ /// [`Formattable`] trait.
+ ///
+ ///
+ /// Typically, the offset will be the [local offset], and format will be a format description
+ /// string, or one of the `time` crate's [well-known formats].
+ ///
+ /// If the format description is statically known, then the
+ /// [`format_description!`] macro should be used. This is identical to the
+ /// [`time::format_description::parse`] method, but runs at compile-time,
+ /// throwing an error if the format description is invalid. If the desired format
+ /// is not known statically (e.g., a user is providing a format string), then the
+ /// [`time::format_description::parse`] method should be used. Note that this
+ /// method is fallible.
+ ///
+ /// See the [`time` book] for details on the format description syntax.
+ ///
+ /// # Examples
+ ///
+ /// Using the [`format_description!`] macro:
+ ///
+ /// ```
+ /// use tracing_subscriber::fmt::{self, time::OffsetTime};
+ /// use time::macros::format_description;
+ /// use time::UtcOffset;
+ ///
+ /// let offset = UtcOffset::current_local_offset().expect("should get local offset!");
+ /// let timer = OffsetTime::new(offset, format_description!("[hour]:[minute]:[second]"));
+ /// let collector = tracing_subscriber::fmt()
+ /// .with_timer(timer);
+ /// # drop(collector);
+ /// ```
+ ///
+ /// Using [`time::format_description::parse`]:
+ ///
+ /// ```
+ /// use tracing_subscriber::fmt::{self, time::OffsetTime};
+ /// use time::UtcOffset;
+ ///
+ /// let offset = UtcOffset::current_local_offset().expect("should get local offset!");
+ /// let time_format = time::format_description::parse("[hour]:[minute]:[second]")
+ /// .expect("format string should be valid!");
+ /// let timer = OffsetTime::new(offset, time_format);
+ /// let collector = tracing_subscriber::fmt()
+ /// .with_timer(timer);
+ /// # drop(collector);
+ /// ```
+ ///
+ /// Using the [`format_description!`] macro requires enabling the `time`
+ /// crate's "macros" feature flag.
+ ///
+ /// Using a [well-known format][well-known formats] (this is equivalent to
+ /// [`OffsetTime::local_rfc_3339`]):
+ ///
+ /// ```
+ /// use tracing_subscriber::fmt::{self, time::OffsetTime};
+ /// use time::UtcOffset;
+ ///
+ /// let offset = UtcOffset::current_local_offset().expect("should get local offset!");
+ /// let timer = OffsetTime::new(offset, time::format_description::well_known::Rfc3339);
+ /// let collector = tracing_subscriber::fmt()
+ /// .with_timer(timer);
+ /// # drop(collector);
+ /// ```
+ ///
+ /// [`time` crate]: time
+ /// [timezone offset]: time::UtcOffset
+ /// [`Formattable`]: time::formatting::Formattable
+ /// [local offset]: time::UtcOffset::current_local_offset()
+ /// [well-known formats]: time::format_description::well_known
+ /// [`format_description!`]: time::macros::format_description
+ /// [`time::format_description::parse`]: time::format_description::parse
+ /// [`time` book]: https://time-rs.github.io/book/api/format-description.html
+ pub fn new(offset: time::UtcOffset, format: F) -> Self {
+ Self { offset, format }
+ }
+}
+
+impl<F> FormatTime for OffsetTime<F>
+where
+ F: time::formatting::Formattable,
+{
+ fn format_time(&self, w: &mut Writer<'_>) -> fmt::Result {
+ let now = OffsetDateTime::now_utc().to_offset(self.offset);
+ format_datetime(now, w, &self.format)
+ }
+}
+
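+// Formats `now` with `fmt`, adapting the `fmt::Write`-based `Writer` to the
+// `io::Write` interface that the `time` crate's `format_into` expects.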
+fn format_datetime(
+ now: OffsetDateTime,
+ into: &mut Writer<'_>,
+ fmt: &impl Formattable,
+) -> fmt::Result {
+ let mut into = WriteAdaptor::new(into);
+ now.format_into(&mut into, fmt)
+ .map_err(|_| fmt::Error)
+ .map(|_| ())
+}
diff --git a/vendor/tracing-subscriber-0.3.3/src/fmt/writer.rs b/vendor/tracing-subscriber/src/fmt/writer.rs
index 0974891f7..3fe945566 100644
--- a/vendor/tracing-subscriber-0.3.3/src/fmt/writer.rs
+++ b/vendor/tracing-subscriber/src/fmt/writer.rs
@@ -1,6 +1,6 @@
//! Abstractions for creating [`io::Write`] instances.
//!
-//! [`io::Write`]: https://doc.rust-lang.org/std/io/trait.Write.html
+//! [`io::Write`]: std::io::Write
use std::{
fmt,
io::{self, Write},
@@ -96,8 +96,8 @@ use tracing_core::Metadata;
pub trait MakeWriter<'a> {
/// The concrete [`io::Write`] implementation returned by [`make_writer`].
///
- /// [`io::Write`]: https://doc.rust-lang.org/std/io/trait.Write.html
- /// [`make_writer`]: #tymethod.make_writer
+ /// [`io::Write`]: std::io::Write
+ /// [`make_writer`]: MakeWriter::make_writer
type Writer: io::Write;
/// Returns an instance of [`Writer`].
@@ -109,7 +109,7 @@ pub trait MakeWriter<'a> {
/// creating a [`io::Write`] instance is expensive, be sure to cache it when implementing
/// [`MakeWriter`] to improve performance.
///
- /// [`Writer`]: #associatedtype.Writer
+ /// [`Writer`]: MakeWriter::Writer
/// [`fmt::Layer`]: crate::fmt::Layer
/// [`fmt::Subscriber`]: crate::fmt::Subscriber
/// [`io::Write`]: std::io::Write
@@ -501,13 +501,13 @@ pub trait MakeWriterExt<'a>: MakeWriter<'a> {
/// Writing to [`io::stdout`] and [`io::stderr`] produces the same results as using
/// [`libtest`'s `--nocapture` option][nocapture] which may make the results look unreadable.
///
-/// [`fmt::Subscriber`]: ../struct.Subscriber.html
-/// [`fmt::Layer`]: ../struct.Layer.html
+/// [`fmt::Subscriber`]: super::Subscriber
+/// [`fmt::Layer`]: super::Layer
/// [capturing]: https://doc.rust-lang.org/book/ch11-02-running-tests.html#showing-function-output
/// [nocapture]: https://doc.rust-lang.org/cargo/commands/cargo-test.html
-/// [`io::stdout`]: https://doc.rust-lang.org/std/io/fn.stdout.html
-/// [`io::stderr`]: https://doc.rust-lang.org/std/io/fn.stderr.html
-/// [`print!`]: https://doc.rust-lang.org/std/macro.print.html
+/// [`io::stdout`]: std::io::stdout
+/// [`io::stderr`]: std::io::stderr
+/// [`print!`]: std::print!
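+///
+/// # Examples
+///
+/// A minimal sketch of enabling this writer through the `fmt` builder, which
+/// is the usual way it is used:
+///
+/// ```
+/// let subscriber = tracing_subscriber::fmt()
+///     .with_test_writer()
+///     .finish();
+/// # drop(subscriber);
+/// ```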
#[derive(Default, Debug)]
pub struct TestWriter {
_p: (),
@@ -646,10 +646,9 @@ pub struct Tee<A, B> {
/// requires the `Writer` type to implement [`io::Write`], it's necessary to add
/// a newtype that forwards the trait implementation.
///
-/// [`io::Write`]: https://doc.rust-lang.org/std/io/trait.Write.html
-/// [`MutexGuard`]: https://doc.rust-lang.org/std/sync/struct.MutexGuard.html
-/// [`Mutex`]: https://doc.rust-lang.org/std/sync/struct.Mutex.html
-/// [`MakeWriter`]: trait.MakeWriter.html
+/// [`io::Write`]: std::io::Write
+/// [`MutexGuard`]: std::sync::MutexGuard
+/// [`Mutex`]: std::sync::Mutex
#[derive(Debug)]
pub struct MutexGuardWriter<'a, W>(MutexGuard<'a, W>);
@@ -689,7 +688,7 @@ where
{
type Writer = &'a W;
fn make_writer(&'a self) -> Self::Writer {
- &*self
+ self
}
}
@@ -734,7 +733,6 @@ impl<'a> MakeWriter<'a> for TestWriter {
impl BoxMakeWriter {
/// Constructs a `BoxMakeWriter` wrapping a type implementing [`MakeWriter`].
///
- /// [`MakeWriter`]: trait.MakeWriter.html
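+    ///
+    /// # Examples
+    ///
+    /// A minimal sketch that erases the concrete writer type behind a box:
+    ///
+    /// ```
+    /// use tracing_subscriber::fmt::writer::BoxMakeWriter;
+    ///
+    /// let writer = BoxMakeWriter::new(std::io::stderr);
+    /// let subscriber = tracing_subscriber::fmt()
+    ///     .with_writer(writer)
+    ///     .finish();
+    /// # drop(subscriber);
+    /// ```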
pub fn new<M>(make_writer: M) -> Self
where
M: for<'a> MakeWriter<'a> + Send + Sync + 'static,
@@ -1025,6 +1023,8 @@ impl<A, B> Tee<A, B> {
/// outputs.
///
/// See the documentation for [`MakeWriterExt::and`] for details.
+ ///
+ /// [writers]: std::io::Write
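+    ///
+    /// # Examples
+    ///
+    /// A minimal sketch that sends every event to both stdout and stderr
+    /// ([`MakeWriterExt::and`] builds the same combinator):
+    ///
+    /// ```
+    /// use tracing_subscriber::fmt::writer::Tee;
+    ///
+    /// let writer = Tee::new(std::io::stdout, std::io::stderr);
+    /// let subscriber = tracing_subscriber::fmt()
+    ///     .with_writer(writer)
+    ///     .finish();
+    /// # drop(subscriber);
+    /// ```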
pub fn new(a: A, b: B) -> Self {
Self { a, b }
}
diff --git a/vendor/tracing-subscriber-0.3.3/src/layer/context.rs b/vendor/tracing-subscriber/src/layer/context.rs
index e11959526..46254994f 100644
--- a/vendor/tracing-subscriber-0.3.3/src/layer/context.rs
+++ b/vendor/tracing-subscriber/src/layer/context.rs
@@ -25,10 +25,10 @@ use crate::{filter::FilterId, registry::Registry};
/// }
/// ```
///
-/// [`Layer`]: ../layer/trait.Layer.html
-/// [`Subscriber`]: https://docs.rs/tracing-core/latest/tracing_core/trait.Subscriber.html
-/// [stored data]: ../registry/struct.SpanRef.html
-/// [`LookupSpan`]: "../registry/trait.LookupSpan.html
+/// [`Layer`]: super::Layer
+/// [`Subscriber`]: tracing_core::Subscriber
+/// [stored data]: crate::registry::SpanRef
+/// [`LookupSpan`]: crate::registry::LookupSpan
#[derive(Debug)]
pub struct Context<'a, S> {
subscriber: Option<&'a S>,
@@ -99,9 +99,9 @@ where
/// check whether the event would be enabled. This allows `Layer`s to
/// elide constructing the event if it would not be recorded.
///
- /// [register]: https://docs.rs/tracing-core/latest/tracing_core/subscriber/trait.Subscriber.html#method.register_callsite
- /// [`enabled`]: https://docs.rs/tracing-core/latest/tracing_core/subscriber/trait.Subscriber.html#method.enabled
- /// [`Context::enabled`]: #method.enabled
+ /// [register]: tracing_core::subscriber::Subscriber::register_callsite()
+ /// [`enabled`]: tracing_core::subscriber::Subscriber::enabled()
+ /// [`Context::enabled`]: Context::enabled()
#[inline]
pub fn event(&self, event: &Event<'_>) {
if let Some(subscriber) = self.subscriber {
@@ -206,7 +206,7 @@ where
/// declaration</a> for details.
/// </pre>
///
- /// [stored data]: ../registry/struct.SpanRef.html
+ /// [stored data]: crate::registry::SpanRef
#[inline]
pub fn span(&self, id: &span::Id) -> Option<registry::SpanRef<'_, S>>
where
@@ -251,7 +251,7 @@ where
/// declaration</a> for details.
/// </pre>
///
- /// [stored data]: ../registry/struct.SpanRef.html
+ /// [stored data]: crate::registry::SpanRef
#[inline]
pub fn lookup_current(&self) -> Option<registry::SpanRef<'_, S>>
where
@@ -333,7 +333,7 @@ where
/// declaration</a> for details.
/// </pre>
///
- /// [stored data]: ../registry/struct.SpanRef.html
+ /// [stored data]: crate::registry::SpanRef
pub fn span_scope(&self, id: &span::Id) -> Option<registry::Scope<'_, S>>
where
S: for<'lookup> LookupSpan<'lookup>,
@@ -360,7 +360,7 @@ where
/// declaration</a> for details.
/// </pre>
///
- /// [stored data]: ../registry/struct.SpanRef.html
+ /// [stored data]: crate::registry::SpanRef
pub fn event_scope(&self, event: &Event<'_>) -> Option<registry::Scope<'_, S>>
where
S: for<'lookup> LookupSpan<'lookup>,
diff --git a/vendor/tracing-subscriber-0.3.3/src/layer/layered.rs b/vendor/tracing-subscriber/src/layer/layered.rs
index c690764ad..f09c58c97 100644
--- a/vendor/tracing-subscriber-0.3.3/src/layer/layered.rs
+++ b/vendor/tracing-subscriber/src/layer/layered.rs
@@ -1,9 +1,4 @@
-use tracing_core::{
- metadata::Metadata,
- span,
- subscriber::{Interest, Subscriber},
- Event, LevelFilter,
-};
+use tracing_core::{metadata::Metadata, span, Dispatch, Event, Interest, LevelFilter, Subscriber};
use crate::{
filter,
@@ -12,13 +7,17 @@ use crate::{
};
#[cfg(all(feature = "registry", feature = "std"))]
use crate::{filter::FilterId, registry::Registry};
-use core::{any::TypeId, cmp, fmt, marker::PhantomData};
+use core::{
+ any::{Any, TypeId},
+ cmp, fmt,
+ marker::PhantomData,
+};
/// A [`Subscriber`] composed of a `Subscriber` wrapped by one or more
/// [`Layer`]s.
///
/// [`Layer`]: crate::Layer
-/// [`Subscriber`]: https://docs.rs/tracing-core/latest/tracing_core/trait.Subscriber.html
+/// [`Subscriber`]: tracing_core::Subscriber
#[derive(Clone)]
pub struct Layered<L, I, S = I> {
/// The layer.
@@ -63,6 +62,30 @@ pub struct Layered<L, I, S = I> {
// === impl Layered ===
+impl<L, S> Layered<L, S>
+where
+ L: Layer<S>,
+ S: Subscriber,
+{
+ /// Returns `true` if this [`Subscriber`] is the same type as `T`.
+ pub fn is<T: Any>(&self) -> bool {
+ self.downcast_ref::<T>().is_some()
+ }
+
+ /// Returns some reference to this [`Subscriber`] value if it is of type `T`,
+ /// or `None` if it isn't.
+ pub fn downcast_ref<T: Any>(&self) -> Option<&T> {
+ unsafe {
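+            // `downcast_raw` hands back a type-erased pointer into `self` only
+            // when the requested `TypeId` matches, so casting it back to `&T`
+            // for the lifetime of `&self` is sound; the null check guards
+            // against misbehaving `downcast_raw` implementations.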
+ let raw = self.downcast_raw(TypeId::of::<T>())?;
+ if raw.is_null() {
+ None
+ } else {
+ Some(&*(raw as *const T))
+ }
+ }
+ }
+}
+
impl<L, S> Subscriber for Layered<L, S>
where
L: Layer<S>,
@@ -92,7 +115,11 @@ where
}
fn max_level_hint(&self) -> Option<LevelFilter> {
- self.pick_level_hint(self.layer.max_level_hint(), self.inner.max_level_hint())
+ self.pick_level_hint(
+ self.layer.max_level_hint(),
+ self.inner.max_level_hint(),
+ super::subscriber_is_none(&self.inner),
+ )
}
fn new_span(&self, span: &span::Attributes<'_>) -> span::Id {
@@ -111,6 +138,16 @@ where
self.layer.on_follows_from(span, follows, self.ctx());
}
+ fn event_enabled(&self, event: &Event<'_>) -> bool {
+ if self.layer.event_enabled(event, self.ctx()) {
+ // if the outer layer enables the event, ask the inner subscriber.
+ self.inner.event_enabled(event)
+ } else {
+ // otherwise, the event is disabled by this layer
+ false
+ }
+ }
+
fn event(&self, event: &Event<'_>) {
self.inner.event(event);
self.layer.on_event(event, self.ctx());
@@ -152,7 +189,7 @@ where
#[cfg(all(feature = "registry", feature = "std"))]
{
if let Some(g) = guard.as_mut() {
- g.is_closing()
+ g.set_closing()
};
}
@@ -207,6 +244,11 @@ where
B: Layer<S>,
S: Subscriber,
{
+ fn on_register_dispatch(&self, subscriber: &Dispatch) {
+ self.layer.on_register_dispatch(subscriber);
+ self.inner.on_register_dispatch(subscriber);
+ }
+
fn on_layer(&mut self, subscriber: &mut S) {
self.layer.on_layer(subscriber);
self.inner.on_layer(subscriber);
@@ -229,7 +271,11 @@ where
}
fn max_level_hint(&self) -> Option<LevelFilter> {
- self.pick_level_hint(self.layer.max_level_hint(), self.inner.max_level_hint())
+ self.pick_level_hint(
+ self.layer.max_level_hint(),
+ self.inner.max_level_hint(),
+ super::layer_is_none(&self.inner),
+ )
}
#[inline]
@@ -251,6 +297,17 @@ where
}
#[inline]
+ fn event_enabled(&self, event: &Event<'_>, ctx: Context<'_, S>) -> bool {
+ if self.layer.event_enabled(event, ctx.clone()) {
+ // if the outer layer enables the event, ask the inner subscriber.
+ self.inner.event_enabled(event, ctx)
+ } else {
+ // otherwise, the event is disabled by this layer
+ false
+ }
+ }
+
+ #[inline]
fn on_event(&self, event: &Event<'_>, ctx: Context<'_, S>) {
self.inner.on_event(event, ctx.clone());
self.layer.on_event(event, ctx);
@@ -386,7 +443,7 @@ where
// (rather than calling into the inner type), clear the current
// per-layer filter interest state.
#[cfg(feature = "registry")]
- drop(filter::FilterState::take_interest());
+ filter::FilterState::take_interest();
return outer;
}
@@ -421,6 +478,7 @@ where
&self,
outer_hint: Option<LevelFilter>,
inner_hint: Option<LevelFilter>,
+ inner_is_none: bool,
) -> Option<LevelFilter> {
if self.inner_is_registry {
return outer_hint;
@@ -438,6 +496,31 @@ where
return None;
}
+ // If the layer is `Option::None`, then we
+ // want to short-circuit the layer underneath, if it
+ // returns `None`, to override the `None` layer returning
+ // `Some(OFF)`, which should ONLY apply when there are
+ // no other layers that return `None`. Note this
+ // `None` does not == `Some(TRACE)`, it means
+ // something more like: "whatever all the other
+ // layers agree on, default to `TRACE` if none
+    // have an opinion". We also choose to do this AFTER
+ // we check for per-layer filters, which
+ // have their own logic.
+ //
+ // Also note that this does come at some perf cost, but
+ // this function is only called on initialization and
+ // subscriber reloading.
+ if super::layer_is_none(&self.layer) {
+ return cmp::max(outer_hint, Some(inner_hint?));
+ }
+
+ // Similarly, if the layer on the inside is `None` and it returned an
+ // `Off` hint, we want to override that with the outer hint.
+ if inner_is_none && inner_hint == Some(LevelFilter::OFF) {
+ return outer_hint;
+ }
+
cmp::max(outer_hint, inner_hint)
}
}
diff --git a/vendor/tracing-subscriber-0.3.3/src/layer/mod.rs b/vendor/tracing-subscriber/src/layer/mod.rs
index f3f994490..bdc154301 100644
--- a/vendor/tracing-subscriber-0.3.3/src/layer/mod.rs
+++ b/vendor/tracing-subscriber/src/layer/mod.rs
@@ -19,7 +19,7 @@
//! [`Subscriber`] behavior; it can _observe_ events and spans, but does not
//! assign IDs.
//!
-//! ## Composing Layers
+//! # Composing Layers
//!
//! Since a [`Layer`] does not implement a complete strategy for collecting
//! traces, it must be composed with a `Subscriber` in order to be used. The
@@ -135,9 +135,245 @@
//! [`Layer::with_subscriber`] as an implementation detail, as `with_subscriber`
//! calls must be nested, leading to less clear code for the reader.
//!
+//! ## Runtime Configuration With `Layer`s
+//!
+//! In some cases, a particular [`Layer`] may be enabled or disabled based on
+//! runtime configuration. This can introduce challenges, because the type of a
+//! layered [`Subscriber`] depends on which layers are added to it: if an `if`
+//! or `match` expression adds some [`Layer`] implementation in one branch,
+//! and other layers in another, the [`Subscriber`] values returned by those
+//! branches will have different types. For example, the following _will not_
+//! work:
+//!
+//! ```compile_fail
+//! # fn docs() -> Result<(), Box<dyn std::error::Error + 'static>> {
+//! # struct Config {
+//! # is_prod: bool,
+//! # path: &'static str,
+//! # }
+//! # let cfg = Config { is_prod: false, path: "debug.log" };
+//! use std::fs::File;
+//! use tracing_subscriber::{Registry, prelude::*};
+//!
+//! let stdout_log = tracing_subscriber::fmt::layer().pretty();
+//! let subscriber = Registry::default().with(stdout_log);
+//!
+//! // The compile error will occur here because the if and else
+//! // branches have different (and therefore incompatible) types.
+//! let subscriber = if cfg.is_prod {
+//! let file = File::create(cfg.path)?;
+//! let layer = tracing_subscriber::fmt::layer()
+//! .json()
+//!         .with_writer(file);
+//!     subscriber.with(layer)
+//! } else {
+//!     subscriber
+//! };
+//!
+//! tracing::subscriber::set_global_default(subscriber)
+//! .expect("Unable to set global subscriber");
+//! # Ok(()) }
+//! ```
+//!
+//! However, a [`Layer`] wrapped in an [`Option`] [also implements the `Layer`
+//! trait][option-impl]. This allows individual layers to be enabled or disabled at
+//! runtime while always producing a [`Subscriber`] of the same type. For
+//! example:
+//!
+//! ```
+//! # fn docs() -> Result<(), Box<dyn std::error::Error + 'static>> {
+//! # struct Config {
+//! # is_prod: bool,
+//! # path: &'static str,
+//! # }
+//! # let cfg = Config { is_prod: false, path: "debug.log" };
+//! use std::fs::File;
+//! use tracing_subscriber::{Registry, prelude::*};
+//!
+//! let stdout_log = tracing_subscriber::fmt::layer().pretty();
+//! let subscriber = Registry::default().with(stdout_log);
+//!
+//! // if `cfg.is_prod` is true, also log JSON-formatted logs to a file.
+//! let json_log = if cfg.is_prod {
+//! let file = File::create(cfg.path)?;
+//! let json_log = tracing_subscriber::fmt::layer()
+//! .json()
+//! .with_writer(file);
+//! Some(json_log)
+//! } else {
+//! None
+//! };
+//!
+//! // If `cfg.is_prod` is false, then `json` will be `None`, and this layer
+//! // will do nothing. However, the subscriber will still have the same type
+//! // regardless of whether the `Option`'s value is `None` or `Some`.
+//! let subscriber = subscriber.with(json_log);
+//!
+//! tracing::subscriber::set_global_default(subscriber)
+//! .expect("Unable to set global subscriber");
+//! # Ok(()) }
+//! ```
+//!
+//! If a [`Layer`] may be one of several different types, note that [`Box<dyn
+//! Layer<S> + Send + Sync>` implements `Layer`][box-impl].
+//! This may be used to erase the type of a [`Layer`].
+//!
+//! For example, a function that configures a [`Layer`] to log to one of
+//! several outputs might return a `Box<dyn Layer<S> + Send + Sync + 'static>`:
+//! ```
+//! use tracing_subscriber::{
+//! Layer,
+//! registry::LookupSpan,
+//! prelude::*,
+//! };
+//! use std::{path::PathBuf, fs::File, io};
+//!
+//! /// Configures whether logs are emitted to a file, to stdout, or to stderr.
+//! pub enum LogConfig {
+//! File(PathBuf),
+//! Stdout,
+//! Stderr,
+//! }
+//!
+//! impl LogConfig {
+//! pub fn layer<S>(self) -> Box<dyn Layer<S> + Send + Sync + 'static>
+//! where
+//! S: tracing_core::Subscriber,
+//! for<'a> S: LookupSpan<'a>,
+//! {
+//! // Shared configuration regardless of where logs are output to.
+//! let fmt = tracing_subscriber::fmt::layer()
+//! .with_target(true)
+//! .with_thread_names(true);
+//!
+//! // Configure the writer based on the desired log target:
+//! match self {
+//! LogConfig::File(path) => {
+//! let file = File::create(path).expect("failed to create log file");
+//! Box::new(fmt.with_writer(file))
+//! },
+//! LogConfig::Stdout => Box::new(fmt.with_writer(io::stdout)),
+//! LogConfig::Stderr => Box::new(fmt.with_writer(io::stderr)),
+//! }
+//! }
+//! }
+//!
+//! let config = LogConfig::Stdout;
+//! tracing_subscriber::registry()
+//! .with(config.layer())
+//! .init();
+//! ```
+//!
+//! The [`Layer::boxed`] method is provided to make boxing a `Layer`
+//! more convenient, but [`Box::new`] may be used as well.
+//!
+//! When the number of `Layer`s varies at runtime, note that a
+//! [`Vec<L> where L: Layer` also implements `Layer`][vec-impl]. This
+//! can be used to add a variable number of `Layer`s to a `Subscriber`:
+//!
+//! ```
+//! use tracing_subscriber::{Layer, prelude::*};
+//! struct MyLayer {
+//! // ...
+//! }
+//! # impl MyLayer { fn new() -> Self { Self {} }}
+//!
+//! impl<S: tracing_core::Subscriber> Layer<S> for MyLayer {
+//! // ...
+//! }
+//!
+//! /// Returns how many layers we need
+//! fn how_many_layers() -> usize {
+//! // ...
+//! # 3
+//! }
+//!
+//! // Create a variable-length `Vec` of layers
+//! let mut layers = Vec::new();
+//! for _ in 0..how_many_layers() {
+//! layers.push(MyLayer::new());
+//! }
+//!
+//! tracing_subscriber::registry()
+//! .with(layers)
+//! .init();
+//! ```
+//!
+//! If a variable number of `Layer`s is needed and those `Layer`s have
+//! different types, a `Vec` of [boxed `Layer` trait objects][box-impl] may
+//! be used. For example:
+//!
+//! ```
+//! use tracing_subscriber::{filter::LevelFilter, Layer, prelude::*};
+//! use std::fs::File;
+//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
+//! struct Config {
+//! enable_log_file: bool,
+//! enable_stdout: bool,
+//! enable_stderr: bool,
+//! // ...
+//! }
+//! # impl Config {
+//! # fn from_config_file()-> Result<Self, Box<dyn std::error::Error>> {
+//! # // don't enable the log file so that the example doesn't actually create it
+//! # Ok(Self { enable_log_file: false, enable_stdout: true, enable_stderr: true })
+//! # }
+//! # }
+//!
+//! let cfg = Config::from_config_file()?;
+//!
+//! // Based on our dynamically loaded config file, create any number of layers:
+//! let mut layers = Vec::new();
+//!
+//! if cfg.enable_log_file {
+//! let file = File::create("myapp.log")?;
+//! let layer = tracing_subscriber::fmt::layer()
+//! .with_thread_names(true)
+//! .with_target(true)
+//! .json()
+//! .with_writer(file)
+//! // Box the layer as a type-erased trait object, so that it can
+//! // be pushed to the `Vec`.
+//! .boxed();
+//! layers.push(layer);
+//! }
+//!
+//! if cfg.enable_stdout {
+//! let layer = tracing_subscriber::fmt::layer()
+//! .pretty()
+//! .with_filter(LevelFilter::INFO)
+//! // Box the layer as a type-erased trait object, so that it can
+//! // be pushed to the `Vec`.
+//! .boxed();
+//! layers.push(layer);
+//! }
+//!
+//! if cfg.enable_stderr {
+//!     let layer = tracing_subscriber::fmt::layer()
+//!         .with_writer(std::io::stderr)
+//!         .with_target(false)
+//!         .with_filter(LevelFilter::WARN)
+//! // Box the layer as a type-erased trait object, so that it can
+//! // be pushed to the `Vec`.
+//! .boxed();
+//! layers.push(layer);
+//! }
+//!
+//! tracing_subscriber::registry()
+//! .with(layers)
+//! .init();
+//!# Ok(()) }
+//! ```
+//!
+//! Finally, if the number of layers _changes_ at runtime, a `Vec` of
+//! layers can be used alongside the [`reload`](crate::reload) module to
+//! add or remove layers dynamically.
+//!
+//! [option-impl]: Layer#impl-Layer<S>-for-Option<L>
+//! [box-impl]: Layer#impl-Layer%3CS%3E-for-Box%3Cdyn%20Layer%3CS%3E%20+%20Send%20+%20Sync%3E
+//! [vec-impl]: Layer#impl-Layer<S>-for-Vec<L>
//! [prelude]: crate::prelude
//!
-//! ## Recording Traces
+//! # Recording Traces
//!
//! The [`Layer`] trait defines a set of methods for consuming notifications from
//! tracing instrumentation, which are generally equivalent to the similarly
@@ -146,7 +382,7 @@
//! information provided by the wrapped subscriber (such as [the current span])
//! to the layer.
//!
-//! ## Filtering with `Layer`s
+//! # Filtering with `Layer`s
//!
//! As well as strategies for handling trace events, the `Layer` trait may also
//! be used to represent composable _filters_. This allows the determination of
@@ -158,7 +394,7 @@
//! combined with _per-layer filters_ that control what spans and events are
//! recorded by those layers.
//!
-//! ### Global Filtering
+//! ## Global Filtering
//!
//! A `Layer` that implements a filtering strategy should override the
//! [`register_callsite`] and/or [`enabled`] methods. It may also choose to implement
@@ -179,7 +415,29 @@
//! [`Interest::never()`] from its [`register_callsite`] method, filter
//! evaluation will short-circuit and the span or event will be disabled.
//!
-//! ### Per-Layer Filtering
+//! ### Enabling Interest
+//!
+//! Whenever a tracing event (or span) is emitted, it goes through a number of
+//! steps to determine how and how much it should be processed. The earlier an
+//! event is disabled, the less work has to be done to process the event, so
+//! `Layer`s that implement filtering should attempt to disable unwanted
+//! events as early as possible. In order, the checks applied to each event are:
+//!
+//! - [`register_callsite`], once per callsite (roughly: once per time that
+//! `event!` or `span!` is written in the source code; this is cached at the
+//! callsite). See [`Subscriber::register_callsite`] and
+//! [`tracing_core::callsite`] for a summary of how this behaves.
+//! - [`enabled`], once per emitted event (roughly: once per time that `event!`
+//!   or `span!` is *executed*), and only if `register_callsite` registers an
+//! [`Interest::sometimes`]. This is the main customization point to globally
+//! filter events based on their [`Metadata`]. If an event can be disabled
+//! based only on [`Metadata`], it should be, as this allows the construction
+//! of the actual `Event`/`Span` to be skipped.
+//! - For events only (and not spans), [`event_enabled`] is called just before
+//! processing the event. This gives layers one last chance to say that
+//!   an event should be filtered out, now that the event's fields are known;
+//!   a sketch of such a filter follows this list.
+//!
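+//! For example, a layer that drops any event recording a boolean field named
+//! `skip` with the value `true` might look roughly like this (a sketch only;
+//! the `skip` field name is illustrative, not part of any API):
+//!
+//! ```
+//! use tracing_core::{field::{Field, Visit}, Event, Subscriber};
+//! use tracing_subscriber::layer::{Context, Layer};
+//!
+//! struct SkipFilter;
+//!
+//! impl<S: Subscriber> Layer<S> for SkipFilter {
+//!     fn event_enabled(&self, event: &Event<'_>, _ctx: Context<'_, S>) -> bool {
+//!         // Visit the event's fields, looking for `skip = true`.
+//!         struct SkipVisitor(bool);
+//!         impl Visit for SkipVisitor {
+//!             fn record_bool(&mut self, field: &Field, value: bool) {
+//!                 if field.name() == "skip" {
+//!                     self.0 = value;
+//!                 }
+//!             }
+//!             fn record_debug(&mut self, _: &Field, _: &dyn std::fmt::Debug) {}
+//!         }
+//!         let mut visitor = SkipVisitor(false);
+//!         event.record(&mut visitor);
+//!         // Returning `false` disables this event for the whole subscriber.
+//!         !visitor.0
+//!     }
+//! }
+//! ```
+//!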
+//! ## Per-Layer Filtering
//!
//! **Note**: per-layer filtering APIs currently require the [`"registry"` crate
//! feature flag][feat] to be enabled.
@@ -393,94 +651,16 @@
//! # Ok(()) }
//! ```
//!
-//! ## Runtime Configuration With Layers
-//!
-//! In some cases, a particular [`Layer`] may be enabled or disabled based on
-//! runtime configuration. This can introduce challenges, because the type of a
-//! layered [`Subscriber`] depends on which layers are added to it: if an `if`
-//! or `match` expression adds some [`Layer`]s in one branch and other layers
-//! in another, the [`Subscriber`] values returned by those branches will have
-//! different types. For example, the following _will not_ work:
-//!
-//! ```compile_fail
-//! # fn docs() -> Result<(), Box<dyn std::error::Error + 'static>> {
-//! # struct Config {
-//! # is_prod: bool,
-//! # path: &'static str,
-//! # }
-//! # let cfg = Config { is_prod: false, path: "debug.log" };
-//! use std::{fs::File, sync::Arc};
-//! use tracing_subscriber::{Registry, prelude::*};
-//!
-//! let stdout_log = tracing_subscriber::fmt::layer().pretty();
-//! let subscriber = Registry::default().with(stdout_log);
-//!
-//! // The compile error will occur here because the if and else
-//! // branches have different (and therefore incompatible) types.
-//! let subscriber = if cfg.is_prod {
-//! let file = File::create(cfg.path)?;
-//! let layer = tracing_subscriber::fmt::layer()
-//! .json()
-//! .with_writer(Arc::new(file));
-//! subscriber.with(layer)
-//! } else {
-//! subscriber
-//! };
-//!
-//! tracing::subscriber::set_global_default(subscriber)
-//! .expect("Unable to set global subscriber");
-//! # Ok(()) }
-//! ```
-//!
-//! However, a [`Layer`] wrapped in an [`Option`] [also implements the `Layer`
-//! trait][option-impl]. This allows individual layers to be enabled or disabled at
-//! runtime while always producing a [`Subscriber`] of the same type. For
-//! example:
-//!
-//! ```
-//! # fn docs() -> Result<(), Box<dyn std::error::Error + 'static>> {
-//! # struct Config {
-//! # is_prod: bool,
-//! # path: &'static str,
-//! # }
-//! # let cfg = Config { is_prod: false, path: "debug.log" };
-//! use std::{fs::File, sync::Arc};
-//! use tracing_subscriber::{Registry, prelude::*};
-//!
-//! let stdout_log = tracing_subscriber::fmt::layer().pretty();
-//! let subscriber = Registry::default().with(stdout_log);
-//!
-//! // if `cfg.is_prod` is true, also log JSON-formatted logs to a file.
-//! let json_log = if cfg.is_prod {
-//! let file = File::create(cfg.path)?;
-//! let json_log = tracing_subscriber::fmt::layer()
-//! .json()
-//! .with_writer(Arc::new(file));
-//! Some(json_log)
-//! } else {
-//! None
-//! };
-//!
-//! // If `cfg.is_prod` is false, then `json` will be `None`, and this layer
-//! // will do nothing. However, the subscriber will still have the same type
-//! // regardless of whether the `Option`'s value is `None` or `Some`.
-//! let subscriber = subscriber.with(json_log);
-//!
-//! tracing::subscriber::set_global_default(subscriber)
-//! .expect("Unable to set global subscriber");
-//! # Ok(()) }
-//! ```
-//!
-//! [`Subscriber`]: https://docs.rs/tracing-core/latest/tracing_core/subscriber/trait.Subscriber.html
-//! [span IDs]: https://docs.rs/tracing-core/latest/tracing_core/span/struct.Id.html
+//! [`Subscriber`]: tracing_core::subscriber::Subscriber
+//! [span IDs]: tracing_core::span::Id
//! [the current span]: Context::current_span
//! [`register_callsite`]: Layer::register_callsite
//! [`enabled`]: Layer::enabled
+//! [`event_enabled`]: Layer::event_enabled
//! [`on_enter`]: Layer::on_enter
//! [`Layer::register_callsite`]: Layer::register_callsite
//! [`Layer::enabled`]: Layer::enabled
-//! [`Interest::never()`]: https://docs.rs/tracing-core/latest/tracing_core/subscriber/struct.Interest.html#method.never
-//! [option-impl]: crate::layer::Layer#impl-Layer<S>-for-Option<L>
+//! [`Interest::never()`]: tracing_core::subscriber::Interest::never()
//! [`Filtered`]: crate::filter::Filtered
//! [`filter`]: crate::filter
//! [`Targets`]: crate::filter::Targets
@@ -498,7 +678,7 @@ use tracing_core::{
metadata::Metadata,
span,
subscriber::{Interest, Subscriber},
- Event, LevelFilter,
+ Dispatch, Event, LevelFilter,
};
use core::any::TypeId;
@@ -531,6 +711,31 @@ where
S: Subscriber,
Self: 'static,
{
+ /// Performs late initialization when installing this layer as a
+ /// [`Subscriber`].
+ ///
+ /// ## Avoiding Memory Leaks
+ ///
+ /// `Layer`s should not store the [`Dispatch`] pointing to the [`Subscriber`]
+ /// that they are a part of. Because the `Dispatch` owns the `Subscriber`,
+ /// storing the `Dispatch` within the `Subscriber` will create a reference
+ /// count cycle, preventing the `Dispatch` from ever being dropped.
+ ///
+ /// Instead, when it is necessary to store a cyclical reference to the
+ /// `Dispatch` within a `Layer`, use [`Dispatch::downgrade`] to convert a
+ /// `Dispatch` into a [`WeakDispatch`]. This type is analogous to
+ /// [`std::sync::Weak`], and does not create a reference count cycle. A
+ /// [`WeakDispatch`] can be stored within a `Layer` without causing a
+ /// memory leak, and can be [upgraded] into a `Dispatch` temporarily when
+ /// the `Dispatch` must be accessed by the `Layer`.
+ ///
+ /// [`WeakDispatch`]: tracing_core::dispatcher::WeakDispatch
+ /// [upgraded]: tracing_core::dispatcher::WeakDispatch::upgrade
+ /// [`Subscriber`]: tracing_core::Subscriber
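+ ///
+ /// For illustration, a sketch of a `Layer` that stores a [`WeakDispatch`]
+ /// (the `MyLayer` type is hypothetical, not part of this crate):
+ ///
+ /// ```
+ /// use std::sync::Mutex;
+ /// use tracing_core::{dispatcher::WeakDispatch, Dispatch, Subscriber};
+ /// use tracing_subscriber::layer::Layer;
+ ///
+ /// struct MyLayer {
+ ///     // A weak handle to the subscriber this layer is attached to.
+ ///     dispatch: Mutex<Option<WeakDispatch>>,
+ /// }
+ ///
+ /// impl<S: Subscriber> Layer<S> for MyLayer {
+ ///     fn on_register_dispatch(&self, dispatch: &Dispatch) {
+ ///         // Downgrading avoids a reference-count cycle; the stored
+ ///         // `WeakDispatch` can be upgraded later when the `Dispatch`
+ ///         // is actually needed.
+ ///         *self.dispatch.lock().unwrap() = Some(dispatch.downgrade());
+ ///     }
+ /// }
+ /// ```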
+ fn on_register_dispatch(&self, collector: &Dispatch) {
+ let _ = collector;
+ }
+
/// Performs late initialization when attaching a `Layer` to a
/// [`Subscriber`].
///
@@ -592,15 +797,15 @@ where
/// globally enable or disable those callsites, it should always return
/// [`Interest::always()`].
///
- /// [`Interest`]: https://docs.rs/tracing-core/latest/tracing_core/struct.Interest.html
- /// [`Subscriber::register_callsite`]: https://docs.rs/tracing-core/latest/tracing_core/trait.Subscriber.html#method.register_callsite
- /// [`Interest::never()`]: https://docs.rs/tracing-core/latest/tracing_core/subscriber/struct.Interest.html#method.never
- /// [`Interest::always()`]: https://docs.rs/tracing-core/latest/tracing_core/subscriber/struct.Interest.html#method.always
- /// [`self.enabled`]: #method.enabled
- /// [`Layer::enabled`]: #method.enabled
- /// [`on_event`]: #method.on_event
- /// [`on_enter`]: #method.on_enter
- /// [`on_exit`]: #method.on_exit
+ /// [`Interest`]: tracing_core::Interest
+ /// [`Subscriber::register_callsite`]: tracing_core::Subscriber::register_callsite()
+ /// [`Interest::never()`]: tracing_core::subscriber::Interest::never()
+ /// [`Interest::always()`]: tracing_core::subscriber::Interest::always()
+ /// [`self.enabled`]: Layer::enabled()
+ /// [`Layer::enabled`]: Layer::enabled()
+ /// [`on_event`]: Layer::on_event()
+ /// [`on_enter`]: Layer::on_enter()
+ /// [`on_exit`]: Layer::on_exit()
/// [the trait-level documentation]: #filtering-with-layers
fn register_callsite(&self, metadata: &'static Metadata<'static>) -> Interest {
if self.enabled(metadata, Context::none()) {
@@ -635,13 +840,12 @@ where
/// See [the trait-level documentation] for more information on filtering
/// with `Layer`s.
///
- /// [`Interest`]: https://docs.rs/tracing-core/latest/tracing_core/struct.Interest.html
- /// [`Context`]: ../struct.Context.html
- /// [`Subscriber::enabled`]: https://docs.rs/tracing-core/latest/tracing_core/trait.Subscriber.html#method.enabled
- /// [`Layer::register_callsite`]: #method.register_callsite
- /// [`on_event`]: #method.on_event
- /// [`on_enter`]: #method.on_enter
- /// [`on_exit`]: #method.on_exit
+ /// [`Interest`]: tracing_core::Interest
+ /// [`Subscriber::enabled`]: tracing_core::Subscriber::enabled()
+ /// [`Layer::register_callsite`]: Layer::register_callsite()
+ /// [`on_event`]: Layer::on_event()
+ /// [`on_enter`]: Layer::on_enter()
+ /// [`on_exit`]: Layer::on_exit()
/// [the trait-level documentation]: #filtering-with-layers
fn enabled(&self, metadata: &Metadata<'_>, ctx: Context<'_, S>) -> bool {
let _ = (metadata, ctx);
@@ -676,6 +880,31 @@ where
// seems like a good future-proofing measure as it may grow other methods later...
fn on_follows_from(&self, _span: &span::Id, _follows: &span::Id, _ctx: Context<'_, S>) {}
+ /// Called before [`on_event`], to determine if `on_event` should be called.
+ ///
+ /// <div class="example-wrap" style="display:inline-block">
+ /// <pre class="ignore" style="white-space:normal;font:inherit;">
+ ///
+ /// **Note**: This method determines whether an event is globally enabled,
+ /// *not* whether the individual `Layer` will be notified about the
+ /// event. This is intended to be used by `Layer`s that implement
+ /// filtering for the entire stack. `Layer`s which do not wish to be
+ /// notified about certain events but do not wish to globally disable them
+ /// should ignore those events in their [on_event][Self::on_event].
+ ///
+ /// </pre></div>
+ ///
+ /// See [the trait-level documentation] for more information on filtering
+ /// with `Layer`s.
+ ///
+ /// [`on_event`]: Self::on_event
+ /// [`Interest`]: tracing_core::Interest
+ /// [the trait-level documentation]: #filtering-with-layers
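+ ///
+ /// For illustration, a sketch of a `Layer` that drops events based on a
+ /// field *value*, which is only known once the event has been constructed
+ /// (the `SkipSuppressed` type and the `suppress` field are hypothetical):
+ ///
+ /// ```
+ /// use tracing_core::field::{Field, Visit};
+ /// use tracing_core::{Event, Subscriber};
+ /// use tracing_subscriber::layer::{Context, Layer};
+ ///
+ /// // Disables any event recorded with `suppress = true`.
+ /// struct SkipSuppressed;
+ ///
+ /// impl<S: Subscriber> Layer<S> for SkipSuppressed {
+ ///     fn event_enabled(&self, event: &Event<'_>, _: Context<'_, S>) -> bool {
+ ///         struct Finder(bool);
+ ///         impl Visit for Finder {
+ ///             fn record_bool(&mut self, field: &Field, value: bool) {
+ ///                 if field.name() == "suppress" && value {
+ ///                     self.0 = true;
+ ///                 }
+ ///             }
+ ///             fn record_debug(&mut self, _: &Field, _: &dyn std::fmt::Debug) {}
+ ///         }
+ ///         let mut finder = Finder(false);
+ ///         event.record(&mut finder);
+ ///         // Returning `false` here disables the event for the whole stack.
+ ///         !finder.0
+ ///     }
+ /// }
+ /// ```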
+ #[inline] // collapse this to a constant please mrs optimizer
+ fn event_enabled(&self, _event: &Event<'_>, _ctx: Context<'_, S>) -> bool {
+ true
+ }
+
/// Notifies this layer that an event has occurred.
fn on_event(&self, _event: &Event<'_>, _ctx: Context<'_, S>) {}
@@ -840,7 +1069,7 @@ where
/// .with_subscriber(MySubscriber::new());
///```
///
- /// [`Subscriber`]: https://docs.rs/tracing-core/latest/tracing_core/trait.Subscriber.html
+ /// [`Subscriber`]: tracing_core::Subscriber
fn with_subscriber(mut self, mut inner: S) -> Layered<Self, S>
where
Self: Sized,
@@ -857,7 +1086,7 @@ where
/// per-layer filtering.
///
/// [`Filtered`]: crate::filter::Filtered
- /// [plf]: #per-layer-filtering
+ /// [plf]: crate::layer#per-layer-filtering
#[cfg(all(feature = "registry", feature = "std"))]
#[cfg_attr(docsrs, doc(cfg(all(feature = "registry", feature = "std"))))]
fn with_filter<F>(self, filter: F) -> filter::Filtered<Self, F, S>
@@ -868,6 +1097,131 @@ where
filter::Filtered::new(self, filter)
}
+ /// Erases the type of this [`Layer`], returning a [`Box`]ed `dyn
+ /// Layer` trait object.
+ ///
+ /// This can be used when a function returns a `Layer` which may be of
+ /// one of several types, or when a `Layer` has a very long type
+ /// signature.
+ ///
+ /// # Examples
+ ///
+ /// The following example will *not* compile, because the value assigned to
+ /// `log_layer` may have one of several different types:
+ ///
+ /// ```compile_fail
+ /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
+ /// use tracing_subscriber::{Layer, filter::LevelFilter, prelude::*};
+ /// use std::{path::PathBuf, fs::File, io};
+ ///
+ /// /// Configures whether logs are emitted to a file, to stdout, or to stderr.
+ /// pub enum LogConfig {
+ /// File(PathBuf),
+ /// Stdout,
+ /// Stderr,
+ /// }
+ ///
+ /// let config = // ...
+ /// # LogConfig::Stdout;
+ ///
+ /// // Depending on the config, construct a layer of one of several types.
+ /// let log_layer = match config {
+ /// // If logging to a file, use a maximally-verbose configuration.
+ /// LogConfig::File(path) => {
+ /// let file = File::create(path)?;
+ /// tracing_subscriber::fmt::layer()
+ /// .with_thread_ids(true)
+ /// .with_thread_names(true)
+ /// // Selecting the JSON logging format changes the layer's
+ /// // type.
+ /// .json()
+ /// .with_span_list(true)
+ /// // Setting the writer to use our log file changes the
+ /// // layer's type again.
+ /// .with_writer(file)
+ /// },
+ ///
+ /// // If logging to stdout, use a pretty, human-readable configuration.
+ /// LogConfig::Stdout => tracing_subscriber::fmt::layer()
+ /// // Selecting the "pretty" logging format changes the
+ /// // layer's type!
+ /// .pretty()
+ /// .with_writer(io::stdout)
+ /// // Add a filter based on the RUST_LOG environment variable;
+ /// // this changes the type too!
+ /// .and_then(tracing_subscriber::EnvFilter::from_default_env()),
+ ///
+ /// // If logging to stderr, only log errors and warnings.
+ /// LogConfig::Stderr => tracing_subscriber::fmt::layer()
+ /// // Changing the writer changes the layer's type
+ /// .with_writer(io::stderr)
+ /// // Only log the `WARN` and `ERROR` levels. Adding a filter
+ /// // changes the layer's type to `Filtered<LevelFilter, ...>`.
+ /// .with_filter(LevelFilter::WARN),
+ /// };
+ ///
+ /// tracing_subscriber::registry()
+ /// .with(log_layer)
+ /// .init();
+ /// # Ok(()) }
+ /// ```
+ ///
+ /// However, adding a call to `.boxed()` after each match arm erases the
+ /// layer's type, so this code *does* compile:
+ ///
+ /// ```
+ /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
+ /// # use tracing_subscriber::{Layer, filter::LevelFilter, prelude::*};
+ /// # use std::{path::PathBuf, fs::File, io};
+ /// # pub enum LogConfig {
+ /// # File(PathBuf),
+ /// # Stdout,
+ /// # Stderr,
+ /// # }
+ /// # let config = LogConfig::Stdout;
+ /// let log_layer = match config {
+ /// LogConfig::File(path) => {
+ /// let file = File::create(path)?;
+ /// tracing_subscriber::fmt::layer()
+ /// .with_thread_ids(true)
+ /// .with_thread_names(true)
+ /// .json()
+ /// .with_span_list(true)
+ /// .with_writer(file)
+ /// // Erase the type by boxing the layer
+ /// .boxed()
+ /// },
+ ///
+ /// LogConfig::Stdout => tracing_subscriber::fmt::layer()
+ /// .pretty()
+ /// .with_writer(io::stdout)
+ /// .and_then(tracing_subscriber::EnvFilter::from_default_env())
+ /// // Erase the type by boxing the layer
+ /// .boxed(),
+ ///
+ /// LogConfig::Stderr => tracing_subscriber::fmt::layer()
+ /// .with_writer(io::stderr)
+ /// .with_filter(LevelFilter::WARN)
+ /// // Erase the type by boxing the layer
+ /// .boxed(),
+ /// };
+ ///
+ /// tracing_subscriber::registry()
+ /// .with(log_layer)
+ /// .init();
+ /// # Ok(()) }
+ /// ```
+ #[cfg(any(feature = "alloc", feature = "std"))]
+ #[cfg_attr(docsrs, doc(cfg(any(feature = "alloc", feature = "std"))))]
+ fn boxed(self) -> Box<dyn Layer<S> + Send + Sync + 'static>
+ where
+ Self: Sized,
+ Self: Layer<S> + Send + Sync + 'static,
+ S: Subscriber,
+ {
+ Box::new(self)
+ }
+
#[doc(hidden)]
unsafe fn downcast_raw(&self, id: TypeId) -> Option<*const ()> {
if id == TypeId::of::<Self>() {
@@ -880,6 +1234,7 @@ where
feature! {
#![all(feature = "registry", feature = "std")]
+
/// A per-[`Layer`] filter that determines whether a span or event is enabled
/// for an individual layer.
///
@@ -1022,6 +1377,26 @@ feature! {
Interest::sometimes()
}
+ /// Called before the filtered [`Layer`]'s [`on_event`], to determine if
+ /// `on_event` should be called.
+ ///
+ /// This gives a chance to filter events based on their fields. Note,
+ /// however, that this *does not* override [`enabled`], and is not even
+ /// called if [`enabled`] returns `false`.
+ ///
+ /// ## Default Implementation
+ ///
+ /// By default, this method returns `true`, indicating that no events are
+ /// filtered out based on their fields.
+ ///
+ /// [`enabled`]: crate::layer::Filter::enabled
+ /// [`on_event`]: crate::layer::Layer::on_event
+ #[inline] // collapse this to a constant please mrs optimizer
+ fn event_enabled(&self, event: &Event<'_>, cx: &Context<'_, S>) -> bool {
+ let _ = (event, cx);
+ true
+ }
+
/// Returns an optional hint of the highest [verbosity level][level] that
/// this `Filter` will enable.
///
@@ -1054,6 +1429,51 @@ feature! {
fn max_level_hint(&self) -> Option<LevelFilter> {
None
}
+
+ /// Notifies this filter that a new span was constructed with the given
+ /// `Attributes` and `Id`.
+ ///
+ /// By default, this method does nothing. `Filter` implementations that
+ /// need to be notified when new spans are created can override this
+ /// method.
+ fn on_new_span(&self, attrs: &span::Attributes<'_>, id: &span::Id, ctx: Context<'_, S>) {
+ let _ = (attrs, id, ctx);
+ }
+
+ /// Notifies this filter that a span with the given `Id` recorded the given
+ /// `values`.
+ ///
+ /// By default, this method does nothing. `Filter` implementations that
+ /// need to be notified when a span records values can override this
+ /// method.
+ fn on_record(&self, id: &span::Id, values: &span::Record<'_>, ctx: Context<'_, S>) {
+ let _ = (id, values, ctx);
+ }
+
+ /// Notifies this filter that a span with the given ID was entered.
+ ///
+ /// By default, this method does nothing. `Filter` implementations that
+ /// need to be notified when a span is entered can override this method.
+ fn on_enter(&self, id: &span::Id, ctx: Context<'_, S>) {
+ let _ = (id, ctx);
+ }
+
+ /// Notifies this filter that a span with the given ID was exited.
+ ///
+ /// By default, this method does nothing. `Filter` implementations that
+ /// need to be notified when a span is exited can override this method.
+ fn on_exit(&self, id: &span::Id, ctx: Context<'_, S>) {
+ let _ = (id, ctx);
+ }
+
+ /// Notifies this filter that a span with the given ID has been closed.
+ ///
+ /// By default, this method does nothing. `Filter` implementations that
+ /// need to be notified when a span is closed can override this method.
+ fn on_close(&self, id: span::Id, ctx: Context<'_, S>) {
+ let _ = (id, ctx);
+ }
}
}
@@ -1077,6 +1497,47 @@ pub struct Identity {
// === impl Layer ===
+#[derive(Clone, Copy)]
+pub(crate) struct NoneLayerMarker(());
+static NONE_LAYER_MARKER: NoneLayerMarker = NoneLayerMarker(());
+
+/// Returns `true` if a type implementing `Layer` is `Option::<_>::None`.
+pub(crate) fn layer_is_none<L, S>(layer: &L) -> bool
+where
+ L: Layer<S>,
+ S: Subscriber,
+{
+ unsafe {
+ // Safety: we're not actually *doing* anything with this pointer ---
+ // we only care about the `Option`, which is essentially being used
+ // as a bool. We can rely on the pointer being valid, because it is
+ // a crate-private type, and is only returned by the `Layer` impl
+ // for `Option`s. However, even if the layer *does* decide to be
+ // evil and give us an invalid pointer here, that's fine, because we'll
+ // never actually dereference it.
+ layer.downcast_raw(TypeId::of::<NoneLayerMarker>())
+ }
+ .is_some()
+}
+
+/// Returns `true` if a type implementing `Subscriber` is `Option::<_>::None`.
+pub(crate) fn subscriber_is_none<S>(subscriber: &S) -> bool
+where
+ S: Subscriber,
+{
+ unsafe {
+ // Safety: we're not actually *doing* anything with this pointer ---
+ // we only care about the `Option`, which is essentially being used
+ // as a bool. We can rely on the pointer being valid, because it is
+ // a crate-private type, and is only returned by the `Layer` impl
+ // for `Option`s. However, even if the subscriber *does* decide to be
+ // evil and give us an invalid pointer here, that's fine, because we'll
+ // never actually dereference it.
+ subscriber.downcast_raw(TypeId::of::<NoneLayerMarker>())
+ }
+ .is_some()
+}
+
impl<L, S> Layer<S> for Option<L>
where
L: Layer<S>,
@@ -1115,7 +1576,11 @@ where
fn max_level_hint(&self) -> Option<LevelFilter> {
match self {
Some(ref inner) => inner.max_level_hint(),
- None => None,
+ None => {
+ // There is no inner layer, so this layer will
+ // never enable anything.
+ Some(LevelFilter::OFF)
+ }
}
}
@@ -1134,6 +1599,14 @@ where
}
#[inline]
+ fn event_enabled(&self, event: &Event<'_>, ctx: Context<'_, S>) -> bool {
+ match self {
+ Some(ref inner) => inner.event_enabled(event, ctx),
+ None => true,
+ }
+ }
+
+ #[inline]
fn on_event(&self, event: &Event<'_>, ctx: Context<'_, S>) {
if let Some(ref inner) = self {
inner.on_event(event, ctx);
@@ -1173,6 +1646,8 @@ where
unsafe fn downcast_raw(&self, id: TypeId) -> Option<*const ()> {
if id == TypeId::of::<Self>() {
Some(self as *const _ as *const ())
+ } else if id == TypeId::of::<NoneLayerMarker>() && self.is_none() {
+ Some(&NONE_LAYER_MARKER as *const _ as *const ())
} else {
self.as_ref().and_then(|inner| inner.downcast_raw(id))
}
@@ -1181,10 +1656,17 @@ where
feature! {
#![any(feature = "std", feature = "alloc")]
+ #[cfg(not(feature = "std"))]
+ use alloc::vec::Vec;
macro_rules! layer_impl_body {
() => {
#[inline]
+ fn on_register_dispatch(&self, subscriber: &Dispatch) {
+ self.deref().on_register_dispatch(subscriber);
+ }
+
+ #[inline]
fn on_layer(&mut self, subscriber: &mut S) {
self.deref_mut().on_layer(subscriber);
}
@@ -1220,6 +1702,11 @@ feature! {
}
#[inline]
+ fn event_enabled(&self, event: &Event<'_>, ctx: Context<'_, S>) -> bool {
+ self.deref().event_enabled(event, ctx)
+ }
+
+ #[inline]
fn on_event(&self, event: &Event<'_>, ctx: Context<'_, S>) {
self.deref().on_event(event, ctx)
}
@@ -1266,6 +1753,125 @@ feature! {
{
layer_impl_body! {}
}
+
+ impl<S, L> Layer<S> for Vec<L>
+ where
+ L: Layer<S>,
+ S: Subscriber,
+ {
+
+ fn on_layer(&mut self, subscriber: &mut S) {
+ for l in self {
+ l.on_layer(subscriber);
+ }
+ }
+
+ fn register_callsite(&self, metadata: &'static Metadata<'static>) -> Interest {
+ // Return highest level of interest.
+ let mut interest = Interest::never();
+ for l in self {
+ let new_interest = l.register_callsite(metadata);
+ if (interest.is_sometimes() && new_interest.is_always())
+ || (interest.is_never() && !new_interest.is_never())
+ {
+ interest = new_interest;
+ }
+ }
+
+ interest
+ }
+
+ fn enabled(&self, metadata: &Metadata<'_>, ctx: Context<'_, S>) -> bool {
+ self.iter().all(|l| l.enabled(metadata, ctx.clone()))
+ }
+
+ fn event_enabled(&self, event: &Event<'_>, ctx: Context<'_, S>) -> bool {
+ self.iter().all(|l| l.event_enabled(event, ctx.clone()))
+ }
+
+ fn on_new_span(&self, attrs: &span::Attributes<'_>, id: &span::Id, ctx: Context<'_, S>) {
+ for l in self {
+ l.on_new_span(attrs, id, ctx.clone());
+ }
+ }
+
+ fn max_level_hint(&self) -> Option<LevelFilter> {
+ // Default to `OFF` if there are no inner layers.
+ let mut max_level = LevelFilter::OFF;
+ for l in self {
+ // NOTE(eliza): this is slightly subtle: if *any* layer
+ // returns `None`, we have to return `None`, assuming there is
+ // no max level hint, since that particular layer cannot
+ // provide a hint.
+ let hint = l.max_level_hint()?;
+ max_level = core::cmp::max(hint, max_level);
+ }
+ Some(max_level)
+ }
+
+ fn on_record(&self, span: &span::Id, values: &span::Record<'_>, ctx: Context<'_, S>) {
+ for l in self {
+ l.on_record(span, values, ctx.clone())
+ }
+ }
+
+ fn on_follows_from(&self, span: &span::Id, follows: &span::Id, ctx: Context<'_, S>) {
+ for l in self {
+ l.on_follows_from(span, follows, ctx.clone());
+ }
+ }
+
+ fn on_event(&self, event: &Event<'_>, ctx: Context<'_, S>) {
+ for l in self {
+ l.on_event(event, ctx.clone());
+ }
+ }
+
+ fn on_enter(&self, id: &span::Id, ctx: Context<'_, S>) {
+ for l in self {
+ l.on_enter(id, ctx.clone());
+ }
+ }
+
+ fn on_exit(&self, id: &span::Id, ctx: Context<'_, S>) {
+ for l in self {
+ l.on_exit(id, ctx.clone());
+ }
+ }
+
+ fn on_close(&self, id: span::Id, ctx: Context<'_, S>) {
+ for l in self {
+ l.on_close(id.clone(), ctx.clone());
+ }
+ }
+
+ #[doc(hidden)]
+ unsafe fn downcast_raw(&self, id: TypeId) -> Option<*const ()> {
+ // If downcasting to `Self`, return a pointer to `self`.
+ if id == TypeId::of::<Self>() {
+ return Some(self as *const _ as *const ());
+ }
+
+ // Someone is looking for per-layer filters. But, this `Vec`
+ // might contain layers with per-layer filters *and*
+ // layers without filters. It should only be treated as a
+ // per-layer-filtered layer if *all* its layers have
+ // per-layer filters.
+ // XXX(eliza): it's a bummer we have to do this linear search every
+ // time. It would be nice if this could be cached, but that would
+ // require replacing the `Vec` impl with an impl for a newtype...
+ if filter::is_plf_downcast_marker(id) && self.iter().any(|s| s.downcast_raw(id).is_none()) {
+ return None;
+ }
+
+ // Otherwise, return the first child of `self` that downcasts to
+ // the selected type, if any.
+ // XXX(eliza): hope this is reasonable lol
+ self.iter().find_map(|l| l.downcast_raw(id))
+ }
+ }
}
// === impl SubscriberExt ===
diff --git a/vendor/tracing-subscriber-0.3.3/src/layer/tests.rs b/vendor/tracing-subscriber/src/layer/tests.rs
index d7ad61769..d7ad61769 100644
--- a/vendor/tracing-subscriber-0.3.3/src/layer/tests.rs
+++ b/vendor/tracing-subscriber/src/layer/tests.rs
diff --git a/vendor/tracing-subscriber-0.3.3/src/lib.rs b/vendor/tracing-subscriber/src/lib.rs
index 563a86dee..808923007 100644
--- a/vendor/tracing-subscriber-0.3.3/src/lib.rs
+++ b/vendor/tracing-subscriber/src/lib.rs
@@ -10,7 +10,7 @@
//! `tracing-subscriber` is intended for use by both `Subscriber` authors and
//! application authors using `tracing` to instrument their applications.
//!
-//! *Compiler support: [requires `rustc` 1.42+][msrv]*
+//! *Compiler support: [requires `rustc` 1.50+][msrv]*
//!
//! [msrv]: #supported-rust-versions
//!
@@ -60,9 +60,11 @@
//! **Requires "std"**.
//! - `json`: Enables `fmt` support for JSON output. In JSON output, the ANSI
//! feature does nothing. **Requires "fmt" and "std"**.
-//! - [`local-time`]: Enables local time formatting when using the [`time`
+//! - `local-time`: Enables local time formatting when using the [`time`
//! crate]'s timestamp formatters with the `fmt` subscriber.
//!
+//! [`registry`]: mod@registry
+//!
//! ### Optional Dependencies
//!
//! - [`tracing-log`]: Enables better formatting for events emitted by `log`
@@ -80,7 +82,7 @@
//! used without requiring the Rust standard library, although some features are
//! disabled. Although most of the APIs provided by `tracing-subscriber`, such
//! as [`fmt`] and [`EnvFilter`], require the standard library, some
-//! functionality, such as the [`Subscriber`] trait, can still be used in
+//! functionality, such as the [`Layer`] trait, can still be used in
//! `no_std` environments.
//!
//! The dependency on the standard library is controlled by two crate feature
@@ -102,10 +104,41 @@
//! tracing-subscriber = { version = "0.3", default-features = false, features = ["alloc"] }
//! ```
//!
+//! ### Unstable Features
+//!
+//! These feature flags enable **unstable** features. The public API may break in 0.3.x
+//! releases. To enable these features, the `--cfg tracing_unstable` flag must be passed to
+//! `rustc` when compiling.
+//!
+//! The following unstable feature flags are currently available:
+//!
+//! * `valuable`: Enables support for serializing values recorded using the
+//! [`valuable`] crate as structured JSON in the [`format::Json`] formatter.
+//!
+//! #### Enabling Unstable Features
+//!
+//! The easiest way to set the `tracing_unstable` cfg is to use the `RUSTFLAGS`
+//! env variable when running `cargo` commands:
+//!
+//! ```shell
+//! RUSTFLAGS="--cfg tracing_unstable" cargo build
+//! ```
+//! Alternatively, the following can be added to the `.cargo/config` file in a
+//! project to automatically enable the cfg flag for that project:
+//!
+//! ```toml
+//! [build]
+//! rustflags = ["--cfg", "tracing_unstable"]
+//! ```
+//!
+//! [feature flags]: https://doc.rust-lang.org/cargo/reference/manifest.html#the-features-section
+//! [`valuable`]: https://crates.io/crates/valuable
+//! [`format::Json`]: crate::fmt::format::Json
+//!
//! ## Supported Rust Versions
//!
//! Tracing is built against the latest stable release. The minimum supported
-//! version is 1.42. The current Tracing version is not guaranteed to build on
+//! version is 1.50. The current Tracing version is not guaranteed to build on
//! Rust versions earlier than the minimum supported version.
//!
//! Tracing follows the same compiler support policies as the rest of the Tokio
@@ -116,17 +149,18 @@
//! supported compiler version is not considered a semver breaking change as
//! long as doing so complies with this policy.
//!
-//! [`tracing`]: https://docs.rs/tracing/latest/tracing/
-//! [`Subscriber`]: https://docs.rs/tracing-core/latest/tracing_core/subscriber/trait.Subscriber.html
-//! [`EnvFilter`]: filter/struct.EnvFilter.html
-//! [`fmt`]: fmt/index.html
+//! [`Subscriber`]: tracing_core::subscriber::Subscriber
+//! [`tracing`]: https://docs.rs/tracing/latest/tracing
+//! [`EnvFilter`]: filter::EnvFilter
+//! [`fmt`]: mod@fmt
//! [`tracing-log`]: https://crates.io/crates/tracing-log
//! [`smallvec`]: https://crates.io/crates/smallvec
//! [`env_logger` crate]: https://crates.io/crates/env_logger
//! [`parking_lot`]: https://crates.io/crates/parking_lot
//! [`time` crate]: https://crates.io/crates/time
-//! [`liballoc`]: https://doc.rust-lang.org/alloc/index.html
-#![doc(html_root_url = "https://docs.rs/tracing-subscriber/0.3.1")]
+//! [`libstd`]: std
+//! [`liballoc`]: alloc
+#![doc(html_root_url = "https://docs.rs/tracing-subscriber/0.3.15")]
#![doc(
html_logo_url = "https://raw.githubusercontent.com/tokio-rs/tracing/master/assets/logo-type.png",
issue_tracker_base_url = "https://github.com/tokio-rs/tracing/issues/"
diff --git a/vendor/tracing-subscriber-0.3.3/src/macros.rs b/vendor/tracing-subscriber/src/macros.rs
index 81351132f..81351132f 100644
--- a/vendor/tracing-subscriber-0.3.3/src/macros.rs
+++ b/vendor/tracing-subscriber/src/macros.rs
diff --git a/vendor/tracing-subscriber-0.3.3/src/prelude.rs b/vendor/tracing-subscriber/src/prelude.rs
index c2230907b..c2230907b 100644
--- a/vendor/tracing-subscriber-0.3.3/src/prelude.rs
+++ b/vendor/tracing-subscriber/src/prelude.rs
diff --git a/vendor/tracing-subscriber-0.3.3/src/registry/extensions.rs b/vendor/tracing-subscriber/src/registry/extensions.rs
index 899e1549f..ff76fb599 100644
--- a/vendor/tracing-subscriber-0.3.3/src/registry/extensions.rs
+++ b/vendor/tracing-subscriber/src/registry/extensions.rs
@@ -78,7 +78,7 @@ impl<'a> ExtensionsMut<'a> {
/// should be able to reuse timestamp _x_.
///
/// Therefore, extensions should generally be newtypes, rather than common
- /// types like [`String`](https://doc.rust-lang.org/std/string/struct.String.html), to avoid accidental
+ /// types like [`String`](std::string::String), to avoid accidental
/// cross-`Layer` clobbering.
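    ///
    /// For example, a sketch of the newtype pattern (the `RequestId` type is
    /// hypothetical, not part of this crate):
    ///
    /// ```
    /// // A dedicated wrapper type keys the value uniquely in the extensions
    /// // typemap, so another layer inserting a plain `u64` cannot clobber it.
    /// struct RequestId(u64);
    /// // ...and a layer would then call `extensions.insert(RequestId(42));`.
    /// ```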
///
/// ## Panics
diff --git a/vendor/tracing-subscriber-0.3.3/src/registry/mod.rs b/vendor/tracing-subscriber/src/registry/mod.rs
index f3b77b6a9..38af53e8a 100644
--- a/vendor/tracing-subscriber-0.3.3/src/registry/mod.rs
+++ b/vendor/tracing-subscriber/src/registry/mod.rs
@@ -55,8 +55,7 @@
//! require the root subscriber to be a registry.
//!
//! [`Layer`]: crate::layer::Layer
-//! [`Subscriber`]:
-//! https://docs.rs/tracing-core/latest/tracing_core/subscriber/trait.Subscriber.html
+//! [`Subscriber`]: tracing_core::Subscriber
//! [ctx]: crate::layer::Context
//! [lookup]: crate::layer::Context::span()
use tracing_core::{field::FieldSet, span::Id, Metadata};
@@ -87,9 +86,9 @@ feature! {
/// implement this trait; if they do, any [`Layer`]s wrapping them can look up
/// metadata via the [`Context`] type's [`span()`] method.
///
-/// [`Layer`]: ../layer/trait.Layer.html
-/// [`Context`]: ../layer/struct.Context.html
-/// [`span()`]: ../layer/struct.Context.html#method.span
+/// [`Layer`]: super::layer::Layer
+/// [`Context`]: super::layer::Context
+/// [`span()`]: super::layer::Context::span
pub trait LookupSpan<'a> {
/// The type of span data stored in this registry.
type Data: SpanData<'a>;
@@ -104,7 +103,6 @@ pub trait LookupSpan<'a> {
 /// capable of performing more sophisticated queries.
/// </pre>
///
- /// [`SpanData`]: trait.SpanData.html
fn span_data(&'a self, id: &Id) -> Option<Self::Data>;
/// Returns a [`SpanRef`] for the span with the given `Id`, if it exists.
@@ -116,9 +114,7 @@ pub trait LookupSpan<'a> {
/// rather than the [`span_data`] method; while _implementors_ of this trait
/// should only implement `span_data`.
///
- /// [`SpanRef`]: struct.SpanRef.html
- /// [`SpanData`]: trait.SpanData.html
- /// [`span_data`]: #method.span_data
+ /// [`span_data`]: LookupSpan::span_data()
fn span(&'a self, id: &Id) -> Option<SpanRef<'_, Self>>
where
Self: Sized,
@@ -208,8 +204,8 @@ pub trait SpanData<'a> {
/// provides additional methods for querying the registry based on values from
/// the span.
///
-/// [span data]: trait.SpanData.html
-/// [registry]: trait.LookupSpan.html
+/// [span data]: SpanData
+/// [registry]: LookupSpan
#[derive(Debug)]
pub struct SpanRef<'a, R: LookupSpan<'a>> {
registry: &'a R,
@@ -360,7 +356,7 @@ where
/// Returns a list of [fields] defined by the span.
///
- /// [fields]: https://docs.rs/tracing-core/latest/tracing_core/field/index.html
+ /// [fields]: tracing_core::field
pub fn fields(&self) -> &FieldSet {
self.data.metadata().fields()
}
diff --git a/vendor/tracing-subscriber-0.3.3/src/registry/sharded.rs b/vendor/tracing-subscriber/src/registry/sharded.rs
index a6311cb71..797899767 100644
--- a/vendor/tracing-subscriber-0.3.3/src/registry/sharded.rs
+++ b/vendor/tracing-subscriber/src/registry/sharded.rs
@@ -75,16 +75,16 @@ use tracing_core::{
/// the distributed tracing system. These IDs can be associated with
/// `tracing` spans using [fields] and/or [stored span data].
///
-/// [span IDs]: https://docs.rs/tracing-core/latest/tracing_core/span/struct.Id.html
-/// [slab]: https://docs.rs/crate/sharded-slab/
+/// [span IDs]: tracing_core::span::Id
+/// [slab]: sharded_slab
/// [`Layer`]: crate::Layer
/// [added]: crate::layer::Layer#composing-layers
/// [extensions]: super::Extensions
/// [closed]: https://docs.rs/tracing/latest/tracing/span/index.html#closing-spans
-/// [considered closed]: https://docs.rs/tracing-core/latest/tracing_core/subscriber/trait.Subscriber.html#method.try_close
+/// [considered closed]: tracing_core::subscriber::Subscriber::try_close()
/// [`Span`]: https://docs.rs/tracing/latest/tracing/span/struct.Span.html
/// [ot]: https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/trace/api.md#spancontext
-/// [fields]: https://docs.rs/tracing-core/latest/tracing-core/field/index.html
+/// [fields]: tracing_core::field
/// [stored span data]: crate::registry::SpanData::extensions_mut
#[cfg(feature = "registry")]
#[cfg_attr(docsrs, doc(cfg(all(feature = "registry", feature = "std"))))]
@@ -102,9 +102,8 @@ pub struct Registry {
/// [`Layer`s], such as formatted fields, metrics, or distributed traces should
/// be stored in the [extensions] typemap.
///
-/// [`Registry`]: struct.Registry.html
-/// [`Layer`s]: ../layer/trait.Layer.html
-/// [extensions]: struct.Extensions.html
+/// [`Layer`s]: crate::layer::Layer
+/// [extensions]: Extensions
#[cfg(feature = "registry")]
#[cfg_attr(docsrs, doc(cfg(all(feature = "registry", feature = "std"))))]
#[derive(Debug)]
@@ -115,10 +114,11 @@ pub struct Data<'a> {
/// Stored data associated with a span.
///
-/// This type is pooled using `sharded_slab::Pool`; when a span is dropped, the
-/// `DataInner` entry at that span's slab index is cleared in place and reused
-/// by a future span. Thus, the `Default` and `sharded_slab::Clear`
-/// implementations for this type are load-bearing.
+/// This type is pooled using [`sharded_slab::Pool`]; when a span is
+/// dropped, the `DataInner` entry at that span's slab index is cleared
+/// in place and reused by a future span. Thus, the `Default` and
+/// [`sharded_slab::Clear`] implementations for this type are
+/// load-bearing.
#[derive(Debug)]
struct DataInner {
filter_map: FilterMap,
@@ -173,7 +173,6 @@ fn id_to_idx(id: &Id) -> usize {
/// greater than 0, `CloseGuard` decrements the counter by one and
/// _does not_ remove the span from the [`Registry`].
///
-/// [`Registry`]: ./struct.Registry.html
pub(crate) struct CloseGuard<'a> {
id: Id,
registry: &'a Registry,
@@ -189,7 +188,6 @@ impl Registry {
/// processed an `on_close` notification via the `CLOSE_COUNT` thread-local.
/// For additional details, see [`CloseGuard`].
///
- /// [`CloseGuard`]: ./struct.CloseGuard.html
pub(crate) fn start_close(&self, id: Id) -> CloseGuard<'_> {
CLOSE_COUNT.with(|count| {
let c = count.get();
@@ -216,7 +214,6 @@ thread_local! {
/// track how many layers have processed the close.
/// For additional details, see [`CloseGuard`].
///
- /// [`CloseGuard`]: ./struct.CloseGuard.html
static CLOSE_COUNT: Cell<usize> = Cell::new(0);
}
@@ -278,6 +275,13 @@ impl Subscriber for Registry {
fn record_follows_from(&self, _span: &span::Id, _follows: &span::Id) {}
+ fn event_enabled(&self, _event: &Event<'_>) -> bool {
+ if self.has_per_layer_filters() {
+ return FilterState::event_enabled();
+ }
+ true
+ }
+
/// This is intentionally not implemented, as recording events
 /// is the responsibility of layers atop this registry.
fn event(&self, _: &Event<'_>) {}
@@ -380,7 +384,7 @@ impl<'a> LookupSpan<'a> for Registry {
// === impl CloseGuard ===
impl<'a> CloseGuard<'a> {
- pub(crate) fn is_closing(&mut self) {
+ pub(crate) fn set_closing(&mut self) {
self.is_closing = true;
}
}
diff --git a/vendor/tracing-subscriber-0.3.3/src/registry/stack.rs b/vendor/tracing-subscriber/src/registry/stack.rs
index 4a3f7e59d..4a3f7e59d 100644
--- a/vendor/tracing-subscriber-0.3.3/src/registry/stack.rs
+++ b/vendor/tracing-subscriber/src/registry/stack.rs
diff --git a/vendor/tracing-subscriber/src/reload.rs b/vendor/tracing-subscriber/src/reload.rs
new file mode 100644
index 000000000..096f83d38
--- /dev/null
+++ b/vendor/tracing-subscriber/src/reload.rs
@@ -0,0 +1,384 @@
+//! Wrapper for a `Layer` to allow it to be dynamically reloaded.
+//!
+//! This module provides a [`Layer` type] implementing the [`Layer` trait] or [`Filter` trait]
+//! which wraps another type implementing the corresponding trait. This
+//! allows the wrapped type to be replaced with another
+//! instance of that type at runtime.
+//!
+//! This can be used in cases where a subset of `Layer` or `Filter` functionality
+//! should be dynamically reconfigured, such as when filtering directives may
+//! change at runtime. Note that this layer introduces a (relatively small)
+//! amount of overhead, and should thus only be used as needed.
+//!
+//! # Examples
+//!
+//! Reloading a [global filtering](crate::layer#global-filtering) layer:
+//!
+//! ```rust
+//! # use tracing::info;
+//! use tracing_subscriber::{filter, fmt, reload, prelude::*};
+//! let filter = filter::LevelFilter::WARN;
+//! let (filter, reload_handle) = reload::Layer::new(filter);
+//! tracing_subscriber::registry()
+//! .with(filter)
+//! .with(fmt::Layer::default())
+//! .init();
+//! #
+//! # // specifying the Registry type is required
+//! # let _: &reload::Handle<filter::LevelFilter, tracing_subscriber::Registry> = &reload_handle;
+//! #
+//! info!("This will be ignored");
+//! reload_handle.modify(|filter| *filter = filter::LevelFilter::INFO);
+//! info!("This will be logged");
+//! ```
+//!
+//! Reloading a [`Filtered`](crate::filter::Filtered) layer:
+//!
+//! ```rust
+//! # use tracing::info;
+//! use tracing_subscriber::{filter, fmt, reload, prelude::*};
+//! let filtered_layer = fmt::Layer::default().with_filter(filter::LevelFilter::WARN);
+//! let (filtered_layer, reload_handle) = reload::Layer::new(filtered_layer);
+//! #
+//! # // specifying the Registry type is required
+//! # let _: &reload::Handle<filter::Filtered<fmt::Layer<tracing_subscriber::Registry>,
+//! # filter::LevelFilter, tracing_subscriber::Registry>,tracing_subscriber::Registry>
+//! # = &reload_handle;
+//! #
+//! tracing_subscriber::registry()
+//! .with(filtered_layer)
+//! .init();
+//! info!("This will be ignored");
+//! reload_handle.modify(|layer| *layer.filter_mut() = filter::LevelFilter::INFO);
+//! info!("This will be logged");
+//! ```
+//!
+//! ## Note
+//!
+//! The [`Layer`] implementation is unable to implement downcasting functionality,
+//! so certain [`Layer`]s will fail to downcast if wrapped in a `reload::Layer`.
+//!
+//! If you only want to be able to dynamically change the
+//! `Filter` on a layer, prefer wrapping that `Filter` in the `reload::Layer`.
+//!
+//! [`Filter` trait]: crate::layer::Filter
+//! [`Layer` type]: Layer
+//! [`Layer` trait]: super::layer::Layer
+use crate::layer;
+use crate::sync::RwLock;
+
+use core::any::TypeId;
+use std::{
+ error, fmt,
+ marker::PhantomData,
+ sync::{Arc, Weak},
+};
+use tracing_core::{
+ callsite, span,
+ subscriber::{Interest, Subscriber},
+ Dispatch, Event, LevelFilter, Metadata,
+};
+
+/// Wraps a `Layer` or `Filter`, allowing it to be reloaded dynamically at runtime.
+#[derive(Debug)]
+pub struct Layer<L, S> {
+ // TODO(eliza): this once used a `crossbeam_util::ShardedRwLock`. We may
+ // eventually wish to replace it with a sharded lock implementation on top
+ // of our internal `RwLock` wrapper type. If possible, we should profile
+ // this first to determine if it's necessary.
+ inner: Arc<RwLock<L>>,
+ _s: PhantomData<fn(S)>,
+}
+
+/// Allows reloading the state of an associated [`Layer`](crate::layer::Layer).
+#[derive(Debug)]
+pub struct Handle<L, S> {
+ inner: Weak<RwLock<L>>,
+ _s: PhantomData<fn(S)>,
+}
+
+/// Indicates that an error occurred when reloading a layer.
+#[derive(Debug)]
+pub struct Error {
+ kind: ErrorKind,
+}
+
+#[derive(Debug)]
+enum ErrorKind {
+ SubscriberGone,
+ Poisoned,
+}
+
+// ===== impl Layer =====
+
+impl<L, S> crate::Layer<S> for Layer<L, S>
+where
+ L: crate::Layer<S> + 'static,
+ S: Subscriber,
+{
+ fn on_register_dispatch(&self, subscriber: &Dispatch) {
+ try_lock!(self.inner.read()).on_register_dispatch(subscriber);
+ }
+
+ fn on_layer(&mut self, subscriber: &mut S) {
+ try_lock!(self.inner.write(), else return).on_layer(subscriber);
+ }
+
+ #[inline]
+ fn register_callsite(&self, metadata: &'static Metadata<'static>) -> Interest {
+ try_lock!(self.inner.read(), else return Interest::sometimes()).register_callsite(metadata)
+ }
+
+ #[inline]
+ fn enabled(&self, metadata: &Metadata<'_>, ctx: layer::Context<'_, S>) -> bool {
+ try_lock!(self.inner.read(), else return false).enabled(metadata, ctx)
+ }
+
+ #[inline]
+ fn on_new_span(&self, attrs: &span::Attributes<'_>, id: &span::Id, ctx: layer::Context<'_, S>) {
+ try_lock!(self.inner.read()).on_new_span(attrs, id, ctx)
+ }
+
+ #[inline]
+ fn on_record(&self, span: &span::Id, values: &span::Record<'_>, ctx: layer::Context<'_, S>) {
+ try_lock!(self.inner.read()).on_record(span, values, ctx)
+ }
+
+ #[inline]
+ fn on_follows_from(&self, span: &span::Id, follows: &span::Id, ctx: layer::Context<'_, S>) {
+ try_lock!(self.inner.read()).on_follows_from(span, follows, ctx)
+ }
+
+ #[inline]
+ fn event_enabled(&self, event: &Event<'_>, ctx: layer::Context<'_, S>) -> bool {
+ try_lock!(self.inner.read(), else return false).event_enabled(event, ctx)
+ }
+
+ #[inline]
+ fn on_event(&self, event: &Event<'_>, ctx: layer::Context<'_, S>) {
+ try_lock!(self.inner.read()).on_event(event, ctx)
+ }
+
+ #[inline]
+ fn on_enter(&self, id: &span::Id, ctx: layer::Context<'_, S>) {
+ try_lock!(self.inner.read()).on_enter(id, ctx)
+ }
+
+ #[inline]
+ fn on_exit(&self, id: &span::Id, ctx: layer::Context<'_, S>) {
+ try_lock!(self.inner.read()).on_exit(id, ctx)
+ }
+
+ #[inline]
+ fn on_close(&self, id: span::Id, ctx: layer::Context<'_, S>) {
+ try_lock!(self.inner.read()).on_close(id, ctx)
+ }
+
+ #[inline]
+ fn on_id_change(&self, old: &span::Id, new: &span::Id, ctx: layer::Context<'_, S>) {
+ try_lock!(self.inner.read()).on_id_change(old, new, ctx)
+ }
+
+ #[inline]
+ fn max_level_hint(&self) -> Option<LevelFilter> {
+ try_lock!(self.inner.read(), else return None).max_level_hint()
+ }
+
+ #[doc(hidden)]
+ unsafe fn downcast_raw(&self, id: TypeId) -> Option<*const ()> {
+ // Safety: it is generally unsafe to downcast through a reload, because
+ // the pointer can be invalidated after the lock is dropped.
+ // `NoneLayerMarker` is a special case because it
+ // is never dereferenced.
+ //
+ // Additionally, even if the marker type *is* dereferenced (which it
+ // never will be), the pointer should be valid even if the subscriber
+ // is reloaded, because all `NoneLayerMarker` pointers that we return
+ // actually point to the global static singleton `NoneLayerMarker`,
+ // rather than to a field inside the lock.
+ if id == TypeId::of::<layer::NoneLayerMarker>() {
+ return try_lock!(self.inner.read(), else return None).downcast_raw(id);
+ }
+
+ None
+ }
+}
+
+// ===== impl Filter =====
+
+#[cfg(all(feature = "registry", feature = "std"))]
+#[cfg_attr(docsrs, doc(cfg(all(feature = "registry", feature = "std"))))]
+impl<S, L> crate::layer::Filter<S> for Layer<L, S>
+where
+ L: crate::layer::Filter<S> + 'static,
+ S: Subscriber,
+{
+ #[inline]
+ fn callsite_enabled(&self, metadata: &'static Metadata<'static>) -> Interest {
+ try_lock!(self.inner.read(), else return Interest::sometimes()).callsite_enabled(metadata)
+ }
+
+ #[inline]
+ fn enabled(&self, metadata: &Metadata<'_>, ctx: &layer::Context<'_, S>) -> bool {
+ try_lock!(self.inner.read(), else return false).enabled(metadata, ctx)
+ }
+
+ #[inline]
+ fn on_new_span(&self, attrs: &span::Attributes<'_>, id: &span::Id, ctx: layer::Context<'_, S>) {
+ try_lock!(self.inner.read()).on_new_span(attrs, id, ctx)
+ }
+
+ #[inline]
+ fn on_record(&self, span: &span::Id, values: &span::Record<'_>, ctx: layer::Context<'_, S>) {
+ try_lock!(self.inner.read()).on_record(span, values, ctx)
+ }
+
+ #[inline]
+ fn on_enter(&self, id: &span::Id, ctx: layer::Context<'_, S>) {
+ try_lock!(self.inner.read()).on_enter(id, ctx)
+ }
+
+ #[inline]
+ fn on_exit(&self, id: &span::Id, ctx: layer::Context<'_, S>) {
+ try_lock!(self.inner.read()).on_exit(id, ctx)
+ }
+
+ #[inline]
+ fn on_close(&self, id: span::Id, ctx: layer::Context<'_, S>) {
+ try_lock!(self.inner.read()).on_close(id, ctx)
+ }
+
+ #[inline]
+ fn max_level_hint(&self) -> Option<LevelFilter> {
+ try_lock!(self.inner.read(), else return None).max_level_hint()
+ }
+}
+
+impl<L, S> Layer<L, S> {
+ /// Wraps the given [`Layer`] or [`Filter`], returning a `reload::Layer`
+ /// and a `Handle` that allows the inner value to be modified at runtime.
+ ///
+ /// [`Layer`]: crate::layer::Layer
+ /// [`Filter`]: crate::layer::Filter
+ pub fn new(inner: L) -> (Self, Handle<L, S>) {
+ let this = Self {
+ inner: Arc::new(RwLock::new(inner)),
+ _s: PhantomData,
+ };
+ let handle = this.handle();
+ (this, handle)
+ }
+
+ /// Returns a `Handle` that can be used to reload the wrapped [`Layer`] or [`Filter`].
+ ///
+ /// [`Layer`]: crate::layer::Layer
+ /// [`Filter`]: crate::filter::Filter
+ pub fn handle(&self) -> Handle<L, S> {
+ Handle {
+ inner: Arc::downgrade(&self.inner),
+ _s: PhantomData,
+ }
+ }
+}
+
+// ===== impl Handle =====
+
+impl<L, S> Handle<L, S> {
+ /// Replace the current [`Layer`] or [`Filter`] with the provided `new_value`.
+ ///
+ /// [`Handle::reload`] cannot be used with the [`Filtered`] layer; use
+ /// [`Handle::modify`] instead (see [this issue] for additional details).
+ ///
+ /// However, if _only_ the [`Filter`] needs to be modified, use
+ /// `reload::Layer` to wrap the `Filter` directly.
+ ///
+ /// [`Layer`]: crate::layer::Layer
+ /// [`Filter`]: crate::layer::Filter
+ /// [`Filtered`]: crate::filter::Filtered
+ ///
+ /// [this issue]: https://github.com/tokio-rs/tracing/issues/1629
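+ ///
+ /// # Examples
+ ///
+ /// A sketch of swapping out a `LevelFilter` at runtime (mirroring the
+ /// module-level example above):
+ ///
+ /// ```
+ /// # use tracing_subscriber::{filter, fmt, reload, prelude::*};
+ /// let (filter, reload_handle) = reload::Layer::new(filter::LevelFilter::WARN);
+ /// tracing_subscriber::registry()
+ ///     .with(filter)
+ ///     .with(fmt::Layer::default())
+ ///     .init();
+ /// #
+ /// # // specifying the Registry type is required
+ /// # let _: &reload::Handle<filter::LevelFilter, tracing_subscriber::Registry> = &reload_handle;
+ /// #
+ /// // Replace the `WARN` filter with an `INFO` filter.
+ /// reload_handle
+ ///     .reload(filter::LevelFilter::INFO)
+ ///     .expect("subscriber should still exist");
+ /// ```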
+ pub fn reload(&self, new_value: impl Into<L>) -> Result<(), Error> {
+ self.modify(|layer| {
+ *layer = new_value.into();
+ })
+ }
+
+ /// Invokes a closure with a mutable reference to the current layer or filter,
+ /// allowing it to be modified in place.
+ pub fn modify(&self, f: impl FnOnce(&mut L)) -> Result<(), Error> {
+ let inner = self.inner.upgrade().ok_or(Error {
+ kind: ErrorKind::SubscriberGone,
+ })?;
+
+ let mut lock = try_lock!(inner.write(), else return Err(Error::poisoned()));
+ f(&mut *lock);
+ // Release the lock before rebuilding the interest cache, as that
+ // function will lock the new layer.
+ drop(lock);
+
+ callsite::rebuild_interest_cache();
+ Ok(())
+ }
+
+ /// Returns a clone of the layer or filter's current value if it still exists.
+ /// Otherwise, if the subscriber has been dropped, returns `None`.
+ pub fn clone_current(&self) -> Option<L>
+ where
+ L: Clone,
+ {
+ self.with_current(L::clone).ok()
+ }
+
+ /// Invokes a closure with a borrowed reference to the current layer or filter,
+ /// returning the result (or an error if the subscriber no longer exists).
+ pub fn with_current<T>(&self, f: impl FnOnce(&L) -> T) -> Result<T, Error> {
+ let inner = self.inner.upgrade().ok_or(Error {
+ kind: ErrorKind::SubscriberGone,
+ })?;
+ let inner = try_lock!(inner.read(), else return Err(Error::poisoned()));
+ Ok(f(&*inner))
+ }
+}
+
+impl<L, S> Clone for Handle<L, S> {
+ fn clone(&self) -> Self {
+ Handle {
+ inner: self.inner.clone(),
+ _s: PhantomData,
+ }
+ }
+}
+
+// ===== impl Error =====
+
+impl Error {
+ fn poisoned() -> Self {
+ Self {
+ kind: ErrorKind::Poisoned,
+ }
+ }
+
+ /// Returns `true` if this error occurred because the layer was poisoned by
+ /// a panic on another thread.
+ pub fn is_poisoned(&self) -> bool {
+ matches!(self.kind, ErrorKind::Poisoned)
+ }
+
+ /// Returns `true` if this error occurred because the `Subscriber`
+ /// containing the reloadable layer was dropped.
+ pub fn is_dropped(&self) -> bool {
+ matches!(self.kind, ErrorKind::SubscriberGone)
+ }
+}
+
+impl fmt::Display for Error {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let msg = match self.kind {
+ ErrorKind::SubscriberGone => "subscriber no longer exists",
+ ErrorKind::Poisoned => "lock poisoned",
+ };
+ f.pad(msg)
+ }
+}
+
+impl error::Error for Error {}
diff --git a/vendor/tracing-subscriber-0.3.3/src/sync.rs b/vendor/tracing-subscriber/src/sync.rs
index ec42b834a..ec42b834a 100644
--- a/vendor/tracing-subscriber-0.3.3/src/sync.rs
+++ b/vendor/tracing-subscriber/src/sync.rs
diff --git a/vendor/tracing-subscriber-0.3.3/src/util.rs b/vendor/tracing-subscriber/src/util.rs
index 1c98aa4d2..1c98aa4d2 100644
--- a/vendor/tracing-subscriber-0.3.3/src/util.rs
+++ b/vendor/tracing-subscriber/src/util.rs
diff --git a/vendor/tracing-subscriber-0.3.3/tests/cached_layer_filters_dont_break_other_layers.rs b/vendor/tracing-subscriber/tests/cached_layer_filters_dont_break_other_layers.rs
index 00e98a994..00e98a994 100644
--- a/vendor/tracing-subscriber-0.3.3/tests/cached_layer_filters_dont_break_other_layers.rs
+++ b/vendor/tracing-subscriber/tests/cached_layer_filters_dont_break_other_layers.rs
diff --git a/vendor/tracing-subscriber-0.3.3/tests/duplicate_spans.rs b/vendor/tracing-subscriber/tests/duplicate_spans.rs
index c4a736f74..5d4dc6a85 100644
--- a/vendor/tracing-subscriber-0.3.3/tests/duplicate_spans.rs
+++ b/vendor/tracing-subscriber/tests/duplicate_spans.rs
@@ -1,5 +1,4 @@
#![cfg(all(feature = "env-filter", feature = "fmt"))]
-mod support;
use tracing::{self, subscriber::with_default, Span};
use tracing_subscriber::{filter::EnvFilter, FmtSubscriber};
diff --git a/vendor/tracing-subscriber/tests/env_filter/main.rs b/vendor/tracing-subscriber/tests/env_filter/main.rs
new file mode 100644
index 000000000..3c3d4868b
--- /dev/null
+++ b/vendor/tracing-subscriber/tests/env_filter/main.rs
@@ -0,0 +1,547 @@
+#![cfg(feature = "env-filter")]
+
+#[path = "../support.rs"]
+mod support;
+use self::support::*;
+
+mod per_layer;
+
+use tracing::{self, subscriber::with_default, Level};
+use tracing_subscriber::{
+ filter::{EnvFilter, LevelFilter},
+ prelude::*,
+};
+
+#[test]
+fn level_filter_event() {
+ let filter: EnvFilter = "info".parse().expect("filter should parse");
+ let (subscriber, finished) = subscriber::mock()
+ .event(event::mock().at_level(Level::INFO))
+ .event(event::mock().at_level(Level::WARN))
+ .event(event::mock().at_level(Level::ERROR))
+ .done()
+ .run_with_handle();
+ let subscriber = subscriber.with(filter);
+
+ with_default(subscriber, || {
+ tracing::trace!("this should be disabled");
+ tracing::info!("this shouldn't be");
+ tracing::debug!(target: "foo", "this should also be disabled");
+ tracing::warn!(target: "foo", "this should be enabled");
+ tracing::error!("this should be enabled too");
+ });
+
+ finished.assert_finished();
+}
+
+#[test]
+fn same_name_spans() {
+ let filter: EnvFilter = "[foo{bar}]=trace,[foo{baz}]=trace"
+ .parse()
+ .expect("filter should parse");
+ let (subscriber, finished) = subscriber::mock()
+ .new_span(
+ span::mock()
+ .named("foo")
+ .at_level(Level::TRACE)
+ .with_field(field::mock("bar")),
+ )
+ .new_span(
+ span::mock()
+ .named("foo")
+ .at_level(Level::TRACE)
+ .with_field(field::mock("baz")),
+ )
+ .done()
+ .run_with_handle();
+ let subscriber = subscriber.with(filter);
+ with_default(subscriber, || {
+ tracing::trace_span!("foo", bar = 1);
+ tracing::trace_span!("foo", baz = 1);
+ });
+
+ finished.assert_finished();
+}
+
+#[test]
+fn level_filter_event_with_target() {
+ let filter: EnvFilter = "info,stuff=debug".parse().expect("filter should parse");
+ let (subscriber, finished) = subscriber::mock()
+ .event(event::mock().at_level(Level::INFO))
+ .event(event::mock().at_level(Level::DEBUG).with_target("stuff"))
+ .event(event::mock().at_level(Level::WARN).with_target("stuff"))
+ .event(event::mock().at_level(Level::ERROR))
+ .event(event::mock().at_level(Level::ERROR).with_target("stuff"))
+ .done()
+ .run_with_handle();
+ let subscriber = subscriber.with(filter);
+
+ with_default(subscriber, || {
+ tracing::trace!("this should be disabled");
+ tracing::info!("this shouldn't be");
+ tracing::debug!(target: "stuff", "this should be enabled");
+ tracing::debug!("but this shouldn't");
+ tracing::trace!(target: "stuff", "and neither should this");
+ tracing::warn!(target: "stuff", "this should be enabled");
+ tracing::error!("this should be enabled too");
+ tracing::error!(target: "stuff", "this should be enabled also");
+ });
+
+ finished.assert_finished();
+}
+
+#[test]
+fn level_filter_event_with_target_and_span_global() {
+ let filter: EnvFilter = "info,stuff[cool_span]=debug"
+ .parse()
+ .expect("filter should parse");
+
+ let cool_span = span::named("cool_span");
+ let uncool_span = span::named("uncool_span");
+ let (subscriber, handle) = subscriber::mock()
+ .enter(cool_span.clone())
+ .event(
+ event::mock()
+ .at_level(Level::DEBUG)
+ .in_scope(vec![cool_span.clone()]),
+ )
+ .exit(cool_span)
+ .enter(uncool_span.clone())
+ .exit(uncool_span)
+ .done()
+ .run_with_handle();
+
+ let subscriber = subscriber.with(filter);
+
+ with_default(subscriber, || {
+ {
+ let _span = tracing::info_span!(target: "stuff", "cool_span").entered();
+ tracing::debug!("this should be enabled");
+ }
+
+ tracing::debug!("should also be disabled");
+
+ {
+ let _span = tracing::info_span!("uncool_span").entered();
+ tracing::debug!("this should be disabled");
+ }
+ });
+
+ handle.assert_finished();
+}
+
+#[test]
+fn not_order_dependent() {
+ // this test reproduces tokio-rs/tracing#623
+
+ let filter: EnvFilter = "stuff=debug,info".parse().expect("filter should parse");
+ let (subscriber, finished) = subscriber::mock()
+ .event(event::mock().at_level(Level::INFO))
+ .event(event::mock().at_level(Level::DEBUG).with_target("stuff"))
+ .event(event::mock().at_level(Level::WARN).with_target("stuff"))
+ .event(event::mock().at_level(Level::ERROR))
+ .event(event::mock().at_level(Level::ERROR).with_target("stuff"))
+ .done()
+ .run_with_handle();
+ let subscriber = subscriber.with(filter);
+
+ with_default(subscriber, || {
+ tracing::trace!("this should be disabled");
+ tracing::info!("this shouldn't be");
+ tracing::debug!(target: "stuff", "this should be enabled");
+ tracing::debug!("but this shouldn't");
+ tracing::trace!(target: "stuff", "and neither should this");
+ tracing::warn!(target: "stuff", "this should be enabled");
+ tracing::error!("this should be enabled too");
+ tracing::error!(target: "stuff", "this should be enabled also");
+ });
+
+ finished.assert_finished();
+}
+
+#[test]
+fn add_directive_enables_event() {
+ // this test reproduces tokio-rs/tracing#591
+
+ // by default, use info level
+ let mut filter = EnvFilter::new(LevelFilter::INFO.to_string());
+
+ // overwrite with a more specific directive
+ filter = filter.add_directive("hello=trace".parse().expect("directive should parse"));
+
+ let (subscriber, finished) = subscriber::mock()
+ .event(event::mock().at_level(Level::INFO).with_target("hello"))
+ .event(event::mock().at_level(Level::TRACE).with_target("hello"))
+ .done()
+ .run_with_handle();
+ let subscriber = subscriber.with(filter);
+
+ with_default(subscriber, || {
+ tracing::info!(target: "hello", "hello info");
+ tracing::trace!(target: "hello", "hello trace");
+ });
+
+ finished.assert_finished();
+}
+
+#[test]
+fn span_name_filter_is_dynamic() {
+ let filter: EnvFilter = "info,[cool_span]=debug"
+ .parse()
+ .expect("filter should parse");
+ let (subscriber, finished) = subscriber::mock()
+ .event(event::mock().at_level(Level::INFO))
+ .enter(span::named("cool_span"))
+ .event(event::mock().at_level(Level::DEBUG))
+ .enter(span::named("uncool_span"))
+ .event(event::mock().at_level(Level::WARN))
+ .event(event::mock().at_level(Level::DEBUG))
+ .exit(span::named("uncool_span"))
+ .exit(span::named("cool_span"))
+ .enter(span::named("uncool_span"))
+ .event(event::mock().at_level(Level::WARN))
+ .event(event::mock().at_level(Level::ERROR))
+ .exit(span::named("uncool_span"))
+ .done()
+ .run_with_handle();
+ let subscriber = subscriber.with(filter);
+
+ with_default(subscriber, || {
+ tracing::trace!("this should be disabled");
+ tracing::info!("this shouldn't be");
+ let cool_span = tracing::info_span!("cool_span");
+ let uncool_span = tracing::info_span!("uncool_span");
+
+ {
+ let _enter = cool_span.enter();
+ tracing::debug!("i'm a cool event");
+ tracing::trace!("i'm cool, but not cool enough");
+ let _enter2 = uncool_span.enter();
+ tracing::warn!("warning: extremely cool!");
+ tracing::debug!("i'm still cool");
+ }
+
+ let _enter = uncool_span.enter();
+ tracing::warn!("warning: not that cool");
+ tracing::trace!("im not cool enough");
+ tracing::error!("uncool error");
+ });
+
+ finished.assert_finished();
+}
+
+#[test]
+fn method_name_resolution() {
+ #[allow(unused_imports)]
+ use tracing_subscriber::layer::{Filter, Layer};
+
+ let filter = EnvFilter::new("hello_world=info");
+ filter.max_level_hint();
+}
+
+// contains the same tests as the first half of this file
+// but using EnvFilter as a `Filter`, not as a `Layer`
+mod per_layer_filter {
+ use super::*;
+
+ #[test]
+ fn level_filter_event() {
+ let filter: EnvFilter = "info".parse().expect("filter should parse");
+ let (layer, handle) = layer::mock()
+ .event(event::mock().at_level(Level::INFO))
+ .event(event::mock().at_level(Level::WARN))
+ .event(event::mock().at_level(Level::ERROR))
+ .done()
+ .run_with_handle();
+
+ let _subscriber = tracing_subscriber::registry()
+ .with(layer.with_filter(filter))
+ .set_default();
+
+ tracing::trace!("this should be disabled");
+ tracing::info!("this shouldn't be");
+ tracing::debug!(target: "foo", "this should also be disabled");
+ tracing::warn!(target: "foo", "this should be enabled");
+ tracing::error!("this should be enabled too");
+
+ handle.assert_finished();
+ }
+
+ #[test]
+ fn same_name_spans() {
+ let filter: EnvFilter = "[foo{bar}]=trace,[foo{baz}]=trace"
+ .parse()
+ .expect("filter should parse");
+ let (layer, handle) = layer::mock()
+ .new_span(
+ span::mock()
+ .named("foo")
+ .at_level(Level::TRACE)
+ .with_field(field::mock("bar")),
+ )
+ .new_span(
+ span::mock()
+ .named("foo")
+ .at_level(Level::TRACE)
+ .with_field(field::mock("baz")),
+ )
+ .done()
+ .run_with_handle();
+
+ let _subscriber = tracing_subscriber::registry()
+ .with(layer.with_filter(filter))
+ .set_default();
+
+ tracing::trace_span!("foo", bar = 1);
+ tracing::trace_span!("foo", baz = 1);
+
+ handle.assert_finished();
+ }
+
+ #[test]
+ fn level_filter_event_with_target() {
+ let filter: EnvFilter = "info,stuff=debug".parse().expect("filter should parse");
+ let (layer, handle) = layer::mock()
+ .event(event::mock().at_level(Level::INFO))
+ .event(event::mock().at_level(Level::DEBUG).with_target("stuff"))
+ .event(event::mock().at_level(Level::WARN).with_target("stuff"))
+ .event(event::mock().at_level(Level::ERROR))
+ .event(event::mock().at_level(Level::ERROR).with_target("stuff"))
+ .done()
+ .run_with_handle();
+
+ let _subscriber = tracing_subscriber::registry()
+ .with(layer.with_filter(filter))
+ .set_default();
+
+ tracing::trace!("this should be disabled");
+ tracing::info!("this shouldn't be");
+ tracing::debug!(target: "stuff", "this should be enabled");
+ tracing::debug!("but this shouldn't");
+ tracing::trace!(target: "stuff", "and neither should this");
+ tracing::warn!(target: "stuff", "this should be enabled");
+ tracing::error!("this should be enabled too");
+ tracing::error!(target: "stuff", "this should be enabled also");
+
+ handle.assert_finished();
+ }
+
+ #[test]
+ fn level_filter_event_with_target_and_span() {
+ let filter: EnvFilter = "stuff[cool_span]=debug"
+ .parse()
+ .expect("filter should parse");
+
+ let cool_span = span::named("cool_span");
+ let (layer, handle) = layer::mock()
+ .enter(cool_span.clone())
+ .event(
+ event::mock()
+ .at_level(Level::DEBUG)
+ .in_scope(vec![cool_span.clone()]),
+ )
+ .exit(cool_span)
+ .done()
+ .run_with_handle();
+
+ let _subscriber = tracing_subscriber::registry()
+ .with(layer.with_filter(filter))
+ .set_default();
+
+ {
+ let _span = tracing::info_span!(target: "stuff", "cool_span").entered();
+ tracing::debug!("this should be enabled");
+ }
+
+ tracing::debug!("should also be disabled");
+
+ {
+ let _span = tracing::info_span!("uncool_span").entered();
+ tracing::debug!("this should be disabled");
+ }
+
+ handle.assert_finished();
+ }
+
+ #[test]
+ fn not_order_dependent() {
+ // this test reproduces tokio-rs/tracing#623
+
+ let filter: EnvFilter = "stuff=debug,info".parse().expect("filter should parse");
+ let (layer, finished) = layer::mock()
+ .event(event::mock().at_level(Level::INFO))
+ .event(event::mock().at_level(Level::DEBUG).with_target("stuff"))
+ .event(event::mock().at_level(Level::WARN).with_target("stuff"))
+ .event(event::mock().at_level(Level::ERROR))
+ .event(event::mock().at_level(Level::ERROR).with_target("stuff"))
+ .done()
+ .run_with_handle();
+
+ let _subscriber = tracing_subscriber::registry()
+ .with(layer.with_filter(filter))
+ .set_default();
+
+ tracing::trace!("this should be disabled");
+ tracing::info!("this shouldn't be");
+ tracing::debug!(target: "stuff", "this should be enabled");
+ tracing::debug!("but this shouldn't");
+ tracing::trace!(target: "stuff", "and neither should this");
+ tracing::warn!(target: "stuff", "this should be enabled");
+ tracing::error!("this should be enabled too");
+ tracing::error!(target: "stuff", "this should be enabled also");
+
+ finished.assert_finished();
+ }
+
+ #[test]
+ fn add_directive_enables_event() {
+ // this test reproduces tokio-rs/tracing#591
+
+ // by default, use info level
+ let mut filter = EnvFilter::new(LevelFilter::INFO.to_string());
+
+ // overwrite with a more specific directive
+ filter = filter.add_directive("hello=trace".parse().expect("directive should parse"));
+
+ let (layer, finished) = layer::mock()
+ .event(event::mock().at_level(Level::INFO).with_target("hello"))
+ .event(event::mock().at_level(Level::TRACE).with_target("hello"))
+ .done()
+ .run_with_handle();
+
+ let _subscriber = tracing_subscriber::registry()
+ .with(layer.with_filter(filter))
+ .set_default();
+
+ tracing::info!(target: "hello", "hello info");
+ tracing::trace!(target: "hello", "hello trace");
+
+ finished.assert_finished();
+ }
+
+ #[test]
+ fn span_name_filter_is_dynamic() {
+ let filter: EnvFilter = "info,[cool_span]=debug"
+ .parse()
+ .expect("filter should parse");
+ let cool_span = span::named("cool_span");
+ let uncool_span = span::named("uncool_span");
+ let (layer, finished) = layer::mock()
+ .event(event::mock().at_level(Level::INFO))
+ .enter(cool_span.clone())
+ .event(
+ event::mock()
+ .at_level(Level::DEBUG)
+ .in_scope(vec![cool_span.clone()]),
+ )
+ .enter(uncool_span.clone())
+ .event(
+ event::mock()
+ .at_level(Level::WARN)
+ .in_scope(vec![uncool_span.clone()]),
+ )
+ .event(
+ event::mock()
+ .at_level(Level::DEBUG)
+ .in_scope(vec![uncool_span.clone()]),
+ )
+ .exit(uncool_span.clone())
+ .exit(cool_span)
+ .enter(uncool_span.clone())
+ .event(
+ event::mock()
+ .at_level(Level::WARN)
+ .in_scope(vec![uncool_span.clone()]),
+ )
+ .event(
+ event::mock()
+ .at_level(Level::ERROR)
+ .in_scope(vec![uncool_span.clone()]),
+ )
+ .exit(uncool_span)
+ .done()
+ .run_with_handle();
+
+ let _subscriber = tracing_subscriber::registry()
+ .with(layer.with_filter(filter))
+ .set_default();
+
+ tracing::trace!("this should be disabled");
+ tracing::info!("this shouldn't be");
+ let cool_span = tracing::info_span!("cool_span");
+ let uncool_span = tracing::info_span!("uncool_span");
+
+ {
+ let _enter = cool_span.enter();
+ tracing::debug!("i'm a cool event");
+ tracing::trace!("i'm cool, but not cool enough");
+ let _enter2 = uncool_span.enter();
+ tracing::warn!("warning: extremely cool!");
+ tracing::debug!("i'm still cool");
+ }
+
+ {
+ let _enter = uncool_span.enter();
+ tracing::warn!("warning: not that cool");
+            tracing::trace!("i'm not cool enough");
+ tracing::error!("uncool error");
+ }
+
+ finished.assert_finished();
+ }
+
+ #[test]
+ fn multiple_dynamic_filters() {
+ // Test that multiple dynamic (span) filters only apply to the layers
+ // they're attached to.
+ let (layer1, handle1) = {
+ let span = span::named("span1");
+ let filter: EnvFilter = "[span1]=debug".parse().expect("filter 1 should parse");
+ let (layer, handle) = layer::named("layer1")
+ .enter(span.clone())
+ .event(
+ event::mock()
+ .at_level(Level::DEBUG)
+ .in_scope(vec![span.clone()]),
+ )
+ .exit(span)
+ .done()
+ .run_with_handle();
+ (layer.with_filter(filter), handle)
+ };
+
+ let (layer2, handle2) = {
+ let span = span::named("span2");
+ let filter: EnvFilter = "[span2]=info".parse().expect("filter 2 should parse");
+ let (layer, handle) = layer::named("layer2")
+ .enter(span.clone())
+ .event(
+ event::mock()
+ .at_level(Level::INFO)
+ .in_scope(vec![span.clone()]),
+ )
+ .exit(span)
+ .done()
+ .run_with_handle();
+ (layer.with_filter(filter), handle)
+ };
+
+ let _subscriber = tracing_subscriber::registry()
+ .with(layer1)
+ .with(layer2)
+ .set_default();
+
+ tracing::info_span!("span1").in_scope(|| {
+ tracing::debug!("hello from span 1");
+ tracing::trace!("not enabled");
+ });
+
+ tracing::info_span!("span2").in_scope(|| {
+ tracing::info!("hello from span 2");
+ tracing::debug!("not enabled");
+ });
+
+ handle1.assert_finished();
+ handle2.assert_finished();
+ }
+}
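
Taken together, the `EnvFilter` directives exercised above come in three shapes: a bare level or `target=level`, a span-scoped `[span_name]=level`, and the combined `target[span{field}]=level`. A minimal sketch of parsing one such set, reusing only directive forms that appear in the tests above and assuming the `env-filter` feature:

    use tracing_subscriber::filter::EnvFilter;

    fn parse_directives() -> EnvFilter {
        // Mixes the directive forms from the tests above; the most specific
        // matching directive decides whether a span or event is enabled.
        "info,stuff=debug,stuff[cool_span]=debug,[foo{bar}]=trace"
            .parse()
            .expect("directives should parse")
    }
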
diff --git a/vendor/tracing-subscriber/tests/env_filter/per_layer.rs b/vendor/tracing-subscriber/tests/env_filter/per_layer.rs
new file mode 100644
index 000000000..8bf5698a4
--- /dev/null
+++ b/vendor/tracing-subscriber/tests/env_filter/per_layer.rs
@@ -0,0 +1,305 @@
+//! Tests for using `EnvFilter` as a per-layer filter (rather than a global
+//! `Layer` filter).
+#![cfg(feature = "registry")]
+use super::*;
+
+#[test]
+fn level_filter_event() {
+ let filter: EnvFilter = "info".parse().expect("filter should parse");
+ let (layer, handle) = layer::mock()
+ .event(event::mock().at_level(Level::INFO))
+ .event(event::mock().at_level(Level::WARN))
+ .event(event::mock().at_level(Level::ERROR))
+ .done()
+ .run_with_handle();
+
+ let _subscriber = tracing_subscriber::registry()
+ .with(layer.with_filter(filter))
+ .set_default();
+
+ tracing::trace!("this should be disabled");
+ tracing::info!("this shouldn't be");
+ tracing::debug!(target: "foo", "this should also be disabled");
+ tracing::warn!(target: "foo", "this should be enabled");
+ tracing::error!("this should be enabled too");
+
+ handle.assert_finished();
+}
+
+#[test]
+fn same_name_spans() {
+ let filter: EnvFilter = "[foo{bar}]=trace,[foo{baz}]=trace"
+ .parse()
+ .expect("filter should parse");
+ let (layer, handle) = layer::mock()
+ .new_span(
+ span::mock()
+ .named("foo")
+ .at_level(Level::TRACE)
+ .with_field(field::mock("bar")),
+ )
+ .new_span(
+ span::mock()
+ .named("foo")
+ .at_level(Level::TRACE)
+ .with_field(field::mock("baz")),
+ )
+ .done()
+ .run_with_handle();
+
+ let _subscriber = tracing_subscriber::registry()
+ .with(layer.with_filter(filter))
+ .set_default();
+
+ tracing::trace_span!("foo", bar = 1);
+ tracing::trace_span!("foo", baz = 1);
+
+ handle.assert_finished();
+}
+
+#[test]
+fn level_filter_event_with_target() {
+ let filter: EnvFilter = "info,stuff=debug".parse().expect("filter should parse");
+ let (layer, handle) = layer::mock()
+ .event(event::mock().at_level(Level::INFO))
+ .event(event::mock().at_level(Level::DEBUG).with_target("stuff"))
+ .event(event::mock().at_level(Level::WARN).with_target("stuff"))
+ .event(event::mock().at_level(Level::ERROR))
+ .event(event::mock().at_level(Level::ERROR).with_target("stuff"))
+ .done()
+ .run_with_handle();
+
+ let _subscriber = tracing_subscriber::registry()
+ .with(layer.with_filter(filter))
+ .set_default();
+
+ tracing::trace!("this should be disabled");
+ tracing::info!("this shouldn't be");
+ tracing::debug!(target: "stuff", "this should be enabled");
+ tracing::debug!("but this shouldn't");
+ tracing::trace!(target: "stuff", "and neither should this");
+ tracing::warn!(target: "stuff", "this should be enabled");
+ tracing::error!("this should be enabled too");
+ tracing::error!(target: "stuff", "this should be enabled also");
+
+ handle.assert_finished();
+}
+
+#[test]
+fn level_filter_event_with_target_and_span() {
+ let filter: EnvFilter = "stuff[cool_span]=debug"
+ .parse()
+ .expect("filter should parse");
+
+ let cool_span = span::named("cool_span");
+ let (layer, handle) = layer::mock()
+ .enter(cool_span.clone())
+ .event(
+ event::mock()
+ .at_level(Level::DEBUG)
+ .in_scope(vec![cool_span.clone()]),
+ )
+ .exit(cool_span)
+ .done()
+ .run_with_handle();
+
+ let _subscriber = tracing_subscriber::registry()
+ .with(layer.with_filter(filter))
+ .set_default();
+
+ {
+ let _span = tracing::info_span!(target: "stuff", "cool_span").entered();
+ tracing::debug!("this should be enabled");
+ }
+
+ tracing::debug!("should also be disabled");
+
+ {
+ let _span = tracing::info_span!("uncool_span").entered();
+ tracing::debug!("this should be disabled");
+ }
+
+ handle.assert_finished();
+}
+
+#[test]
+fn not_order_dependent() {
+ // this test reproduces tokio-rs/tracing#623
+
+ let filter: EnvFilter = "stuff=debug,info".parse().expect("filter should parse");
+ let (layer, finished) = layer::mock()
+ .event(event::mock().at_level(Level::INFO))
+ .event(event::mock().at_level(Level::DEBUG).with_target("stuff"))
+ .event(event::mock().at_level(Level::WARN).with_target("stuff"))
+ .event(event::mock().at_level(Level::ERROR))
+ .event(event::mock().at_level(Level::ERROR).with_target("stuff"))
+ .done()
+ .run_with_handle();
+
+ let _subscriber = tracing_subscriber::registry()
+ .with(layer.with_filter(filter))
+ .set_default();
+
+ tracing::trace!("this should be disabled");
+ tracing::info!("this shouldn't be");
+ tracing::debug!(target: "stuff", "this should be enabled");
+ tracing::debug!("but this shouldn't");
+ tracing::trace!(target: "stuff", "and neither should this");
+ tracing::warn!(target: "stuff", "this should be enabled");
+ tracing::error!("this should be enabled too");
+ tracing::error!(target: "stuff", "this should be enabled also");
+
+ finished.assert_finished();
+}
+
+#[test]
+fn add_directive_enables_event() {
+ // this test reproduces tokio-rs/tracing#591
+
+ // by default, use info level
+ let mut filter = EnvFilter::new(LevelFilter::INFO.to_string());
+
+ // overwrite with a more specific directive
+ filter = filter.add_directive("hello=trace".parse().expect("directive should parse"));
+
+ let (layer, finished) = layer::mock()
+ .event(event::mock().at_level(Level::INFO).with_target("hello"))
+ .event(event::mock().at_level(Level::TRACE).with_target("hello"))
+ .done()
+ .run_with_handle();
+
+ let _subscriber = tracing_subscriber::registry()
+ .with(layer.with_filter(filter))
+ .set_default();
+
+ tracing::info!(target: "hello", "hello info");
+ tracing::trace!(target: "hello", "hello trace");
+
+ finished.assert_finished();
+}
+
+#[test]
+fn span_name_filter_is_dynamic() {
+ let filter: EnvFilter = "info,[cool_span]=debug"
+ .parse()
+ .expect("filter should parse");
+ let cool_span = span::named("cool_span");
+ let uncool_span = span::named("uncool_span");
+ let (layer, finished) = layer::mock()
+ .event(event::mock().at_level(Level::INFO))
+ .enter(cool_span.clone())
+ .event(
+ event::mock()
+ .at_level(Level::DEBUG)
+ .in_scope(vec![cool_span.clone()]),
+ )
+ .enter(uncool_span.clone())
+ .event(
+ event::mock()
+ .at_level(Level::WARN)
+ .in_scope(vec![uncool_span.clone()]),
+ )
+ .event(
+ event::mock()
+ .at_level(Level::DEBUG)
+ .in_scope(vec![uncool_span.clone()]),
+ )
+ .exit(uncool_span.clone())
+ .exit(cool_span)
+ .enter(uncool_span.clone())
+ .event(
+ event::mock()
+ .at_level(Level::WARN)
+ .in_scope(vec![uncool_span.clone()]),
+ )
+ .event(
+ event::mock()
+ .at_level(Level::ERROR)
+ .in_scope(vec![uncool_span.clone()]),
+ )
+ .exit(uncool_span)
+ .done()
+ .run_with_handle();
+
+ let _subscriber = tracing_subscriber::registry()
+ .with(layer.with_filter(filter))
+ .set_default();
+
+ tracing::trace!("this should be disabled");
+ tracing::info!("this shouldn't be");
+ let cool_span = tracing::info_span!("cool_span");
+ let uncool_span = tracing::info_span!("uncool_span");
+
+ {
+ let _enter = cool_span.enter();
+ tracing::debug!("i'm a cool event");
+ tracing::trace!("i'm cool, but not cool enough");
+ let _enter2 = uncool_span.enter();
+ tracing::warn!("warning: extremely cool!");
+ tracing::debug!("i'm still cool");
+ }
+
+ {
+ let _enter = uncool_span.enter();
+ tracing::warn!("warning: not that cool");
+        tracing::trace!("i'm not cool enough");
+ tracing::error!("uncool error");
+ }
+
+ finished.assert_finished();
+}
+
+#[test]
+fn multiple_dynamic_filters() {
+ // Test that multiple dynamic (span) filters only apply to the layers
+ // they're attached to.
+ let (layer1, handle1) = {
+ let span = span::named("span1");
+ let filter: EnvFilter = "[span1]=debug".parse().expect("filter 1 should parse");
+ let (layer, handle) = layer::named("layer1")
+ .enter(span.clone())
+ .event(
+ event::mock()
+ .at_level(Level::DEBUG)
+ .in_scope(vec![span.clone()]),
+ )
+ .exit(span)
+ .done()
+ .run_with_handle();
+ (layer.with_filter(filter), handle)
+ };
+
+ let (layer2, handle2) = {
+ let span = span::named("span2");
+ let filter: EnvFilter = "[span2]=info".parse().expect("filter 2 should parse");
+ let (layer, handle) = layer::named("layer2")
+ .enter(span.clone())
+ .event(
+ event::mock()
+ .at_level(Level::INFO)
+ .in_scope(vec![span.clone()]),
+ )
+ .exit(span)
+ .done()
+ .run_with_handle();
+ (layer.with_filter(filter), handle)
+ };
+
+ let _subscriber = tracing_subscriber::registry()
+ .with(layer1)
+ .with(layer2)
+ .set_default();
+
+ tracing::info_span!("span1").in_scope(|| {
+ tracing::debug!("hello from span 1");
+ tracing::trace!("not enabled");
+ });
+
+ tracing::info_span!("span2").in_scope(|| {
+ tracing::info!("hello from span 2");
+ tracing::debug!("not enabled");
+ });
+
+ handle1.assert_finished();
+ handle2.assert_finished();
+}
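
Outside the mock harness, the per-layer pattern this file exercises looks roughly like the sketch below. It assumes the crate's default `fmt` feature together with `env-filter` and `registry`; `my_crate` is a placeholder target, not anything from this diff:

    use tracing_subscriber::{filter::EnvFilter, prelude::*};

    fn main() {
        // The EnvFilter gates only this fmt layer, not the whole subscriber,
        // so other layers added to the registry could filter differently.
        let fmt_layer = tracing_subscriber::fmt::layer()
            .with_filter(EnvFilter::new("info,my_crate=debug"));

        tracing_subscriber::registry().with(fmt_layer).init();

        tracing::debug!(target: "my_crate", "enabled by the per-layer directive");
        tracing::debug!("disabled: other targets only get `info` and above");
    }
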
diff --git a/vendor/tracing-subscriber/tests/event_enabling.rs b/vendor/tracing-subscriber/tests/event_enabling.rs
new file mode 100644
index 000000000..8f67cfcba
--- /dev/null
+++ b/vendor/tracing-subscriber/tests/event_enabling.rs
@@ -0,0 +1,81 @@
+#![cfg(feature = "registry")]
+
+use std::sync::{Arc, Mutex};
+use tracing::{subscriber::with_default, Event, Metadata, Subscriber};
+use tracing_subscriber::{layer::Context, prelude::*, registry, Layer};
+
+struct TrackingLayer {
+ enabled: bool,
+ event_enabled_count: Arc<Mutex<usize>>,
+ event_enabled: bool,
+ on_event_count: Arc<Mutex<usize>>,
+}
+
+impl<C> Layer<C> for TrackingLayer
+where
+ C: Subscriber + Send + Sync + 'static,
+{
+ fn enabled(&self, _metadata: &Metadata<'_>, _ctx: Context<'_, C>) -> bool {
+ self.enabled
+ }
+
+ fn event_enabled(&self, _event: &Event<'_>, _ctx: Context<'_, C>) -> bool {
+ *self.event_enabled_count.lock().unwrap() += 1;
+ self.event_enabled
+ }
+
+ fn on_event(&self, _event: &Event<'_>, _ctx: Context<'_, C>) {
+ *self.on_event_count.lock().unwrap() += 1;
+ }
+}
+
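+// Note the ordering these tests pin down: `event_enabled` is only consulted
+// once `enabled` has returned `true` for the callsite, and returning `false`
+// from `event_enabled` keeps `on_event` from running for this layer.
+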
+#[test]
+fn event_enabled_is_only_called_once() {
+ let event_enabled_count = Arc::new(Mutex::default());
+ let count = event_enabled_count.clone();
+ let subscriber = registry().with(TrackingLayer {
+ enabled: true,
+ event_enabled_count,
+ event_enabled: true,
+ on_event_count: Arc::new(Mutex::default()),
+ });
+ with_default(subscriber, || {
+ tracing::error!("hiya!");
+ });
+
+ assert_eq!(1, *count.lock().unwrap());
+}
+
+#[test]
+fn event_enabled_not_called_when_not_enabled() {
+ let event_enabled_count = Arc::new(Mutex::default());
+ let count = event_enabled_count.clone();
+ let subscriber = registry().with(TrackingLayer {
+ enabled: false,
+ event_enabled_count,
+ event_enabled: true,
+ on_event_count: Arc::new(Mutex::default()),
+ });
+ with_default(subscriber, || {
+ tracing::error!("hiya!");
+ });
+
+ assert_eq!(0, *count.lock().unwrap());
+}
+
+#[test]
+fn event_disabled_does_disable_event() {
+ let on_event_count = Arc::new(Mutex::default());
+ let count = on_event_count.clone();
+ let subscriber = registry().with(TrackingLayer {
+ enabled: true,
+ event_enabled_count: Arc::new(Mutex::default()),
+ event_enabled: false,
+ on_event_count,
+ });
+ with_default(subscriber, || {
+ tracing::error!("hiya!");
+ });
+
+ assert_eq!(0, *count.lock().unwrap());
+}
diff --git a/vendor/tracing-subscriber-0.3.3/tests/field_filter.rs b/vendor/tracing-subscriber/tests/field_filter.rs
index 12b4053b6..f14a0626d 100644
--- a/vendor/tracing-subscriber-0.3.3/tests/field_filter.rs
+++ b/vendor/tracing-subscriber/tests/field_filter.rs
@@ -1,7 +1,7 @@
#![cfg(feature = "env-filter")]
-mod support;
-use self::support::*;
+
use tracing::{self, subscriber::with_default, Level};
+use tracing_mock::*;
use tracing_subscriber::{filter::EnvFilter, prelude::*};
#[test]
diff --git a/vendor/tracing-subscriber-0.3.3/tests/filter_log.rs b/vendor/tracing-subscriber/tests/filter_log.rs
index 28e742501..8d57ed600 100644
--- a/vendor/tracing-subscriber-0.3.3/tests/filter_log.rs
+++ b/vendor/tracing-subscriber/tests/filter_log.rs
@@ -1,7 +1,7 @@
#![cfg(all(feature = "env-filter", feature = "tracing-log"))]
-mod support;
-use self::support::*;
+
use tracing::{self, Level};
+use tracing_mock::*;
use tracing_subscriber::{filter::EnvFilter, prelude::*};
mod my_module {
diff --git a/vendor/tracing-subscriber-0.3.3/tests/fmt_max_level_hint.rs b/vendor/tracing-subscriber/tests/fmt_max_level_hint.rs
index 57a0f6e3f..57a0f6e3f 100644
--- a/vendor/tracing-subscriber-0.3.3/tests/fmt_max_level_hint.rs
+++ b/vendor/tracing-subscriber/tests/fmt_max_level_hint.rs
diff --git a/vendor/tracing-subscriber-0.3.3/tests/hinted_layer_filters_dont_break_other_layers.rs b/vendor/tracing-subscriber/tests/hinted_layer_filters_dont_break_other_layers.rs
index 897dae282..897dae282 100644
--- a/vendor/tracing-subscriber-0.3.3/tests/hinted_layer_filters_dont_break_other_layers.rs
+++ b/vendor/tracing-subscriber/tests/hinted_layer_filters_dont_break_other_layers.rs
diff --git a/vendor/tracing-subscriber-0.3.3/tests/layer_filter_interests_are_cached.rs b/vendor/tracing-subscriber/tests/layer_filter_interests_are_cached.rs
index d89d3bf17..d89d3bf17 100644
--- a/vendor/tracing-subscriber-0.3.3/tests/layer_filter_interests_are_cached.rs
+++ b/vendor/tracing-subscriber/tests/layer_filter_interests_are_cached.rs
diff --git a/vendor/tracing-subscriber-0.3.3/tests/layer_filters/boxed.rs b/vendor/tracing-subscriber/tests/layer_filters/boxed.rs
index 0fe37188e..0fe37188e 100644
--- a/vendor/tracing-subscriber-0.3.3/tests/layer_filters/boxed.rs
+++ b/vendor/tracing-subscriber/tests/layer_filters/boxed.rs
diff --git a/vendor/tracing-subscriber/tests/layer_filters/combinators.rs b/vendor/tracing-subscriber/tests/layer_filters/combinators.rs
new file mode 100644
index 000000000..6052a2d00
--- /dev/null
+++ b/vendor/tracing-subscriber/tests/layer_filters/combinators.rs
@@ -0,0 +1,42 @@
+use super::*;
+use tracing_subscriber::{
+ filter::{filter_fn, FilterExt, LevelFilter},
+ prelude::*,
+};
+
+#[test]
+fn and() {
+ let (layer, handle) = layer::mock()
+ .event(
+ event::msg("a very interesting event")
+ .at_level(tracing::Level::INFO)
+ .with_target("interesting_target"),
+ )
+ .done()
+ .run_with_handle();
+
+ // Enables spans and events with targets starting with `interesting_target`:
+ let target_filter = filter::filter_fn(|meta| meta.target().starts_with("interesting_target"));
+
+ // Enables spans and events with levels `INFO` and below:
+ let level_filter = LevelFilter::INFO;
+
+ // Combine the two filters together, returning a filter that only enables
+ // spans and events that *both* filters will enable:
+ let filter = target_filter.and(level_filter);
+
+ let _subscriber = tracing_subscriber::registry()
+ .with(layer.with_filter(filter))
+ .set_default();
+
+ // This event will *not* be enabled:
+ tracing::info!("an event with an uninteresting target");
+
+ // This event *will* be enabled:
+ tracing::info!(target: "interesting_target", "a very interesting event");
+
+ // This event will *not* be enabled:
+ tracing::debug!(target: "interesting_target", "interesting debug event...");
+
+ handle.assert_finished();
+}
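
`FilterExt` also provides `or` and `not`, so the same building blocks compose the other way around. A hedged sketch, reusing the `interesting_target` name from the test above; the `fmt` layer is illustrative:

    use tracing_subscriber::{
        filter::{filter_fn, FilterExt, LevelFilter},
        prelude::*,
    };

    fn main() {
        // Enable WARN and above everywhere, *or* anything, at any level,
        // whose target starts with `interesting_target`.
        let interesting =
            filter_fn(|meta| meta.target().starts_with("interesting_target"));
        let filter = LevelFilter::WARN.or(interesting);

        tracing_subscriber::registry()
            .with(tracing_subscriber::fmt::layer().with_filter(filter))
            .init();

        tracing::info!(target: "interesting_target", "enabled by the `or` arm");
        tracing::info!("not enabled: below WARN and an uninteresting target");
    }
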
diff --git a/vendor/tracing-subscriber-0.3.3/tests/layer_filters/downcast_raw.rs b/vendor/tracing-subscriber/tests/layer_filters/downcast_raw.rs
index b5f7e35ce..b5f7e35ce 100644
--- a/vendor/tracing-subscriber-0.3.3/tests/layer_filters/downcast_raw.rs
+++ b/vendor/tracing-subscriber/tests/layer_filters/downcast_raw.rs
diff --git a/vendor/tracing-subscriber-0.3.3/tests/layer_filters/filter_scopes.rs b/vendor/tracing-subscriber/tests/layer_filters/filter_scopes.rs
index 7fd7d843b..7fd7d843b 100644
--- a/vendor/tracing-subscriber-0.3.3/tests/layer_filters/filter_scopes.rs
+++ b/vendor/tracing-subscriber/tests/layer_filters/filter_scopes.rs
diff --git a/vendor/tracing-subscriber-0.3.3/tests/layer_filters/main.rs b/vendor/tracing-subscriber/tests/layer_filters/main.rs
index 2359584d7..10f06c24c 100644
--- a/vendor/tracing-subscriber-0.3.3/tests/layer_filters/main.rs
+++ b/vendor/tracing-subscriber/tests/layer_filters/main.rs
@@ -5,11 +5,13 @@ use self::support::*;
mod boxed;
mod downcast_raw;
mod filter_scopes;
+mod per_event;
mod targets;
mod trees;
+mod vec;
use tracing::{level_filters::LevelFilter, Level};
-use tracing_subscriber::{filter, prelude::*};
+use tracing_subscriber::{filter, prelude::*, Layer};
#[test]
fn basic_layer_filters() {
diff --git a/vendor/tracing-subscriber/tests/layer_filters/per_event.rs b/vendor/tracing-subscriber/tests/layer_filters/per_event.rs
new file mode 100644
index 000000000..9c785f9a2
--- /dev/null
+++ b/vendor/tracing-subscriber/tests/layer_filters/per_event.rs
@@ -0,0 +1,61 @@
+use crate::support::*;
+use tracing::Level;
+use tracing_subscriber::{field::Visit, layer::Filter, prelude::*};
+
+struct FilterEvent;
+
+impl<S> Filter<S> for FilterEvent {
+ fn enabled(
+ &self,
+ _meta: &tracing::Metadata<'_>,
+ _cx: &tracing_subscriber::layer::Context<'_, S>,
+ ) -> bool {
+ true
+ }
+
+ fn event_enabled(
+ &self,
+ event: &tracing::Event<'_>,
+ _cx: &tracing_subscriber::layer::Context<'_, S>,
+ ) -> bool {
+ struct ShouldEnable(bool);
+ impl Visit for ShouldEnable {
+ fn record_bool(&mut self, field: &tracing_core::Field, value: bool) {
+ if field.name() == "enable" {
+ self.0 = value;
+ }
+ }
+
+ fn record_debug(
+ &mut self,
+ _field: &tracing_core::Field,
+ _value: &dyn core::fmt::Debug,
+ ) {
+ }
+ }
+ let mut should_enable = ShouldEnable(false);
+ event.record(&mut should_enable);
+ should_enable.0
+ }
+}
+
+#[test]
+fn per_subscriber_event_field_filtering() {
+ let (expect, handle) = layer::mock()
+ .event(event::mock().at_level(Level::TRACE))
+ .event(event::mock().at_level(Level::INFO))
+ .done()
+ .run_with_handle();
+
+ let _subscriber = tracing_subscriber::registry()
+ .with(expect.with_filter(FilterEvent))
+ .set_default();
+
+ tracing::trace!(enable = true, "hello trace");
+ tracing::debug!("hello debug");
+ tracing::info!(enable = true, "hello info");
+ tracing::warn!(enable = false, "hello warn");
+ tracing::error!("hello error");
+
+ handle.assert_finished();
+}
diff --git a/vendor/tracing-subscriber-0.3.3/tests/layer_filters/targets.rs b/vendor/tracing-subscriber/tests/layer_filters/targets.rs
index c8133044b..c8133044b 100644
--- a/vendor/tracing-subscriber-0.3.3/tests/layer_filters/targets.rs
+++ b/vendor/tracing-subscriber/tests/layer_filters/targets.rs
diff --git a/vendor/tracing-subscriber-0.3.3/tests/layer_filters/trees.rs b/vendor/tracing-subscriber/tests/layer_filters/trees.rs
index 18cdd8ccc..18cdd8ccc 100644
--- a/vendor/tracing-subscriber-0.3.3/tests/layer_filters/trees.rs
+++ b/vendor/tracing-subscriber/tests/layer_filters/trees.rs
diff --git a/vendor/tracing-subscriber/tests/layer_filters/vec.rs b/vendor/tracing-subscriber/tests/layer_filters/vec.rs
new file mode 100644
index 000000000..87244e4ab
--- /dev/null
+++ b/vendor/tracing-subscriber/tests/layer_filters/vec.rs
@@ -0,0 +1,120 @@
+use super::*;
+use tracing::Subscriber;
+
+#[test]
+fn with_filters_unboxed() {
+ let (trace_layer, trace_handle) = layer::named("trace")
+ .event(event::mock().at_level(Level::TRACE))
+ .event(event::mock().at_level(Level::DEBUG))
+ .event(event::mock().at_level(Level::INFO))
+ .done()
+ .run_with_handle();
+ let trace_layer = trace_layer.with_filter(LevelFilter::TRACE);
+
+ let (debug_layer, debug_handle) = layer::named("debug")
+ .event(event::mock().at_level(Level::DEBUG))
+ .event(event::mock().at_level(Level::INFO))
+ .done()
+ .run_with_handle();
+ let debug_layer = debug_layer.with_filter(LevelFilter::DEBUG);
+
+ let (info_layer, info_handle) = layer::named("info")
+ .event(event::mock().at_level(Level::INFO))
+ .done()
+ .run_with_handle();
+ let info_layer = info_layer.with_filter(LevelFilter::INFO);
+
+ let _subscriber = tracing_subscriber::registry()
+ .with(vec![trace_layer, debug_layer, info_layer])
+ .set_default();
+
+ tracing::trace!("hello trace");
+ tracing::debug!("hello debug");
+ tracing::info!("hello info");
+
+ trace_handle.assert_finished();
+ debug_handle.assert_finished();
+ info_handle.assert_finished();
+}
+
+#[test]
+fn with_filters_boxed() {
+ let (unfiltered_layer, unfiltered_handle) = layer::named("unfiltered")
+ .event(event::mock().at_level(Level::TRACE))
+ .event(event::mock().at_level(Level::DEBUG))
+ .event(event::mock().at_level(Level::INFO))
+ .done()
+ .run_with_handle();
+ let unfiltered_layer = unfiltered_layer.boxed();
+
+ let (debug_layer, debug_handle) = layer::named("debug")
+ .event(event::mock().at_level(Level::DEBUG))
+ .event(event::mock().at_level(Level::INFO))
+ .done()
+ .run_with_handle();
+ let debug_layer = debug_layer.with_filter(LevelFilter::DEBUG).boxed();
+
+ let (target_layer, target_handle) = layer::named("target")
+ .event(event::mock().at_level(Level::INFO))
+ .done()
+ .run_with_handle();
+ let target_layer = target_layer
+ .with_filter(filter::filter_fn(|meta| meta.target() == "my_target"))
+ .boxed();
+
+ let _subscriber = tracing_subscriber::registry()
+ .with(vec![unfiltered_layer, debug_layer, target_layer])
+ .set_default();
+
+ tracing::trace!("hello trace");
+ tracing::debug!("hello debug");
+ tracing::info!(target: "my_target", "hello my target");
+
+ unfiltered_handle.assert_finished();
+ debug_handle.assert_finished();
+ target_handle.assert_finished();
+}
+
+#[test]
+fn mixed_max_level_hint() {
+ let unfiltered = layer::named("unfiltered").run().boxed();
+ let info = layer::named("info")
+ .run()
+ .with_filter(LevelFilter::INFO)
+ .boxed();
+ let debug = layer::named("debug")
+ .run()
+ .with_filter(LevelFilter::DEBUG)
+ .boxed();
+
+ let subscriber = tracing_subscriber::registry().with(vec![unfiltered, info, debug]);
+
+ assert_eq!(subscriber.max_level_hint(), None);
+}
+
+#[test]
+fn all_filtered_max_level_hint() {
+ let warn = layer::named("warn")
+ .run()
+ .with_filter(LevelFilter::WARN)
+ .boxed();
+ let info = layer::named("info")
+ .run()
+ .with_filter(LevelFilter::INFO)
+ .boxed();
+ let debug = layer::named("debug")
+ .run()
+ .with_filter(LevelFilter::DEBUG)
+ .boxed();
+
+ let subscriber = tracing_subscriber::registry().with(vec![warn, info, debug]);
+
+ assert_eq!(subscriber.max_level_hint(), Some(LevelFilter::DEBUG));
+}
+
+#[test]
+fn empty_vec() {
+    // An empty `Vec` of layers means everything is off
+ let subscriber = tracing_subscriber::registry().with(Vec::<ExpectLayer>::new());
+ assert_eq!(subscriber.max_level_hint(), Some(LevelFilter::OFF));
+}
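
Because `Vec<L>` implements `Layer` when `L` does, a layer stack can be assembled at runtime, and boxing erases the concrete types so differently filtered layers can share one `Vec`. A sketch under the same assumptions (default `fmt` feature; the two layers are illustrative):

    use tracing_subscriber::{filter::LevelFilter, prelude::*, Layer};

    fn main() {
        let mut layers = Vec::new();
        // An unfiltered layer and a WARN-capped one, erased to the same
        // boxed type so they can live in one `Vec`.
        layers.push(tracing_subscriber::fmt::layer().boxed());
        layers.push(
            tracing_subscriber::fmt::layer()
                .with_filter(LevelFilter::WARN)
                .boxed(),
        );

        tracing_subscriber::registry().with(layers).init();
    }
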
diff --git a/vendor/tracing-subscriber-0.3.3/tests/multiple_layer_filter_interests_cached.rs b/vendor/tracing-subscriber/tests/multiple_layer_filter_interests_cached.rs
index 5c25e7f03..5c25e7f03 100644
--- a/vendor/tracing-subscriber-0.3.3/tests/multiple_layer_filter_interests_cached.rs
+++ b/vendor/tracing-subscriber/tests/multiple_layer_filter_interests_cached.rs
diff --git a/vendor/tracing-subscriber/tests/option.rs b/vendor/tracing-subscriber/tests/option.rs
new file mode 100644
index 000000000..c87519c30
--- /dev/null
+++ b/vendor/tracing-subscriber/tests/option.rs
@@ -0,0 +1,262 @@
+#![cfg(feature = "registry")]
+use tracing_core::{subscriber::Interest, LevelFilter, Metadata, Subscriber};
+use tracing_subscriber::{layer, prelude::*};
+
+// A basic layer that returns its inner value from `max_level_hint`
+#[derive(Debug)]
+struct BasicLayer(Option<LevelFilter>);
+impl<S: Subscriber> tracing_subscriber::Layer<S> for BasicLayer {
+ fn register_callsite(&self, _m: &Metadata<'_>) -> Interest {
+ Interest::sometimes()
+ }
+
+ fn enabled(&self, _m: &Metadata<'_>, _: layer::Context<'_, S>) -> bool {
+ true
+ }
+
+ fn max_level_hint(&self) -> Option<LevelFilter> {
+ self.0
+ }
+}
+
+// This test just provides a baseline to compare the tests below against
+#[test]
+fn just_layer() {
+ let subscriber = tracing_subscriber::registry().with(LevelFilter::INFO);
+ assert_eq!(subscriber.max_level_hint(), Some(LevelFilter::INFO));
+}
+
+#[test]
+fn subscriber_and_option_some_layer() {
+ let subscriber = tracing_subscriber::registry()
+ .with(LevelFilter::INFO)
+ .with(Some(LevelFilter::DEBUG));
+ assert_eq!(subscriber.max_level_hint(), Some(LevelFilter::DEBUG));
+}
+
+#[test]
+fn subscriber_and_option_none_layer() {
+ // None means the other layer takes control
+ let subscriber = tracing_subscriber::registry()
+ .with(LevelFilter::ERROR)
+ .with(None::<LevelFilter>);
+ assert_eq!(subscriber.max_level_hint(), Some(LevelFilter::ERROR));
+}
+
+#[test]
+fn just_option_some_layer() {
+ // Just a None means everything is off
+ let subscriber = tracing_subscriber::registry().with(None::<LevelFilter>);
+ assert_eq!(subscriber.max_level_hint(), Some(LevelFilter::OFF));
+}
+
+/// A `Some(layer)` behaves like the wrapped layer, so its max level hint is used directly
+#[test]
+fn just_option_none_layer() {
+ let subscriber = tracing_subscriber::registry().with(Some(LevelFilter::ERROR));
+ assert_eq!(subscriber.max_level_hint(), Some(LevelFilter::ERROR));
+}
+
+// Test that the `None` max level hint only applies if it's the only layer
+#[test]
+fn none_outside_doesnt_override_max_level() {
+ // None means the other layer takes control
+ let subscriber = tracing_subscriber::registry()
+ .with(BasicLayer(None))
+ .with(None::<LevelFilter>);
+ assert_eq!(
+ subscriber.max_level_hint(),
+ None,
+ "\n stack: {:#?}",
+ subscriber
+ );
+
+ // The `None`-returning layer still wins
+ let subscriber = tracing_subscriber::registry()
+ .with(BasicLayer(None))
+ .with(Some(LevelFilter::ERROR));
+ assert_eq!(
+ subscriber.max_level_hint(),
+ Some(LevelFilter::ERROR),
+ "\n stack: {:#?}",
+ subscriber
+ );
+
+ // Check that we aren't doing anything truly wrong
+ let subscriber = tracing_subscriber::registry()
+ .with(BasicLayer(Some(LevelFilter::DEBUG)))
+ .with(None::<LevelFilter>);
+ assert_eq!(
+ subscriber.max_level_hint(),
+ Some(LevelFilter::DEBUG),
+ "\n stack: {:#?}",
+ subscriber
+ );
+
+    // Test that per-layer filters aren't affected
+
+ // One layer is None so it "wins"
+ let subscriber = tracing_subscriber::registry()
+ .with(BasicLayer(None))
+ .with(None::<LevelFilter>.with_filter(LevelFilter::DEBUG));
+ assert_eq!(
+ subscriber.max_level_hint(),
+ None,
+ "\n stack: {:#?}",
+ subscriber
+ );
+
+    // The more verbose max level wins
+ let subscriber = tracing_subscriber::registry()
+ .with(BasicLayer(Some(LevelFilter::INFO)))
+ .with(None::<LevelFilter>.with_filter(LevelFilter::DEBUG));
+ assert_eq!(
+ subscriber.max_level_hint(),
+ Some(LevelFilter::DEBUG),
+ "\n stack: {:#?}",
+ subscriber
+ );
+
+ // Test filter on the other layer
+ let subscriber = tracing_subscriber::registry()
+ .with(BasicLayer(Some(LevelFilter::INFO)).with_filter(LevelFilter::DEBUG))
+ .with(None::<LevelFilter>);
+ assert_eq!(
+ subscriber.max_level_hint(),
+ Some(LevelFilter::DEBUG),
+ "\n stack: {:#?}",
+ subscriber
+ );
+ let subscriber = tracing_subscriber::registry()
+ .with(BasicLayer(None).with_filter(LevelFilter::DEBUG))
+ .with(None::<LevelFilter>);
+ assert_eq!(
+ subscriber.max_level_hint(),
+ Some(LevelFilter::DEBUG),
+ "\n stack: {:#?}",
+ subscriber
+ );
+
+    // The `OFF` from `None` is overridden.
+ let subscriber = tracing_subscriber::registry()
+ .with(BasicLayer(Some(LevelFilter::INFO)))
+ .with(None::<LevelFilter>);
+ assert_eq!(
+ subscriber.max_level_hint(),
+ Some(LevelFilter::INFO),
+ "\n stack: {:#?}",
+ subscriber
+ );
+}
+
+// Test that the `None` max level hint only applies if it's the only layer
+#[test]
+fn none_inside_doesnt_override_max_level() {
+ // None means the other layer takes control
+ let subscriber = tracing_subscriber::registry()
+ .with(None::<LevelFilter>)
+ .with(BasicLayer(None));
+ assert_eq!(
+ subscriber.max_level_hint(),
+ None,
+ "\n stack: {:#?}",
+ subscriber
+ );
+
+ // The `None`-returning layer still wins
+ let subscriber = tracing_subscriber::registry()
+ .with(Some(LevelFilter::ERROR))
+ .with(BasicLayer(None));
+ assert_eq!(
+ subscriber.max_level_hint(),
+ Some(LevelFilter::ERROR),
+ "\n stack: {:#?}",
+ subscriber
+ );
+
+ // Check that we aren't doing anything truly wrong
+ let subscriber = tracing_subscriber::registry()
+ .with(None::<LevelFilter>)
+ .with(BasicLayer(Some(LevelFilter::DEBUG)));
+ assert_eq!(
+ subscriber.max_level_hint(),
+ Some(LevelFilter::DEBUG),
+ "\n stack: {:#?}",
+ subscriber
+ );
+
+    // Test that per-layer filters aren't affected
+
+ // One layer is None so it "wins"
+ let subscriber = tracing_subscriber::registry()
+ .with(None::<LevelFilter>.with_filter(LevelFilter::DEBUG))
+ .with(BasicLayer(None));
+ assert_eq!(
+ subscriber.max_level_hint(),
+ None,
+ "\n stack: {:#?}",
+ subscriber
+ );
+
+    // The more verbose max level wins
+ let subscriber = tracing_subscriber::registry()
+ .with(None::<LevelFilter>.with_filter(LevelFilter::DEBUG))
+ .with(BasicLayer(Some(LevelFilter::INFO)));
+ assert_eq!(
+ subscriber.max_level_hint(),
+ Some(LevelFilter::DEBUG),
+ "\n stack: {:#?}",
+ subscriber
+ );
+
+ // Test filter on the other layer
+ let subscriber = tracing_subscriber::registry()
+ .with(None::<LevelFilter>)
+ .with(BasicLayer(Some(LevelFilter::INFO)).with_filter(LevelFilter::DEBUG));
+ assert_eq!(
+ subscriber.max_level_hint(),
+ Some(LevelFilter::DEBUG),
+ "\n stack: {:#?}",
+ subscriber
+ );
+ let subscriber = tracing_subscriber::registry()
+ .with(None::<LevelFilter>)
+ .with(BasicLayer(None).with_filter(LevelFilter::DEBUG));
+ assert_eq!(
+ subscriber.max_level_hint(),
+ Some(LevelFilter::DEBUG),
+ "\n stack: {:#?}",
+ subscriber
+ );
+
+    // The `OFF` from `None` is overridden.
+ let subscriber = tracing_subscriber::registry()
+ .with(None::<LevelFilter>)
+ .with(BasicLayer(Some(LevelFilter::INFO)));
+ assert_eq!(
+ subscriber.max_level_hint(),
+ Some(LevelFilter::INFO),
+ "\n stack: {:#?}",
+ subscriber
+ );
+}
+
+/// Tests that the logic covered by the `*_doesnt_override_max_level` tests also works through the reload layer
+#[test]
+fn reload_works_with_none() {
+ let (layer1, handle1) = tracing_subscriber::reload::Layer::new(None::<BasicLayer>);
+ let (layer2, _handle2) = tracing_subscriber::reload::Layer::new(None::<BasicLayer>);
+
+ let subscriber = tracing_subscriber::registry().with(layer1).with(layer2);
+ assert_eq!(subscriber.max_level_hint(), Some(LevelFilter::OFF));
+
+ // reloading one should pass through correctly.
+ handle1.reload(Some(BasicLayer(None))).unwrap();
+ assert_eq!(subscriber.max_level_hint(), None);
+
+ // Check pass-through of an actual level as well
+ handle1
+ .reload(Some(BasicLayer(Some(LevelFilter::DEBUG))))
+ .unwrap();
+ assert_eq!(subscriber.max_level_hint(), Some(LevelFilter::DEBUG));
+}
diff --git a/vendor/tracing-subscriber-0.3.3/tests/registry_max_level_hint.rs b/vendor/tracing-subscriber/tests/registry_max_level_hint.rs
index f94c8a1fb..f94c8a1fb 100644
--- a/vendor/tracing-subscriber-0.3.3/tests/registry_max_level_hint.rs
+++ b/vendor/tracing-subscriber/tests/registry_max_level_hint.rs
diff --git a/vendor/tracing-subscriber-0.3.3/tests/registry_with_subscriber.rs b/vendor/tracing-subscriber/tests/registry_with_subscriber.rs
index 3f8d99b1d..50d2f551d 100644
--- a/vendor/tracing-subscriber-0.3.3/tests/registry_with_subscriber.rs
+++ b/vendor/tracing-subscriber/tests/registry_with_subscriber.rs
@@ -4,7 +4,7 @@ use tracing_subscriber::prelude::*;
#[tokio::test]
async fn future_with_subscriber() {
- let _default = tracing_subscriber::registry().init();
+ tracing_subscriber::registry().init();
let span = tracing::info_span!("foo");
let _e = span.enter();
let span = tracing::info_span!("bar");
diff --git a/vendor/tracing-subscriber/tests/reload.rs b/vendor/tracing-subscriber/tests/reload.rs
new file mode 100644
index 000000000..28662e2e6
--- /dev/null
+++ b/vendor/tracing-subscriber/tests/reload.rs
@@ -0,0 +1,155 @@
+#![cfg(feature = "registry")]
+use std::sync::atomic::{AtomicUsize, Ordering};
+use tracing_core::{
+ span::{Attributes, Id, Record},
+ subscriber::Interest,
+ Event, LevelFilter, Metadata, Subscriber,
+};
+use tracing_subscriber::{layer, prelude::*, reload::*};
+
+pub struct NopSubscriber;
+fn event() {
+ tracing::info!("my event");
+}
+
+impl Subscriber for NopSubscriber {
+ fn register_callsite(&self, _: &'static Metadata<'static>) -> Interest {
+ Interest::never()
+ }
+
+ fn enabled(&self, _: &Metadata<'_>) -> bool {
+ false
+ }
+
+ fn new_span(&self, _: &Attributes<'_>) -> Id {
+ Id::from_u64(1)
+ }
+
+ fn record(&self, _: &Id, _: &Record<'_>) {}
+ fn record_follows_from(&self, _: &Id, _: &Id) {}
+ fn event(&self, _: &Event<'_>) {}
+ fn enter(&self, _: &Id) {}
+ fn exit(&self, _: &Id) {}
+}
+
+#[test]
+fn reload_handle() {
+ static FILTER1_CALLS: AtomicUsize = AtomicUsize::new(0);
+ static FILTER2_CALLS: AtomicUsize = AtomicUsize::new(0);
+
+ enum Filter {
+ One,
+ Two,
+ }
+
+ impl<S: Subscriber> tracing_subscriber::Layer<S> for Filter {
+ fn register_callsite(&self, m: &Metadata<'_>) -> Interest {
+ println!("REGISTER: {:?}", m);
+ Interest::sometimes()
+ }
+
+ fn enabled(&self, m: &Metadata<'_>, _: layer::Context<'_, S>) -> bool {
+ println!("ENABLED: {:?}", m);
+ match self {
+ Filter::One => FILTER1_CALLS.fetch_add(1, Ordering::SeqCst),
+ Filter::Two => FILTER2_CALLS.fetch_add(1, Ordering::SeqCst),
+ };
+ true
+ }
+
+ fn max_level_hint(&self) -> Option<LevelFilter> {
+ match self {
+ Filter::One => Some(LevelFilter::INFO),
+ Filter::Two => Some(LevelFilter::DEBUG),
+ }
+ }
+ }
+
+ let (layer, handle) = Layer::new(Filter::One);
+
+ let subscriber = tracing_core::dispatcher::Dispatch::new(layer.with_subscriber(NopSubscriber));
+
+ tracing_core::dispatcher::with_default(&subscriber, || {
+ assert_eq!(FILTER1_CALLS.load(Ordering::SeqCst), 0);
+ assert_eq!(FILTER2_CALLS.load(Ordering::SeqCst), 0);
+
+ event();
+
+ assert_eq!(FILTER1_CALLS.load(Ordering::SeqCst), 1);
+ assert_eq!(FILTER2_CALLS.load(Ordering::SeqCst), 0);
+
+ assert_eq!(LevelFilter::current(), LevelFilter::INFO);
+ handle.reload(Filter::Two).expect("should reload");
+ assert_eq!(LevelFilter::current(), LevelFilter::DEBUG);
+
+ event();
+
+ assert_eq!(FILTER1_CALLS.load(Ordering::SeqCst), 1);
+ assert_eq!(FILTER2_CALLS.load(Ordering::SeqCst), 1);
+ })
+}
+
+#[test]
+fn reload_filter() {
+ struct NopLayer;
+ impl<S: Subscriber> tracing_subscriber::Layer<S> for NopLayer {
+ fn register_callsite(&self, _m: &Metadata<'_>) -> Interest {
+ Interest::sometimes()
+ }
+
+ fn enabled(&self, _m: &Metadata<'_>, _: layer::Context<'_, S>) -> bool {
+ true
+ }
+ }
+
+ static FILTER1_CALLS: AtomicUsize = AtomicUsize::new(0);
+ static FILTER2_CALLS: AtomicUsize = AtomicUsize::new(0);
+
+ enum Filter {
+ One,
+ Two,
+ }
+
+ impl<S: Subscriber> tracing_subscriber::layer::Filter<S> for Filter {
+ fn enabled(&self, m: &Metadata<'_>, _: &layer::Context<'_, S>) -> bool {
+ println!("ENABLED: {:?}", m);
+ match self {
+ Filter::One => FILTER1_CALLS.fetch_add(1, Ordering::SeqCst),
+ Filter::Two => FILTER2_CALLS.fetch_add(1, Ordering::SeqCst),
+ };
+ true
+ }
+
+ fn max_level_hint(&self) -> Option<LevelFilter> {
+ match self {
+ Filter::One => Some(LevelFilter::INFO),
+ Filter::Two => Some(LevelFilter::DEBUG),
+ }
+ }
+ }
+
+ let (filter, handle) = Layer::new(Filter::One);
+
+ let dispatcher = tracing_core::Dispatch::new(
+ tracing_subscriber::registry().with(NopLayer.with_filter(filter)),
+ );
+
+ tracing_core::dispatcher::with_default(&dispatcher, || {
+ assert_eq!(FILTER1_CALLS.load(Ordering::SeqCst), 0);
+ assert_eq!(FILTER2_CALLS.load(Ordering::SeqCst), 0);
+
+ event();
+
+ assert_eq!(FILTER1_CALLS.load(Ordering::SeqCst), 1);
+ assert_eq!(FILTER2_CALLS.load(Ordering::SeqCst), 0);
+
+ assert_eq!(LevelFilter::current(), LevelFilter::INFO);
+ handle.reload(Filter::Two).expect("should reload");
+ assert_eq!(LevelFilter::current(), LevelFilter::DEBUG);
+
+ event();
+
+ assert_eq!(FILTER1_CALLS.load(Ordering::SeqCst), 1);
+ assert_eq!(FILTER2_CALLS.load(Ordering::SeqCst), 1);
+ })
+}
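
The same handle-driven reloading, outside the test harness, looks roughly like this. It is only a sketch and assumes the default `fmt` feature, with a plain `LevelFilter` as the reloadable value:

    use tracing_subscriber::{filter::LevelFilter, prelude::*, reload};

    fn main() {
        let (filter, handle) = reload::Layer::new(LevelFilter::INFO);

        tracing_subscriber::registry()
            .with(tracing_subscriber::fmt::layer().with_filter(filter))
            .init();

        tracing::debug!("not recorded: the filter is still at INFO");
        handle.reload(LevelFilter::DEBUG).expect("reload should succeed");
        tracing::debug!("recorded: the filter now allows DEBUG");
    }
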
diff --git a/vendor/tracing-subscriber-0.3.3/tests/same_len_filters.rs b/vendor/tracing-subscriber/tests/same_len_filters.rs
index b525ea6fd..879e578d7 100644
--- a/vendor/tracing-subscriber-0.3.3/tests/same_len_filters.rs
+++ b/vendor/tracing-subscriber/tests/same_len_filters.rs
@@ -1,9 +1,9 @@
// These tests include field filters with no targets, so they have to go in a
// separate file.
#![cfg(feature = "env-filter")]
-mod support;
-use self::support::*;
+
use tracing::{self, subscriber::with_default, Level};
+use tracing_mock::*;
use tracing_subscriber::{filter::EnvFilter, prelude::*};
#[test]
diff --git a/vendor/tracing-subscriber-0.3.3/tests/support.rs b/vendor/tracing-subscriber/tests/support.rs
index 848ebdc63..50e0e6669 100644
--- a/vendor/tracing-subscriber-0.3.3/tests/support.rs
+++ b/vendor/tracing-subscriber/tests/support.rs
@@ -1,20 +1,15 @@
#![allow(missing_docs, dead_code)]
-pub use self::support::{event, field, span, subscriber};
-// This has to have the same name as the module in `tracing`.
-// path attribute requires referenced module to have same name so allow module inception here
-#[allow(clippy::module_inception)]
-#[path = "../../tracing/tests/support/mod.rs"]
-mod support;
-
-use self::{
- event::MockEvent,
- span::{MockSpan, NewSpan},
- subscriber::{Expect, MockHandle},
-};
+pub use tracing_mock::{event, field, span, subscriber};
+
use tracing_core::{
span::{Attributes, Id, Record},
Event, Subscriber,
};
+use tracing_mock::{
+ event::MockEvent,
+ span::{MockSpan, NewSpan},
+ subscriber::{Expect, MockHandle},
+};
use tracing_subscriber::{
layer::{Context, Layer},
registry::{LookupSpan, SpanRef},
diff --git a/vendor/tracing-subscriber-0.3.3/tests/unhinted_layer_filters_dont_break_other_layers.rs b/vendor/tracing-subscriber/tests/unhinted_layer_filters_dont_break_other_layers.rs
index 9fa5c6bd4..9fa5c6bd4 100644
--- a/vendor/tracing-subscriber-0.3.3/tests/unhinted_layer_filters_dont_break_other_layers.rs
+++ b/vendor/tracing-subscriber/tests/unhinted_layer_filters_dont_break_other_layers.rs
diff --git a/vendor/tracing-subscriber-0.3.3/tests/utils.rs b/vendor/tracing-subscriber/tests/utils.rs
index ff025a2a2..e95868d5e 100644
--- a/vendor/tracing-subscriber-0.3.3/tests/utils.rs
+++ b/vendor/tracing-subscriber/tests/utils.rs
@@ -1,6 +1,6 @@
#![cfg(feature = "std")]
-mod support;
-use self::support::*;
+
+use tracing_mock::*;
use tracing_subscriber::prelude::*;
#[test]
diff --git a/vendor/tracing-subscriber/tests/vec.rs b/vendor/tracing-subscriber/tests/vec.rs
new file mode 100644
index 000000000..92abf0bff
--- /dev/null
+++ b/vendor/tracing-subscriber/tests/vec.rs
@@ -0,0 +1,19 @@
+#![cfg(feature = "registry")]
+use tracing::level_filters::LevelFilter;
+use tracing::Subscriber;
+use tracing_subscriber::prelude::*;
+
+#[test]
+fn just_empty_vec() {
+    // An empty `Vec` of layers means everything is off
+ let subscriber = tracing_subscriber::registry().with(Vec::<LevelFilter>::new());
+ assert_eq!(subscriber.max_level_hint(), Some(LevelFilter::OFF));
+}
+
+#[test]
+fn layer_and_empty_vec() {
+ let subscriber = tracing_subscriber::registry()
+ .with(LevelFilter::INFO)
+ .with(Vec::<LevelFilter>::new());
+ assert_eq!(subscriber.max_level_hint(), Some(LevelFilter::INFO));
+}
diff --git a/vendor/tracing-subscriber/tests/vec_subscriber_filter_interests_cached.rs b/vendor/tracing-subscriber/tests/vec_subscriber_filter_interests_cached.rs
new file mode 100644
index 000000000..10467cb7d
--- /dev/null
+++ b/vendor/tracing-subscriber/tests/vec_subscriber_filter_interests_cached.rs
@@ -0,0 +1,117 @@
+#![cfg(feature = "registry")]
+mod support;
+use self::support::*;
+
+use std::{
+ collections::HashMap,
+ sync::{Arc, Mutex},
+};
+use tracing::{Level, Subscriber};
+use tracing_subscriber::{filter, prelude::*};
+
+#[test]
+fn vec_layer_filter_interests_are_cached() {
+ let mk_filtered = |level: Level, subscriber: ExpectLayer| {
+ let seen = Arc::new(Mutex::new(HashMap::new()));
+ let filter = filter::filter_fn({
+ let seen = seen.clone();
+ move |meta| {
+ *seen.lock().unwrap().entry(*meta.level()).or_insert(0usize) += 1;
+ meta.level() <= &level
+ }
+ });
+ (subscriber.with_filter(filter).boxed(), seen)
+ };
+
+ // This layer will return Interest::always for INFO and lower.
+ let (info_layer, info_handle) = layer::named("info")
+ .event(event::mock().at_level(Level::INFO))
+ .event(event::mock().at_level(Level::WARN))
+ .event(event::mock().at_level(Level::ERROR))
+ .event(event::mock().at_level(Level::INFO))
+ .event(event::mock().at_level(Level::WARN))
+ .event(event::mock().at_level(Level::ERROR))
+ .done()
+ .run_with_handle();
+ let (info_layer, seen_info) = mk_filtered(Level::INFO, info_layer);
+
+ // This layer will return Interest::always for WARN and lower.
+ let (warn_layer, warn_handle) = layer::named("warn")
+ .event(event::mock().at_level(Level::WARN))
+ .event(event::mock().at_level(Level::ERROR))
+ .event(event::mock().at_level(Level::WARN))
+ .event(event::mock().at_level(Level::ERROR))
+ .done()
+ .run_with_handle();
+ let (warn_layer, seen_warn) = mk_filtered(Level::WARN, warn_layer);
+
+ let subscriber = tracing_subscriber::registry().with(vec![warn_layer, info_layer]);
+ assert!(subscriber.max_level_hint().is_none());
+
+ let _subscriber = subscriber.set_default();
+
+ fn events() {
+ tracing::trace!("hello trace");
+ tracing::debug!("hello debug");
+ tracing::info!("hello info");
+ tracing::warn!("hello warn");
+ tracing::error!("hello error");
+ }
+
+ events();
+ {
+ let lock = seen_info.lock().unwrap();
+ for (&level, &count) in lock.iter() {
+ if level == Level::INFO {
+ continue;
+ }
+ assert_eq!(
+ count, 1,
+ "level {:?} should have been seen 1 time by the INFO subscriber (after first set of events)",
+ level
+ );
+ }
+
+ let lock = seen_warn.lock().unwrap();
+ for (&level, &count) in lock.iter() {
+ if level == Level::INFO {
+ continue;
+ }
+ assert_eq!(
+ count, 1,
+ "level {:?} should have been seen 1 time by the WARN subscriber (after first set of events)",
+ level
+ );
+ }
+ }
+
+ events();
+ {
+ let lock = seen_info.lock().unwrap();
+ for (&level, &count) in lock.iter() {
+ if level == Level::INFO {
+ continue;
+ }
+ assert_eq!(
+ count, 1,
+ "level {:?} should have been seen 1 time by the INFO subscriber (after second set of events)",
+ level
+ );
+ }
+
+ let lock = seen_warn.lock().unwrap();
+ for (&level, &count) in lock.iter() {
+ if level == Level::INFO {
+ continue;
+ }
+ assert_eq!(
+ count, 1,
+ "level {:?} should have been seen 1 time by the WARN subscriber (after second set of events)",
+ level
+ );
+ }
+ }
+
+ info_handle.assert_finished();
+ warn_handle.assert_finished();
+}
diff --git a/vendor/tracing-tree/.cargo-checksum.json b/vendor/tracing-tree/.cargo-checksum.json
index c140e495f..215b14ece 100644
--- a/vendor/tracing-tree/.cargo-checksum.json
+++ b/vendor/tracing-tree/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.lock":"e54b0301e8816a5e2d82ba08646102d89ee556d5e9150c571123a34013e6a79a","Cargo.toml":"734e1e9cf702e362c0661b968e92fd4599b732208a65634dd516d085449d45b2","LICENSE-APACHE":"c9c77d27bd737506eee1be90c11885fad935695714eafacd6725f41fff9ec8da","LICENSE-MIT":"d31139de68f7e19f9b32cd7de53dbb125b3c68522bbbb1d0c186c33112d34466","README.md":"589c2534836513b4edfa2811269767a9ac3c48da909c8ab936db3e646410dcf7","examples/basic.rs":"b56dad20aa283aae17a9c1364cc0326714cdc89d6a02a6329034e476b3597fd7","examples/basic.stdout":"49767873eb520f3e457b48d930fa86583d9ca5ef5571d67582b08dcb788ff6f2","examples/quiet.rs":"65a4cbb650cea4939ed274c2fd4d50c953151716fa6564b23393e1df6fb7afd7","examples/quiet.stdout":"d58a3240258279821b5d08e0d39ea60dee2dbd6a955f8639daf0d149ad0616fa","examples/stderr.rs":"ce0896f50be810908fb7083661bf39da6c9733ddef9188d40499313a40fe2c7a","examples/stderr.stderr":"8a0f6553fee903f159c6ea528dda6c5b193064ef9b58c765d1f3f60ecba5ac1f","examples/wraparound.rs":"7998f2e7b9e1f0dfc4b0cf11b1cdcf0f44c85aeaabbea9749113584ac7a3879f","examples/wraparound.stdout":"1c042231b3f8b20a843c18af063e43a51fcc852597dd1713f5878d4a8128271c","src/format.rs":"a986c0a259aec3135bcfd4908f6a80173dea03b3f57efe1dfa7c9d79f2b55257","src/lib.rs":"0a62652d641131ff67c31103331b977ca42ed268a1703aa7365180c70465f536","tests/ui.rs":"32a02ef41ae5cbabe13164f8d665b287b0bb764b2dc5bcb80443cb551df5289a"},"package":"d07e90b329c621ade432823988574e820212648aa40e7a2497777d58de0fb453"} \ No newline at end of file
+{"files":{"Cargo.lock":"a8a2a7ff05ca79c6ded4cf0625f6188d43625b8ba7358950eafa11b3af01dbda","Cargo.toml":"d883458574569e20468318abd1792c7312d9a31ba44dcc59203f6aa6b4bb3b01","LICENSE-APACHE":"c9c77d27bd737506eee1be90c11885fad935695714eafacd6725f41fff9ec8da","LICENSE-MIT":"d31139de68f7e19f9b32cd7de53dbb125b3c68522bbbb1d0c186c33112d34466","README.md":"589c2534836513b4edfa2811269767a9ac3c48da909c8ab936db3e646410dcf7","examples/basic.rs":"b56dad20aa283aae17a9c1364cc0326714cdc89d6a02a6329034e476b3597fd7","examples/basic.stdout":"49767873eb520f3e457b48d930fa86583d9ca5ef5571d67582b08dcb788ff6f2","examples/quiet.rs":"65a4cbb650cea4939ed274c2fd4d50c953151716fa6564b23393e1df6fb7afd7","examples/quiet.stdout":"d58a3240258279821b5d08e0d39ea60dee2dbd6a955f8639daf0d149ad0616fa","examples/stderr.rs":"ce0896f50be810908fb7083661bf39da6c9733ddef9188d40499313a40fe2c7a","examples/stderr.stderr":"8a0f6553fee903f159c6ea528dda6c5b193064ef9b58c765d1f3f60ecba5ac1f","examples/wraparound.rs":"7998f2e7b9e1f0dfc4b0cf11b1cdcf0f44c85aeaabbea9749113584ac7a3879f","examples/wraparound.stdout":"1c042231b3f8b20a843c18af063e43a51fcc852597dd1713f5878d4a8128271c","src/format.rs":"0dbfbe7a1939fefc3ade3ffa2113d382424f387130388fd3431210bbccef8b8b","src/lib.rs":"8a286a67ae3860d1c30cb336c1a6253920bf63b789e455a27d0e8b611228c1a6","tests/ui.rs":"32a02ef41ae5cbabe13164f8d665b287b0bb764b2dc5bcb80443cb551df5289a"},"package":"758e983ab7c54fee18403994507e7f212b9005e957ce7984996fac8d11facedb"} \ No newline at end of file
diff --git a/vendor/tracing-tree/Cargo.lock b/vendor/tracing-tree/Cargo.lock
index 82a94c9e0..2d4ae7226 100644
--- a/vendor/tracing-tree/Cargo.lock
+++ b/vendor/tracing-tree/Cargo.lock
@@ -3,15 +3,6 @@
version = 3
[[package]]
-name = "ansi_term"
-version = "0.12.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2"
-dependencies = [
- "winapi",
-]
-
-[[package]]
name = "assert_cmd"
version = "1.0.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -123,12 +114,28 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
[[package]]
+name = "nu-ansi-term"
+version = "0.46.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84"
+dependencies = [
+ "overload",
+ "winapi",
+]
+
+[[package]]
name = "once_cell"
version = "1.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7709cef83f0c1f58f666e746a08b21e0085f7440fa6a29cc194d68aac97a4225"
[[package]]
+name = "overload"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
+
+[[package]]
name = "pin-project-lite"
version = "0.2.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -277,13 +284,13 @@ dependencies = [
[[package]]
name = "tracing-tree"
-version = "0.2.1"
+version = "0.2.2"
dependencies = [
- "ansi_term",
"assert_cmd",
"atty",
"glob",
"log",
+ "nu-ansi-term",
"tracing",
"tracing-core",
"tracing-log",
diff --git a/vendor/tracing-tree/Cargo.toml b/vendor/tracing-tree/Cargo.toml
index 32c47092f..7e8799662 100644
--- a/vendor/tracing-tree/Cargo.toml
+++ b/vendor/tracing-tree/Cargo.toml
@@ -12,7 +12,7 @@
[package]
edition = "2018"
name = "tracing-tree"
-version = "0.2.1"
+version = "0.2.2"
authors = [
"David Barsky <me@davidbarsky.com>",
"Nathan Whitaker",
@@ -26,12 +26,12 @@ repository = "https://github.com/davidbarsky/tracing-tree"
name = "ui"
harness = false
-[dependencies.ansi_term]
-version = "0.12"
-
[dependencies.atty]
version = "0.2"
+[dependencies.nu-ansi-term]
+version = "0.46.0"
+
[dependencies.tracing-core]
version = "0.1"
diff --git a/vendor/tracing-tree/src/format.rs b/vendor/tracing-tree/src/format.rs
index 067ea9771..895c04be2 100644
--- a/vendor/tracing-tree/src/format.rs
+++ b/vendor/tracing-tree/src/format.rs
@@ -1,4 +1,4 @@
-use ansi_term::Color;
+use nu_ansi_term::Color;
use std::{
fmt::{self, Write as _},
io,
@@ -251,7 +251,7 @@ impl<'a> fmt::Display for ColorLevel<'a> {
Level::TRACE => Color::Purple.bold().paint("TRACE"),
Level::DEBUG => Color::Blue.bold().paint("DEBUG"),
Level::INFO => Color::Green.bold().paint(" INFO"),
- Level::WARN => Color::RGB(252, 234, 160).bold().paint(" WARN"), // orange
+ Level::WARN => Color::Rgb(252, 234, 160).bold().paint(" WARN"), // orange
Level::ERROR => Color::Red.bold().paint("ERROR"),
}
.fmt(f)
diff --git a/vendor/tracing-tree/src/lib.rs b/vendor/tracing-tree/src/lib.rs
index dbacaa57e..266523b73 100644
--- a/vendor/tracing-tree/src/lib.rs
+++ b/vendor/tracing-tree/src/lib.rs
@@ -1,7 +1,7 @@
pub(crate) mod format;
-use ansi_term::{Color, Style};
use format::{Buffers, ColorLevel, Config, FmtEvent, SpanMode};
+use nu_ansi_term::{Color, Style};
use std::{
fmt::{self, Write as _},
io,
diff --git a/vendor/unicode-bidi/.cargo-checksum.json b/vendor/unicode-bidi/.cargo-checksum.json
index 6dc5a203e..ab0bb707a 100644
--- a/vendor/unicode-bidi/.cargo-checksum.json
+++ b/vendor/unicode-bidi/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"AUTHORS":"1ff3a7c8519b29544bb28ba9b1e7502df0cb764051fb9a1172e60006aa2b8dcc","COPYRIGHT":"edb20b474f6cbd4f4db066b54a9e0f687d0009d309412a63431189b59b8e2a07","Cargo.toml":"c0862499053fc6c7d7c34bbcc5443d818a6d69c3e0f56a5ec1331a87895aaa1a","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"7b63ecd5f1902af1b63729947373683c32745c16a10e8e6292e2e2dcd7e90ae0","README.md":"81d3dc6d894a68894d982760b0a907f9dcbb0da179a8063ed9de1d2257518957","src/char_data/mod.rs":"eeef418c98739eed13f993c21cb2ed872b40c31f102d437e5b37cc7a97aeb5de","src/char_data/tables.rs":"ca8f4f579fe2ef81cb39b13ef6ea3c0bc78cdf1c51668a25ccc2447adf1c95e4","src/data_source.rs":"4c7831c47c85eaf0d7412ed3e6baea667ba3b44f87ddffef4e7b76b5bb5b1272","src/deprecated.rs":"3c8b465b827a487df7945cb928e1eae98f9929b71c4d0c99511a470704455a8c","src/explicit.rs":"808aec21bcaa86cb00721b3f61f5f98c3eba40fcc6bb3f8a18739ce6a35b7c1c","src/format_chars.rs":"678399fec3f4bfaf4093f38cfdb8956288313386dc3511dab9fb58164e8dc01b","src/implicit.rs":"167be5386315acefbe8a38918b1850b80beccc7485f759d12bc3f5b2c6870c21","src/level.rs":"9bf4943fe3f6c134640e37acbf1bfcaf8b074739c7625736823bf0bc185e1e0d","src/lib.rs":"671b8b4339f1ddcc262df0a3f774c2874e0b2428269dcecf63a003b7848fee9f","src/prepare.rs":"94b74379faa4d8d8d905ea848292a92ca3d93bef1a428fd92afe262b54912c10"},"package":"099b7128301d285f79ddd55b9a83d5e6b9e97c92e0ea0daebee7263e932de992"} \ No newline at end of file
+{"files":{"AUTHORS":"1ff3a7c8519b29544bb28ba9b1e7502df0cb764051fb9a1172e60006aa2b8dcc","COPYRIGHT":"edb20b474f6cbd4f4db066b54a9e0f687d0009d309412a63431189b59b8e2a07","Cargo.toml":"bac3b9c34e93bfab34060c48f493cd82eece9ae99d9b1c98df1d875a8266bfff","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"7b63ecd5f1902af1b63729947373683c32745c16a10e8e6292e2e2dcd7e90ae0","README.md":"81d3dc6d894a68894d982760b0a907f9dcbb0da179a8063ed9de1d2257518957","src/char_data/mod.rs":"0622df8ce4b4de60aea7e4787635d3187f79f7a3f9001e3d209f58fd07d03887","src/char_data/tables.rs":"50faf4eef73c831a38b735309ff3415e9f65992a0474ff5c055138f91c91ee16","src/data_source.rs":"36fa0785e51c549c1f72f09040cfe515b848d1b23fb30d469770a6b4b17b49df","src/deprecated.rs":"3c8b465b827a487df7945cb928e1eae98f9929b71c4d0c99511a470704455a8c","src/explicit.rs":"53428d618aef86c6790d195eb9477f09decc396772581427241d34139f886517","src/format_chars.rs":"678399fec3f4bfaf4093f38cfdb8956288313386dc3511dab9fb58164e8dc01b","src/implicit.rs":"454f35f0803ae7d1d7fdb2d1fb0cd675dd83c5dd92a8d31445847a5a2a16b6bf","src/level.rs":"9bf4943fe3f6c134640e37acbf1bfcaf8b074739c7625736823bf0bc185e1e0d","src/lib.rs":"33830f404ebdf3a0561c415ed3f0e5d8739e749db27f21f9e2644031afa511fa","src/prepare.rs":"7aa46ba8d0448a34be704cc3a1f49bc52ddfce62fa66af65618c2ac94cb88a4e"},"package":"d54675592c1dbefd78cbd98db9bacd89886e1ca50692a0692baefffdeb92dd58"} \ No newline at end of file
diff --git a/vendor/unicode-bidi/Cargo.toml b/vendor/unicode-bidi/Cargo.toml
index 9a8ca4a32..02ea8fabf 100644
--- a/vendor/unicode-bidi/Cargo.toml
+++ b/vendor/unicode-bidi/Cargo.toml
@@ -12,7 +12,7 @@
[package]
edition = "2018"
name = "unicode-bidi"
-version = "0.3.8"
+version = "0.3.10"
authors = ["The Servo Project Developers"]
exclude = [
"benches/**",
@@ -42,6 +42,11 @@ repository = "https://github.com/servo/unicode-bidi"
[lib]
name = "unicode_bidi"
+[[test]]
+name = "conformance_tests"
+path = "tests/conformance_tests.rs"
+required-features = ["hardcoded-data"]
+
[dependencies.flame]
version = "0.2"
optional = true
diff --git a/vendor/unicode-bidi/src/char_data/mod.rs b/vendor/unicode-bidi/src/char_data/mod.rs
index a8b452788..4edf5b8f4 100644
--- a/vendor/unicode-bidi/src/char_data/mod.rs
+++ b/vendor/unicode-bidi/src/char_data/mod.rs
@@ -19,10 +19,10 @@ use core::cmp::Ordering::{Equal, Greater, Less};
#[cfg(feature = "hardcoded-data")]
use self::tables::bidi_class_table;
+use crate::data_source::BidiMatchedOpeningBracket;
use crate::BidiClass::*;
#[cfg(feature = "hardcoded-data")]
use crate::BidiDataSource;
-
/// Hardcoded Bidi data that ships with the unicode-bidi crate.
///
/// This can be enabled with the default `hardcoded-data` Cargo feature.
@@ -42,6 +42,22 @@ pub fn bidi_class(c: char) -> BidiClass {
bsearch_range_value_table(c, bidi_class_table)
}
+/// If this character is a bracket according to BidiBrackets.txt,
+/// return the corresponding *normalized* *opening bracket* of the pair,
+/// and whether or not it itself is an opening bracket.
+pub(crate) fn bidi_matched_opening_bracket(c: char) -> Option<BidiMatchedOpeningBracket> {
+ for pair in self::tables::bidi_pairs_table {
+ if pair.0 == c || pair.1 == c {
+ let skeleton = pair.2.unwrap_or(pair.0);
+ return Some(BidiMatchedOpeningBracket {
+ opening: skeleton,
+ is_open: pair.0 == c,
+ });
+ }
+ }
+ None
+}
+
pub fn is_rtl(bidi_class: BidiClass) -> bool {
match bidi_class {
RLE | RLO | RLI => true,
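The new bidi_matched_opening_bracket helper above keys both halves of a bracket pair on the *normalized* opening bracket; for example, the deprecated angle brackets U+2329/U+232A both map to U+3008 (see the `Some('\u{3008}')` entry in the pairs table later in this diff). A rough usage sketch through the public trait (editor's illustration), assuming `BidiDataSource` and `HardcodedBidiData` are re-exported at the crate root and the `hardcoded-data` feature is enabled:

    use unicode_bidi::{BidiDataSource, HardcodedBidiData};

    fn main() {
        // Both halves of the U+2329/U+232A pair report the same normalized
        // opening bracket; only `is_open` distinguishes them.
        let open = HardcodedBidiData.bidi_matched_opening_bracket('\u{2329}').unwrap();
        let close = HardcodedBidiData.bidi_matched_opening_bracket('\u{232a}').unwrap();
        assert_eq!(open.opening, '\u{3008}');
        assert_eq!(close.opening, '\u{3008}');
        assert!(open.is_open && !close.is_open);
    }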
diff --git a/vendor/unicode-bidi/src/char_data/tables.rs b/vendor/unicode-bidi/src/char_data/tables.rs
index 502ae9e90..ecdcf496d 100644
--- a/vendor/unicode-bidi/src/char_data/tables.rs
+++ b/vendor/unicode-bidi/src/char_data/tables.rs
@@ -5,7 +5,7 @@
#![cfg_attr(rustfmt, rustfmt_skip)]
/// The [Unicode version](http://www.unicode.org/versions/) of data
-pub const UNICODE_VERSION: (u64, u64, u64) = (14, 0, 0);
+pub const UNICODE_VERSION: (u64, u64, u64) = (15, 0, 0);
#[allow(non_camel_case_types)]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
@@ -140,7 +140,7 @@ pub const bidi_class_table: &'static [(char, char, BidiClass)] = &[
'\u{cb9}', L), ('\u{cbc}', '\u{cbc}', NSM), ('\u{cbd}', '\u{cc4}', L), ('\u{cc6}', '\u{cc8}',
L), ('\u{cca}', '\u{ccb}', L), ('\u{ccc}', '\u{ccd}', NSM), ('\u{cd5}', '\u{cd6}', L),
('\u{cdd}', '\u{cde}', L), ('\u{ce0}', '\u{ce1}', L), ('\u{ce2}', '\u{ce3}', NSM), ('\u{ce6}',
- '\u{cef}', L), ('\u{cf1}', '\u{cf2}', L), ('\u{d00}', '\u{d01}', NSM), ('\u{d02}', '\u{d0c}',
+ '\u{cef}', L), ('\u{cf1}', '\u{cf3}', L), ('\u{d00}', '\u{d01}', NSM), ('\u{d02}', '\u{d0c}',
L), ('\u{d0e}', '\u{d10}', L), ('\u{d12}', '\u{d3a}', L), ('\u{d3b}', '\u{d3c}', NSM),
('\u{d3d}', '\u{d40}', L), ('\u{d41}', '\u{d44}', NSM), ('\u{d46}', '\u{d48}', L), ('\u{d4a}',
'\u{d4c}', L), ('\u{d4d}', '\u{d4d}', NSM), ('\u{d4e}', '\u{d4f}', L), ('\u{d54}', '\u{d61}',
@@ -154,7 +154,7 @@ pub const bidi_class_table: &'static [(char, char, BidiClass)] = &[
('\u{e4f}', '\u{e5b}', L), ('\u{e81}', '\u{e82}', L), ('\u{e84}', '\u{e84}', L), ('\u{e86}',
'\u{e8a}', L), ('\u{e8c}', '\u{ea3}', L), ('\u{ea5}', '\u{ea5}', L), ('\u{ea7}', '\u{eb0}', L),
('\u{eb1}', '\u{eb1}', NSM), ('\u{eb2}', '\u{eb3}', L), ('\u{eb4}', '\u{ebc}', NSM), ('\u{ebd}',
- '\u{ebd}', L), ('\u{ec0}', '\u{ec4}', L), ('\u{ec6}', '\u{ec6}', L), ('\u{ec8}', '\u{ecd}',
+ '\u{ebd}', L), ('\u{ec0}', '\u{ec4}', L), ('\u{ec6}', '\u{ec6}', L), ('\u{ec8}', '\u{ece}',
NSM), ('\u{ed0}', '\u{ed9}', L), ('\u{edc}', '\u{edf}', L), ('\u{f00}', '\u{f17}', L),
('\u{f18}', '\u{f19}', NSM), ('\u{f1a}', '\u{f34}', L), ('\u{f35}', '\u{f35}', NSM), ('\u{f36}',
'\u{f36}', L), ('\u{f37}', '\u{f37}', NSM), ('\u{f38}', '\u{f38}', L), ('\u{f39}', '\u{f39}',
@@ -361,96 +361,102 @@ pub const bidi_class_table: &'static [(char, char, BidiClass)] = &[
'\u{10cff}', R), ('\u{10d00}', '\u{10d23}', AL), ('\u{10d24}', '\u{10d27}', NSM), ('\u{10d28}',
'\u{10d2f}', R), ('\u{10d30}', '\u{10d39}', AN), ('\u{10d3a}', '\u{10e5f}', R), ('\u{10e60}',
'\u{10e7e}', AN), ('\u{10e7f}', '\u{10eaa}', R), ('\u{10eab}', '\u{10eac}', NSM), ('\u{10ead}',
- '\u{10f2f}', R), ('\u{10f30}', '\u{10f45}', AL), ('\u{10f46}', '\u{10f50}', NSM), ('\u{10f51}',
- '\u{10f59}', AL), ('\u{10f5a}', '\u{10f81}', R), ('\u{10f82}', '\u{10f85}', NSM), ('\u{10f86}',
- '\u{10fff}', R), ('\u{11000}', '\u{11000}', L), ('\u{11001}', '\u{11001}', NSM), ('\u{11002}',
- '\u{11037}', L), ('\u{11038}', '\u{11046}', NSM), ('\u{11047}', '\u{1104d}', L), ('\u{11052}',
- '\u{11065}', ON), ('\u{11066}', '\u{1106f}', L), ('\u{11070}', '\u{11070}', NSM), ('\u{11071}',
- '\u{11072}', L), ('\u{11073}', '\u{11074}', NSM), ('\u{11075}', '\u{11075}', L), ('\u{1107f}',
- '\u{11081}', NSM), ('\u{11082}', '\u{110b2}', L), ('\u{110b3}', '\u{110b6}', NSM), ('\u{110b7}',
- '\u{110b8}', L), ('\u{110b9}', '\u{110ba}', NSM), ('\u{110bb}', '\u{110c1}', L), ('\u{110c2}',
- '\u{110c2}', NSM), ('\u{110cd}', '\u{110cd}', L), ('\u{110d0}', '\u{110e8}', L), ('\u{110f0}',
- '\u{110f9}', L), ('\u{11100}', '\u{11102}', NSM), ('\u{11103}', '\u{11126}', L), ('\u{11127}',
- '\u{1112b}', NSM), ('\u{1112c}', '\u{1112c}', L), ('\u{1112d}', '\u{11134}', NSM), ('\u{11136}',
- '\u{11147}', L), ('\u{11150}', '\u{11172}', L), ('\u{11173}', '\u{11173}', NSM), ('\u{11174}',
- '\u{11176}', L), ('\u{11180}', '\u{11181}', NSM), ('\u{11182}', '\u{111b5}', L), ('\u{111b6}',
- '\u{111be}', NSM), ('\u{111bf}', '\u{111c8}', L), ('\u{111c9}', '\u{111cc}', NSM), ('\u{111cd}',
- '\u{111ce}', L), ('\u{111cf}', '\u{111cf}', NSM), ('\u{111d0}', '\u{111df}', L), ('\u{111e1}',
- '\u{111f4}', L), ('\u{11200}', '\u{11211}', L), ('\u{11213}', '\u{1122e}', L), ('\u{1122f}',
- '\u{11231}', NSM), ('\u{11232}', '\u{11233}', L), ('\u{11234}', '\u{11234}', NSM), ('\u{11235}',
- '\u{11235}', L), ('\u{11236}', '\u{11237}', NSM), ('\u{11238}', '\u{1123d}', L), ('\u{1123e}',
- '\u{1123e}', NSM), ('\u{11280}', '\u{11286}', L), ('\u{11288}', '\u{11288}', L), ('\u{1128a}',
- '\u{1128d}', L), ('\u{1128f}', '\u{1129d}', L), ('\u{1129f}', '\u{112a9}', L), ('\u{112b0}',
- '\u{112de}', L), ('\u{112df}', '\u{112df}', NSM), ('\u{112e0}', '\u{112e2}', L), ('\u{112e3}',
- '\u{112ea}', NSM), ('\u{112f0}', '\u{112f9}', L), ('\u{11300}', '\u{11301}', NSM), ('\u{11302}',
- '\u{11303}', L), ('\u{11305}', '\u{1130c}', L), ('\u{1130f}', '\u{11310}', L), ('\u{11313}',
- '\u{11328}', L), ('\u{1132a}', '\u{11330}', L), ('\u{11332}', '\u{11333}', L), ('\u{11335}',
- '\u{11339}', L), ('\u{1133b}', '\u{1133c}', NSM), ('\u{1133d}', '\u{1133f}', L), ('\u{11340}',
- '\u{11340}', NSM), ('\u{11341}', '\u{11344}', L), ('\u{11347}', '\u{11348}', L), ('\u{1134b}',
- '\u{1134d}', L), ('\u{11350}', '\u{11350}', L), ('\u{11357}', '\u{11357}', L), ('\u{1135d}',
- '\u{11363}', L), ('\u{11366}', '\u{1136c}', NSM), ('\u{11370}', '\u{11374}', NSM), ('\u{11400}',
- '\u{11437}', L), ('\u{11438}', '\u{1143f}', NSM), ('\u{11440}', '\u{11441}', L), ('\u{11442}',
- '\u{11444}', NSM), ('\u{11445}', '\u{11445}', L), ('\u{11446}', '\u{11446}', NSM), ('\u{11447}',
- '\u{1145b}', L), ('\u{1145d}', '\u{1145d}', L), ('\u{1145e}', '\u{1145e}', NSM), ('\u{1145f}',
- '\u{11461}', L), ('\u{11480}', '\u{114b2}', L), ('\u{114b3}', '\u{114b8}', NSM), ('\u{114b9}',
- '\u{114b9}', L), ('\u{114ba}', '\u{114ba}', NSM), ('\u{114bb}', '\u{114be}', L), ('\u{114bf}',
- '\u{114c0}', NSM), ('\u{114c1}', '\u{114c1}', L), ('\u{114c2}', '\u{114c3}', NSM), ('\u{114c4}',
- '\u{114c7}', L), ('\u{114d0}', '\u{114d9}', L), ('\u{11580}', '\u{115b1}', L), ('\u{115b2}',
- '\u{115b5}', NSM), ('\u{115b8}', '\u{115bb}', L), ('\u{115bc}', '\u{115bd}', NSM), ('\u{115be}',
- '\u{115be}', L), ('\u{115bf}', '\u{115c0}', NSM), ('\u{115c1}', '\u{115db}', L), ('\u{115dc}',
- '\u{115dd}', NSM), ('\u{11600}', '\u{11632}', L), ('\u{11633}', '\u{1163a}', NSM), ('\u{1163b}',
- '\u{1163c}', L), ('\u{1163d}', '\u{1163d}', NSM), ('\u{1163e}', '\u{1163e}', L), ('\u{1163f}',
- '\u{11640}', NSM), ('\u{11641}', '\u{11644}', L), ('\u{11650}', '\u{11659}', L), ('\u{11660}',
- '\u{1166c}', ON), ('\u{11680}', '\u{116aa}', L), ('\u{116ab}', '\u{116ab}', NSM), ('\u{116ac}',
- '\u{116ac}', L), ('\u{116ad}', '\u{116ad}', NSM), ('\u{116ae}', '\u{116af}', L), ('\u{116b0}',
- '\u{116b5}', NSM), ('\u{116b6}', '\u{116b6}', L), ('\u{116b7}', '\u{116b7}', NSM), ('\u{116b8}',
- '\u{116b9}', L), ('\u{116c0}', '\u{116c9}', L), ('\u{11700}', '\u{1171a}', L), ('\u{1171d}',
- '\u{1171f}', NSM), ('\u{11720}', '\u{11721}', L), ('\u{11722}', '\u{11725}', NSM), ('\u{11726}',
- '\u{11726}', L), ('\u{11727}', '\u{1172b}', NSM), ('\u{11730}', '\u{11746}', L), ('\u{11800}',
- '\u{1182e}', L), ('\u{1182f}', '\u{11837}', NSM), ('\u{11838}', '\u{11838}', L), ('\u{11839}',
- '\u{1183a}', NSM), ('\u{1183b}', '\u{1183b}', L), ('\u{118a0}', '\u{118f2}', L), ('\u{118ff}',
- '\u{11906}', L), ('\u{11909}', '\u{11909}', L), ('\u{1190c}', '\u{11913}', L), ('\u{11915}',
- '\u{11916}', L), ('\u{11918}', '\u{11935}', L), ('\u{11937}', '\u{11938}', L), ('\u{1193b}',
- '\u{1193c}', NSM), ('\u{1193d}', '\u{1193d}', L), ('\u{1193e}', '\u{1193e}', NSM), ('\u{1193f}',
- '\u{11942}', L), ('\u{11943}', '\u{11943}', NSM), ('\u{11944}', '\u{11946}', L), ('\u{11950}',
- '\u{11959}', L), ('\u{119a0}', '\u{119a7}', L), ('\u{119aa}', '\u{119d3}', L), ('\u{119d4}',
- '\u{119d7}', NSM), ('\u{119da}', '\u{119db}', NSM), ('\u{119dc}', '\u{119df}', L), ('\u{119e0}',
- '\u{119e0}', NSM), ('\u{119e1}', '\u{119e4}', L), ('\u{11a00}', '\u{11a00}', L), ('\u{11a01}',
- '\u{11a06}', NSM), ('\u{11a07}', '\u{11a08}', L), ('\u{11a09}', '\u{11a0a}', NSM), ('\u{11a0b}',
- '\u{11a32}', L), ('\u{11a33}', '\u{11a38}', NSM), ('\u{11a39}', '\u{11a3a}', L), ('\u{11a3b}',
- '\u{11a3e}', NSM), ('\u{11a3f}', '\u{11a46}', L), ('\u{11a47}', '\u{11a47}', NSM), ('\u{11a50}',
- '\u{11a50}', L), ('\u{11a51}', '\u{11a56}', NSM), ('\u{11a57}', '\u{11a58}', L), ('\u{11a59}',
- '\u{11a5b}', NSM), ('\u{11a5c}', '\u{11a89}', L), ('\u{11a8a}', '\u{11a96}', NSM), ('\u{11a97}',
- '\u{11a97}', L), ('\u{11a98}', '\u{11a99}', NSM), ('\u{11a9a}', '\u{11aa2}', L), ('\u{11ab0}',
- '\u{11af8}', L), ('\u{11c00}', '\u{11c08}', L), ('\u{11c0a}', '\u{11c2f}', L), ('\u{11c30}',
- '\u{11c36}', NSM), ('\u{11c38}', '\u{11c3d}', NSM), ('\u{11c3e}', '\u{11c45}', L), ('\u{11c50}',
- '\u{11c6c}', L), ('\u{11c70}', '\u{11c8f}', L), ('\u{11c92}', '\u{11ca7}', NSM), ('\u{11ca9}',
- '\u{11ca9}', L), ('\u{11caa}', '\u{11cb0}', NSM), ('\u{11cb1}', '\u{11cb1}', L), ('\u{11cb2}',
- '\u{11cb3}', NSM), ('\u{11cb4}', '\u{11cb4}', L), ('\u{11cb5}', '\u{11cb6}', NSM), ('\u{11d00}',
- '\u{11d06}', L), ('\u{11d08}', '\u{11d09}', L), ('\u{11d0b}', '\u{11d30}', L), ('\u{11d31}',
- '\u{11d36}', NSM), ('\u{11d3a}', '\u{11d3a}', NSM), ('\u{11d3c}', '\u{11d3d}', NSM),
- ('\u{11d3f}', '\u{11d45}', NSM), ('\u{11d46}', '\u{11d46}', L), ('\u{11d47}', '\u{11d47}', NSM),
- ('\u{11d50}', '\u{11d59}', L), ('\u{11d60}', '\u{11d65}', L), ('\u{11d67}', '\u{11d68}', L),
- ('\u{11d6a}', '\u{11d8e}', L), ('\u{11d90}', '\u{11d91}', NSM), ('\u{11d93}', '\u{11d94}', L),
- ('\u{11d95}', '\u{11d95}', NSM), ('\u{11d96}', '\u{11d96}', L), ('\u{11d97}', '\u{11d97}', NSM),
- ('\u{11d98}', '\u{11d98}', L), ('\u{11da0}', '\u{11da9}', L), ('\u{11ee0}', '\u{11ef2}', L),
- ('\u{11ef3}', '\u{11ef4}', NSM), ('\u{11ef5}', '\u{11ef8}', L), ('\u{11fb0}', '\u{11fb0}', L),
- ('\u{11fc0}', '\u{11fd4}', L), ('\u{11fd5}', '\u{11fdc}', ON), ('\u{11fdd}', '\u{11fe0}', ET),
- ('\u{11fe1}', '\u{11ff1}', ON), ('\u{11fff}', '\u{12399}', L), ('\u{12400}', '\u{1246e}', L),
- ('\u{12470}', '\u{12474}', L), ('\u{12480}', '\u{12543}', L), ('\u{12f90}', '\u{12ff2}', L),
- ('\u{13000}', '\u{1342e}', L), ('\u{13430}', '\u{13438}', L), ('\u{14400}', '\u{14646}', L),
- ('\u{16800}', '\u{16a38}', L), ('\u{16a40}', '\u{16a5e}', L), ('\u{16a60}', '\u{16a69}', L),
- ('\u{16a6e}', '\u{16abe}', L), ('\u{16ac0}', '\u{16ac9}', L), ('\u{16ad0}', '\u{16aed}', L),
- ('\u{16af0}', '\u{16af4}', NSM), ('\u{16af5}', '\u{16af5}', L), ('\u{16b00}', '\u{16b2f}', L),
- ('\u{16b30}', '\u{16b36}', NSM), ('\u{16b37}', '\u{16b45}', L), ('\u{16b50}', '\u{16b59}', L),
- ('\u{16b5b}', '\u{16b61}', L), ('\u{16b63}', '\u{16b77}', L), ('\u{16b7d}', '\u{16b8f}', L),
- ('\u{16e40}', '\u{16e9a}', L), ('\u{16f00}', '\u{16f4a}', L), ('\u{16f4f}', '\u{16f4f}', NSM),
- ('\u{16f50}', '\u{16f87}', L), ('\u{16f8f}', '\u{16f92}', NSM), ('\u{16f93}', '\u{16f9f}', L),
- ('\u{16fe0}', '\u{16fe1}', L), ('\u{16fe2}', '\u{16fe2}', ON), ('\u{16fe3}', '\u{16fe3}', L),
- ('\u{16fe4}', '\u{16fe4}', NSM), ('\u{16ff0}', '\u{16ff1}', L), ('\u{17000}', '\u{187f7}', L),
- ('\u{18800}', '\u{18cd5}', L), ('\u{18d00}', '\u{18d08}', L), ('\u{1aff0}', '\u{1aff3}', L),
- ('\u{1aff5}', '\u{1affb}', L), ('\u{1affd}', '\u{1affe}', L), ('\u{1b000}', '\u{1b122}', L),
- ('\u{1b150}', '\u{1b152}', L), ('\u{1b164}', '\u{1b167}', L), ('\u{1b170}', '\u{1b2fb}', L),
+ '\u{10efc}', R), ('\u{10efd}', '\u{10eff}', NSM), ('\u{10f00}', '\u{10f2f}', R), ('\u{10f30}',
+ '\u{10f45}', AL), ('\u{10f46}', '\u{10f50}', NSM), ('\u{10f51}', '\u{10f59}', AL), ('\u{10f5a}',
+ '\u{10f81}', R), ('\u{10f82}', '\u{10f85}', NSM), ('\u{10f86}', '\u{10fff}', R), ('\u{11000}',
+ '\u{11000}', L), ('\u{11001}', '\u{11001}', NSM), ('\u{11002}', '\u{11037}', L), ('\u{11038}',
+ '\u{11046}', NSM), ('\u{11047}', '\u{1104d}', L), ('\u{11052}', '\u{11065}', ON), ('\u{11066}',
+ '\u{1106f}', L), ('\u{11070}', '\u{11070}', NSM), ('\u{11071}', '\u{11072}', L), ('\u{11073}',
+ '\u{11074}', NSM), ('\u{11075}', '\u{11075}', L), ('\u{1107f}', '\u{11081}', NSM), ('\u{11082}',
+ '\u{110b2}', L), ('\u{110b3}', '\u{110b6}', NSM), ('\u{110b7}', '\u{110b8}', L), ('\u{110b9}',
+ '\u{110ba}', NSM), ('\u{110bb}', '\u{110c1}', L), ('\u{110c2}', '\u{110c2}', NSM), ('\u{110cd}',
+ '\u{110cd}', L), ('\u{110d0}', '\u{110e8}', L), ('\u{110f0}', '\u{110f9}', L), ('\u{11100}',
+ '\u{11102}', NSM), ('\u{11103}', '\u{11126}', L), ('\u{11127}', '\u{1112b}', NSM), ('\u{1112c}',
+ '\u{1112c}', L), ('\u{1112d}', '\u{11134}', NSM), ('\u{11136}', '\u{11147}', L), ('\u{11150}',
+ '\u{11172}', L), ('\u{11173}', '\u{11173}', NSM), ('\u{11174}', '\u{11176}', L), ('\u{11180}',
+ '\u{11181}', NSM), ('\u{11182}', '\u{111b5}', L), ('\u{111b6}', '\u{111be}', NSM), ('\u{111bf}',
+ '\u{111c8}', L), ('\u{111c9}', '\u{111cc}', NSM), ('\u{111cd}', '\u{111ce}', L), ('\u{111cf}',
+ '\u{111cf}', NSM), ('\u{111d0}', '\u{111df}', L), ('\u{111e1}', '\u{111f4}', L), ('\u{11200}',
+ '\u{11211}', L), ('\u{11213}', '\u{1122e}', L), ('\u{1122f}', '\u{11231}', NSM), ('\u{11232}',
+ '\u{11233}', L), ('\u{11234}', '\u{11234}', NSM), ('\u{11235}', '\u{11235}', L), ('\u{11236}',
+ '\u{11237}', NSM), ('\u{11238}', '\u{1123d}', L), ('\u{1123e}', '\u{1123e}', NSM), ('\u{1123f}',
+ '\u{11240}', L), ('\u{11241}', '\u{11241}', NSM), ('\u{11280}', '\u{11286}', L), ('\u{11288}',
+ '\u{11288}', L), ('\u{1128a}', '\u{1128d}', L), ('\u{1128f}', '\u{1129d}', L), ('\u{1129f}',
+ '\u{112a9}', L), ('\u{112b0}', '\u{112de}', L), ('\u{112df}', '\u{112df}', NSM), ('\u{112e0}',
+ '\u{112e2}', L), ('\u{112e3}', '\u{112ea}', NSM), ('\u{112f0}', '\u{112f9}', L), ('\u{11300}',
+ '\u{11301}', NSM), ('\u{11302}', '\u{11303}', L), ('\u{11305}', '\u{1130c}', L), ('\u{1130f}',
+ '\u{11310}', L), ('\u{11313}', '\u{11328}', L), ('\u{1132a}', '\u{11330}', L), ('\u{11332}',
+ '\u{11333}', L), ('\u{11335}', '\u{11339}', L), ('\u{1133b}', '\u{1133c}', NSM), ('\u{1133d}',
+ '\u{1133f}', L), ('\u{11340}', '\u{11340}', NSM), ('\u{11341}', '\u{11344}', L), ('\u{11347}',
+ '\u{11348}', L), ('\u{1134b}', '\u{1134d}', L), ('\u{11350}', '\u{11350}', L), ('\u{11357}',
+ '\u{11357}', L), ('\u{1135d}', '\u{11363}', L), ('\u{11366}', '\u{1136c}', NSM), ('\u{11370}',
+ '\u{11374}', NSM), ('\u{11400}', '\u{11437}', L), ('\u{11438}', '\u{1143f}', NSM), ('\u{11440}',
+ '\u{11441}', L), ('\u{11442}', '\u{11444}', NSM), ('\u{11445}', '\u{11445}', L), ('\u{11446}',
+ '\u{11446}', NSM), ('\u{11447}', '\u{1145b}', L), ('\u{1145d}', '\u{1145d}', L), ('\u{1145e}',
+ '\u{1145e}', NSM), ('\u{1145f}', '\u{11461}', L), ('\u{11480}', '\u{114b2}', L), ('\u{114b3}',
+ '\u{114b8}', NSM), ('\u{114b9}', '\u{114b9}', L), ('\u{114ba}', '\u{114ba}', NSM), ('\u{114bb}',
+ '\u{114be}', L), ('\u{114bf}', '\u{114c0}', NSM), ('\u{114c1}', '\u{114c1}', L), ('\u{114c2}',
+ '\u{114c3}', NSM), ('\u{114c4}', '\u{114c7}', L), ('\u{114d0}', '\u{114d9}', L), ('\u{11580}',
+ '\u{115b1}', L), ('\u{115b2}', '\u{115b5}', NSM), ('\u{115b8}', '\u{115bb}', L), ('\u{115bc}',
+ '\u{115bd}', NSM), ('\u{115be}', '\u{115be}', L), ('\u{115bf}', '\u{115c0}', NSM), ('\u{115c1}',
+ '\u{115db}', L), ('\u{115dc}', '\u{115dd}', NSM), ('\u{11600}', '\u{11632}', L), ('\u{11633}',
+ '\u{1163a}', NSM), ('\u{1163b}', '\u{1163c}', L), ('\u{1163d}', '\u{1163d}', NSM), ('\u{1163e}',
+ '\u{1163e}', L), ('\u{1163f}', '\u{11640}', NSM), ('\u{11641}', '\u{11644}', L), ('\u{11650}',
+ '\u{11659}', L), ('\u{11660}', '\u{1166c}', ON), ('\u{11680}', '\u{116aa}', L), ('\u{116ab}',
+ '\u{116ab}', NSM), ('\u{116ac}', '\u{116ac}', L), ('\u{116ad}', '\u{116ad}', NSM), ('\u{116ae}',
+ '\u{116af}', L), ('\u{116b0}', '\u{116b5}', NSM), ('\u{116b6}', '\u{116b6}', L), ('\u{116b7}',
+ '\u{116b7}', NSM), ('\u{116b8}', '\u{116b9}', L), ('\u{116c0}', '\u{116c9}', L), ('\u{11700}',
+ '\u{1171a}', L), ('\u{1171d}', '\u{1171f}', NSM), ('\u{11720}', '\u{11721}', L), ('\u{11722}',
+ '\u{11725}', NSM), ('\u{11726}', '\u{11726}', L), ('\u{11727}', '\u{1172b}', NSM), ('\u{11730}',
+ '\u{11746}', L), ('\u{11800}', '\u{1182e}', L), ('\u{1182f}', '\u{11837}', NSM), ('\u{11838}',
+ '\u{11838}', L), ('\u{11839}', '\u{1183a}', NSM), ('\u{1183b}', '\u{1183b}', L), ('\u{118a0}',
+ '\u{118f2}', L), ('\u{118ff}', '\u{11906}', L), ('\u{11909}', '\u{11909}', L), ('\u{1190c}',
+ '\u{11913}', L), ('\u{11915}', '\u{11916}', L), ('\u{11918}', '\u{11935}', L), ('\u{11937}',
+ '\u{11938}', L), ('\u{1193b}', '\u{1193c}', NSM), ('\u{1193d}', '\u{1193d}', L), ('\u{1193e}',
+ '\u{1193e}', NSM), ('\u{1193f}', '\u{11942}', L), ('\u{11943}', '\u{11943}', NSM), ('\u{11944}',
+ '\u{11946}', L), ('\u{11950}', '\u{11959}', L), ('\u{119a0}', '\u{119a7}', L), ('\u{119aa}',
+ '\u{119d3}', L), ('\u{119d4}', '\u{119d7}', NSM), ('\u{119da}', '\u{119db}', NSM), ('\u{119dc}',
+ '\u{119df}', L), ('\u{119e0}', '\u{119e0}', NSM), ('\u{119e1}', '\u{119e4}', L), ('\u{11a00}',
+ '\u{11a00}', L), ('\u{11a01}', '\u{11a06}', NSM), ('\u{11a07}', '\u{11a08}', L), ('\u{11a09}',
+ '\u{11a0a}', NSM), ('\u{11a0b}', '\u{11a32}', L), ('\u{11a33}', '\u{11a38}', NSM), ('\u{11a39}',
+ '\u{11a3a}', L), ('\u{11a3b}', '\u{11a3e}', NSM), ('\u{11a3f}', '\u{11a46}', L), ('\u{11a47}',
+ '\u{11a47}', NSM), ('\u{11a50}', '\u{11a50}', L), ('\u{11a51}', '\u{11a56}', NSM), ('\u{11a57}',
+ '\u{11a58}', L), ('\u{11a59}', '\u{11a5b}', NSM), ('\u{11a5c}', '\u{11a89}', L), ('\u{11a8a}',
+ '\u{11a96}', NSM), ('\u{11a97}', '\u{11a97}', L), ('\u{11a98}', '\u{11a99}', NSM), ('\u{11a9a}',
+ '\u{11aa2}', L), ('\u{11ab0}', '\u{11af8}', L), ('\u{11b00}', '\u{11b09}', L), ('\u{11c00}',
+ '\u{11c08}', L), ('\u{11c0a}', '\u{11c2f}', L), ('\u{11c30}', '\u{11c36}', NSM), ('\u{11c38}',
+ '\u{11c3d}', NSM), ('\u{11c3e}', '\u{11c45}', L), ('\u{11c50}', '\u{11c6c}', L), ('\u{11c70}',
+ '\u{11c8f}', L), ('\u{11c92}', '\u{11ca7}', NSM), ('\u{11ca9}', '\u{11ca9}', L), ('\u{11caa}',
+ '\u{11cb0}', NSM), ('\u{11cb1}', '\u{11cb1}', L), ('\u{11cb2}', '\u{11cb3}', NSM), ('\u{11cb4}',
+ '\u{11cb4}', L), ('\u{11cb5}', '\u{11cb6}', NSM), ('\u{11d00}', '\u{11d06}', L), ('\u{11d08}',
+ '\u{11d09}', L), ('\u{11d0b}', '\u{11d30}', L), ('\u{11d31}', '\u{11d36}', NSM), ('\u{11d3a}',
+ '\u{11d3a}', NSM), ('\u{11d3c}', '\u{11d3d}', NSM), ('\u{11d3f}', '\u{11d45}', NSM),
+ ('\u{11d46}', '\u{11d46}', L), ('\u{11d47}', '\u{11d47}', NSM), ('\u{11d50}', '\u{11d59}', L),
+ ('\u{11d60}', '\u{11d65}', L), ('\u{11d67}', '\u{11d68}', L), ('\u{11d6a}', '\u{11d8e}', L),
+ ('\u{11d90}', '\u{11d91}', NSM), ('\u{11d93}', '\u{11d94}', L), ('\u{11d95}', '\u{11d95}', NSM),
+ ('\u{11d96}', '\u{11d96}', L), ('\u{11d97}', '\u{11d97}', NSM), ('\u{11d98}', '\u{11d98}', L),
+ ('\u{11da0}', '\u{11da9}', L), ('\u{11ee0}', '\u{11ef2}', L), ('\u{11ef3}', '\u{11ef4}', NSM),
+ ('\u{11ef5}', '\u{11ef8}', L), ('\u{11f00}', '\u{11f01}', NSM), ('\u{11f02}', '\u{11f10}', L),
+ ('\u{11f12}', '\u{11f35}', L), ('\u{11f36}', '\u{11f3a}', NSM), ('\u{11f3e}', '\u{11f3f}', L),
+ ('\u{11f40}', '\u{11f40}', NSM), ('\u{11f41}', '\u{11f41}', L), ('\u{11f42}', '\u{11f42}', NSM),
+ ('\u{11f43}', '\u{11f59}', L), ('\u{11fb0}', '\u{11fb0}', L), ('\u{11fc0}', '\u{11fd4}', L),
+ ('\u{11fd5}', '\u{11fdc}', ON), ('\u{11fdd}', '\u{11fe0}', ET), ('\u{11fe1}', '\u{11ff1}', ON),
+ ('\u{11fff}', '\u{12399}', L), ('\u{12400}', '\u{1246e}', L), ('\u{12470}', '\u{12474}', L),
+ ('\u{12480}', '\u{12543}', L), ('\u{12f90}', '\u{12ff2}', L), ('\u{13000}', '\u{1343f}', L),
+ ('\u{13440}', '\u{13440}', NSM), ('\u{13441}', '\u{13446}', L), ('\u{13447}', '\u{13455}', NSM),
+ ('\u{14400}', '\u{14646}', L), ('\u{16800}', '\u{16a38}', L), ('\u{16a40}', '\u{16a5e}', L),
+ ('\u{16a60}', '\u{16a69}', L), ('\u{16a6e}', '\u{16abe}', L), ('\u{16ac0}', '\u{16ac9}', L),
+ ('\u{16ad0}', '\u{16aed}', L), ('\u{16af0}', '\u{16af4}', NSM), ('\u{16af5}', '\u{16af5}', L),
+ ('\u{16b00}', '\u{16b2f}', L), ('\u{16b30}', '\u{16b36}', NSM), ('\u{16b37}', '\u{16b45}', L),
+ ('\u{16b50}', '\u{16b59}', L), ('\u{16b5b}', '\u{16b61}', L), ('\u{16b63}', '\u{16b77}', L),
+ ('\u{16b7d}', '\u{16b8f}', L), ('\u{16e40}', '\u{16e9a}', L), ('\u{16f00}', '\u{16f4a}', L),
+ ('\u{16f4f}', '\u{16f4f}', NSM), ('\u{16f50}', '\u{16f87}', L), ('\u{16f8f}', '\u{16f92}', NSM),
+ ('\u{16f93}', '\u{16f9f}', L), ('\u{16fe0}', '\u{16fe1}', L), ('\u{16fe2}', '\u{16fe2}', ON),
+ ('\u{16fe3}', '\u{16fe3}', L), ('\u{16fe4}', '\u{16fe4}', NSM), ('\u{16ff0}', '\u{16ff1}', L),
+ ('\u{17000}', '\u{187f7}', L), ('\u{18800}', '\u{18cd5}', L), ('\u{18d00}', '\u{18d08}', L),
+ ('\u{1aff0}', '\u{1aff3}', L), ('\u{1aff5}', '\u{1affb}', L), ('\u{1affd}', '\u{1affe}', L),
+ ('\u{1b000}', '\u{1b122}', L), ('\u{1b132}', '\u{1b132}', L), ('\u{1b150}', '\u{1b152}', L),
+ ('\u{1b155}', '\u{1b155}', L), ('\u{1b164}', '\u{1b167}', L), ('\u{1b170}', '\u{1b2fb}', L),
('\u{1bc00}', '\u{1bc6a}', L), ('\u{1bc70}', '\u{1bc7c}', L), ('\u{1bc80}', '\u{1bc88}', L),
('\u{1bc90}', '\u{1bc99}', L), ('\u{1bc9c}', '\u{1bc9c}', L), ('\u{1bc9d}', '\u{1bc9e}', NSM),
('\u{1bc9f}', '\u{1bc9f}', L), ('\u{1bca0}', '\u{1bca3}', BN), ('\u{1cf00}', '\u{1cf2d}', NSM),
@@ -460,51 +466,78 @@ pub const bidi_class_table: &'static [(char, char, BidiClass)] = &[
('\u{1d183}', '\u{1d184}', L), ('\u{1d185}', '\u{1d18b}', NSM), ('\u{1d18c}', '\u{1d1a9}', L),
('\u{1d1aa}', '\u{1d1ad}', NSM), ('\u{1d1ae}', '\u{1d1e8}', L), ('\u{1d1e9}', '\u{1d1ea}', ON),
('\u{1d200}', '\u{1d241}', ON), ('\u{1d242}', '\u{1d244}', NSM), ('\u{1d245}', '\u{1d245}', ON),
- ('\u{1d2e0}', '\u{1d2f3}', L), ('\u{1d300}', '\u{1d356}', ON), ('\u{1d360}', '\u{1d378}', L),
- ('\u{1d400}', '\u{1d454}', L), ('\u{1d456}', '\u{1d49c}', L), ('\u{1d49e}', '\u{1d49f}', L),
- ('\u{1d4a2}', '\u{1d4a2}', L), ('\u{1d4a5}', '\u{1d4a6}', L), ('\u{1d4a9}', '\u{1d4ac}', L),
- ('\u{1d4ae}', '\u{1d4b9}', L), ('\u{1d4bb}', '\u{1d4bb}', L), ('\u{1d4bd}', '\u{1d4c3}', L),
- ('\u{1d4c5}', '\u{1d505}', L), ('\u{1d507}', '\u{1d50a}', L), ('\u{1d50d}', '\u{1d514}', L),
- ('\u{1d516}', '\u{1d51c}', L), ('\u{1d51e}', '\u{1d539}', L), ('\u{1d53b}', '\u{1d53e}', L),
- ('\u{1d540}', '\u{1d544}', L), ('\u{1d546}', '\u{1d546}', L), ('\u{1d54a}', '\u{1d550}', L),
- ('\u{1d552}', '\u{1d6a5}', L), ('\u{1d6a8}', '\u{1d6da}', L), ('\u{1d6db}', '\u{1d6db}', ON),
- ('\u{1d6dc}', '\u{1d714}', L), ('\u{1d715}', '\u{1d715}', ON), ('\u{1d716}', '\u{1d74e}', L),
- ('\u{1d74f}', '\u{1d74f}', ON), ('\u{1d750}', '\u{1d788}', L), ('\u{1d789}', '\u{1d789}', ON),
- ('\u{1d78a}', '\u{1d7c2}', L), ('\u{1d7c3}', '\u{1d7c3}', ON), ('\u{1d7c4}', '\u{1d7cb}', L),
- ('\u{1d7ce}', '\u{1d7ff}', EN), ('\u{1d800}', '\u{1d9ff}', L), ('\u{1da00}', '\u{1da36}', NSM),
- ('\u{1da37}', '\u{1da3a}', L), ('\u{1da3b}', '\u{1da6c}', NSM), ('\u{1da6d}', '\u{1da74}', L),
- ('\u{1da75}', '\u{1da75}', NSM), ('\u{1da76}', '\u{1da83}', L), ('\u{1da84}', '\u{1da84}', NSM),
- ('\u{1da85}', '\u{1da8b}', L), ('\u{1da9b}', '\u{1da9f}', NSM), ('\u{1daa1}', '\u{1daaf}', NSM),
- ('\u{1df00}', '\u{1df1e}', L), ('\u{1e000}', '\u{1e006}', NSM), ('\u{1e008}', '\u{1e018}', NSM),
- ('\u{1e01b}', '\u{1e021}', NSM), ('\u{1e023}', '\u{1e024}', NSM), ('\u{1e026}', '\u{1e02a}',
- NSM), ('\u{1e100}', '\u{1e12c}', L), ('\u{1e130}', '\u{1e136}', NSM), ('\u{1e137}', '\u{1e13d}',
- L), ('\u{1e140}', '\u{1e149}', L), ('\u{1e14e}', '\u{1e14f}', L), ('\u{1e290}', '\u{1e2ad}', L),
- ('\u{1e2ae}', '\u{1e2ae}', NSM), ('\u{1e2c0}', '\u{1e2eb}', L), ('\u{1e2ec}', '\u{1e2ef}', NSM),
- ('\u{1e2f0}', '\u{1e2f9}', L), ('\u{1e2ff}', '\u{1e2ff}', ET), ('\u{1e7e0}', '\u{1e7e6}', L),
- ('\u{1e7e8}', '\u{1e7eb}', L), ('\u{1e7ed}', '\u{1e7ee}', L), ('\u{1e7f0}', '\u{1e7fe}', L),
- ('\u{1e800}', '\u{1e8cf}', R), ('\u{1e8d0}', '\u{1e8d6}', NSM), ('\u{1e8d7}', '\u{1e943}', R),
- ('\u{1e944}', '\u{1e94a}', NSM), ('\u{1e94b}', '\u{1ec70}', R), ('\u{1ec71}', '\u{1ecb4}', AL),
- ('\u{1ecb5}', '\u{1ed00}', R), ('\u{1ed01}', '\u{1ed3d}', AL), ('\u{1ed3e}', '\u{1edff}', R),
- ('\u{1ee00}', '\u{1eeef}', AL), ('\u{1eef0}', '\u{1eef1}', ON), ('\u{1eef2}', '\u{1eeff}', AL),
- ('\u{1ef00}', '\u{1efff}', R), ('\u{1f000}', '\u{1f02b}', ON), ('\u{1f030}', '\u{1f093}', ON),
- ('\u{1f0a0}', '\u{1f0ae}', ON), ('\u{1f0b1}', '\u{1f0bf}', ON), ('\u{1f0c1}', '\u{1f0cf}', ON),
- ('\u{1f0d1}', '\u{1f0f5}', ON), ('\u{1f100}', '\u{1f10a}', EN), ('\u{1f10b}', '\u{1f10f}', ON),
- ('\u{1f110}', '\u{1f12e}', L), ('\u{1f12f}', '\u{1f12f}', ON), ('\u{1f130}', '\u{1f169}', L),
- ('\u{1f16a}', '\u{1f16f}', ON), ('\u{1f170}', '\u{1f1ac}', L), ('\u{1f1ad}', '\u{1f1ad}', ON),
- ('\u{1f1e6}', '\u{1f202}', L), ('\u{1f210}', '\u{1f23b}', L), ('\u{1f240}', '\u{1f248}', L),
- ('\u{1f250}', '\u{1f251}', L), ('\u{1f260}', '\u{1f265}', ON), ('\u{1f300}', '\u{1f6d7}', ON),
- ('\u{1f6dd}', '\u{1f6ec}', ON), ('\u{1f6f0}', '\u{1f6fc}', ON), ('\u{1f700}', '\u{1f773}', ON),
- ('\u{1f780}', '\u{1f7d8}', ON), ('\u{1f7e0}', '\u{1f7eb}', ON), ('\u{1f7f0}', '\u{1f7f0}', ON),
- ('\u{1f800}', '\u{1f80b}', ON), ('\u{1f810}', '\u{1f847}', ON), ('\u{1f850}', '\u{1f859}', ON),
- ('\u{1f860}', '\u{1f887}', ON), ('\u{1f890}', '\u{1f8ad}', ON), ('\u{1f8b0}', '\u{1f8b1}', ON),
- ('\u{1f900}', '\u{1fa53}', ON), ('\u{1fa60}', '\u{1fa6d}', ON), ('\u{1fa70}', '\u{1fa74}', ON),
- ('\u{1fa78}', '\u{1fa7c}', ON), ('\u{1fa80}', '\u{1fa86}', ON), ('\u{1fa90}', '\u{1faac}', ON),
- ('\u{1fab0}', '\u{1faba}', ON), ('\u{1fac0}', '\u{1fac5}', ON), ('\u{1fad0}', '\u{1fad9}', ON),
- ('\u{1fae0}', '\u{1fae7}', ON), ('\u{1faf0}', '\u{1faf6}', ON), ('\u{1fb00}', '\u{1fb92}', ON),
- ('\u{1fb94}', '\u{1fbca}', ON), ('\u{1fbf0}', '\u{1fbf9}', EN), ('\u{20000}', '\u{2a6df}', L),
- ('\u{2a700}', '\u{2b738}', L), ('\u{2b740}', '\u{2b81d}', L), ('\u{2b820}', '\u{2cea1}', L),
- ('\u{2ceb0}', '\u{2ebe0}', L), ('\u{2f800}', '\u{2fa1d}', L), ('\u{30000}', '\u{3134a}', L),
- ('\u{e0001}', '\u{e0001}', BN), ('\u{e0020}', '\u{e007f}', BN), ('\u{e0100}', '\u{e01ef}', NSM),
- ('\u{f0000}', '\u{ffffd}', L), ('\u{100000}', '\u{10fffd}', L)
+ ('\u{1d2c0}', '\u{1d2d3}', L), ('\u{1d2e0}', '\u{1d2f3}', L), ('\u{1d300}', '\u{1d356}', ON),
+ ('\u{1d360}', '\u{1d378}', L), ('\u{1d400}', '\u{1d454}', L), ('\u{1d456}', '\u{1d49c}', L),
+ ('\u{1d49e}', '\u{1d49f}', L), ('\u{1d4a2}', '\u{1d4a2}', L), ('\u{1d4a5}', '\u{1d4a6}', L),
+ ('\u{1d4a9}', '\u{1d4ac}', L), ('\u{1d4ae}', '\u{1d4b9}', L), ('\u{1d4bb}', '\u{1d4bb}', L),
+ ('\u{1d4bd}', '\u{1d4c3}', L), ('\u{1d4c5}', '\u{1d505}', L), ('\u{1d507}', '\u{1d50a}', L),
+ ('\u{1d50d}', '\u{1d514}', L), ('\u{1d516}', '\u{1d51c}', L), ('\u{1d51e}', '\u{1d539}', L),
+ ('\u{1d53b}', '\u{1d53e}', L), ('\u{1d540}', '\u{1d544}', L), ('\u{1d546}', '\u{1d546}', L),
+ ('\u{1d54a}', '\u{1d550}', L), ('\u{1d552}', '\u{1d6a5}', L), ('\u{1d6a8}', '\u{1d6da}', L),
+ ('\u{1d6db}', '\u{1d6db}', ON), ('\u{1d6dc}', '\u{1d714}', L), ('\u{1d715}', '\u{1d715}', ON),
+ ('\u{1d716}', '\u{1d74e}', L), ('\u{1d74f}', '\u{1d74f}', ON), ('\u{1d750}', '\u{1d788}', L),
+ ('\u{1d789}', '\u{1d789}', ON), ('\u{1d78a}', '\u{1d7c2}', L), ('\u{1d7c3}', '\u{1d7c3}', ON),
+ ('\u{1d7c4}', '\u{1d7cb}', L), ('\u{1d7ce}', '\u{1d7ff}', EN), ('\u{1d800}', '\u{1d9ff}', L),
+ ('\u{1da00}', '\u{1da36}', NSM), ('\u{1da37}', '\u{1da3a}', L), ('\u{1da3b}', '\u{1da6c}', NSM),
+ ('\u{1da6d}', '\u{1da74}', L), ('\u{1da75}', '\u{1da75}', NSM), ('\u{1da76}', '\u{1da83}', L),
+ ('\u{1da84}', '\u{1da84}', NSM), ('\u{1da85}', '\u{1da8b}', L), ('\u{1da9b}', '\u{1da9f}', NSM),
+ ('\u{1daa1}', '\u{1daaf}', NSM), ('\u{1df00}', '\u{1df1e}', L), ('\u{1df25}', '\u{1df2a}', L),
+ ('\u{1e000}', '\u{1e006}', NSM), ('\u{1e008}', '\u{1e018}', NSM), ('\u{1e01b}', '\u{1e021}',
+ NSM), ('\u{1e023}', '\u{1e024}', NSM), ('\u{1e026}', '\u{1e02a}', NSM), ('\u{1e030}',
+ '\u{1e06d}', L), ('\u{1e08f}', '\u{1e08f}', NSM), ('\u{1e100}', '\u{1e12c}', L), ('\u{1e130}',
+ '\u{1e136}', NSM), ('\u{1e137}', '\u{1e13d}', L), ('\u{1e140}', '\u{1e149}', L), ('\u{1e14e}',
+ '\u{1e14f}', L), ('\u{1e290}', '\u{1e2ad}', L), ('\u{1e2ae}', '\u{1e2ae}', NSM), ('\u{1e2c0}',
+ '\u{1e2eb}', L), ('\u{1e2ec}', '\u{1e2ef}', NSM), ('\u{1e2f0}', '\u{1e2f9}', L), ('\u{1e2ff}',
+ '\u{1e2ff}', ET), ('\u{1e4d0}', '\u{1e4eb}', L), ('\u{1e4ec}', '\u{1e4ef}', NSM), ('\u{1e4f0}',
+ '\u{1e4f9}', L), ('\u{1e7e0}', '\u{1e7e6}', L), ('\u{1e7e8}', '\u{1e7eb}', L), ('\u{1e7ed}',
+ '\u{1e7ee}', L), ('\u{1e7f0}', '\u{1e7fe}', L), ('\u{1e800}', '\u{1e8cf}', R), ('\u{1e8d0}',
+ '\u{1e8d6}', NSM), ('\u{1e8d7}', '\u{1e943}', R), ('\u{1e944}', '\u{1e94a}', NSM), ('\u{1e94b}',
+ '\u{1ec70}', R), ('\u{1ec71}', '\u{1ecb4}', AL), ('\u{1ecb5}', '\u{1ed00}', R), ('\u{1ed01}',
+ '\u{1ed3d}', AL), ('\u{1ed3e}', '\u{1edff}', R), ('\u{1ee00}', '\u{1eeef}', AL), ('\u{1eef0}',
+ '\u{1eef1}', ON), ('\u{1eef2}', '\u{1eeff}', AL), ('\u{1ef00}', '\u{1efff}', R), ('\u{1f000}',
+ '\u{1f02b}', ON), ('\u{1f030}', '\u{1f093}', ON), ('\u{1f0a0}', '\u{1f0ae}', ON), ('\u{1f0b1}',
+ '\u{1f0bf}', ON), ('\u{1f0c1}', '\u{1f0cf}', ON), ('\u{1f0d1}', '\u{1f0f5}', ON), ('\u{1f100}',
+ '\u{1f10a}', EN), ('\u{1f10b}', '\u{1f10f}', ON), ('\u{1f110}', '\u{1f12e}', L), ('\u{1f12f}',
+ '\u{1f12f}', ON), ('\u{1f130}', '\u{1f169}', L), ('\u{1f16a}', '\u{1f16f}', ON), ('\u{1f170}',
+ '\u{1f1ac}', L), ('\u{1f1ad}', '\u{1f1ad}', ON), ('\u{1f1e6}', '\u{1f202}', L), ('\u{1f210}',
+ '\u{1f23b}', L), ('\u{1f240}', '\u{1f248}', L), ('\u{1f250}', '\u{1f251}', L), ('\u{1f260}',
+ '\u{1f265}', ON), ('\u{1f300}', '\u{1f6d7}', ON), ('\u{1f6dc}', '\u{1f6ec}', ON), ('\u{1f6f0}',
+ '\u{1f6fc}', ON), ('\u{1f700}', '\u{1f776}', ON), ('\u{1f77b}', '\u{1f7d9}', ON), ('\u{1f7e0}',
+ '\u{1f7eb}', ON), ('\u{1f7f0}', '\u{1f7f0}', ON), ('\u{1f800}', '\u{1f80b}', ON), ('\u{1f810}',
+ '\u{1f847}', ON), ('\u{1f850}', '\u{1f859}', ON), ('\u{1f860}', '\u{1f887}', ON), ('\u{1f890}',
+ '\u{1f8ad}', ON), ('\u{1f8b0}', '\u{1f8b1}', ON), ('\u{1f900}', '\u{1fa53}', ON), ('\u{1fa60}',
+ '\u{1fa6d}', ON), ('\u{1fa70}', '\u{1fa7c}', ON), ('\u{1fa80}', '\u{1fa88}', ON), ('\u{1fa90}',
+ '\u{1fabd}', ON), ('\u{1fabf}', '\u{1fac5}', ON), ('\u{1face}', '\u{1fadb}', ON), ('\u{1fae0}',
+ '\u{1fae8}', ON), ('\u{1faf0}', '\u{1faf8}', ON), ('\u{1fb00}', '\u{1fb92}', ON), ('\u{1fb94}',
+ '\u{1fbca}', ON), ('\u{1fbf0}', '\u{1fbf9}', EN), ('\u{20000}', '\u{2a6df}', L), ('\u{2a700}',
+ '\u{2b739}', L), ('\u{2b740}', '\u{2b81d}', L), ('\u{2b820}', '\u{2cea1}', L), ('\u{2ceb0}',
+ '\u{2ebe0}', L), ('\u{2f800}', '\u{2fa1d}', L), ('\u{30000}', '\u{3134a}', L), ('\u{31350}',
+ '\u{323af}', L), ('\u{e0001}', '\u{e0001}', BN), ('\u{e0020}', '\u{e007f}', BN), ('\u{e0100}',
+ '\u{e01ef}', NSM), ('\u{f0000}', '\u{ffffd}', L), ('\u{100000}', '\u{10fffd}', L)
+];
+
+pub const bidi_pairs_table: &'static [(char, char, Option<char>)] = &[
+ ('\u{28}', '\u{29}', None), ('\u{5b}', '\u{5d}', None), ('\u{7b}', '\u{7d}', None), ('\u{f3a}',
+ '\u{f3b}', None), ('\u{f3c}', '\u{f3d}', None), ('\u{169b}', '\u{169c}', None), ('\u{2045}',
+ '\u{2046}', None), ('\u{207d}', '\u{207e}', None), ('\u{208d}', '\u{208e}', None), ('\u{2308}',
+ '\u{2309}', None), ('\u{230a}', '\u{230b}', None), ('\u{2329}', '\u{232a}', Some('\u{3008}')),
+ ('\u{2768}', '\u{2769}', None), ('\u{276a}', '\u{276b}', None), ('\u{276c}', '\u{276d}', None),
+ ('\u{276e}', '\u{276f}', None), ('\u{2770}', '\u{2771}', None), ('\u{2772}', '\u{2773}', None),
+ ('\u{2774}', '\u{2775}', None), ('\u{27c5}', '\u{27c6}', None), ('\u{27e6}', '\u{27e7}', None),
+ ('\u{27e8}', '\u{27e9}', None), ('\u{27ea}', '\u{27eb}', None), ('\u{27ec}', '\u{27ed}', None),
+ ('\u{27ee}', '\u{27ef}', None), ('\u{2983}', '\u{2984}', None), ('\u{2985}', '\u{2986}', None),
+ ('\u{2987}', '\u{2988}', None), ('\u{2989}', '\u{298a}', None), ('\u{298b}', '\u{298c}', None),
+ ('\u{298d}', '\u{2990}', None), ('\u{298f}', '\u{298e}', None), ('\u{2991}', '\u{2992}', None),
+ ('\u{2993}', '\u{2994}', None), ('\u{2995}', '\u{2996}', None), ('\u{2997}', '\u{2998}', None),
+ ('\u{29d8}', '\u{29d9}', None), ('\u{29da}', '\u{29db}', None), ('\u{29fc}', '\u{29fd}', None),
+ ('\u{2e22}', '\u{2e23}', None), ('\u{2e24}', '\u{2e25}', None), ('\u{2e26}', '\u{2e27}', None),
+ ('\u{2e28}', '\u{2e29}', None), ('\u{2e55}', '\u{2e56}', None), ('\u{2e57}', '\u{2e58}', None),
+ ('\u{2e59}', '\u{2e5a}', None), ('\u{2e5b}', '\u{2e5c}', None), ('\u{3008}', '\u{3009}', None),
+ ('\u{300a}', '\u{300b}', None), ('\u{300c}', '\u{300d}', None), ('\u{300e}', '\u{300f}', None),
+ ('\u{3010}', '\u{3011}', None), ('\u{3014}', '\u{3015}', None), ('\u{3016}', '\u{3017}', None),
+ ('\u{3018}', '\u{3019}', None), ('\u{301a}', '\u{301b}', None), ('\u{fe59}', '\u{fe5a}', None),
+ ('\u{fe5b}', '\u{fe5c}', None), ('\u{fe5d}', '\u{fe5e}', None), ('\u{ff08}', '\u{ff09}', None),
+ ('\u{ff3b}', '\u{ff3d}', None), ('\u{ff5b}', '\u{ff5d}', None), ('\u{ff5f}', '\u{ff60}', None),
+ ('\u{ff62}', '\u{ff63}', None)
];
diff --git a/vendor/unicode-bidi/src/data_source.rs b/vendor/unicode-bidi/src/data_source.rs
index 3958a255e..319ad53b0 100644
--- a/vendor/unicode-bidi/src/data_source.rs
+++ b/vendor/unicode-bidi/src/data_source.rs
@@ -9,8 +9,38 @@
use crate::BidiClass;
+/// This is the return value of [`BidiDataSource::bidi_matched_opening_bracket()`].
+///
+/// It represents the matching *normalized* opening bracket for a given bracket in a bracket pair,
+/// and whether or not that bracket is opening.
+#[derive(Debug, Copy, Clone)]
+pub struct BidiMatchedOpeningBracket {
+ /// The corresponding opening bracket in this bracket pair, normalized.
+ ///
+ /// For an opening bracket this is the bracket itself, unless that bracket is not in
+ /// normalized form, in which case it is the normalized form.
+ pub opening: char,
+ /// Whether or not the requested bracket was an opening bracket; true if it was opening.
+ pub is_open: bool,
+}
+
/// This trait abstracts over a data source that is able to produce the Unicode Bidi class for a given
/// character
pub trait BidiDataSource {
fn bidi_class(&self, c: char) -> BidiClass;
+ /// If this character is a bracket according to BidiBrackets.txt,
+ /// return the corresponding *normalized* *opening bracket* of the pair,
+ /// and whether or not it itself is an opening bracket.
+ ///
+ /// This effectively buckets brackets into equivalence classes keyed on the
+ /// normalized opening bracket.
+ ///
+ /// The default implementation will pull in a small amount of hardcoded data,
+ /// regardless of the `hardcoded-data` feature. This is in part for convenience
+ /// (since this data is small and changes less often), and in part so that this method can be
+ /// added without needing a breaking version bump.
+ /// Override this method in your custom data source to prevent the use of hardcoded data.
+ fn bidi_matched_opening_bracket(&self, c: char) -> Option<BidiMatchedOpeningBracket> {
+ crate::char_data::bidi_matched_opening_bracket(c)
+ }
}
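Because the new trait method ships with a default body, existing `BidiDataSource` implementors keep compiling unchanged; a source only overrides it to supply its own bracket data. A hypothetical sketch (editor's illustration; the `MyDataSource` type and its toy tables are not part of the crate, and the import paths follow the module layout shown in this diff):

    use unicode_bidi::data_source::{BidiDataSource, BidiMatchedOpeningBracket};
    use unicode_bidi::BidiClass;

    // Hypothetical data source; a real one would consult an external Unicode database.
    struct MyDataSource;

    impl BidiDataSource for MyDataSource {
        fn bidi_class(&self, c: char) -> BidiClass {
            // Toy classification, for the sketch only.
            if c.is_ascii_alphabetic() { BidiClass::L } else { BidiClass::ON }
        }

        // Optional override: without it, the small built-in bracket table is used.
        fn bidi_matched_opening_bracket(&self, c: char) -> Option<BidiMatchedOpeningBracket> {
            match c {
                '(' => Some(BidiMatchedOpeningBracket { opening: '(', is_open: true }),
                ')' => Some(BidiMatchedOpeningBracket { opening: '(', is_open: false }),
                _ => None,
            }
        }
    }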
diff --git a/vendor/unicode-bidi/src/explicit.rs b/vendor/unicode-bidi/src/explicit.rs
index e9d579fd6..a9b13e89c 100644
--- a/vendor/unicode-bidi/src/explicit.rs
+++ b/vendor/unicode-bidi/src/explicit.rs
@@ -47,13 +47,17 @@ pub fn compute(
RLE | LRE | RLO | LRO | RLI | LRI | FSI => {
let last_level = stack.last().level;
+ // <https://www.unicode.org/reports/tr9/#Retaining_Explicit_Formatting_Characters>
+ levels[i] = last_level;
+
// X5a-X5c: Isolate initiators get the level of the last entry on the stack.
let is_isolate = match original_classes[i] {
RLI | LRI | FSI => true,
_ => false,
};
if is_isolate {
- levels[i] = last_level;
+ // Redundant due to "Retaining explicit formatting characters" step.
+ // levels[i] = last_level;
match stack.last().status {
OverrideStatus::RTL => processing_classes[i] = R,
OverrideStatus::LTR => processing_classes[i] = L,
@@ -90,6 +94,13 @@ pub fn compute(
} else if overflow_isolate_count == 0 {
overflow_embedding_count += 1;
}
+
+ if !is_isolate {
+ // X9 +
+ // <https://www.unicode.org/reports/tr9/#Retaining_Explicit_Formatting_Characters>
+ // (PDF handled below)
+ processing_classes[i] = BN;
+ }
}
// <http://www.unicode.org/reports/tr9/#X6a>
@@ -123,31 +134,34 @@ pub fn compute(
// <http://www.unicode.org/reports/tr9/#X7>
PDF => {
if overflow_isolate_count > 0 {
- continue;
- }
- if overflow_embedding_count > 0 {
+ // do nothing
+ } else if overflow_embedding_count > 0 {
overflow_embedding_count -= 1;
- continue;
- }
- if stack.last().status != OverrideStatus::Isolate && stack.vec.len() >= 2 {
+ } else if stack.last().status != OverrideStatus::Isolate && stack.vec.len() >= 2 {
stack.vec.pop();
}
- // The spec doesn't explicitly mention this step, but it is necessary.
- // See the reference implementations for comparison.
+ // <https://www.unicode.org/reports/tr9/#Retaining_Explicit_Formatting_Characters>
levels[i] = stack.last().level;
+ // X9 part of retaining explicit formatting characters.
+ processing_classes[i] = BN;
}
- // Nothing
- B | BN => {}
+ // Nothing.
+ // BN case moved down to X6, see <https://www.unicode.org/reports/tr9/#Retaining_Explicit_Formatting_Characters>
+ B => {}
// <http://www.unicode.org/reports/tr9/#X6>
_ => {
let last = stack.last();
levels[i] = last.level;
- match last.status {
- OverrideStatus::RTL => processing_classes[i] = R,
- OverrideStatus::LTR => processing_classes[i] = L,
- _ => {}
+ // This condition is not in the spec, but its omission appears to be a spec bug; see
+ // https://www.unicode.org/L2/L2023/23014-amd-to-uax9.pdf
+ if original_classes[i] != BN {
+ match last.status {
+ OverrideStatus::RTL => processing_classes[i] = R,
+ OverrideStatus::LTR => processing_classes[i] = L,
+ _ => {}
+ }
}
}
}
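Taken together, the explicit.rs and implicit.rs changes move the crate from dropping X9-removed characters to the "Retaining Explicit Formatting Characters" approach, tagging them BN and carrying them through the weak/neutral passes. The public entry point that drives these passes is unchanged; a minimal end-to-end sketch (editor's illustration), following the crate's documented usage:

    use unicode_bidi::BidiInfo;

    fn main() {
        // Mixed LTR/RTL text; any explicit formatting characters are now kept
        // internally as BN instead of being stripped.
        let text = "hello \u{05e9}\u{05dc}\u{05d5}\u{05dd}";
        let bidi_info = BidiInfo::new(text, None);
        let para = &bidi_info.paragraphs[0];
        let line = para.range.clone();
        println!("{}", bidi_info.reorder_line(para, line));
    }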
diff --git a/vendor/unicode-bidi/src/implicit.rs b/vendor/unicode-bidi/src/implicit.rs
index bf37f17e6..294af7cbd 100644
--- a/vendor/unicode-bidi/src/implicit.rs
+++ b/vendor/unicode-bidi/src/implicit.rs
@@ -14,104 +14,213 @@ use core::cmp::max;
use super::char_data::BidiClass::{self, *};
use super::level::Level;
-use super::prepare::{not_removed_by_x9, removed_by_x9, IsolatingRunSequence, LevelRun};
+use super::prepare::{not_removed_by_x9, removed_by_x9, IsolatingRunSequence};
+use super::BidiDataSource;
/// 3.3.4 Resolving Weak Types
///
/// <http://www.unicode.org/reports/tr9/#Resolving_Weak_Types>
#[cfg_attr(feature = "flame_it", flamer::flame)]
-pub fn resolve_weak(sequence: &IsolatingRunSequence, processing_classes: &mut [BidiClass]) {
- // FIXME (#8): This function applies steps W1-W6 in a single pass. This can produce
- // incorrect results in cases where a "later" rule changes the value of `prev_class` seen
- // by an "earlier" rule. We should either split this into separate passes, or preserve
- // extra state so each rule can see the correct previous class.
-
- // FIXME: Also, this could be the cause of increased failure for using longer-UTF-8 chars in
- // conformance tests, like BidiTest:69635 (AL ET EN)
+pub fn resolve_weak(
+ text: &str,
+ sequence: &IsolatingRunSequence,
+ processing_classes: &mut [BidiClass],
+) {
+ // Note: The spec treats these steps as individual passes that are applied one after the other
+ // over the entire IsolatingRunSequence. We instead collapse them into a single pass,
+ // which is straightforward for rules that only depend on the current character, but not
+ // for rules that care about surrounding characters. To handle those, we retain extra state
+ // recording previous character classes as they were before later rules changed them.
- let mut prev_class = sequence.sos;
+ // The previous class for the purposes of rule W4/W6, not tracking changes made after or during W4.
+ let mut prev_class_before_w4 = sequence.sos;
+ // The previous class for the purposes of rule W5.
+ let mut prev_class_before_w5 = sequence.sos;
+ // The previous class for the purposes of rule W1, not tracking changes from any other rules.
+ let mut prev_class_before_w1 = sequence.sos;
let mut last_strong_is_al = false;
let mut et_run_indices = Vec::new(); // for W5
+ let mut bn_run_indices = Vec::new(); // for W5 + <https://www.unicode.org/reports/tr9/#Retaining_Explicit_Formatting_Characters>
+
+ for (run_index, level_run) in sequence.runs.iter().enumerate() {
+ for i in &mut level_run.clone() {
+ if processing_classes[i] == BN {
+ // <https://www.unicode.org/reports/tr9/#Retaining_Explicit_Formatting_Characters>
+ // Keeps track of bn runs for W5 in case we see an ET.
+ bn_run_indices.push(i);
+ // BNs aren't real, skip over them.
+ continue;
+ }
+
+ // Store the processing class as it is before W2 runs (it is refreshed after W1 below).
+ // Used to keep track of the last strong character for W2. W3 is able to introduce new strong
+ // characters, so we don't want to be misled by it.
+ let mut w2_processing_class = processing_classes[i];
- // Like sequence.runs.iter().flat_map(Clone::clone), but make indices itself clonable.
- fn id(x: LevelRun) -> LevelRun {
- x
- }
- let mut indices = sequence
- .runs
- .iter()
- .cloned()
- .flat_map(id as fn(LevelRun) -> LevelRun);
-
- while let Some(i) = indices.next() {
- match processing_classes[i] {
// <http://www.unicode.org/reports/tr9/#W1>
- NSM => {
- processing_classes[i] = match prev_class {
+ //
+
+ if processing_classes[i] == NSM {
+ processing_classes[i] = match prev_class_before_w1 {
RLI | LRI | FSI | PDI => ON,
- _ => prev_class,
+ _ => prev_class_before_w1,
};
+ // W1 occurs before W2, update this.
+ w2_processing_class = processing_classes[i];
}
- EN => {
- if last_strong_is_al {
- // W2. If previous strong char was AL, change EN to AN.
- processing_classes[i] = AN;
- } else {
+
+ prev_class_before_w1 = processing_classes[i];
+
+ // <http://www.unicode.org/reports/tr9/#W2>
+ // <http://www.unicode.org/reports/tr9/#W3>
+ //
+ match processing_classes[i] {
+ EN => {
+ if last_strong_is_al {
+ // W2. If previous strong char was AL, change EN to AN.
+ processing_classes[i] = AN;
+ }
+ }
+ // W3.
+ AL => processing_classes[i] = R,
+ _ => {}
+ }
+
+ // update last_strong_is_al.
+ match w2_processing_class {
+ L | R => {
+ last_strong_is_al = false;
+ }
+ AL => {
+ last_strong_is_al = true;
+ }
+ _ => {}
+ }
+
+ let class_before_w456 = processing_classes[i];
+
+ // <http://www.unicode.org/reports/tr9/#W4>
+ // <http://www.unicode.org/reports/tr9/#W5>
+ // <http://www.unicode.org/reports/tr9/#W6> (separators only)
+ // (see below for W6 terminator code)
+ //
+ match processing_classes[i] {
+ // <http://www.unicode.org/reports/tr9/#W6>
+ EN => {
// W5. If a run of ETs is adjacent to an EN, change the ETs to EN.
for j in &et_run_indices {
processing_classes[*j] = EN;
}
et_run_indices.clear();
}
- }
- // <http://www.unicode.org/reports/tr9/#W3>
- AL => processing_classes[i] = R,
- // <http://www.unicode.org/reports/tr9/#W4>
- ES | CS => {
- let next_class = indices
- .clone()
- .map(|j| processing_classes[j])
- .find(not_removed_by_x9)
- .unwrap_or(sequence.eos);
- processing_classes[i] = match (prev_class, processing_classes[i], next_class) {
- (EN, ES, EN) | (EN, CS, EN) => EN,
- (AN, CS, AN) => AN,
- (_, _, _) => ON,
+ // <http://www.unicode.org/reports/tr9/#W4>
+ // <http://www.unicode.org/reports/tr9/#W6>
+ ES | CS => {
+ // See https://github.com/servo/unicode-bidi/issues/86 for improving this.
+ // We want to make sure we check the correct next character by skipping past the rest
+ // of this one.
+ if let Some(ch) = text.get(i..).and_then(|s| s.chars().next()) {
+ let mut next_class = sequence
+ .iter_forwards_from(i + ch.len_utf8(), run_index)
+ .map(|j| processing_classes[j])
+ // <https://www.unicode.org/reports/tr9/#Retaining_Explicit_Formatting_Characters>
+ .find(not_removed_by_x9)
+ .unwrap_or(sequence.eos);
+ if next_class == EN && last_strong_is_al {
+ // Apply W2 to next_class. We know that last_strong_is_al
+ // has no chance of changing on this character so we can still assume its value
+ // will be the same by the time we get to it.
+ next_class = AN;
+ }
+ processing_classes[i] =
+ match (prev_class_before_w4, processing_classes[i], next_class) {
+ // W4
+ (EN, ES, EN) | (EN, CS, EN) => EN,
+ // W4
+ (AN, CS, AN) => AN,
+ // W6 (separators only)
+ (_, _, _) => ON,
+ };
+
+ // W6 + <https://www.unicode.org/reports/tr9/#Retaining_Explicit_Formatting_Characters>
+ // We have to do this before W5 gets its grubby hands on these characters and thinks
+ // they're part of an ET run.
+ // We check for ON to ensure that we had hit the W6 branch above, since this `ES | CS` match
+ // arm handles both W4 and W6.
+ if processing_classes[i] == ON {
+ for idx in sequence.iter_backwards_from(i, run_index) {
+ let class = &mut processing_classes[idx];
+ if *class != BN {
+ break;
+ }
+ *class = ON;
+ }
+ for idx in sequence.iter_forwards_from(i + ch.len_utf8(), run_index) {
+ let class = &mut processing_classes[idx];
+ if *class != BN {
+ break;
+ }
+ *class = ON;
+ }
+ }
+ } else {
+ // We're in the middle of a character, copy over work done for previous bytes
+ // since it's going to be the same answer.
+ processing_classes[i] = processing_classes[i - 1];
+ }
}
- }
- // <http://www.unicode.org/reports/tr9/#W5>
- ET => {
- match prev_class {
- EN => processing_classes[i] = EN,
- _ => et_run_indices.push(i), // In case this is followed by an EN.
+ // <http://www.unicode.org/reports/tr9/#W5>
+ ET => {
+ match prev_class_before_w5 {
+ EN => processing_classes[i] = EN,
+ _ => {
+ // <https://www.unicode.org/reports/tr9/#Retaining_Explicit_Formatting_Characters>
+ // If there was a BN run before this, that's now a part of this ET run.
+ et_run_indices.extend(&bn_run_indices);
+
+ // In case this is followed by an EN.
+ et_run_indices.push(i);
+ }
+ }
}
+ _ => {}
}
- class => {
- if removed_by_x9(class) {
- continue;
+
+ // Common loop iteration code
+ //
+
+ // <https://www.unicode.org/reports/tr9/#Retaining_Explicit_Formatting_Characters>
+ // BN runs would have already continued the loop, clear them before we get to the next one.
+ bn_run_indices.clear();
+
+ // W6 above only deals with separators, so it doesn't change anything W5 cares about;
+ // we can still update this after running that part of W6.
+ prev_class_before_w5 = processing_classes[i];
+
+ // <http://www.unicode.org/reports/tr9/#W6> (terminators only)
+ // (see above for W6 separator code)
+ //
+ if prev_class_before_w5 != ET {
+ // W6. If we didn't find an adjacent EN, turn any ETs into ON instead.
+ for j in &et_run_indices {
+ processing_classes[*j] = ON;
}
+ et_run_indices.clear();
}
- }
- prev_class = processing_classes[i];
- match prev_class {
- L | R => {
- last_strong_is_al = false;
- }
- AL => {
- last_strong_is_al = true;
- }
- _ => {}
- }
- if prev_class != ET {
- // W6. If we didn't find an adjacent EN, turn any ETs into ON instead.
- for j in &et_run_indices {
- processing_classes[*j] = ON;
- }
- et_run_indices.clear();
+ // We stashed this before W4/5/6 could get their grubby hands on it, and it's not
+ // used in the W6 terminator code below so we can update it now.
+ prev_class_before_w4 = class_before_w456;
}
}
+ // Rerun this check in case we ended with a sequence of BNs (i.e., we'd never
+ // hit the end of the for loop above).
+ // W6. If we didn't find an adjacent EN, turn any ETs into ON instead.
+ for j in &et_run_indices {
+ processing_classes[*j] = ON;
+ }
+ et_run_indices.clear();
// W7. If the previous strong char was L, change EN to L.
let mut last_strong_is_l = sequence.sos == L;
@@ -127,6 +236,8 @@ pub fn resolve_weak(sequence: &IsolatingRunSequence, processing_classes: &mut [B
R | AL => {
last_strong_is_l = false;
}
+ // <https://www.unicode.org/reports/tr9/#Retaining_Explicit_Formatting_Characters>
+ // Already scanning past BN here.
_ => {}
}
}
@@ -137,22 +248,172 @@ pub fn resolve_weak(sequence: &IsolatingRunSequence, processing_classes: &mut [B
///
/// <http://www.unicode.org/reports/tr9/#Resolving_Neutral_Types>
#[cfg_attr(feature = "flame_it", flamer::flame)]
-pub fn resolve_neutral(
+pub fn resolve_neutral<D: BidiDataSource>(
+ text: &str,
+ data_source: &D,
sequence: &IsolatingRunSequence,
levels: &[Level],
+ original_classes: &[BidiClass],
processing_classes: &mut [BidiClass],
) {
+ // e = embedding direction
let e: BidiClass = levels[sequence.runs[0].start].bidi_class();
+ let not_e = if e == BidiClass::L {
+ BidiClass::R
+ } else {
+ BidiClass::L
+ };
+ // N0. Process bracket pairs.
+
+ // > Identify the bracket pairs in the current isolating run sequence according to BD16.
+ // We use processing_classes, not original_classes, due to BD14/BD15
+ let bracket_pairs = identify_bracket_pairs(text, data_source, sequence, processing_classes);
+
+ // > For each bracket-pair element in the list of pairs of text positions
+ //
+ // Note: Rust ranges are interpreted as [start..end), be careful using `pair` directly
+ // for indexing as it will include the opening bracket pair but not the closing one.
+ for pair in bracket_pairs {
+ #[cfg(feature = "std")]
+ debug_assert!(
+ pair.start < processing_classes.len(),
+ "identify_bracket_pairs returned a range that is out of bounds!"
+ );
+ #[cfg(feature = "std")]
+ debug_assert!(
+ pair.end < processing_classes.len(),
+ "identify_bracket_pairs returned a range that is out of bounds!"
+ );
+ let mut found_e = false;
+ let mut found_not_e = false;
+ let mut class_to_set = None;
+
+ let start_len_utf8 = text[pair.start..].chars().next().unwrap().len_utf8();
+ // > Inspect the bidirectional types of the characters enclosed within the bracket pair.
+ //
+ // `pair` is [start, end) so we will end up processing the opening character but not the closing one.
+ //
+ for enclosed_i in sequence.iter_forwards_from(pair.start + start_len_utf8, pair.start_run) {
+ if enclosed_i >= pair.end {
+ #[cfg(feature = "std")]
+ debug_assert!(
+ enclosed_i == pair.end,
+ "If we skipped past this, the iterator is broken"
+ );
+ break;
+ }
+ let class = processing_classes[enclosed_i];
+ if class == e {
+ found_e = true;
+ } else if class == not_e {
+ found_not_e = true;
+ } else if class == BidiClass::EN || class == BidiClass::AN {
+ // > Within this scope, bidirectional types EN and AN are treated as R.
+ if e == BidiClass::L {
+ found_not_e = true;
+ } else {
+ found_e = true;
+ }
+ }
+
+ // If we have found a character with the class of the embedding direction
+ // we can bail early.
+ if found_e {
+ break;
+ }
+ }
+ // > If any strong type (either L or R) matching the embedding direction is found
+ if found_e {
+ // > .. set the type for both brackets in the pair to match the embedding direction
+ class_to_set = Some(e);
+ // > Otherwise, if there is a strong type it must be opposite the embedding direction
+ } else if found_not_e {
+ // > Therefore, test for an established context with a preceding strong type by
+ // > checking backwards before the opening paired bracket
+ // > until the first strong type (L, R, or sos) is found.
+ // (see note above about processing_classes and character boundaries)
+ let mut previous_strong = sequence
+ .iter_backwards_from(pair.start, pair.start_run)
+ .map(|i| processing_classes[i])
+ .find(|class| {
+ *class == BidiClass::L
+ || *class == BidiClass::R
+ || *class == BidiClass::EN
+ || *class == BidiClass::AN
+ })
+ .unwrap_or(sequence.sos);
+
+ // > Within this scope, bidirectional types EN and AN are treated as R.
+ if previous_strong == BidiClass::EN || previous_strong == BidiClass::AN {
+ previous_strong = BidiClass::R;
+ }
+
+ // > If the preceding strong type is also opposite the embedding direction,
+ // > context is established,
+ // > so set the type for both brackets in the pair to that direction.
+ // AND
+ // > Otherwise set the type for both brackets in the pair to the embedding direction.
+ // > Either way it gets set to previous_strong
+ //
+ // Both branches amount to setting the type to the strong type.
+ class_to_set = Some(previous_strong);
+ }
+
+ if let Some(class_to_set) = class_to_set {
+ // Update all processing classes corresponding to the start and end elements, as requested.
+ // We should include all bytes of the character, not just the first one.
+ let end_len_utf8 = text[pair.end..].chars().next().unwrap().len_utf8();
+ for class in &mut processing_classes[pair.start..pair.start + start_len_utf8] {
+ *class = class_to_set;
+ }
+ for class in &mut processing_classes[pair.end..pair.end + end_len_utf8] {
+ *class = class_to_set;
+ }
+ // <https://www.unicode.org/reports/tr9/#Retaining_Explicit_Formatting_Characters>
+ for idx in sequence.iter_backwards_from(pair.start, pair.start_run) {
+ let class = &mut processing_classes[idx];
+ if *class != BN {
+ break;
+ }
+ *class = class_to_set;
+ }
+ // > Any number of characters that had original bidirectional character type NSM prior to the application of
+ // > W1 that immediately follow a paired bracket which changed to L or R under N0 should change to match the type of their preceding bracket.
+
+ // This rule deals with sequences of NSMs, so we can just update them all at once; we don't need to worry
+ // about character boundaries. We do need to be careful to skip the full set of bytes for the parentheses characters.
+ let nsm_start = pair.start + start_len_utf8;
+ for idx in sequence.iter_forwards_from(nsm_start, pair.start_run) {
+ let class = original_classes[idx];
+ if class == BidiClass::NSM || processing_classes[idx] == BN {
+ processing_classes[idx] = class_to_set;
+ } else {
+ break;
+ }
+ }
+ let nsm_end = pair.end + end_len_utf8;
+ for idx in sequence.iter_forwards_from(nsm_end, pair.end_run) {
+ let class = original_classes[idx];
+ if class == BidiClass::NSM || processing_classes[idx] == BN {
+ processing_classes[idx] = class_to_set;
+ } else {
+ break;
+ }
+ }
+ }
+ // > Otherwise, there are no strong types within the bracket pair
+ // > Therefore, do not set the type for that bracket pair
+ }
+
+ // N1 and N2.
+ // Indices of every byte in this isolating run sequence
let mut indices = sequence.runs.iter().flat_map(Clone::clone);
let mut prev_class = sequence.sos;
-
while let Some(mut i) = indices.next() {
- // N0. Process bracket pairs.
- // TODO
-
// Process sequences of NI characters.
let mut ni_run = Vec::new();
- if is_NI(processing_classes[i]) {
+ // The BN is for <https://www.unicode.org/reports/tr9/#Retaining_Explicit_Formatting_Characters>
+ if is_NI(processing_classes[i]) || processing_classes[i] == BN {
// Consume a run of consecutive NI characters.
ni_run.push(i);
let mut next_class;
@@ -160,11 +421,9 @@ pub fn resolve_neutral(
match indices.next() {
Some(j) => {
i = j;
- if removed_by_x9(processing_classes[i]) {
- continue;
- }
next_class = processing_classes[j];
- if is_NI(next_class) {
+ // The BN is for <https://www.unicode.org/reports/tr9/#Retaining_Explicit_Formatting_Characters>
+ if is_NI(next_class) || next_class == BN {
ni_run.push(i);
} else {
break;
@@ -176,7 +435,6 @@ pub fn resolve_neutral(
}
};
}
-
// N1-N2.
//
// <http://www.unicode.org/reports/tr9/#N1>
@@ -203,6 +461,105 @@ pub fn resolve_neutral(
}
}
+struct BracketPair {
+ /// The text-relative index of the opening bracket.
+ start: usize,
+ /// The text-relative index of the closing bracket.
+ end: usize,
+ /// The index of the run (in the run sequence) that the opening bracket is in.
+ start_run: usize,
+ /// The index of the run (in the run sequence) that the closing bracket is in.
+ end_run: usize,
+}
+/// 3.1.3 Identifying Bracket Pairs
+///
+/// Returns all paired brackets in the source, as indices into the
+/// text source.
+///
+/// <https://www.unicode.org/reports/tr9/#BD16>
+fn identify_bracket_pairs<D: BidiDataSource>(
+ text: &str,
+ data_source: &D,
+ run_sequence: &IsolatingRunSequence,
+ original_classes: &[BidiClass],
+) -> Vec<BracketPair> {
+ let mut ret = vec![];
+ let mut stack = vec![];
+
+ for (run_index, level_run) in run_sequence.runs.iter().enumerate() {
+ let slice = if let Some(slice) = text.get(level_run.clone()) {
+ slice
+ } else {
+ #[cfg(feature = "std")]
+ std::debug_assert!(
+ false,
+ "Found broken indices in level run: found indices {}..{} for string of length {}",
+ level_run.start,
+ level_run.end,
+ text.len()
+ );
+ return ret;
+ };
+
+ for (i, ch) in slice.char_indices() {
+ let actual_index = level_run.start + i;
+ // All paren characters are ON.
+ // From BidiBrackets.txt:
+ // > The Unicode property value stability policy guarantees that characters
+ // > which have bpt=o or bpt=c also have bc=ON and Bidi_M=Y
+ if original_classes[level_run.start + i] != BidiClass::ON {
+ continue;
+ }
+
+ if let Some(matched) = data_source.bidi_matched_opening_bracket(ch) {
+ if matched.is_open {
+ // > If an opening paired bracket is found ...
+
+ // > ... and there is no room in the stack,
+ // > stop processing BD16 for the remainder of the isolating run sequence.
+ if stack.len() >= 63 {
+ break;
+ }
+ // > ... push its Bidi_Paired_Bracket property value and its text position onto the stack
+ stack.push((matched.opening, actual_index, run_index))
+ } else {
+ // > If a closing paired bracket is found, do the following
+
+ // > Declare a variable that holds a reference to the current stack element
+ // > and initialize it with the top element of the stack.
+ // AND
+ // > Else, if the current stack element is not at the bottom of the stack
+ for (stack_index, element) in stack.iter().enumerate().rev() {
+ // > Compare the closing paired bracket being inspected or its canonical
+ // > equivalent to the bracket in the current stack element.
+ if element.0 == matched.opening {
+ // > If the values match, meaning the two characters form a bracket pair, then
+
+ // > Append the text position in the current stack element together with the
+ // > text position of the closing paired bracket to the list.
+ let pair = BracketPair {
+ start: element.1,
+ end: actual_index,
+ start_run: element.2,
+ end_run: run_index,
+ };
+ ret.push(pair);
+
+ // > Pop the stack through the current stack element inclusively.
+ stack.truncate(stack_index);
+ break;
+ }
+ }
+ }
+ }
+ }
+ }
+ // > Sort the list of pairs of text positions in ascending order based on
+ // > the text position of the opening paired bracket.
+ ret.sort_by_key(|r| r.start);
+ ret
+}
+
/// 3.3.6 Resolving Implicit Levels
///
/// Returns the maximum embedding level in the paragraph.
@@ -211,7 +568,6 @@ pub fn resolve_neutral(
#[cfg_attr(feature = "flame_it", flamer::flame)]
pub fn resolve_levels(original_classes: &[BidiClass], levels: &mut [Level]) -> Level {
let mut max_level = Level::ltr();
-
assert_eq!(original_classes.len(), levels.len());
for i in 0..levels.len() {
match (levels[i].is_rtl(), original_classes[i]) {
@@ -219,6 +575,7 @@ pub fn resolve_levels(original_classes: &[BidiClass], levels: &mut [Level]) -> L
(false, R) | (true, L) | (true, EN) | (true, AN) => {
levels[i].raise(1).expect("Level number error")
}
+ // <https://www.unicode.org/reports/tr9/#Retaining_Explicit_Formatting_Characters> handled here
(_, _) => {}
}
max_level = max(max_level, levels[i]);
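Taken together, the N0 handling added above reduces to one decision per bracket pair: scan the enclosed characters for a strong class matching the embedding direction (folding EN and AN into R), and otherwise fall back to the preceding strong context. The following is a minimal standalone sketch of that decision only, not the crate's code; `Class` and `n0_class_to_set` are hypothetical names, and sos handling is folded into the `prev_strong` argument.

```rust
// Illustrative sketch mirroring the N0 decision in the diff above.
// `Class` is a hypothetical stand-in for BidiClass.
#[derive(Clone, Copy, PartialEq, Debug)]
enum Class { L, R, EN, AN, ON }

/// `e` is the class matching the embedding direction, `enclosed` the classes
/// between the brackets, and `prev_strong` the first strong class found by
/// walking backwards from the opening bracket (or sos if none is found).
fn n0_class_to_set(e: Class, enclosed: &[Class], prev_strong: Class) -> Option<Class> {
    let as_strong = |c: Class| match c {
        Class::EN | Class::AN => Class::R, // EN and AN are treated as R in this scope
        other => other,
    };
    let not_e = if e == Class::L { Class::R } else { Class::L };

    let mut found_not_e = false;
    for &c in enclosed {
        let c = as_strong(c);
        if c == e {
            // A strong type matching the embedding direction wins immediately.
            return Some(e);
        }
        if c == not_e {
            found_not_e = true;
        }
    }
    if found_not_e {
        // Whether the preceding strong context matches the embedding direction
        // or opposes it, both spec branches set the brackets to that class.
        Some(as_strong(prev_strong))
    } else {
        // No strong types inside the pair: leave the brackets untouched.
        None
    }
}

fn main() {
    // LTR embedding, enclosed R, preceding strong L: brackets resolve to L.
    assert_eq!(
        n0_class_to_set(Class::L, &[Class::R], Class::L),
        Some(Class::L)
    );
}
```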
diff --git a/vendor/unicode-bidi/src/lib.rs b/vendor/unicode-bidi/src/lib.rs
index bda9dd8ec..81d4fb5f5 100644
--- a/vendor/unicode-bidi/src/lib.rs
+++ b/vendor/unicode-bidi/src/lib.rs
@@ -354,8 +354,15 @@ impl<'text> BidiInfo<'text> {
let sequences = prepare::isolating_run_sequences(para.level, original_classes, levels);
for sequence in &sequences {
- implicit::resolve_weak(sequence, processing_classes);
- implicit::resolve_neutral(sequence, levels, processing_classes);
+ implicit::resolve_weak(text, sequence, processing_classes);
+ implicit::resolve_neutral(
+ text,
+ data_source,
+ sequence,
+ levels,
+ original_classes,
+ processing_classes,
+ );
}
implicit::resolve_levels(processing_classes, levels);
@@ -411,6 +418,71 @@ impl<'text> BidiInfo<'text> {
result.into()
}
+ /// Reorders pre-calculated levels of a sequence of characters.
+ ///
+ /// NOTE: This is a convenience method that does not use a `Paragraph` object. It is
+ /// intended to be used when an application has determined the levels of the objects (character sequences)
+ /// and just needs to have them reordered.
+ ///
+ /// The index map will result in `indexMap[visualIndex]==logicalIndex`.
+ ///
+ /// # Example
+ /// ```
+ /// use unicode_bidi::BidiInfo;
+ /// use unicode_bidi::Level;
+ ///
+ /// let l0 = Level::from(0);
+ /// let l1 = Level::from(1);
+ /// let l2 = Level::from(2);
+ ///
+ /// let levels = vec![l0, l0, l0, l0];
+ /// let index_map = BidiInfo::reorder_visual(&levels);
+ /// assert_eq!(levels.len(), index_map.len());
+ /// assert_eq!(index_map, [0, 1, 2, 3]);
+ ///
+ /// let levels: Vec<Level> = vec![l0, l0, l0, l1, l1, l1, l2, l2];
+ /// let index_map = BidiInfo::reorder_visual(&levels);
+ /// assert_eq!(levels.len(), index_map.len());
+ /// assert_eq!(index_map, [0, 1, 2, 5, 4, 3, 6, 7]);
+ /// ```
+ pub fn reorder_visual(levels: &[Level]) -> Vec<usize> {
+ // Gets the next range
+ fn next_range(levels: &[level::Level], start_index: usize) -> Range<usize> {
+ if levels.is_empty() || start_index >= levels.len() {
+ return start_index..start_index;
+ }
+
+ let mut end_index = start_index + 1;
+ while end_index < levels.len() {
+ if levels[start_index] != levels[end_index] {
+ return start_index..end_index;
+ }
+ end_index += 1;
+ }
+
+ start_index..end_index
+ }
+
+ if levels.is_empty() {
+ return vec![];
+ }
+ let mut result: Vec<usize> = (0..levels.len()).collect();
+
+ let mut range: Range<usize> = 0..0;
+ loop {
+ range = next_range(levels, range.end);
+ if levels[range.start].is_rtl() {
+ result[range.clone()].reverse();
+ }
+
+ if range.end >= levels.len() {
+ break;
+ }
+ }
+
+ result
+ }
+
/// Find the level runs within a line and return them in visual order.
///
/// `line` is a range of bytes indices within `levels`.
@@ -434,10 +506,9 @@ impl<'text> BidiInfo<'text> {
let line_str: &str = &self.text[line.clone()];
let mut reset_from: Option<usize> = Some(0);
let mut reset_to: Option<usize> = None;
+ let mut prev_level = para.level;
for (i, c) in line_str.char_indices() {
match line_classes[i] {
- // Ignored by X9
- RLE | LRE | RLO | LRO | PDF | BN => {}
// Segment separator, Paragraph separator
B | S => {
assert_eq!(reset_to, None);
@@ -452,6 +523,15 @@ impl<'text> BidiInfo<'text> {
reset_from = Some(i);
}
}
+ // <https://www.unicode.org/reports/tr9/#Retaining_Explicit_Formatting_Characters>
+ // same as above + set the level
+ RLE | LRE | RLO | LRO | PDF | BN => {
+ if reset_from == None {
+ reset_from = Some(i);
+ }
+ // also set the level to previous
+ line_levels[i] = prev_level;
+ }
_ => {
reset_from = None;
}
@@ -463,6 +543,7 @@ impl<'text> BidiInfo<'text> {
reset_from = None;
reset_to = None;
}
+ prev_level = line_levels[i];
}
if let Some(from) = reset_from {
for level in &mut line_levels[from..] {
@@ -874,7 +955,7 @@ mod tests {
assert_eq!(reorder_paras("א(ב)ג."), vec![".ג)ב(א"]);
// With mirrorable characters on level boundary
- assert_eq!(reorder_paras("אב(גד[&ef].)gh"), vec!["ef].)gh&[דג(בא"]);
+ assert_eq!(reorder_paras("אב(גד[&ef].)gh"), vec!["gh).]ef&[דג(בא"]);
}
fn reordered_levels_for_paras(text: &str) -> Vec<Vec<Level>> {
@@ -1023,7 +1104,7 @@ mod tests {
}
}
-#[cfg(all(feature = "serde", test))]
+#[cfg(all(feature = "serde", feature = "hardcoded-data", test))]
mod serde_tests {
use super::*;
use serde_test::{assert_tokens, Token};
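For orientation, the `reorder_visual` method added above walks the level array once, cuts it into maximal runs of equal level, and reverses the index slice of each RTL run. Below is a minimal standalone sketch of that single-pass idea over plain `u8` levels (odd = RTL); `reorder_visual_sketch` is an illustrative name, not part of the crate.

```rust
// Minimal sketch of the run-reversal pass behind reorder_visual.
// Levels are plain u8 here; odd levels are treated as RTL, as in UAX #9.
fn reorder_visual_sketch(levels: &[u8]) -> Vec<usize> {
    let mut result: Vec<usize> = (0..levels.len()).collect();
    let mut start = 0;
    while start < levels.len() {
        // Find the maximal run of equal levels starting at `start`.
        let mut end = start + 1;
        while end < levels.len() && levels[end] == levels[start] {
            end += 1;
        }
        // Reverse the indices of RTL (odd-level) runs only.
        if levels[start] % 2 == 1 {
            result[start..end].reverse();
        }
        start = end;
    }
    result
}

fn main() {
    // Same shape as the doc example above: levels 0,0,0,1,1,1,2,2.
    assert_eq!(
        reorder_visual_sketch(&[0, 0, 0, 1, 1, 1, 2, 2]),
        vec![0, 1, 2, 5, 4, 3, 6, 7]
    );
}
```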
diff --git a/vendor/unicode-bidi/src/prepare.rs b/vendor/unicode-bidi/src/prepare.rs
index 7b952361a..21675e6d1 100644
--- a/vendor/unicode-bidi/src/prepare.rs
+++ b/vendor/unicode-bidi/src/prepare.rs
@@ -89,12 +89,35 @@ pub fn isolating_run_sequences(
.map(|sequence: Vec<LevelRun>| {
assert!(!sequence.is_empty());
- let start_of_seq = sequence[0].start;
- let end_of_seq = sequence[sequence.len() - 1].end;
- let seq_level = levels[start_of_seq];
+ let mut result = IsolatingRunSequence {
+ runs: sequence,
+ sos: L,
+ eos: L,
+ };
+
+ let start_of_seq = result.runs[0].start;
+ let runs_len = result.runs.len();
+ let end_of_seq = result.runs[runs_len - 1].end;
+
+ // > (not counting characters removed by X9)
+ let seq_level = result
+ .iter_forwards_from(start_of_seq, 0)
+ .filter(|i| not_removed_by_x9(&original_classes[*i]))
+ .map(|i| levels[i])
+ .next()
+ .unwrap_or(levels[start_of_seq]);
+
+ // XXXManishearth the spec talks of a start and end level,
+ // but for a given IRS the two should be equivalent, yes?
+ let end_level = result
+ .iter_backwards_from(end_of_seq, runs_len - 1)
+ .filter(|i| not_removed_by_x9(&original_classes[*i]))
+ .map(|i| levels[i])
+ .next()
+ .unwrap_or(levels[end_of_seq - 1]);
#[cfg(test)]
- for run in sequence.clone() {
+ for run in result.runs.clone() {
for idx in run {
if not_removed_by_x9(&original_classes[idx]) {
assert_eq!(seq_level, levels[idx]);
@@ -111,8 +134,19 @@ pub fn isolating_run_sequences(
None => para_level,
};
+ // Get the last non-removed character to check if it is an isolate initiator.
+ // The spec calls for an unmatched one, but matched isolate initiators
+ // will never be at the end of a level run (otherwise there would be more to the run).
+ // We unwrap_or(BN) because BN marks removed classes and it won't matter for the check.
+ let last_non_removed = original_classes[..end_of_seq]
+ .iter()
+ .copied()
+ .rev()
+ .find(not_removed_by_x9)
+ .unwrap_or(BN);
+
// Get the level of the next non-removed char after the runs.
- let succ_level = if let RLI | LRI | FSI = original_classes[end_of_seq - 1] {
+ let succ_level = if let RLI | LRI | FSI = last_non_removed {
para_level
} else {
match original_classes[end_of_seq..]
@@ -124,15 +158,63 @@ pub fn isolating_run_sequences(
}
};
- IsolatingRunSequence {
- runs: sequence,
- sos: max(seq_level, pred_level).bidi_class(),
- eos: max(seq_level, succ_level).bidi_class(),
- }
+ result.sos = max(seq_level, pred_level).bidi_class();
+ result.eos = max(end_level, succ_level).bidi_class();
+ result
})
.collect()
}
+impl IsolatingRunSequence {
+ /// Returns the full range of text represented by this isolating run sequence
+ pub(crate) fn text_range(&self) -> Range<usize> {
+ if let (Some(start), Some(end)) = (self.runs.first(), self.runs.last()) {
+ start.start..end.end
+ } else {
+ return 0..0;
+ }
+ }
+
+ /// Given a text-relative position `pos` and an index of the level run it is in,
+ /// produce an iterator of all characters at and after `pos` (`pos..`) that are in this
+ /// run sequence
+ pub(crate) fn iter_forwards_from(
+ &self,
+ pos: usize,
+ level_run_index: usize,
+ ) -> impl Iterator<Item = usize> + '_ {
+ let runs = &self.runs[level_run_index..];
+
+ // Check that it is in range
+ // (we can't use contains() since we want an inclusive range)
+ #[cfg(feature = "std")]
+ debug_assert!(runs[0].start <= pos && pos <= runs[0].end);
+
+ (pos..runs[0].end).chain(runs[1..].iter().flat_map(Clone::clone))
+ }
+
+ /// Given a text-relative position `pos` and an index of the level run it is in,
+ /// produce an iterator of all characters before and excluding `pos` (`..pos`) that are in this
+ /// run sequence
+ pub(crate) fn iter_backwards_from(
+ &self,
+ pos: usize,
+ level_run_index: usize,
+ ) -> impl Iterator<Item = usize> + '_ {
+ let prev_runs = &self.runs[..level_run_index];
+ let current = &self.runs[level_run_index];
+
+ // Check that it is in range
+ // (we can't use contains() since we want an inclusive range)
+ #[cfg(feature = "std")]
+ debug_assert!(current.start <= pos && pos <= current.end);
+
+ (current.start..pos)
+ .rev()
+ .chain(prev_runs.iter().rev().flat_map(Clone::clone))
+ }
+}
+
/// Finds the level runs in a paragraph.
///
/// <http://www.unicode.org/reports/tr9/#BD7>
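The two iteration helpers added to `IsolatingRunSequence` are easiest to see on a toy run list: walking forwards from a position continues into the later runs, and walking backwards steps through the earlier part of the current run and then into the preceding runs, skipping the text in between. A rough standalone sketch with plain `Range<usize>` runs follows (illustrative names and no bounds checking, not the crate's types).

```rust
use std::ops::Range;

// Standalone sketch of the iteration helpers above: given the level runs of an
// isolating run sequence, walk text indices from `pos` onwards (inclusive) or
// backwards from `pos` (exclusive), skipping the gaps between runs.
fn iter_forwards_from(
    runs: &[Range<usize>],
    pos: usize,
    run_index: usize,
) -> impl Iterator<Item = usize> + '_ {
    let runs = &runs[run_index..];
    (pos..runs[0].end).chain(runs[1..].iter().flat_map(Clone::clone))
}

fn iter_backwards_from(
    runs: &[Range<usize>],
    pos: usize,
    run_index: usize,
) -> impl Iterator<Item = usize> + '_ {
    let (prev, current) = (&runs[..run_index], &runs[run_index]);
    (current.start..pos)
        .rev()
        .chain(prev.iter().rev().flat_map(Clone::clone))
}

fn main() {
    // Two runs belong to the sequence: bytes 2..3 and 7..9; 3..7 is not part of it.
    let runs: Vec<Range<usize>> = vec![2..3, 7..9];
    let fwd: Vec<usize> = iter_forwards_from(&runs, 2, 0).collect();
    assert_eq!(fwd, vec![2, 7, 8]);
    let back: Vec<usize> = iter_backwards_from(&runs, 8, 1).collect();
    assert_eq!(back, vec![7, 2]);
}
```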
diff --git a/vendor/unicode-ident/.cargo-checksum.json b/vendor/unicode-ident/.cargo-checksum.json
index 1fcb95c86..776275d9e 100644
--- a/vendor/unicode-ident/.cargo-checksum.json
+++ b/vendor/unicode-ident/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.toml":"4589e7f695ce2ae3c0dbb7a79647d044b8f2ef71183bf478fe01922966c54556","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","LICENSE-UNICODE":"68f5b9f5ea36881a0942ba02f558e9e1faf76cc09cb165ad801744c61b738844","README.md":"3dc1793fcaf87c77c5ed467c4a76cb696883f2f4329c011a869fbd34c4404382","benches/xid.rs":"a61f61ecc7d5124c759cdeb55ab74470ab69f2f3ca37613da65f16e0e5e33487","src/lib.rs":"d0030259a628125669ad6c02d3eb791526e6d6ae35d8a858a87f90245162666c","src/tables.rs":"4a84cc7a1a391abebe5672db993c519b9f8fe462690d7e5a8bdd43be8481c10b","tests/compare.rs":"89c4dc4f745064a9f734667b1d960596a10b8cb019a8ed1c5b9512678a866ad5","tests/fst/mod.rs":"69a3aaf59acd8bca962ecc6234be56be8c0934ab79b253162f10eb881523901f","tests/fst/xid_continue.fst":"0624500413ac318fee8424eecdad70397f911e3beae52231bfca295bb1bb9e04","tests/fst/xid_start.fst":"cc36f4f1149a4004ea7e2075cfb54756328b571946fda526be508cf5ed53dbdb","tests/roaring/mod.rs":"784f65a48477fab7549620c7843c7ad6da533f69a18abca1172f6acb95045e53","tests/static_size.rs":"6686edc08a6718cb4be03916b87a2594a2d2f2c779dbac6372fd27d5d7f7d8b6","tests/trie/mod.rs":"d4acbb716bcbaf80660039797f45e138ed8bbd66749fa3b19b1a971574679cc9","tests/trie/trie.rs":"dbd7de5fe601159643a4c6febed06793f812e8d71010b0ec78f2557353a976b2"},"package":"6ceab39d59e4c9499d4e5a8ee0e2735b891bb7308ac83dfb4e80cad195c9f6f3"} \ No newline at end of file
+{"files":{"Cargo.toml":"29d1c02ffc7c1f34067b04bcc6b38ec41918838d553176d805644d8c03ab3f62","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","LICENSE-UNICODE":"68f5b9f5ea36881a0942ba02f558e9e1faf76cc09cb165ad801744c61b738844","README.md":"4e3b8b33ce66d038b932b0824e9c5f459893c2004ce68065fb8b68c98692d298","benches/xid.rs":"a61f61ecc7d5124c759cdeb55ab74470ab69f2f3ca37613da65f16e0e5e33487","src/lib.rs":"d0030259a628125669ad6c02d3eb791526e6d6ae35d8a858a87f90245162666c","src/tables.rs":"b4609d6c2e2ba44fba8cdbcec271325ff196afba8001dee805be95424219f01b","tests/compare.rs":"89c4dc4f745064a9f734667b1d960596a10b8cb019a8ed1c5b9512678a866ad5","tests/fst/mod.rs":"69a3aaf59acd8bca962ecc6234be56be8c0934ab79b253162f10eb881523901f","tests/fst/xid_continue.fst":"0624500413ac318fee8424eecdad70397f911e3beae52231bfca295bb1bb9e04","tests/fst/xid_start.fst":"cc36f4f1149a4004ea7e2075cfb54756328b571946fda526be508cf5ed53dbdb","tests/roaring/mod.rs":"784f65a48477fab7549620c7843c7ad6da533f69a18abca1172f6acb95045e53","tests/static_size.rs":"f1275c2bc59e85b5a7c2ce500b50c00a479d314b925538083917dc001c41b187","tests/tables/mod.rs":"e6949172d10fc4b2431ce7546269bfd4f9146454c8c3e31faf5e5d80c16a8ab6","tests/tables/tables.rs":"5194ac98137a3b61322213f2f8e8b83ff925ffcdd79e93a2ec414ef944dc63a3","tests/trie/mod.rs":"d4acbb716bcbaf80660039797f45e138ed8bbd66749fa3b19b1a971574679cc9","tests/trie/trie.rs":"dbd7de5fe601159643a4c6febed06793f812e8d71010b0ec78f2557353a976b2"},"package":"84a22b9f218b40614adcb3f4ff08b703773ad44fa9423e4e0d346d5db86e4ebc"} \ No newline at end of file
diff --git a/vendor/unicode-ident/Cargo.toml b/vendor/unicode-ident/Cargo.toml
index 03e1871fc..735a4ff43 100644
--- a/vendor/unicode-ident/Cargo.toml
+++ b/vendor/unicode-ident/Cargo.toml
@@ -13,7 +13,7 @@
edition = "2018"
rust-version = "1.31"
name = "unicode-ident"
-version = "1.0.5"
+version = "1.0.6"
authors = ["David Tolnay <dtolnay@gmail.com>"]
description = "Determine whether characters have the XID_Start or XID_Continue properties according to Unicode Standard Annex #31"
documentation = "https://docs.rs/unicode-ident"
diff --git a/vendor/unicode-ident/README.md b/vendor/unicode-ident/README.md
index 6c590b06c..dfb943bfe 100644
--- a/vendor/unicode-ident/README.md
+++ b/vendor/unicode-ident/README.md
@@ -4,7 +4,7 @@ Unicode ident
[<img alt="github" src="https://img.shields.io/badge/github-dtolnay/unicode--ident-8da0cb?style=for-the-badge&labelColor=555555&logo=github" height="20">](https://github.com/dtolnay/unicode-ident)
[<img alt="crates.io" src="https://img.shields.io/crates/v/unicode-ident.svg?style=for-the-badge&color=fc8d62&logo=rust" height="20">](https://crates.io/crates/unicode-ident)
[<img alt="docs.rs" src="https://img.shields.io/badge/docs.rs-unicode--ident-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs" height="20">](https://docs.rs/unicode-ident)
-[<img alt="build status" src="https://img.shields.io/github/workflow/status/dtolnay/unicode-ident/CI/master?style=for-the-badge" height="20">](https://github.com/dtolnay/unicode-ident/actions?query=branch%3Amaster)
+[<img alt="build status" src="https://img.shields.io/github/actions/workflow/status/dtolnay/unicode-ident/ci.yml?branch=master&style=for-the-badge" height="20">](https://github.com/dtolnay/unicode-ident/actions?query=branch%3Amaster)
Implementation of [Unicode Standard Annex #31][tr31] for determining which
`char` values are valid in programming language identifiers.
diff --git a/vendor/unicode-ident/src/tables.rs b/vendor/unicode-ident/src/tables.rs
index 380c798d1..b355f3096 100644
--- a/vendor/unicode-ident/src/tables.rs
+++ b/vendor/unicode-ident/src/tables.rs
@@ -1,6 +1,6 @@
// @generated by ../generate. To regenerate, run the following in the repo root:
//
-// $ curl -LO https://www.unicode.org/Public/zipped/14.0.0/UCD.zip
+// $ curl -LO https://www.unicode.org/Public/zipped/15.0.0/UCD.zip
// $ unzip UCD.zip -d UCD
// $ cargo run --manifest-path generate/Cargo.toml
diff --git a/vendor/unicode-ident/tests/static_size.rs b/vendor/unicode-ident/tests/static_size.rs
index df65f45dd..24effb489 100644
--- a/vendor/unicode-ident/tests/static_size.rs
+++ b/vendor/unicode-ident/tests/static_size.rs
@@ -19,14 +19,13 @@ fn test_size() {
#[test]
fn test_xid_size() {
#[deny(dead_code)]
- #[allow(clippy::redundant_static_lifetimes)]
- #[path = "../generate/src/ucd.rs"]
- mod ucd;
+ #[path = "tables/mod.rs"]
+ mod tables;
- let size = size_of_val(ucd::XID_START) + size_of_val(ucd::XID_CONTINUE);
+ let size = size_of_val(tables::XID_START) + size_of_val(tables::XID_CONTINUE);
assert_eq!(11528, size);
- let _ = ucd::BY_NAME;
+ let _ = tables::BY_NAME;
}
#[cfg(target_pointer_width = "64")]
diff --git a/vendor/unicode-ident/tests/tables/mod.rs b/vendor/unicode-ident/tests/tables/mod.rs
new file mode 100644
index 000000000..72bfd8bd7
--- /dev/null
+++ b/vendor/unicode-ident/tests/tables/mod.rs
@@ -0,0 +1,7 @@
+#![allow(clippy::module_inception)]
+
+#[allow(clippy::redundant_static_lifetimes)]
+#[rustfmt::skip]
+mod tables;
+
+pub(crate) use self::tables::*;
diff --git a/vendor/unicode-ident/tests/tables/tables.rs b/vendor/unicode-ident/tests/tables/tables.rs
new file mode 100644
index 000000000..30aeee973
--- /dev/null
+++ b/vendor/unicode-ident/tests/tables/tables.rs
@@ -0,0 +1,347 @@
+// DO NOT EDIT THIS FILE. IT WAS AUTOMATICALLY GENERATED BY:
+//
+// ucd-generate property-bool UCD --include XID_Start,XID_Continue
+//
+// Unicode version: 15.0.0.
+//
+// ucd-generate 0.2.13 is available on crates.io.
+
+pub const BY_NAME: &'static [(&'static str, &'static [(u32, u32)])] = &[
+ ("XID_Continue", XID_CONTINUE), ("XID_Start", XID_START),
+];
+
+pub const XID_CONTINUE: &'static [(u32, u32)] = &[
+ (48, 57), (65, 90), (95, 95), (97, 122), (170, 170), (181, 181), (183, 183),
+ (186, 186), (192, 214), (216, 246), (248, 705), (710, 721), (736, 740),
+ (748, 748), (750, 750), (768, 884), (886, 887), (891, 893), (895, 895),
+ (902, 906), (908, 908), (910, 929), (931, 1013), (1015, 1153), (1155, 1159),
+ (1162, 1327), (1329, 1366), (1369, 1369), (1376, 1416), (1425, 1469),
+ (1471, 1471), (1473, 1474), (1476, 1477), (1479, 1479), (1488, 1514),
+ (1519, 1522), (1552, 1562), (1568, 1641), (1646, 1747), (1749, 1756),
+ (1759, 1768), (1770, 1788), (1791, 1791), (1808, 1866), (1869, 1969),
+ (1984, 2037), (2042, 2042), (2045, 2045), (2048, 2093), (2112, 2139),
+ (2144, 2154), (2160, 2183), (2185, 2190), (2200, 2273), (2275, 2403),
+ (2406, 2415), (2417, 2435), (2437, 2444), (2447, 2448), (2451, 2472),
+ (2474, 2480), (2482, 2482), (2486, 2489), (2492, 2500), (2503, 2504),
+ (2507, 2510), (2519, 2519), (2524, 2525), (2527, 2531), (2534, 2545),
+ (2556, 2556), (2558, 2558), (2561, 2563), (2565, 2570), (2575, 2576),
+ (2579, 2600), (2602, 2608), (2610, 2611), (2613, 2614), (2616, 2617),
+ (2620, 2620), (2622, 2626), (2631, 2632), (2635, 2637), (2641, 2641),
+ (2649, 2652), (2654, 2654), (2662, 2677), (2689, 2691), (2693, 2701),
+ (2703, 2705), (2707, 2728), (2730, 2736), (2738, 2739), (2741, 2745),
+ (2748, 2757), (2759, 2761), (2763, 2765), (2768, 2768), (2784, 2787),
+ (2790, 2799), (2809, 2815), (2817, 2819), (2821, 2828), (2831, 2832),
+ (2835, 2856), (2858, 2864), (2866, 2867), (2869, 2873), (2876, 2884),
+ (2887, 2888), (2891, 2893), (2901, 2903), (2908, 2909), (2911, 2915),
+ (2918, 2927), (2929, 2929), (2946, 2947), (2949, 2954), (2958, 2960),
+ (2962, 2965), (2969, 2970), (2972, 2972), (2974, 2975), (2979, 2980),
+ (2984, 2986), (2990, 3001), (3006, 3010), (3014, 3016), (3018, 3021),
+ (3024, 3024), (3031, 3031), (3046, 3055), (3072, 3084), (3086, 3088),
+ (3090, 3112), (3114, 3129), (3132, 3140), (3142, 3144), (3146, 3149),
+ (3157, 3158), (3160, 3162), (3165, 3165), (3168, 3171), (3174, 3183),
+ (3200, 3203), (3205, 3212), (3214, 3216), (3218, 3240), (3242, 3251),
+ (3253, 3257), (3260, 3268), (3270, 3272), (3274, 3277), (3285, 3286),
+ (3293, 3294), (3296, 3299), (3302, 3311), (3313, 3315), (3328, 3340),
+ (3342, 3344), (3346, 3396), (3398, 3400), (3402, 3406), (3412, 3415),
+ (3423, 3427), (3430, 3439), (3450, 3455), (3457, 3459), (3461, 3478),
+ (3482, 3505), (3507, 3515), (3517, 3517), (3520, 3526), (3530, 3530),
+ (3535, 3540), (3542, 3542), (3544, 3551), (3558, 3567), (3570, 3571),
+ (3585, 3642), (3648, 3662), (3664, 3673), (3713, 3714), (3716, 3716),
+ (3718, 3722), (3724, 3747), (3749, 3749), (3751, 3773), (3776, 3780),
+ (3782, 3782), (3784, 3790), (3792, 3801), (3804, 3807), (3840, 3840),
+ (3864, 3865), (3872, 3881), (3893, 3893), (3895, 3895), (3897, 3897),
+ (3902, 3911), (3913, 3948), (3953, 3972), (3974, 3991), (3993, 4028),
+ (4038, 4038), (4096, 4169), (4176, 4253), (4256, 4293), (4295, 4295),
+ (4301, 4301), (4304, 4346), (4348, 4680), (4682, 4685), (4688, 4694),
+ (4696, 4696), (4698, 4701), (4704, 4744), (4746, 4749), (4752, 4784),
+ (4786, 4789), (4792, 4798), (4800, 4800), (4802, 4805), (4808, 4822),
+ (4824, 4880), (4882, 4885), (4888, 4954), (4957, 4959), (4969, 4977),
+ (4992, 5007), (5024, 5109), (5112, 5117), (5121, 5740), (5743, 5759),
+ (5761, 5786), (5792, 5866), (5870, 5880), (5888, 5909), (5919, 5940),
+ (5952, 5971), (5984, 5996), (5998, 6000), (6002, 6003), (6016, 6099),
+ (6103, 6103), (6108, 6109), (6112, 6121), (6155, 6157), (6159, 6169),
+ (6176, 6264), (6272, 6314), (6320, 6389), (6400, 6430), (6432, 6443),
+ (6448, 6459), (6470, 6509), (6512, 6516), (6528, 6571), (6576, 6601),
+ (6608, 6618), (6656, 6683), (6688, 6750), (6752, 6780), (6783, 6793),
+ (6800, 6809), (6823, 6823), (6832, 6845), (6847, 6862), (6912, 6988),
+ (6992, 7001), (7019, 7027), (7040, 7155), (7168, 7223), (7232, 7241),
+ (7245, 7293), (7296, 7304), (7312, 7354), (7357, 7359), (7376, 7378),
+ (7380, 7418), (7424, 7957), (7960, 7965), (7968, 8005), (8008, 8013),
+ (8016, 8023), (8025, 8025), (8027, 8027), (8029, 8029), (8031, 8061),
+ (8064, 8116), (8118, 8124), (8126, 8126), (8130, 8132), (8134, 8140),
+ (8144, 8147), (8150, 8155), (8160, 8172), (8178, 8180), (8182, 8188),
+ (8255, 8256), (8276, 8276), (8305, 8305), (8319, 8319), (8336, 8348),
+ (8400, 8412), (8417, 8417), (8421, 8432), (8450, 8450), (8455, 8455),
+ (8458, 8467), (8469, 8469), (8472, 8477), (8484, 8484), (8486, 8486),
+ (8488, 8488), (8490, 8505), (8508, 8511), (8517, 8521), (8526, 8526),
+ (8544, 8584), (11264, 11492), (11499, 11507), (11520, 11557),
+ (11559, 11559), (11565, 11565), (11568, 11623), (11631, 11631),
+ (11647, 11670), (11680, 11686), (11688, 11694), (11696, 11702),
+ (11704, 11710), (11712, 11718), (11720, 11726), (11728, 11734),
+ (11736, 11742), (11744, 11775), (12293, 12295), (12321, 12335),
+ (12337, 12341), (12344, 12348), (12353, 12438), (12441, 12442),
+ (12445, 12447), (12449, 12538), (12540, 12543), (12549, 12591),
+ (12593, 12686), (12704, 12735), (12784, 12799), (13312, 19903),
+ (19968, 42124), (42192, 42237), (42240, 42508), (42512, 42539),
+ (42560, 42607), (42612, 42621), (42623, 42737), (42775, 42783),
+ (42786, 42888), (42891, 42954), (42960, 42961), (42963, 42963),
+ (42965, 42969), (42994, 43047), (43052, 43052), (43072, 43123),
+ (43136, 43205), (43216, 43225), (43232, 43255), (43259, 43259),
+ (43261, 43309), (43312, 43347), (43360, 43388), (43392, 43456),
+ (43471, 43481), (43488, 43518), (43520, 43574), (43584, 43597),
+ (43600, 43609), (43616, 43638), (43642, 43714), (43739, 43741),
+ (43744, 43759), (43762, 43766), (43777, 43782), (43785, 43790),
+ (43793, 43798), (43808, 43814), (43816, 43822), (43824, 43866),
+ (43868, 43881), (43888, 44010), (44012, 44013), (44016, 44025),
+ (44032, 55203), (55216, 55238), (55243, 55291), (63744, 64109),
+ (64112, 64217), (64256, 64262), (64275, 64279), (64285, 64296),
+ (64298, 64310), (64312, 64316), (64318, 64318), (64320, 64321),
+ (64323, 64324), (64326, 64433), (64467, 64605), (64612, 64829),
+ (64848, 64911), (64914, 64967), (65008, 65017), (65024, 65039),
+ (65056, 65071), (65075, 65076), (65101, 65103), (65137, 65137),
+ (65139, 65139), (65143, 65143), (65145, 65145), (65147, 65147),
+ (65149, 65149), (65151, 65276), (65296, 65305), (65313, 65338),
+ (65343, 65343), (65345, 65370), (65382, 65470), (65474, 65479),
+ (65482, 65487), (65490, 65495), (65498, 65500), (65536, 65547),
+ (65549, 65574), (65576, 65594), (65596, 65597), (65599, 65613),
+ (65616, 65629), (65664, 65786), (65856, 65908), (66045, 66045),
+ (66176, 66204), (66208, 66256), (66272, 66272), (66304, 66335),
+ (66349, 66378), (66384, 66426), (66432, 66461), (66464, 66499),
+ (66504, 66511), (66513, 66517), (66560, 66717), (66720, 66729),
+ (66736, 66771), (66776, 66811), (66816, 66855), (66864, 66915),
+ (66928, 66938), (66940, 66954), (66956, 66962), (66964, 66965),
+ (66967, 66977), (66979, 66993), (66995, 67001), (67003, 67004),
+ (67072, 67382), (67392, 67413), (67424, 67431), (67456, 67461),
+ (67463, 67504), (67506, 67514), (67584, 67589), (67592, 67592),
+ (67594, 67637), (67639, 67640), (67644, 67644), (67647, 67669),
+ (67680, 67702), (67712, 67742), (67808, 67826), (67828, 67829),
+ (67840, 67861), (67872, 67897), (67968, 68023), (68030, 68031),
+ (68096, 68099), (68101, 68102), (68108, 68115), (68117, 68119),
+ (68121, 68149), (68152, 68154), (68159, 68159), (68192, 68220),
+ (68224, 68252), (68288, 68295), (68297, 68326), (68352, 68405),
+ (68416, 68437), (68448, 68466), (68480, 68497), (68608, 68680),
+ (68736, 68786), (68800, 68850), (68864, 68903), (68912, 68921),
+ (69248, 69289), (69291, 69292), (69296, 69297), (69373, 69404),
+ (69415, 69415), (69424, 69456), (69488, 69509), (69552, 69572),
+ (69600, 69622), (69632, 69702), (69734, 69749), (69759, 69818),
+ (69826, 69826), (69840, 69864), (69872, 69881), (69888, 69940),
+ (69942, 69951), (69956, 69959), (69968, 70003), (70006, 70006),
+ (70016, 70084), (70089, 70092), (70094, 70106), (70108, 70108),
+ (70144, 70161), (70163, 70199), (70206, 70209), (70272, 70278),
+ (70280, 70280), (70282, 70285), (70287, 70301), (70303, 70312),
+ (70320, 70378), (70384, 70393), (70400, 70403), (70405, 70412),
+ (70415, 70416), (70419, 70440), (70442, 70448), (70450, 70451),
+ (70453, 70457), (70459, 70468), (70471, 70472), (70475, 70477),
+ (70480, 70480), (70487, 70487), (70493, 70499), (70502, 70508),
+ (70512, 70516), (70656, 70730), (70736, 70745), (70750, 70753),
+ (70784, 70853), (70855, 70855), (70864, 70873), (71040, 71093),
+ (71096, 71104), (71128, 71133), (71168, 71232), (71236, 71236),
+ (71248, 71257), (71296, 71352), (71360, 71369), (71424, 71450),
+ (71453, 71467), (71472, 71481), (71488, 71494), (71680, 71738),
+ (71840, 71913), (71935, 71942), (71945, 71945), (71948, 71955),
+ (71957, 71958), (71960, 71989), (71991, 71992), (71995, 72003),
+ (72016, 72025), (72096, 72103), (72106, 72151), (72154, 72161),
+ (72163, 72164), (72192, 72254), (72263, 72263), (72272, 72345),
+ (72349, 72349), (72368, 72440), (72704, 72712), (72714, 72758),
+ (72760, 72768), (72784, 72793), (72818, 72847), (72850, 72871),
+ (72873, 72886), (72960, 72966), (72968, 72969), (72971, 73014),
+ (73018, 73018), (73020, 73021), (73023, 73031), (73040, 73049),
+ (73056, 73061), (73063, 73064), (73066, 73102), (73104, 73105),
+ (73107, 73112), (73120, 73129), (73440, 73462), (73472, 73488),
+ (73490, 73530), (73534, 73538), (73552, 73561), (73648, 73648),
+ (73728, 74649), (74752, 74862), (74880, 75075), (77712, 77808),
+ (77824, 78895), (78912, 78933), (82944, 83526), (92160, 92728),
+ (92736, 92766), (92768, 92777), (92784, 92862), (92864, 92873),
+ (92880, 92909), (92912, 92916), (92928, 92982), (92992, 92995),
+ (93008, 93017), (93027, 93047), (93053, 93071), (93760, 93823),
+ (93952, 94026), (94031, 94087), (94095, 94111), (94176, 94177),
+ (94179, 94180), (94192, 94193), (94208, 100343), (100352, 101589),
+ (101632, 101640), (110576, 110579), (110581, 110587), (110589, 110590),
+ (110592, 110882), (110898, 110898), (110928, 110930), (110933, 110933),
+ (110948, 110951), (110960, 111355), (113664, 113770), (113776, 113788),
+ (113792, 113800), (113808, 113817), (113821, 113822), (118528, 118573),
+ (118576, 118598), (119141, 119145), (119149, 119154), (119163, 119170),
+ (119173, 119179), (119210, 119213), (119362, 119364), (119808, 119892),
+ (119894, 119964), (119966, 119967), (119970, 119970), (119973, 119974),
+ (119977, 119980), (119982, 119993), (119995, 119995), (119997, 120003),
+ (120005, 120069), (120071, 120074), (120077, 120084), (120086, 120092),
+ (120094, 120121), (120123, 120126), (120128, 120132), (120134, 120134),
+ (120138, 120144), (120146, 120485), (120488, 120512), (120514, 120538),
+ (120540, 120570), (120572, 120596), (120598, 120628), (120630, 120654),
+ (120656, 120686), (120688, 120712), (120714, 120744), (120746, 120770),
+ (120772, 120779), (120782, 120831), (121344, 121398), (121403, 121452),
+ (121461, 121461), (121476, 121476), (121499, 121503), (121505, 121519),
+ (122624, 122654), (122661, 122666), (122880, 122886), (122888, 122904),
+ (122907, 122913), (122915, 122916), (122918, 122922), (122928, 122989),
+ (123023, 123023), (123136, 123180), (123184, 123197), (123200, 123209),
+ (123214, 123214), (123536, 123566), (123584, 123641), (124112, 124153),
+ (124896, 124902), (124904, 124907), (124909, 124910), (124912, 124926),
+ (124928, 125124), (125136, 125142), (125184, 125259), (125264, 125273),
+ (126464, 126467), (126469, 126495), (126497, 126498), (126500, 126500),
+ (126503, 126503), (126505, 126514), (126516, 126519), (126521, 126521),
+ (126523, 126523), (126530, 126530), (126535, 126535), (126537, 126537),
+ (126539, 126539), (126541, 126543), (126545, 126546), (126548, 126548),
+ (126551, 126551), (126553, 126553), (126555, 126555), (126557, 126557),
+ (126559, 126559), (126561, 126562), (126564, 126564), (126567, 126570),
+ (126572, 126578), (126580, 126583), (126585, 126588), (126590, 126590),
+ (126592, 126601), (126603, 126619), (126625, 126627), (126629, 126633),
+ (126635, 126651), (130032, 130041), (131072, 173791), (173824, 177977),
+ (177984, 178205), (178208, 183969), (183984, 191456), (194560, 195101),
+ (196608, 201546), (201552, 205743), (917760, 917999),
+];
+
+pub const XID_START: &'static [(u32, u32)] = &[
+ (65, 90), (97, 122), (170, 170), (181, 181), (186, 186), (192, 214),
+ (216, 246), (248, 705), (710, 721), (736, 740), (748, 748), (750, 750),
+ (880, 884), (886, 887), (891, 893), (895, 895), (902, 902), (904, 906),
+ (908, 908), (910, 929), (931, 1013), (1015, 1153), (1162, 1327),
+ (1329, 1366), (1369, 1369), (1376, 1416), (1488, 1514), (1519, 1522),
+ (1568, 1610), (1646, 1647), (1649, 1747), (1749, 1749), (1765, 1766),
+ (1774, 1775), (1786, 1788), (1791, 1791), (1808, 1808), (1810, 1839),
+ (1869, 1957), (1969, 1969), (1994, 2026), (2036, 2037), (2042, 2042),
+ (2048, 2069), (2074, 2074), (2084, 2084), (2088, 2088), (2112, 2136),
+ (2144, 2154), (2160, 2183), (2185, 2190), (2208, 2249), (2308, 2361),
+ (2365, 2365), (2384, 2384), (2392, 2401), (2417, 2432), (2437, 2444),
+ (2447, 2448), (2451, 2472), (2474, 2480), (2482, 2482), (2486, 2489),
+ (2493, 2493), (2510, 2510), (2524, 2525), (2527, 2529), (2544, 2545),
+ (2556, 2556), (2565, 2570), (2575, 2576), (2579, 2600), (2602, 2608),
+ (2610, 2611), (2613, 2614), (2616, 2617), (2649, 2652), (2654, 2654),
+ (2674, 2676), (2693, 2701), (2703, 2705), (2707, 2728), (2730, 2736),
+ (2738, 2739), (2741, 2745), (2749, 2749), (2768, 2768), (2784, 2785),
+ (2809, 2809), (2821, 2828), (2831, 2832), (2835, 2856), (2858, 2864),
+ (2866, 2867), (2869, 2873), (2877, 2877), (2908, 2909), (2911, 2913),
+ (2929, 2929), (2947, 2947), (2949, 2954), (2958, 2960), (2962, 2965),
+ (2969, 2970), (2972, 2972), (2974, 2975), (2979, 2980), (2984, 2986),
+ (2990, 3001), (3024, 3024), (3077, 3084), (3086, 3088), (3090, 3112),
+ (3114, 3129), (3133, 3133), (3160, 3162), (3165, 3165), (3168, 3169),
+ (3200, 3200), (3205, 3212), (3214, 3216), (3218, 3240), (3242, 3251),
+ (3253, 3257), (3261, 3261), (3293, 3294), (3296, 3297), (3313, 3314),
+ (3332, 3340), (3342, 3344), (3346, 3386), (3389, 3389), (3406, 3406),
+ (3412, 3414), (3423, 3425), (3450, 3455), (3461, 3478), (3482, 3505),
+ (3507, 3515), (3517, 3517), (3520, 3526), (3585, 3632), (3634, 3634),
+ (3648, 3654), (3713, 3714), (3716, 3716), (3718, 3722), (3724, 3747),
+ (3749, 3749), (3751, 3760), (3762, 3762), (3773, 3773), (3776, 3780),
+ (3782, 3782), (3804, 3807), (3840, 3840), (3904, 3911), (3913, 3948),
+ (3976, 3980), (4096, 4138), (4159, 4159), (4176, 4181), (4186, 4189),
+ (4193, 4193), (4197, 4198), (4206, 4208), (4213, 4225), (4238, 4238),
+ (4256, 4293), (4295, 4295), (4301, 4301), (4304, 4346), (4348, 4680),
+ (4682, 4685), (4688, 4694), (4696, 4696), (4698, 4701), (4704, 4744),
+ (4746, 4749), (4752, 4784), (4786, 4789), (4792, 4798), (4800, 4800),
+ (4802, 4805), (4808, 4822), (4824, 4880), (4882, 4885), (4888, 4954),
+ (4992, 5007), (5024, 5109), (5112, 5117), (5121, 5740), (5743, 5759),
+ (5761, 5786), (5792, 5866), (5870, 5880), (5888, 5905), (5919, 5937),
+ (5952, 5969), (5984, 5996), (5998, 6000), (6016, 6067), (6103, 6103),
+ (6108, 6108), (6176, 6264), (6272, 6312), (6314, 6314), (6320, 6389),
+ (6400, 6430), (6480, 6509), (6512, 6516), (6528, 6571), (6576, 6601),
+ (6656, 6678), (6688, 6740), (6823, 6823), (6917, 6963), (6981, 6988),
+ (7043, 7072), (7086, 7087), (7098, 7141), (7168, 7203), (7245, 7247),
+ (7258, 7293), (7296, 7304), (7312, 7354), (7357, 7359), (7401, 7404),
+ (7406, 7411), (7413, 7414), (7418, 7418), (7424, 7615), (7680, 7957),
+ (7960, 7965), (7968, 8005), (8008, 8013), (8016, 8023), (8025, 8025),
+ (8027, 8027), (8029, 8029), (8031, 8061), (8064, 8116), (8118, 8124),
+ (8126, 8126), (8130, 8132), (8134, 8140), (8144, 8147), (8150, 8155),
+ (8160, 8172), (8178, 8180), (8182, 8188), (8305, 8305), (8319, 8319),
+ (8336, 8348), (8450, 8450), (8455, 8455), (8458, 8467), (8469, 8469),
+ (8472, 8477), (8484, 8484), (8486, 8486), (8488, 8488), (8490, 8505),
+ (8508, 8511), (8517, 8521), (8526, 8526), (8544, 8584), (11264, 11492),
+ (11499, 11502), (11506, 11507), (11520, 11557), (11559, 11559),
+ (11565, 11565), (11568, 11623), (11631, 11631), (11648, 11670),
+ (11680, 11686), (11688, 11694), (11696, 11702), (11704, 11710),
+ (11712, 11718), (11720, 11726), (11728, 11734), (11736, 11742),
+ (12293, 12295), (12321, 12329), (12337, 12341), (12344, 12348),
+ (12353, 12438), (12445, 12447), (12449, 12538), (12540, 12543),
+ (12549, 12591), (12593, 12686), (12704, 12735), (12784, 12799),
+ (13312, 19903), (19968, 42124), (42192, 42237), (42240, 42508),
+ (42512, 42527), (42538, 42539), (42560, 42606), (42623, 42653),
+ (42656, 42735), (42775, 42783), (42786, 42888), (42891, 42954),
+ (42960, 42961), (42963, 42963), (42965, 42969), (42994, 43009),
+ (43011, 43013), (43015, 43018), (43020, 43042), (43072, 43123),
+ (43138, 43187), (43250, 43255), (43259, 43259), (43261, 43262),
+ (43274, 43301), (43312, 43334), (43360, 43388), (43396, 43442),
+ (43471, 43471), (43488, 43492), (43494, 43503), (43514, 43518),
+ (43520, 43560), (43584, 43586), (43588, 43595), (43616, 43638),
+ (43642, 43642), (43646, 43695), (43697, 43697), (43701, 43702),
+ (43705, 43709), (43712, 43712), (43714, 43714), (43739, 43741),
+ (43744, 43754), (43762, 43764), (43777, 43782), (43785, 43790),
+ (43793, 43798), (43808, 43814), (43816, 43822), (43824, 43866),
+ (43868, 43881), (43888, 44002), (44032, 55203), (55216, 55238),
+ (55243, 55291), (63744, 64109), (64112, 64217), (64256, 64262),
+ (64275, 64279), (64285, 64285), (64287, 64296), (64298, 64310),
+ (64312, 64316), (64318, 64318), (64320, 64321), (64323, 64324),
+ (64326, 64433), (64467, 64605), (64612, 64829), (64848, 64911),
+ (64914, 64967), (65008, 65017), (65137, 65137), (65139, 65139),
+ (65143, 65143), (65145, 65145), (65147, 65147), (65149, 65149),
+ (65151, 65276), (65313, 65338), (65345, 65370), (65382, 65437),
+ (65440, 65470), (65474, 65479), (65482, 65487), (65490, 65495),
+ (65498, 65500), (65536, 65547), (65549, 65574), (65576, 65594),
+ (65596, 65597), (65599, 65613), (65616, 65629), (65664, 65786),
+ (65856, 65908), (66176, 66204), (66208, 66256), (66304, 66335),
+ (66349, 66378), (66384, 66421), (66432, 66461), (66464, 66499),
+ (66504, 66511), (66513, 66517), (66560, 66717), (66736, 66771),
+ (66776, 66811), (66816, 66855), (66864, 66915), (66928, 66938),
+ (66940, 66954), (66956, 66962), (66964, 66965), (66967, 66977),
+ (66979, 66993), (66995, 67001), (67003, 67004), (67072, 67382),
+ (67392, 67413), (67424, 67431), (67456, 67461), (67463, 67504),
+ (67506, 67514), (67584, 67589), (67592, 67592), (67594, 67637),
+ (67639, 67640), (67644, 67644), (67647, 67669), (67680, 67702),
+ (67712, 67742), (67808, 67826), (67828, 67829), (67840, 67861),
+ (67872, 67897), (67968, 68023), (68030, 68031), (68096, 68096),
+ (68112, 68115), (68117, 68119), (68121, 68149), (68192, 68220),
+ (68224, 68252), (68288, 68295), (68297, 68324), (68352, 68405),
+ (68416, 68437), (68448, 68466), (68480, 68497), (68608, 68680),
+ (68736, 68786), (68800, 68850), (68864, 68899), (69248, 69289),
+ (69296, 69297), (69376, 69404), (69415, 69415), (69424, 69445),
+ (69488, 69505), (69552, 69572), (69600, 69622), (69635, 69687),
+ (69745, 69746), (69749, 69749), (69763, 69807), (69840, 69864),
+ (69891, 69926), (69956, 69956), (69959, 69959), (69968, 70002),
+ (70006, 70006), (70019, 70066), (70081, 70084), (70106, 70106),
+ (70108, 70108), (70144, 70161), (70163, 70187), (70207, 70208),
+ (70272, 70278), (70280, 70280), (70282, 70285), (70287, 70301),
+ (70303, 70312), (70320, 70366), (70405, 70412), (70415, 70416),
+ (70419, 70440), (70442, 70448), (70450, 70451), (70453, 70457),
+ (70461, 70461), (70480, 70480), (70493, 70497), (70656, 70708),
+ (70727, 70730), (70751, 70753), (70784, 70831), (70852, 70853),
+ (70855, 70855), (71040, 71086), (71128, 71131), (71168, 71215),
+ (71236, 71236), (71296, 71338), (71352, 71352), (71424, 71450),
+ (71488, 71494), (71680, 71723), (71840, 71903), (71935, 71942),
+ (71945, 71945), (71948, 71955), (71957, 71958), (71960, 71983),
+ (71999, 71999), (72001, 72001), (72096, 72103), (72106, 72144),
+ (72161, 72161), (72163, 72163), (72192, 72192), (72203, 72242),
+ (72250, 72250), (72272, 72272), (72284, 72329), (72349, 72349),
+ (72368, 72440), (72704, 72712), (72714, 72750), (72768, 72768),
+ (72818, 72847), (72960, 72966), (72968, 72969), (72971, 73008),
+ (73030, 73030), (73056, 73061), (73063, 73064), (73066, 73097),
+ (73112, 73112), (73440, 73458), (73474, 73474), (73476, 73488),
+ (73490, 73523), (73648, 73648), (73728, 74649), (74752, 74862),
+ (74880, 75075), (77712, 77808), (77824, 78895), (78913, 78918),
+ (82944, 83526), (92160, 92728), (92736, 92766), (92784, 92862),
+ (92880, 92909), (92928, 92975), (92992, 92995), (93027, 93047),
+ (93053, 93071), (93760, 93823), (93952, 94026), (94032, 94032),
+ (94099, 94111), (94176, 94177), (94179, 94179), (94208, 100343),
+ (100352, 101589), (101632, 101640), (110576, 110579), (110581, 110587),
+ (110589, 110590), (110592, 110882), (110898, 110898), (110928, 110930),
+ (110933, 110933), (110948, 110951), (110960, 111355), (113664, 113770),
+ (113776, 113788), (113792, 113800), (113808, 113817), (119808, 119892),
+ (119894, 119964), (119966, 119967), (119970, 119970), (119973, 119974),
+ (119977, 119980), (119982, 119993), (119995, 119995), (119997, 120003),
+ (120005, 120069), (120071, 120074), (120077, 120084), (120086, 120092),
+ (120094, 120121), (120123, 120126), (120128, 120132), (120134, 120134),
+ (120138, 120144), (120146, 120485), (120488, 120512), (120514, 120538),
+ (120540, 120570), (120572, 120596), (120598, 120628), (120630, 120654),
+ (120656, 120686), (120688, 120712), (120714, 120744), (120746, 120770),
+ (120772, 120779), (122624, 122654), (122661, 122666), (122928, 122989),
+ (123136, 123180), (123191, 123197), (123214, 123214), (123536, 123565),
+ (123584, 123627), (124112, 124139), (124896, 124902), (124904, 124907),
+ (124909, 124910), (124912, 124926), (124928, 125124), (125184, 125251),
+ (125259, 125259), (126464, 126467), (126469, 126495), (126497, 126498),
+ (126500, 126500), (126503, 126503), (126505, 126514), (126516, 126519),
+ (126521, 126521), (126523, 126523), (126530, 126530), (126535, 126535),
+ (126537, 126537), (126539, 126539), (126541, 126543), (126545, 126546),
+ (126548, 126548), (126551, 126551), (126553, 126553), (126555, 126555),
+ (126557, 126557), (126559, 126559), (126561, 126562), (126564, 126564),
+ (126567, 126570), (126572, 126578), (126580, 126583), (126585, 126588),
+ (126590, 126590), (126592, 126601), (126603, 126619), (126625, 126627),
+ (126629, 126633), (126635, 126651), (131072, 173791), (173824, 177977),
+ (177984, 178205), (178208, 183969), (183984, 191456), (194560, 195101),
+ (196608, 201546), (201552, 205743),
+];
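These generated tables are sorted, non-overlapping inclusive code point ranges, so a membership test is a single binary search. A hedged sketch of such a lookup is below; it is illustrative only — in the crate these tables back the test suite, while the public `is_xid_start`/`is_xid_continue` functions use a more compact internal representation.

```rust
// Illustrative sketch: membership test over a sorted table of inclusive
// (start, end) code point ranges, in the same shape as the test tables above.
// This is not the crate's lookup path.
fn in_table(table: &[(u32, u32)], c: char) -> bool {
    let cp = c as u32;
    // partition_point finds the first range whose start is greater than cp;
    // the candidate range, if any, is the one just before it.
    let idx = table.partition_point(|&(start, _)| start <= cp);
    idx > 0 && cp <= table[idx - 1].1
}

fn main() {
    // A tiny excerpt of the XID_START table above: A-Z, a-z, U+00AA.
    let table: &[(u32, u32)] = &[(65, 90), (97, 122), (170, 170)];
    assert!(in_table(table, 'A'));
    assert!(in_table(table, 'ª')); // U+00AA falls in (170, 170)
    assert!(!in_table(table, '1'));
    assert!(!in_table(table, '_')); // XID_Start does not include '_'
}
```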
diff --git a/vendor/writeable/.cargo-checksum.json b/vendor/writeable/.cargo-checksum.json
index fe32e7569..35e2eb919 100644
--- a/vendor/writeable/.cargo-checksum.json
+++ b/vendor/writeable/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.lock":"efd045d3270108f64d75d49125e15940cf26b54c2944a13354698223d0c4ce17","Cargo.toml":"32b4d9ebb1a5c179aafb27b212e94203a1a2158b7c0387c712f25de31188b7a6","LICENSE":"4ad7541d66a407234e2c84902124cef325c29f3e966353efdb800bedb8b8da21","README.md":"a47b1fa1adda09e2ca28f7f49878c61d8f2057b8f3c42af7d39d646fedbd9934","benches/writeable.rs":"edc81c5524f98e77f0b3a45545606c4b526f63791e6dd8f90c88679c758c4834","examples/writeable_message.rs":"58bf4007f54f9f80428af7b687531837a294aecde533395a0a4c4cf55c9cad7d","src/impls.rs":"c13310eaf5ecb2f4bb87896ea0ee4f060b7828e7ea07b333e69a397a03ef39ae","src/lib.rs":"056631819550bd2845084f97fa1ee6ae130927c7021af86c8cf83337ae638f77","src/ops.rs":"4e49b1e8a8da46c3bcfecbdcb91f6e2d87afab47b76fcc2c4f005bf3cef675f1","tests/writeable.rs":"6dc3db45174180bcbf8980e640525b441c31b0b9db238721888d8cc0bd998ded"},"package":"f8e6ab4f5da1b24daf2c590cfac801bacb27b15b4f050e84eb60149ea726f06b"} \ No newline at end of file
+{"files":{"Cargo.lock":"4d2a250761f97e955d5c1839cb828fe364db73a6f8e25b4cf3a3e3bc00570cec","Cargo.toml":"e52bc721b0357827fb17713b5eb64791cb152eac5d8abdff35bb1b7a825f524d","LICENSE":"4ad7541d66a407234e2c84902124cef325c29f3e966353efdb800bedb8b8da21","README.md":"a47b1fa1adda09e2ca28f7f49878c61d8f2057b8f3c42af7d39d646fedbd9934","benches/writeable.rs":"edc81c5524f98e77f0b3a45545606c4b526f63791e6dd8f90c88679c758c4834","examples/writeable_message.rs":"58bf4007f54f9f80428af7b687531837a294aecde533395a0a4c4cf55c9cad7d","src/impls.rs":"c8fcad8d4374b28441442fa62c8a1e4c31739fbb3f31ed74a176e7ddd0e08cd0","src/lib.rs":"75edc4baf7f69a20c7d6af54f318f904e798b3d6c58eeb624ddd198114aef09a","src/ops.rs":"f201b36dc6a74d4137cca876b99a871555fe7bed7c784917ede49758a1717359","tests/writeable.rs":"6dc3db45174180bcbf8980e640525b441c31b0b9db238721888d8cc0bd998ded"},"package":"92d74a687e3b9a7a129db0a8c82b4d464eb9c36f5a66ca68572a7e5f1cfdb5bc"} \ No newline at end of file
diff --git a/vendor/writeable/Cargo.lock b/vendor/writeable/Cargo.lock
index c4110d507..e6b901786 100644
--- a/vendor/writeable/Cargo.lock
+++ b/vendor/writeable/Cargo.lock
@@ -8,7 +8,7 @@ version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
dependencies = [
- "hermit-abi",
+ "hermit-abi 0.1.19",
"libc",
"winapi",
]
@@ -39,9 +39,9 @@ dependencies = [
[[package]]
name = "bumpalo"
-version = "3.11.0"
+version = "3.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c1ad822118d20d2c234f427000d5acc36eabe1e29a348c89b63dd60b13f28e5d"
+checksum = "0d261e256854913907f67ed06efbc3338dfe6179796deefc1ff763fc1aee5535"
[[package]]
name = "cast"
@@ -125,26 +125,24 @@ dependencies = [
[[package]]
name = "crossbeam-epoch"
-version = "0.9.10"
+version = "0.9.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "045ebe27666471bb549370b4b0b3e51b07f56325befa4284db65fc89c02511b1"
+checksum = "01a9af1f4c2ef74bb8aa1f7e19706bc72d03598c8a570bb5de72243c7a9d9d5a"
dependencies = [
"autocfg",
"cfg-if",
"crossbeam-utils",
"memoffset",
- "once_cell",
"scopeguard",
]
[[package]]
name = "crossbeam-utils"
-version = "0.8.11"
+version = "0.8.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "51887d4adc7b564537b15adcfb307936f8075dfcd5f00dde9a9f1d29383682bc"
+checksum = "4fb766fa798726286dbbb842f174001dab8abc7b627a1dd86e0b7222a95d929f"
dependencies = [
"cfg-if",
- "once_cell",
]
[[package]]
@@ -177,9 +175,9 @@ checksum = "90e5c1c8368803113bf0c9584fc495a58b86dc8a29edbf8fe877d21d9507e797"
[[package]]
name = "getrandom"
-version = "0.2.7"
+version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4eb1a864a501629691edf6c15a593b7a51eebaa1e8468e9ddc623de7c9b58ec6"
+checksum = "c05aeb6a22b8f62540c194aac980f2115af067bfe15a0734d7277a768d396b31"
dependencies = [
"cfg-if",
"libc",
@@ -202,10 +200,13 @@ dependencies = [
]
[[package]]
-name = "icu_benchmark_macros"
-version = "0.7.0"
+name = "hermit-abi"
+version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6c867656f2d9c90b13709ac88e710a9d6afe33998c1dfa22384bab8804e8b3d4"
+checksum = "ee512640fe35acbfb4bb779db6f0d80704c2cacfa2e39b601ef3e3f47d1ae4c7"
+dependencies = [
+ "libc",
+]
[[package]]
name = "itertools"
@@ -224,9 +225,9 @@ checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4"
[[package]]
name = "itoa"
-version = "1.0.3"
+version = "1.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6c8af84674fe1f223a982c933a0ee1086ac4d4052aa0fb8060c12c6ad838e754"
+checksum = "fad582f4b9e86b6caa621cabeb0963332d92eea04729ab12892c2533951e6440"
[[package]]
name = "js-sys"
@@ -245,9 +246,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "libc"
-version = "0.2.133"
+version = "0.2.139"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c0f80d65747a3e43d1596c7c5492d95d5edddaabd45a7fcdb02b95f644164966"
+checksum = "201de327520df007757c1f0adce6e827fe8562fbc28bfd9c15571c66ca1f5f79"
[[package]]
name = "log"
@@ -266,9 +267,9 @@ checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
[[package]]
name = "memoffset"
-version = "0.6.5"
+version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce"
+checksum = "5de893c32cde5f383baa4c04c5d6dbdd735cfd4a794b0debdb2bb1b421da5ff4"
dependencies = [
"autocfg",
]
@@ -284,19 +285,19 @@ dependencies = [
[[package]]
name = "num_cpus"
-version = "1.13.1"
+version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "19e64526ebdee182341572e50e9ad03965aa510cd94427a4549448f285e957a1"
+checksum = "0fac9e2da13b5eb447a6ce3d392f23a29d8694bff781bf03a16cd9ac8697593b"
dependencies = [
- "hermit-abi",
+ "hermit-abi 0.2.6",
"libc",
]
[[package]]
name = "once_cell"
-version = "1.15.0"
+version = "1.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e82dad04139b71a90c080c8463fe0dc7902db5192d939bd0950f074d014339e1"
+checksum = "6f61fba1741ea2b3d6a1e3178721804bb716a68a6aeba1149b5d52e3d464ea66"
[[package]]
name = "oorandom"
@@ -334,24 +335,24 @@ dependencies = [
[[package]]
name = "ppv-lite86"
-version = "0.2.16"
+version = "0.2.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eb9f9e6e233e5c4a35559a617bf40a4ec447db2e84c20b55a6f83167b7e57872"
+checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de"
[[package]]
name = "proc-macro2"
-version = "1.0.44"
+version = "1.0.50"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7bd7356a8122b6c4a24a82b278680c73357984ca2fc79a0f9fa6dea7dced7c58"
+checksum = "6ef7d57beacfaf2d8aee5937dab7b7f28de3cb8b1828479bb5de2a7106f2bae2"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
-version = "1.0.21"
+version = "1.0.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179"
+checksum = "8856d8364d252a14d474036ea1358d63c9e6965c8e5c1885c18f73d70bff9c7b"
dependencies = [
"proc-macro2",
]
@@ -388,21 +389,19 @@ dependencies = [
[[package]]
name = "rayon"
-version = "1.5.3"
+version = "1.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bd99e5772ead8baa5215278c9b15bf92087709e9c1b2d1f97cdb5a183c933a7d"
+checksum = "6db3a213adf02b3bcfd2d3846bb41cb22857d131789e01df434fb7e7bc0759b7"
dependencies = [
- "autocfg",
- "crossbeam-deque",
"either",
"rayon-core",
]
[[package]]
name = "rayon-core"
-version = "1.9.3"
+version = "1.10.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "258bcdb5ac6dad48491bb2992db6b7cf74878b0384908af124823d118c99683f"
+checksum = "356a0625f1954f730c0201cdab48611198dc6ce21f4acff55089b5a78e6e835b"
dependencies = [
"crossbeam-channel",
"crossbeam-deque",
@@ -412,9 +411,9 @@ dependencies = [
[[package]]
name = "regex"
-version = "1.6.0"
+version = "1.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4c4eb3267174b8c6c2f654116623910a0fef09c4753f8dd83db29c48a0df988b"
+checksum = "48aaa5748ba571fb95cd2c85c09f629215d3a6ece942baa100950af03a34f733"
dependencies = [
"regex-syntax",
]
@@ -427,15 +426,15 @@ checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
[[package]]
name = "regex-syntax"
-version = "0.6.27"
+version = "0.6.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a3f87b73ce11b1619a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244"
+checksum = "456c603be3e8d448b072f410900c09faf164fbce2d480456f50eea6e25f9c848"
[[package]]
name = "ryu"
-version = "1.0.11"
+version = "1.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4501abdff3ae82a1c1b477a17252eb69cee9e66eb915c1abaa4f44d873df9f09"
+checksum = "7b4b9743ed687d4b4bcedf9ff5eaa7398495ae14e61cba0a295704edbc7decde"
[[package]]
name = "same-file"
@@ -454,9 +453,9 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
[[package]]
name = "serde"
-version = "1.0.145"
+version = "1.0.152"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "728eb6351430bccb993660dfffc5a72f91ccc1295abaa8ce19b27ebe4f75568b"
+checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb"
[[package]]
name = "serde_cbor"
@@ -470,9 +469,9 @@ dependencies = [
[[package]]
name = "serde_derive"
-version = "1.0.145"
+version = "1.0.152"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "81fa1584d3d1bcacd84c277a0dfe21f5b0f6accf4a23d04d4c6d61f1af522b4c"
+checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e"
dependencies = [
"proc-macro2",
"quote",
@@ -481,20 +480,20 @@ dependencies = [
[[package]]
name = "serde_json"
-version = "1.0.85"
+version = "1.0.91"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e55a28e3aaef9d5ce0506d0a14dbba8054ddc7e499ef522dd8b26859ec9d4a44"
+checksum = "877c235533714907a8c2464236f5c4b2a17262ef1bd71f38f35ea592c8da6883"
dependencies = [
- "itoa 1.0.3",
+ "itoa 1.0.5",
"ryu",
"serde",
]
[[package]]
name = "syn"
-version = "1.0.101"
+version = "1.0.107"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e90cde112c4b9690b8cbe810cba9ddd8bc1d7472e2cae317b69e9438c1cba7d2"
+checksum = "1f4064b5b16e03ae50984a5a8ed5d4f8803e6bc1fd170a3cda91a1be4b18e3f5"
dependencies = [
"proc-macro2",
"quote",
@@ -522,9 +521,9 @@ dependencies = [
[[package]]
name = "unicode-ident"
-version = "1.0.4"
+version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dcc811dc4066ac62f84f11307873c4850cb653bfa9b1719cee2bd2204a4bc5dd"
+checksum = "84a22b9f218b40614adcb3f4ff08b703773ad44fa9423e4e0d346d5db86e4ebc"
[[package]]
name = "unicode-width"
@@ -646,9 +645,8 @@ checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "writeable"
-version = "0.5.0"
+version = "0.5.1"
dependencies = [
"criterion",
- "icu_benchmark_macros",
"rand",
]
diff --git a/vendor/writeable/Cargo.toml b/vendor/writeable/Cargo.toml
index 1c9d41303..0ec582e48 100644
--- a/vendor/writeable/Cargo.toml
+++ b/vendor/writeable/Cargo.toml
@@ -12,7 +12,7 @@
[package]
edition = "2018"
name = "writeable"
-version = "0.5.0"
+version = "0.5.1"
authors = ["The ICU4X Project Developers"]
include = [
"src/**/*",
@@ -29,9 +29,15 @@ license = "Unicode-DFS-2016"
repository = "https://github.com/unicode-org/icu4x"
resolver = "2"
+[package.metadata.workspaces]
+independent = true
+
[package.metadata.docs.rs]
all-features = true
+[package.metadata.cargo-all-features]
+denylist = ["bench"]
+
[lib]
path = "src/lib.rs"
bench = false
@@ -43,13 +49,9 @@ harness = false
[dev-dependencies.criterion]
version = "0.3"
-[dev-dependencies.icu_benchmark_macros]
-version = "0.7"
-
[dev-dependencies.rand]
version = "0.8"
features = ["small_rng"]
[features]
bench = []
-default = []
diff --git a/vendor/writeable/src/impls.rs b/vendor/writeable/src/impls.rs
index 649ede4d5..a2b5201e0 100644
--- a/vendor/writeable/src/impls.rs
+++ b/vendor/writeable/src/impls.rs
@@ -7,12 +7,12 @@ use alloc::borrow::Cow;
use core::fmt;
macro_rules! impl_write_num {
- ($u:ty, $i:ty, $test:ident, $log10:ident) => {
+ ($u:ty, $i:ty, $test:ident, $max_ilog_10:expr) => {
impl $crate::Writeable for $u {
fn write_to<W: core::fmt::Write + ?Sized>(&self, sink: &mut W) -> core::fmt::Result {
- let mut buf = [b'0'; $log10(<$u>::MAX) as usize + 1];
+ let mut buf = [b'0'; $max_ilog_10 + 1];
let mut n = *self;
- let mut i = buf.len();
+ let mut i = $max_ilog_10 + 1;
#[allow(clippy::indexing_slicing)] // n < 10^i
while n != 0 {
i -= 1;
@@ -29,40 +29,38 @@ macro_rules! impl_write_num {
}
fn writeable_length_hint(&self) -> $crate::LengthHint {
- $crate::LengthHint::exact(if *self == 0 {
- 1
- } else {
- $log10(*self) as usize + 1
- })
+ LengthHint::exact(self.checked_ilog10().unwrap_or(0) as usize + 1)
}
}
- // TODO: use the library functions once stabilized.
- // https://github.com/unicode-org/icu4x/issues/1428
- #[inline]
- const fn $log10(s: $u) -> u32 {
- let b = (<$u>::BITS - 1) - s.leading_zeros();
- // s ∈ [2ᵇ, 2ᵇ⁺¹-1] => ⌊log₁₀(s)⌋ ∈ [⌊log₁₀(2ᵇ)⌋, ⌊log₁₀(2ᵇ⁺¹-1)⌋]
- // <=> ⌊log₁₀(s)⌋ ∈ [⌊log₁₀(2ᵇ)⌋, ⌊log₁₀(2ᵇ⁺¹)⌋]
- // <=> ⌊log₁₀(s)⌋ ∈ [⌊b log₁₀(2)⌋, ⌊(b+1) log₁₀(2)⌋]
- // The second line holds because there is no integer in
- // [log₁₀(2ᶜ-1), log₁₀(2ᶜ)], if there were, there'd be some 10ⁿ in
- // [2ᶜ-1, 2ᶜ], but it can't be 2ᶜ-1 due to parity nor 2ᶜ due to prime
- // factors.
-
- const M: u32 = (core::f64::consts::LOG10_2 * (1 << 26) as f64) as u32;
- let low = (b * M) >> 26;
- let high = ((b + 1) * M) >> 26;
-
- // If the bounds aren't tight (e.g. 87 ∈ [64, 127] ⟹ ⌊log₁₀(87)⌋ ∈ [1,2]),
- // compare to 10ʰ (100). This shouldn't happen too often as there are more
- // powers of 2 than 10 (it happens for 14% of u32s).
- if high == low {
- low
- } else if s < (10 as $u).pow(high) {
- low
- } else {
- high
+ impl ILog10Ext for $u {
+ fn checked_ilog10(self) -> Option<u32> {
+ if self == 0 {
+ return None;
+ }
+ let b = (<$u>::BITS - 1) - self.leading_zeros();
+ // self ∈ [2ᵇ, 2ᵇ⁺¹-1] => ⌊log₁₀(self)⌋ ∈ [⌊log₁₀(2ᵇ)⌋, ⌊log₁₀(2ᵇ⁺¹-1)⌋]
+ // <=> ⌊log₁₀(self)⌋ ∈ [⌊log₁₀(2ᵇ)⌋, ⌊log₁₀(2ᵇ⁺¹)⌋]
+ // <=> ⌊log₁₀(self)⌋ ∈ [⌊b log₁₀(2)⌋, ⌊(b+1) log₁₀(2)⌋]
+ // The second line holds because there is no integer in
+ // [log₁₀(2ᶜ-1), log₁₀(2ᶜ)], if there were, there'd be some 10ⁿ in
+ // [2ᶜ-1, 2ᶜ], but it can't be 2ᶜ-1 due to parity nor 2ᶜ due to prime
+ // factors.
+
+ const M: u32 = (core::f64::consts::LOG10_2 * (1 << 26) as f64) as u32;
+ let low = (b * M) >> 26;
+ let high = ((b + 1) * M) >> 26;
+
+ // If the bounds aren't tight (e.g. 87 ∈ [64, 127] ⟹ ⌊log₁₀(87)⌋ ∈ [1,2]),
+ // compare to 10ʰ (100). This shouldn't happen too often as there are more
+ // powers of 2 than 10 (it happens for 14% of u32s).
+ Some(if high == low {
+ low
+ } else if self < (10 as $u).pow(high) {
+ low
+ } else {
+ high
+ })
}
}
@@ -84,11 +82,14 @@ macro_rules! impl_write_num {
fn $test() {
use $crate::assert_writeable_eq;
assert_writeable_eq!(&(0 as $u), "0");
- assert_writeable_eq!(&(0 as $u), "0");
+ assert_writeable_eq!(&(0 as $i), "0");
assert_writeable_eq!(&(-0 as $i), "0");
assert_writeable_eq!(&(1 as $u), "1");
assert_writeable_eq!(&(1 as $i), "1");
assert_writeable_eq!(&(-1 as $i), "-1");
+ assert_writeable_eq!(&(9 as $u), "9");
+ assert_writeable_eq!(&(9 as $i), "9");
+ assert_writeable_eq!(&(-9 as $i), "-9");
assert_writeable_eq!(&(10 as $u), "10");
assert_writeable_eq!(&(10 as $i), "10");
assert_writeable_eq!(&(-10 as $i), "-10");
@@ -112,19 +113,24 @@ macro_rules! impl_write_num {
};
}
-impl_write_num!(u8, i8, test_u8, log10_u8);
-impl_write_num!(u16, i16, test_u16, log10_u16);
-impl_write_num!(u32, i32, test_u32, log10_u32);
-impl_write_num!(u64, i64, test_u64, log10_u64);
-impl_write_num!(u128, i128, test_u128, log10_u128);
-
-#[test]
-fn assert_log10_approximation() {
- for i in 1..u128::BITS {
- assert_eq!(i * 59 / 196, 2f64.powf(i.into()).log10().floor() as u32);
- }
+/// `checked_ilog10` was added as an inherent method on the integer types in Rust 1.67.
+/// This extension trait provides it for older compilers.
+trait ILog10Ext: Sized {
+ fn checked_ilog10(self) -> Option<u32>;
}
+impl_write_num!(u8, i8, test_u8, 2);
+impl_write_num!(u16, i16, test_u16, 4);
+impl_write_num!(u32, i32, test_u32, 9);
+impl_write_num!(u64, i64, test_u64, 19);
+impl_write_num!(u128, i128, test_u128, 38);
+impl_write_num!(
+ usize,
+ isize,
+ test_usize,
+ if usize::MAX as u64 == u64::MAX { 19 } else { 9 }
+);
+
impl Writeable for str {
#[inline]
fn write_to<W: fmt::Write + ?Sized>(&self, sink: &mut W) -> fmt::Result {
@@ -194,7 +200,7 @@ impl<'a, T: Writeable + ?Sized> Writeable for &T {
#[test]
fn test_string_impls() {
- fn check_writeable_slice<W: Writeable>(writeables: &[W]) {
+ fn check_writeable_slice<W: Writeable + core::fmt::Display>(writeables: &[W]) {
assert_writeable_eq!(&writeables[0], "");
assert_writeable_eq!(&writeables[1], "abc");
}
@@ -204,10 +210,10 @@ fn test_string_impls() {
check_writeable_slice(arr);
// test String impl
- let arr: &[String] = &["".to_string(), "abc".to_string()];
+ let arr: &[String] = &[String::new(), "abc".to_owned()];
check_writeable_slice(arr);
// test &T impl
- let arr: &[&String] = &[&"".to_string(), &"abc".to_string()];
+ let arr: &[&String] = &[&String::new(), &"abc".to_owned()];
check_writeable_slice(arr);
}
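
The hunk above replaces the float-free `$log10` helper with an `ILog10Ext::checked_ilog10` shim plus hard-coded maximum digit counts. A minimal standalone sketch of the same bit-based approximation (for `u32` only; `ilog10_approx` is a local illustrative helper, not part of the `writeable` API):

```
// Sketch of the digit-count approximation used in writeable's impl_write_num!:
// find the highest set bit b (so n is in [2^b, 2^(b+1) - 1]), map b to a
// floor(log10) range via fixed-point log10(2), and disambiguate with a single
// comparison against 10^high when the two bounds disagree.
fn ilog10_approx(n: u32) -> Option<u32> {
    if n == 0 {
        return None;
    }
    let b = (u32::BITS - 1) - n.leading_zeros();
    // log10(2) in 26-bit fixed point, as in the original code.
    const M: u32 = (core::f64::consts::LOG10_2 * (1 << 26) as f64) as u32;
    let low = (b * M) >> 26;
    let high = ((b + 1) * M) >> 26;
    Some(if high == low || n < 10u32.pow(high) { low } else { high })
}

fn main() {
    // Spot-check against a naive digit count.
    for n in [1u32, 9, 10, 99, 100, u32::MAX] {
        let digits = n.to_string().len() as u32;
        assert_eq!(ilog10_approx(n), Some(digits - 1));
    }
    assert_eq!(ilog10_approx(0), None);
}
```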
diff --git a/vendor/writeable/src/lib.rs b/vendor/writeable/src/lib.rs
index 66be7f33b..0eb6be8d6 100644
--- a/vendor/writeable/src/lib.rs
+++ b/vendor/writeable/src/lib.rs
@@ -136,6 +136,28 @@ impl LengthHint {
}
}
+/// [`Part`]s are used as annotations for formatted strings. For example, a string like
+/// `Alice, Bob` could assign a `NAME` part to the substrings `Alice` and `Bob`, and a
+/// `PUNCTUATION` part to `, `. This allows, for example, styling to be applied only to names.
+///
+/// `Part` contains two fields, whose usage is left up to the producer of the [`Writeable`].
+/// Conventionally, the `category` field will identify the formatting logic that produces
+/// the string/parts, whereas the `value` field will have semantic meaning. `NAME` and
+/// `PUNCTUATION` could thus be defined as
+/// ```
+/// # use writeable::Part;
+/// const NAME: Part = Part {
+/// category: "userlist",
+/// value: "name",
+/// };
+/// const PUNCTUATION: Part = Part {
+/// category: "userlist",
+/// value: "punctuation",
+/// };
+/// ```
+///
+/// That said, consumers should not usually have to inspect `Part` internals. Instead,
+/// formatters should expose the `Part`s they produce as constants.
#[derive(Clone, Copy, Debug, PartialEq)]
#[allow(clippy::exhaustive_structs)] // stable
pub struct Part {
@@ -240,7 +262,11 @@ pub trait Writeable {
/// }
/// ```
fn write_to_string(&self) -> Cow<str> {
- let mut output = String::with_capacity(self.writeable_length_hint().capacity());
+ let hint = self.writeable_length_hint();
+ if hint.is_zero() {
+ return Cow::Borrowed("");
+ }
+ let mut output = String::with_capacity(hint.capacity());
let _ = self.write_to(&mut output);
Cow::Owned(output)
}
@@ -289,7 +315,10 @@ macro_rules! impl_display_with_writeable {
///
/// struct Demo;
/// impl Writeable for Demo {
-/// fn write_to_parts<S: writeable::PartsWrite + ?Sized>(&self, sink: &mut S) -> fmt::Result {
+/// fn write_to_parts<S: writeable::PartsWrite + ?Sized>(
+/// &self,
+/// sink: &mut S,
+/// ) -> fmt::Result {
/// sink.with_part(WORD, |w| w.write_str("foo"))
/// }
/// fn writeable_length_hint(&self) -> LengthHint {
@@ -303,7 +332,13 @@ macro_rules! impl_display_with_writeable {
/// assert_writeable_eq!(&Demo, "foo", "Message: {}", "Hello World");
///
/// assert_writeable_parts_eq!(&Demo, "foo", [(0, 3, WORD)]);
-/// assert_writeable_parts_eq!(&Demo, "foo", [(0, 3, WORD)], "Message: {}", "Hello World");
+/// assert_writeable_parts_eq!(
+/// &Demo,
+/// "foo",
+/// [(0, 3, WORD)],
+/// "Message: {}",
+/// "Hello World"
+/// );
/// ```
#[macro_export]
macro_rules! assert_writeable_eq {
@@ -316,10 +351,19 @@ macro_rules! assert_writeable_eq {
assert_eq!(actual_str, $expected_str, $($arg)*);
assert_eq!(actual_str, $crate::Writeable::write_to_string(actual_writeable), $($arg)+);
let length_hint = $crate::Writeable::writeable_length_hint(actual_writeable);
- assert!(length_hint.0 <= actual_str.len(), $($arg)*);
+ assert!(
+ length_hint.0 <= actual_str.len(),
+ "hint lower bound {} larger than actual length {}: {}",
+ length_hint.0, actual_str.len(), format!($($arg)*),
+ );
if let Some(upper) = length_hint.1 {
- assert!(actual_str.len() <= upper, $($arg)*);
+ assert!(
+ actual_str.len() <= upper,
+ "hint upper bound {} smaller than actual length {}: {}",
+                upper, actual_str.len(), format!($($arg)*),
+ );
}
+ assert_eq!(actual_writeable.to_string(), $expected_str);
}};
}
@@ -340,6 +384,7 @@ macro_rules! assert_writeable_parts_eq {
if let Some(upper) = length_hint.1 {
assert!(actual_str.len() <= upper, $($arg)+);
}
+ assert_eq!(actual_writeable.to_string(), $expected_str);
}};
}
@@ -371,7 +416,10 @@ pub fn writeable_to_parts_for_test<W: Writeable>(
) -> fmt::Result {
let start = self.string.len();
f(self)?;
- self.parts.push((start, self.string.len(), part));
+ let end = self.string.len();
+ if start < end {
+ self.parts.push((start, end, part));
+ }
Ok(())
}
}
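
The `write_to_string` change above adds a fast path: when the length hint pins the output to zero bytes, a borrowed empty string is returned and no allocation happens. A rough sketch of that idea, using a simplified stand-in for `LengthHint` (a lower bound plus an optional upper bound; the names here are illustrative, not the crate's API):

```
use std::borrow::Cow;

// Simplified stand-in for writeable::LengthHint.
struct Hint(usize, Option<usize>);

impl Hint {
    // The hint is "exactly zero" only when the upper bound pins the length to 0.
    fn is_zero(&self) -> bool {
        self.1 == Some(0)
    }
    fn capacity(&self) -> usize {
        self.1.unwrap_or(self.0)
    }
}

fn to_string_with_hint(hint: Hint, write: impl Fn(&mut String)) -> Cow<'static, str> {
    if hint.is_zero() {
        // Nothing will be written, so skip the allocation entirely.
        return Cow::Borrowed("");
    }
    let mut out = String::with_capacity(hint.capacity());
    write(&mut out);
    Cow::Owned(out)
}

fn main() {
    assert!(matches!(to_string_with_hint(Hint(0, Some(0)), |_| ()), Cow::Borrowed("")));
    let s = to_string_with_hint(Hint(3, Some(3)), |out| out.push_str("abc"));
    assert_eq!(s, "abc");
}
```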
diff --git a/vendor/writeable/src/ops.rs b/vendor/writeable/src/ops.rs
index 3ed4406d7..2ccad7d6d 100644
--- a/vendor/writeable/src/ops.rs
+++ b/vendor/writeable/src/ops.rs
@@ -82,7 +82,10 @@ impl core::ops::BitOr<LengthHint> for LengthHint {
/// struct NonDeterministicWriteable(String, String);
///
/// impl Writeable for NonDeterministicWriteable {
- /// fn write_to<W: fmt::Write + ?Sized>(&self, sink: &mut W) -> fmt::Result {
+ /// fn write_to<W: fmt::Write + ?Sized>(
+ /// &self,
+ /// sink: &mut W,
+ /// ) -> fmt::Result {
/// sink.write_str(if coin_flip() { &self.0 } else { &self.1 })
/// }
///
diff --git a/vendor/xflags-macros/.cargo-checksum.json b/vendor/xflags-macros/.cargo-checksum.json
index fcc17d504..9f20539ff 100644
--- a/vendor/xflags-macros/.cargo-checksum.json
+++ b/vendor/xflags-macros/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.toml":"dff52eef5d3a126eefb38eb66ecb194dd9ccd22a6e010df2b672d83264ac65a5","src/ast.rs":"1f3be0fa1c340dbd98f56fd1c10a43b081d4b9179b1666a3dd3f32d8082e452c","src/emit.rs":"03e692330adc927c1d7ea36faebec0df997dd2e6bfb4816661988078b59fdb0b","src/lib.rs":"ce57137b3db248a73b201a5add6b1ac5c80e431e2bc2947962d4b0b6f397a449","src/parse.rs":"15e860ce666b17228ae537b322c096a64696c83b620468a14112bab30c0432a8","src/update.rs":"133dbc864182808ea7679b815863117b51ccc59a0e43796321f9d5a2edcb3ed6","tests/data/help.rs":"a952a2f641fa6db5c7bd25d8480c1dff4f1146db21a0629e0506e4bc87c46d0c","tests/data/repeated_pos.rs":"60b7d6378583765ddda1be127d3f40aa34cf802c64008e9d3a456f5e20100290","tests/data/smoke.rs":"c5d626382c22f147deb0be9396bf4f8256d64618451f8c9b85002d3525c7fff6","tests/data/subcommands.rs":"c2020f895380c95ca6c97f124615ed64f838a6ee3d1bcce8842029c6cef55188","tests/it/help.rs":"d12aba6869f7d40984c9294591fb97fea7d8caf062969e5ae640ba21957a26c3","tests/it/main.rs":"08311bc06020d980867bd696040c6ddd4893700c7a181d4b9d256ef100945a46","tests/it/repeated_pos.rs":"f9acef062eda5ad43722cade6ebde4c0077174a0fae4e2c3c9555ad6dd490599","tests/it/smoke.rs":"3324c20e79258fca2dfe30a7312b5e192d09c104a30142b1362dd89e746d1306","tests/it/subcommands.rs":"900726a1309d753aa3b666c9b3051ec0b26cb1bf59c1d072704d8e497cfa18c4"},"package":"2afbd7f2039bb6cad2dd45f0c5dff49c0d4e26118398768b7a605524d4251809"} \ No newline at end of file
+{"files":{"Cargo.toml":"922faf706912dc58a85a5b8b6a22a7e2dbe17a8682776d25aa8f88c06a3d3bc2","src/ast.rs":"1f3be0fa1c340dbd98f56fd1c10a43b081d4b9179b1666a3dd3f32d8082e452c","src/emit.rs":"03e692330adc927c1d7ea36faebec0df997dd2e6bfb4816661988078b59fdb0b","src/lib.rs":"ce57137b3db248a73b201a5add6b1ac5c80e431e2bc2947962d4b0b6f397a449","src/parse.rs":"15e860ce666b17228ae537b322c096a64696c83b620468a14112bab30c0432a8","src/update.rs":"133dbc864182808ea7679b815863117b51ccc59a0e43796321f9d5a2edcb3ed6","tests/data/help.rs":"a952a2f641fa6db5c7bd25d8480c1dff4f1146db21a0629e0506e4bc87c46d0c","tests/data/repeated_pos.rs":"60b7d6378583765ddda1be127d3f40aa34cf802c64008e9d3a456f5e20100290","tests/data/smoke.rs":"c5d626382c22f147deb0be9396bf4f8256d64618451f8c9b85002d3525c7fff6","tests/data/subcommands.rs":"c2020f895380c95ca6c97f124615ed64f838a6ee3d1bcce8842029c6cef55188","tests/it/help.rs":"d12aba6869f7d40984c9294591fb97fea7d8caf062969e5ae640ba21957a26c3","tests/it/main.rs":"bbae0bff66ccd22f5fb1bcff08d7acadc3c93644bdb90b6f9d9b6ff76dbb9d1a","tests/it/repeated_pos.rs":"f9acef062eda5ad43722cade6ebde4c0077174a0fae4e2c3c9555ad6dd490599","tests/it/smoke.rs":"3324c20e79258fca2dfe30a7312b5e192d09c104a30142b1362dd89e746d1306","tests/it/subcommands.rs":"900726a1309d753aa3b666c9b3051ec0b26cb1bf59c1d072704d8e497cfa18c4"},"package":"f58e7b3ca8977093aae6b87b6a7730216fc4c53a6530bab5c43a783cd810c1a8"} \ No newline at end of file
diff --git a/vendor/xflags-macros/Cargo.toml b/vendor/xflags-macros/Cargo.toml
index 70b748f0a..45ce51de8 100644
--- a/vendor/xflags-macros/Cargo.toml
+++ b/vendor/xflags-macros/Cargo.toml
@@ -12,7 +12,7 @@
[package]
edition = "2021"
name = "xflags-macros"
-version = "0.3.0"
+version = "0.3.1"
authors = ["Aleksey Kladov <aleksey.kladov@gmail.com>"]
description = "Private implementation details of xflags."
license = "MIT OR Apache-2.0"
diff --git a/vendor/xflags-macros/tests/it/main.rs b/vendor/xflags-macros/tests/it/main.rs
index 7d77bda36..bee65b015 100644
--- a/vendor/xflags-macros/tests/it/main.rs
+++ b/vendor/xflags-macros/tests/it/main.rs
@@ -230,3 +230,73 @@ fn subcommand_flag_inheritance() {
expect!["unexpected flag: `--dir`"],
);
}
+
+#[test]
+fn edge_cases() {
+ check(
+ subcommands::RustAnalyzer::from_vec,
+ "server --dir --log",
+ expect![[r#"
+ RustAnalyzer {
+ verbose: 0,
+ subcommand: Server(
+ Server {
+ dir: Some(
+ "--log",
+ ),
+ subcommand: Launch(
+ Launch {
+ log: false,
+ },
+ ),
+ },
+ ),
+ }
+ "#]],
+ );
+ check(
+ subcommands::RustAnalyzer::from_vec,
+ "server --dir -- --log",
+ expect![[r#"
+ RustAnalyzer {
+ verbose: 0,
+ subcommand: Server(
+ Server {
+ dir: Some(
+ "--",
+ ),
+ subcommand: Launch(
+ Launch {
+ log: true,
+ },
+ ),
+ },
+ ),
+ }
+ "#]],
+ );
+ check(
+ subcommands::RustAnalyzer::from_vec,
+ "-- -v server",
+ expect![[r#"unexpected argument: "-v""#]],
+ );
+ check(repeated_pos::RepeatedPos::from_vec, "pos 1 prog -j", expect!["unexpected flag: `-j`"]);
+ check(
+ repeated_pos::RepeatedPos::from_vec,
+ "pos 1 -- prog -j",
+ expect![[r#"
+ RepeatedPos {
+ a: "pos",
+ b: Some(
+ 1,
+ ),
+ c: Some(
+ "prog",
+ ),
+ rest: [
+ "-j",
+ ],
+ }
+ "#]],
+ );
+}
diff --git a/vendor/xflags/.cargo-checksum.json b/vendor/xflags/.cargo-checksum.json
index 1fa4dd9e5..ccb82afb6 100644
--- a/vendor/xflags/.cargo-checksum.json
+++ b/vendor/xflags/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.lock":"bff2ffbef3ca253bcde1624bfc2c2afd244ca39b44e7dd3f4b463873904129c7","Cargo.toml":"faa866c2c32635dfa6d600103595777c330520df5e73aa0461d6f34eb21bbd9a","examples/hello-generated.rs":"3bf7922435ae84e2b8ae022aba1d693fc4e523b354d3141cc62a183864531f31","examples/hello.rs":"235426d1a69eeba790d1e4fc2ac4fa8f30ca16952edb2a026cf6f4b8c78d6fec","examples/immediate-mode.rs":"dcae135769b6104801de7b91fd220fb28ddae097ba7b0474fe32c28a90c4dfef","examples/longer.rs":"b55684fdde16a8462e1c1afaa9f30e75e3472abfcabe950403733c767dd7de96","examples/non-utf8.rs":"23b563180d1d616f81e224890192799a06fdbc21f3ccb7e4234ca51b3c467a9a","src/lib.rs":"780d7be977abefaa4c7229733a916d88480c9228ddb0f9bc2a25aaa626f7f7d7","src/rt.rs":"b80e6b1e180b0bd0fd523a6bc74d6bba40da7af461de37c33c3735bfb3c66254"},"package":"cbf19f5031a1a812e96fede16f8161218883079946cea87619d3613db1efd268"} \ No newline at end of file
+{"files":{"Cargo.lock":"29daddfd833005b0bce977a938c88f5d48e1e7f2112950b266719c0d9c30de87","Cargo.toml":"027201abeeab5b32f12827e94aa78a9e96e43e9c59aa99b1f038949103e1fdd9","examples/hello-generated.rs":"3bf7922435ae84e2b8ae022aba1d693fc4e523b354d3141cc62a183864531f31","examples/hello.rs":"235426d1a69eeba790d1e4fc2ac4fa8f30ca16952edb2a026cf6f4b8c78d6fec","examples/immediate-mode.rs":"dcae135769b6104801de7b91fd220fb28ddae097ba7b0474fe32c28a90c4dfef","examples/longer.rs":"b55684fdde16a8462e1c1afaa9f30e75e3472abfcabe950403733c767dd7de96","examples/non-utf8.rs":"23b563180d1d616f81e224890192799a06fdbc21f3ccb7e4234ca51b3c467a9a","src/lib.rs":"780d7be977abefaa4c7229733a916d88480c9228ddb0f9bc2a25aaa626f7f7d7","src/rt.rs":"be5393c690afde9c8c31b01471100eec21742f735983ef9074c3675daf942ac7"},"package":"c4554b580522d0ca238369c16b8f6ce34524d61dafe7244993754bbd05f2c2ea"} \ No newline at end of file
diff --git a/vendor/xflags/Cargo.lock b/vendor/xflags/Cargo.lock
index e2020eb16..d4df44759 100644
--- a/vendor/xflags/Cargo.lock
+++ b/vendor/xflags/Cargo.lock
@@ -4,13 +4,13 @@ version = 3
[[package]]
name = "xflags"
-version = "0.3.0"
+version = "0.3.1"
dependencies = [
"xflags-macros",
]
[[package]]
name = "xflags-macros"
-version = "0.3.0"
+version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2afbd7f2039bb6cad2dd45f0c5dff49c0d4e26118398768b7a605524d4251809"
+checksum = "f58e7b3ca8977093aae6b87b6a7730216fc4c53a6530bab5c43a783cd810c1a8"
diff --git a/vendor/xflags/Cargo.toml b/vendor/xflags/Cargo.toml
index 091002385..3db90b6be 100644
--- a/vendor/xflags/Cargo.toml
+++ b/vendor/xflags/Cargo.toml
@@ -12,7 +12,7 @@
[package]
edition = "2021"
name = "xflags"
-version = "0.3.0"
+version = "0.3.1"
authors = ["Aleksey Kladov <aleksey.kladov@gmail.com>"]
description = "Moderately simple command line arguments parser."
categories = ["command-line-interface"]
@@ -21,4 +21,4 @@ repository = "https://github.com/matklad/xflags"
resolver = "1"
[dependencies.xflags-macros]
-version = "=0.3.0"
+version = "=0.3.1"
diff --git a/vendor/xflags/src/rt.rs b/vendor/xflags/src/rt.rs
index 988e6cc23..0b3d98c2b 100644
--- a/vendor/xflags/src/rt.rs
+++ b/vendor/xflags/src/rt.rs
@@ -15,36 +15,41 @@ macro_rules! bail {
}
pub struct Parser {
+ after_double_dash: bool,
rargs: Vec<OsString>,
}
impl Parser {
pub fn new(mut args: Vec<OsString>) -> Self {
args.reverse();
- Self { rargs: args }
+ Self { after_double_dash: false, rargs: args }
}
pub fn new_from_env() -> Self {
- let mut args = std::env::args_os().collect::<Vec<_>>();
- args.reverse();
- args.pop();
- Self { rargs: args }
- }
-
- pub fn is_empty(&self) -> bool {
- self.rargs.is_empty()
+ let args = std::env::args_os().collect::<Vec<_>>();
+ let mut res = Parser::new(args);
+ let _progn = res.next();
+ res
}
- pub fn peek_flag(&self) -> Option<&str> {
- self.rargs.last().and_then(|it| it.to_str()).filter(|it| it.starts_with('-'))
- }
pub fn pop_flag(&mut self) -> Option<Result<String, OsString>> {
- if self.peek_flag().is_some() {
- self.next().map(|it| it.into_string())
- } else {
+ if self.after_double_dash {
self.next().map(Err)
+ } else {
+ let arg = self.next()?;
+ let arg_str = arg.to_str().unwrap_or_default();
+ if arg_str.starts_with('-') {
+ if arg_str == "--" {
+ self.after_double_dash = true;
+ return self.next().map(Err);
+ }
+ Some(arg.into_string())
+ } else {
+ Some(Err(arg))
+ }
}
}
+
pub fn push_back(&mut self, arg: Result<String, OsString>) {
let arg = match arg {
Ok(it) => it.into(),
@@ -53,15 +58,12 @@ impl Parser {
self.rargs.push(arg)
}
- pub fn next(&mut self) -> Option<OsString> {
+ fn next(&mut self) -> Option<OsString> {
self.rargs.pop()
}
pub fn next_value(&mut self, flag: &str) -> Result<OsString> {
- if self.peek_flag().is_some() {
- bail!("expected a value for `{}`", flag)
- }
- self.next().ok_or_else(|| format_err!("expected a value for `{}`", flag))
+ self.next().ok_or_else(|| format_err!("expected a value for `{flag}`"))
}
pub fn next_value_from_str<T: FromStr>(&mut self, flag: &str) -> Result<T>
@@ -77,21 +79,19 @@ impl Parser {
T::Err: fmt::Display,
{
match value.into_string() {
- Ok(str) => {
- str.parse::<T>().map_err(|err| format_err!("can't parse `{}`, {}", flag, err))
- }
+ Ok(str) => str.parse::<T>().map_err(|err| format_err!("can't parse `{flag}`, {err}")),
Err(it) => {
- bail!("can't parse `{}`, invalid utf8: {:?}", flag, it)
+ bail!("can't parse `{flag}`, invalid utf8: {it:?}")
}
}
}
pub fn unexpected_flag(&self, flag: &str) -> Error {
- format_err!("unexpected flag: `{}`", flag)
+ format_err!("unexpected flag: `{flag}`")
}
pub fn unexpected_arg(&self, arg: OsString) -> Error {
- format_err!("unexpected argument: {:?}", arg)
+ format_err!("unexpected argument: {arg:?}")
}
pub fn subcommand_required(&self) -> Error {
@@ -104,15 +104,15 @@ impl Parser {
pub fn optional<T>(&self, flag: &str, mut vals: Vec<T>) -> Result<Option<T>> {
if vals.len() > 1 {
- bail!("flag specified more than once: `{}`", flag)
+ bail!("flag specified more than once: `{flag}`")
}
Ok(vals.pop())
}
pub fn required<T>(&self, flag: &str, mut vals: Vec<T>) -> Result<T> {
if vals.len() > 1 {
- bail!("flag specified more than once: `{}`", flag)
+ bail!("flag specified more than once: `{flag}`")
}
- vals.pop().ok_or_else(|| format_err!("flag is required: `{}`", flag))
+ vals.pop().ok_or_else(|| format_err!("flag is required: `{flag}`"))
}
}
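
The reworked `pop_flag` above is what drives the new `edge_cases` test earlier in this diff: once a bare `--` is consumed, every remaining argument is reported as a plain value rather than a flag (so `"-- -v server"` rejects `-v` as an unexpected argument, and `"pos 1 -- prog -j"` keeps `-j` as a positional). A minimal sketch of that state machine, independent of xflags' real `Parser`:

```
use std::ffi::OsString;

struct MiniParser {
    after_double_dash: bool,
    // Stored reversed so pop() yields arguments front to back.
    rargs: Vec<OsString>,
}

impl MiniParser {
    fn new(mut args: Vec<OsString>) -> Self {
        args.reverse();
        Self { after_double_dash: false, rargs: args }
    }

    // Ok(flag) for `-`-prefixed arguments, Err(value) for positionals.
    fn pop_flag(&mut self) -> Option<Result<String, OsString>> {
        if self.after_double_dash {
            return self.rargs.pop().map(Err);
        }
        let arg = self.rargs.pop()?;
        let arg_str = arg.to_str().unwrap_or_default();
        if arg_str == "--" {
            // Switch modes and return the next argument (if any) as a value.
            self.after_double_dash = true;
            return self.rargs.pop().map(Err);
        }
        if arg_str.starts_with('-') {
            Some(arg.into_string())
        } else {
            Some(Err(arg))
        }
    }
}

fn main() {
    let args = ["--dir", "--", "--log"].map(OsString::from).to_vec();
    let mut p = MiniParser::new(args);
    assert_eq!(p.pop_flag(), Some(Ok("--dir".to_owned())));
    // After `--`, `--log` comes back as a value, not a flag.
    assert_eq!(p.pop_flag(), Some(Err(OsString::from("--log"))));
    assert_eq!(p.pop_flag(), None);
}
```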
diff --git a/vendor/xshell-macros/.cargo-checksum.json b/vendor/xshell-macros/.cargo-checksum.json
index 5198d9797..d909058ce 100644
--- a/vendor/xshell-macros/.cargo-checksum.json
+++ b/vendor/xshell-macros/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.toml":"4c4a333e83824134161380aa9efb26335313dbca9f09e6a7160d53ce4c2f6918","src/lib.rs":"d91daf4306a265ab7343600f8bab807e059de30c04b9e0494faec45404a7efaa"},"package":"88301b56c26dd9bf5c43d858538f82d6f3f7764767defbc5d34e59459901c41a"} \ No newline at end of file
+{"files":{"Cargo.toml":"710b08bcced4154e543123ae84063e615d944d16746ec63c9c55a842223f8aaf","src/lib.rs":"d91daf4306a265ab7343600f8bab807e059de30c04b9e0494faec45404a7efaa"},"package":"1dbabb1cbd15a1d6d12d9ed6b35cc6777d4af87ab3ba155ea37215f20beab80c"} \ No newline at end of file
diff --git a/vendor/xshell-macros/Cargo.toml b/vendor/xshell-macros/Cargo.toml
index c95f0be33..190b1bf88 100644
--- a/vendor/xshell-macros/Cargo.toml
+++ b/vendor/xshell-macros/Cargo.toml
@@ -13,7 +13,7 @@
edition = "2021"
rust-version = "1.59"
name = "xshell-macros"
-version = "0.2.2"
+version = "0.2.3"
authors = ["Aleksey Kladov <aleksey.kladov@gmail.com>"]
description = "Private implementation detail of xshell crate"
license = "MIT OR Apache-2.0"
diff --git a/vendor/xshell/.cargo-checksum.json b/vendor/xshell/.cargo-checksum.json
index dd079af53..bb849f85b 100644
--- a/vendor/xshell/.cargo-checksum.json
+++ b/vendor/xshell/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"CHANGELOG.md":"dd08a5079cd2e437eb1269f7c60be9c05ce97db407dbb95a631d64c0f1c478a1","Cargo.lock":"f8f5019959ed5fcd772c3cf199220be41ce1e7b2b9a35b153c52f9c8f3a35171","Cargo.toml":"04a9cad4d6a6e7c756c35d3ba776334f0c47b895e54f7006a27617430c84b25f","LICENSE-APACHE":"a9040321c3712d8fd0b09cf52b17445de04a23a10165049ae187cd39e5c86be5","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"a480b1b8b943c633c64ce9c215f8ecea24e58c7242fe4605d37e7a3be810ab9f","examples/ci.rs":"d5fbfc199469c08f3d459164c05a85c1a0a8f1bef625b347533ad7a43c1e97fb","examples/clone_and_publish.rs":"94568ef665e65527417bb5d50b0404bc60d6a72942d70260c8f3ce1a99820077","src/error.rs":"9222d0b21a889c9fbac1d285c6d43573be9c94f0ea5c02b4bd692bdc02753b49","src/lib.rs":"61d7c1dcd569e60188baba51521661c177bff5a27bcf5272bd83f06542c2304a","tests/compile_time.rs":"224f3476eff4070fbd62c1974e7e69996efd92263bc12f7ddbd59823d85484da","tests/data/xecho.rs":"7a82252daade541bc3843fffa617fc50bf2faf7eebc55e4442a0bc9bb59182fd","tests/it/compile_failures.rs":"c87a438583c9f4b4e45a7422df3ee7c6bd5e69150eba5468425c63aa70fa47d3","tests/it/env.rs":"e863965669378e603c36186e1c738914e3d2300cbe3b04288a9ed689edcf09fb","tests/it/main.rs":"1bb089455f92d6486bf80502ebbbd1f136248194b9c454949f21ed279ea58028","tests/it/tidy.rs":"f530cf51504d43716e849ac96fb64a3a6ef80bd3e56b6eb1bd7b5325dc2f2de9"},"package":"6d47097dc5c85234b1e41851b3422dd6d19b3befdd35b4ae5ce386724aeca981"} \ No newline at end of file
+{"files":{"CHANGELOG.md":"c6e15e05c13991d47bbbd21c1fad9005c14de8dfca06a10c860fa1c70f1e069b","Cargo.lock":"1ee8c4b1409e8fecca09396a23d5199a29b04e6f710d04f0d4b5264279a6cb3c","Cargo.toml":"a4617f00daab17b57f080dff8b331d3847433a4fa65b30945d109df0aad6680b","LICENSE-APACHE":"a9040321c3712d8fd0b09cf52b17445de04a23a10165049ae187cd39e5c86be5","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"a480b1b8b943c633c64ce9c215f8ecea24e58c7242fe4605d37e7a3be810ab9f","examples/ci.rs":"d5fbfc199469c08f3d459164c05a85c1a0a8f1bef625b347533ad7a43c1e97fb","examples/clone_and_publish.rs":"94568ef665e65527417bb5d50b0404bc60d6a72942d70260c8f3ce1a99820077","src/error.rs":"9222d0b21a889c9fbac1d285c6d43573be9c94f0ea5c02b4bd692bdc02753b49","src/lib.rs":"8bc6f4eab9734a46a717718001a79825a50874da324bb405b29adb17c6e2f220","tests/compile_time.rs":"224f3476eff4070fbd62c1974e7e69996efd92263bc12f7ddbd59823d85484da","tests/data/xecho.rs":"7a82252daade541bc3843fffa617fc50bf2faf7eebc55e4442a0bc9bb59182fd","tests/it/compile_failures.rs":"c87a438583c9f4b4e45a7422df3ee7c6bd5e69150eba5468425c63aa70fa47d3","tests/it/env.rs":"e863965669378e603c36186e1c738914e3d2300cbe3b04288a9ed689edcf09fb","tests/it/main.rs":"1bb089455f92d6486bf80502ebbbd1f136248194b9c454949f21ed279ea58028","tests/it/tidy.rs":"f530cf51504d43716e849ac96fb64a3a6ef80bd3e56b6eb1bd7b5325dc2f2de9"},"package":"962c039b3a7b16cf4e9a4248397c6585c07547412e7d6a6e035389a802dcfe90"} \ No newline at end of file
diff --git a/vendor/xshell/CHANGELOG.md b/vendor/xshell/CHANGELOG.md
index cbef62a3e..997a28d3c 100644
--- a/vendor/xshell/CHANGELOG.md
+++ b/vendor/xshell/CHANGELOG.md
@@ -1,5 +1,9 @@
# Changelog
+## 0.2.3
+
+- Fix bug where `Cmd::run` would ignore specified stdin.
+
## 0.2.2
- Add `Shell::path_exists`.
diff --git a/vendor/xshell/Cargo.lock b/vendor/xshell/Cargo.lock
index 1ea0d8ff2..bcea12812 100644
--- a/vendor/xshell/Cargo.lock
+++ b/vendor/xshell/Cargo.lock
@@ -4,13 +4,13 @@ version = 3
[[package]]
name = "anyhow"
-version = "1.0.57"
+version = "1.0.66"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "08f9b8508dccb7687a1d6c4ce66b2b0ecef467c94667de27d8d7fe1f8d2a9cdc"
+checksum = "216261ddc8289130e551ddcd5ce8a064710c0d064a4d2895c67151c92b5443f6"
[[package]]
name = "xshell"
-version = "0.2.2"
+version = "0.2.3"
dependencies = [
"anyhow",
"xshell-macros",
@@ -18,6 +18,6 @@ dependencies = [
[[package]]
name = "xshell-macros"
-version = "0.2.2"
+version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "88301b56c26dd9bf5c43d858538f82d6f3f7764767defbc5d34e59459901c41a"
+checksum = "1dbabb1cbd15a1d6d12d9ed6b35cc6777d4af87ab3ba155ea37215f20beab80c"
diff --git a/vendor/xshell/Cargo.toml b/vendor/xshell/Cargo.toml
index f4fb7cff2..781152f51 100644
--- a/vendor/xshell/Cargo.toml
+++ b/vendor/xshell/Cargo.toml
@@ -13,7 +13,7 @@
edition = "2021"
rust-version = "1.59"
name = "xshell"
-version = "0.2.2"
+version = "0.2.3"
authors = ["Aleksey Kladov <aleksey.kladov@gmail.com>"]
exclude = [".github/", "bors.toml", "rustfmt.toml", "cbench", "mock_bin/"]
description = "Utilities for quick shell scripting in Rust"
@@ -22,6 +22,6 @@ license = "MIT OR Apache-2.0"
repository = "https://github.com/matklad/xshell"
resolver = "2"
[dependencies.xshell-macros]
-version = "=0.2.2"
+version = "=0.2.3"
[dev-dependencies.anyhow]
version = "1.0.56"
diff --git a/vendor/xshell/src/lib.rs b/vendor/xshell/src/lib.rs
index 6b980ba22..02d6276f1 100644
--- a/vendor/xshell/src/lib.rs
+++ b/vendor/xshell/src/lib.rs
@@ -960,10 +960,7 @@ impl<'a> Cmd<'a> {
if !self.data.quiet {
eprintln!("$ {}", self);
}
- let mut command = self.to_command();
- let status = command.status().map_err(|err| Error::new_cmd_io(self, err))?;
- self.check_status(status)?;
- Ok(())
+ self.output_impl(false, false).map(|_| ())
}
/// Run the command and return its stdout as a string.
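
Per the changelog entry above, routing `Cmd::run` through `output_impl` means stdin configured on the command is no longer dropped when the command is run rather than read. An illustrative usage sketch, assuming the documented xshell 0.2 surface (`Shell::new`, the `cmd!` macro, `Cmd::stdin`, `Cmd::run`):

```
use xshell::{cmd, Shell};

fn main() -> Result<(), xshell::Error> {
    let sh = Shell::new()?;
    // Prior to 0.2.3, `run()` spawned the command without the configured stdin;
    // only `read()` honored it.
    cmd!(sh, "cat").stdin("piped input\n").run()?;
    Ok(())
}
```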
diff --git a/vendor/yoke-derive/.cargo-checksum.json b/vendor/yoke-derive/.cargo-checksum.json
index 00b384ec1..5ab332567 100644
--- a/vendor/yoke-derive/.cargo-checksum.json
+++ b/vendor/yoke-derive/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.lock":"749614197321ed7a12f39f677ac7ff5f47b38bd0b610b0448d31eb0acbbfad2d","Cargo.toml":"efb71924dd343a2bc1c174a5068b9178f96d63967d32b0668d27cdc5c3e53332","LICENSE":"4ad7541d66a407234e2c84902124cef325c29f3e966353efdb800bedb8b8da21","README.md":"a386dfc08e98106071c34adf2cdfea6a60c5a1d71ca9c9ed383a4922148d14d7","examples/yoke_derive.rs":"787ad9872040733c243ec81e67e0b9651937d4e01670b6f050c13e82f1c24a4e","src/lib.rs":"f2fd1a01134d82d34c67539929ad7119f29795b719b534309ef39cbb0b2bb4de","src/visitor.rs":"24545c1e81fd35c1d2bd38a1c8d1e684dd08faed4d10d75b103c371df4446c21"},"package":"1346e4cd025ae818b88566eac7eb65ab33a994ea55f355c86889af2e7e56b14e"} \ No newline at end of file
+{"files":{"Cargo.lock":"745a4d99005483f6b9b4eae8db5ea6b260c1913df23ce26d27ad8885c2707003","Cargo.toml":"e4302428531ee62c223811af40a8a73f8c3e8b9d2da51c83b97d1c80a442f69d","LICENSE":"4ad7541d66a407234e2c84902124cef325c29f3e966353efdb800bedb8b8da21","README.md":"a386dfc08e98106071c34adf2cdfea6a60c5a1d71ca9c9ed383a4922148d14d7","examples/yoke_derive.rs":"787ad9872040733c243ec81e67e0b9651937d4e01670b6f050c13e82f1c24a4e","src/lib.rs":"a320049225282da281b8b5fdb2513f8e4bf81b6b784a21dc8c2bd0f53baed85f","src/visitor.rs":"24545c1e81fd35c1d2bd38a1c8d1e684dd08faed4d10d75b103c371df4446c21"},"package":"ca800d73d6b7a7ee54f2608205c98b549fca71c9500c1abcb3abdc7708b4a8cb"} \ No newline at end of file
diff --git a/vendor/yoke-derive/Cargo.lock b/vendor/yoke-derive/Cargo.lock
index 2822aee8f..6ee02cebf 100644
--- a/vendor/yoke-derive/Cargo.lock
+++ b/vendor/yoke-derive/Cargo.lock
@@ -4,39 +4,27 @@ version = 3
[[package]]
name = "proc-macro2"
-version = "1.0.47"
+version = "1.0.50"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5ea3d908b0e36316caf9e9e2c4625cdde190a7e6f440d794667ed17a1855e725"
+checksum = "6ef7d57beacfaf2d8aee5937dab7b7f28de3cb8b1828479bb5de2a7106f2bae2"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
-version = "1.0.21"
+version = "1.0.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179"
+checksum = "8856d8364d252a14d474036ea1358d63c9e6965c8e5c1885c18f73d70bff9c7b"
dependencies = [
"proc-macro2",
]
[[package]]
-name = "serde"
-version = "1.0.145"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "728eb6351430bccb993660dfffc5a72f91ccc1295abaa8ce19b27ebe4f75568b"
-
-[[package]]
-name = "stable_deref_trait"
-version = "1.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
-
-[[package]]
name = "syn"
-version = "1.0.103"
+version = "1.0.107"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a864042229133ada95abf3b54fdc62ef5ccabe9515b64717bcb9a1919e59445d"
+checksum = "1f4064b5b16e03ae50984a5a8ed5d4f8803e6bc1fd170a3cda91a1be4b18e3f5"
dependencies = [
"proc-macro2",
"quote",
@@ -57,9 +45,9 @@ dependencies = [
[[package]]
name = "unicode-ident"
-version = "1.0.5"
+version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6ceab39d59e4c9499d4e5a8ee0e2735b891bb7308ac83dfb4e80cad195c9f6f3"
+checksum = "84a22b9f218b40614adcb3f4ff08b703773ad44fa9423e4e0d346d5db86e4ebc"
[[package]]
name = "unicode-xid"
@@ -68,68 +56,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c"
[[package]]
-name = "yoke"
-version = "0.6.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1fe1d55ca72c32d573bfbd5cb2f0ca65a497854c44762957a6d3da96041a5184"
-dependencies = [
- "serde",
- "stable_deref_trait",
- "yoke-derive 0.6.0",
- "zerofrom",
-]
-
-[[package]]
-name = "yoke-derive"
-version = "0.6.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "58c2c5bb7c929b85c1b9ec69091b0d835f0878b4fd9eb67973b25936e06c4374"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn",
- "synstructure",
-]
-
-[[package]]
name = "yoke-derive"
-version = "0.6.1"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn",
- "synstructure",
- "yoke",
- "zerovec",
-]
-
-[[package]]
-name = "zerofrom"
-version = "0.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "79e9355fccf72b04b7deaa99ce7a0f6630530acf34045391b74460fcd714de54"
-dependencies = [
- "zerofrom-derive",
-]
-
-[[package]]
-name = "zerofrom-derive"
-version = "0.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8785f47d6062c1932866147f91297286a9f350b3070e9d9f0b6078e37d623c1a"
+version = "0.7.0"
dependencies = [
"proc-macro2",
"quote",
"syn",
"synstructure",
]
-
-[[package]]
-name = "zerovec"
-version = "0.9.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b9d919a74c17749ccb17beaf6405562e413cd94e98ba52ca1e64bbe7eefbd8b8"
-dependencies = [
- "yoke",
- "zerofrom",
-]
diff --git a/vendor/yoke-derive/Cargo.toml b/vendor/yoke-derive/Cargo.toml
index 4d1eedac9..85e25ffa0 100644
--- a/vendor/yoke-derive/Cargo.toml
+++ b/vendor/yoke-derive/Cargo.toml
@@ -12,7 +12,7 @@
[package]
edition = "2018"
name = "yoke-derive"
-version = "0.6.1"
+version = "0.7.0"
authors = ["Manish Goregaokar <manishsmail@gmail.com>"]
description = "Custom derive for the yoke crate"
keywords = [
@@ -32,6 +32,9 @@ license = "Unicode-DFS-2016"
repository = "https://github.com/unicode-org/icu4x"
resolver = "2"
+[package.metadata.workspaces]
+independent = true
+
[lib]
path = "src/lib.rs"
proc_macro = true
@@ -52,10 +55,4 @@ features = [
[dependencies.synstructure]
version = "0.12.4"
-[dev-dependencies.yoke]
-version = "0.6.0"
-features = ["derive"]
-
-[dev-dependencies.zerovec]
-version = "0.9"
-features = ["yoke"]
+[dev-dependencies]
diff --git a/vendor/yoke-derive/src/lib.rs b/vendor/yoke-derive/src/lib.rs
index 615669d84..4c53bef68 100644
--- a/vendor/yoke-derive/src/lib.rs
+++ b/vendor/yoke-derive/src/lib.rs
@@ -81,8 +81,6 @@ fn yokeable_derive_impl(input: &DeriveInput) -> TokenStream2 {
f(self)
}
}
- // This is safe because there are no lifetime parameters.
- unsafe impl<'a, #(#tybounds),*> yoke::IsCovariant<'a> for #name<#(#typarams),*> where #(#static_bounds),* {}
}
} else {
if lts != 1 {
@@ -110,11 +108,11 @@ fn yokeable_derive_impl(input: &DeriveInput) -> TokenStream2 {
.collect();
let mut yoke_bounds: Vec<WherePredicate> = vec![];
structure.bind_with(|_| synstructure::BindStyle::Move);
- let body = structure.each_variant(|vi| {
+ let owned_body = structure.each_variant(|vi| {
vi.construct(|f, i| {
let binding = format!("__binding_{}", i);
let field = Ident::new(&binding, Span::call_site());
- let fty = replace_lifetime(&f.ty, static_lt());
+ let fty_static = replace_lifetime(&f.ty, static_lt());
let (has_ty, has_lt) = visitor::check_type_for_parameters(&f.ty, &generics_env);
if has_ty {
@@ -123,11 +121,14 @@ fn yokeable_derive_impl(input: &DeriveInput) -> TokenStream2 {
// to `FieldTy: Yokeable` that need to be satisfied. We get them to be satisfied by requiring
// `FieldTy<'static>: Yokeable<FieldTy<'a>>`
if has_lt {
- let a_ty = replace_lifetime(&f.ty, custom_lt("'a"));
- yoke_bounds
- .push(parse_quote!(#fty: yoke::Yokeable<'a, Output = #a_ty>));
+ let fty_a = replace_lifetime(&f.ty, custom_lt("'a"));
+ yoke_bounds.push(
+ parse_quote!(#fty_static: yoke::Yokeable<'a, Output = #fty_a>),
+ );
} else {
- yoke_bounds.push(parse_quote!(#fty: yoke::Yokeable<'a, Output = #fty>));
+ yoke_bounds.push(
+ parse_quote!(#fty_static: yoke::Yokeable<'a, Output = #fty_static>),
+ );
}
}
if has_ty || has_lt {
@@ -135,7 +136,7 @@ fn yokeable_derive_impl(input: &DeriveInput) -> TokenStream2 {
// that the lifetimes are covariant, since this requirement
// must already be true for the type that implements transform_owned().
quote! {
- <#fty as yoke::Yokeable<'a>>::transform_owned(#field)
+ <#fty_static as yoke::Yokeable<'a>>::transform_owned(#field)
}
} else {
// No nested lifetimes, so nothing to be done
@@ -143,6 +144,30 @@ fn yokeable_derive_impl(input: &DeriveInput) -> TokenStream2 {
}
})
});
+ let borrowed_body = structure.each(|binding| {
+ let f = binding.ast();
+ let field = &binding.binding;
+
+ let (has_ty, has_lt) = visitor::check_type_for_parameters(&f.ty, &generics_env);
+
+ if has_ty || has_lt {
+ let fty_static = replace_lifetime(&f.ty, static_lt());
+ let fty_a = replace_lifetime(&f.ty, custom_lt("'a"));
+ // We also must assert that each individual field can `transform()` correctly
+ //
+ // Even though transform_owned() does such an assertion already, CoerceUnsized
+ // can cause type transformations that allow it to succeed where this would fail.
+ // We need to check both.
+ //
+ // https://github.com/unicode-org/icu4x/issues/2928
+ quote! {
+ let _: &#fty_a = &<#fty_static as yoke::Yokeable<'a>>::transform(#field);
+ }
+ } else {
+ // No nested lifetimes, so nothing to be done
+ quote! {}
+ }
+ });
return quote! {
unsafe impl<'a, #(#tybounds),*> yoke::Yokeable<'a> for #name<'static, #(#typarams),*>
where #(#static_bounds,)*
@@ -150,6 +175,12 @@ fn yokeable_derive_impl(input: &DeriveInput) -> TokenStream2 {
type Output = #name<'a, #(#typarams),*>;
#[inline]
fn transform(&'a self) -> &'a Self::Output {
+                // These are just compile-time type asserts; this block is never executed
+ if false {
+ match self {
+ #borrowed_body
+ }
+ }
unsafe {
// safety: we have asserted covariance in
// transform_owned
@@ -158,7 +189,7 @@ fn yokeable_derive_impl(input: &DeriveInput) -> TokenStream2 {
}
#[inline]
fn transform_owned(self) -> Self::Output {
- match self { #body }
+ match self { #owned_body }
}
#[inline]
unsafe fn make(this: Self::Output) -> Self {
@@ -206,6 +237,7 @@ fn yokeable_derive_impl(input: &DeriveInput) -> TokenStream2 {
// are the same
debug_assert!(mem::size_of::<Self::Output>() == mem::size_of::<Self>());
let ptr: *const Self = (&this as *const Self::Output).cast();
+            #[allow(clippy::forget_copy)] // This is a no-op if the struct is Copy, which Clippy doesn't like
mem::forget(this);
ptr::read(ptr)
}
@@ -216,9 +248,6 @@ fn yokeable_derive_impl(input: &DeriveInput) -> TokenStream2 {
unsafe { f(core::mem::transmute::<&'a mut Self, &'a mut Self::Output>(self)) }
}
}
- // This is safe because it is in the same block as the above impl, which only compiles
- // if 'a is a covariant lifetime.
- unsafe impl<'a, #(#tybounds),*> yoke::IsCovariant<'a> for #name<'a, #(#typarams),*> where #(#static_bounds),* {}
}
}
}
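
The `#borrowed_body` block above relies on a small trick: code wrapped in `if false { ... }` is never executed, but the compiler still type-checks it, so it acts as a compile-time assertion that each field's `'static` form can `transform()` into its `'a` form. A standalone sketch of the pattern, with made-up types (the real derive emits one such assert per field of the annotated struct):

```
// A toy stand-in for Yokeable: cast a 'static-lifetime form to an 'a form.
trait Shorten<'a> {
    type Output: 'a;
    fn shorten(&'a self) -> &'a Self::Output;
}

struct Wrapper<'a>(&'a str);

impl<'a> Shorten<'a> for Wrapper<'static> {
    type Output = Wrapper<'a>;
    fn shorten(&'a self) -> &'a Wrapper<'a> {
        // Sound because Wrapper is covariant in its lifetime.
        self
    }
}

fn transform<'a>(this: &'a Wrapper<'static>) -> &'a Wrapper<'a> {
    // Never runs, but forces the field-level bound to be checked at compile time,
    // mirroring the `if false { match self { ... } }` emitted by the derive.
    if false {
        let _: &Wrapper<'a> = <Wrapper<'static> as Shorten<'a>>::shorten(this);
    }
    this
}

fn main() {
    let w = Wrapper("hello");
    assert_eq!(transform(&w).0, "hello");
}
```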
diff --git a/vendor/yoke/.cargo-checksum.json b/vendor/yoke/.cargo-checksum.json
index 1052beac5..4b8d4839c 100644
--- a/vendor/yoke/.cargo-checksum.json
+++ b/vendor/yoke/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.toml":"a26eaa79fc44d47a95490b583429bc252338eafe17172ddecc5fed5b90709748","LICENSE":"4ad7541d66a407234e2c84902124cef325c29f3e966353efdb800bedb8b8da21","README.md":"30321954bbc84770e2b50e8cc6da2a9e98f97698cbf1fe829747d4f3c6f6f791","src/either.rs":"028bb043392d1f3d608589d65d997a241b5ba7bb5114cf023179a38bf1c1cf2b","src/erased.rs":"97e545e421f08ae22c6bd8b4d89c9f91876a0b502cc74984dce77f0506decc4d","src/is_covariant.rs":"2d7f92083473be10847b4471917994eee060bf9e31f5d2672ac78cfe64be91b9","src/lib.rs":"5cf0694e0168c8240de935a2187d3f34d2b1ebe03429662df45b5bd97d3226ff","src/macro_impls.rs":"7d626660f4f7a0148710c5b308511604bff1010b813f124ba43ec4e591057981","src/trait_hack.rs":"d3a8b93e0a984febabd288af558d25e5a93019e2bf9209bc023762c9182aa7fc","src/yoke.rs":"8ec18fbbeaf9a87d87a3b5cbc859c49d5929a0bdbf43586d70000294c6dae26f","src/yokeable.rs":"1b2e04f620ab7c06c557d23c44b7ebb67736ae239c44277f4fbefbacc011e549","src/zero_from.rs":"c863d016c1e73bbac25f189c78c544ae65649d20fc9412f385a023d30c9a16f5"},"package":"1fe1d55ca72c32d573bfbd5cb2f0ca65a497854c44762957a6d3da96041a5184"} \ No newline at end of file
+{"files":{"Cargo.toml":"73b21b61d53b8b31994e0a5f9509aa0d26212a28bcf2ac1606deb83c6e975197","LICENSE":"4ad7541d66a407234e2c84902124cef325c29f3e966353efdb800bedb8b8da21","README.md":"30321954bbc84770e2b50e8cc6da2a9e98f97698cbf1fe829747d4f3c6f6f791","src/either.rs":"028bb043392d1f3d608589d65d997a241b5ba7bb5114cf023179a38bf1c1cf2b","src/erased.rs":"a97a1be3436314e34903a59330a8f61f96f5543c73d96853c09c32392dd09898","src/lib.rs":"9168593ad938bb19c867a3fde27911003b108ae40efed87741bd5d036c914a39","src/macro_impls.rs":"f81ac8af77ac3641bfee116d6295f5f756f92d4b3b6c7d43a228cd313174d355","src/trait_hack.rs":"d3a8b93e0a984febabd288af558d25e5a93019e2bf9209bc023762c9182aa7fc","src/yoke.rs":"91f8f8b5ea15b42d79c0790eaf1e08607a42bd92e9773db05281e3e1eb9dbb82","src/yokeable.rs":"1b2e04f620ab7c06c557d23c44b7ebb67736ae239c44277f4fbefbacc011e549","src/zero_from.rs":"71d97f87e003db0eb0e97064509bdf9355622ccd655549f926b6a0d9119db3ee"},"package":"222180af14a6b54ef2c33493c1eff77ae95a3687a21b243e752624006fb8f26e"} \ No newline at end of file
diff --git a/vendor/yoke/Cargo.toml b/vendor/yoke/Cargo.toml
index d8629127d..a20e850b6 100644
--- a/vendor/yoke/Cargo.toml
+++ b/vendor/yoke/Cargo.toml
@@ -12,7 +12,7 @@
[package]
edition = "2018"
name = "yoke"
-version = "0.6.2"
+version = "0.7.0"
authors = ["Manish Goregaokar <manishsmail@gmail.com>"]
include = [
"src/**/*",
@@ -40,6 +40,9 @@ license = "Unicode-DFS-2016"
repository = "https://github.com/unicode-org/icu4x"
resolver = "2"
+[package.metadata.workspaces]
+independent = true
+
[package.metadata.docs.rs]
all-features = true
@@ -53,11 +56,11 @@ version = "1.2.0"
default-features = false
[dependencies.yoke-derive]
-version = "0.6.0"
+version = "0.7.0"
optional = true
[dependencies.zerofrom]
-version = "0.1.0"
+version = "0.1.1"
optional = true
default-features = false
@@ -70,7 +73,7 @@ version = "1.0.125"
[features]
alloc = [
"stable_deref_trait/alloc",
- "serde/alloc",
+ "serde?/alloc",
"zerofrom/alloc",
]
default = [
@@ -78,6 +81,6 @@ default = [
"zerofrom",
]
derive = [
- "yoke-derive",
+ "dep:yoke-derive",
"zerofrom/derive",
]
diff --git a/vendor/yoke/src/erased.rs b/vendor/yoke/src/erased.rs
index c314d5186..bc4de9791 100644
--- a/vendor/yoke/src/erased.rs
+++ b/vendor/yoke/src/erased.rs
@@ -7,7 +7,7 @@
//! See the docs of [`Yoke::erase_rc_cart()`](crate::Yoke::erase_rc_cart)
//! and [`Yoke::erase_box_cart()`](crate::Yoke::erase_box_cart) for more info.
//!
-//! Available with the `"alloc"` feature enabled.
+//! Available with the `"alloc"` Cargo feature enabled.
use alloc::boxed::Box;
use alloc::rc::Rc;
@@ -25,17 +25,17 @@ impl<T: 'static> ErasedDestructor for T {}
///
/// See the docs of [`Yoke::erase_arc_cart()`](crate::Yoke::erase_rc_cart) for more info.
///
-/// Available with the `"alloc"` feature enabled.
+/// Available with the `"alloc"` Cargo feature enabled.
pub type ErasedArcCart = Arc<dyn ErasedDestructor + Send + Sync>;
/// A type-erased Cart that has `Rc` semantics
///
/// See the docs of [`Yoke::erase_rc_cart()`](crate::Yoke::erase_rc_cart) for more info.
///
-/// Available with the `"alloc"` feature enabled.
+/// Available with the `"alloc"` Cargo feature enabled.
pub type ErasedRcCart = Rc<dyn ErasedDestructor>;
/// A type-erased Cart that has `Box` semantics
///
/// See the docs of [`Yoke::erase_box_cart()`](crate::Yoke::erase_box_cart) for more info.
///
-/// Available with the `"alloc"` feature enabled.
+/// Available with the `"alloc"` Cargo feature enabled.
pub type ErasedBoxCart = Box<dyn ErasedDestructor>;
diff --git a/vendor/yoke/src/is_covariant.rs b/vendor/yoke/src/is_covariant.rs
deleted file mode 100644
index 75d123c84..000000000
--- a/vendor/yoke/src/is_covariant.rs
+++ /dev/null
@@ -1,142 +0,0 @@
-// This file is part of ICU4X. For terms of use, please see the file
-// called LICENSE at the top level of the ICU4X source tree
-// (online at: https://github.com/unicode-org/icu4x/blob/main/LICENSE ).
-
-#[cfg(feature = "alloc")]
-use alloc::{
- borrow::{Cow, ToOwned},
- boxed::Box,
- rc::Rc,
- string::String,
-};
-
-/// A type implementing `IsCovariant<'a>` is covariant with respect to lifetime `'a`.
-///
-/// Lifetime parameters that are safely cast in [`Yokeable`] are also valid for `IsCovariant`.
-///
-/// `IsCovariant` exists primarily to serve in trait bounds. The primary use case is to safely
-/// perform lifetime casting on trait objects (`dyn Trait`). This enables a type-erased [`Yoke`]
-/// consisting of only trait objects. See the examples.
-///
-/// `IsCovariant` is auto-implemented in [`#[derive(Yokeable)]`](macro@crate::Yokeable).
-///
-/// # Safety
-///
-/// This trait is safe to implement on types with a _covariant_ lifetime parameter. This will
-/// occur when the lifetime parameter is used within references, but not in the arguments of
-/// function pointers or in mutable positions (either in `&mut` or via interior mutability).
-///
-/// If a struct has multiple lifetime parameters, only the one used in `IsCovariant<'a>` needs to
-/// be covariant.
-///
-/// # Examples
-///
-/// Implementing on a simple struct with a single covariant lifetime:
-///
-/// ```
-/// # use yoke::*;
-/// struct MyStruct<'a>(&'a str);
-///
-/// // This is safe because 'a is covariant
-/// unsafe impl<'a> IsCovariant<'a> for MyStruct<'a> {}
-/// ```
-///
-/// By constraining the trait `ExampleTrait<'a>` on `IsCovariant<'a>`, we can safely implement
-/// [`Yokeable`] and [`ZeroFrom`] on its trait object:
-///
-/// ```
-/// # use yoke::*;
-/// # use zerofrom::*;
-/// # use core::mem;
-/// trait ExampleTrait<'a>: IsCovariant<'a> {
-/// fn get_message(&self) -> &'a str;
-/// }
-///
-/// // This wrapper is required because of the blanket Yokeable impl on &'static T
-/// pub struct ExampleTraitDynRef<'a>(pub &'a dyn ExampleTrait<'a>);
-///
-/// // The following impl is safe because the trait object requires IsCovariant.
-/// unsafe impl<'a> Yokeable<'a> for ExampleTraitDynRef<'static> {
-/// type Output = ExampleTraitDynRef<'a>;
-/// fn transform(&'a self) -> &'a Self::Output {
-/// unsafe { mem::transmute(self) }
-/// }
-///
-/// fn transform_owned(self) -> Self::Output {
-/// unsafe { mem::transmute(self) }
-/// }
-///
-/// unsafe fn make(from: Self::Output) -> Self {
-/// unsafe { mem::transmute(from) }
-/// }
-///
-/// fn transform_mut<F>(&'a mut self, f: F)
-/// where
-/// F: 'static + FnOnce(&'a mut Self::Output),
-/// {
-/// unsafe { f(mem::transmute::<&mut Self, &mut Self::Output>(self)) }
-/// }
-/// }
-///
-/// impl<'zf, 'a> ZeroFrom<'zf, dyn ExampleTrait<'a> + 'a> for ExampleTraitDynRef<'zf> {
-/// fn zero_from(this: &'zf (dyn ExampleTrait<'a> + 'a)) -> ExampleTraitDynRef<'zf> {
-/// // This is safe because the trait object requires IsCovariant.
-/// ExampleTraitDynRef(unsafe { core::mem::transmute(this) })
-/// }
-/// }
-///
-/// // Implement ExampleTrait on the struct from the previous example
-/// # struct MyStruct<'a>(&'a str);
-/// # unsafe impl<'a> IsCovariant<'a> for MyStruct<'a> {}
-/// impl<'a> ExampleTrait<'a> for MyStruct<'a> {
-/// fn get_message(&self) -> &'a str {
-/// self.0
-/// }
-/// }
-///
-/// // Example usage: a Yoke of a trait object
-/// let s = "Hello World".to_string();
-/// let yoke: Yoke<ExampleTraitDynRef<'static>, Box<dyn ExampleTrait>> =
-/// Yoke::attach_to_zero_copy_cart(Box::new(MyStruct(&s)));
-///
-/// assert_eq!(yoke.get().0.get_message(), "Hello World");
-/// ```
-///
-/// [`Yoke`]: crate::Yoke
-/// [`Yokeable`]: crate::Yokeable
-/// [`ZeroFrom`]: crate::ZeroFrom
-pub unsafe trait IsCovariant<'a>: 'a {}
-
-// IsCovariant is implemented on the standard library Copy types in macro_impls.rs
-
-// The following impls are safe because there is only one lifetime, 'a, and 'a is covariant
-
-unsafe impl<'a> IsCovariant<'a> for () {}
-
-unsafe impl<'a> IsCovariant<'a> for str {}
-#[cfg(feature = "alloc")]
-unsafe impl<'a> IsCovariant<'a> for String {}
-
-unsafe impl<'a, T: IsCovariant<'a>> IsCovariant<'a> for Option<T> {}
-
-unsafe impl<'a, T1: IsCovariant<'a>, T2: IsCovariant<'a>> IsCovariant<'a> for (T1, T2) {}
-
-unsafe impl<'a, T: IsCovariant<'a>> IsCovariant<'a> for [T] {}
-
-unsafe impl<'a, T: IsCovariant<'a>, const N: usize> IsCovariant<'a> for [T; N] {}
-
-#[cfg(feature = "alloc")]
-unsafe impl<'a, T: IsCovariant<'a> + ?Sized> IsCovariant<'a> for Box<T> {}
-
-#[cfg(feature = "alloc")]
-unsafe impl<'a, T: IsCovariant<'a> + ?Sized> IsCovariant<'a> for Rc<T> {}
-
-// This is safe because T has a covariant lifetime, and Cow's lifetime is also covariant
-#[cfg(feature = "alloc")]
-unsafe impl<'a, T: IsCovariant<'a> + ToOwned + ?Sized> IsCovariant<'a> for Cow<'a, T> where
- <T as ToOwned>::Owned: Sized
-{
-}
-
-// This is safe because T has a covariant lifetime, and the reference lifetime is also covariant
-unsafe impl<'a, T: IsCovariant<'a> + ?Sized> IsCovariant<'a> for &'a T {}
diff --git a/vendor/yoke/src/lib.rs b/vendor/yoke/src/lib.rs
index 666e179e9..1524c067c 100644
--- a/vendor/yoke/src/lib.rs
+++ b/vendor/yoke/src/lib.rs
@@ -48,7 +48,6 @@ extern crate alloc;
pub mod either;
#[cfg(feature = "alloc")]
pub mod erased;
-mod is_covariant;
mod macro_impls;
pub mod trait_hack;
mod yoke;
@@ -59,7 +58,6 @@ mod zero_from;
#[cfg(feature = "derive")]
pub use yoke_derive::Yokeable;
-pub use crate::is_covariant::IsCovariant;
pub use crate::yoke::{CloneableCart, Yoke};
pub use crate::yokeable::Yokeable;
diff --git a/vendor/yoke/src/macro_impls.rs b/vendor/yoke/src/macro_impls.rs
index 664816818..060061b20 100644
--- a/vendor/yoke/src/macro_impls.rs
+++ b/vendor/yoke/src/macro_impls.rs
@@ -6,7 +6,7 @@
// than using pointer casts
#![allow(clippy::transmute_ptr_to_ptr)]
-use crate::{IsCovariant, Yokeable};
+use crate::Yokeable;
use core::{mem, ptr};
macro_rules! copy_yoke_impl {
@@ -38,7 +38,6 @@ macro_rules! impl_copy_type {
type Output = Self;
copy_yoke_impl!();
}
- unsafe impl<'a> IsCovariant<'a> for $ty {}
};
}
diff --git a/vendor/yoke/src/yoke.rs b/vendor/yoke/src/yoke.rs
index c3d8c37d9..7468b4d99 100644
--- a/vendor/yoke/src/yoke.rs
+++ b/vendor/yoke/src/yoke.rs
@@ -6,7 +6,6 @@ use crate::either::EitherCart;
#[cfg(feature = "alloc")]
use crate::erased::{ErasedArcCart, ErasedBoxCart, ErasedRcCart};
use crate::trait_hack::YokeTraitHack;
-use crate::IsCovariant;
use crate::Yokeable;
use core::marker::PhantomData;
use core::ops::Deref;
@@ -48,8 +47,7 @@ use alloc::sync::Arc;
/// into another `Yoke` containing a different type that may contain elements of the original yoked
/// value. See the [`Yoke::map_project()`] docs for more details.
///
-/// In general, `C` is a concrete type, but it is also possible for it to be a trait object;
-/// for more information, see [`IsCovariant`].
+/// In general, `C` is a concrete type, but it is also possible for it to be a trait object.
///
/// # Example
///
@@ -83,7 +81,10 @@ pub struct Yoke<Y: for<'a> Yokeable<'a>, C> {
cart: C,
}
-impl<Y: for<'a> Yokeable<'a>, C: StableDeref> Yoke<Y, C> {
+impl<Y: for<'a> Yokeable<'a>, C: StableDeref> Yoke<Y, C>
+where
+ <C as Deref>::Target: 'static,
+{
/// Construct a [`Yoke`] by yokeing an object to a cart in a closure.
///
/// See also [`Yoke::try_attach_to_cart()`] to return a `Result` from the closure.
@@ -116,7 +117,14 @@ impl<Y: for<'a> Yokeable<'a>, C: StableDeref> Yoke<Y, C> {
/// ```
pub fn attach_to_cart<F>(cart: C, f: F) -> Self
where
+ // safety note: This works by enforcing that the *only* place the return value of F
+ // can borrow from is the cart, since `F` must be valid for all lifetimes `'de`
+ //
+ // The <C as Deref>::Target: 'static on the impl is crucial for safety as well
+ //
+ // See safety docs at the bottom of this file for more information
F: for<'de> FnOnce(&'de <C as Deref>::Target) -> <Y as Yokeable<'de>>::Output,
+ <C as Deref>::Target: 'static,
{
let deserialized = f(cart.deref());
Self {
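
The safety comment in the hunk above hinges on the higher-ranked closure bound: because `f` must accept a borrow of the cart at *every* lifetime `'de`, its output can only borrow from the cart itself. A hedged illustration using the public API shown in this file's own doc examples (`Yoke::attach_to_cart` with an `Rc<String>` cart):

```
use std::rc::Rc;
use yoke::Yoke;

fn main() {
    let cart: Rc<String> = Rc::new("  hello  ".to_owned());
    // OK: the returned &str borrows from the cart passed to the closure.
    let yoke = Yoke::<&'static str, _>::attach_to_cart(cart, |s| s.trim());
    assert_eq!(*yoke.get(), "hello");

    // Rejected by the compiler: the closure's output borrows from a local
    // instead of the cart, so it cannot meet the `for<'de>` bound.
    // let local = String::from("oops");
    // let bad = Yoke::<&'static str, _>::attach_to_cart(
    //     Rc::new(String::new()),
    //     |_| local.as_str(),
    // );
}
```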
@@ -220,8 +228,10 @@ impl<Y: for<'a> Yokeable<'a>, C> Yoke<Y, C> {
/// ```
/// use yoke::Yoke;
///
- /// let local_data = "foo".to_string();
- /// let yoke = Yoke::<&'static str, Box<String>>::attach_to_zero_copy_cart(Box::new(local_data));
+ /// let local_data = "foo".to_owned();
+ /// let yoke = Yoke::<&'static str, Box<String>>::attach_to_zero_copy_cart(
+ /// Box::new(local_data),
+ /// );
/// assert_eq!(*yoke.get(), "foo");
///
/// // Get back the cart
@@ -235,9 +245,11 @@ impl<Y: for<'a> Yokeable<'a>, C> Yoke<Y, C> {
/// use std::borrow::Cow;
/// use yoke::Yoke;
///
- /// let local_data = "foo".to_string();
+ /// let local_data = "foo".to_owned();
/// let mut yoke =
- /// Yoke::<Cow<'static, str>, Box<String>>::attach_to_zero_copy_cart(Box::new(local_data));
+ /// Yoke::<Cow<'static, str>, Box<String>>::attach_to_zero_copy_cart(
+ /// Box::new(local_data),
+ /// );
/// assert_eq!(yoke.get(), "foo");
///
/// // Override data in the cart
@@ -265,6 +277,9 @@ impl<Y: for<'a> Yokeable<'a>, C> Yoke<Y, C> {
/// - `f()` must not panic
/// - References from the yokeable `Y` should still be valid for the lifetime of the
/// returned cart type `C`.
+ /// - Lifetimes inside C must not be lengthened, even if they are themselves contravariant.
+    ///   I.e., if C contains an `fn(&'a u8)`, it cannot be replaced with `fn(&'static u8)`,
+ /// even though that is typically safe.
///
/// Typically, this means implementing `f` as something which _wraps_ the inner cart type `C`.
/// `Yoke` only really cares about destructors for its carts so it's fine to erase other
@@ -506,12 +521,9 @@ where
}
}
-// This is safe because Y is 'static and C has a covariant lifetime
-unsafe impl<'b, Y: for<'a> Yokeable<'a>, C: IsCovariant<'b>> IsCovariant<'b> for Yoke<Y, C> {}
-
#[test]
fn test_clone() {
- let local_data = "foo".to_string();
+ let local_data = "foo".to_owned();
let y1 = Yoke::<alloc::borrow::Cow<'static, str>, Rc<String>>::attach_to_zero_copy_cart(
Rc::new(local_data),
);
@@ -575,7 +587,9 @@ impl<Y: for<'a> Yokeable<'a>, C> Yoke<Y, C> {
/// string_2: &'a str,
/// }
///
- /// fn map_project_string_1(bar: Yoke<Bar<'static>, Rc<[u8]>>) -> Yoke<&'static str, Rc<[u8]>> {
+ /// fn map_project_string_1(
+ /// bar: Yoke<Bar<'static>, Rc<[u8]>>,
+ /// ) -> Yoke<&'static str, Rc<[u8]>> {
/// bar.map_project(|bar, _| bar.string_1)
/// }
///
@@ -651,7 +665,9 @@ impl<Y: for<'a> Yokeable<'a>, C> Yoke<Y, C> {
/// # use yoke::Yoke;
/// # use std::str::{self, Utf8Error};
/// #
- /// fn slice(y: Yoke<&'static [u8], Rc<[u8]>>) -> Result<Yoke<&'static str, Rc<[u8]>>, Utf8Error> {
+ /// fn slice(
+ /// y: Yoke<&'static [u8], Rc<[u8]>>,
+ /// ) -> Result<Yoke<&'static str, Rc<[u8]>>, Utf8Error> {
/// y.try_map_project(move |bytes, _| str::from_utf8(bytes))
/// }
/// ```
@@ -671,7 +687,9 @@ impl<Y: for<'a> Yokeable<'a>, C> Yoke<Y, C> {
/// string_2: &'a str,
/// }
///
- /// fn map_project_string_1(bar: Yoke<Bar<'static>, Rc<[u8]>>) -> Result<Yoke<&'static str, Rc<[u8]>>, Utf8Error> {
+ /// fn map_project_string_1(
+ /// bar: Yoke<Bar<'static>, Rc<[u8]>>,
+ /// ) -> Result<Yoke<&'static str, Rc<[u8]>>, Utf8Error> {
/// bar.try_map_project(|bar, _| str::from_utf8(bar.bytes_1))
/// }
///
@@ -861,17 +879,19 @@ impl<Y: for<'a> Yokeable<'a>, C: 'static + Sized> Yoke<Y, Rc<C>> {
/// let buffer1: Rc<String> = Rc::new(" foo bar baz ".into());
/// let buffer2: Box<String> = Box::new(" baz quux ".into());
///
- /// let yoke1 = Yoke::<&'static str, _>::attach_to_cart(buffer1, |rc| rc.trim());
+ /// let yoke1 =
+ /// Yoke::<&'static str, _>::attach_to_cart(buffer1, |rc| rc.trim());
/// let yoke2 = Yoke::<&'static str, _>::attach_to_cart(buffer2, |b| b.trim());
///
/// let erased1: Yoke<_, ErasedRcCart> = yoke1.erase_rc_cart();
/// // Wrap the Box in an Rc to make it compatible
- /// let erased2: Yoke<_, ErasedRcCart> = yoke2.wrap_cart_in_rc().erase_rc_cart();
+ /// let erased2: Yoke<_, ErasedRcCart> =
+ /// yoke2.wrap_cart_in_rc().erase_rc_cart();
///
/// // Now erased1 and erased2 have the same type!
/// ```
///
- /// Available with the `"alloc"` feature enabled.
+ /// Available with the `"alloc"` Cargo feature enabled.
pub fn erase_rc_cart(self) -> Yoke<Y, ErasedRcCart> {
unsafe {
// safe because the cart is preserved, just
@@ -905,17 +925,19 @@ impl<Y: for<'a> Yokeable<'a>, C: 'static + Sized + Send + Sync> Yoke<Y, Arc<C>>
/// let buffer1: Arc<String> = Arc::new(" foo bar baz ".into());
/// let buffer2: Box<String> = Box::new(" baz quux ".into());
///
- /// let yoke1 = Yoke::<&'static str, _>::attach_to_cart(buffer1, |arc| arc.trim());
+ /// let yoke1 =
+ /// Yoke::<&'static str, _>::attach_to_cart(buffer1, |arc| arc.trim());
/// let yoke2 = Yoke::<&'static str, _>::attach_to_cart(buffer2, |b| b.trim());
///
/// let erased1: Yoke<_, ErasedArcCart> = yoke1.erase_arc_cart();
/// // Wrap the Box in an Rc to make it compatible
- /// let erased2: Yoke<_, ErasedArcCart> = yoke2.wrap_cart_in_arc().erase_arc_cart();
+ /// let erased2: Yoke<_, ErasedArcCart> =
+ /// yoke2.wrap_cart_in_arc().erase_arc_cart();
///
/// // Now erased1 and erased2 have the same type!
/// ```
///
- /// Available with the `"alloc"` feature enabled.
+ /// Available with the `"alloc"` Cargo feature enabled.
pub fn erase_arc_cart(self) -> Yoke<Y, ErasedArcCart> {
unsafe {
// safe because the cart is preserved, just
@@ -949,17 +971,19 @@ impl<Y: for<'a> Yokeable<'a>, C: 'static + Sized> Yoke<Y, Box<C>> {
/// let buffer1: Rc<String> = Rc::new(" foo bar baz ".into());
/// let buffer2: Box<String> = Box::new(" baz quux ".into());
///
- /// let yoke1 = Yoke::<&'static str, _>::attach_to_cart(buffer1, |rc| rc.trim());
+ /// let yoke1 =
+ /// Yoke::<&'static str, _>::attach_to_cart(buffer1, |rc| rc.trim());
/// let yoke2 = Yoke::<&'static str, _>::attach_to_cart(buffer2, |b| b.trim());
///
/// // Wrap the Rc in an Box to make it compatible
- /// let erased1: Yoke<_, ErasedBoxCart> = yoke1.wrap_cart_in_box().erase_box_cart();
+ /// let erased1: Yoke<_, ErasedBoxCart> =
+ /// yoke1.wrap_cart_in_box().erase_box_cart();
/// let erased2: Yoke<_, ErasedBoxCart> = yoke2.erase_box_cart();
///
/// // Now erased1 and erased2 have the same type!
/// ```
///
- /// Available with the `"alloc"` feature enabled.
+ /// Available with the `"alloc"` Cargo feature enabled.
pub fn erase_box_cart(self) -> Yoke<Y, ErasedBoxCart> {
unsafe {
// safe because the cart is preserved, just
@@ -974,7 +998,7 @@ impl<Y: for<'a> Yokeable<'a>, C> Yoke<Y, C> {
/// Helper function allowing one to wrap the cart type `C` in a `Box<T>`.
/// Can be paired with [`Yoke::erase_box_cart()`]
///
- /// Available with the `"alloc"` feature enabled.
+ /// Available with the `"alloc"` Cargo feature enabled.
#[inline]
pub fn wrap_cart_in_box(self) -> Yoke<Y, Box<C>> {
unsafe {
@@ -986,7 +1010,7 @@ impl<Y: for<'a> Yokeable<'a>, C> Yoke<Y, C> {
/// Can be paired with [`Yoke::erase_rc_cart()`], or generally used
/// to make the [`Yoke`] cloneable.
///
- /// Available with the `"alloc"` feature enabled.
+ /// Available with the `"alloc"` Cargo feature enabled.
#[inline]
pub fn wrap_cart_in_rc(self) -> Yoke<Y, Rc<C>> {
unsafe {
@@ -998,7 +1022,7 @@ impl<Y: for<'a> Yokeable<'a>, C> Yoke<Y, C> {
/// Can be paired with [`Yoke::erase_arc_cart()`], or generally used
/// to make the [`Yoke`] cloneable.
///
- /// Available with the `"alloc"` feature enabled.
+ /// Available with the `"alloc"` Cargo feature enabled.
#[inline]
pub fn wrap_cart_in_arc(self) -> Yoke<Y, Arc<C>> {
unsafe {
@@ -1037,7 +1061,7 @@ impl<Y: for<'a> Yokeable<'a>, C> Yoke<Y, C> {
}
}
-/// Safety docs for project()
+/// # Safety docs for project()
///
/// (Docs are on a private const to allow the use of compile_fail doctests)
///
@@ -1140,3 +1164,151 @@ impl<Y: for<'a> Yokeable<'a>, C> Yoke<Y, C> {
/// the output yokeable can _only_ have borrowed data flow in to it from the input. All paths of unsoundness require the
/// unification of an existential and universal lifetime, which isn't possible.
const _: () = ();
+
+/// # Safety docs for attach_to_cart()'s signature
+///
+/// The `attach_to_cart()` family of methods relies on the following bound:
+///
+/// ```rust,ignore
+/// F: for<'de> FnOnce(&'de <C as Deref>::Target) -> <Y as Yokeable<'de>>::Output,
+/// C::Target: 'static
+/// ```
+///
+/// to enforce that the yoking closure produces a yokeable that is *only* allowed to borrow from the cart.
+/// A way to be sure of this is as follows: imagine if `F` *did* borrow data of lifetime `'a` and stuff it in
+/// its output. Then that lifetime `'a` would have to live at least as long as `'de` *for all `'de`*.
+/// The only lifetime that satisfies that is `'static` (since at least one of the potential `'de`s is `'static`),
+/// and we're fine with that.
+///
+/// ## Implied bounds and variance
+///
+/// The `C::Target: 'static` bound is tricky, however. Let's imagine a situation where we *didn't* have that bound.
+///
+/// One thing to remember is that we are okay with the cart itself borrowing from places,
+/// e.g. `&[u8]` is a valid cart, as is `Box<&[u8]>`. `C` itself is not required to be `'static`.
+///
+/// (I'm going to use `CT` in prose to refer to `C::Target` here, since almost everything here has to do
+/// with `C::Target` and not `C` itself.)
+///
+/// Unfortunately, there's a sneaky additional bound inside `F`. The signature of `F` is *actually*
+///
+/// ```rust,ignore
+/// F: for<'de> where<C::Target: 'de> FnOnce(&'de C::Target) -> <Y as Yokeable<'de>>::Output
+/// ```
+///
+/// using made-up "where clause inside HRTB" syntax to represent a type that can be represented inside the compiler
+/// and type system but not in Rust code. The `CT: 'de` bound comes from the `&'de C::Target`: any time you
+/// write `&'a T`, an implied bound of `T: 'a` materializes and is stored alongside it, since references cannot refer
+/// to data that itself refers to data of shorter lifetimes. If a reference is valid, its referent must be valid for
+/// the duration of the reference's lifetime, so every reference *inside* its referent must also be valid, giving us `T: 'a`.
+/// This kind of constraint is often called a "well formedness" constraint: `&'a T` is not "well formed" without that
+/// bound, and rustc is being helpful by giving it to us for free.
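+///
+/// As a standalone sketch (plain Rust, nothing yoke-specific; the `first` helper is purely
+/// illustrative), this implied bound is what lets the following compile without an explicit
+/// `where T: 'a`:
+///
+/// ```rust
+/// // The `&'a [T]` parameter gives us `T: 'a` for free, so handing back a
+/// // reference into the slice type-checks with no extra bounds.
+/// fn first<'a, T>(slice: &'a [T]) -> Option<&'a T> {
+///     slice.first()
+/// }
+///
+/// assert_eq!(first(&[1, 2, 3]), Some(&1));
+/// ```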
+///
+/// Unfortunately, this messes with our universal quantification. The `for<'de>` is no longer "For all lifetimes `'de`",
+/// it is "for all lifetimes `'de` *where `CT: 'de`*". And if `CT` borrows from somewhere (with lifetime `'ct`), then we get a
+/// `'ct: 'de` bound, and `'de` candidates that live longer than `'ct` won't actually be considered.
+/// The neat little logic at the beginning stops working.
+///
+/// `attach_to_cart()` will instead enforce that the produced yokeable *either* borrows from the cart (fine), or from
+/// data that has a lifetime that is at least `'ct`. Which means that `attach_to_cart()` will allow us to borrow locals
+/// provided they live at least as long as `'ct`.
+///
+/// Is this a problem?
+///
+/// This is totally fine if CT's lifetime is covariant: if C is something like `Box<&'ct [u8]>`, even if our
+/// yoked object borrows from locals outliving `'ct`, our Yoke can't outlive that
+/// lifetime `'ct` anyway (since it's a part of the cart type), so we're fine.
+///
+/// However, it's completely broken for contravariant carts (e.g. `Box<fn(&'ct u8)>`). In that case
+/// we still get `'ct: 'de`, and we still end up being able to
+/// borrow from locals that outlive `'ct`. However, our Yoke _can_ outlive
+/// that lifetime, because Yoke shares its variance over `'ct`
+/// with the cart type, and the cart type is contravariant over `'ct`.
+/// So the Yoke can be upcast to having a longer lifetime than `'ct`, and *that* Yoke
+/// can outlive `'ct`.
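+///
+/// (As a standalone illustration of that last step, with a hypothetical `lengthen` helper that
+/// is not part of this crate: contravariance is what allows the argument lifetime of a bare
+/// function pointer to be lengthened.)
+///
+/// ```rust
+/// type Contra<'a> = fn(&'a u8);
+///
+/// // `fn(&'a u8)` is a subtype of `fn(&'static u8)`: a function that accepts
+/// // short-lived references certainly accepts `'static` ones, so the
+/// // argument lifetime may grow when the pointer is coerced.
+/// fn lengthen<'a>(f: Contra<'a>) -> Contra<'static> {
+///     f
+/// }
+/// ```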
+///
+/// We fix this by forcing `C::Target: 'static` in `attach_to_cart()`. This makes it work
+/// for fewer types, but allows `Yoke` to continue to be covariant over cart lifetimes if necessary.
+///
+/// An alternate fix would be to not allow `Yoke` to ever be upcast over lifetimes contained in the cart
+/// by forcing them to be invariant. This is a bit more restrictive and affects *all* `Yoke` users, not just
+/// those using `attach_to_cart()`.
+///
+/// See https://github.com/unicode-org/icu4x/issues/2926
+/// See also https://github.com/rust-lang/rust/issues/106431 for potentially fixing this upstream by
+/// changing how the bound works.
+///
+/// # Tests
+///
+/// Here's a broken `attach_to_cart()` that attempts to borrow from a local:
+///
+/// ```rust,compile_fail
+/// use yoke::{Yoke, Yokeable};
+///
+/// let cart = vec![1, 2, 3, 4].into_boxed_slice();
+/// let local = vec![4, 5, 6, 7];
+/// let yoke: Yoke<&[u8], Box<[u8]>> = Yoke::attach_to_cart(cart, |_| &*local);
+/// ```
+///
+/// Fails as expected.
+///
+/// And here's a working one with a locally borrowed cart that does not do any sneaky borrows whilst attaching:
+///
+/// ```rust
+/// use yoke::{Yoke, Yokeable};
+///
+/// let cart = vec![1, 2, 3, 4].into_boxed_slice();
+/// let local = vec![4, 5, 6, 7];
+/// let yoke: Yoke<&[u8], &[u8]> = Yoke::attach_to_cart(&cart, |c| &*c);
+/// ```
+///
+/// Here's an `attach_to_cart()` that attempts to borrow from a longer-lived local due to
+/// the cart being covariant. It fails, but would not if the alternate fix of forcing Yoke to be invariant
+/// were implemented. It is technically a safe operation:
+///
+/// ```rust,compile_fail
+/// use yoke::{Yoke, Yokeable};
+/// // longer lived
+/// let local = vec![4, 5, 6, 7];
+///
+/// let backing = vec![1, 2, 3, 4];
+/// let cart = Box::new(&*backing);
+///
+/// let yoke: Yoke<&[u8], Box<&[u8]>> = Yoke::attach_to_cart(cart, |_| &*local);
+/// println!("{:?}", yoke.get());
+/// ```
+///
+/// Finally, here's an `attach_to_cart()` that attempts to borrow from a longer-lived local
+/// in the case of a contravariant lifetime. It does not compile, but in and of itself is not dangerous:
+///
+/// ```rust,compile_fail
+/// use yoke::Yoke;
+///
+/// type Contra<'a> = fn(&'a ());
+///
+/// let local = String::from("Hello World!");
+/// let yoke: Yoke<&'static str, Box<Contra<'_>>> = Yoke::attach_to_cart(Box::new((|_| {}) as _), |_| &local[..]);
+/// println!("{:?}", yoke.get());
+/// ```
+///
+/// It becomes dangerous once the Yoke is allowed to be upcast to a longer lifetime (testcase from #2926):
+///
+/// ```rust,compile_fail
+/// use yoke::Yoke;
+///
+/// type Contra<'a> = fn(&'a ());
+///
+///
+/// let yoke: Yoke<&'static str, Box<Contra<'_>>> = Yoke::attach_to_cart(Box::new((|_| {}) as _), |_| &local[..]);
+/// println!("{:?}", yoke.get());
+/// let yoke_longer: Yoke<&'static str, Box<Contra<'static>>> = yoke;
+/// let leaked: &'static Yoke<&'static str, Box<Contra<'static>>> = Box::leak(Box::new(yoke_longer));
+/// let reference: &'static str = leaked.get();
+///
+/// println!("pre-drop: {reference}");
+/// drop(local);
+/// println!("post-drop: {reference}");
+///
+/// ```
+const _: () = ();
diff --git a/vendor/yoke/src/zero_from.rs b/vendor/yoke/src/zero_from.rs
index 679a28d59..d876d9c06 100644
--- a/vendor/yoke/src/zero_from.rs
+++ b/vendor/yoke/src/zero_from.rs
@@ -26,6 +26,7 @@ where
Y: for<'a> Yokeable<'a>,
for<'a> YokeTraitHack<<Y as Yokeable<'a>>::Output>: ZeroFrom<'a, <C as Deref>::Target>,
C: StableDeref + Deref,
+ <C as Deref>::Target: 'static,
{
/// Construct a [`Yoke`]`<Y, C>` from a cart implementing `StableDeref` by zero-copy cloning
/// the cart to `Y` and then yokeing that object to the cart.
@@ -42,7 +43,9 @@ where
/// use std::borrow::Cow;
/// use yoke::Yoke;
///
- /// let yoke = Yoke::<Cow<'static, str>, String>::attach_to_zero_copy_cart("demo".to_string());
+ /// let yoke = Yoke::<Cow<'static, str>, String>::attach_to_zero_copy_cart(
+ /// "demo".to_owned(),
+ /// );
///
/// assert_eq!("demo", yoke.get());
/// ```
diff --git a/vendor/zerovec-derive/.cargo-checksum.json b/vendor/zerovec-derive/.cargo-checksum.json
index 2b18eb43b..c8b3d36ae 100644
--- a/vendor/zerovec-derive/.cargo-checksum.json
+++ b/vendor/zerovec-derive/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.lock":"45317edd3e500bc9c1b4a5bdbdd818d9675d354f0a635c2b515206a98ca671c0","Cargo.toml":"45b4d104f456e16a84efc97ab6ebdacdc3dac27438d1aba3468e9f390daeab0b","LICENSE":"4ad7541d66a407234e2c84902124cef325c29f3e966353efdb800bedb8b8da21","README.md":"77ca5a17524e17ae9098ab00f91cb67996f5a298468d68a15d23a09bde940b9a","examples/derives.rs":"2541311f2b532301ab8600927fb2d12a842166ce98d57879997df12dcb928dd2","examples/make.rs":"58373c89f49e5c7b343d3ab0cb746a062ea1f6aa8839707751bcee484126e77b","examples/make_var.rs":"8faac589bdfa89bb3e5b97553d2fad1aeebf914586731d26433b3dce966d1523","src/lib.rs":"4f4f2a1c0f0204ac782d741f9a188b474b9c0e23a5e6e4619b9a3e926c1e08e5","src/make_ule.rs":"892a6332a6ca29c51899d00e3581a240306e54842cd21ef26d2d496065278144","src/make_varule.rs":"adb44d66eab1cee9f785d2088115a49a8ae47c16d171e606bbb2bbc626906cd4","src/ule.rs":"1f0a46ff39e43bb19deb9e9a06289350096d4e6ca3f33cb76fec3c84c4a439d8","src/utils.rs":"54cd3a6b39c74dc6e12a4019c14ae2f5dd197e8a678dfbf3158dab61d496deac","src/varule.rs":"b0642df70023b2f04aca692010f6b2a81fd3f3db8612dbae072a200eb04f0913"},"package":"490e5f878c2856225e884c35927e7ea6db3c24cdb7229b72542c7526ad7ed49e"} \ No newline at end of file
+{"files":{"Cargo.lock":"2d7d59a1d4b4efeb00fbab029b36910c0caa82aba59c7490a027c6c568e0a8f6","Cargo.toml":"b80ff2c76944c3d016cb05b21c9c20af190e3492c1961a7a0d69b3ba5b4ae868","LICENSE":"4ad7541d66a407234e2c84902124cef325c29f3e966353efdb800bedb8b8da21","README.md":"77ca5a17524e17ae9098ab00f91cb67996f5a298468d68a15d23a09bde940b9a","examples/derives.rs":"2541311f2b532301ab8600927fb2d12a842166ce98d57879997df12dcb928dd2","examples/make.rs":"58373c89f49e5c7b343d3ab0cb746a062ea1f6aa8839707751bcee484126e77b","examples/make_var.rs":"8faac589bdfa89bb3e5b97553d2fad1aeebf914586731d26433b3dce966d1523","src/lib.rs":"4f4f2a1c0f0204ac782d741f9a188b474b9c0e23a5e6e4619b9a3e926c1e08e5","src/make_ule.rs":"892a6332a6ca29c51899d00e3581a240306e54842cd21ef26d2d496065278144","src/make_varule.rs":"e5e037b18f9533e18c8078c532198995437dd08774f773ada01d901cfdef9bac","src/ule.rs":"1f0a46ff39e43bb19deb9e9a06289350096d4e6ca3f33cb76fec3c84c4a439d8","src/utils.rs":"54cd3a6b39c74dc6e12a4019c14ae2f5dd197e8a678dfbf3158dab61d496deac","src/varule.rs":"b0642df70023b2f04aca692010f6b2a81fd3f3db8612dbae072a200eb04f0913"},"package":"2154cb6e2a748163354165e22c6a555effb09ca2d16334767bf66bb404f2206e"} \ No newline at end of file
diff --git a/vendor/zerovec-derive/Cargo.lock b/vendor/zerovec-derive/Cargo.lock
index 06c4b36b2..a5a1a0bf0 100644
--- a/vendor/zerovec-derive/Cargo.lock
+++ b/vendor/zerovec-derive/Cargo.lock
@@ -13,48 +13,48 @@ dependencies = [
[[package]]
name = "itoa"
-version = "1.0.3"
+version = "1.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6c8af84674fe1f223a982c933a0ee1086ac4d4052aa0fb8060c12c6ad838e754"
+checksum = "fad582f4b9e86b6caa621cabeb0963332d92eea04729ab12892c2533951e6440"
[[package]]
name = "proc-macro2"
-version = "1.0.44"
+version = "1.0.50"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7bd7356a8122b6c4a24a82b278680c73357984ca2fc79a0f9fa6dea7dced7c58"
+checksum = "6ef7d57beacfaf2d8aee5937dab7b7f28de3cb8b1828479bb5de2a7106f2bae2"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
-version = "1.0.21"
+version = "1.0.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179"
+checksum = "8856d8364d252a14d474036ea1358d63c9e6965c8e5c1885c18f73d70bff9c7b"
dependencies = [
"proc-macro2",
]
[[package]]
name = "ryu"
-version = "1.0.11"
+version = "1.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4501abdff3ae82a1c1b477a17252eb69cee9e66eb915c1abaa4f44d873df9f09"
+checksum = "7b4b9743ed687d4b4bcedf9ff5eaa7398495ae14e61cba0a295704edbc7decde"
[[package]]
name = "serde"
-version = "1.0.145"
+version = "1.0.152"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "728eb6351430bccb993660dfffc5a72f91ccc1295abaa8ce19b27ebe4f75568b"
+checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
-version = "1.0.145"
+version = "1.0.152"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "81fa1584d3d1bcacd84c277a0dfe21f5b0f6accf4a23d04d4c6d61f1af522b4c"
+checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e"
dependencies = [
"proc-macro2",
"quote",
@@ -63,9 +63,9 @@ dependencies = [
[[package]]
name = "serde_json"
-version = "1.0.85"
+version = "1.0.91"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e55a28e3aaef9d5ce0506d0a14dbba8054ddc7e499ef522dd8b26859ec9d4a44"
+checksum = "877c235533714907a8c2464236f5c4b2a17262ef1bd71f38f35ea592c8da6883"
dependencies = [
"itoa",
"ryu",
@@ -74,9 +74,9 @@ dependencies = [
[[package]]
name = "syn"
-version = "1.0.101"
+version = "1.0.107"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e90cde112c4b9690b8cbe810cba9ddd8bc1d7472e2cae317b69e9438c1cba7d2"
+checksum = "1f4064b5b16e03ae50984a5a8ed5d4f8803e6bc1fd170a3cda91a1be4b18e3f5"
dependencies = [
"proc-macro2",
"quote",
@@ -97,9 +97,9 @@ dependencies = [
[[package]]
name = "unicode-ident"
-version = "1.0.4"
+version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dcc811dc4066ac62f84f11307873c4850cb653bfa9b1719cee2bd2204a4bc5dd"
+checksum = "84a22b9f218b40614adcb3f4ff08b703773ad44fa9423e4e0d346d5db86e4ebc"
[[package]]
name = "unicode-xid"
@@ -108,14 +108,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c"
[[package]]
-name = "zerofrom"
-version = "0.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "79e9355fccf72b04b7deaa99ce7a0f6630530acf34045391b74460fcd714de54"
-
-[[package]]
name = "zerovec-derive"
-version = "0.9.0"
+version = "0.9.3"
dependencies = [
"bincode",
"proc-macro2",
@@ -124,5 +118,4 @@ dependencies = [
"serde_json",
"syn",
"synstructure",
- "zerofrom",
]
diff --git a/vendor/zerovec-derive/Cargo.toml b/vendor/zerovec-derive/Cargo.toml
index fea429984..d9c2a0592 100644
--- a/vendor/zerovec-derive/Cargo.toml
+++ b/vendor/zerovec-derive/Cargo.toml
@@ -12,7 +12,7 @@
[package]
edition = "2018"
name = "zerovec-derive"
-version = "0.9.0"
+version = "0.9.3"
authors = ["Manish Goregaokar <manishsmail@gmail.com>"]
description = "Custom derive for the zerovec crate"
keywords = [
@@ -32,6 +32,9 @@ license = "Unicode-DFS-2016"
repository = "https://github.com/unicode-org/icu4x"
resolver = "2"
+[package.metadata.workspaces]
+independent = true
+
[lib]
path = "src/lib.rs"
proc_macro = true
@@ -61,6 +64,3 @@ features = ["derive"]
[dev-dependencies.serde_json]
version = "1.0"
-
-[dev-dependencies.zerofrom]
-version = "0.1"
diff --git a/vendor/zerovec-derive/src/make_varule.rs b/vendor/zerovec-derive/src/make_varule.rs
index 7dda4932e..3220b0f8a 100644
--- a/vendor/zerovec-derive/src/make_varule.rs
+++ b/vendor/zerovec-derive/src/make_varule.rs
@@ -115,7 +115,6 @@ pub fn make_varule_impl(attr: AttributeArgs, mut input: DeriveInput) -> TokenStr
let doc = format!("[`VarULE`](zerovec::ule::VarULE) type for {name}");
let varule_struct: DeriveInput = parse_quote!(
#[repr(#repr_attr)]
- #[derive(PartialEq, Eq)]
#[doc = #doc]
#vis struct #ule_name #field_inits #semi
);
@@ -142,6 +141,19 @@ pub fn make_varule_impl(attr: AttributeArgs, mut input: DeriveInput) -> TokenStr
input.span(),
);
+ let eq_impl = quote!(
+ impl core::cmp::PartialEq for #ule_name {
+ fn eq(&self, other: &Self) -> bool {
+ // The VarULE invariants allow us to assume that equality is byte equality
+ // in non-safety-critical contexts
+ <Self as zerovec::ule::VarULE>::as_byte_slice(&self)
+ == <Self as zerovec::ule::VarULE>::as_byte_slice(&other)
+ }
+ }
+
+ impl core::cmp::Eq for #ule_name {}
+ );
+
let zerofrom_fq_path =
quote!(<#name as zerovec::__zerovec_internal_reexport::ZeroFrom<#ule_name>>);
@@ -234,6 +246,8 @@ pub fn make_varule_impl(attr: AttributeArgs, mut input: DeriveInput) -> TokenStr
#maybe_ord_impls
+ #eq_impl
+
#zmkv
#maybe_ser
diff --git a/vendor/zerovec/.cargo-checksum.json b/vendor/zerovec/.cargo-checksum.json
index 4e45eb54b..fbf1303f2 100644
--- a/vendor/zerovec/.cargo-checksum.json
+++ b/vendor/zerovec/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.lock":"4a4e071e00c3386a3870ffdc70c8ff156025f1b25b01f136bbc98e63cffe9525","Cargo.toml":"25e11edaad953215cca394ea63f181dbe0ce0105bd0c1b2439830f613c2b4e2a","LICENSE":"4ad7541d66a407234e2c84902124cef325c29f3e966353efdb800bedb8b8da21","README.md":"95660d3a679736a3308aaf2f43728c3052ed9dc93fdec63de163ca90de531a33","benches/testdata/large_hashmap.postcard":"f327c7c25ee0ac05e0c2052ee38b76c77968240e084477b0f703b5a0bc6f4ccf","benches/testdata/large_zeromap.postcard":"d0f15324e7e3bf8318e87fefad9249c0bda8e8d555c220f81467e0a90270059a","benches/vzv.rs":"eb816f36a3fa29dc94e44d2f78bdb62d07fdf58e72054e24645d3da5091145fa","benches/zeromap.rs":"4833d3bddfc80c20748ca8513059f2ed8029fb9dc2267bf332a0659e895d77fa","benches/zerovec.rs":"e32e380e653c62b1f344cb7be917016b739b2c4e722d62193aa3fa45908e77c0","benches/zerovec_iai.rs":"13028e4f1d85bad3ee5e5b783360b7ea247dd5de233d104b91ce46d19bf5765a","benches/zerovec_serde.rs":"7ed92aaaf5981755b4f02f366e1a0434b49a4a6ffa0945c96663626d787db070","examples/zv_serde.rs":"d7fd49aba0400636c1af16ac1cbf28f8d74c61a94ee7b318dcaf23213acec1e8","src/error.rs":"19ca9ab31b2b16cb0f604ed5f4a43c18796672bfd498af334b47aad76059b143","src/flexzerovec/databake.rs":"a32503b5aa4c3e4d5627e91ab0ec0ffbc40983dab93ba9062e4672ee1e7d0335","src/flexzerovec/mod.rs":"647678116fcf0321463b3451462920f7c517f4cf6c8a57f4f7b13f5248c8b9b0","src/flexzerovec/owned.rs":"dc6ea4cdc1b147f9e64f89281a0315fa729d134617a694646463013314e3ab46","src/flexzerovec/serde.rs":"641821a6753477498d28126bdee10a0a625deb92b39ec8c2d3c2d432d9cb9512","src/flexzerovec/slice.rs":"91dd96c895e5da9d88cff9905deece3baef775282602e9a6f0d368ac6139e2cb","src/flexzerovec/vec.rs":"103b72950ae6be8d1fd4c8db07460c2b6203c87927b722807d329177e8d41d48","src/lib.rs":"c4841809466cff4fc33345638aa1c41500b5f76ad6c93990600176b83f75693d","src/map/borrowed.rs":"56a5d2a305227b2e748b2c7172561ffc7ca68302039b21947cc68cf7c6e7c02e","src/map/databake.rs":"29afbc7882f30ce63596e2cb98f589f24bd76919d7062a18f02a5df630f31dce","src/map/kv.rs":"03f3745df8d61ff399b66d32ba1ac9a7ee298ca00dfb463c3ea8d81c746e9d41","src/map/map.rs":"24094239ef0281241c080f6d80375eea8aacbb3ac303a1bd76d7845a8bd70419","src/map/mod.rs":"4961ad14522fcef16b1aad804d9559c0a5eb2582fdd163bf76f9629cb765b6c4","src/map/serde.rs":"e5ee812805788672c40bc912ed70b241328a4098ecc1acea332b92456d082f9b","src/map/serde_helpers.rs":"72787005972b93e49b9dc17aa47d30699364e6da9dc95aadb820ce58e4bf5c54","src/map/vecs.rs":"86c9b04a92b7a918af3653e05fbf622fbb1b52b0a4307a5dadef070882778233","src/map2d/borrowed.rs":"cfbed2709cf0dc508c2da1f6a9c49b6e22deacd19679e296af4e44da3052fe07","src/map2d/cursor.rs":"c19b3d0b13b3cc5d0b3f2fbc246ed511f8ec9ec77a83a2be1aabea0d09c8d3a7","src/map2d/databake.rs":"c9451511566f9589236789c8dc5fbb32112a8324bd9b4bae75cd3be05b42962c","src/map2d/map.rs":"663dbb62f3fe003d5d5587955f693b9e3ee55ac1d366487df45bd302aa5855e7","src/map2d/mod.rs":"80beae7a263f1fe39c9a06d287c9150480fe3ed43397c2a7475a50ee2b2fd37f","src/map2d/serde.rs":"ff25b5b5f40b8bc438f18a96d646c22b0d670a89a3fe822a38b573ce7dc7fc32","src/samples.rs":"a3a2571da3a911f20bad9179441ba2676f4e5cf1ca2408a4dc6111cadbcef6cd","src/ule/chars.rs":"f8b32dd407dcc05b7d416c361d7a7c2166708d1edaa0559c829bd449dcb0a7aa","src/ule/custom.rs":"49604e3c1d67836a7e783b91ef2bfeaddfbe4758d6c5614ce3cd1ae60cfdb5ea","src/ule/encode.rs":"91a11ff2f2ddf3dffc1411343e286249e63b0cfa076d6c41f83ea952c527b014","src/ule/mod.rs":"5550fd0e3c59562bc1e6b417a5c61882d776b906f83befd78608f448323b930c","src/ule/multi.rs":"ed470e4b62c6233ee81b33f473eccf1e8db75db52275ec371cdc2b477478d959","src/ule/n
iche.rs":"1c64b5862cd7f1cd384160012a39414a335ea1f36119bb5fc56ac7cb5e050d8b","src/ule/option.rs":"0b1ba426e665f7a7fd67e736bf8552a86859b30dc83c35751156f18133e1cf99","src/ule/plain.rs":"acd4507776963459228b5425866e92e7363124f48f96abd28ba99a7c6245709f","src/ule/slices.rs":"a7638535898b39be9f489f3ff9a2140b5334113c2ddc48c4fae2bc8b86efbd14","src/ule/tuple.rs":"498f6863b1af1e6b2c655e77df53cf9818613c60189b2ce1369850e388d09e3c","src/ule/unvalidated.rs":"26b937c6b44afbd94f0e2397e29db820c0eaa7c116c427304741957e825d5893","src/varzerovec/components.rs":"6810dc5cf007572a4e22831f55822f79f720e29309c33b1c546057fd5dedfb61","src/varzerovec/databake.rs":"819c6e511e4256cb0c1e3fbf2bad521472f133355ce0cc12e46c97ec2f71dbb2","src/varzerovec/mod.rs":"c7aaaf571f7406e666d877920966a2e39373b5cf6a038cb31dbe4192b2e75d4d","src/varzerovec/owned.rs":"0aac2dd14fde57c22f4b5a943756ee90001d824d04682336782700c1f7efd272","src/varzerovec/serde.rs":"e008df1773393340abdf1c1cc80ab9035881b5f5de1e48bf4edde0963198bde7","src/varzerovec/slice.rs":"6e03824809aa1d5dcae20844f7d40c2521a824774185fc82b23ed9327b0a6468","src/varzerovec/vec.rs":"49e66dc827ff8fad20dbe86a92239f2131d16c41d7ca6e56f636325a3ff576c6","src/yoke_impls.rs":"84ef42aa194dd56502dba63283652b3783a34b3069211920e5587256c62c4abe","src/zerofrom_impls.rs":"a3e02b3473cf6deba8c0e9dd815ad7db7ced75e7ad3b353597faea189b63c1d5","src/zerovec/databake.rs":"8d1f857fa89b8b64c7a5e2b2ba84d06b266b3cf6c716bc383c9004a6602c5d43","src/zerovec/mod.rs":"3c52bd0f02a92a2c1e8226b26db82c72bc5fa66ded67e1cd6fc05ac3caa1913a","src/zerovec/serde.rs":"873e6e07e4efda0643d77c920067c4b5a30e188da5a176d623b785bbd2980bab","src/zerovec/slice.rs":"e969657302ec0ab822bf6abbb46cce5a4e53ac49e4f7f1db5ed10962926c7adf"},"package":"b9d919a74c17749ccb17beaf6405562e413cd94e98ba52ca1e64bbe7eefbd8b8"} \ No newline at end of file
+{"files":{"Cargo.lock":"ab1a55fa6a0e30a1509d639a0a0c77f500470ddaf6b34a1a9e83ecd7931b9648","Cargo.toml":"977eb85b9a33c508f31264b230c72c66cbf1fd11de723ac79153c1b4f3080b58","LICENSE":"4ad7541d66a407234e2c84902124cef325c29f3e966353efdb800bedb8b8da21","README.md":"6f1174cdbf3467cb6f02e6f715bf105b833cea392995c18ed28cda871c4e15fa","benches/testdata/large_hashmap.postcard":"f327c7c25ee0ac05e0c2052ee38b76c77968240e084477b0f703b5a0bc6f4ccf","benches/testdata/large_zeromap.postcard":"d0f15324e7e3bf8318e87fefad9249c0bda8e8d555c220f81467e0a90270059a","benches/vzv.rs":"2b0de82b4a40d45151e7faa2d18cfcb65351059f3f391dd64f031c565212c561","benches/zeromap.rs":"eb001722b4158a405610d2cb8aa4ccbb53d5e29ea0898639912cf7d55598de6c","benches/zerovec.rs":"e32e380e653c62b1f344cb7be917016b739b2c4e722d62193aa3fa45908e77c0","benches/zerovec_iai.rs":"13028e4f1d85bad3ee5e5b783360b7ea247dd5de233d104b91ce46d19bf5765a","benches/zerovec_serde.rs":"7ed92aaaf5981755b4f02f366e1a0434b49a4a6ffa0945c96663626d787db070","examples/zv_serde.rs":"d7fd49aba0400636c1af16ac1cbf28f8d74c61a94ee7b318dcaf23213acec1e8","src/error.rs":"19ca9ab31b2b16cb0f604ed5f4a43c18796672bfd498af334b47aad76059b143","src/flexzerovec/databake.rs":"a32503b5aa4c3e4d5627e91ab0ec0ffbc40983dab93ba9062e4672ee1e7d0335","src/flexzerovec/mod.rs":"647678116fcf0321463b3451462920f7c517f4cf6c8a57f4f7b13f5248c8b9b0","src/flexzerovec/owned.rs":"dc6ea4cdc1b147f9e64f89281a0315fa729d134617a694646463013314e3ab46","src/flexzerovec/serde.rs":"a0c7a8fcf5ef06f5ea44cbfb89cac47f173c7150082d0cd745a457de3d375b4f","src/flexzerovec/slice.rs":"6c115b32011d127bb59f1818e7d1061a0786a8b2d4d7d174f43acf02189d73eb","src/flexzerovec/vec.rs":"103b72950ae6be8d1fd4c8db07460c2b6203c87927b722807d329177e8d41d48","src/lib.rs":"0e91c4fb200f27665b509796fd4d0672578d140502721a645fe068f23d6aa2bd","src/map/borrowed.rs":"abd5886b384075780498ddc39300eeeda0bc520bdee1cea43096f50a9ac42f07","src/map/databake.rs":"29afbc7882f30ce63596e2cb98f589f24bd76919d7062a18f02a5df630f31dce","src/map/kv.rs":"03f3745df8d61ff399b66d32ba1ac9a7ee298ca00dfb463c3ea8d81c746e9d41","src/map/map.rs":"d0298742d6f46ca39f3ebc9f0761ab2997523da40df5c9ea9515a8584b266c50","src/map/mod.rs":"4961ad14522fcef16b1aad804d9559c0a5eb2582fdd163bf76f9629cb765b6c4","src/map/serde.rs":"3bed09076a45774c65a8116dd21e72413f1657ed569fe32c4abb48e979fd3144","src/map/serde_helpers.rs":"72787005972b93e49b9dc17aa47d30699364e6da9dc95aadb820ce58e4bf5c54","src/map/vecs.rs":"86c9b04a92b7a918af3653e05fbf622fbb1b52b0a4307a5dadef070882778233","src/map2d/borrowed.rs":"f2a1f927c6d0942fd8ad183de24a32a2b1450f2f747cb328862163f6d9505ebb","src/map2d/cursor.rs":"e4447e7b5869e895b852124443151918bff5fe8cfb5207acfe5ff7b57b56bfdf","src/map2d/databake.rs":"c9451511566f9589236789c8dc5fbb32112a8324bd9b4bae75cd3be05b42962c","src/map2d/map.rs":"358408292de4c06e82f6eebf2f014d3737241f383d8fc3c946bfb44da1071b3b","src/map2d/mod.rs":"80beae7a263f1fe39c9a06d287c9150480fe3ed43397c2a7475a50ee2b2fd37f","src/map2d/serde.rs":"c4f32f25bc6e850eccd677e5b0cf9cd796df71855b80459893c0323ad7901bd5","src/samples.rs":"a3a2571da3a911f20bad9179441ba2676f4e5cf1ca2408a4dc6111cadbcef6cd","src/ule/chars.rs":"f8b32dd407dcc05b7d416c361d7a7c2166708d1edaa0559c829bd449dcb0a7aa","src/ule/custom.rs":"49604e3c1d67836a7e783b91ef2bfeaddfbe4758d6c5614ce3cd1ae60cfdb5ea","src/ule/encode.rs":"91a11ff2f2ddf3dffc1411343e286249e63b0cfa076d6c41f83ea952c527b014","src/ule/mod.rs":"4a71652bb0d771aee247518a1c50e72bdea12fd884feaf7f55385808047069ef","src/ule/multi.rs":"ed470e4b62c6233ee81b33f473eccf1e8db75db52275ec371cdc2b477478d959","src/ule/n
iche.rs":"1c64b5862cd7f1cd384160012a39414a335ea1f36119bb5fc56ac7cb5e050d8b","src/ule/option.rs":"32979913da36452a27951e3f073494006c26f570af5733a1b0ce5a75427300b4","src/ule/plain.rs":"27f47ce32cbcb62ef088dbd60dbb9d79c2d54e1a3ff024cdc863362542b74336","src/ule/slices.rs":"a7638535898b39be9f489f3ff9a2140b5334113c2ddc48c4fae2bc8b86efbd14","src/ule/tuple.rs":"498f6863b1af1e6b2c655e77df53cf9818613c60189b2ce1369850e388d09e3c","src/ule/unvalidated.rs":"a92f2e94ba8bb50b6f63914549501653aa0622b6d6810824c271afe37edc367a","src/varzerovec/components.rs":"6810dc5cf007572a4e22831f55822f79f720e29309c33b1c546057fd5dedfb61","src/varzerovec/databake.rs":"819c6e511e4256cb0c1e3fbf2bad521472f133355ce0cc12e46c97ec2f71dbb2","src/varzerovec/mod.rs":"c7aaaf571f7406e666d877920966a2e39373b5cf6a038cb31dbe4192b2e75d4d","src/varzerovec/owned.rs":"0aac2dd14fde57c22f4b5a943756ee90001d824d04682336782700c1f7efd272","src/varzerovec/serde.rs":"efb06a8b139dffc6d0534f7e0de4dc39b82ed224481f25bc5dea14d0159b5208","src/varzerovec/slice.rs":"4524631711cfa9ac530f116fc2801526d1d14e7a21ead10aa2bfb3f4de298cac","src/varzerovec/vec.rs":"6557bc9a146cc6563a17a62c3e8d38b68ba1451ff99d5741d0bbdb5b67703573","src/yoke_impls.rs":"3d2486b99eda20cc2c9dd6360d5589f95e6b2bfce847c3637b91134836f73566","src/zerofrom_impls.rs":"a3e02b3473cf6deba8c0e9dd815ad7db7ced75e7ad3b353597faea189b63c1d5","src/zerovec/databake.rs":"8d1f857fa89b8b64c7a5e2b2ba84d06b266b3cf6c716bc383c9004a6602c5d43","src/zerovec/mod.rs":"777a70ac94f0c80869f4576d8aaad5c96083a94eb3c3db86ebc98b4805b4a0ff","src/zerovec/serde.rs":"3a088c88732ffc0814a09777f89a83857e948c1eff6dfc2d1eb6bad5ece3976a","src/zerovec/slice.rs":"49c29036ddc9efee1fc327152a78e9d20f8f8174a71468dfe6d68a6437882419"},"package":"154df60c74c4a844bc04a53cef4fc18a909d3ea07e19f5225eaba86209da3aa6"} \ No newline at end of file
diff --git a/vendor/zerovec/Cargo.lock b/vendor/zerovec/Cargo.lock
index 59e492118..f722be128 100644
--- a/vendor/zerovec/Cargo.lock
+++ b/vendor/zerovec/Cargo.lock
@@ -3,19 +3,10 @@
version = 3
[[package]]
-name = "aho-corasick"
-version = "0.7.19"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b4f55bd91a0978cbfd91c457a164bab8b4001c833b7f323132c0a4e1922dd44e"
-dependencies = [
- "memchr",
-]
-
-[[package]]
name = "atomic-polyfill"
-version = "0.1.10"
+version = "0.1.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9c041a8d9751a520ee19656232a18971f18946a7900f1520ee4400002244dd89"
+checksum = "e3ff7eb3f316534d83a8a2c3d1674ace8a5a71198eba31e2e2b597833f699b28"
dependencies = [
"critical-section",
]
@@ -26,7 +17,7 @@ version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
dependencies = [
- "hermit-abi",
+ "hermit-abi 0.1.19",
"libc",
"winapi",
]
@@ -38,21 +29,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
[[package]]
-name = "bare-metal"
-version = "0.2.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5deb64efa5bd81e31fcd1938615a6d98c82eafcbcd787162b6f63b91d6bac5b3"
-dependencies = [
- "rustc_version 0.2.3",
-]
-
-[[package]]
-name = "bare-metal"
-version = "1.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f8fe8f5a8a398345e52358e18ff07cc17a568fbca5c6f73873d3a62056309603"
-
-[[package]]
name = "bincode"
version = "1.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -62,18 +38,6 @@ dependencies = [
]
[[package]]
-name = "bit_field"
-version = "0.10.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dcb6dd1c2376d2e096796e234a70e17e94cc2d5d54ff8ce42b28cef1d0d359a4"
-
-[[package]]
-name = "bitfield"
-version = "0.13.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "46afbd2983a5d5a7bd740ccb198caf5b82f45c40c09c0eed36052d91cb92e719"
-
-[[package]]
name = "bitflags"
version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -93,9 +57,9 @@ dependencies = [
[[package]]
name = "bumpalo"
-version = "3.11.0"
+version = "3.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c1ad822118d20d2c234f427000d5acc36eabe1e29a348c89b63dd60b13f28e5d"
+checksum = "0d261e256854913907f67ed06efbc3338dfe6179796deefc1ff763fc1aee5535"
[[package]]
name = "byteorder"
@@ -133,18 +97,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "67ba02a97a2bd10f4b59b25c7973101c79642302776489e030cd13cdab09ed15"
[[package]]
-name = "cortex-m"
-version = "0.7.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "70858629a458fdfd39f9675c4dc309411f2a3f83bede76988d81bf1a0ecee9e0"
-dependencies = [
- "bare-metal 0.2.5",
- "bitfield",
- "embedded-hal",
- "volatile-register",
-]
-
-[[package]]
name = "criterion"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -182,15 +134,9 @@ dependencies = [
[[package]]
name = "critical-section"
-version = "0.2.7"
+version = "1.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "95da181745b56d4bd339530ec393508910c909c784e8962d15d722bacf0bcbcd"
-dependencies = [
- "bare-metal 1.0.0",
- "cfg-if",
- "cortex-m",
- "riscv",
-]
+checksum = "6548a0ad5d2549e111e1f6a11a6c2e2d00ce6a3dafe22948d67c2b443f775e52"
[[package]]
name = "crossbeam-channel"
@@ -215,26 +161,24 @@ dependencies = [
[[package]]
name = "crossbeam-epoch"
-version = "0.9.10"
+version = "0.9.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "045ebe27666471bb549370b4b0b3e51b07f56325befa4284db65fc89c02511b1"
+checksum = "01a9af1f4c2ef74bb8aa1f7e19706bc72d03598c8a570bb5de72243c7a9d9d5a"
dependencies = [
"autocfg",
"cfg-if",
"crossbeam-utils",
"memoffset",
- "once_cell",
"scopeguard",
]
[[package]]
name = "crossbeam-utils"
-version = "0.8.11"
+version = "0.8.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "51887d4adc7b564537b15adcfb307936f8075dfcd5f00dde9a9f1d29383682bc"
+checksum = "4fb766fa798726286dbbb842f174001dab8abc7b627a1dd86e0b7222a95d929f"
dependencies = [
"cfg-if",
- "once_cell",
]
[[package]]
@@ -261,9 +205,9 @@ dependencies = [
[[package]]
name = "databake"
-version = "0.1.2"
+version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c87777d6d7bde863ba217aa87521dc857239de1f36d66aac46fd173fb0495858"
+checksum = "df626c4717e455cd7a70a82c4358630554a07e4341f86dd095c625f1474a2857"
dependencies = [
"databake-derive",
"proc-macro2",
@@ -273,9 +217,9 @@ dependencies = [
[[package]]
name = "databake-derive"
-version = "0.1.1"
+version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "905c7a060fc0c84c0452d97473b1177dd7a5cbc7670cfbae4a7fe22e42f6432e"
+checksum = "be51a53c468489ae1ef0efa9f6b10706f426c0dde06d66122ffef1f0c51e87dc"
dependencies = [
"proc-macro2",
"quote",
@@ -290,20 +234,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "90e5c1c8368803113bf0c9584fc495a58b86dc8a29edbf8fe877d21d9507e797"
[[package]]
-name = "embedded-hal"
-version = "0.2.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "35949884794ad573cf46071e41c9b60efb0cb311e3ca01f7af807af1debc66ff"
-dependencies = [
- "nb 0.1.3",
- "void",
-]
-
-[[package]]
name = "getrandom"
-version = "0.2.7"
+version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4eb1a864a501629691edf6c15a593b7a51eebaa1e8468e9ddc623de7c9b58ec6"
+checksum = "c05aeb6a22b8f62540c194aac980f2115af067bfe15a0734d7277a768d396b31"
dependencies = [
"cfg-if",
"js-sys",
@@ -335,7 +269,7 @@ checksum = "db04bc24a18b9ea980628ecf00e6c0264f3c1426dac36c00cb49b6fbad8b0743"
dependencies = [
"atomic-polyfill",
"hash32",
- "rustc_version 0.4.0",
+ "rustc_version",
"serde",
"spin",
"stable_deref_trait",
@@ -351,16 +285,19 @@ dependencies = [
]
[[package]]
-name = "iai"
-version = "0.1.1"
+name = "hermit-abi"
+version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "71a816c97c42258aa5834d07590b718b4c9a598944cd39a52dc25b351185d678"
+checksum = "ee512640fe35acbfb4bb779db6f0d80704c2cacfa2e39b601ef3e3f47d1ae4c7"
+dependencies = [
+ "libc",
+]
[[package]]
-name = "icu_benchmark_macros"
-version = "0.7.0"
+name = "iai"
+version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6c867656f2d9c90b13709ac88e710a9d6afe33998c1dfa22384bab8804e8b3d4"
+checksum = "71a816c97c42258aa5834d07590b718b4c9a598944cd39a52dc25b351185d678"
[[package]]
name = "itertools"
@@ -379,9 +316,9 @@ checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4"
[[package]]
name = "itoa"
-version = "1.0.3"
+version = "1.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6c8af84674fe1f223a982c933a0ee1086ac4d4052aa0fb8060c12c6ad838e754"
+checksum = "fad582f4b9e86b6caa621cabeb0963332d92eea04729ab12892c2533951e6440"
[[package]]
name = "js-sys"
@@ -400,15 +337,15 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "libc"
-version = "0.2.133"
+version = "0.2.139"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c0f80d65747a3e43d1596c7c5492d95d5edddaabd45a7fcdb02b95f644164966"
+checksum = "201de327520df007757c1f0adce6e827fe8562fbc28bfd9c15571c66ca1f5f79"
[[package]]
name = "libm"
-version = "0.2.5"
+version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "292a948cd991e376cf75541fe5b97a1081d713c618b4f1b9500f8844e49eb565"
+checksum = "348108ab3fba42ec82ff6e9564fc4ca0247bdccdc68dd8af9764bbc79c3c8ffb"
[[package]]
name = "lock_api"
@@ -437,29 +374,14 @@ checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
[[package]]
name = "memoffset"
-version = "0.6.5"
+version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce"
+checksum = "5de893c32cde5f383baa4c04c5d6dbdd735cfd4a794b0debdb2bb1b421da5ff4"
dependencies = [
"autocfg",
]
[[package]]
-name = "nb"
-version = "0.1.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "801d31da0513b6ec5214e9bf433a77966320625a37860f910be265be6e18d06f"
-dependencies = [
- "nb 1.0.0",
-]
-
-[[package]]
-name = "nb"
-version = "1.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "546c37ac5d9e56f55e73b677106873d9d9f5190605e41a856503623648488cae"
-
-[[package]]
name = "num-traits"
version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -471,19 +393,19 @@ dependencies = [
[[package]]
name = "num_cpus"
-version = "1.13.1"
+version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "19e64526ebdee182341572e50e9ad03965aa510cd94427a4549448f285e957a1"
+checksum = "0fac9e2da13b5eb447a6ce3d392f23a29d8694bff781bf03a16cd9ac8697593b"
dependencies = [
- "hermit-abi",
+ "hermit-abi 0.2.6",
"libc",
]
[[package]]
name = "once_cell"
-version = "1.15.0"
+version = "1.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e82dad04139b71a90c080c8463fe0dc7902db5192d939bd0950f074d014339e1"
+checksum = "6f61fba1741ea2b3d6a1e3178721804bb716a68a6aeba1149b5d52e3d464ea66"
[[package]]
name = "oorandom"
@@ -532,24 +454,24 @@ dependencies = [
[[package]]
name = "ppv-lite86"
-version = "0.2.16"
+version = "0.2.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eb9f9e6e233e5c4a35559a617bf40a4ec447db2e84c20b55a6f83167b7e57872"
+checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de"
[[package]]
name = "proc-macro2"
-version = "1.0.44"
+version = "1.0.50"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7bd7356a8122b6c4a24a82b278680c73357984ca2fc79a0f9fa6dea7dced7c58"
+checksum = "6ef7d57beacfaf2d8aee5937dab7b7f28de3cb8b1828479bb5de2a7106f2bae2"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
-version = "1.0.21"
+version = "1.0.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179"
+checksum = "8856d8364d252a14d474036ea1358d63c9e6965c8e5c1885c18f73d70bff9c7b"
dependencies = [
"proc-macro2",
]
@@ -605,21 +527,19 @@ dependencies = [
[[package]]
name = "rayon"
-version = "1.5.3"
+version = "1.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bd99e5772ead8baa5215278c9b15bf92087709e9c1b2d1f97cdb5a183c933a7d"
+checksum = "6db3a213adf02b3bcfd2d3846bb41cb22857d131789e01df434fb7e7bc0759b7"
dependencies = [
- "autocfg",
- "crossbeam-deque",
"either",
"rayon-core",
]
[[package]]
name = "rayon-core"
-version = "1.9.3"
+version = "1.10.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "258bcdb5ac6dad48491bb2992db6b7cf74878b0384908af124823d118c99683f"
+checksum = "356a0625f1954f730c0201cdab48611198dc6ce21f4acff55089b5a78e6e835b"
dependencies = [
"crossbeam-channel",
"crossbeam-deque",
@@ -629,12 +549,10 @@ dependencies = [
[[package]]
name = "regex"
-version = "1.6.0"
+version = "1.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4c4eb3267174b8c6c2f654116623910a0fef09c4753f8dd83db29c48a0df988b"
+checksum = "48aaa5748ba571fb95cd2c85c09f629215d3a6ece942baa100950af03a34f733"
dependencies = [
- "aho-corasick",
- "memchr",
"regex-syntax",
]
@@ -646,39 +564,9 @@ checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
[[package]]
name = "regex-syntax"
-version = "0.6.27"
+version = "0.6.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a3f87b73ce11b1619a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244"
-
-[[package]]
-name = "riscv"
-version = "0.7.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6907ccdd7a31012b70faf2af85cd9e5ba97657cc3987c4f13f8e4d2c2a088aba"
-dependencies = [
- "bare-metal 1.0.0",
- "bit_field",
- "riscv-target",
-]
-
-[[package]]
-name = "riscv-target"
-version = "0.1.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "88aa938cda42a0cf62a20cfe8d139ff1af20c2e681212b5b34adb5a58333f222"
-dependencies = [
- "lazy_static",
- "regex",
-]
-
-[[package]]
-name = "rustc_version"
-version = "0.2.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a"
-dependencies = [
- "semver 0.9.0",
-]
+checksum = "456c603be3e8d448b072f410900c09faf164fbce2d480456f50eea6e25f9c848"
[[package]]
name = "rustc_version"
@@ -686,14 +574,14 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366"
dependencies = [
- "semver 1.0.14",
+ "semver",
]
[[package]]
name = "ryu"
-version = "1.0.11"
+version = "1.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4501abdff3ae82a1c1b477a17252eb69cee9e66eb915c1abaa4f44d873df9f09"
+checksum = "7b4b9743ed687d4b4bcedf9ff5eaa7398495ae14e61cba0a295704edbc7decde"
[[package]]
name = "same-file"
@@ -712,30 +600,15 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
[[package]]
name = "semver"
-version = "0.9.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403"
-dependencies = [
- "semver-parser",
-]
-
-[[package]]
-name = "semver"
-version = "1.0.14"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e25dfac463d778e353db5be2449d1cce89bd6fd23c9f1ea21310ce6e5a1b29c4"
-
-[[package]]
-name = "semver-parser"
-version = "0.7.0"
+version = "1.0.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"
+checksum = "58bc9567378fc7690d6b2addae4e60ac2eeea07becb2c64b9f218b53865cba2a"
[[package]]
name = "serde"
-version = "1.0.145"
+version = "1.0.152"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "728eb6351430bccb993660dfffc5a72f91ccc1295abaa8ce19b27ebe4f75568b"
+checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb"
dependencies = [
"serde_derive",
]
@@ -752,9 +625,9 @@ dependencies = [
[[package]]
name = "serde_derive"
-version = "1.0.145"
+version = "1.0.152"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "81fa1584d3d1bcacd84c277a0dfe21f5b0f6accf4a23d04d4c6d61f1af522b4c"
+checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e"
dependencies = [
"proc-macro2",
"quote",
@@ -763,11 +636,11 @@ dependencies = [
[[package]]
name = "serde_json"
-version = "1.0.85"
+version = "1.0.91"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e55a28e3aaef9d5ce0506d0a14dbba8054ddc7e499ef522dd8b26859ec9d4a44"
+checksum = "877c235533714907a8c2464236f5c4b2a17262ef1bd71f38f35ea592c8da6883"
dependencies = [
- "itoa 1.0.3",
+ "itoa 1.0.5",
"ryu",
"serde",
]
@@ -789,9 +662,9 @@ checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
[[package]]
name = "syn"
-version = "1.0.101"
+version = "1.0.107"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e90cde112c4b9690b8cbe810cba9ddd8bc1d7472e2cae317b69e9438c1cba7d2"
+checksum = "1f4064b5b16e03ae50984a5a8ed5d4f8803e6bc1fd170a3cda91a1be4b18e3f5"
dependencies = [
"proc-macro2",
"quote",
@@ -831,9 +704,9 @@ dependencies = [
[[package]]
name = "unicode-ident"
-version = "1.0.4"
+version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dcc811dc4066ac62f84f11307873c4850cb653bfa9b1719cee2bd2204a4bc5dd"
+checksum = "84a22b9f218b40614adcb3f4ff08b703773ad44fa9423e4e0d346d5db86e4ebc"
[[package]]
name = "unicode-width"
@@ -848,27 +721,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c"
[[package]]
-name = "vcell"
-version = "0.1.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "77439c1b53d2303b20d9459b1ade71a83c716e3f9c34f3228c00e6f185d6c002"
-
-[[package]]
-name = "void"
-version = "1.0.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d"
-
-[[package]]
-name = "volatile-register"
-version = "0.2.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9ee8f19f9d74293faf70901bc20ad067dc1ad390d2cbf1e3f75f721ffee908b6"
-dependencies = [
- "vcell",
-]
-
-[[package]]
name = "walkdir"
version = "2.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -982,59 +834,30 @@ checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "yoke"
-version = "0.6.1"
+version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "620cda8a59740b1e9313dad314848c6ffe15792c38cc4ac2def245ee77a6cae2"
+checksum = "222180af14a6b54ef2c33493c1eff77ae95a3687a21b243e752624006fb8f26e"
dependencies = [
"serde",
"stable_deref_trait",
- "yoke-derive",
"zerofrom",
]
[[package]]
-name = "yoke-derive"
-version = "0.6.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "58c2c5bb7c929b85c1b9ec69091b0d835f0878b4fd9eb67973b25936e06c4374"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn",
- "synstructure",
-]
-
-[[package]]
name = "zerofrom"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "79e9355fccf72b04b7deaa99ce7a0f6630530acf34045391b74460fcd714de54"
-dependencies = [
- "zerofrom-derive",
-]
-
-[[package]]
-name = "zerofrom-derive"
-version = "0.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8785f47d6062c1932866147f91297286a9f350b3070e9d9f0b6078e37d623c1a"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn",
- "synstructure",
-]
[[package]]
name = "zerovec"
-version = "0.9.0"
+version = "0.9.2"
dependencies = [
"bincode",
"criterion",
"databake",
"getrandom",
"iai",
- "icu_benchmark_macros",
"postcard",
"rand",
"rand_distr",
@@ -1048,9 +871,9 @@ dependencies = [
[[package]]
name = "zerovec-derive"
-version = "0.9.0"
+version = "0.9.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "490e5f878c2856225e884c35927e7ea6db3c24cdb7229b72542c7526ad7ed49e"
+checksum = "c630983d26a5f0c061dad3bf22df69a7329b4939a9752bc5f19f1cbd8e2263db"
dependencies = [
"proc-macro2",
"quote",
diff --git a/vendor/zerovec/Cargo.toml b/vendor/zerovec/Cargo.toml
index 2e305d302..2490aa0dc 100644
--- a/vendor/zerovec/Cargo.toml
+++ b/vendor/zerovec/Cargo.toml
@@ -12,7 +12,7 @@
[package]
edition = "2021"
name = "zerovec"
-version = "0.9.0"
+version = "0.9.2"
authors = ["The ICU4X Project Developers"]
include = [
"src/**/*",
@@ -41,9 +41,15 @@ license = "Unicode-DFS-2016"
repository = "https://github.com/unicode-org/icu4x"
resolver = "2"
+[package.metadata.workspaces]
+independent = true
+
[package.metadata.docs.rs]
all-features = true
+[package.metadata.cargo-all-features]
+denylist = ["bench"]
+
[[example]]
name = "zv_serde"
required-features = ["serde"]
@@ -71,7 +77,7 @@ harness = false
required-features = ["serde"]
[dependencies.databake]
-version = "0.1.0"
+version = "0.1.3"
features = ["derive"]
optional = true
@@ -82,14 +88,14 @@ optional = true
default-features = false
[dependencies.yoke]
-version = "0.6.0"
+version = "0.7.0"
optional = true
[dependencies.zerofrom]
-version = "0.1.0"
+version = "0.1.1"
[dependencies.zerovec-derive]
-version = "0.9.0"
+version = "0.9.2"
optional = true
[dev-dependencies.bincode]
@@ -105,9 +111,6 @@ features = ["js"]
[dev-dependencies.iai]
version = "0.1"
-[dev-dependencies.icu_benchmark_macros]
-version = "0.7"
-
[dev-dependencies.postcard]
version = "1.0.0"
features = ["use-std"]
@@ -128,15 +131,7 @@ features = ["derive"]
[dev-dependencies.serde_json]
version = "1.0"
-[dev-dependencies.yoke]
-version = "0.6.0"
-features = ["derive"]
-
-[dev-dependencies.zerofrom]
-version = "0.1.0"
-features = ["derive"]
-
[features]
bench = []
-derive = ["zerovec-derive"]
+derive = ["dep:zerovec-derive"]
std = []
diff --git a/vendor/zerovec/README.md b/vendor/zerovec/README.md
index dbde4022c..b650d7dbd 100644
--- a/vendor/zerovec/README.md
+++ b/vendor/zerovec/README.md
@@ -32,7 +32,7 @@ works under the hood.
## Cargo features
-This crate has five optional features:
+This crate has several optional Cargo features:
- `serde`: Allows serializing and deserializing `zerovec`'s abstractions via [`serde`](https://docs.rs/serde)
- `yoke`: Enables implementations of `Yokeable` from the [`yoke`](https://docs.rs/yoke/) crate, which is also useful
in situations involving a lot of zero-copy deserialization.
diff --git a/vendor/zerovec/benches/vzv.rs b/vendor/zerovec/benches/vzv.rs
index 3bee5add6..94b6621a9 100644
--- a/vendor/zerovec/benches/vzv.rs
+++ b/vendor/zerovec/benches/vzv.rs
@@ -102,7 +102,7 @@ fn binary_search_benches(c: &mut Criterion) {
let (needles, _) = random_alphanums(2..=20, 10, seed);
let bytes: Vec<u8> = VarZeroVec::<str>::from(&string_vec).into_bytes();
let vzv = VarZeroVec::<str>::parse_byte_slice(black_box(bytes.as_slice())).unwrap();
- let single_needle = "lmnop".to_string();
+ let single_needle = "lmnop".to_owned();
// *** Binary search vec of 500 strings 10 times ***
c.bench_function("vzv/binary_search/slice", |b| {
@@ -169,7 +169,7 @@ fn vzv_precompute_bench(c: &mut Criterion) {
let vzv = VarZeroVec::<str>::parse_byte_slice(black_box(bytes.as_slice())).unwrap();
let borrowed = vzv.as_components();
let slice = vzv.as_slice();
- let single_needle = "lmnop".to_string();
+ let single_needle = "lmnop";
c.bench_function("vzv_precompute/get/precomputed", |b| {
b.iter(|| black_box(&borrowed).get(100));
@@ -180,11 +180,11 @@ fn vzv_precompute_bench(c: &mut Criterion) {
});
c.bench_function("vzv_precompute/search/precomputed", |b| {
- b.iter(|| black_box(&borrowed).binary_search(&single_needle));
+ b.iter(|| black_box(&borrowed).binary_search(single_needle));
});
c.bench_function("vzv_precompute/search/slice", |b| {
- b.iter(|| black_box(&slice).binary_search(&single_needle));
+ b.iter(|| black_box(&slice).binary_search(single_needle));
});
c.bench_function("vzv_precompute/search_multi/precomputed", |b| {
diff --git a/vendor/zerovec/benches/zeromap.rs b/vendor/zerovec/benches/zeromap.rs
index 40225ba7a..069331979 100644
--- a/vendor/zerovec/benches/zeromap.rs
+++ b/vendor/zerovec/benches/zeromap.rs
@@ -56,7 +56,8 @@ const POSTCARD_HASHMAP: [u8; 176] = [
114, 97, 98, 105, 99,
];
-/// Run this function to print new data to the console. Requires the optional `serde` feature.
+/// Run this function to print new data to the console.
+/// Requires the optional `serde` Cargo feature.
#[allow(dead_code)]
fn generate() {
let map = build_zeromap(false);
@@ -64,7 +65,8 @@ fn generate() {
println!("{:?}", buf);
}
-/// Run this function to print new data to the console. Requires the optional `serde` feature.
+/// Run this function to print new data to the console.
+/// Requires the optional `serde` Cargo feature.
#[allow(dead_code)]
fn generate_hashmap() {
let map = build_hashmap(false);
@@ -180,13 +182,13 @@ fn bench_hashmap(c: &mut Criterion) {
fn build_hashmap(large: bool) -> HashMap<String, String> {
let mut map: HashMap<String, String> = HashMap::new();
- for (key, value) in DATA.iter() {
+ for &(key, value) in DATA.iter() {
if large {
for n in 0..8192 {
- map.insert(format!("{}{}", key, n), value.to_string());
+ map.insert(format!("{}{}", key, n), value.to_owned());
}
} else {
- map.insert(key.to_string(), value.to_string());
+ map.insert(key.to_owned(), value.to_owned());
}
}
map
@@ -197,7 +199,7 @@ fn bench_deserialize_hashmap(c: &mut Criterion) {
b.iter(|| {
let map: HashMap<String, String> =
postcard::from_bytes(black_box(&POSTCARD_HASHMAP)).unwrap();
- assert_eq!(map.get("iu"), Some(&"Inuktitut".to_string()));
+ assert_eq!(map.get("iu"), Some(&"Inuktitut".to_owned()));
})
});
}
@@ -207,7 +209,7 @@ fn bench_deserialize_large_hashmap(c: &mut Criterion) {
c.bench_function("zeromap/deserialize/large/hashmap", |b| {
b.iter(|| {
let map: HashMap<String, String> = postcard::from_bytes(black_box(&buf)).unwrap();
- assert_eq!(map.get("iu3333"), Some(&"Inuktitut".to_string()));
+ assert_eq!(map.get("iu3333"), Some(&"Inuktitut".to_owned()));
})
});
}
@@ -216,7 +218,7 @@ fn bench_lookup_hashmap(c: &mut Criterion) {
let map: HashMap<String, String> = postcard::from_bytes(black_box(&POSTCARD_HASHMAP)).unwrap();
c.bench_function("zeromap/lookup/small/hashmap", |b| {
b.iter(|| {
- assert_eq!(map.get(black_box("iu")), Some(&"Inuktitut".to_string()));
+ assert_eq!(map.get(black_box("iu")), Some(&"Inuktitut".to_owned()));
assert_eq!(map.get(black_box("zz")), None);
});
});
@@ -227,7 +229,7 @@ fn bench_lookup_large_hashmap(c: &mut Criterion) {
let map: HashMap<String, String> = postcard::from_bytes(&buf).unwrap();
c.bench_function("zeromap/lookup/large/hashmap", |b| {
b.iter(|| {
- assert_eq!(map.get(black_box("iu3333")), Some(&"Inuktitut".to_string()));
+ assert_eq!(map.get(black_box("iu3333")), Some(&"Inuktitut".to_owned()));
assert_eq!(map.get(black_box("zz")), None);
});
});
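For the `HashMap` baseline above, `for &(key, value) in DATA.iter()` destructures each `&(&str, &str)` so that `key` and `value` are plain `&str`, and `to_owned` builds the `String` through `ToOwned` rather than the `Display`-based `to_string`. A minimal sketch, with a stand-in for the benchmark's static `DATA` table:

```rust
use std::collections::HashMap;

// Stand-in for the benchmark's static table of (&str, &str) pairs.
const DATA: &[(&str, &str)] = &[("iu", "Inuktitut"), ("ja", "Japanese")];

let mut map: HashMap<String, String> = HashMap::new();
for &(key, value) in DATA.iter() {
    // `&(key, value)` binds the pair by value; `to_owned` converts &str -> String.
    map.insert(key.to_owned(), value.to_owned());
}
assert_eq!(map.get("iu").map(String::as_str), Some("Inuktitut"));
```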
diff --git a/vendor/zerovec/src/flexzerovec/serde.rs b/vendor/zerovec/src/flexzerovec/serde.rs
index 44179be32..fb7caa7a8 100644
--- a/vendor/zerovec/src/flexzerovec/serde.rs
+++ b/vendor/zerovec/src/flexzerovec/serde.rs
@@ -42,7 +42,7 @@ impl<'de> Visitor<'de> for FlexZeroVecVisitor {
}
}
-/// This impl can be made available by enabling the optional `serde` feature of the `zerovec` crate
+/// This impl requires enabling the optional `serde` Cargo feature of the `zerovec` crate
impl<'de, 'a> Deserialize<'de> for FlexZeroVec<'a>
where
'de: 'a,
@@ -60,7 +60,7 @@ where
}
}
-/// This impl can be made available by enabling the optional `serde` feature of the `zerovec` crate
+/// This impl requires enabling the optional `serde` Cargo feature of the `zerovec` crate
impl<'de, 'a> Deserialize<'de> for &'a FlexZeroSlice
where
'de: 'a,
@@ -87,7 +87,7 @@ where
}
}
-/// This impl can be made available by enabling the optional `serde` feature of the `zerovec` crate
+/// This impl requires enabling the optional `serde` Cargo feature of the `zerovec` crate
impl Serialize for FlexZeroVec<'_> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
@@ -105,7 +105,7 @@ impl Serialize for FlexZeroVec<'_> {
}
}
-/// This impl can be made available by enabling the optional `serde` feature of the `zerovec` crate
+/// This impl requires enabling the optional `serde` Cargo feature of the `zerovec` crate
impl Serialize for FlexZeroSlice {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
diff --git a/vendor/zerovec/src/flexzerovec/slice.rs b/vendor/zerovec/src/flexzerovec/slice.rs
index 7cc6f12fa..ee164d05b 100644
--- a/vendor/zerovec/src/flexzerovec/slice.rs
+++ b/vendor/zerovec/src/flexzerovec/slice.rs
@@ -14,7 +14,6 @@ const USIZE_WIDTH: usize = mem::size_of::<usize>();
/// A zero-copy "slice" that efficiently represents `[usize]`.
#[repr(packed)]
-#[derive(Eq, PartialEq)]
pub struct FlexZeroSlice {
// Hard Invariant: 1 <= width <= USIZE_WIDTH (which is target_pointer_width)
// Soft Invariant: width == the width of the largest element
@@ -23,6 +22,13 @@ pub struct FlexZeroSlice {
data: [u8],
}
+impl PartialEq for FlexZeroSlice {
+ fn eq(&self, other: &Self) -> bool {
+ self.width == other.width && self.data == other.data
+ }
+}
+impl Eq for FlexZeroSlice {}
+
/// Helper function to decode a little-endian "chunk" (byte slice of a specific length)
/// into a `usize`. We cannot call `usize::from_le_bytes` directly because that function
/// requires the high bits to be set to 0.
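The change above replaces `#[derive(Eq, PartialEq)]` on the `#[repr(packed)]` struct with hand-written impls. A plausible reason (an inference, not stated in the diff) is that derives on packed structs expand to code that takes references to the fields, which the compiler rejects when the fields may be unaligned; a manual impl can copy the fields out before comparing. A small sketch on a hypothetical packed type:

```rust
// Hypothetical packed struct; `derive(PartialEq)` would take references to
// `value`, which is not allowed because the field may be unaligned.
#[repr(packed)]
struct Packed {
    tag: u8,
    value: u32,
}

impl PartialEq for Packed {
    fn eq(&self, other: &Self) -> bool {
        // Copy the fields to locals first (both are `Copy`), then compare the copies.
        let (a_tag, a_value) = (self.tag, self.value);
        let (b_tag, b_value) = (other.tag, other.value);
        a_tag == b_tag && a_value == b_value
    }
}

impl Eq for Packed {}
```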
diff --git a/vendor/zerovec/src/lib.rs b/vendor/zerovec/src/lib.rs
index 9a37c762e..b8b292488 100644
--- a/vendor/zerovec/src/lib.rs
+++ b/vendor/zerovec/src/lib.rs
@@ -34,7 +34,7 @@
//!
//! # Cargo features
//!
-//! This crate has five optional features:
+//! This crate has several optional Cargo features:
//! - `serde`: Allows serializing and deserializing `zerovec`'s abstractions via [`serde`](https://docs.rs/serde)
//! - `yoke`: Enables implementations of `Yokeable` from the [`yoke`](https://docs.rs/yoke/) crate, which is also useful
//! in situations involving a lot of zero-copy deserialization.
@@ -270,6 +270,8 @@ pub mod maps {
pub use crate::map2d::ZeroMap2dBorrowed;
pub use crate::map::{MutableZeroVecLike, ZeroMapKV, ZeroVecLike};
+
+ pub use crate::map2d::ZeroMap2dCursor;
}
pub mod vecs {
@@ -395,8 +397,9 @@ pub use zerovec_derive::make_ule;
/// Generate a corresponding [`VarULE`] type and the relevant [`EncodeAsVarULE`]/[`zerofrom::ZeroFrom`]
/// implementations for this type
///
-/// This can be attached to structs containing only [`AsULE`] types with the last field being [`Cow<'a, str>`](alloc::borrow::Cow),
-/// [`Cow<'a, str>`](alloc::borrow::Cow), [`ZeroSlice`], or [`VarZeroSlice`].
+/// This can be attached to structs containing only [`AsULE`] types with the last fields being
+/// [`Cow<'a, str>`](alloc::borrow::Cow), [`ZeroSlice`], or [`VarZeroSlice`]. If there is more than one such field, it will be represented
+/// using [`MultiFieldsULE`](crate::ule::MultiFieldsULE) and getters will be generated.
///
/// The type must be [`PartialEq`] and [`Eq`].
///
diff --git a/vendor/zerovec/src/map/borrowed.rs b/vendor/zerovec/src/map/borrowed.rs
index 4c1d1aef6..9d0854601 100644
--- a/vendor/zerovec/src/map/borrowed.rs
+++ b/vendor/zerovec/src/map/borrowed.rs
@@ -163,11 +163,6 @@ where
/// let borrowed = map.as_borrowed();
/// assert_eq!(borrowed.get(&1), Some("one"));
/// assert_eq!(borrowed.get(&3), None);
- ///
- /// let borrow = borrowed.get(&1);
- /// drop(borrowed);
- /// // still exists after the ZeroMapBorrowed has been dropped
- /// assert_eq!(borrow, Some("one"));
/// ```
pub fn get(&self, key: &K) -> Option<&'a V::GetType> {
let index = self.keys.zvl_binary_search(key).ok()?;
@@ -190,11 +185,6 @@ where
/// let borrowed = map.as_borrowed();
/// assert_eq!(borrowed.get_by(|probe| probe.cmp(&1)), Some("one"));
/// assert_eq!(borrowed.get_by(|probe| probe.cmp(&3)), None);
- ///
- /// let borrow = borrowed.get_by(|probe| probe.cmp(&1));
- /// drop(borrowed);
- /// // still exists after the ZeroMapBorrowed has been dropped
- /// assert_eq!(borrow, Some("one"));
/// ```
pub fn get_by(&self, predicate: impl FnMut(&K) -> Ordering) -> Option<&'a V::GetType> {
let index = self.keys.zvl_binary_search_by(predicate).ok()?;
@@ -211,8 +201,8 @@ where
/// map.insert(&1, "one");
/// map.insert(&2, "two");
/// let borrowed = map.as_borrowed();
- /// assert_eq!(borrowed.contains_key(&1), true);
- /// assert_eq!(borrowed.contains_key(&3), false);
+ /// assert!(borrowed.contains_key(&1));
+ /// assert!(!borrowed.contains_key(&3));
/// ```
pub fn contains_key(&self, key: &K) -> bool {
self.keys.zvl_binary_search(key).is_ok()
diff --git a/vendor/zerovec/src/map/map.rs b/vendor/zerovec/src/map/map.rs
index 379b22667..692e265d6 100644
--- a/vendor/zerovec/src/map/map.rs
+++ b/vendor/zerovec/src/map/map.rs
@@ -194,8 +194,8 @@ where
/// let mut map = ZeroMap::new();
/// map.insert(&1, "one");
/// map.insert(&2, "two");
- /// assert_eq!(map.contains_key(&1), true);
- /// assert_eq!(map.contains_key(&3), false);
+ /// assert!(map.contains_key(&1));
+ /// assert!(!map.contains_key(&3));
/// ```
pub fn contains_key(&self, key: &K) -> bool {
self.keys.zvl_binary_search(key).is_ok()
diff --git a/vendor/zerovec/src/map/serde.rs b/vendor/zerovec/src/map/serde.rs
index dbe4b433d..e82886d2a 100644
--- a/vendor/zerovec/src/map/serde.rs
+++ b/vendor/zerovec/src/map/serde.rs
@@ -9,7 +9,7 @@ use serde::de::{self, Deserialize, Deserializer, MapAccess, SeqAccess, Visitor};
#[cfg(feature = "serde")]
use serde::ser::{Serialize, SerializeMap, SerializeSeq, Serializer};
-/// This impl can be made available by enabling the optional `serde` feature of the `zerovec` crate
+/// This impl requires enabling the optional `serde` Cargo feature of the `zerovec` crate
#[cfg(feature = "serde")]
impl<'a, K, V> Serialize for ZeroMap<'a, K, V>
where
@@ -49,7 +49,7 @@ where
}
}
-/// This impl can be made available by enabling the optional `serde` feature of the `zerovec` crate
+/// This impl requires enabling the optional `serde` Cargo feature of the `zerovec` crate
#[cfg(feature = "serde")]
impl<'a, K, V> Serialize for ZeroMapBorrowed<'a, K, V>
where
@@ -158,7 +158,7 @@ where
}
}
-/// This impl can be made available by enabling the optional `serde` feature of the `zerovec` crate
+/// This impl requires enabling the optional `serde` Cargo feature of the `zerovec` crate
impl<'de, 'a, K, V> Deserialize<'de> for ZeroMap<'a, K, V>
where
K: ZeroMapKV<'a> + Ord + ?Sized,
@@ -190,7 +190,7 @@ where
}
}
-// /// This impl can be made available by enabling the optional `serde` feature of the `zerovec` crate
+// /// This impl requires enabling the optional `serde` Cargo feature of the `zerovec` crate
impl<'de, 'a, K, V> Deserialize<'de> for ZeroMapBorrowed<'a, K, V>
where
K: ZeroMapKV<'a> + Ord + ?Sized,
diff --git a/vendor/zerovec/src/map2d/borrowed.rs b/vendor/zerovec/src/map2d/borrowed.rs
index d9972fae9..209da299b 100644
--- a/vendor/zerovec/src/map2d/borrowed.rs
+++ b/vendor/zerovec/src/map2d/borrowed.rs
@@ -199,11 +199,6 @@ where
/// assert_eq!(borrowed.get_2d(&2, "one"), Some("bar"));
/// assert_eq!(borrowed.get_2d(&2, "two"), Some("baz"));
/// assert_eq!(borrowed.get_2d(&3, "three"), None);
- ///
- /// let borrow = borrowed.get_2d(&1, "one");
- /// drop(borrowed);
- /// // still exists after the ZeroMap2dBorrowed has been dropped
- /// assert_eq!(borrow, Some("foo"));
/// ```
pub fn get_2d(&self, key0: &K0, key1: &K1) -> Option<&'a V::GetType> {
self.get0(key0)?.get1(key1)
@@ -270,8 +265,8 @@ where
/// map.insert(&1, "one", "foo");
/// map.insert(&2, "two", "bar");
/// let borrowed = map.as_borrowed();
- /// assert_eq!(borrowed.contains_key0(&1), true);
- /// assert_eq!(borrowed.contains_key0(&3), false);
+ /// assert!(borrowed.contains_key0(&1));
+ /// assert!(!borrowed.contains_key0(&3));
/// ```
pub fn contains_key0(&self, key0: &K0) -> bool {
self.keys0.zvl_binary_search(key0).is_ok()
diff --git a/vendor/zerovec/src/map2d/cursor.rs b/vendor/zerovec/src/map2d/cursor.rs
index 0654ee794..4802187be 100644
--- a/vendor/zerovec/src/map2d/cursor.rs
+++ b/vendor/zerovec/src/map2d/cursor.rs
@@ -92,7 +92,9 @@ where
self.keys0.zvl_get(self.key0_index).unwrap()
}
- /// Borrow an ordered iterator over keys1 for a particular key0.
+ /// Borrow an ordered iterator over keys1 and values for a particular key0.
+ ///
+ /// To get the values as copy types, see [`Self::iter1_copied`].
///
/// For an example, see [`ZeroMap2d::iter0()`].
pub fn iter1(
@@ -153,6 +155,64 @@ where
impl<'l, 'a, K0, K1, V> ZeroMap2dCursor<'l, 'a, K0, K1, V>
where
K0: ZeroMapKV<'a>,
+ K1: ZeroMapKV<'a>,
+ V: ZeroMapKV<'a>,
+ K0: ?Sized,
+ K1: ?Sized,
+ V: Copy,
+{
+ /// Borrow an ordered iterator over keys1 and values for a particular key0.
+ ///
+ /// The values are returned as copy types.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use zerovec::ZeroMap2d;
+ ///
+ /// let zm2d: ZeroMap2d<str, u8, usize> = [
+ /// ("a", 0u8, 1usize),
+ /// ("b", 1u8, 1000usize),
+ /// ("b", 2u8, 2000usize),
+ /// ]
+ /// .into_iter()
+ /// .collect();
+ ///
+ /// let mut total_value = 0;
+ ///
+ /// for cursor in zm2d.iter0() {
+ /// for (_, value) in cursor.iter1_copied() {
+ /// total_value += value;
+ /// }
+ /// }
+ ///
+ /// assert_eq!(total_value, 3001);
+ /// ```
+ pub fn iter1_copied(
+ &self,
+ ) -> impl Iterator<Item = (&'l <K1 as ZeroMapKV<'a>>::GetType, V)> + '_ {
+ let range = self.get_range();
+ #[allow(clippy::unwrap_used)] // `self.get_range()` returns a valid range
+ range.map(move |idx| {
+ (
+ self.keys1.zvl_get(idx).unwrap(),
+ self.get1_copied_at(idx).unwrap(),
+ )
+ })
+ }
+
+ fn get1_copied_at(&self, index: usize) -> Option<V> {
+ let ule = self.values.zvl_get(index)?;
+ let mut result = Option::<V>::None;
+ V::Container::zvl_get_as_t(ule, |v| result.replace(*v));
+ #[allow(clippy::unwrap_used)] // `zvl_get_as_t` guarantees that the callback is invoked
+ Some(result.unwrap())
+ }
+}
+
+impl<'l, 'a, K0, K1, V> ZeroMap2dCursor<'l, 'a, K0, K1, V>
+where
+ K0: ZeroMapKV<'a>,
K1: ZeroMapKV<'a> + Ord,
V: ZeroMapKV<'a>,
K0: ?Sized,
@@ -253,14 +313,6 @@ where
let key1_index = self.get_key1_index_by(predicate)?;
self.get1_copied_at(key1_index)
}
-
- fn get1_copied_at(&self, index: usize) -> Option<V> {
- let ule = self.values.zvl_get(index)?;
- let mut result = Option::<V>::None;
- V::Container::zvl_get_as_t(ule, |v| result.replace(*v));
- #[allow(clippy::unwrap_used)] // `zvl_get_as_t` guarantees that the callback is invoked
- Some(result.unwrap())
- }
}
// We can't use the default PartialEq because ZeroMap2d is invariant
diff --git a/vendor/zerovec/src/map2d/map.rs b/vendor/zerovec/src/map2d/map.rs
index ab6eded4e..e6545dfa5 100644
--- a/vendor/zerovec/src/map2d/map.rs
+++ b/vendor/zerovec/src/map2d/map.rs
@@ -534,8 +534,8 @@ where
/// let mut map = ZeroMap2d::new();
/// map.insert(&1, "one", "foo");
/// map.insert(&2, "two", "bar");
- /// assert_eq!(map.contains_key0(&1), true);
- /// assert_eq!(map.contains_key0(&3), false);
+ /// assert!(map.contains_key0(&1));
+ /// assert!(!map.contains_key0(&3));
/// ```
pub fn contains_key0(&self, key0: &K0) -> bool {
self.keys0.zvl_binary_search(key0).is_ok()
@@ -814,13 +814,13 @@ mod test {
// Remove some elements
let result = zm2d.remove(&3, "ccc"); // first element
- assert_eq!(result, Some(String::from("CCC").into_boxed_str()));
+ assert_eq!(result.as_deref(), Some("CCC"));
let result = zm2d.remove(&3, "mmm"); // middle element
- assert_eq!(result, Some(String::from("MM0").into_boxed_str()));
+ assert_eq!(result.as_deref(), Some("MM0"));
let result = zm2d.remove(&5, "ddd"); // singleton K0
- assert_eq!(result, Some(String::from("DD1").into_boxed_str()));
+ assert_eq!(result.as_deref(), Some("DD1"));
let result = zm2d.remove(&9, "yyy"); // last element
- assert_eq!(result, Some(String::from("YYY").into_boxed_str()));
+ assert_eq!(result.as_deref(), Some("YYY"));
assert_eq!(format!("{:?}", zm2d), "ZeroMap2d { keys0: ZeroVec([3, 6, 7]), joiner: ZeroVec([1, 4, 7]), keys1: [\"eee\", \"ddd\", \"mmm\", \"nnn\", \"ddd\", \"eee\", \"www\"], values: [\"EEE\", \"DD3\", \"MM1\", \"NNN\", \"DD2\", \"EEE\", \"WWW\"] }");
}
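The test assertions above now go through `Option::as_deref`, which turns the `Option<Box<str>>` returned by `remove` into an `Option<&str>`, so the expected value can be a plain string literal. A minimal sketch of the pattern:

```rust
let removed: Option<Box<str>> = Some(String::from("CCC").into_boxed_str());

// `as_deref` borrows the boxed contents as &str, so no Box or String has to be
// constructed on the right-hand side of the assertion.
assert_eq!(removed.as_deref(), Some("CCC"));
```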
diff --git a/vendor/zerovec/src/map2d/serde.rs b/vendor/zerovec/src/map2d/serde.rs
index f8b5f147b..b5e913654 100644
--- a/vendor/zerovec/src/map2d/serde.rs
+++ b/vendor/zerovec/src/map2d/serde.rs
@@ -12,7 +12,7 @@ use serde::de::{self, Deserialize, Deserializer, MapAccess, Visitor};
#[cfg(feature = "serde")]
use serde::ser::{Serialize, SerializeMap, Serializer};
-/// This impl can be made available by enabling the optional `serde` feature of the `zerovec` crate
+/// This impl requires enabling the optional `serde` Cargo feature of the `zerovec` crate
#[cfg(feature = "serde")]
impl<'a, K0, K1, V> Serialize for ZeroMap2d<'a, K0, K1, V>
where
@@ -75,7 +75,7 @@ where
}
}
-/// This impl can be made available by enabling the optional `serde` feature of the `zerovec` crate
+/// This impl requires enabling the optional `serde` Cargo feature of the `zerovec` crate
#[cfg(feature = "serde")]
impl<'a, K0, K1, V> Serialize for ZeroMap2dBorrowed<'a, K0, K1, V>
where
@@ -220,7 +220,7 @@ where
}
}
-/// This impl can be made available by enabling the optional `serde` feature of the `zerovec` crate
+/// This impl requires enabling the optional `serde` Cargo feature of the `zerovec` crate
impl<'de, 'a, K0, K1, V> Deserialize<'de> for ZeroMap2d<'a, K0, K1, V>
where
K0: ZeroMapKV<'a> + Ord + ?Sized,
@@ -287,7 +287,7 @@ where
}
}
-/// This impl can be made available by enabling the optional `serde` feature of the `zerovec` crate
+/// This impl requires enabling the optional `serde` Cargo feature of the `zerovec` crate
impl<'de, 'a, K0, K1, V> Deserialize<'de> for ZeroMap2dBorrowed<'a, K0, K1, V>
where
K0: ZeroMapKV<'a> + Ord + ?Sized,
diff --git a/vendor/zerovec/src/ule/mod.rs b/vendor/zerovec/src/ule/mod.rs
index 29c7d0ecd..e8ecd26e5 100644
--- a/vendor/zerovec/src/ule/mod.rs
+++ b/vendor/zerovec/src/ule/mod.rs
@@ -283,6 +283,12 @@ where
/// "12.3e3" are logically equal, but not byte-for-byte equal, so we could define a canonical form
/// where only a single digit is allowed before `.`.
///
+/// There may also be cases where a `VarULE` has multiple canonical forms, such as a faster
+/// version and a smaller version. The cleanest way to handle this case would be separate types.
+/// However, if this is not feasible, then the application should ensure that the data it is
+/// deserializing is in the expected form. For example, if the data is being loaded from an
+/// external source, then requests could carry information about the expected form of the data.
+///
/// Failure to follow this invariant will cause surprising behavior in `PartialEq`, which may
/// result in unpredictable operations on `ZeroVec`, `VarZeroVec`, and `ZeroMap`.
pub unsafe trait VarULE: 'static {
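The doc addition above is about logical versus byte equality. A minimal illustration of the underlying point, using plain strings in a `VarZeroVec` (the numeric strings are only an example): two values that are logically equal but not byte-for-byte equal compare as unequal in byte-based containers, which is why a canonical form matters.

```rust
use zerovec::VarZeroVec;

// "12.3e3" and "1.23e4" denote the same number, but they are different byte
// strings, so byte-based equality treats the containers as distinct.
let a = VarZeroVec::<str>::from(&["12.3e3"]);
let b = VarZeroVec::<str>::from(&["1.23e4"]);
assert_ne!(a, b);
```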
diff --git a/vendor/zerovec/src/ule/option.rs b/vendor/zerovec/src/ule/option.rs
index e1d2d25fa..a6b1966a5 100644
--- a/vendor/zerovec/src/ule/option.rs
+++ b/vendor/zerovec/src/ule/option.rs
@@ -22,7 +22,7 @@ use core::mem::{self, MaybeUninit};
/// Some('ł'),
/// ]);
///
-/// assert_eq!(z.get(2), Some(Some(('ø'))));
+/// assert_eq!(z.get(2), Some(Some('ø')));
/// assert_eq!(z.get(3), Some(None));
/// ```
// Invariants:
diff --git a/vendor/zerovec/src/ule/plain.rs b/vendor/zerovec/src/ule/plain.rs
index 0b1bbb441..49455d45f 100644
--- a/vendor/zerovec/src/ule/plain.rs
+++ b/vendor/zerovec/src/ule/plain.rs
@@ -7,10 +7,7 @@
use super::*;
use crate::ZeroSlice;
-use core::{
- mem,
- num::{NonZeroI8, NonZeroU8},
-};
+use core::num::{NonZeroI8, NonZeroU8};
/// A u8 array of little-endian data with infallible conversions to and from &[u8].
#[repr(transparent)]
@@ -100,19 +97,7 @@ macro_rules! impl_const_constructors {
let len = bytes.len();
#[allow(clippy::modulo_one)]
if len % $size == 0 {
- unsafe {
- // Most of the slice manipulation functions are not yet const-stable,
- // so we construct a slice with the right metadata and cast its type
- // https://rust-lang.github.io/unsafe-code-guidelines/layout/pointers.html#notes
- //
- // Safety:
- // * [u8] and [RawBytesULE<N>] have different lengths but the same alignment
- // * ZeroSlice<$base> is repr(transparent) with [RawBytesULE<N>]
- let [ptr, _]: [usize; 2] = mem::transmute(bytes);
- let new_len = len / $size;
- let raw = [ptr, new_len];
- Ok(mem::transmute(raw))
- }
+ Ok(unsafe { Self::from_bytes_unchecked(bytes) })
} else {
Err(ZeroVecError::InvalidLength {
ty: concat!("<const construct: ", $size, ">"),
diff --git a/vendor/zerovec/src/ule/unvalidated.rs b/vendor/zerovec/src/ule/unvalidated.rs
index a6ae55dcf..4564c8673 100644
--- a/vendor/zerovec/src/ule/unvalidated.rs
+++ b/vendor/zerovec/src/ule/unvalidated.rs
@@ -95,9 +95,9 @@ impl UnvalidatedStr {
/// ```
/// use zerovec::ule::UnvalidatedStr;
///
- /// static a: &UnvalidatedStr = UnvalidatedStr::from_bytes(b"abc");
+ /// static A: &UnvalidatedStr = UnvalidatedStr::from_bytes(b"abc");
///
- /// let b = a.try_as_str().unwrap();
+ /// let b = A.try_as_str().unwrap();
/// assert_eq!(b, "abc");
/// ```
// Note: this is const starting in 1.63
@@ -154,7 +154,7 @@ unsafe impl VarULE for UnvalidatedStr {
}
}
-/// This impl can be made available by enabling the optional `serde` feature of the `zerovec` crate
+/// This impl requires enabling the optional `serde` Cargo feature of the `zerovec` crate
#[cfg(feature = "serde")]
impl serde::Serialize for UnvalidatedStr {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
@@ -173,7 +173,7 @@ impl serde::Serialize for UnvalidatedStr {
}
}
-/// This impl can be made available by enabling the optional `serde` feature of the `zerovec` crate
+/// This impl requires enabling the optional `serde` Cargo feature of the `zerovec` crate
#[cfg(feature = "serde")]
impl<'de> serde::Deserialize<'de> for Box<UnvalidatedStr> {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
@@ -190,7 +190,7 @@ impl<'de> serde::Deserialize<'de> for Box<UnvalidatedStr> {
}
}
-/// This impl can be made available by enabling the optional `serde` feature of the `zerovec` crate
+/// This impl requires enabling the optional `serde` Cargo feature of the `zerovec` crate
#[cfg(feature = "serde")]
impl<'de, 'a> serde::Deserialize<'de> for &'a UnvalidatedStr
where
diff --git a/vendor/zerovec/src/varzerovec/serde.rs b/vendor/zerovec/src/varzerovec/serde.rs
index dd6e863ff..649b29cfb 100644
--- a/vendor/zerovec/src/varzerovec/serde.rs
+++ b/vendor/zerovec/src/varzerovec/serde.rs
@@ -60,7 +60,7 @@ where
}
}
-/// This impl can be made available by enabling the optional `serde` feature of the `zerovec` crate
+/// This impl requires enabling the optional `serde` Cargo feature of the `zerovec` crate
impl<'de, 'a, T, F> Deserialize<'de> for VarZeroVec<'a, T, F>
where
T: VarULE + ?Sized,
@@ -81,7 +81,7 @@ where
}
}
-/// This impl can be made available by enabling the optional `serde` feature of the `zerovec` crate
+/// This impl requires enabling the optional `serde` Cargo feature of the `zerovec` crate
impl<'de, 'a, T, F> Deserialize<'de> for &'a VarZeroSlice<T, F>
where
T: VarULE + ?Sized,
@@ -111,7 +111,7 @@ where
}
}
-/// This impl can be made available by enabling the optional `serde` feature of the `zerovec` crate
+/// This impl requires enabling the optional `serde` Cargo feature of the `zerovec` crate
#[cfg(feature = "serde")]
impl<T, F> Serialize for VarZeroVec<'_, T, F>
where
@@ -134,7 +134,7 @@ where
}
}
-/// This impl can be made available by enabling the optional `serde` feature of the `zerovec` crate
+/// This impl requires enabling the optional `serde` Cargo feature of the `zerovec` crate
#[cfg(feature = "serde")]
impl<T, F> Serialize for VarZeroSlice<T, F>
where
diff --git a/vendor/zerovec/src/varzerovec/slice.rs b/vendor/zerovec/src/varzerovec/slice.rs
index 59e8da03f..afdbe80d9 100644
--- a/vendor/zerovec/src/varzerovec/slice.rs
+++ b/vendor/zerovec/src/varzerovec/slice.rs
@@ -29,8 +29,13 @@ use core::ops::Range;
/// The `F` type parameter is a [`VarZeroVecFormat`] (see its docs for more details), which can be used to select the
/// precise format of the backing buffer with various size and performance tradeoffs. It defaults to [`Index16`].
///
-/// This type can be nested within itself to allow for multi-level nested `Vec`s, for
-/// example the following code constructs the conceptual zero-copy equivalent of `Vec<Vec<Vec<str>>>`
+/// This type can be nested within itself to allow for multi-level nested `Vec`s.
+///
+/// # Examples
+///
+/// ## Nested Slices
+///
+/// The following code constructs the conceptual zero-copy equivalent of `Vec<Vec<Vec<str>>>`
///
/// ```rust
/// use zerovec::ule::*;
@@ -71,6 +76,25 @@ use core::ops::Range;
/// VarZeroVec::parse_byte_slice(bytes).unwrap();
/// assert_eq!(vzv_from_bytes, vzv_all);
/// ```
+///
+/// ## Iterate over Windows
+///
+/// Although [`VarZeroSlice`] does not itself have a `.windows` iterator like
+/// [core::slice::Windows], this behavior can be easily modeled using an iterator:
+///
+/// ```
+/// use zerovec::VarZeroVec;
+///
+/// let vzv = VarZeroVec::<str>::from(&["a", "b", "c", "d"]);
+/// # let mut pairs: Vec<(&str, &str)> = Vec::new();
+///
+/// let mut it = vzv.iter().peekable();
+/// while let (Some(x), Some(y)) = (it.next(), it.peek()) {
+/// // Evaluate (x, y) here.
+/// # pairs.push((x, y));
+/// }
+/// # assert_eq!(pairs, &[("a", "b"), ("b", "c"), ("c", "d")]);
+/// ```
//
// safety invariant: The slice MUST be one which parses to
// a valid VarZeroVecComponents<T>
diff --git a/vendor/zerovec/src/varzerovec/vec.rs b/vendor/zerovec/src/varzerovec/vec.rs
index 031da6453..7edb48a96 100644
--- a/vendor/zerovec/src/varzerovec/vec.rs
+++ b/vendor/zerovec/src/varzerovec/vec.rs
@@ -11,7 +11,7 @@ use core::ops::Deref;
use super::*;
-/// A zero-copy vector for variable-width types.
+/// A zero-copy, byte-aligned vector for variable-width types.
///
/// `VarZeroVec<T>` is designed as a drop-in replacement for `Vec<T>` in situations where it is
/// desirable to borrow data from an unaligned byte slice, such as zero-copy deserialization, and
@@ -39,6 +39,20 @@ use super::*;
/// The `F` type parameter is a [`VarZeroVecFormat`] (see its docs for more details), which can be used to select the
/// precise format of the backing buffer with various size and performance tradeoffs. It defaults to [`Index16`].
///
+/// # Bytes and Equality
+///
+/// Two [`VarZeroVec`]s are equal if and only if their bytes are equal, as described in the trait
+/// [`VarULE`]. However, we do not guarantee stability of byte equality or serialization format
+/// across major SemVer releases.
+///
+/// To compare a [`Vec<T>`] to a [`VarZeroVec<T>`], it is generally recommended to use
+/// [`Iterator::eq`], since it is somewhat expensive at runtime to convert from a [`Vec<T>`] to a
+/// [`VarZeroVec<T>`] or vice-versa.
+///
+/// Prior to zerovec reaching 1.0, the precise byte representation of [`VarZeroVec`] is still
+/// under consideration, with different options along the space-time spectrum. See
+/// [#1410](https://github.com/unicode-org/icu4x/issues/1410).
+///
/// # Example
///
/// ```rust
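As a minimal sketch of the `Iterator::eq` comparison recommended in the paragraph above (the contents are illustrative):

```rust
use zerovec::VarZeroVec;

let strings = ["foo", "bar", "baz"];
let vec: Vec<&str> = strings.to_vec();
let vzv = VarZeroVec::<str>::from(&strings);

// Compare element-wise rather than converting one container into the other.
assert!(vzv.iter().eq(vec.iter().copied()));
```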
diff --git a/vendor/zerovec/src/yoke_impls.rs b/vendor/zerovec/src/yoke_impls.rs
index 81fc22c74..0efb47a2d 100644
--- a/vendor/zerovec/src/yoke_impls.rs
+++ b/vendor/zerovec/src/yoke_impls.rs
@@ -15,7 +15,7 @@ use core::{mem, ptr};
use yoke::*;
// This impl is similar to the impl on Cow and is safe for the same reasons
-/// This impl can be made available by enabling the optional `yoke` feature of the `zerovec` crate
+/// This impl requires enabling the optional `yoke` Cargo feature of the `zerovec` crate
unsafe impl<'a, T: 'static + AsULE + ?Sized> Yokeable<'a> for ZeroVec<'static, T> {
type Output = ZeroVec<'a, T>;
#[inline]
@@ -43,7 +43,7 @@ unsafe impl<'a, T: 'static + AsULE + ?Sized> Yokeable<'a> for ZeroVec<'static, T
}
// This impl is similar to the impl on Cow and is safe for the same reasons
-/// This impl can be made available by enabling the optional `yoke` feature of the `zerovec` crate
+/// This impl requires enabling the optional `yoke` Cargo feature of the `zerovec` crate
unsafe impl<'a, T: 'static + VarULE + ?Sized> Yokeable<'a> for VarZeroVec<'static, T> {
type Output = VarZeroVec<'a, T>;
#[inline]
@@ -71,7 +71,7 @@ unsafe impl<'a, T: 'static + VarULE + ?Sized> Yokeable<'a> for VarZeroVec<'stati
}
// This impl is similar to the impl on Cow and is safe for the same reasons
-/// This impl can be made available by enabling the optional `yoke` feature of the `zerovec` crate
+/// This impl requires enabling the optional `yoke` Cargo feature of the `zerovec` crate
unsafe impl<'a> Yokeable<'a> for FlexZeroVec<'static> {
type Output = FlexZeroVec<'a>;
#[inline]
@@ -98,7 +98,7 @@ unsafe impl<'a> Yokeable<'a> for FlexZeroVec<'static> {
}
}
-/// This impl can be made available by enabling the optional `yoke` feature of the `zerovec` crate
+/// This impl requires enabling the optional `yoke` Cargo feature of the `zerovec` crate
#[allow(clippy::transmute_ptr_to_ptr)]
unsafe impl<'a, K, V> Yokeable<'a> for ZeroMap<'static, K, V>
where
@@ -144,7 +144,7 @@ where
}
}
-/// This impl can be made available by enabling the optional `yoke` feature of the `zerovec` crate
+/// This impl requires enabling the optional `yoke` Cargo feature of the `zerovec` crate
#[allow(clippy::transmute_ptr_to_ptr)]
unsafe impl<'a, K, V> Yokeable<'a> for ZeroMapBorrowed<'static, K, V>
where
@@ -190,7 +190,7 @@ where
}
}
-/// This impl can be made available by enabling the optional `yoke` feature of the `zerovec` crate
+/// This impl requires enabling the optional `yoke` Cargo feature of the `zerovec` crate
#[allow(clippy::transmute_ptr_to_ptr)]
unsafe impl<'a, K0, K1, V> Yokeable<'a> for ZeroMap2d<'static, K0, K1, V>
where
@@ -238,7 +238,7 @@ where
}
}
-/// This impl can be made available by enabling the optional `yoke` feature of the `zerovec` crate
+/// This impl requires enabling the optional `yoke` Cargo feature of the `zerovec` crate
#[allow(clippy::transmute_ptr_to_ptr)]
unsafe impl<'a, K0, K1, V> Yokeable<'a> for ZeroMap2dBorrowed<'static, K0, K1, V>
where
diff --git a/vendor/zerovec/src/zerovec/mod.rs b/vendor/zerovec/src/zerovec/mod.rs
index e0876338f..371450e21 100644
--- a/vendor/zerovec/src/zerovec/mod.rs
+++ b/vendor/zerovec/src/zerovec/mod.rs
@@ -22,7 +22,7 @@ use core::marker::PhantomData;
use core::mem;
use core::ops::Deref;
-/// A zero-copy vector for fixed-width types.
+/// A zero-copy, byte-aligned vector for fixed-width types.
///
/// `ZeroVec<T>` is designed as a drop-in replacement for `Vec<T>` in situations where it is
/// desirable to borrow data from an unaligned byte slice, such as zero-copy deserialization.
@@ -352,11 +352,6 @@ where
/// `bytes` need to be an output from [`ZeroSlice::as_bytes()`].
pub const unsafe fn from_bytes_unchecked(bytes: &'a [u8]) -> Self {
// &[u8] and &[T::ULE] are the same slice with different length metadata.
- /// core::slice::from_raw_parts(a, b) = core::mem::transmute((a, b)) hack
- /// ```compile_fail
- /// const unsafe fn canary() { core::slice::from_raw_parts(0 as *const u8, 0); }
- /// ```
- const _: () = ();
Self::new_borrowed(core::mem::transmute((
bytes.as_ptr(),
bytes.len() / core::mem::size_of::<T::ULE>(),
diff --git a/vendor/zerovec/src/zerovec/serde.rs b/vendor/zerovec/src/zerovec/serde.rs
index 8250fc20a..e3141071c 100644
--- a/vendor/zerovec/src/zerovec/serde.rs
+++ b/vendor/zerovec/src/zerovec/serde.rs
@@ -58,7 +58,7 @@ where
}
}
-/// This impl can be made available by enabling the optional `serde` feature of the `zerovec` crate
+/// This impl requires enabling the optional `serde` Cargo feature of the `zerovec` crate
impl<'de, 'a, T> Deserialize<'de> for ZeroVec<'a, T>
where
T: 'de + Deserialize<'de> + AsULE,
@@ -77,7 +77,7 @@ where
}
}
-/// This impl can be made available by enabling the optional `serde` feature of the `zerovec` crate
+/// This impl requires enabling the optional `serde` Cargo feature of the `zerovec` crate
impl<T> Serialize for ZeroVec<'_, T>
where
T: Serialize + AsULE,
@@ -98,7 +98,7 @@ where
}
}
-/// This impl can be made available by enabling the optional `serde` feature of the `zerovec` crate
+/// This impl requires enabling the optional `serde` Cargo feature of the `zerovec` crate
impl<'de, T> Deserialize<'de> for Box<ZeroSlice<T>>
where
T: Deserialize<'de> + AsULE + 'static,
@@ -113,7 +113,7 @@ where
}
}
-/// This impl can be made available by enabling the optional `serde` feature of the `zerovec` crate
+/// This impl requires enabling the optional `serde` Cargo feature of the `zerovec` crate
impl<'de, 'a, T> Deserialize<'de> for &'a ZeroSlice<T>
where
T: Deserialize<'de> + AsULE + 'static,
@@ -141,7 +141,7 @@ where
}
}
-/// This impl can be made available by enabling the optional `serde` feature of the `zerovec` crate
+/// This impl requires enabling the optional `serde` Cargo feature of the `zerovec` crate
impl<T> Serialize for ZeroSlice<T>
where
T: Serialize + AsULE,
diff --git a/vendor/zerovec/src/zerovec/slice.rs b/vendor/zerovec/src/zerovec/slice.rs
index ce27a15b2..847705304 100644
--- a/vendor/zerovec/src/zerovec/slice.rs
+++ b/vendor/zerovec/src/zerovec/slice.rs
@@ -65,12 +65,10 @@ where
/// `bytes` need to be an output from [`ZeroSlice::as_bytes()`].
pub const unsafe fn from_bytes_unchecked(bytes: &[u8]) -> &Self {
// &[u8] and &[T::ULE] are the same slice with different length metadata.
- /// core::slice::from_raw_parts(a, b) = core::mem::transmute((a, b)) hack
- /// ```compile_fail
- /// const unsafe fn canary() { core::slice::from_raw_parts(0 as *const u8, 0); }
- /// ```
- const _: () = ();
- core::mem::transmute((bytes.as_ptr(), bytes.len() / core::mem::size_of::<T::ULE>()))
+ Self::from_ule_slice(core::mem::transmute((
+ bytes.as_ptr(),
+ bytes.len() / core::mem::size_of::<T::ULE>(),
+ )))
}
/// Construct a `&ZeroSlice<T>` from a slice of ULEs.
@@ -235,17 +233,17 @@ where
/// ```
/// use zerovec::ZeroSlice;
///
- /// const bytes: &[u8] = &[0xD3, 0x00, 0x19, 0x01, 0xA5, 0x01, 0xCD, 0x80];
- /// const zs_u16: &ZeroSlice<u16> = {
- /// match ZeroSlice::<u16>::try_from_bytes(bytes) {
+ /// const BYTES: &[u8] = &[0xD3, 0x00, 0x19, 0x01, 0xA5, 0x01, 0xCD, 0x80];
+ /// const ZS_U16: &ZeroSlice<u16> = {
+ /// match ZeroSlice::<u16>::try_from_bytes(BYTES) {
/// Ok(s) => s,
/// Err(_) => unreachable!(),
/// }
/// };
///
- /// let zs_i16: &ZeroSlice<i16> = zs_u16.cast();
+ /// let zs_i16: &ZeroSlice<i16> = ZS_U16.cast();
///
- /// assert_eq!(zs_u16.get(3), Some(32973));
+ /// assert_eq!(ZS_U16.get(3), Some(32973));
/// assert_eq!(zs_i16.get(3), Some(-32563));
/// ```
#[inline]
@@ -268,18 +266,18 @@ where
/// ```
/// use zerovec::ZeroSlice;
///
- /// const bytes: &[u8] = &[0x7F, 0xF3, 0x01, 0x00, 0x49, 0xF6, 0x01, 0x00];
- /// const zs_u32: &ZeroSlice<u32> = {
- /// match ZeroSlice::<u32>::try_from_bytes(bytes) {
+ /// const BYTES: &[u8] = &[0x7F, 0xF3, 0x01, 0x00, 0x49, 0xF6, 0x01, 0x00];
+ /// const ZS_U32: &ZeroSlice<u32> = {
+ /// match ZeroSlice::<u32>::try_from_bytes(BYTES) {
/// Ok(s) => s,
/// Err(_) => unreachable!(),
/// }
/// };
///
/// let zs_u8_4: &ZeroSlice<[u8; 4]> =
- /// zs_u32.try_as_converted().expect("valid code points");
+ /// ZS_U32.try_as_converted().expect("valid code points");
///
- /// assert_eq!(zs_u32.get(0), Some(127871));
+ /// assert_eq!(ZS_U32.get(0), Some(127871));
/// assert_eq!(zs_u8_4.get(0), Some([0x7F, 0xF3, 0x01, 0x00]));
/// ```
#[inline]
diff --git a/vendor/zip/.cargo-checksum.json b/vendor/zip/.cargo-checksum.json
index e3121cd34..9766507eb 100644
--- a/vendor/zip/.cargo-checksum.json
+++ b/vendor/zip/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"CODE_OF_CONDUCT.md":"77930b2129928632cae79e10d0863885b34729e4f144f3ae5443cff65997196a","Cargo.lock":"bf885d0e396f14628e19f1e5e6757d8539ba6ffa3103849159aa2106d280d6c2","Cargo.toml":"89efc81641360c8d5542d56dd8b23d65b7c4f7921dea5fd4c8c43ef8db358063","LICENSE":"6ac8711fb340c62ce0a4ecd463342d3fa0e8e70de697c863a2e1c0c53006003c","README.md":"0c9a153af715012ac84e84f11de2cf9af94eefd98f4fcaee272fcffae482732e","benches/read_entry.rs":"700235ee560887f33da56895d2a4da901c070b2a6a59aef90331046af4ddadad","benches/read_metadata.rs":"4134c674361534b32ef7fa3776c9da93928d2ba4ad13bc74438397a5a6281555","examples/extract.rs":"c6e16e42391ce294093866822dc2ee8cf45231d50101d619a0caffa1be633ae6","examples/extract_lorem.rs":"a5b33a31e7a2865e8ed7af52bab538268a8ba07cc4c3d8aa85811f80c86cb1fb","examples/file_info.rs":"e091923fc8c492255609efdd3c5fb656ad177601a73f4011005deab8096ca425","examples/stdin_info.rs":"f0bfed4016abbdb3922426f118bc990e6a1335bab878e1e146cb057f4e33d3cf","examples/write_dir.rs":"6f0b62022946765b6ef5f8ecac054a742d823ab0473152748d637da1568a9475","examples/write_sample.rs":"f57f463a9dea4eca7054d10d35bd8c65e1b6e0530bfbebf758006acea1e4ca0d","src/aes.rs":"c1b19bdf15c75cc27b5657d1dc574b5c5ebcdc7ca2573e9909cc6ba1ffe7f6d8","src/aes_ctr.rs":"6a7a07c9538daf53624818f82084c6655db3862ca00751f3679c8b0bc346b70d","src/compression.rs":"20c2f0c1857a3dce154505b5a73b6a22da867e16d676f0617a0e453f13cad175","src/cp437.rs":"e3a044ddefc947f6d9c26ba50ebd438955d90b8971ad8b15e5dd1194495f3b93","src/crc32.rs":"6c5a48834dfa1ab33590fcc63f4121ee6d3f8fbcc5018e905c4d8570096874cf","src/lib.rs":"7c2f6bc3dde9575eeeaef9569139f28351333c29362f74569e627ad7be5a10b2","src/read.rs":"245b8a4d5b367f917668d4db77f20331612a48a59641484e56692a22ffb61a7e","src/result.rs":"cfa7a545a56c6ce807c04675d70f9aed42d1aa19ffb717f4f94b8901a9f3728d","src/spec.rs":"e3b3429c814bb78efe14a8a48d53c30793f017b5d090889d72a2dc20b94c549a","src/types.rs":"b2979cdba63c9f5e9360578036541a80780300ca4febaece5730f670ab1b0a74","src/write.rs":"8062be33459c58751c3ca8f058df0524cfc05d12648cc77b0d505e32afb98c48","src/zipcrypto.rs":"4fe362af2a8e419d9cff6acc165acdc0dccaacc53baec7269b5f55dfcd396f5c","tests/aes_encryption.rs":"3733df1f5dcab4e9f9b9ec25cc7660f23c917759670e9b2e6fe77cfcd771faa9","tests/data/aes_archive.zip":"4abb3f304d1ab669453b7c4eae80db6ce8aff4ab91c8ab9a6edf90bbfede12f4","tests/data/comment_garbage.zip":"4a908af18691e50fc7773b34669a8ab6cb60875bee4d60fb7cd66d90eed55d2b","tests/data/files_and_dirs.zip":"e54cd8d084739f8d0b119d769c1263507b4da915cc38e4e2d3e4ef17c718f317","tests/data/invalid_cde_number_of_files_allocation_greater_offset.zip":"c680ed5ad622aae23eea3b5d90145cb79550e413cc6e9a7b67bb1043851512ee","tests/data/invalid_cde_number_of_files_allocation_smaller_offset.zip":"7c272255e825aff7167dc8d1f2e1d16583e58347bb56ecf9879bdb07c6ad18e9","tests/data/invalid_offset.zip":"c5534a1803145f6344b04c437d436bd583852c78dd3937f4a73a2a39aa2d76b2","tests/data/invalid_offset2.zip":"0f0f3d654f303eaf90f599c0b0266be476ce34857cff932070a86e9853dcb4f9","tests/data/mimetype.zip":"aad4289745bd89016181a35653c9a483c6b9b632e06d002736c7040d61c562a2","tests/data/zip64_demo.zip":"223072a480f60d6a700e427b294e575ae2d6265f3e9881b647927f31d96c7e01","tests/end_to_end.rs":"a39d88edcea901bba2bbdb277f073724a4aadd5c8e692256e9c57a09fb2fd357","tests/invalid_date.rs":"72d5425ffe2887e05646ba56f5116ac760cbeeb7889da5c9be612fe57477bc94","tests/issue_234.rs":"1a3d68d79108140100709c188841c6bf54af8500c819072a440ec72df5d8ab26","tests/zip64_large.rs":"083f070ae566e0a3b317bd94d51186b37f2ff88f225deae721b0b56be838da1c","te
sts/zip_comment_garbage.rs":"ab0e2e3ace1ca5459977a092ee96836f58c7f7a7cfc8de258ab448d8d24f1cbb","tests/zip_crypto.rs":"7f4f9670e5d971ac4234c7ab2cafecf43801baf0be0cfca6e34c651f6fc6a80d"},"package":"537ce7411d25e54e8ae21a7ce0b15840e7bfcff15b51d697ec3266cc76bdf080"} \ No newline at end of file
+{"files":{"CHANGELOG.md":"da022ffbfcbf04616fc8e31b7a38fd7c8f2ad47a7fb42750fc04de110cf95857","CODE_OF_CONDUCT.md":"77930b2129928632cae79e10d0863885b34729e4f144f3ae5443cff65997196a","Cargo.lock":"5c81aacae00a3838358fed3e453cbb128f9870162a280d91b77f20c7b4c94390","Cargo.toml":"69d71445861d823f30fbd330f74cb395ed1f4e981f3c61e4754eda2f1f1cb0b4","LICENSE":"6ac8711fb340c62ce0a4ecd463342d3fa0e8e70de697c863a2e1c0c53006003c","README.md":"4da0a964d8fb78a5caae4674b4f69fc24f080167af56927d1a5c15412f9f672d","benches/read_entry.rs":"700235ee560887f33da56895d2a4da901c070b2a6a59aef90331046af4ddadad","benches/read_metadata.rs":"591e5adb9d36bf66da21d7e0d36a129c9bcf1331fa11714df8c53c860aee65ff","examples/extract.rs":"81f58da293553fe019319348e1d226a2d192480667877e72d77ab58812f5de1b","examples/extract_lorem.rs":"2d438706ca6e02468f362a54f4f2125598d2be1051e7ce40b71be58cfd703669","examples/file_info.rs":"20639234293511022938c8204488d182ccbcdfb59d962000acd8ac8f2f6db667","examples/stdin_info.rs":"5735801c9692a9b6ec35da6e6a65807db48b577068ab9df0018343df5161034b","examples/write_dir.rs":"224239e040c583f2e1ca4b1328ebe1a63b3c2213df6da8232cd62e004db05587","examples/write_sample.rs":"f723bf69676492599c2684aa995177f80e2aa922f1c9ae1681ed39e0a15959ad","src/aes.rs":"c1b19bdf15c75cc27b5657d1dc574b5c5ebcdc7ca2573e9909cc6ba1ffe7f6d8","src/aes_ctr.rs":"6a7a07c9538daf53624818f82084c6655db3862ca00751f3679c8b0bc346b70d","src/compression.rs":"84f8ded41182155cfbdb1ce79b9dc84cae0a6e55557021cf9f2d7168d8913b48","src/cp437.rs":"e3a044ddefc947f6d9c26ba50ebd438955d90b8971ad8b15e5dd1194495f3b93","src/crc32.rs":"6c5a48834dfa1ab33590fcc63f4121ee6d3f8fbcc5018e905c4d8570096874cf","src/lib.rs":"7c2f6bc3dde9575eeeaef9569139f28351333c29362f74569e627ad7be5a10b2","src/read.rs":"1c8e60c37a7016399d5bdf3796bcd4ad8a5cdf7450498233c7d056fd7533692a","src/result.rs":"7dc62da507ef2228313ed617d01f4d207eacb0b66352a497d24f04298e90e5df","src/spec.rs":"2a249aeb3c58ca475b5c7da6919cb61ac3cbe07b94a0cb3c730646d46fb265a8","src/types.rs":"21a37a8632bd9b2572a7ccd19e6048dcc453a3c4fd7583f3eddefbec7dd641a5","src/write.rs":"16ccc971f2858042d2f92e0448a72849745ea0a25449fd7c8961b78a466b6e6e","src/zipcrypto.rs":"4fe362af2a8e419d9cff6acc165acdc0dccaacc53baec7269b5f55dfcd396f5c","tests/aes_encryption.rs":"3733df1f5dcab4e9f9b9ec25cc7660f23c917759670e9b2e6fe77cfcd771faa9","tests/data/aes_archive.zip":"4abb3f304d1ab669453b7c4eae80db6ce8aff4ab91c8ab9a6edf90bbfede12f4","tests/data/comment_garbage.zip":"4a908af18691e50fc7773b34669a8ab6cb60875bee4d60fb7cd66d90eed55d2b","tests/data/files_and_dirs.zip":"e54cd8d084739f8d0b119d769c1263507b4da915cc38e4e2d3e4ef17c718f317","tests/data/invalid_cde_number_of_files_allocation_greater_offset.zip":"c680ed5ad622aae23eea3b5d90145cb79550e413cc6e9a7b67bb1043851512ee","tests/data/invalid_cde_number_of_files_allocation_smaller_offset.zip":"7c272255e825aff7167dc8d1f2e1d16583e58347bb56ecf9879bdb07c6ad18e9","tests/data/invalid_offset.zip":"c5534a1803145f6344b04c437d436bd583852c78dd3937f4a73a2a39aa2d76b2","tests/data/invalid_offset2.zip":"0f0f3d654f303eaf90f599c0b0266be476ce34857cff932070a86e9853dcb4f9","tests/data/mimetype.zip":"aad4289745bd89016181a35653c9a483c6b9b632e06d002736c7040d61c562a2","tests/data/zip64_demo.zip":"223072a480f60d6a700e427b294e575ae2d6265f3e9881b647927f31d96c7e01","tests/end_to_end.rs":"ba69c247e2f555a954ae97615a46a9b21092671f7ff1abebaba058b30a53bb66","tests/invalid_date.rs":"72d5425ffe2887e05646ba56f5116ac760cbeeb7889da5c9be612fe57477bc94","tests/issue_234.rs":"1a3d68d79108140100709c188841c6bf54af8500c819072a440ec72df5d8ab26","tests/zip6
4_large.rs":"d4859f551be83c2ae3bd4a8e37c7e617fb54bb27310e725de6c7076c5e566d6b","tests/zip_comment_garbage.rs":"ab0e2e3ace1ca5459977a092ee96836f58c7f7a7cfc8de258ab448d8d24f1cbb","tests/zip_crypto.rs":"7f4f9670e5d971ac4234c7ab2cafecf43801baf0be0cfca6e34c651f6fc6a80d"},"package":"0445d0fbc924bb93539b4316c11afb121ea39296f99a3c4c9edad09e3658cdef"} \ No newline at end of file
diff --git a/vendor/zip/CHANGELOG.md b/vendor/zip/CHANGELOG.md
new file mode 100644
index 000000000..cd79e3913
--- /dev/null
+++ b/vendor/zip/CHANGELOG.md
@@ -0,0 +1,9 @@
+# Changelog
+
+## [0.6.4]
+
+### Changed
+
+ - [#333](https://github.com/zip-rs/zip/pull/333): disabled the default features of the `time` dependency, and also `formatting` and `macros`, as they were enabled by mistake.
+ - Deprecated [`DateTime::from_time`](https://docs.rs/zip/0.6/zip/struct.DateTime.html#method.from_time) in favor of [`DateTime::try_from`](https://docs.rs/zip/0.6/zip/struct.DateTime.html#impl-TryFrom-for-DateTime)
+ \ No newline at end of file
diff --git a/vendor/zip/Cargo.lock b/vendor/zip/Cargo.lock
index 1da5fdf76..f0f4b2bd0 100644
--- a/vendor/zip/Cargo.lock
+++ b/vendor/zip/Cargo.lock
@@ -22,9 +22,9 @@ dependencies = [
[[package]]
name = "base64ct"
-version = "1.0.1"
+version = "1.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8a32fd6af2b5827bce66c29053ba0e7c42b9dcab01835835058558c10851a46b"
+checksum = "b645a089122eccb6111b4f81cbc1a49f5900ac4666bb93ac027feaecf15607bf"
[[package]]
name = "bencher"
@@ -34,9 +34,9 @@ checksum = "7dfdb4953a096c551ce9ace855a604d702e6e62d77fac690575ae347571717f5"
[[package]]
name = "block-buffer"
-version = "0.10.2"
+version = "0.10.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0bf7fe51849ea569fd452f37822f606a5cabb684dc918707a0193fd4664ff324"
+checksum = "69cce20737498f97b993470a6e536b8523f0af7892a4f928cceb1ac5e52ebe7e"
dependencies = [
"generic-array",
]
@@ -49,9 +49,9 @@ checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610"
[[package]]
name = "bzip2"
-version = "0.4.3"
+version = "0.4.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6afcd980b5f3a45017c57e57a2fcccbb351cc43a356ce117ef760ef8052b89b0"
+checksum = "bdb116a6ef3f6c3698828873ad02c3014b3c85cadb88496095628e3ef1e347f8"
dependencies = [
"bzip2-sys",
"libc",
@@ -70,9 +70,9 @@ dependencies = [
[[package]]
name = "cc"
-version = "1.0.73"
+version = "1.0.78"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2fff2a6927b3bb87f9595d67196a70493f627687a71d87a0d692242c33f58c11"
+checksum = "a20104e2335ce8a659d6dd92a51a767a0c062599c73b343fd152cb401e828c3d"
dependencies = [
"jobserver",
]
@@ -100,9 +100,9 @@ checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc"
[[package]]
name = "cpufeatures"
-version = "0.2.2"
+version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "59a6001667ab124aebae2a495118e11d30984c3a653e99d86d58971708cf5e4b"
+checksum = "28d997bd5e24a5928dd43e46dc529867e207907fe0b239c3477d924f7f2ca320"
dependencies = [
"libc",
]
@@ -118,19 +118,18 @@ dependencies = [
[[package]]
name = "crossbeam-utils"
-version = "0.8.8"
+version = "0.8.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0bf124c720b7686e3c2663cf54062ab0f68a88af2fb6a030e87e30bf721fcb38"
+checksum = "4fb766fa798726286dbbb842f174001dab8abc7b627a1dd86e0b7222a95d929f"
dependencies = [
"cfg-if",
- "lazy_static",
]
[[package]]
name = "crypto-common"
-version = "0.1.3"
+version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "57952ca27b5e3606ff4dd79b0020231aaf9d6aa76dc05fd30137538c50bd3ce8"
+checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3"
dependencies = [
"generic-array",
"typenum",
@@ -138,9 +137,9 @@ dependencies = [
[[package]]
name = "digest"
-version = "0.10.3"
+version = "0.10.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f2fb860ca6fafa5552fb6d0e816a69c8e49f0908bf524e30a90d97c85892d506"
+checksum = "8168378f4e5023e7218c89c891c0fd8ecdb5e5e4f18cb78f38cf245dd021e76f"
dependencies = [
"block-buffer",
"crypto-common",
@@ -149,9 +148,9 @@ dependencies = [
[[package]]
name = "flate2"
-version = "1.0.24"
+version = "1.0.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f82b0f4c27ad9f8bfd1f3208d882da2b09c301bc1c828fd3a00d0216d2fbbff6"
+checksum = "a8a2db397cb1c8772f31494cb8917e48cd1e64f0fa7efac59fbd741a0a8ce841"
dependencies = [
"crc32fast",
"libz-sys",
@@ -160,9 +159,9 @@ dependencies = [
[[package]]
name = "generic-array"
-version = "0.14.5"
+version = "0.14.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fd48d33ec7f05fbfa152300fdad764757cbded343c1aa1cff2fbaf4134851803"
+checksum = "bff49e947297f3312447abdca79f45f4738097cc82b06e72054d2223f601f1b9"
dependencies = [
"typenum",
"version_check",
@@ -170,9 +169,9 @@ dependencies = [
[[package]]
name = "getrandom"
-version = "0.2.6"
+version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9be70c98951c83b8d2f8f60d7065fa6d5146873094452a1008da8c2f1e4205ad"
+checksum = "c05aeb6a22b8f62540c194aac980f2115af067bfe15a0734d7277a768d396b31"
dependencies = [
"cfg-if",
"libc",
@@ -190,30 +189,24 @@ dependencies = [
[[package]]
name = "itoa"
-version = "1.0.1"
+version = "1.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1aab8fc367588b89dcee83ab0fd66b72b50b72fa1904d7095045ace2b0c81c35"
+checksum = "fad582f4b9e86b6caa621cabeb0963332d92eea04729ab12892c2533951e6440"
[[package]]
name = "jobserver"
-version = "0.1.24"
+version = "0.1.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "af25a77299a7f711a01975c35a6a424eb6862092cc2d6c72c4ed6cbc56dfc1fa"
+checksum = "068b1ee6743e4d11fb9c6a1e6064b3693a1b600e7f5f5988047d98b3dc9fb90b"
dependencies = [
"libc",
]
[[package]]
-name = "lazy_static"
-version = "1.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
-
-[[package]]
name = "libc"
-version = "0.2.121"
+version = "0.2.139"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "efaa7b300f3b5fe8eb6bf21ce3895e1751d9665086af2d64b42f19701015ff4f"
+checksum = "201de327520df007757c1f0adce6e827fe8562fbc28bfd9c15571c66ca1f5f79"
[[package]]
name = "libz-sys"
@@ -228,23 +221,14 @@ dependencies = [
[[package]]
name = "miniz_oxide"
-version = "0.5.4"
+version = "0.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "96590ba8f175222643a85693f33d26e9c8a015f599c216509b1a6894af675d34"
+checksum = "b275950c28b37e794e8c55d88aeb5e139d0ce23fdbbeda68f8d7174abdf9e8fa"
dependencies = [
"adler",
]
[[package]]
-name = "num_threads"
-version = "0.1.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "aba1801fb138d8e85e11d0fc70baf4fe1cdfffda7c6cd34a854905df588e5ed0"
-dependencies = [
- "libc",
-]
-
-[[package]]
name = "opaque-debug"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -275,15 +259,15 @@ dependencies = [
[[package]]
name = "pkg-config"
-version = "0.3.25"
+version = "0.3.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1df8c4ec4b0627e53bdf214615ad287367e482558cf84b109250b37464dc03ae"
+checksum = "6ac9a59f73473f1b8d852421e59e64809f025994837ef743615c6d0c5b305160"
[[package]]
name = "rand_core"
-version = "0.6.3"
+version = "0.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7"
+checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
[[package]]
name = "same-file"
@@ -295,10 +279,16 @@ dependencies = [
]
[[package]]
+name = "serde"
+version = "1.0.152"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb"
+
+[[package]]
name = "sha1"
-version = "0.10.1"
+version = "0.10.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c77f4e7f65455545c2153c1253d25056825e77ee2533f0e41deb65a93a34852f"
+checksum = "f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3"
dependencies = [
"cfg-if",
"cpufeatures",
@@ -307,9 +297,9 @@ dependencies = [
[[package]]
name = "sha2"
-version = "0.10.2"
+version = "0.10.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "55deaec60f81eefe3cce0dc50bda92d6d8e88f2a27df7c5033b42afeb1ed2676"
+checksum = "82e6b795fe2e3b1e845bafcb27aa35405c4d47cdfc92af5fc8d3002f76cebdc0"
dependencies = [
"cfg-if",
"cpufeatures",
@@ -324,27 +314,36 @@ checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601"
[[package]]
name = "time"
-version = "0.3.9"
+version = "0.3.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c2702e08a7a860f005826c6815dcac101b19b5eb330c27fe4a5928fec1d20ddd"
+checksum = "a561bf4617eebd33bca6434b988f39ed798e527f51a1e797d0ee4f61c0a38376"
dependencies = [
"itoa",
- "libc",
- "num_threads",
+ "serde",
+ "time-core",
"time-macros",
]
[[package]]
+name = "time-core"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2e153e1f1acaef8acc537e68b44906d2db6436e2b35ac2c6b42640fff91f00fd"
+
+[[package]]
name = "time-macros"
-version = "0.2.4"
+version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "42657b1a6f4d817cda8e7a0ace261fe0cc946cf3a80314390b22cc61ae080792"
+checksum = "d967f99f534ca7e495c575c62638eebc2898a8c84c119b89e250477bc4ba16b2"
+dependencies = [
+ "time-core",
+]
[[package]]
name = "typenum"
-version = "1.15.0"
+version = "1.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987"
+checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba"
[[package]]
name = "vcpkg"
@@ -371,9 +370,9 @@ dependencies = [
[[package]]
name = "wasi"
-version = "0.10.2+wasi-snapshot-preview1"
+version = "0.11.0+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6"
+checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
[[package]]
name = "winapi"
@@ -408,7 +407,7 @@ checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "zip"
-version = "0.6.3"
+version = "0.6.4"
dependencies = [
"aes",
"bencher",
@@ -448,10 +447,11 @@ dependencies = [
[[package]]
name = "zstd-sys"
-version = "2.0.1+zstd.1.5.2"
+version = "2.0.5+zstd.1.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9fd07cbbc53846d9145dbffdf6dd09a7a0aa52be46741825f5c97bdd4f73f12b"
+checksum = "edc50ffce891ad571e9f9afe5039c4837bede781ac4bb13052ed7ae695518596"
dependencies = [
"cc",
"libc",
+ "pkg-config",
]
diff --git a/vendor/zip/Cargo.toml b/vendor/zip/Cargo.toml
index 7480ca9ea..7a1b656b2 100644
--- a/vendor/zip/Cargo.toml
+++ b/vendor/zip/Cargo.toml
@@ -12,7 +12,7 @@
[package]
edition = "2018"
name = "zip"
-version = "0.6.3"
+version = "0.6.4"
authors = [
"Mathijs van de Nes <git@mathijs.vd-nes.nl>",
"Marli Frost <marli@frost.red>",
@@ -21,6 +21,7 @@ authors = [
description = """
Library to support the reading and writing of zip files.
"""
+readme = "README.md"
keywords = [
"zip",
"archive",
@@ -74,14 +75,12 @@ optional = true
[dependencies.time]
version = "0.3.7"
-features = [
- "formatting",
- "macros",
-]
+features = ["std"]
optional = true
+default-features = false
[dependencies.zstd]
-version = "0.11.0"
+version = "0.11.2"
optional = true
[dev-dependencies.bencher]
@@ -90,6 +89,13 @@ version = "0.1.5"
[dev-dependencies.getrandom]
version = "0.2.5"
+[dev-dependencies.time]
+version = "0.3.7"
+features = [
+ "formatting",
+ "macros",
+]
+
[dev-dependencies.walkdir]
version = "2.3.2"
diff --git a/vendor/zip/README.md b/vendor/zip/README.md
index 9b3c0599c..3754a7c0f 100644
--- a/vendor/zip/README.md
+++ b/vendor/zip/README.md
@@ -35,14 +35,14 @@ With all default features:
```toml
[dependencies]
-zip = "0.6.3"
+zip = "0.6.4"
```
Without the default features:
```toml
[dependencies]
-zip = { version = "0.6.3", default-features = false }
+zip = { version = "0.6.4", default-features = false }
```
The features available are:
diff --git a/vendor/zip/benches/read_metadata.rs b/vendor/zip/benches/read_metadata.rs
index 51f1f69e8..95334b1c5 100644
--- a/vendor/zip/benches/read_metadata.rs
+++ b/vendor/zip/benches/read_metadata.rs
@@ -17,10 +17,7 @@ fn generate_random_archive(count_files: usize, file_size: usize) -> Vec<u8> {
let bytes = vec![0u8; file_size];
for i in 0..count_files {
- let name = format!(
- "file_deadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef_{}.dat",
- i
- );
+ let name = format!("file_deadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef_{i}.dat");
writer.start_file(name, options).unwrap();
writer.write_all(&bytes).unwrap();
}
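This hunk, like the example-program hunks that follow, switches to inline format arguments (`format!("...{i}...")`), which capture identifiers directly and have been stable since Rust 1.58. A minimal sketch of the equivalence:

```rust
let i = 3;
let positional = format!("file_{}.dat", i);
let inline = format!("file_{i}.dat"); // the identifier is captured directly
assert_eq!(positional, inline);
```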
diff --git a/vendor/zip/examples/extract.rs b/vendor/zip/examples/extract.rs
index 7b8860ca0..308071626 100644
--- a/vendor/zip/examples/extract.rs
+++ b/vendor/zip/examples/extract.rs
@@ -12,7 +12,7 @@ fn real_main() -> i32 {
return 1;
}
let fname = std::path::Path::new(&*args[1]);
- let file = fs::File::open(&fname).unwrap();
+ let file = fs::File::open(fname).unwrap();
let mut archive = zip::ZipArchive::new(file).unwrap();
@@ -26,7 +26,7 @@ fn real_main() -> i32 {
{
let comment = file.comment();
if !comment.is_empty() {
- println!("File {} comment: {}", i, comment);
+ println!("File {i} comment: {comment}");
}
}
@@ -42,7 +42,7 @@ fn real_main() -> i32 {
);
if let Some(p) = outpath.parent() {
if !p.exists() {
- fs::create_dir_all(&p).unwrap();
+ fs::create_dir_all(p).unwrap();
}
}
let mut outfile = fs::File::create(&outpath).unwrap();
diff --git a/vendor/zip/examples/extract_lorem.rs b/vendor/zip/examples/extract_lorem.rs
index a34a04f43..bc50abe16 100644
--- a/vendor/zip/examples/extract_lorem.rs
+++ b/vendor/zip/examples/extract_lorem.rs
@@ -11,7 +11,7 @@ fn real_main() -> i32 {
return 1;
}
let fname = std::path::Path::new(&*args[1]);
- let zipfile = std::fs::File::open(&fname).unwrap();
+ let zipfile = std::fs::File::open(fname).unwrap();
let mut archive = zip::ZipArchive::new(zipfile).unwrap();
@@ -25,7 +25,7 @@ fn real_main() -> i32 {
let mut contents = String::new();
file.read_to_string(&mut contents).unwrap();
- println!("{}", contents);
+ println!("{contents}");
0
}
diff --git a/vendor/zip/examples/file_info.rs b/vendor/zip/examples/file_info.rs
index 64969b66c..6a2adc58e 100644
--- a/vendor/zip/examples/file_info.rs
+++ b/vendor/zip/examples/file_info.rs
@@ -12,7 +12,7 @@ fn real_main() -> i32 {
return 1;
}
let fname = std::path::Path::new(&*args[1]);
- let file = fs::File::open(&fname).unwrap();
+ let file = fs::File::open(fname).unwrap();
let reader = BufReader::new(file);
let mut archive = zip::ZipArchive::new(reader).unwrap();
@@ -30,7 +30,7 @@ fn real_main() -> i32 {
{
let comment = file.comment();
if !comment.is_empty() {
- println!("Entry {} comment: {}", i, comment);
+ println!("Entry {i} comment: {comment}");
}
}
diff --git a/vendor/zip/examples/stdin_info.rs b/vendor/zip/examples/stdin_info.rs
index 10d7aa8b8..a609916a0 100644
--- a/vendor/zip/examples/stdin_info.rs
+++ b/vendor/zip/examples/stdin_info.rs
@@ -20,12 +20,12 @@ fn real_main() -> i32 {
);
match file.read(&mut buf) {
Ok(n) => println!("The first {} bytes are: {:?}", n, &buf[0..n]),
- Err(e) => println!("Could not read the file: {:?}", e),
+ Err(e) => println!("Could not read the file: {e:?}"),
};
}
Ok(None) => break,
Err(e) => {
- println!("Error encountered while reading zip: {:?}", e);
+ println!("Error encountered while reading zip: {e:?}");
return 1;
}
}
diff --git a/vendor/zip/examples/write_dir.rs b/vendor/zip/examples/write_dir.rs
index 8cc561ffb..3b043528f 100644
--- a/vendor/zip/examples/write_dir.rs
+++ b/vendor/zip/examples/write_dir.rs
@@ -54,8 +54,8 @@ fn real_main() -> i32 {
continue;
}
match doit(src_dir, dst_file, method.unwrap()) {
- Ok(_) => println!("done: {} written to {}", src_dir, dst_file),
- Err(e) => println!("Error: {:?}", e),
+ Ok(_) => println!("done: {src_dir} written to {dst_file}"),
+ Err(e) => println!("Error: {e:?}"),
}
}
@@ -84,18 +84,18 @@ where
// Write file or directory explicitly
// Some unzip tools unzip files with directory paths correctly, some do not!
if path.is_file() {
- println!("adding file {:?} as {:?} ...", path, name);
+ println!("adding file {path:?} as {name:?} ...");
#[allow(deprecated)]
zip.start_file_from_path(name, options)?;
let mut f = File::open(path)?;
f.read_to_end(&mut buffer)?;
- zip.write_all(&*buffer)?;
+ zip.write_all(&buffer)?;
buffer.clear();
} else if !name.as_os_str().is_empty() {
// Only if not root! Avoids a path-spec warning
// and a "mapname conversion failed" error on unzip
- println!("adding dir {:?} as {:?} ...", path, name);
+ println!("adding dir {path:?} as {name:?} ...");
#[allow(deprecated)]
zip.add_directory_from_path(name, options)?;
}
@@ -114,7 +114,7 @@ fn doit(
}
let path = Path::new(dst_file);
- let file = File::create(&path).unwrap();
+ let file = File::create(path).unwrap();
let walkdir = WalkDir::new(src_dir);
let it = walkdir.into_iter();
diff --git a/vendor/zip/examples/write_sample.rs b/vendor/zip/examples/write_sample.rs
index b5749509e..2e45cb1ea 100644
--- a/vendor/zip/examples/write_sample.rs
+++ b/vendor/zip/examples/write_sample.rs
@@ -14,8 +14,8 @@ fn real_main() -> i32 {
let filename = &*args[1];
match doit(filename) {
- Ok(_) => println!("File written to {}", filename),
- Err(e) => println!("Error: {:?}", e),
+ Ok(_) => println!("File written to {filename}"),
+ Err(e) => println!("Error: {e:?}"),
}
0
@@ -23,7 +23,7 @@ fn real_main() -> i32 {
fn doit(filename: &str) -> zip::result::ZipResult<()> {
let path = std::path::Path::new(filename);
- let file = std::fs::File::create(&path).unwrap();
+ let file = std::fs::File::create(path).unwrap();
let mut zip = zip::ZipWriter::new(file);
diff --git a/vendor/zip/src/compression.rs b/vendor/zip/src/compression.rs
index abd8b5300..baec93994 100644
--- a/vendor/zip/src/compression.rs
+++ b/vendor/zip/src/compression.rs
@@ -141,7 +141,7 @@ impl CompressionMethod {
impl fmt::Display for CompressionMethod {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// Just duplicate what the Debug format looks like, i.e., the enum key:
- write!(f, "{:?}", self)
+ write!(f, "{self:?}")
}
}
@@ -195,8 +195,8 @@ mod test {
#[test]
fn to_display_fmt() {
fn check_match(method: CompressionMethod) {
- let debug_str = format!("{:?}", method);
- let display_str = format!("{}", method);
+ let debug_str = format!("{method:?}");
+ let display_str = format!("{method}");
assert_eq!(debug_str, display_str);
}
diff --git a/vendor/zip/src/read.rs b/vendor/zip/src/read.rs
index 728ddf579..dad20c260 100644
--- a/vendor/zip/src/read.rs
+++ b/vendor/zip/src/read.rs
@@ -348,7 +348,9 @@ impl<R: Read + io::Seek> ZipArchive<R> {
Some(locator64) => {
// If we got here, this is indeed a ZIP64 file.
- if footer.disk_number as u32 != locator64.disk_with_central_directory {
+ if !footer.record_too_small()
+ && footer.disk_number as u32 != locator64.disk_with_central_directory
+ {
return unsupported_zip_error(
"Support for multi-disk files is not implemented",
);
@@ -401,7 +403,7 @@ impl<R: Read + io::Seek> ZipArchive<R> {
pub fn new(mut reader: R) -> ZipResult<ZipArchive<R>> {
let (footer, cde_start_pos) = spec::CentralDirectoryEnd::find_and_parse(&mut reader)?;
- if footer.disk_number != footer.disk_with_central_directory {
+ if !footer.record_too_small() && footer.disk_number != footer.disk_with_central_directory {
return unsupported_zip_error("Support for multi-disk files is not implemented");
}
@@ -461,7 +463,7 @@ impl<R: Read + io::Seek> ZipArchive<R> {
} else {
if let Some(p) = outpath.parent() {
if !p.exists() {
- fs::create_dir_all(&p)?;
+ fs::create_dir_all(p)?;
}
}
let mut outfile = fs::File::create(&outpath)?;
@@ -681,11 +683,11 @@ pub(crate) fn central_header_to_zip_file<R: Read + io::Seek>(
reader.read_exact(&mut file_comment_raw)?;
let file_name = match is_utf8 {
- true => String::from_utf8_lossy(&*file_name_raw).into_owned(),
+ true => String::from_utf8_lossy(&file_name_raw).into_owned(),
false => file_name_raw.clone().from_cp437(),
};
let file_comment = match is_utf8 {
- true => String::from_utf8_lossy(&*file_comment_raw).into_owned(),
+ true => String::from_utf8_lossy(&file_comment_raw).into_owned(),
false => file_comment_raw.from_cp437(),
};
@@ -920,12 +922,12 @@ impl<'a> ZipFile<'a> {
self.data.compression_method
}
- /// Get the size of the file in the archive
+ /// Get the size of the file, in bytes, in the archive
pub fn compressed_size(&self) -> u64 {
self.data.compressed_size
}
- /// Get the size of the file when uncompressed
+ /// Get the size of the file, in bytes, when uncompressed
pub fn size(&self) -> u64 {
self.data.uncompressed_size
}
@@ -1085,7 +1087,7 @@ pub fn read_zipfile_from_stream<'a, R: io::Read>(
reader.read_exact(&mut extra_field)?;
let file_name = match is_utf8 {
- true => String::from_utf8_lossy(&*file_name_raw).into_owned(),
+ true => String::from_utf8_lossy(&file_name_raw).into_owned(),
false => file_name_raw.clone().from_cp437(),
};
@@ -1129,7 +1131,7 @@ pub fn read_zipfile_from_stream<'a, R: io::Read>(
return unsupported_zip_error("The file length is not available in the local header");
}
- let limit_reader = (reader as &'a mut dyn io::Read).take(result.compressed_size as u64);
+ let limit_reader = (reader as &'a mut dyn io::Read).take(result.compressed_size);
let result_crc32 = result.crc32;
let result_compression_method = result.compression_method;
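
The read-side hunks above clarify that compressed_size() and size() report bytes, and gate the multi-disk check behind record_too_small() so ZIP64 archives are not rejected. A minimal sketch of the reader API touched here, assuming a zip dependency with the signatures shown in this diff:

    // List each entry with its compressed and uncompressed size in bytes.
    use std::fs::File;
    use std::io::BufReader;

    fn list_entries(path: &str) -> zip::result::ZipResult<()> {
        let reader = BufReader::new(File::open(path)?); // io::Error converts into ZipError
        let mut archive = zip::ZipArchive::new(reader)?;
        for i in 0..archive.len() {
            let entry = archive.by_index(i)?;
            println!(
                "{}: {} bytes compressed, {} bytes uncompressed",
                entry.name(),
                entry.compressed_size(),
                entry.size()
            );
        }
        Ok(())
    }
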
diff --git a/vendor/zip/src/result.rs b/vendor/zip/src/result.rs
index 72a30e488..00d558cb4 100644
--- a/vendor/zip/src/result.rs
+++ b/vendor/zip/src/result.rs
@@ -44,9 +44,9 @@ impl From<io::Error> for ZipError {
impl fmt::Display for ZipError {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
match self {
- ZipError::Io(err) => write!(fmt, "{}", err),
- ZipError::InvalidArchive(err) => write!(fmt, "invalid Zip archive: {}", err),
- ZipError::UnsupportedArchive(err) => write!(fmt, "unsupported Zip archive: {}", err),
+ ZipError::Io(err) => write!(fmt, "{err}"),
+ ZipError::InvalidArchive(err) => write!(fmt, "invalid Zip archive: {err}"),
+ ZipError::UnsupportedArchive(err) => write!(fmt, "unsupported Zip archive: {err}"),
ZipError::FileNotFound => write!(fmt, "specified file not found in archive"),
}
}
@@ -81,3 +81,18 @@ impl From<ZipError> for io::Error {
io::Error::new(io::ErrorKind::Other, err)
}
}
+
+/// Error type for time parsing
+#[derive(Debug)]
+pub struct DateTimeRangeError;
+
+impl fmt::Display for DateTimeRangeError {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ write!(
+ fmt,
+ "a date could not be represented within the bounds the MS-DOS date range (1980-2107)"
+ )
+ }
+}
+
+impl Error for DateTimeRangeError {}
diff --git a/vendor/zip/src/spec.rs b/vendor/zip/src/spec.rs
index 3ffcf7323..1d8cb0a64 100644
--- a/vendor/zip/src/spec.rs
+++ b/vendor/zip/src/spec.rs
@@ -23,6 +23,18 @@ pub struct CentralDirectoryEnd {
}
impl CentralDirectoryEnd {
+ // Per spec 4.4.1.4 - the fields of a CentralDirectoryEnd record might be too small
+ // to hold the required values. In this case the file SHOULD contain a ZIP64 format
+ // record, and the overflowed fields of this record will be set to -1.
+ pub(crate) fn record_too_small(&self) -> bool {
+ self.disk_number == 0xFFFF
+ || self.disk_with_central_directory == 0xFFFF
+ || self.number_of_files_on_this_disk == 0xFFFF
+ || self.number_of_files == 0xFFFF
+ || self.central_directory_size == 0xFFFFFFFF
+ || self.central_directory_offset == 0xFFFFFFFF
+ }
+
pub fn parse<T: Read>(reader: &mut T) -> ZipResult<CentralDirectoryEnd> {
let magic = reader.read_u32::<LittleEndian>()?;
if magic != CENTRAL_DIRECTORY_END_SIGNATURE {
@@ -64,12 +76,12 @@ impl CentralDirectoryEnd {
let mut pos = file_length - HEADER_SIZE;
while pos >= search_upper_bound {
- reader.seek(io::SeekFrom::Start(pos as u64))?;
+ reader.seek(io::SeekFrom::Start(pos))?;
if reader.read_u32::<LittleEndian>()? == CENTRAL_DIRECTORY_END_SIGNATURE {
reader.seek(io::SeekFrom::Current(
BYTES_BETWEEN_MAGIC_AND_COMMENT_SIZE as i64,
))?;
- let cde_start_pos = reader.seek(io::SeekFrom::Start(pos as u64))?;
+ let cde_start_pos = reader.seek(io::SeekFrom::Start(pos))?;
return CentralDirectoryEnd::parse(reader).map(|cde| (cde, cde_start_pos));
}
pos = match pos.checked_sub(1) {
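
record_too_small() keys off the ZIP64 sentinel values from APPNOTE section 4.4.1.4. A standalone sketch of that convention (illustrative helpers, not the crate's code):

    // A classic end-of-central-directory field set to all ones means
    // "the real value lives in the ZIP64 end-of-central-directory record".
    fn u16_overflowed(field: u16) -> bool {
        field == u16::MAX // 0xFFFF
    }

    fn u32_overflowed(field: u32) -> bool {
        field == u32::MAX // 0xFFFF_FFFF
    }

    fn main() {
        assert!(!u16_overflowed(3));     // small archive: counts fit in the classic record
        assert!(u16_overflowed(0xFFFF)); // overflowed: consult the ZIP64 record instead
    }
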
diff --git a/vendor/zip/src/types.rs b/vendor/zip/src/types.rs
index b65fad401..ad3a5700b 100644
--- a/vendor/zip/src/types.rs
+++ b/vendor/zip/src/types.rs
@@ -1,13 +1,16 @@
//! Types that specify what is contained in a ZIP.
-#[cfg(doc)]
-use {crate::read::ZipFile, crate::write::FileOptions};
-
+#[cfg(feature = "time")]
+use std::convert::{TryFrom, TryInto};
#[cfg(not(any(
all(target_arch = "arm", target_pointer_width = "32"),
target_arch = "mips",
target_arch = "powerpc"
)))]
use std::sync::atomic;
+#[cfg(not(feature = "time"))]
+use std::time::SystemTime;
+#[cfg(doc)]
+use {crate::read::ZipFile, crate::write::FileOptions};
#[cfg(any(
all(target_arch = "arm", target_pointer_width = "32"),
@@ -42,9 +45,11 @@ mod atomic {
}
#[cfg(feature = "time")]
+use crate::result::DateTimeRangeError;
+#[cfg(feature = "time")]
use time::{error::ComponentRange, Date, Month, OffsetDateTime, PrimitiveDateTime, Time};
-#[derive(Clone, Copy, Debug, PartialEq)]
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum System {
Dos = 0,
Unix = 3,
@@ -115,7 +120,7 @@ impl DateTime {
let years = (datepart & 0b1111111000000000) >> 9;
DateTime {
- year: (years + 1980) as u16,
+ year: years + 1980,
month: months as u8,
day: days as u8,
hour: hours as u8,
@@ -143,10 +148,8 @@ impl DateTime {
second: u8,
) -> Result<DateTime, ()> {
if (1980..=2107).contains(&year)
- && month >= 1
- && month <= 12
- && day >= 1
- && day <= 31
+ && (1..=12).contains(&month)
+ && (1..=31).contains(&day)
&& hour <= 23
&& minute <= 59
&& second <= 60
@@ -169,19 +172,9 @@ impl DateTime {
///
/// Returns `Err` when this object is out of bounds
#[allow(clippy::result_unit_err)]
+ #[deprecated(note = "use `DateTime::try_from()`")]
pub fn from_time(dt: OffsetDateTime) -> Result<DateTime, ()> {
- if dt.year() >= 1980 && dt.year() <= 2107 {
- Ok(DateTime {
- year: (dt.year()) as u16,
- month: (dt.month()) as u8,
- day: dt.day() as u8,
- hour: dt.hour() as u8,
- minute: dt.minute() as u8,
- second: dt.second() as u8,
- })
- } else {
- Err(())
- }
+ dt.try_into().map_err(|_err| ())
}
/// Gets the time portion of this datetime in the msdos representation
@@ -197,8 +190,6 @@ impl DateTime {
#[cfg(feature = "time")]
/// Converts the DateTime to an OffsetDateTime structure
pub fn to_time(&self) -> Result<OffsetDateTime, ComponentRange> {
- use std::convert::TryFrom;
-
let date =
Date::from_calendar_date(self.year as i32, Month::try_from(self.month)?, self.day)?;
let time = Time::from_hms(self.hour, self.minute, self.second)?;
@@ -256,6 +247,26 @@ impl DateTime {
}
}
+#[cfg(feature = "time")]
+impl TryFrom<OffsetDateTime> for DateTime {
+ type Error = DateTimeRangeError;
+
+ fn try_from(dt: OffsetDateTime) -> Result<Self, Self::Error> {
+ if dt.year() >= 1980 && dt.year() <= 2107 {
+ Ok(DateTime {
+ year: (dt.year()) as u16,
+ month: (dt.month()) as u8,
+ day: dt.day(),
+ hour: dt.hour(),
+ minute: dt.minute(),
+ second: dt.second(),
+ })
+ } else {
+ Err(DateTimeRangeError)
+ }
+ }
+}
+
pub const DEFAULT_VERSION: u8 = 46;
/// A type like `AtomicU64` except it implements `Clone` and has predefined
@@ -500,20 +511,43 @@ mod test {
#[cfg(feature = "time")]
#[test]
fn datetime_from_time_bounds() {
+ use std::convert::TryFrom;
+
+ use super::DateTime;
+ use time::macros::datetime;
+
+ // 1979-12-31 23:59:59
+ assert!(DateTime::try_from(datetime!(1979-12-31 23:59:59 UTC)).is_err());
+
+ // 1980-01-01 00:00:00
+ assert!(DateTime::try_from(datetime!(1980-01-01 00:00:00 UTC)).is_ok());
+
+ // 2107-12-31 23:59:59
+ assert!(DateTime::try_from(datetime!(2107-12-31 23:59:59 UTC)).is_ok());
+
+ // 2108-01-01 00:00:00
+ assert!(DateTime::try_from(datetime!(2108-01-01 00:00:00 UTC)).is_err());
+ }
+
+ #[cfg(feature = "time")]
+ #[test]
+ fn datetime_try_from_bounds() {
+ use std::convert::TryFrom;
+
use super::DateTime;
use time::macros::datetime;
// 1979-12-31 23:59:59
- assert!(DateTime::from_time(datetime!(1979-12-31 23:59:59 UTC)).is_err());
+ assert!(DateTime::try_from(datetime!(1979-12-31 23:59:59 UTC)).is_err());
// 1980-01-01 00:00:00
- assert!(DateTime::from_time(datetime!(1980-01-01 00:00:00 UTC)).is_ok());
+ assert!(DateTime::try_from(datetime!(1980-01-01 00:00:00 UTC)).is_ok());
// 2107-12-31 23:59:59
- assert!(DateTime::from_time(datetime!(2107-12-31 23:59:59 UTC)).is_ok());
+ assert!(DateTime::try_from(datetime!(2107-12-31 23:59:59 UTC)).is_ok());
// 2108-01-01 00:00:00
- assert!(DateTime::from_time(datetime!(2108-01-01 00:00:00 UTC)).is_err());
+ assert!(DateTime::try_from(datetime!(2108-01-01 00:00:00 UTC)).is_err());
}
#[test]
@@ -564,10 +598,11 @@ mod test {
#[test]
fn time_at_january() {
use super::DateTime;
+ use std::convert::TryFrom;
// 2020-01-01 00:00:00
let clock = OffsetDateTime::from_unix_timestamp(1_577_836_800).unwrap();
- assert!(DateTime::from_time(clock).is_ok());
+ assert!(DateTime::try_from(clock).is_ok());
}
}
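
The types.rs hunks replace DateTime::from_time (now deprecated) with a TryFrom<OffsetDateTime> impl that fails with DateTimeRangeError outside 1980-2107. A hedged usage sketch, assuming the "time" feature is enabled and DateTime is re-exported at the crate root as zip::DateTime:

    use std::convert::TryFrom;
    use time::OffsetDateTime;
    use zip::DateTime;

    fn main() {
        // Converts only if the timestamp fits the MS-DOS range (1980-2107).
        match DateTime::try_from(OffsetDateTime::now_utc()) {
            Ok(_) => println!("representable as an MS-DOS datetime"),
            Err(e) => println!("not representable: {e}"),
        }
    }
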
diff --git a/vendor/zip/src/write.rs b/vendor/zip/src/write.rs
index 61ce378c0..14252b4d5 100644
--- a/vendor/zip/src/write.rs
+++ b/vendor/zip/src/write.rs
@@ -7,6 +7,7 @@ use crate::spec;
use crate::types::{AtomicU64, DateTime, System, ZipFileData, DEFAULT_VERSION};
use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
use crc32fast::Hasher;
+use std::convert::TryInto;
use std::default::Default;
use std::io;
use std::io::prelude::*;
@@ -110,31 +111,6 @@ pub struct FileOptions {
}
impl FileOptions {
- /// Construct a new FileOptions object
- pub fn default() -> FileOptions {
- FileOptions {
- #[cfg(any(
- feature = "deflate",
- feature = "deflate-miniz",
- feature = "deflate-zlib"
- ))]
- compression_method: CompressionMethod::Deflated,
- #[cfg(not(any(
- feature = "deflate",
- feature = "deflate-miniz",
- feature = "deflate-zlib"
- )))]
- compression_method: CompressionMethod::Stored,
- compression_level: None,
- #[cfg(feature = "time")]
- last_modified_time: DateTime::from_time(OffsetDateTime::now_utc()).unwrap_or_default(),
- #[cfg(not(feature = "time"))]
- last_modified_time: DateTime::default(),
- permissions: None,
- large_file: false,
- }
- }
-
/// Set the compression method for the new file
///
/// The default is `CompressionMethod::Deflated`. If the deflate compression feature is
@@ -198,8 +174,29 @@ impl FileOptions {
}
impl Default for FileOptions {
+ /// Construct a new FileOptions object
fn default() -> Self {
- Self::default()
+ Self {
+ #[cfg(any(
+ feature = "deflate",
+ feature = "deflate-miniz",
+ feature = "deflate-zlib"
+ ))]
+ compression_method: CompressionMethod::Deflated,
+ #[cfg(not(any(
+ feature = "deflate",
+ feature = "deflate-miniz",
+ feature = "deflate-zlib"
+ )))]
+ compression_method: CompressionMethod::Stored,
+ compression_level: None,
+ #[cfg(feature = "time")]
+ last_modified_time: OffsetDateTime::now_utc().try_into().unwrap_or_default(),
+ #[cfg(not(feature = "time"))]
+ last_modified_time: DateTime::default(),
+ permissions: None,
+ large_file: false,
+ }
}
}
@@ -848,7 +845,7 @@ impl<W: Write + io::Seek> Drop for ZipWriter<W> {
fn drop(&mut self) {
if !self.inner.is_closed() {
if let Err(e) = self.finalize() {
- let _ = write!(io::stderr(), "ZipWriter drop failed: {:?}", e);
+ let _ = write!(io::stderr(), "ZipWriter drop failed: {e:?}");
}
}
}
@@ -1211,8 +1208,7 @@ fn validate_extra_data(file: &ZipFileData) -> ZipResult<()> {
return Err(ZipError::Io(io::Error::new(
io::ErrorKind::Other,
format!(
- "Extra data header ID {:#06} requires crate feature \"unreserved\"",
- kind,
+ "Extra data header ID {kind:#06} requires crate feature \"unreserved\"",
),
)));
}
@@ -1301,7 +1297,7 @@ fn path_to_string(path: &std::path::Path) -> String {
if !path_str.is_empty() {
path_str.push('/');
}
- path_str.push_str(&*os_str.to_string_lossy());
+ path_str.push_str(&os_str.to_string_lossy());
}
}
path_str
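
With the inherent FileOptions::default() removed, callers now go through the Default trait; the builder-style setters (compression_method(), etc.) are unchanged. A minimal sketch of writing one entry in memory, assuming the zip 0.6-style API shown in this diff:

    use std::io::{Cursor, Write};
    use zip::write::{FileOptions, ZipWriter};
    use zip::CompressionMethod;

    fn write_one_entry() -> zip::result::ZipResult<Vec<u8>> {
        let mut writer = ZipWriter::new(Cursor::new(Vec::new()));
        // The default compression method depends on enabled features; pin it explicitly here.
        let options = FileOptions::default().compression_method(CompressionMethod::Stored);
        writer.start_file("hello.txt", options)?;
        writer.write_all(b"hello, zip")?;
        Ok(writer.finish()?.into_inner())
    }
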
diff --git a/vendor/zip/tests/end_to_end.rs b/vendor/zip/tests/end_to_end.rs
index 25d0c54d0..09e7ce47e 100644
--- a/vendor/zip/tests/end_to_end.rs
+++ b/vendor/zip/tests/end_to_end.rs
@@ -13,7 +13,7 @@ fn end_to_end() {
for &method in SUPPORTED_COMPRESSION_METHODS {
let file = &mut Cursor::new(Vec::new());
- println!("Writing file with {} compression", method);
+ println!("Writing file with {method} compression");
write_test_archive(file, method).expect("Couldn't write test zip archive");
println!("Checking file contents");
diff --git a/vendor/zip/tests/zip64_large.rs b/vendor/zip/tests/zip64_large.rs
index 3d10a3181..468ef198f 100644
--- a/vendor/zip/tests/zip64_large.rs
+++ b/vendor/zip/tests/zip64_large.rs
@@ -205,7 +205,7 @@ fn zip64_large() {
match file.read_exact(&mut buf) {
Ok(()) => println!("The first {} bytes are: {:?}", buf.len(), buf),
- Err(e) => println!("Could not read the file: {:?}", e),
+ Err(e) => println!("Could not read the file: {e:?}"),
};
}
}