Diffstat (limited to 'rust/vendor/bendy')
-rw-r--r--  rust/vendor/bendy/.cargo-checksum.json | 1
-rw-r--r--  rust/vendor/bendy/CHANGELOG.md | 85
-rw-r--r--  rust/vendor/bendy/Cargo.lock | 230
-rw-r--r--  rust/vendor/bendy/Cargo.toml | 65
-rw-r--r--  rust/vendor/bendy/LICENSE-BSD3 | 29
-rw-r--r--  rust/vendor/bendy/README.md | 656
-rw-r--r--  rust/vendor/bendy/examples/decode_torrent.rs | 212
-rw-r--r--  rust/vendor/bendy/examples/encode_torrent.rs | 115
-rw-r--r--  rust/vendor/bendy/examples/torrent_files/debian-9.4.0-amd64-netinst.iso.torrent | bin 0 -> 23821 bytes
-rw-r--r--  rust/vendor/bendy/examples/torrent_files/pieces.iso | bin 0 -> 23280 bytes
-rwxr-xr-x  rust/vendor/bendy/githooks/pre-commit/rustfmt-up-to-date | 13
-rw-r--r--  rust/vendor/bendy/rustfmt.toml | 10
-rw-r--r--  rust/vendor/bendy/src/assert_matches.rs | 12
-rw-r--r--  rust/vendor/bendy/src/decoding.rs | 76
-rw-r--r--  rust/vendor/bendy/src/decoding/decoder.rs | 803
-rw-r--r--  rust/vendor/bendy/src/decoding/error.rs | 155
-rw-r--r--  rust/vendor/bendy/src/decoding/from_bencode.rs | 216
-rw-r--r--  rust/vendor/bendy/src/decoding/object.rs | 394
-rw-r--r--  rust/vendor/bendy/src/encoding.rs | 129
-rw-r--r--  rust/vendor/bendy/src/encoding/encoder.rs | 482
-rw-r--r--  rust/vendor/bendy/src/encoding/error.rs | 56
-rw-r--r--  rust/vendor/bendy/src/encoding/printable_integer.rs | 15
-rw-r--r--  rust/vendor/bendy/src/encoding/to_bencode.rs | 265
-rw-r--r--  rust/vendor/bendy/src/lib.rs | 23
-rw-r--r--  rust/vendor/bendy/src/serde.rs | 553
-rw-r--r--  rust/vendor/bendy/src/serde/common.rs | 30
-rw-r--r--  rust/vendor/bendy/src/serde/de.rs | 487
-rw-r--r--  rust/vendor/bendy/src/serde/error.rs | 108
-rw-r--r--  rust/vendor/bendy/src/serde/ser.rs | 368
-rw-r--r--  rust/vendor/bendy/src/serde/ser/map_serializer.rs | 80
-rw-r--r--  rust/vendor/bendy/src/serde/ser/struct_serializer.rs | 64
-rw-r--r--  rust/vendor/bendy/src/state_tracker.rs | 7
-rw-r--r--  rust/vendor/bendy/src/state_tracker/stack.rs | 36
-rw-r--r--  rust/vendor/bendy/src/state_tracker/state.rs | 159
-rw-r--r--  rust/vendor/bendy/src/state_tracker/structure_error.rs | 44
-rw-r--r--  rust/vendor/bendy/src/state_tracker/token.rs | 26
-rw-r--r--  rust/vendor/bendy/src/value.rs | 312
-rw-r--r--  rust/vendor/bendy/tests/core_test.rs | 428
-rw-r--r--  rust/vendor/bendy/tests/readme.rs | 365
-rw-r--r--  rust/vendor/bendy/tests/struct_codec.rs | 89
40 files changed, 7198 insertions, 0 deletions
diff --git a/rust/vendor/bendy/.cargo-checksum.json b/rust/vendor/bendy/.cargo-checksum.json
new file mode 100644
index 0000000..657986f
--- /dev/null
+++ b/rust/vendor/bendy/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"CHANGELOG.md":"76048bcca30ad48e5f0a2ca26ff5cf66bf5cae039d023a9ec9879cbfaf12f115","Cargo.lock":"3ebdb39c4231e54ea98baf84c37dfb656b50a360bcb63dc2b6bcf4ddfa107a2d","Cargo.toml":"ba3dece047020295f58019611895c2bc434c589c77f6a04c20be6690068f67d5","LICENSE-BSD3":"6a72844d9394477bbf7fd3155782e9f092f4c1cb73457ae9c5770c693062525a","README.md":"afe9a0e59e02befb402180173fd0efdd56ba7a87f6756d68153cab2046fc14d7","examples/decode_torrent.rs":"678d1f2df6bed6f973299f86a78da94f9219b1e7635956f0f7688745427c15af","examples/encode_torrent.rs":"07790022ba6517841192bb12a06fdffd1320364252729fc8754cee6702e26fd5","examples/torrent_files/debian-9.4.0-amd64-netinst.iso.torrent":"b5424776cdd0e9f6ffcd9df93cb966939a801fcf14921e0d6fb2dbc86c17b9c6","examples/torrent_files/pieces.iso":"827e1dddd6ea305aac24c8e28967db8ce0797d915673442addc5cbd9c8380807","githooks/pre-commit/rustfmt-up-to-date":"feb787a6c54f0262b0349e9a72d0635831cf8279065a9b7f3658dd305507929e","rustfmt.toml":"d201a44d9d72ddd54c851047714e54654ad23bd88c63781a7a9557228475f761","src/assert_matches.rs":"d6e58d985eceedd5657a3f6a4e99d178538abd07ac4950e0ebdfebe94ddeefa1","src/decoding.rs":"d628328be379f51be1dc29f2aa127f8cb2b5e0b90d96441403d30941ec36a63b","src/decoding/decoder.rs":"2e3718c2be3aefc9f8b9fea9b6baeda7c96e619c414be87aef572bea3a7a751e","src/decoding/error.rs":"36abd154096715ba882e902c0be1621291b8fa3fc793da97cff7dd70993a45b2","src/decoding/from_bencode.rs":"f9acd01d7b0088d63c3552837cc0eb01e4792c18c832d6c200faa182b3057a20","src/decoding/object.rs":"568a3bfca62075d04b25897b7be54d339b5fc51b2ae774bdac8ee9ced4630c4e","src/encoding.rs":"8461582f1d2644b77a1b0f7c08fe7a0d31d298f43873c962fac6d0f6593209e4","src/encoding/encoder.rs":"1ba21ef1296571e3a3ca7b45da07cf2c0d4f635132bda7006f69d0b6a9c0bfb6","src/encoding/error.rs":"3ae79670c84ef2d7fb3cc84efebc1240871d2b98a08e02caf91a1e9e94d16920","src/encoding/printable_integer.rs":"f059dbc63d783fdc86333bc624fb90fd04e1a062191d58f9a6e58375a0c09186","src/encoding/to_bencode.rs":"78ac9cec758e47d2a306f546471af43a49af1a45a1485e602ef8026abd0332f4","src/lib.rs":"2ab7f24c37608659e21f50beb84fb83e410bb5947eebbdf85050bcc303b4a776","src/serde.rs":"197039a7db46894c71659cc159cfcf48ff4983c3d1d97f39c1b3a0ade8342ef5","src/serde/common.rs":"6fd307f8409e6391edd4ff92b10b199d131e0580743d668fd809ddd423711abf","src/serde/de.rs":"c66d79e9dfe700c61e5f9a48e77c4b03bbc1554c4f1a928fb774b860ad13a986","src/serde/error.rs":"4e7dbe32b0e28df82d2cd0945d398af2d57d58c4dbf05d573b4b98d94de85f80","src/serde/ser.rs":"2a71e35f9a338e3a790277364c8a8a63abe4325944c8a9d73285ff5d36f6f3dc","src/serde/ser/map_serializer.rs":"939bd057952beb206021dbfd74a2a0bba46e8138285cdd52b47eb826e21e4c74","src/serde/ser/struct_serializer.rs":"91c8cf7e2138365e7d7ef2463e855b94593d004e6a97f96347b508ca9a6a6ce1","src/state_tracker.rs":"e555f0870409b9b3de012f91061f3024ff84d0726fdd0abe4562fb9ea8693883","src/state_tracker/stack.rs":"eed758a7452ea418342efe05ac2cbab45123d60140b7a9de46204845491d030c","src/state_tracker/state.rs":"4c1ee8aabc70fb83e84fb5d3c6a5b9a2b72d87c1e42421d41efd633ce5d0c9e8","src/state_tracker/structure_error.rs":"79c7af717e9da3cce7c1f1885c3af84bc0cdf710e4836246a11c3b5e115294ee","src/state_tracker/token.rs":"6777000f5231647aaa881987705f05c8f8df28d23da53360f221514cd506d00e","src/value.rs":"4b1b0425775e3d6fa4dab37d0695eb334be06161d99d64af45f713af711c6d21","tests/core_test.rs":"ddba3330df107428bfabe409a268a68eee62a2f58add20ee8ab98534f78b4971","tests/readme.rs":"2465b5d6004ece830de7234c66f1f35d1dfda3aed0a83bccd98be101ae3276b0","tests/struct_codec.rs":"f706a657a3ecfff59d6e9ba38f86
7a81d9288d9b0a01b6978fbfc0f378de534d"},"package":"8133e404c8bec821e531f347dab1247bf64f60882826e7228f8ffeb33a35a658"} \ No newline at end of file
diff --git a/rust/vendor/bendy/CHANGELOG.md b/rust/vendor/bendy/CHANGELOG.md
new file mode 100644
index 0000000..a7f1a2e
--- /dev/null
+++ b/rust/vendor/bendy/CHANGELOG.md
@@ -0,0 +1,85 @@
+# Changelog
+
+All notable changes to this project will be documented in this file.
+
+## Unreleased
+
+## 0.3.2 (2020/06/04)
+
+- Fix lifetime on Deserializer implementation for Value (thanks @euclio)
+- Many documentation fixes (thanks @casey)
+
+## 0.3.1 (2020/05/07)
+
+- Bugfix release allowing generic values to be contained within lists or maps
+
+## 0.3.0 (2020/03/13)
+
+- Added serde support
+- Added generic value type that can represent any Bencode value
+
+## 0.2.2 (2020/01/29)
+
+- Make the `no_std` api match the `std` api a little bit more closely.
+
+## 0.2.1 (2019/09/03)
+
+- Add missing [`FromBencode`] implementation for [`BTreeMap`].
+- Introduce `std` as default enabled feature.
+ - Disabling this feature makes bendy `no_std` compatible.
+ - This currently requires that the target provides allocator support and
+ also supports `atomic_cas` as bendy contains a default [`ToBencode`]
+ implementation for `Arc<T: ToBencode>`.
+- Update minimal required rustc version to v1.36 (to use `extern crate alloc`
+ inside tests and examples).
+
+## 0.2.0 (2019/02/28)
+- Add new `try_into_*` utility methods on [`Object`].
+- Introduce ...
+ - [`FromBencode`] trait for simpler decoding.
+ - a high level encoding [`Error`][`EncodingError`] type.
+ - a high level decoding [`Error`][`DecodingError`] type.
+ - [`ResultExt`] decoding trait to improve error handling.
+- Switched to edition 2018 and the latest rustfmt version.
+
+**Breaking Changes**
+
+- Remove [`Error`] from the public API.
+- Move [`Token`] from [`decoder`] into [`state_tracker`] submodule.
+- Rename ...
+ - [`encoder`] submodule into [`encoding`].
+ - [`decoder`] submodule into [`decoding`].
+ - [`Encodable`] trait into [`ToBencode`].
+- Changed signatures of all `_or_err` methods on [`Object`].
+- Replaced all occurrences of [`Error`] inside the API with the new high level decoding
+ [`Error`][`DecodingError`] and encoding [`Error`][`EncodingError`].
+
+## 0.1.2 (2018/08/14)
+- Add [`AsRef<[u8]>`][`AsRef`] and [`From<&[u8]>`][`From`] for [`AsString`] if the content supports them.
+
+## 0.1.1 (2018/08/07)
+- Add missing trait derives for the [`AsString`] encoding wrapper.
+
+## 0.1.0 (2018/07/24)
+Initial release
+
+<!-- -->
+
+[`AsRef`]: https://doc.rust-lang.org/std/convert/trait.AsRef.html
+[`AsString`]: https://docs.rs/bendy/latest/bendy/encoding/struct.AsString.html
+[`BTreeMap`]: https://doc.rust-lang.org/std/collections/struct.BTreeMap.html
+[`decoder`]: https://docs.rs/bendy/0.1.2/bendy/decoder/index.html
+[`decoding`]: https://docs.rs/bendy/latest/bendy/decoding/index.html
+[`DecodingError`]: https://docs.rs/bendy/latest/bendy/decoding/struct.Error.html
+[`Encodable`]: https://docs.rs/bendy/0.1.2/bendy/encoder/trait.Encodable.html
+[`encoder`]: https://docs.rs/bendy/0.1.2/bendy/encoder/index.html
+[`encoding`]: https://docs.rs/bendy/latest/bendy/encoding/index.html
+[`EncodingError`]: https://docs.rs/bendy/latest/bendy/encoding/struct.Error.html
+[`Error`]: https://docs.rs/bendy/0.1.2/bendy/enum.Error.html
+[`From`]: https://doc.rust-lang.org/std/convert/trait.From.html
+[`FromBencode`]: https://docs.rs/bendy/latest/bendy/decoding/trait.FromBencode.html
+[`Object`]: https://docs.rs/bendy/latest/bendy/decoding/enum.Object.html
+[`ResultExt`]: https://docs.rs/bendy/latest/bendy/decoding/trait.ResultExt.html
+[`state_tracker`]: https://docs.rs/bendy/latest/bendy/state_tracker/index.html
+[`ToBencode`]: https://docs.rs/bendy/latest/bendy/encoding/trait.ToBencode.html
+[`Token`]: https://docs.rs/bendy/latest/bendy/state_tracker/enum.Token.html
diff --git a/rust/vendor/bendy/Cargo.lock b/rust/vendor/bendy/Cargo.lock
new file mode 100644
index 0000000..9088159
--- /dev/null
+++ b/rust/vendor/bendy/Cargo.lock
@@ -0,0 +1,230 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+[[package]]
+name = "addr2line"
+version = "0.14.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7c0929d69e78dd9bf5408269919fcbcaeb2e35e5d43e5815517cdc6a8e11a423"
+dependencies = [
+ "gimli",
+]
+
+[[package]]
+name = "adler"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ee2a4ec343196209d6594e19543ae87a39f96d5534d7174822a3ad825dd6ed7e"
+
+[[package]]
+name = "aho-corasick"
+version = "0.7.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7404febffaa47dac81aa44dba71523c9d069b1bdc50a77db41195149e17f68e5"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "autocfg"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a"
+
+[[package]]
+name = "backtrace"
+version = "0.3.54"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2baad346b2d4e94a24347adeee9c7a93f412ee94b9cc26e5b59dea23848e9f28"
+dependencies = [
+ "addr2line",
+ "cfg-if",
+ "libc",
+ "miniz_oxide",
+ "object",
+ "rustc-demangle",
+]
+
+[[package]]
+name = "bendy"
+version = "0.3.3"
+dependencies = [
+ "failure",
+ "regex",
+ "serde",
+ "serde_bytes",
+ "serde_derive",
+]
+
+[[package]]
+name = "cfg-if"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
+
+[[package]]
+name = "failure"
+version = "0.1.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d32e9bd16cc02eae7db7ef620b392808b89f6a5e16bb3497d159c6b92a0f4f86"
+dependencies = [
+ "backtrace",
+ "failure_derive",
+]
+
+[[package]]
+name = "failure_derive"
+version = "0.1.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "aa4da3c766cd7a0db8242e326e9e4e081edd567072893ed320008189715366a4"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+ "synstructure",
+]
+
+[[package]]
+name = "gimli"
+version = "0.23.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f6503fe142514ca4799d4c26297c4248239fe8838d827db6bd6065c6ed29a6ce"
+
+[[package]]
+name = "lazy_static"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
+
+[[package]]
+name = "libc"
+version = "0.2.80"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4d58d1b70b004888f764dfbf6a26a3b0342a1632d33968e4a179d8011c760614"
+
+[[package]]
+name = "memchr"
+version = "2.3.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0ee1c47aaa256ecabcaea351eae4a9b01ef39ed810004e298d2511ed284b1525"
+
+[[package]]
+name = "miniz_oxide"
+version = "0.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0f2d26ec3309788e423cfbf68ad1800f061638098d76a83681af979dc4eda19d"
+dependencies = [
+ "adler",
+ "autocfg",
+]
+
+[[package]]
+name = "object"
+version = "0.22.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8d3b63360ec3cb337817c2dbd47ab4a0f170d285d8e5a2064600f3def1402397"
+
+[[package]]
+name = "proc-macro2"
+version = "1.0.24"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e0704ee1a7e00d7bb417d0770ea303c1bccbabf0ef1667dae92b5967f5f8a71"
+dependencies = [
+ "unicode-xid",
+]
+
+[[package]]
+name = "quote"
+version = "1.0.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "aa563d17ecb180e500da1cfd2b028310ac758de548efdd203e18f283af693f37"
+dependencies = [
+ "proc-macro2",
+]
+
+[[package]]
+name = "regex"
+version = "1.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "38cf2c13ed4745de91a5eb834e11c00bcc3709e773173b2ce4c56c9fbde04b9c"
+dependencies = [
+ "aho-corasick",
+ "memchr",
+ "regex-syntax",
+ "thread_local",
+]
+
+[[package]]
+name = "regex-syntax"
+version = "0.6.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3b181ba2dcf07aaccad5448e8ead58db5b742cf85dfe035e2227f137a539a189"
+
+[[package]]
+name = "rustc-demangle"
+version = "0.1.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6e3bad0ee36814ca07d7968269dd4b7ec89ec2da10c4bb613928d3077083c232"
+
+[[package]]
+name = "serde"
+version = "1.0.117"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b88fa983de7720629c9387e9f517353ed404164b1e482c970a90c1a4aaf7dc1a"
+
+[[package]]
+name = "serde_bytes"
+version = "0.11.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "16ae07dd2f88a366f15bd0632ba725227018c69a1c8550a927324f8eb8368bb9"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "serde_derive"
+version = "1.0.117"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cbd1ae72adb44aab48f325a02444a5fc079349a8d804c1fc922aed3f7454c74e"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "syn"
+version = "1.0.48"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cc371affeffc477f42a221a1e4297aedcea33d47d19b61455588bd9d8f6b19ac"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-xid",
+]
+
+[[package]]
+name = "synstructure"
+version = "0.12.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b834f2d66f734cb897113e34aaff2f1ab4719ca946f9a7358dba8f8064148701"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+ "unicode-xid",
+]
+
+[[package]]
+name = "thread_local"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d40c6d1b69745a6ec6fb1ca717914848da4b44ae29d9b3080cbee91d72a69b14"
+dependencies = [
+ "lazy_static",
+]
+
+[[package]]
+name = "unicode-xid"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f7fe0bb3479651439c9112f72b6c505038574c9fbb575ed1bf3b797fa39dd564"
diff --git a/rust/vendor/bendy/Cargo.toml b/rust/vendor/bendy/Cargo.toml
new file mode 100644
index 0000000..907c7fe
--- /dev/null
+++ b/rust/vendor/bendy/Cargo.toml
@@ -0,0 +1,65 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies
+#
+# If you believe there's an error in this file please file an
+# issue against the rust-lang/cargo repository. If you're
+# editing this file be aware that the upstream Cargo.toml
+# will likely look very different (and much more reasonable)
+
+[package]
+edition = "2018"
+name = "bendy"
+version = "0.3.3"
+authors = ["P3KI <contact@p3ki.com>", "TQ Hirsch <tq@p3ki.com>", "Bruno Kirschner <bruno@p3ki.com>"]
+description = "A rust library for encoding and decoding bencode with enforced canonicalization rules.\n"
+readme = "README.md"
+keywords = ["bencode", "serialization", "deserialization", "bittorent"]
+categories = ["encoding", "no-std"]
+license = "BSD-3-Clause"
+repository = "https://github.com/P3KI/bendy"
+[package.metadata.docs.rs]
+all-features = true
+[profile.release]
+opt-level = 3
+lto = true
+codegen-units = 1
+
+[[example]]
+name = "encode_torrent"
+required-features = ["std"]
+
+[[test]]
+name = "core_test"
+required-features = ["std"]
+[dependencies.failure]
+version = "^0.1.3"
+features = ["derive"]
+default_features = false
+
+[dependencies.serde_]
+version = "^1.0"
+optional = true
+package = "serde"
+
+[dependencies.serde_bytes]
+version = "^0.11.3"
+optional = true
+[dev-dependencies.regex]
+version = "^1.0"
+
+[dev-dependencies.serde_derive]
+version = "^1.0"
+
+[features]
+default = ["std"]
+serde = ["serde_", "serde_bytes"]
+std = ["failure/std"]
+[badges.maintenance]
+status = "actively-developed"
+
+[badges.travis-ci]
+repository = "P3KI/bendy"
diff --git a/rust/vendor/bendy/LICENSE-BSD3 b/rust/vendor/bendy/LICENSE-BSD3
new file mode 100644
index 0000000..0fbbd14
--- /dev/null
+++ b/rust/vendor/bendy/LICENSE-BSD3
@@ -0,0 +1,29 @@
+Copyright (c) 2018 P3KI GmbH, All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+1. Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright
+notice, this list of conditions and the following disclaimer in the
+documentation and/or other materials provided with the distribution.
+
+3. Neither the name of the copyright holder nor the names of its
+contributors may be used to endorse or promote products derived
+from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
+IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
+TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
diff --git a/rust/vendor/bendy/README.md b/rust/vendor/bendy/README.md
new file mode 100644
index 0000000..0c5fc30
--- /dev/null
+++ b/rust/vendor/bendy/README.md
@@ -0,0 +1,656 @@
+# Bendy
+
+[![Build Status](https://travis-ci.org/P3KI/bendy.svg?branch=master)](https://travis-ci.org/P3KI/bendy)
+[![Current Version](https://meritbadge.herokuapp.com/bendy)](https://crates.io/crates/bendy)
+[![License: BSD-3-Clause](https://img.shields.io/github/license/P3KI/bendy.svg)](https://github.com/P3KI/bendy/blob/master/LICENSE-BSD3)
+
+A Rust library for encoding and decoding bencode with enforced canonicalization rules.
+[Bencode](https://en.wikipedia.org/wiki/Bencode) is a simple but very effective encoding
+scheme, originating with the BitTorrent peer-to-peer system.
+
+---
+
+You may be looking for:
+
+- [Known Alternatives](#known-alternatives)
+- [Why should I use it](#why-should-i-use-it)
+- [Usage](#usage)
+ - [Encoding](#encoding-with-tobencode)
+ - [Decoding](#decoding-with-frombencode)
+- [Unsafe Code](#usage-of-unsafe-code)
+- [Contributing](#contributing)
+
+---
+
+## Known alternatives:
+This is not the first library to implement Bencode. In fact, there are several
+implementations already:
+
+- Toby Padilla's [serde-bencode](https://github.com/toby/serde-bencode),
+- Arjan Topolovec's [rust-bencode](https://github.com/arjantop/rust-bencode),
+- Murarth's [bencode](https://github.com/murarth/bencode),
+- and Jonas Hermsmeier's [rust-bencode](https://github.com/jhermsmeier/rust-bencode).
+
+## Why should I use it?
+So why go to the extra work of adding yet another version of a thing that already
+exists, you might ask?
+
+### Enforced correctness
+Implementing a canonical encoding form is straightforward. It comes down to defining
+*a proper way of handling unordered data*: bendy sorts such data before encoding it
+using the regular Bencode rules. If your data is already sorted, bendy will of course
+skip the extra sorting step to gain efficiency.
+But bendy goes a step further to *ensure correctness*: If you hand the library data that
+you say is already sorted, bendy still does an in-place verification to *ensure that your
+data actually is sorted* and complains if it isn't. In the end, once bendy has serialized
+your data, it's Bencode through and through, so it's perfectly compatible with every
+other Bencode library.
+
+Just remember: At this point *only bendy* enforces the correctness of the canonical
+format if you read it back in.
+
+### Canonical representation
+Bendy ensures that any deserialize/serialize round trip produces the exact *same*
+and *correct* binary representation. This is relevant if you're dealing with unordered
+sets or map-structured data where the order is theoretically irrelevant but matters in
+practice, especially if you want to ensure that cryptographic signatures related
+to the data structure do not get invalidated accidentally (a short example follows
+at the end of this section).
+
+| Data Structure | Default Impl | Comment |
+|----------------|--------------|--------------------------------------------------------------------------------------------|
+| Vec | ✔ | Defines own ordering |
+| VecDeque | ✔ | Defines own ordering |
+| LinkedList | ✔ | Defines own ordering |
+| HashMap | ✔ | Ordering missing but content is ordered by key byte representation. |
+| BTreeMap | ✔ | Defines own ordering |
+| HashSet | ✘ | (Unordered) Set handling not yet defined |
+| BTreeSet | ✘ | (Unordered) Set handling not yet defined |
+| BinaryHeap | ✘ | Ordering missing |
+| Iterator | ~ | `emit_unchecked_list()` allows emitting any iterable, but the user needs to ensure the ordering. |
+
+**Attention:**
+
+- Since most list types already define their inner ordering, data structures
+ like `Vec`, `VecDeque`, and `LinkedList` will not get sorted during encoding!
+
+- There is no default implementation for handling generic iterators.
+ This is by design. `Bendy` cannot tell from an iterator whether the underlying
+ structure requires sorting or not and would have to take data as-is.
+
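+As a small illustration of the table above, a `HashMap` emits its pairs ordered by
+the byte representation of the keys, so insertion order does not leak into the
+encoding. The following is only a minimal sketch; it assumes the default `HashMap`
+implementation covers `String` keys and integer values:
+
+```rust
+use std::collections::HashMap;
+
+use bendy::encoding::{Error, ToBencode};
+
+fn main() {}
+
+#[test]
+fn encode_hash_map_in_key_order() -> Result<(), Error> {
+    // Assumes the default `HashMap` implementation from the table above.
+    let mut map = HashMap::new();
+    map.insert("zeta".to_string(), 1i64);
+    map.insert("alpha".to_string(), 2i64);
+
+    // Regardless of insertion order, the keys are emitted in ascending
+    // byte order, so the output is canonical.
+    let encoded = map.to_bencode()?;
+    assert_eq!(b"d5:alphai2e4:zetai1ee", encoded.as_slice());
+
+    Ok(())
+}
+```
+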
+## Usage
+
+First you need to add bendy as a project dependency:
+
+```toml
+[dependencies]
+bendy = "^0.3"
+```
+
+### Encoding with `ToBencode`
+
+To encode an object of a type which already implements the `ToBencode` trait
+it is enough to import the trait and call the `to_bencode()` function on the object.
+
+```rust
+use bendy::encoding::{ToBencode, Error};
+
+fn main() {}
+
+#[test]
+fn encode_vector() -> Result<(), Error> {
+ let my_data = vec!["hello", "world"];
+ let encoded = my_data.to_bencode()?;
+
+ assert_eq!(b"l5:hello5:worlde", encoded.as_slice());
+ Ok(())
+}
+```
+
+### Implementing `ToBencode`
+
+In most cases it should be enough to override the associated `encode` function
+and keep the default implementation of `to_bencode`.
+
+The function will provide you with a `SingleItemEncoder` which must be used to
+emit any relevant components of the current object. As long as these implement
+`ToBencode` themselves it is enough to pass them into the `emit` function of
+the encoder as this will serialize any type implementing the trait.
+
+Besides `emit`, the encoder also provides a list of functions to encode specific
+bencode primitives (e.g. `emit_int` and `emit_str`) and nested bencode elements
+(e.g. `emit_dict` and `emit_list`). These methods should be used if it's necessary
+to output a specific non-default data type.
+
+**Implementing Integer Encoding**
+
+As bencode has native integer support, bendy provides default implementations for
+all of Rust's native integer types. This allows calling `to_bencode` on any integer
+and passing such values into the encoder's `emit_int` function.
+
+```rust
+use bendy::encoding::{ToBencode, SingleItemEncoder, Error};
+
+struct IntegerWrapper(i64);
+
+impl ToBencode for IntegerWrapper {
+ const MAX_DEPTH: usize = 0;
+
+ fn encode(&self, encoder: SingleItemEncoder) -> Result<(), Error> {
+ encoder.emit_int(self.0)
+ }
+}
+
+fn main() {}
+
+#[test]
+fn encode_integer() -> Result<(), Error> {
+ let example = IntegerWrapper(21);
+
+ let encoded = example.to_bencode()?;
+ assert_eq!(b"i21e", encoded.as_slice());
+
+ let encoded = 21.to_bencode()?;
+ assert_eq!(b"i21e", encoded.as_slice());
+
+ Ok(())
+}
+```
+
+**Encode a byte string**
+
+Another data type bencode natively supports is the byte string. Bendy therefore
+provides default implementations for `String` and `&str`.
+
+```rust
+use bendy::encoding::{ToBencode, SingleItemEncoder, Error};
+
+struct StringWrapper(String);
+
+impl ToBencode for StringWrapper {
+ const MAX_DEPTH: usize = 0;
+
+ fn encode(&self, encoder: SingleItemEncoder) -> Result<(), Error> {
+ encoder.emit_str(&self.0)
+ }
+}
+
+fn main() {}
+
+#[test]
+fn encode_string() -> Result<(), Error> {
+ let example = StringWrapper("content".to_string());
+
+ let encoded = example.to_bencode()?;
+ assert_eq!(b"7:content", encoded.as_slice());
+
+ let encoded = "content".to_bencode()?;
+ assert_eq!(b"7:content", encoded.as_slice());
+
+ Ok(())
+}
+```
+
+As it's a very common pattern to represent a byte string as a `Vec<u8>`, bendy
+exposes the `AsString` wrapper. It can be used to wrap any element implementing
+`AsRef<[u8]>` so that it is emitted as a bencode string instead of a list.
+
+```rust
+use bendy::encoding::{ToBencode, SingleItemEncoder, Error, AsString};
+
+struct ByteStringWrapper(Vec<u8>);
+
+impl ToBencode for ByteStringWrapper {
+ const MAX_DEPTH: usize = 0;
+
+ fn encode(&self, encoder: SingleItemEncoder) -> Result<(), Error> {
+ let content = AsString(&self.0);
+ encoder.emit(&content)
+ }
+}
+
+fn main() {}
+
+#[test]
+fn encode_byte_string() -> Result<(), Error> {
+ let example = ByteStringWrapper(b"content".to_vec());
+
+ let encoded = example.to_bencode()?;
+ assert_eq!(b"7:content", encoded.as_slice());
+
+ let encoded = AsString(b"content").to_bencode()?;
+ assert_eq!(b"7:content", encoded.as_slice());
+
+ Ok(())
+}
+```
+
+**Encode a dictionary**
+
+If a data structure contains key-value pairs, it's most likely a good idea to
+encode it as a bencode dictionary. This is also true for most structs with
+more than one member, as recording the member names makes it possible to check
+for the existence of specific (optional) members.
+
+__Attention:__ To ensure a canonical representation, bendy requires that the keys
+of a dictionary emitted via `emit_dict` are sorted in ascending order; otherwise the
+encoding will fail with an error of kind `UnsortedKeys`. For an unsorted dictionary
+it might be useful to use `emit_and_sort_dict` instead (a short sketch of the failure
+mode follows the example below).
+
+```rust
+use bendy::encoding::{ToBencode, SingleItemEncoder, Error};
+
+struct Example {
+ label: String,
+ counter: u64,
+}
+
+impl ToBencode for Example {
+ const MAX_DEPTH: usize = 1;
+
+ fn encode(&self, encoder: SingleItemEncoder) -> Result<(), Error> {
+ encoder.emit_dict(|mut e| {
+ e.emit_pair(b"counter", &self.counter)?;
+ e.emit_pair(b"label", &self.label)?;
+
+ Ok(())
+ })
+ }
+}
+
+fn main() {}
+
+#[test]
+fn encode_dictionary() -> Result<(), Error> {
+ let example = Example { label: "Example".to_string(), counter: 0 };
+
+ let encoded = example.to_bencode()?;
+ assert_eq!(b"d7:counteri0e5:label7:Examplee", encoded.as_slice());
+
+ Ok(())
+}
+```
+
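+The sketch below illustrates the `UnsortedKeys` failure mode with a made-up wrapper
+type: emitting the pairs out of order makes `to_bencode` fail, and switching to
+`emit_and_sort_dict` would be the usual fix.
+
+```rust
+use bendy::encoding::{ToBencode, SingleItemEncoder, Error};
+
+// A made-up type, used only to demonstrate the error.
+struct Unsorted;
+
+impl ToBencode for Unsorted {
+    const MAX_DEPTH: usize = 1;
+
+    fn encode(&self, encoder: SingleItemEncoder) -> Result<(), Error> {
+        encoder.emit_dict(|mut e| {
+            // "label" sorts after "counter", so these keys arrive out of order.
+            e.emit_pair(b"label", "out of order")?;
+            e.emit_pair(b"counter", 0u64)
+        })
+    }
+}
+
+fn main() {}
+
+#[test]
+fn unsorted_keys_are_rejected() {
+    // `emit_dict` verifies the key order, so encoding fails here;
+    // `emit_and_sort_dict` would accept the same pairs and sort them.
+    assert!(Unsorted.to_bencode().is_err());
+}
+```
+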
+**Encode a list**
+
+When encoding a list, bendy assumes the elements are inherently ordered by
+their position inside the list. The implementation is therefore free to choose
+its own ordering.
+
+```rust
+use bendy::encoding::{ToBencode, SingleItemEncoder, Error};
+
+struct Location(i64, i64);
+
+impl ToBencode for Location {
+ const MAX_DEPTH: usize = 1;
+
+ fn encode(&self, encoder: SingleItemEncoder) -> Result<(), Error> {
+ encoder.emit_list(|e| {
+ e.emit_int(self.0)?;
+ e.emit_int(self.1)
+ })
+ }
+}
+
+fn main() {}
+
+#[test]
+fn encode_list() -> Result<(), Error> {
+ let example = Location(2, 3);
+
+ let encoded = example.to_bencode()?;
+ assert_eq!(b"li2ei3ee", encoded.as_slice());
+
+ Ok(())
+}
+```
+
+### Decoding with `FromBencode`
+
+To decode an object of a type which already implements the `FromBencode` trait,
+it is enough to import the trait and call the type's `from_bencode()` function on the encoded bytes.
+
+```rust
+use bendy::decoding::{FromBencode, Error};
+
+fn main() {}
+
+#[test]
+fn decode_vector() -> Result<(), Error> {
+ let encoded = b"l5:hello5:worlde".to_vec();
+ let decoded = Vec::<String>::from_bencode(&encoded)?;
+
+ assert_eq!(vec!["hello", "world"], decoded);
+ Ok(())
+}
+
+```
+
+### Implementing `FromBencode`
+
+In most cases it should be enough to override the associated
+`decode_bencode_object` function and keep the default implementation of
+`from_bencode`.
+
+The function will provide you with a representation of a bencode `Object`
+which must be processed to extract the relevant components of the expected data
+type. As long as these implement `FromBencode` themselves, it is enough to call
+`decode_bencode_object` on the expected data type of the element, as this will
+deserialize any type implementing the trait.
+
+Besides `from_bencode`, the bencode `Object` representation also provides a list
+of helper functions to convert itself into specific bencode primitives and containers
+(e.g. `bytes_or`, `integer_or_else` or `try_into_list`), which can then be used
+to restore the actual element.
+
+**Decode an integer**
+
+As bencode has native integer support, bendy provides default implementations
+for all of Rust's native integer types. This allows calling `from_bencode` on
+any integer type.
+
+*Attention:* If it's necessary to handle a big integer which has no
+representation through one of the default data types it's always possible to
+access the string version of the number during decoding.
+
+```rust
+use bendy::decoding::{FromBencode, Object, Error};
+
+#[derive(Debug, Eq, PartialEq)]
+struct IntegerWrapper(i64);
+
+impl FromBencode for IntegerWrapper {
+ const EXPECTED_RECURSION_DEPTH: usize = 0;
+
+ fn decode_bencode_object(object: Object) -> Result<Self, Error> {
+ // This is an example for content handling. It would also be possible
+ // to call `i64::decode_bencode_object(object)` directly.
+ let content = object.try_into_integer()?;
+ let number = content.parse::<i64>()?;
+
+ Ok(IntegerWrapper(number))
+ }
+}
+
+fn main() {}
+
+#[test]
+fn decode_integer() -> Result<(), Error> {
+ let encoded = b"i21e".to_vec();
+
+ let example = IntegerWrapper::from_bencode(&encoded)?;
+ assert_eq!(IntegerWrapper(21), example);
+
+ let example = i64::from_bencode(&encoded)?;
+ assert_eq!(21, example);
+
+ Ok(())
+}
+```
+
+**Decode a byte string**
+
+In most cases it is possible to restore a string from its bencode
+representation as a byte sequence via `String::from_utf8` or
+`str::from_utf8`.
+
+```rust
+use bendy::decoding::{FromBencode, Object, Error};
+
+#[derive(Debug, Eq, PartialEq)]
+struct StringWrapper(String);
+
+impl FromBencode for StringWrapper {
+ const EXPECTED_RECURSION_DEPTH: usize = 0;
+
+ fn decode_bencode_object(object: Object) -> Result<Self, Error> {
+ // This is an example for content handling. It would also be possible
+ // to call `String::decode_bencode_object(object)` directly.
+ let content = object.try_into_bytes()?;
+ let content = String::from_utf8(content.to_vec())?;
+
+ Ok(StringWrapper(content))
+ }
+}
+
+fn main() {}
+
+#[test]
+fn decode_string() -> Result<(), Error> {
+ let encoded = b"7:content".to_vec();
+
+ let example = StringWrapper::from_bencode(&encoded)?;
+ assert_eq!(StringWrapper("content".to_string()), example);
+
+ let example = String::from_bencode(&encoded)?;
+ assert_eq!("content".to_string(), example);
+
+ Ok(())
+}
+```
+
+If the content is a non-UTF-8 encoded string or an actual byte sequence, the
+`AsString` wrapper can be used to restore the bencode string object as
+a raw sequence of bytes in a `Vec<u8>`.
+
+```rust
+use bendy::{
+ decoding::{FromBencode, Object, Error},
+ encoding::AsString,
+};
+
+#[derive(Debug, Eq, PartialEq)]
+struct ByteStringWrapper(Vec<u8>);
+
+impl FromBencode for ByteStringWrapper {
+ const EXPECTED_RECURSION_DEPTH: usize = 0;
+
+ fn decode_bencode_object(object: Object) -> Result<Self, Error> {
+ let content = AsString::decode_bencode_object(object)?;
+ Ok(ByteStringWrapper(content.0))
+ }
+}
+
+fn main() {}
+
+#[test]
+fn decode_byte_string() -> Result<(), Error> {
+ let encoded = b"7:content".to_vec();
+
+ let example = ByteStringWrapper::from_bencode(&encoded)?;
+ assert_eq!(ByteStringWrapper(b"content".to_vec()), example);
+
+ let example = AsString::from_bencode(&encoded)?;
+ assert_eq!(b"content".to_vec(), example.0);
+
+ Ok(())
+}
+```
+
+**Decode a dictionary**
+
+Unwrapping the bencode object into a dictionary will provide a dictionary
+decoder which can be used to access the included key-value pairs.
+
+To improve error handling in the case of huge or deeply nested dictionaries,
+the decoding module provides a `ResultExt` trait which allows adding a context
+description to an error. If multiple context calls are nested, they will be
+concatenated in a dot-notation-like style.
+
+```rust
+use bendy::decoding::{FromBencode, Object, Error, ResultExt};
+
+#[derive(Debug, Eq, PartialEq)]
+struct Example {
+ label: String,
+ counter: u64,
+}
+
+impl FromBencode for Example {
+ const EXPECTED_RECURSION_DEPTH: usize = 1;
+
+ fn decode_bencode_object(object: Object) -> Result<Self, Error> {
+ let mut counter = None;
+ let mut label = None;
+
+ let mut dict = object.try_into_dictionary()?;
+ while let Some(pair) = dict.next_pair()? {
+ match pair {
+ (b"counter", value) => {
+ counter = u64::decode_bencode_object(value)
+ .context("counter")
+ .map(Some)?;
+ },
+ (b"label", value) => {
+ label = String::decode_bencode_object(value)
+ .context("label")
+ .map(Some)?;
+ },
+ (unknown_field, _) => {
+ return Err(Error::unexpected_field(String::from_utf8_lossy(
+ unknown_field,
+ )));
+ },
+ }
+ }
+
+ let counter = counter.ok_or_else(|| Error::missing_field("counter"))?;
+ let label= label.ok_or_else(|| Error::missing_field("label"))?;
+
+ Ok(Example { counter, label })
+ }
+}
+
+fn main() {}
+
+#[test]
+fn decode_dictionary() -> Result<(), Error> {
+ let encoded = b"d7:counteri0e5:label7:Examplee".to_vec();
+ let expected = Example { label: "Example".to_string(), counter: 0 };
+
+ let example = Example::from_bencode(&encoded)?;
+ assert_eq!(expected, example);
+
+ Ok(())
+}
+```
+
+**Decode a list**
+
+Unwrapping the bencode object into a list will provide a list decoder which can
+be used to access the contained elements.
+
+```rust
+use bendy::decoding::{FromBencode, Object, Error};
+
+#[derive(Debug, PartialEq, Eq)]
+struct Location(i64, i64);
+
+impl FromBencode for Location {
+ const EXPECTED_RECURSION_DEPTH: usize = 1;
+
+ fn decode_bencode_object(object: Object) -> Result<Self, Error> {
+ let mut list = object.try_into_list()?;
+
+ let x = list.next_object()?.ok_or(Error::missing_field("x"))?;
+ let x = i64::decode_bencode_object(x)?;
+
+ let y = list.next_object()?.ok_or(Error::missing_field("y"))?;
+ let y = i64::decode_bencode_object(y)?;
+
+ Ok(Location(x, y))
+ }
+}
+
+fn main() {}
+
+#[test]
+fn decode_list() -> Result<(), Error> {
+ let encoded = b"li2ei3ee".to_vec();
+ let expected = Location(2, 3);
+
+ let example = Location::from_bencode(&encoded)?;
+ assert_eq!(expected, example);
+
+ Ok(())
+}
+```
+
+### Optional: Limitation of recursive parsing
+
+**What?**
+
+The library allows setting an expected recursion depth limit for decoding and encoding.
+If set, the parser will use this value as an upper limit for the validation of any nested
+data structure and abort with an error if an additional level of nesting is detected.
+
+While the encoding limit itself is primarily there to increase the confidence of bendy
+users in their own validation code, the decoding limit should be used to avoid
+parsing of malformed or malicious external data.
+
+ - The encoding limit can be set through the `MAX_DEPTH` constant in any implementation
+ of the `ToBencode` trait.
+ - The decoding limit can be set through the `EXPECTED_RECURSION_DEPTH` constant in any
+ implementation of the `FromBencode` trait.
+
+**How?**
+
+The nesting level calculation always starts at level zero, is incremented by one when
+the parser enters a nested bencode element (i.e. a list or dictionary) and is decremented
+as soon as the related element ends. Therefore, values decoded as bencode strings
+or integers do not affect the nesting limit.
+
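+As a minimal sketch of how these constants compose (the struct names below are made
+up for illustration), nesting one dictionary-encoded type inside another adds one
+level on each side, just like the bundled torrent examples derive `Info::MAX_DEPTH + 1`:
+
+```rust
+use bendy::encoding::{ToBencode, SingleItemEncoder, Error};
+
+// Made-up example types; the pattern mirrors the bundled torrent examples.
+struct Inner {
+    value: i64,
+}
+
+struct Outer {
+    inner: Inner,
+}
+
+impl ToBencode for Inner {
+    // A flat dictionary of primitives needs a single nesting level.
+    const MAX_DEPTH: usize = 1;
+
+    fn encode(&self, encoder: SingleItemEncoder) -> Result<(), Error> {
+        encoder.emit_dict(|mut e| e.emit_pair(b"value", self.value))
+    }
+}
+
+impl ToBencode for Outer {
+    // The outer dictionary wraps `Inner`, adding exactly one more level.
+    const MAX_DEPTH: usize = Inner::MAX_DEPTH + 1;
+
+    fn encode(&self, encoder: SingleItemEncoder) -> Result<(), Error> {
+        encoder.emit_dict(|mut e| e.emit_pair(b"inner", &self.inner))
+    }
+}
+
+fn main() {}
+
+#[test]
+fn encode_nested_dictionaries() -> Result<(), Error> {
+    let example = Outer { inner: Inner { value: 7 } };
+
+    let encoded = example.to_bencode()?;
+    assert_eq!(b"d5:innerd5:valuei7eee", encoded.as_slice());
+
+    Ok(())
+}
+```
+
+The decoding side composes the same way: the bundled `decode_torrent` example derives
+`EXPECTED_RECURSION_DEPTH` for its outer struct from the inner struct's constant plus one.
+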
+### Serde Support
+
+Bendy supports serde when the `serde` feature is enabled:
+
+```toml
+[dependencies]
+bendy = { version = "^0.3", features = ["std", "serde"] }
+serde = { version = "1.0", features = ["derive"] }
+```
+
+With the feature enabled, values can be serialized to and deserialized from
+bencode with `bendy::serde::to_bytes` and `bendy::serde::from_bytes`,
+respectively:
+
+
+```rust
+use serde::{Deserialize, Serialize};
+
+#[derive(Serialize, Deserialize, PartialEq, Debug)]
+struct Foo {
+ bar: String,
+}
+
+fn main() {
+ let value = Foo {
+ bar: "hello".into(),
+ };
+ let bencode = bendy::serde::to_bytes(&value).unwrap();
+ assert_eq!(bencode, b"d3:bar5:helloe");
+ let deserialized = bendy::serde::from_bytes::<Foo>(&bencode).unwrap();
+ assert_eq!(deserialized, value);
+}
+```
+
+Information on how Rust types are represented in bencode is available in the
+[serde module documentation](https://docs.rs/bendy/*/bendy/serde/index.html).
+
+## Usage of unsafe code
+The parser does not require any unsafe code to work, but it does contain a single unsafe call
+to `str::from_utf8_unchecked`. This call is used to avoid a duplicate UTF-8 check when the
+parser converts the bytes representing an incoming integer into a `&str` after its successful
+validation.
+
+*Disclaimer: Further unsafe code may be introduced through the dependency on the `failure` crate.*
+
+## Contributing
+
+We welcome everyone to ask questions, open issues or provide merge requests.
+Each merge request will be reviewed and either landed in the main tree or given
+feedback for changes that would be required.
+
+All code in this repository is under the [BSD-3-Clause](https://opensource.org/licenses/BSD-3-Clause)
+license.
diff --git a/rust/vendor/bendy/examples/decode_torrent.rs b/rust/vendor/bendy/examples/decode_torrent.rs
new file mode 100644
index 0000000..3960a89
--- /dev/null
+++ b/rust/vendor/bendy/examples/decode_torrent.rs
@@ -0,0 +1,212 @@
+//! A decoder for torrent files.
+//!
+//! This example will ...
+//!
+//! - read a torrent file,
+//! - deserialize the bencode formatted information
+//! - and print the result to stdout.
+//!
+//! *Attention*: Please consider piping the output into a file of your choice.
+//!
+//! # Run the Example
+//!
+//! ```
+//! cargo run --example decode_torrent > parsing_output.txt
+//! ```
+
+use bendy::{
+ decoding::{Error, FromBencode, Object, ResultExt},
+ encoding::AsString,
+};
+
+static EXAMPLE_TORRENT: &[u8] =
+ include_bytes!("torrent_files/debian-9.4.0-amd64-netinst.iso.torrent");
+
+/// Main struct containing all required information.
+///
+/// Based on: [http://fileformats.wikia.com/wiki/Torrent_file].
+///
+/// # Design Decision
+///
+/// To keep the example simple we won't parse the integer fields
+/// into a concrete number type, as the bencode integer definition
+/// is actually a `BigNum` and the content may not fit.
+#[derive(Debug)]
+struct MetaInfo {
+ pub announce: String,
+ pub info: Info,
+ pub comment: Option<String>, // not official element
+ pub creation_date: Option<u64>, // not official element
+ pub http_seeds: Option<Vec<String>>, // not official element
+}
+
+/// File related information (Single-file format)
+#[derive(Debug)]
+struct Info {
+ pub piece_length: String,
+ pub pieces: Vec<u8>,
+ pub name: String,
+ pub file_length: String,
+}
+
+impl FromBencode for MetaInfo {
+ // Try to parse with a `max_depth` of two.
+ //
+ // The required max depth of a data structure is calculated as follows:
+ //
+ // - Every potential nesting level encoded as bencode dictionary or list count as +1,
+ // - everything else is ignored.
+ //
+ // This typically means that we only need to count the amount of nested structs and container
+ // types. (Potentially ignoring lists of bytes as they are normally encoded as strings.)
+ //
+ // struct MetaInfo { // encoded as dictionary (+1)
+ // announce: String,
+ // info: Info { // encoded as dictionary (+1)
+ // piece_length: String,
+ // pieces: Vec<u8>, // encoded as string and therefore ignored
+ // name: String,
+ // file_length: String,
+ // },
+ // comment: Option<String>,
+ // creation_date: Option<u64>,
+ // http_seeds: Option<Vec<String>> // if available encoded as list but even then doesn't
+ // increase the limit over the deepest chain including
+ // info
+ // }
+ const EXPECTED_RECURSION_DEPTH: usize = Info::EXPECTED_RECURSION_DEPTH + 1;
+
+ /// Entry point for decoding a torrent. The dictionary is parsed for all
+ /// non-optional and optional fields. Missing optional fields are ignored
+ /// but any other missing fields result in stopping the decoding and in
+ /// spawning [`DecodingError::MissingField`].
+ fn decode_bencode_object(object: Object) -> Result<Self, Error>
+ where
+ Self: Sized,
+ {
+ let mut announce = None;
+ let mut comment = None;
+ let mut creation_date = None;
+ let mut http_seeds = None;
+ let mut info = None;
+
+ let mut dict_dec = object.try_into_dictionary()?;
+ while let Some(pair) = dict_dec.next_pair()? {
+ match pair {
+ (b"announce", value) => {
+ announce = String::decode_bencode_object(value)
+ .context("announce")
+ .map(Some)?;
+ },
+ (b"comment", value) => {
+ comment = String::decode_bencode_object(value)
+ .context("comment")
+ .map(Some)?;
+ },
+ (b"creation date", value) => {
+ creation_date = u64::decode_bencode_object(value)
+ .context("creation_date")
+ .map(Some)?;
+ },
+ (b"httpseeds", value) => {
+ http_seeds = Vec::decode_bencode_object(value)
+ .context("http_seeds")
+ .map(Some)?;
+ },
+ (b"info", value) => {
+ info = Info::decode_bencode_object(value)
+ .context("info")
+ .map(Some)?;
+ },
+ (unknown_field, _) => {
+ return Err(Error::unexpected_field(String::from_utf8_lossy(
+ unknown_field,
+ )));
+ },
+ }
+ }
+
+ let announce = announce.ok_or_else(|| Error::missing_field("announce"))?;
+ let info = info.ok_or_else(|| Error::missing_field("info"))?;
+
+ Ok(MetaInfo {
+ announce,
+ info,
+ comment,
+ creation_date,
+ http_seeds,
+ })
+ }
+}
+
+impl FromBencode for Info {
+ const EXPECTED_RECURSION_DEPTH: usize = 1;
+
+ /// Treats object as dictionary containing all fields for the info struct.
+ /// On success the dictionary is parsed for the fields of info which are
+ /// necessary for torrent. Any missing field will result in a missing field
+ /// error which will stop the decoding.
+ fn decode_bencode_object(object: Object) -> Result<Self, Error>
+ where
+ Self: Sized,
+ {
+ let mut file_length = None;
+ let mut name = None;
+ let mut piece_length = None;
+ let mut pieces = None;
+
+ let mut dict_dec = object.try_into_dictionary()?;
+ while let Some(pair) = dict_dec.next_pair()? {
+ match pair {
+ (b"length", value) => {
+ file_length = value
+ .try_into_integer()
+ .context("file.length")
+ .map(ToString::to_string)
+ .map(Some)?;
+ },
+ (b"name", value) => {
+ name = String::decode_bencode_object(value)
+ .context("name")
+ .map(Some)?;
+ },
+ (b"piece length", value) => {
+ piece_length = value
+ .try_into_integer()
+ .context("length")
+ .map(ToString::to_string)
+ .map(Some)?;
+ },
+ (b"pieces", value) => {
+ pieces = AsString::decode_bencode_object(value)
+ .context("pieces")
+ .map(|bytes| Some(bytes.0))?;
+ },
+ (unknown_field, _) => {
+ return Err(Error::unexpected_field(String::from_utf8_lossy(
+ unknown_field,
+ )));
+ },
+ }
+ }
+
+ let file_length = file_length.ok_or_else(|| Error::missing_field("file_length"))?;
+ let name = name.ok_or_else(|| Error::missing_field("name"))?;
+ let piece_length = piece_length.ok_or_else(|| Error::missing_field("piece_length"))?;
+ let pieces = pieces.ok_or_else(|| Error::missing_field("pieces"))?;
+
+ // Check that we discovered all necessary fields
+ Ok(Info {
+ file_length,
+ name,
+ piece_length,
+ pieces,
+ })
+ }
+}
+
+fn main() -> Result<(), Error> {
+ let torrent = MetaInfo::from_bencode(EXAMPLE_TORRENT)?;
+ println!("{:#?}", torrent);
+ Ok(())
+}
diff --git a/rust/vendor/bendy/examples/encode_torrent.rs b/rust/vendor/bendy/examples/encode_torrent.rs
new file mode 100644
index 0000000..1e9b8c2
--- /dev/null
+++ b/rust/vendor/bendy/examples/encode_torrent.rs
@@ -0,0 +1,115 @@
+//! An encoder for torrent files
+//!
+//! This example will ...
+//!
+//! - serialize an object representing a torrent file into bencode format
+//! - and print the result to stdout.
+//!
+//! *Attention*: Please consider piping the output into a file of your choice.
+//!
+//! # Run the Example
+//!
+//! ```
+//! cargo run --example encode_torrent > example.torrent
+//! ```
+
+use std::io::Write;
+
+use bendy::encoding::{AsString, Error as EncodingError, SingleItemEncoder, ToBencode};
+use failure::Error;
+
+/// Main struct containing all required information.
+///
+/// Based on: [http://fileformats.wikia.com/wiki/Torrent_file].
+///
+/// # Design Decision
+///
+/// To keep the example simple we won't parse the integer fields
+/// into a concrete number type, as the bencode integer definition
+/// is actually a `BigNum` and the content may not fit.
+#[derive(Debug)]
+struct MetaInfo {
+ pub announce: String,
+ pub info: Info,
+ pub comment: Option<String>, // not official element
+ pub creation_date: Option<String>, // not official element
+ pub http_seeds: Option<Vec<String>>, // not official element
+}
+
+/// File related information (Single-file format)
+#[derive(Debug)]
+struct Info {
+ pub piece_length: String,
+ pub pieces: Vec<u8>,
+ pub name: String,
+ pub file_length: String,
+}
+
+impl ToBencode for MetaInfo {
+ // Adds an additional recursion level -- itself formatted as dictionary --
+ // around the info struct.
+ const MAX_DEPTH: usize = Info::MAX_DEPTH + 1;
+
+ fn encode(&self, encoder: SingleItemEncoder) -> Result<(), EncodingError> {
+ encoder.emit_dict(|mut e| {
+ e.emit_pair(b"announce", &self.announce)?;
+
+ if let Some(comment) = &self.comment {
+ e.emit_pair(b"comment", comment)?;
+ }
+
+ if let Some(creation_date) = &self.creation_date {
+ e.emit_pair(b"creation date", creation_date)?;
+ }
+
+ if let Some(seeds) = &self.http_seeds {
+ // List is a simple iterable wrapper that allows to encode
+ // any list like container as bencode list object.
+ e.emit_pair(b"httpseeds", seeds)?;
+ }
+
+ e.emit_pair(b"info", &self.info)
+ })?;
+
+ Ok(())
+ }
+}
+
+impl ToBencode for Info {
+ // The struct is encoded as dictionary and all of it internals are encoded
+ // as flat values, i.e. strings or integers.
+ const MAX_DEPTH: usize = 1;
+
+ fn encode(&self, encoder: SingleItemEncoder) -> Result<(), EncodingError> {
+ encoder.emit_dict(|mut e| {
+ e.emit_pair(b"length", &self.file_length)?;
+ e.emit_pair(b"name", &self.name)?;
+ e.emit_pair(b"piece length", &self.piece_length)?;
+ e.emit_pair(b"pieces", AsString(&self.pieces))
+ })?;
+ Ok(())
+ }
+}
+
+fn main() -> Result<(), Error> {
+ let torrent = MetaInfo {
+ announce: "http://bttracker.debian.org:6969/announce".to_owned(),
+ comment: Some("\"Debian CD from cdimage.debian.org\"".to_owned()),
+ creation_date: Some(1_520_682_848.to_string()),
+ http_seeds: Some(vec![
+ "https://cdimage.debian.org/cdimage/release/9.4.0//srv/cdbuilder.debian.org/dst/deb-cd/weekly-builds/amd64/iso-cd/debian-9.4.0-amd64-netinst.iso".to_owned(),
+ "https://cdimage.debian.org/cdimage/archive/9.4.0//srv/cdbuilder.debian.org/dst/deb-cd/weekly-builds/amd64/iso-cd/debian-9.4.0-amd64-netinst.iso".to_owned(),
+ ]),
+ info: Info {
+ piece_length: 262_144.to_string(),
+ pieces: include_bytes!("torrent_files/pieces.iso").to_vec(),
+ name: "debian-9.4.0-amd64-netinst.iso".to_owned(),
+ file_length: 305_135_616.to_string(),
+ },
+ };
+
+ let data = torrent.to_bencode()?;
+ std::io::stdout().write_all(&data)?;
+
+ Ok(())
+}
diff --git a/rust/vendor/bendy/examples/torrent_files/debian-9.4.0-amd64-netinst.iso.torrent b/rust/vendor/bendy/examples/torrent_files/debian-9.4.0-amd64-netinst.iso.torrent
new file mode 100644
index 0000000..5c8652e
--- /dev/null
+++ b/rust/vendor/bendy/examples/torrent_files/debian-9.4.0-amd64-netinst.iso.torrent
Binary files differ
diff --git a/rust/vendor/bendy/examples/torrent_files/pieces.iso b/rust/vendor/bendy/examples/torrent_files/pieces.iso
new file mode 100644
index 0000000..1b35971
--- /dev/null
+++ b/rust/vendor/bendy/examples/torrent_files/pieces.iso
Binary files differ
diff --git a/rust/vendor/bendy/githooks/pre-commit/rustfmt-up-to-date b/rust/vendor/bendy/githooks/pre-commit/rustfmt-up-to-date
new file mode 100755
index 0000000..e408b24
--- /dev/null
+++ b/rust/vendor/bendy/githooks/pre-commit/rustfmt-up-to-date
@@ -0,0 +1,13 @@
+#!/bin/bash
+set -euo pipefail
+
+EXPORT_DIR="$(mktemp -d --tmpdir= rustfmt-hook.XXXXXXX)"
+# Cleanup temp dir on exit
+trap '{ rm -rf "${EXPORT_DIR}"; }' EXIT
+
+git checkout-index -a --prefix="${EXPORT_DIR}/"
+cd "${EXPORT_DIR}"
+
+# The nightly version is pinned to avoid untraceable version clashes.
+# Update it manually from time to time or if any error is encountered.
+cargo +nightly fmt -- --check
diff --git a/rust/vendor/bendy/rustfmt.toml b/rust/vendor/bendy/rustfmt.toml
new file mode 100644
index 0000000..49e0f5f
--- /dev/null
+++ b/rust/vendor/bendy/rustfmt.toml
@@ -0,0 +1,10 @@
+unstable_features = true
+
+required_version = "1.4.22"
+edition = "2018"
+
+format_code_in_doc_comments = true
+match_block_trailing_comma = true
+merge_imports = true
+reorder_impl_items = true
+use_field_init_shorthand = true
diff --git a/rust/vendor/bendy/src/assert_matches.rs b/rust/vendor/bendy/src/assert_matches.rs
new file mode 100644
index 0000000..b9822e9
--- /dev/null
+++ b/rust/vendor/bendy/src/assert_matches.rs
@@ -0,0 +1,12 @@
+macro_rules! assert_matches {
+ ($expression:expr, $( $pattern:pat )|+ $( if $guard:expr )?) => {
+ match $expression {
+ $( $pattern )|+ $( if $guard )? => {}
+ left => panic!(
+ "assertion failed: (left ~= right)\n left: `{:?}`\n right: `{}`",
+ left,
+ stringify!($($pattern)|+ $(if $guard)?)
+ ),
+ }
+ }
+}
diff --git a/rust/vendor/bendy/src/decoding.rs b/rust/vendor/bendy/src/decoding.rs
new file mode 100644
index 0000000..d2f151a
--- /dev/null
+++ b/rust/vendor/bendy/src/decoding.rs
@@ -0,0 +1,76 @@
+//! Decodes a bencoded struct
+//!
+//! # Basic decoding
+//! For any decoding process, first we need to create a decoder:
+//!
+//! ```
+//! # use bendy::decoding::{Decoder};
+//! #
+//! # let buf: &[u8] = b"d3:fooi1ee";
+//! let _decoder = Decoder::new(buf);
+//! ```
+//!
+//! Decoders have a depth limit to prevent resource exhaustion from hostile inputs. By default, it's
+//! set high enough for most structures that you'd encounter when prototyping. For production use it
+//! may not be enough, and the higher the depth limit, the more stack space an attacker can cause
+//! your program to use, so we recommend setting the bounds tightly:
+//!
+//! ```
+//! # use bendy::decoding::{Decoder};
+//! #
+//! # let buf: &[u8] = b"d3:fooi1ee";
+//! let _decoder = Decoder::new(buf).with_max_depth(3);
+//! ```
+//!
+//! Atoms (integers and strings) have depth zero, and lists and dicts have a depth equal to the
+//! depth of their deepest member plus one. As a special case, an empty list or dict has depth 1.
+//!
+//! Now, you can start reading objects:
+//!
+//! ```
+//! # use bendy::decoding::{Decoder,Object};
+//! #
+//! # fn decode_list(_: bendy::decoding::ListDecoder) {}
+//! # fn decode_dict(_: bendy::decoding::DictDecoder) {}
+//! #
+//! # let buf: &[u8] = b"d3:fooi1ee";
+//! # let mut decoder = Decoder::new(buf);
+//! #
+//! match decoder.next_object().unwrap() {
+//! None => (), // EOF
+//! Some(Object::List(d)) => decode_list(d),
+//! Some(Object::Dict(d)) => decode_dict(d),
+//! Some(Object::Integer(_)) => (), // integer, as a string
+//! Some(Object::Bytes(_)) => (), // A raw bytestring
+//! };
+//! ```
+//!
+//! # Error handling
+//!
+//! Once an error is encountered, the decoder won't try to muddle through it; instead, every future
+//! call to the decoder will return the same error. This behaviour can be used to check the syntax
+//! of an input object without fully decoding it:
+//!
+//! ```
+//! # use bendy::decoding::Decoder;
+//! #
+//! fn syntax_check(buf: &[u8]) -> bool {
+//! let mut decoder = Decoder::new(buf);
+//! decoder.next_object().ok(); // ignore the return value of this
+//! return decoder.next_object().is_ok();
+//! }
+//! #
+//! # assert!(syntax_check(b"i18e"));
+//! ```
+
+mod decoder;
+mod error;
+mod from_bencode;
+mod object;
+
+pub use self::{
+ decoder::{Decoder, DictDecoder, ListDecoder, Tokens},
+ error::{Error, ErrorKind, ResultExt},
+ from_bencode::FromBencode,
+ object::Object,
+};
diff --git a/rust/vendor/bendy/src/decoding/decoder.rs b/rust/vendor/bendy/src/decoding/decoder.rs
new file mode 100644
index 0000000..203f8f7
--- /dev/null
+++ b/rust/vendor/bendy/src/decoding/decoder.rs
@@ -0,0 +1,803 @@
+use alloc::format;
+use core::str;
+
+use crate::{
+ decoding::{Error, Object},
+ state_tracker::{StateTracker, StructureError, Token},
+};
+
+/// A bencode decoder
+///
+/// This can be used to either get a stream of tokens (using the [`Decoder::tokens()`] method) or to
+/// read a complete object at a time (using the [`Decoder::next_object()`] method).
+#[derive(Debug)]
+pub struct Decoder<'a> {
+ source: &'a [u8],
+ offset: usize,
+ state: StateTracker<&'a [u8], Error>,
+}
+
+impl<'ser> Decoder<'ser> {
+ /// Create a new decoder from the given byte array
+ pub fn new(buffer: &'ser [u8]) -> Self {
+ Decoder {
+ source: buffer,
+ offset: 0,
+ state: StateTracker::new(),
+ }
+ }
+
+ /// Set the maximum nesting depth of the decoder. An unlimited-depth decoder may be
+ /// created using `with_max_depth(<usize>::max_value())`, but be warned that this will likely
+ /// exhaust memory if the nesting depth is too deep (even when reading raw tokens)
+ pub fn with_max_depth(mut self, new_max_depth: usize) -> Self {
+ self.state.set_max_depth(new_max_depth);
+ self
+ }
+
+ fn take_byte(&mut self) -> Option<u8> {
+ if self.offset < self.source.len() {
+ let ret = Some(self.source[self.offset]);
+ self.offset += 1;
+ ret
+ } else {
+ None
+ }
+ }
+
+ fn take_chunk(&mut self, count: usize) -> Option<&'ser [u8]> {
+ match self.offset.checked_add(count) {
+ Some(end_pos) if end_pos <= self.source.len() => {
+ let ret = &self.source[self.offset..end_pos];
+ self.offset = end_pos;
+ Some(ret)
+ },
+ _ => None,
+ }
+ }
+
+ fn take_int(&mut self, expected_terminator: char) -> Result<&'ser str, StructureError> {
+ enum State {
+ Start,
+ Sign,
+ Zero,
+ Digits,
+ }
+
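+        // Scan the integer body byte by byte: an optional '-' sign, then either a single '0' or
+        // a non-zero digit followed by more digits, terminated by `expected_terminator`. The
+        // state machine rejects "-0" and leading zeros without copying the digits.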
+ let mut curpos = self.offset;
+ let mut state = State::Start;
+
+ let mut success = false;
+ while curpos < self.source.len() {
+ let c = self.source[curpos] as char;
+ match state {
+ State::Start => {
+ if c == '-' {
+ state = State::Sign;
+ } else if c == '0' {
+ state = State::Zero;
+ } else if c >= '1' && c <= '9' {
+ state = State::Digits;
+ } else {
+ return Err(StructureError::unexpected("'-' or '0'..'9'", c, curpos));
+ }
+ },
+ State::Zero => {
+ if c == expected_terminator {
+ success = true;
+ break;
+ } else {
+ return Err(StructureError::unexpected(
+ &format!("{:?}", expected_terminator),
+ c,
+ curpos,
+ ));
+ }
+ },
+ State::Sign => {
+ if c >= '1' && c <= '9' {
+ state = State::Digits;
+ } else {
+ return Err(StructureError::unexpected("'1'..'9'", c, curpos));
+ }
+ },
+ State::Digits => {
+ if c >= '0' && c <= '9' {
+ // do nothing, this is ok
+ } else if c == expected_terminator {
+ success = true;
+ break;
+ } else {
+ return Err(StructureError::unexpected(
+ &format!("{:?} or '0'..'9'", expected_terminator),
+ c,
+ curpos,
+ ));
+ }
+ },
+ }
+ curpos += 1;
+ }
+
+ if !success {
+ return Err(StructureError::UnexpectedEof);
+ }
+
+ let slice = &self.source[self.offset..curpos];
+ self.offset = curpos + 1;
+        let ival = if cfg!(debug_assertions) {
+ str::from_utf8(slice).expect("We've already examined every byte in the string")
+ } else {
+ // Avoid a second UTF-8 check here
+ unsafe { str::from_utf8_unchecked(slice) }
+ };
+
+ Ok(ival)
+ }
+
+ fn raw_next_token(&mut self) -> Result<Token<'ser>, Error> {
+ let token = match self.take_byte().ok_or(StructureError::UnexpectedEof)? as char {
+ 'e' => Token::End,
+ 'l' => Token::List,
+ 'd' => Token::Dict,
+ 'i' => Token::Num(self.take_int('e')?),
+ c if c >= '0' && c <= '9' => {
+ self.offset -= 1;
+
+ let curpos = self.offset;
+ let ival = self.take_int(':')?;
+ let len = usize::from_str_radix(ival, 10).map_err(|_| {
+ StructureError::SyntaxError(format!("Invalid integer at offset {}", curpos))
+ })?;
+ Token::String(self.take_chunk(len).ok_or(StructureError::UnexpectedEof)?)
+ },
+ tok => {
+ return Err(Error::from(StructureError::SyntaxError(format!(
+ "Invalid token starting with {:?} at offset {}",
+ tok,
+ self.offset - 1
+ ))));
+ },
+ };
+
+ Ok(token)
+ }
+
+    /// Read the next token. Returns `Ok(Some(token))` if a token was successfully read,
+    /// `Ok(None)` at the end of the input, or an error if the input is invalid or truncated.
+ fn next_token(&mut self) -> Result<Option<Token<'ser>>, Error> {
+ self.state.check_error()?;
+
+ if self.offset == self.source.len() {
+ self.state.observe_eof()?;
+ return Ok(None);
+ }
+
+ let tok_result = self.raw_next_token();
+ let tok = self.state.latch_err(tok_result)?;
+
+ self.state.observe_token(&tok)?;
+ Ok(Some(tok))
+ }
+
+ /// Iterate over the tokens in the input stream. This guarantees that the resulting stream
+ /// of tokens constitutes a valid bencoded structure.
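+    ///
+    /// A short sketch of tokenizing a small list:
+    ///
+    /// ```
+    /// # use bendy::decoding::Decoder;
+    /// #
+    /// let tokens: Vec<_> = Decoder::new(b"l3:foo3:bare").tokens().collect();
+    ///
+    /// // List, String("foo"), String("bar"), End
+    /// assert_eq!(4, tokens.len());
+    /// assert!(tokens.iter().all(|token| token.is_ok()));
+    /// ```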
+ pub fn tokens(self) -> Tokens<'ser> {
+ Tokens(self)
+ }
+}
+
+/// Iterator over the tokens in the input stream. This guarantees that the resulting stream
+/// of tokens constitutes a valid bencoded structure.
+pub struct Tokens<'a>(Decoder<'a>);
+
+impl<'a> Iterator for Tokens<'a> {
+ type Item = Result<Token<'a>, Error>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ // Only report an error once
+ if self.0.state.check_error().is_err() {
+ return None;
+ }
+ match self.0.next_token() {
+ Ok(Some(token)) => Some(Ok(token)),
+ Ok(None) => None,
+ Err(err) => Some(Err(err)),
+ }
+ }
+}
+
+// High level interface
+
+impl<'ser> Decoder<'ser> {
+ /// Read the next object from the encoded stream
+ ///
+ /// If the beginning of an object was successfully read, returns `Ok(Some(object))`.
+ /// At the end of the input stream, this will return `Ok(None)`; otherwise, returns
+ /// `Err(some_error)`.
+ ///
+ /// Note that complex objects (lists and dicts) are not fully validated before being
+ /// returned from this method, so you may still get an error while decoding the contents
+    /// of the object.
+ pub fn next_object<'obj>(&'obj mut self) -> Result<Option<Object<'obj, 'ser>>, Error> {
+ use self::Token::*;
+ Ok(match self.next_token()? {
+ None | Some(End) => None,
+ Some(List) => Some(Object::List(ListDecoder::new(self))),
+ Some(Dict) => Some(Object::Dict(DictDecoder::new(self))),
+ Some(String(s)) => Some(Object::Bytes(s)),
+ Some(Num(s)) => Some(Object::Integer(s)),
+ })
+ }
+}
+
+/// A dictionary read from the input stream
+#[derive(Debug)]
+pub struct DictDecoder<'obj, 'ser: 'obj> {
+ decoder: &'obj mut Decoder<'ser>,
+ finished: bool,
+ start_point: usize,
+}
+
+/// A list read from the input stream
+#[derive(Debug)]
+pub struct ListDecoder<'obj, 'ser: 'obj> {
+ decoder: &'obj mut Decoder<'ser>,
+ finished: bool,
+ start_point: usize,
+}
+
+impl<'obj, 'ser: 'obj> DictDecoder<'obj, 'ser> {
+ fn new(decoder: &'obj mut Decoder<'ser>) -> Self {
+ let offset = decoder.offset - 1;
+ DictDecoder {
+ decoder,
+ finished: false,
+ start_point: offset,
+ }
+ }
+
+ /// Parse the next key/value pair from the dictionary. Returns `Ok(None)`
+ /// at the end of the dictionary
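+    ///
+    /// For example, reading the single pair out of `b"d3:fooi1ee"` (a sketch; the unwraps are
+    /// fine because the input is known to be valid):
+    ///
+    /// ```
+    /// use bendy::decoding::{Decoder, Object};
+    ///
+    /// let mut decoder = Decoder::new(b"d3:fooi1ee");
+    ///
+    /// if let Some(Object::Dict(mut dict)) = decoder.next_object().unwrap() {
+    ///     let (key, value) = dict.next_pair().unwrap().unwrap();
+    ///     assert_eq!(&b"foo"[..], key);
+    ///     assert_eq!("1", value.try_into_integer().unwrap());
+    ///     assert!(dict.next_pair().unwrap().is_none());
+    /// }
+    /// ```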
+ pub fn next_pair<'item>(
+ &'item mut self,
+ ) -> Result<Option<(&'ser [u8], Object<'item, 'ser>)>, Error> {
+ if self.finished {
+ return Ok(None);
+ }
+
+ // We convert to a token to release the mut ref to decoder
+ let key = self.decoder.next_object()?.map(Object::into_token);
+
+ if let Some(Token::String(k)) = key {
+ // This unwrap should be safe because None would produce an error here
+ let v = self.decoder.next_object()?.unwrap();
+ Ok(Some((k, v)))
+ } else {
+ // We can't have gotten anything but a string, as anything else would be
+ // a state error
+ self.finished = true;
+ Ok(None)
+ }
+ }
+
+ /// Consume (and validate the structure of) the rest of the items from the
+ /// dictionary. This method should be used to check for encoding errors if
+ /// [`DictDecoder::next_pair`] is not called until it returns `Ok(None)`.
+ pub fn consume_all(&mut self) -> Result<(), Error> {
+ while self.next_pair()?.is_some() {
+ // just drop the items
+ }
+ Ok(())
+ }
+
+ /// Get the raw bytes that made up this dictionary
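+    ///
+    /// The returned slice covers the dictionary's complete encoding, including its `d`/`e`
+    /// delimiters (a sketch using a tiny hand-written input):
+    ///
+    /// ```
+    /// use bendy::decoding::{Decoder, Object};
+    ///
+    /// let mut decoder = Decoder::new(b"d3:fooi1ee");
+    ///
+    /// if let Some(Object::Dict(dict)) = decoder.next_object().unwrap() {
+    ///     assert_eq!(&b"d3:fooi1ee"[..], dict.into_raw().unwrap());
+    /// }
+    /// ```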
+ pub fn into_raw(mut self) -> Result<&'ser [u8], Error> {
+ self.consume_all()?;
+ Ok(&self.decoder.source[self.start_point..self.decoder.offset])
+ }
+}
+
+impl<'obj, 'ser: 'obj> Drop for DictDecoder<'obj, 'ser> {
+ fn drop(&mut self) {
+ // we don't care about errors in drop; they'll be reported again in the parent
+ self.consume_all().ok();
+ }
+}
+
+impl<'obj, 'ser: 'obj> ListDecoder<'obj, 'ser> {
+ fn new(decoder: &'obj mut Decoder<'ser>) -> Self {
+ let offset = decoder.offset - 1;
+ ListDecoder {
+ decoder,
+ finished: false,
+ start_point: offset,
+ }
+ }
+
+ /// Get the next item from the list. Returns `Ok(None)` at the end of the list
+ pub fn next_object<'item>(&'item mut self) -> Result<Option<Object<'item, 'ser>>, Error> {
+ if self.finished {
+ return Ok(None);
+ }
+
+ let item = self.decoder.next_object()?;
+ if item.is_none() {
+ self.finished = true;
+ }
+
+ Ok(item)
+ }
+
+    /// Consume (and validate the structure of) the rest of the items from the
+    /// list. This method should be used to check for encoding errors if
+    /// [`ListDecoder::next_object`] is not called until it returns `Ok(None)`.
+ pub fn consume_all(&mut self) -> Result<(), Error> {
+ while self.next_object()?.is_some() {
+ // just drop the items
+ }
+ Ok(())
+ }
+
+ /// Get the raw bytes that made up this list
+ pub fn into_raw(mut self) -> Result<&'ser [u8], Error> {
+ self.consume_all()?;
+ Ok(&self.decoder.source[self.start_point..self.decoder.offset])
+ }
+}
+
+impl<'obj, 'ser: 'obj> Drop for ListDecoder<'obj, 'ser> {
+ fn drop(&mut self) {
+ // we don't care about errors in drop; they'll be reported again in the parent
+ self.consume_all().ok();
+ }
+}
+
+#[cfg(test)]
+mod test {
+
+ #[cfg(not(feature = "std"))]
+ use alloc::{vec, vec::Vec};
+ use core::iter;
+
+ use regex;
+
+ use super::*;
+
+ static SIMPLE_MSG: &'static [u8] = b"d3:bari1e3:fooli2ei3eee";
+
+ fn decode_tokens(msg: &[u8]) -> Vec<Token> {
+ let tokens: Vec<Result<Token, Error>> = Decoder::new(msg).tokens().collect();
+ if tokens.iter().all(Result::is_ok) {
+ tokens.into_iter().map(Result::unwrap).collect()
+ } else {
+ panic!(
+ "Unexpected tokenization error. Received tokens: {:?}",
+ tokens
+ );
+ }
+ }
+
+ fn decode_err(msg: &[u8], err_regex: &str) {
+ let mut tokens: Vec<Result<Token, Error>> = Decoder::new(msg).tokens().collect();
+ if tokens.iter().all(Result::is_ok) {
+ panic!("Unexpected parse success: {:?}", tokens);
+ } else {
+ let err = format!("{}", tokens.pop().unwrap().err().unwrap());
+ let err_regex = regex::Regex::new(err_regex).expect("Test regexes should be valid");
+ if !err_regex.is_match(&err) {
+ panic!("Unexpected error: {}", err);
+ }
+ }
+ }
+
+ #[test]
+ fn simple_bdecode_tokenization() {
+ use self::Token::*;
+ let tokens: Vec<_> = decode_tokens(SIMPLE_MSG);
+ assert_eq!(
+ tokens,
+ vec![
+ Dict,
+ String(&b"bar"[..]),
+ Num(&"1"[..]),
+ String(&b"foo"[..]),
+ List,
+ Num(&"2"[..]),
+ Num(&"3"[..]),
+ End,
+ End,
+ ]
+ );
+ }
+
+ #[test]
+ fn short_dict_should_fail() {
+ decode_err(b"d", r"EOF");
+ }
+
+ #[test]
+ fn short_list_should_fail() {
+ decode_err(b"l", r"EOF");
+ }
+
+ #[test]
+ fn short_int_should_fail() {
+ decode_err(b"i12", r"EOF");
+ }
+
+ #[test]
+ fn negative_numbers_and_zero_should_parse() {
+ use self::Token::*;
+ let tokens: Vec<_> = decode_tokens(b"i0ei-1e");
+ assert_eq!(tokens, vec![Num(&"0"), Num(&"-1")],);
+ }
+
+ #[test]
+ fn negative_zero_is_illegal() {
+ decode_err(b"i-0e", "got '0'");
+ }
+
+ #[test]
+ fn leading_zeros_are_illegal() {
+ decode_err(b"i01e", "got '1'");
+ decode_err(b"i-01e", "got '0'");
+ }
+
+ #[test]
+ fn map_keys_must_be_strings() {
+ decode_err(b"d3:fooi1ei2ei3ee", r"Map keys must be strings");
+ }
+
+ #[test]
+ fn map_keys_must_ascend() {
+ decode_err(b"d3:fooi1e3:bari1ee", r"Keys were not sorted");
+ }
+
+ #[test]
+ fn map_keys_must_be_unique() {
+ decode_err(b"d3:fooi1e3:fooi1ee", r"Keys were not sorted");
+ }
+
+ #[test]
+ fn map_keys_must_have_values() {
+ decode_err(b"d3:fooe", r"Missing map value");
+ }
+
+ #[test]
+ fn strings_must_have_bodies() {
+ decode_err(b"3:", r"EOF");
+ }
+
+ #[test]
+ fn ints_must_have_bodies() {
+ decode_err(b"ie", r"Expected.*got 'e'");
+ }
+
+ #[test]
+ fn recursion_should_be_limited() {
+ let mut msg = Vec::new();
+ msg.extend(iter::repeat(b'l').take(4096));
+ msg.extend(iter::repeat(b'e').take(4096));
+ decode_err(&msg, r"nesting depth");
+ }
+
+ #[test]
+ fn recursion_bounds_should_be_tight() {
+ let test_msg = b"lllleeee";
+ assert!(Decoder::new(test_msg)
+ .with_max_depth(4)
+ .tokens()
+ .last()
+ .unwrap()
+ .is_ok());
+ assert!(Decoder::new(test_msg)
+ .with_max_depth(3)
+ .tokens()
+ .last()
+ .unwrap()
+ .is_err());
+ }
+
+ #[test]
+ fn dict_drop_should_consume_struct() {
+ let mut decoder = Decoder::new(b"d3:fooi1e3:quxi2eei1000e");
+ drop(decoder.next_object());
+
+ let token = decoder.tokens().next().unwrap().unwrap();
+ assert_eq!(token, Token::Num("1000"));
+ }
+
+ #[test]
+ fn list_drop_should_consume_struct() {
+ let mut decoder = Decoder::new(b"li1ei2ei3eei1000e");
+ drop(decoder.next_object());
+
+ let token = decoder.tokens().next().unwrap().unwrap();
+ assert_eq!(token, Token::Num("1000"));
+ }
+
+ #[test]
+ fn bytes_or_should_work_on_bytes() {
+ assert_eq!(
+ Ok(&b"foo"[..]),
+ Object::Bytes(b"foo").bytes_or(Err("failure"))
+ );
+ }
+
+ #[test]
+ fn bytes_or_should_not_work_on_other_types() {
+ assert_eq!(
+ Err("failure"),
+ Object::Integer("123").bytes_or(Err("failure"))
+ );
+
+ let mut list_decoder = Decoder::new(b"le");
+ assert_eq!(
+ Err("failure"),
+ list_decoder
+ .next_object()
+ .unwrap()
+ .unwrap()
+ .bytes_or(Err("failure"))
+ );
+ let mut dict_decoder = Decoder::new(b"de");
+ assert_eq!(
+ Err("failure"),
+ dict_decoder
+ .next_object()
+ .unwrap()
+ .unwrap()
+ .bytes_or(Err("failure"))
+ );
+ }
+
+ #[test]
+ fn bytes_or_else_should_work_on_bytes() {
+ assert_eq!(
+ Ok(&b"foo"[..]),
+ Object::Bytes(b"foo").bytes_or_else(|_| Err("failure"))
+ );
+ }
+
+ #[test]
+ fn bytes_or_else_should_not_work_on_other_types() {
+ assert_eq!(
+ Err("failure"),
+ Object::Integer("123").bytes_or_else(|_| Err("failure"))
+ );
+ let mut list_decoder = Decoder::new(b"le");
+ assert_eq!(
+ Err("failure"),
+ list_decoder
+ .next_object()
+ .unwrap()
+ .unwrap()
+ .bytes_or_else(|_| Err("failure"))
+ );
+ let mut dict_decoder = Decoder::new(b"de");
+ assert_eq!(
+ Err("failure"),
+ dict_decoder
+ .next_object()
+ .unwrap()
+ .unwrap()
+ .bytes_or_else(|_| Err("failure"))
+ );
+ }
+
+ #[test]
+ fn integer_str_or_should_work_on_int() {
+ assert_eq!(
+ Ok(&"123"[..]),
+ Object::Integer("123").integer_or(Err("failure"))
+ );
+ }
+
+ #[test]
+ fn integer_str_or_should_not_work_on_other_types() {
+ assert_eq!(
+ Err("failure"),
+ Object::Bytes(b"foo").integer_or(Err("failure"))
+ );
+ let mut list_decoder = Decoder::new(b"le");
+ assert_eq!(
+ Err("failure"),
+ list_decoder
+ .next_object()
+ .unwrap()
+ .unwrap()
+ .integer_or(Err("failure"))
+ );
+ let mut dict_decoder = Decoder::new(b"de");
+ assert_eq!(
+ Err("failure"),
+ dict_decoder
+ .next_object()
+ .unwrap()
+ .unwrap()
+ .integer_or(Err("failure"))
+ );
+ }
+
+ #[test]
+ fn integer_str_or_else_should_work_on_int() {
+ assert_eq!(
+ Ok(&"123"[..]),
+ Object::Integer("123").integer_or_else(|_| Err("failure"))
+ );
+ }
+
+ #[test]
+ fn integer_str_or_else_should_not_work_on_other_types() {
+ assert_eq!(
+ Err("failure"),
+ Object::Bytes(b"foo").integer_or_else(|_| Err("failure"))
+ );
+ let mut list_decoder = Decoder::new(b"le");
+ assert_eq!(
+ Err("failure"),
+ list_decoder
+ .next_object()
+ .unwrap()
+ .unwrap()
+ .integer_or_else(|_| Err("failure"))
+ );
+ let mut dict_decoder = Decoder::new(b"de");
+ assert_eq!(
+ Err("failure"),
+ dict_decoder
+ .next_object()
+ .unwrap()
+ .unwrap()
+ .integer_or_else(|_| Err("failure"))
+ );
+ }
+
+ #[test]
+ fn list_or_should_work_on_list() {
+ let mut list_decoder = Decoder::new(b"le");
+ assert!(list_decoder
+ .next_object()
+ .unwrap()
+ .unwrap()
+ .list_or(Err("failure"))
+ .is_ok());
+ }
+ #[test]
+ fn list_or_should_not_work_on_other_types() {
+ assert_eq!(
+ "failure",
+ Object::Bytes(b"foo").list_or(Err("failure")).unwrap_err()
+ );
+ assert_eq!(
+ "failure",
+ Object::Integer("foo").list_or(Err("failure")).unwrap_err()
+ );
+
+ let mut dict_decoder = Decoder::new(b"de");
+ assert_eq!(
+ "failure",
+ dict_decoder
+ .next_object()
+ .unwrap()
+ .unwrap()
+ .list_or(Err("failure"))
+ .unwrap_err()
+ );
+ }
+
+ #[test]
+ fn list_or_else_should_work_on_list() {
+ let mut list_decoder = Decoder::new(b"le");
+ assert!(list_decoder
+ .next_object()
+ .unwrap()
+ .unwrap()
+ .list_or_else(|_| Err("failure"))
+ .is_ok());
+ }
+ #[test]
+ fn list_or_else_should_not_work_on_other_types() {
+ assert_eq!(
+ "failure",
+ Object::Bytes(b"foo")
+ .list_or_else(|_| Err("failure"))
+ .unwrap_err()
+ );
+ assert_eq!(
+ "failure",
+ Object::Integer("foo")
+ .list_or_else(|_| Err("failure"))
+ .unwrap_err()
+ );
+
+ let mut dict_decoder = Decoder::new(b"de");
+ assert_eq!(
+ "failure",
+ dict_decoder
+ .next_object()
+ .unwrap()
+ .unwrap()
+ .list_or_else(|_| Err("failure"))
+ .unwrap_err()
+ );
+ }
+
+ #[test]
+ fn dictionary_or_should_work_on_dict() {
+ let mut dict_decoder = Decoder::new(b"de");
+ assert!(dict_decoder
+ .next_object()
+ .unwrap()
+ .unwrap()
+ .dictionary_or(Err("failure"))
+ .is_ok());
+ }
+
+ #[test]
+ fn dictionary_or_should_not_work_on_other_types() {
+ assert_eq!(
+ "failure",
+ Object::Bytes(b"foo")
+ .dictionary_or(Err("failure"))
+ .unwrap_err()
+ );
+ assert_eq!(
+ "failure",
+ Object::Integer("foo")
+ .dictionary_or(Err("failure"))
+ .unwrap_err()
+ );
+
+ let mut list_decoder = Decoder::new(b"le");
+ assert_eq!(
+ "failure",
+ list_decoder
+ .next_object()
+ .unwrap()
+ .unwrap()
+ .dictionary_or(Err("failure"))
+ .unwrap_err()
+ );
+ }
+
+ #[test]
+ fn dictionary_or_else_should_work_on_dict() {
+ let mut dict_decoder = Decoder::new(b"de");
+ assert!(dict_decoder
+ .next_object()
+ .unwrap()
+ .unwrap()
+ .dictionary_or_else(|_| Err("failure"))
+ .is_ok());
+ }
+
+ #[test]
+ fn dictionary_or_else_should_not_work_on_other_types() {
+ assert_eq!(
+ "failure",
+ Object::Bytes(b"foo")
+ .dictionary_or_else(|_| Err("failure"))
+ .unwrap_err()
+ );
+ assert_eq!(
+ "failure",
+ Object::Integer("foo")
+ .dictionary_or_else(|_| Err("failure"))
+ .unwrap_err()
+ );
+
+ let mut list_decoder = Decoder::new(b"le");
+ assert_eq!(
+ "failure",
+ list_decoder
+ .next_object()
+ .unwrap()
+ .unwrap()
+ .dictionary_or_else(|_| Err("failure"))
+ .unwrap_err()
+ );
+ }
+}
diff --git a/rust/vendor/bendy/src/decoding/error.rs b/rust/vendor/bendy/src/decoding/error.rs
new file mode 100644
index 0000000..7c9347d
--- /dev/null
+++ b/rust/vendor/bendy/src/decoding/error.rs
@@ -0,0 +1,155 @@
+#[cfg(not(feature = "std"))]
+use alloc::{str::Utf8Error, string::FromUtf8Error};
+#[cfg(not(feature = "std"))]
+use core::num::ParseIntError;
+
+use alloc::{
+ format,
+ string::{String, ToString},
+};
+use core::fmt::{self, Display, Formatter};
+
+#[cfg(feature = "std")]
+use std::{error::Error as StdError, sync::Arc};
+
+use failure::Fail;
+
+use crate::state_tracker::StructureError;
+
+#[derive(Debug, Clone, Fail)]
+pub struct Error {
+ #[fail(context)]
+ context: Option<String>,
+ #[fail(cause)]
+ error: ErrorKind,
+}
+
+/// An enumeration of potential errors that appear during bencode deserialization.
+#[derive(Debug, Clone, Fail)]
+pub enum ErrorKind {
+ /// Error that occurs if the serialized structure contains invalid semantics.
+ #[cfg(feature = "std")]
+ #[fail(display = "malformed content discovered: {}", _0)]
+ MalformedContent(Arc<failure::Error>),
+ /// Error that occurs if the serialized structure contains invalid semantics.
+ #[cfg(not(feature = "std"))]
+ #[fail(display = "malformed content discovered")]
+ MalformedContent,
+ /// Error that occurs if the serialized structure is incomplete.
+ #[fail(display = "missing field: {}", _0)]
+ MissingField(String),
+ /// Error in the bencode structure (e.g. a missing field end separator).
+ #[fail(display = "bencode encoding corrupted ({})", _0)]
+ StructureError(#[fail(cause)] StructureError),
+ /// Error that occurs if the serialized structure contains an unexpected field.
+ #[fail(display = "unexpected field: {}", _0)]
+ UnexpectedField(String),
+    /// Error caused by an unexpected bencode token during deserialization.
+ #[fail(display = "discovered {} but expected {}", _0, _1)]
+ UnexpectedToken(String, String),
+}
+
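+/// Extension trait that attaches decoding context (such as the current field name) to the
+/// error inside a `Result`, mirroring [`Error::context`].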
+pub trait ResultExt {
+ fn context(self, context: impl Display) -> Self;
+}
+
+impl Error {
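+    /// Prepend another context layer (typically a field or dictionary key name) to the error.
+    ///
+    /// Layers added later end up in front, so the rendered context reads from the outermost
+    /// structure down to the failing field.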
+ pub fn context(mut self, context: impl Display) -> Self {
+ if let Some(current) = self.context.as_mut() {
+ *current = format!("{}.{}", context, current);
+ } else {
+ self.context = Some(context.to_string());
+ }
+
+ self
+ }
+
+ /// Raised when there is a general error while deserializing a type.
+ /// The message should not be capitalized and should not end with a period.
+ #[cfg(feature = "std")]
+ pub fn malformed_content(cause: impl Into<failure::Error>) -> Error {
+ let error = Arc::new(cause.into());
+ Self::from(ErrorKind::MalformedContent(error))
+ }
+
+ #[cfg(not(feature = "std"))]
+ pub fn malformed_content<T>(_cause: T) -> Error {
+ Self::from(ErrorKind::MalformedContent)
+ }
+
+    /// Returns an `Error::MissingField` which contains the name of the field.
+ pub fn missing_field(field_name: impl Display) -> Error {
+ Self::from(ErrorKind::MissingField(field_name.to_string()))
+ }
+
+    /// Returns an `Error::UnexpectedField` which contains the name of the field.
+ pub fn unexpected_field(field_name: impl Display) -> Error {
+ Self::from(ErrorKind::UnexpectedField(field_name.to_string()))
+ }
+
+    /// Returns an `ErrorKind::UnexpectedToken` error describing the expected and the discovered token.
+ pub fn unexpected_token(expected: impl Display, discovered: impl Display) -> Error {
+ Self::from(ErrorKind::UnexpectedToken(
+ expected.to_string(),
+ discovered.to_string(),
+ ))
+ }
+}
+
+impl Display for Error {
+ fn fmt(&self, f: &mut Formatter) -> fmt::Result {
+ match &self.context {
+ Some(context) => write!(f, "Error: {} in {}", self.error, context),
+ None => write!(f, "Error: {}", self.error),
+ }
+ }
+}
+
+impl From<StructureError> for Error {
+ fn from(error: StructureError) -> Self {
+ Self::from(ErrorKind::StructureError(error))
+ }
+}
+
+impl From<ErrorKind> for Error {
+ fn from(kind: ErrorKind) -> Self {
+ Self {
+ context: None,
+ error: kind,
+ }
+ }
+}
+
+#[cfg(not(feature = "std"))]
+impl From<FromUtf8Error> for Error {
+ fn from(err: FromUtf8Error) -> Self {
+ Self::malformed_content(err)
+ }
+}
+
+#[cfg(not(feature = "std"))]
+impl From<Utf8Error> for Error {
+ fn from(err: Utf8Error) -> Self {
+ Self::malformed_content(err)
+ }
+}
+
+#[cfg(not(feature = "std"))]
+impl From<ParseIntError> for Error {
+ fn from(err: ParseIntError) -> Self {
+ Self::malformed_content(err)
+ }
+}
+
+#[cfg(feature = "std")]
+impl<T: StdError + Send + Sync + 'static> From<T> for Error {
+ fn from(error: T) -> Self {
+ Self::malformed_content(error)
+ }
+}
+
+impl<T> ResultExt for Result<T, Error> {
+ fn context(self, context: impl Display) -> Result<T, Error> {
+ self.map_err(|err| err.context(context))
+ }
+}
diff --git a/rust/vendor/bendy/src/decoding/from_bencode.rs b/rust/vendor/bendy/src/decoding/from_bencode.rs
new file mode 100644
index 0000000..4a803a3
--- /dev/null
+++ b/rust/vendor/bendy/src/decoding/from_bencode.rs
@@ -0,0 +1,216 @@
+#[cfg(not(feature = "std"))]
+use alloc::{collections::BTreeMap, rc::Rc, string::String, vec::Vec};
+
+#[cfg(feature = "std")]
+use std::{
+ collections::{BTreeMap, HashMap},
+ hash::{BuildHasher, Hash},
+ rc::Rc,
+};
+
+use crate::{
+ decoding::{Decoder, Error, Object},
+ encoding::AsString,
+ state_tracker::StructureError,
+};
+
+/// Basic trait for bencode-based value deserialization.
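+///
+/// A minimal sketch of a manual implementation for a made-up `Height` newtype, delegating to
+/// the existing `u64` implementation:
+///
+/// ```
+/// use bendy::decoding::{Error, FromBencode, Object};
+///
+/// struct Height(u64);
+///
+/// impl FromBencode for Height {
+///     const EXPECTED_RECURSION_DEPTH: usize = 0;
+///
+///     fn decode_bencode_object(object: Object) -> Result<Self, Error> {
+///         u64::decode_bencode_object(object).map(Height)
+///     }
+/// }
+///
+/// let height = Height::from_bencode(&b"i42e"[..]).unwrap();
+/// assert_eq!(42, height.0);
+/// ```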
+pub trait FromBencode {
+ /// Maximum allowed depth of nested structures before the decoding should be aborted.
+ const EXPECTED_RECURSION_DEPTH: usize = 2048;
+
+ /// Deserialize an object from its byte representation.
+ fn from_bencode(bytes: &[u8]) -> Result<Self, Error>
+ where
+ Self: Sized,
+ {
+ let mut decoder = Decoder::new(bytes).with_max_depth(Self::EXPECTED_RECURSION_DEPTH);
+ let object = decoder.next_object()?;
+
+ object.map_or(
+ Err(Error::from(StructureError::UnexpectedEof)),
+ Self::decode_bencode_object,
+ )
+ }
+
+ /// Deserialize an object from its intermediate bencode representation.
+ fn decode_bencode_object(object: Object) -> Result<Self, Error>
+ where
+ Self: Sized;
+}
+
+macro_rules! impl_from_bencode_for_integer {
+ ($($type:ty)*) => {$(
+ impl FromBencode for $type {
+ const EXPECTED_RECURSION_DEPTH: usize = 0;
+
+ fn decode_bencode_object(object: Object) -> Result<Self, Error>
+ where
+ Self: Sized,
+ {
+ let content = object.try_into_integer()?;
+ let number = content.parse::<$type>()?;
+
+ Ok(number)
+ }
+ }
+ )*}
+}
+
+impl_from_bencode_for_integer!(u8 u16 u32 u64 u128 usize i8 i16 i32 i64 i128 isize);
+
+impl<ContentT: FromBencode> FromBencode for Vec<ContentT> {
+ const EXPECTED_RECURSION_DEPTH: usize = ContentT::EXPECTED_RECURSION_DEPTH + 1;
+
+ fn decode_bencode_object(object: Object) -> Result<Self, Error>
+ where
+ Self: Sized,
+ {
+ let mut list = object.try_into_list()?;
+ let mut results = Vec::new();
+
+ while let Some(object) = list.next_object()? {
+ let item = ContentT::decode_bencode_object(object)?;
+ results.push(item);
+ }
+
+ Ok(results)
+ }
+}
+
+impl FromBencode for String {
+ const EXPECTED_RECURSION_DEPTH: usize = 0;
+
+ fn decode_bencode_object(object: Object) -> Result<Self, Error>
+ where
+ Self: Sized,
+ {
+ let content = object.try_into_bytes()?;
+ let content = String::from_utf8(content.to_vec())?;
+
+ Ok(content)
+ }
+}
+
+impl<K, V> FromBencode for BTreeMap<K, V>
+where
+ K: FromBencode + Ord,
+ V: FromBencode,
+{
+ const EXPECTED_RECURSION_DEPTH: usize = V::EXPECTED_RECURSION_DEPTH + 1;
+
+ fn decode_bencode_object(object: Object) -> Result<Self, Error>
+ where
+ Self: Sized,
+ {
+ let mut dict = object.try_into_dictionary()?;
+ let mut result = BTreeMap::default();
+
+ while let Some((key, value)) = dict.next_pair()? {
+ let key = K::decode_bencode_object(Object::Bytes(key))?;
+ let value = V::decode_bencode_object(value)?;
+
+ result.insert(key, value);
+ }
+
+ Ok(result)
+ }
+}
+
+#[cfg(feature = "std")]
+impl<K, V, H> FromBencode for HashMap<K, V, H>
+where
+ K: FromBencode + Hash + Eq,
+ V: FromBencode,
+ H: BuildHasher + Default,
+{
+ const EXPECTED_RECURSION_DEPTH: usize = V::EXPECTED_RECURSION_DEPTH + 1;
+
+ fn decode_bencode_object(object: Object) -> Result<Self, Error>
+ where
+ Self: Sized,
+ {
+ let mut dict = object.try_into_dictionary()?;
+ let mut result = HashMap::default();
+
+ while let Some((key, value)) = dict.next_pair()? {
+ let key = K::decode_bencode_object(Object::Bytes(key))?;
+ let value = V::decode_bencode_object(value)?;
+
+ result.insert(key, value);
+ }
+
+ Ok(result)
+ }
+}
+
+impl<T: FromBencode> FromBencode for Rc<T> {
+ const EXPECTED_RECURSION_DEPTH: usize = T::EXPECTED_RECURSION_DEPTH;
+
+ fn decode_bencode_object(object: Object) -> Result<Self, Error>
+ where
+ Self: Sized,
+ {
+ T::decode_bencode_object(object).map(Rc::new)
+ }
+}
+
+impl FromBencode for AsString<Vec<u8>> {
+ const EXPECTED_RECURSION_DEPTH: usize = 0;
+
+ fn decode_bencode_object(object: Object) -> Result<Self, Error>
+ where
+ Self: Sized,
+ {
+ object.try_into_bytes().map(Vec::from).map(AsString)
+ }
+}
+
+#[cfg(test)]
+mod test {
+
+ #[cfg(not(feature = "std"))]
+ use alloc::{format, vec::Vec};
+
+ use crate::encoding::AsString;
+
+ use super::*;
+
+ #[test]
+ fn from_bencode_to_string_should_work_with_valid_input() {
+ let expected_message = "hello";
+ let serialized_message =
+ format!("{}:{}", expected_message.len(), expected_message).into_bytes();
+
+ let decoded_message = String::from_bencode(&serialized_message).unwrap();
+ assert_eq!(expected_message, decoded_message);
+ }
+
+ #[test]
+ fn from_bencode_to_as_string_should_work_with_valid_input() {
+ let expected_message = "hello";
+ let serialized_message =
+ format!("{}:{}", expected_message.len(), expected_message).into_bytes();
+
+ let decoded_vector = AsString::from_bencode(&serialized_message).unwrap();
+ assert_eq!(expected_message.as_bytes(), &decoded_vector.0[..]);
+ }
+
+ #[test]
+ #[should_panic(expected = "Num")]
+ fn from_bencode_to_as_string_should_fail_for_integer() {
+ AsString::<Vec<u8>>::from_bencode(&b"i1e"[..]).unwrap();
+ }
+
+ #[test]
+ #[should_panic(expected = "NestingTooDeep")]
+ fn from_bencode_to_as_string_should_fail_for_list() {
+ AsString::<Vec<u8>>::from_bencode(&b"l1:ae"[..]).unwrap();
+ }
+
+ #[test]
+ #[should_panic(expected = "NestingTooDeep")]
+ fn from_bencode_to_as_string_should_fail_for_dictionary() {
+ AsString::<Vec<u8>>::from_bencode(&b"d1:a1:ae"[..]).unwrap();
+ }
+}
diff --git a/rust/vendor/bendy/src/decoding/object.rs b/rust/vendor/bendy/src/decoding/object.rs
new file mode 100644
index 0000000..c2a43a9
--- /dev/null
+++ b/rust/vendor/bendy/src/decoding/object.rs
@@ -0,0 +1,394 @@
+use crate::{
+ decoding::{DictDecoder, Error, ListDecoder},
+ state_tracker::Token,
+};
+
+/// An object read from a decoder
+pub enum Object<'obj, 'ser: 'obj> {
+ /// A list of arbitrary objects
+ List(ListDecoder<'obj, 'ser>),
+ /// A map of string-valued keys to arbitrary objects
+ Dict(DictDecoder<'obj, 'ser>),
+ /// An unparsed integer
+ Integer(&'ser str),
+ /// A byte string
+ Bytes(&'ser [u8]),
+}
+
+impl<'obj, 'ser: 'obj> Object<'obj, 'ser> {
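+    /// Convert the object into the bencode [`Token`] that introduced it.
+    ///
+    /// For lists and dictionaries the nested decoder is dropped, which consumes (and
+    /// validates) the remainder of that object.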
+ pub fn into_token(self) -> Token<'ser> {
+ match self {
+ Object::List(_) => Token::List,
+ Object::Dict(_) => Token::Dict,
+ Object::Bytes(bytes) => Token::String(bytes),
+ Object::Integer(num) => Token::Num(num),
+ }
+ }
+
+ /// Try to treat the object as a byte string, mapping [`Object::Bytes(v)`] into
+ /// [`Ok(v)`]. Any other variant returns the given default value.
+ ///
+ /// Default arguments passed into `bytes_or` are eagerly evaluated; if you
+ /// are passing the result of a function call, it is recommended to use
+ /// [`bytes_or_else`], which is lazily evaluated.
+ ///
+ /// [`Object::Bytes(v)`]: self::Object::Bytes
+ /// [`Ok(v)`]: https://doc.rust-lang.org/std/result/enum.Result.html#variant.Ok
+ /// [`bytes_or_else`]: self::Object::bytes_or_else
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bendy::decoding::Object;
+ ///
+ /// let x = Object::Bytes(b"foo");
+ /// assert_eq!(Ok(&b"foo"[..]), x.bytes_or(Err("failure")));
+ ///
+ /// let x = Object::Integer("foo");
+ /// assert_eq!(Err("failure"), x.bytes_or(Err("failure")));
+ /// ```
+ pub fn bytes_or<ErrorT>(
+ self,
+ default: Result<&'ser [u8], ErrorT>,
+ ) -> Result<&'ser [u8], ErrorT> {
+ match self {
+ Object::Bytes(content) => Ok(content),
+ _ => default,
+ }
+ }
+
+ /// Try to treat the object as a byte string, mapping [`Object::Bytes(v)`] into
+ /// [`Ok(v)`]. Any other variant is passed into the given fallback method.
+ ///
+ /// [`Object::Bytes(v)`]: self::Object::Bytes
+ /// [`Ok(v)`]: https://doc.rust-lang.org/std/result/enum.Result.html#variant.Ok
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bendy::decoding::Object;
+ ///
+ /// let x = Object::Bytes(b"foo");
+ /// assert_eq!(
+ /// Ok(&b"foo"[..]),
+ /// x.bytes_or_else(|obj| Err(obj.into_token().name()))
+ /// );
+ ///
+ /// let x = Object::Integer("foo");
+ /// assert_eq!(
+ /// Err("Num"),
+ /// x.bytes_or_else(|obj| Err(obj.into_token().name()))
+ /// );
+ /// ```
+ pub fn bytes_or_else<ErrorT>(
+ self,
+ op: impl FnOnce(Self) -> Result<&'ser [u8], ErrorT>,
+ ) -> Result<&'ser [u8], ErrorT> {
+ match self {
+ Object::Bytes(content) => Ok(content),
+ _ => op(self),
+ }
+ }
+
+ /// Try to treat the object as a byte string, mapping [`Object::Bytes(v)`] into
+    /// [`Ok(v)`]. Any other variant results in an `ErrorKind::UnexpectedToken` error.
+ ///
+ /// [`Object::Bytes(v)`]: self::Object::Bytes
+ /// [`Ok(v)`]: https://doc.rust-lang.org/std/result/enum.Result.html#variant.Ok
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bendy::decoding::Object;
+ ///
+ /// let x = Object::Bytes(b"foo");
+ /// assert_eq!(b"foo", x.try_into_bytes().unwrap());
+ ///
+ /// let x = Object::Integer("foo");
+ /// assert!(x.try_into_bytes().is_err());
+ /// ```
+ pub fn try_into_bytes(self) -> Result<&'ser [u8], Error> {
+ self.bytes_or_else(|obj| Err(Error::unexpected_token("String", obj.into_token().name())))
+ }
+
+ /// Try to treat the object as an integer and return the internal string representation,
+ /// mapping [`Object::Integer(v)`] into [`Ok(v)`]. Any other variant returns the given
+ /// default value.
+ ///
+ /// Default arguments passed into `integer_or` are eagerly evaluated; if you are passing
+ /// the result of a function call, it is recommended to use [`integer_or_else`], which
+ /// is lazily evaluated.
+ ///
+ /// [`Object::Integer(v)`]: self::Object::Integer
+ /// [`Ok(v)`]: https://doc.rust-lang.org/std/result/enum.Result.html#variant.Ok
+ /// [`integer_or_else`]: self::Object::integer_or_else
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bendy::decoding::Object;
+ ///
+ /// let x = Object::Integer("123");
+ /// assert_eq!(Ok(&"123"[..]), x.integer_or(Err("failure")));
+ ///
+ /// let x = Object::Bytes(b"foo");
+ /// assert_eq!(Err("failure"), x.integer_or(Err("failure")));
+ /// ```
+ pub fn integer_or<ErrorT>(
+ self,
+ default: Result<&'ser str, ErrorT>,
+ ) -> Result<&'ser str, ErrorT> {
+ match self {
+ Object::Integer(content) => Ok(content),
+ _ => default,
+ }
+ }
+
+ /// Try to treat the object as an integer and return the internal string representation,
+ /// mapping [`Object::Integer(v)`] into [`Ok(v)`]. Any other variant is passed into the
+ /// given fallback method.
+ ///
+ /// [`Object::Integer(v)`]: self::Object::Integer
+ /// [`Ok(v)`]: https://doc.rust-lang.org/std/result/enum.Result.html#variant.Ok
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bendy::decoding::Object;
+ ///
+ /// let x = Object::Integer("123");
+ /// assert_eq!(
+ /// Ok(&"123"[..]),
+ /// x.integer_or_else(|obj| Err(obj.into_token().name()))
+ /// );
+ ///
+ /// let x = Object::Bytes(b"foo");
+ /// assert_eq!(
+ /// Err("String"),
+ /// x.integer_or_else(|obj| Err(obj.into_token().name()))
+ /// );
+ /// ```
+ pub fn integer_or_else<ErrorT>(
+ self,
+ op: impl FnOnce(Self) -> Result<&'ser str, ErrorT>,
+ ) -> Result<&'ser str, ErrorT> {
+ match self {
+ Object::Integer(content) => Ok(content),
+ _ => op(self),
+ }
+ }
+
+ /// Try to treat the object as an integer and return the internal string representation,
+ /// mapping [`Object::Integer(v)`] into [`Ok(v)`]. Any other variant results in an
+    /// `ErrorKind::UnexpectedToken` error.
+ ///
+ /// [`Object::Integer(v)`]: self::Object::Integer
+ /// [`Ok(v)`]: https://doc.rust-lang.org/std/result/enum.Result.html#variant.Ok
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bendy::decoding::Object;
+ ///
+ /// let x = Object::Integer("123");
+ /// assert_eq!("123", x.try_into_integer().unwrap());
+ ///
+ /// let x = Object::Bytes(b"foo");
+ /// assert!(x.try_into_integer().is_err());
+ /// ```
+ pub fn try_into_integer(self) -> Result<&'ser str, Error> {
+ self.integer_or_else(|obj| Err(Error::unexpected_token("Num", obj.into_token().name())))
+ }
+
+ /// Try to treat the object as a list and return the internal list content decoder,
+ /// mapping [`Object::List(v)`] into [`Ok(v)`]. Any other variant returns the given
+ /// default value.
+ ///
+ /// Default arguments passed into `list_or` are eagerly evaluated; if you are passing
+ /// the result of a function call, it is recommended to use [`list_or_else`], which is
+ /// lazily evaluated.
+ ///
+ /// [`Object::List(v)`]: self::Object::List
+ /// [`Ok(v)`]: https://doc.rust-lang.org/std/result/enum.Result.html#variant.Ok
+ /// [`list_or_else`]: self::Object::list_or_else
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bendy::decoding::{Decoder, Object};
+ ///
+ /// let mut list_decoder = Decoder::new(b"le");
+ /// let x = list_decoder.next_object().unwrap().unwrap();
+ ///
+ /// assert!(x.list_or(Err("failure")).is_ok());
+ ///
+ /// let x = Object::Bytes(b"foo");
+ /// assert_eq!("failure", x.list_or(Err("failure")).unwrap_err());
+ /// ```
+ pub fn list_or<ErrorT>(
+ self,
+ default: Result<ListDecoder<'obj, 'ser>, ErrorT>,
+ ) -> Result<ListDecoder<'obj, 'ser>, ErrorT> {
+ match self {
+ Object::List(content) => Ok(content),
+ _ => default,
+ }
+ }
+
+ /// Try to treat the object as a list and return the internal list content decoder,
+ /// mapping [`Object::List(v)`] into [`Ok(v)`]. Any other variant is passed into the
+ /// given fallback method.
+ ///
+ /// [`Object::List(v)`]: self::Object::List
+ /// [`Ok(v)`]: https://doc.rust-lang.org/std/result/enum.Result.html#variant.Ok
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bendy::decoding::{Decoder, Object};
+ ///
+ /// let mut list_decoder = Decoder::new(b"le");
+ /// let x = list_decoder.next_object().unwrap().unwrap();
+ ///
+ /// assert!(x.list_or_else(|obj| Err(obj.into_token().name())).is_ok());
+ ///
+ /// let x = Object::Bytes(b"foo");
+ /// assert_eq!(
+ /// "String",
+ /// x.list_or_else(|obj| Err(obj.into_token().name()))
+ /// .unwrap_err()
+ /// );
+ /// ```
+ pub fn list_or_else<ErrorT>(
+ self,
+ op: impl FnOnce(Self) -> Result<ListDecoder<'obj, 'ser>, ErrorT>,
+ ) -> Result<ListDecoder<'obj, 'ser>, ErrorT> {
+ match self {
+ Object::List(content) => Ok(content),
+ _ => op(self),
+ }
+ }
+
+ /// Try to treat the object as a list and return the internal list content decoder,
+ /// mapping [`Object::List(v)`] into [`Ok(v)`]. Any other variant results in an
+    /// `ErrorKind::UnexpectedToken` error.
+ ///
+ /// [`Object::List(v)`]: self::Object::List
+ /// [`Ok(v)`]: https://doc.rust-lang.org/std/result/enum.Result.html#variant.Ok
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bendy::decoding::{Decoder, Object};
+ ///
+ /// let mut list_decoder = Decoder::new(b"le");
+ /// let x = list_decoder.next_object().unwrap().unwrap();
+ ///
+ /// assert!(x.try_into_list().is_ok());
+ ///
+ /// let x = Object::Bytes(b"foo");
+ /// assert!(x.try_into_list().is_err());
+ /// ```
+ pub fn try_into_list(self) -> Result<ListDecoder<'obj, 'ser>, Error> {
+ self.list_or_else(|obj| Err(Error::unexpected_token("List", obj.into_token().name())))
+ }
+
+ /// Try to treat the object as a dictionary and return the internal dictionary content
+ /// decoder, mapping [`Object::Dict(v)`] into [`Ok(v)`]. Any other variant returns the
+ /// given default value.
+ ///
+ /// Default arguments passed to `dictionary_or` are eagerly evaluated; if you are passing
+ /// the result of a function call, it is recommended to use [`dictionary_or_else`], which
+ /// is lazily evaluated.
+ ///
+ /// [`Object::Dict(v)`]: self::Object::Dict
+ /// [`Ok(v)`]: https://doc.rust-lang.org/std/result/enum.Result.html#variant.Ok
+ /// [`dictionary_or_else`]: self::Object::dictionary_or_else
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bendy::decoding::{Decoder, Object};
+ ///
+ /// let mut dict_decoder = Decoder::new(b"de");
+ /// let x = dict_decoder.next_object().unwrap().unwrap();
+ ///
+ /// assert!(x.dictionary_or(Err("failure")).is_ok());
+ ///
+ /// let x = Object::Bytes(b"foo");
+ /// assert_eq!("failure", x.dictionary_or(Err("failure")).unwrap_err());
+ /// ```
+ pub fn dictionary_or<ErrorT>(
+ self,
+ default: Result<DictDecoder<'obj, 'ser>, ErrorT>,
+ ) -> Result<DictDecoder<'obj, 'ser>, ErrorT> {
+ match self {
+ Object::Dict(content) => Ok(content),
+ _ => default,
+ }
+ }
+
+ /// Try to treat the object as a dictionary and return the internal dictionary content
+ /// decoder, mapping [`Object::Dict(v)`] into [`Ok(v)`]. Any other variant is passed
+ /// into the given fallback method.
+ ///
+ /// [`Object::Dict(v)`]: self::Object::Dict
+ /// [`Ok(v)`]: https://doc.rust-lang.org/std/result/enum.Result.html#variant.Ok
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bendy::decoding::{Decoder, Object};
+ ///
+ /// let mut dict_decoder = Decoder::new(b"de");
+ /// let x = dict_decoder.next_object().unwrap().unwrap();
+ ///
+ /// assert!(x
+ /// .dictionary_or_else(|obj| Err(obj.into_token().name()))
+ /// .is_ok());
+ ///
+ /// let x = Object::Bytes(b"foo");
+ /// assert_eq!(
+ /// "String",
+ /// x.dictionary_or_else(|obj| Err(obj.into_token().name()))
+ /// .unwrap_err()
+ /// );
+ /// ```
+ pub fn dictionary_or_else<ErrorT>(
+ self,
+ op: impl FnOnce(Self) -> Result<DictDecoder<'obj, 'ser>, ErrorT>,
+ ) -> Result<DictDecoder<'obj, 'ser>, ErrorT> {
+ match self {
+ Object::Dict(content) => Ok(content),
+ _ => op(self),
+ }
+ }
+
+ /// Try to treat the object as a dictionary and return the internal dictionary content
+ /// decoder, mapping [`Object::Dict(v)`] into [`Ok(v)`]. Any other variant results in
+    /// an `ErrorKind::UnexpectedToken` error.
+ ///
+ /// [`Object::Dict(v)`]: self::Object::Dict
+ /// [`Ok(v)`]: https://doc.rust-lang.org/std/result/enum.Result.html#variant.Ok
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bendy::decoding::{Decoder, Object};
+ ///
+ /// let mut dict_decoder = Decoder::new(b"de");
+ /// let x = dict_decoder.next_object().unwrap().unwrap();
+ ///
+ /// assert!(x.try_into_dictionary().is_ok());
+ ///
+ /// let x = Object::Bytes(b"foo");
+ /// assert!(x.try_into_dictionary().is_err());
+ /// ```
+ pub fn try_into_dictionary(self) -> Result<DictDecoder<'obj, 'ser>, Error> {
+ self.dictionary_or_else(|obj| Err(Error::unexpected_token("Dict", obj.into_token().name())))
+ }
+}
diff --git a/rust/vendor/bendy/src/encoding.rs b/rust/vendor/bendy/src/encoding.rs
new file mode 100644
index 0000000..9f4fddb
--- /dev/null
+++ b/rust/vendor/bendy/src/encoding.rs
@@ -0,0 +1,129 @@
+//! An encoder for bencode. Guarantees that the output is valid bencode.
+//!
+//! # Encoding a structure
+//!
+//! The easiest way to encode a structure is to implement [`ToBencode`] for it. For most structures,
+//! this should be very simple:
+//!
+//! ```
+//! # use bendy::encoding::{ToBencode, SingleItemEncoder, Error};
+//!
+//! struct Message {
+//! foo: i32,
+//! bar: String,
+//! }
+//!
+//! impl ToBencode for Message {
+//!     // Atoms have depth zero. The struct wrapper adds one level to that
+//! const MAX_DEPTH: usize = 1;
+//!
+//! fn encode(&self, encoder: SingleItemEncoder) -> Result<(), Error> {
+//! encoder.emit_dict(|mut e| {
+//! // Use e to emit the values
+//! e.emit_pair(b"bar", &self.bar)?;
+//! e.emit_pair(b"foo", &self.foo)
+//! })?;
+//! Ok(())
+//! }
+//! }
+//! #
+//! # fn main() -> Result<(), Error> {
+//! # let message = Message{
+//! # foo: 1,
+//! # bar: "quux".to_string(),
+//! # };
+//! #
+//! # message.to_bencode().map(|_| ())
+//! # }
+//! ```
+//!
+//! Then, messages can be serialized using [`ToBencode::to_bencode`]:
+//!
+//! ```
+//! # use bendy::encoding::{ToBencode, SingleItemEncoder, Error};
+//! #
+//! # struct Message {
+//! # foo: i32,
+//! # bar: String,
+//! # }
+//! #
+//! # impl ToBencode for Message {
+//! # // Atoms have depth zero. The struct wrapper adds one level to that
+//! # const MAX_DEPTH: usize = 1;
+//! #
+//! # fn encode(&self, encoder: SingleItemEncoder) -> Result<(), Error> {
+//! # encoder.emit_dict(|mut e| {
+//! # // Use e to emit the values. They must be in sorted order here.
+//! # // If sorting the dict first is annoying, you can also use
+//! # // encoder.emit_and_sort_dict
+//! # e.emit_pair(b"bar", &self.bar)?;
+//! # e.emit_pair(b"foo", &self.foo)
+//! # })?;
+//! # Ok(())
+//! # }
+//! # }
+//! #
+//! # fn main() -> Result<(), Error> {
+//! let message = Message {
+//! foo: 1,
+//! bar: "quux".to_string(),
+//! };
+//!
+//! message.to_bencode()
+//! # .map(|_| ())
+//! # }
+//! ```
+//!
+//! Most primitive types already implement [`ToBencode`].
+//!
+//! # Nesting depth limits
+//!
+//! To allow this to be used on limited platforms, all implementations of [`ToBencode`] include a
+//! maximum nesting depth. Atoms (integers and byte strings) are considered to have depth 0. An
+//! object (a list or dict) containing only atoms has depth 1, and in general, an object has a depth
+//! equal to the depth of its deepest member plus one. In some cases, an object doesn't have a
+//! statically known depth. For example, ASTs may be arbitrarily nested. Such objects should
+//! have their depth set to 0, and callers should construct the Encoder manually, adding an
+//! appropriate buffer for the depth:
+//!
+//! ```
+//! # use bendy::encoding::{ToBencode, Encoder, Error};
+//! #
+//! # type ObjectType = u32;
+//! # static OBJECT: u32 = 0;
+//! #
+//! # fn main() -> Result<(), Error> {
+//! let mut encoder = Encoder::new().with_max_depth(ObjectType::MAX_DEPTH + 10);
+//!
+//! encoder.emit(OBJECT)?;
+//! encoder.get_output()
+//! # .map_err(Error::from)
+//! # .map(|_| ()) // ignore a success return value
+//! # }
+//! ```
+//!
+//! # Error handling
+//!
+//! Once an error occurs during encoding, all future calls to the same encoding stream will fail
+//! early with the same error. It is not defined whether any callback or implementation of
+//! [`ToBencode::encode`] is called before returning an error; such callbacks should
+//! respond to failure by bailing out as quickly as possible.
+//!
+//! Not all values in [`Error`] can be caused by an encoding operation. Specifically, you only need
+//! to worry about [`UnsortedKeys`] and [`NestingTooDeep`].
+//!
+//! [`ToBencode::encode`]: self::ToBencode::encode
+//! [`UnsortedKeys`]: self::Error#UnsortedKeys
+//! [`NestingTooDeep`]: self::Error#NestingTooDeep
+
+mod encoder;
+mod error;
+mod printable_integer;
+mod to_bencode;
+
+pub use self::{
+ encoder::{Encoder, SingleItemEncoder, SortedDictEncoder, UnsortedDictEncoder},
+ error::{Error, ErrorKind},
+ printable_integer::PrintableInteger,
+ to_bencode::{AsString, ToBencode},
+};
diff --git a/rust/vendor/bendy/src/encoding/encoder.rs b/rust/vendor/bendy/src/encoding/encoder.rs
new file mode 100644
index 0000000..ba4a164
--- /dev/null
+++ b/rust/vendor/bendy/src/encoding/encoder.rs
@@ -0,0 +1,482 @@
+#[cfg(not(feature = "std"))]
+use alloc::{
+ borrow::ToOwned,
+ collections::BTreeMap,
+ format,
+ string::{String, ToString},
+ vec::Vec,
+};
+#[cfg(feature = "std")]
+use std::{collections::BTreeMap, vec::Vec};
+
+use crate::{
+ encoding::{Error, PrintableInteger, ToBencode},
+ state_tracker::{StateTracker, StructureError, Token},
+};
+
+/// The actual encoder. Unlike the decoder, this is not zero-copy, as that would
+/// result in a horrible interface
+#[derive(Default, Debug)]
+pub struct Encoder {
+ state: StateTracker<Vec<u8>, Error>,
+ output: Vec<u8>,
+}
+
+impl Encoder {
+ /// Create a new encoder
+ pub fn new() -> Self {
+ <Self as Default>::default()
+ }
+
+ /// Set the max depth of the encoded object
+ #[must_use]
+ pub fn with_max_depth(mut self, max_depth: usize) -> Self {
+ self.state.set_max_depth(max_depth);
+ self
+ }
+
+ /// Emit a single token to the encoder
+ pub(crate) fn emit_token(&mut self, token: Token) -> Result<(), Error> {
+ self.state.check_error()?;
+ self.state.observe_token(&token)?;
+ match token {
+ Token::List => self.output.push(b'l'),
+ Token::Dict => self.output.push(b'd'),
+ Token::String(s) => {
+ // Writing to a vec can't fail
+ let length = s.len().to_string();
+ self.output.extend_from_slice(length.as_bytes());
+ self.output.push(b':');
+ self.output.extend_from_slice(s);
+ },
+ Token::Num(num) => {
+ // Alas, this doesn't verify that the given number is valid
+ self.output.push(b'i');
+ self.output.extend_from_slice(num.as_bytes());
+ self.output.push(b'e');
+ },
+ Token::End => self.output.push(b'e'),
+ }
+
+ Ok(())
+ }
+
+ /// Emit an arbitrary encodable object
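+    ///
+    /// For example, emitting a single integer (a sketch; `get_output` finishes the stream):
+    ///
+    /// ```
+    /// # use bendy::encoding::{Encoder, Error};
+    /// #
+    /// # fn main() -> Result<(), Error> {
+    /// let mut encoder = Encoder::new();
+    /// encoder.emit(42)?;
+    ///
+    /// assert_eq!(b"i42e".to_vec(), encoder.get_output()?);
+    /// # Ok(())
+    /// # }
+    /// ```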
+ pub fn emit<E: ToBencode>(&mut self, value: E) -> Result<(), Error> {
+ self.emit_with(|e| value.encode(e))
+ }
+
+ /// Emit a single object using an encoder
+ pub fn emit_with<F>(&mut self, value_cb: F) -> Result<(), Error>
+ where
+ F: FnOnce(SingleItemEncoder) -> Result<(), Error>,
+ {
+ let mut value_written = false;
+ let ret = value_cb(SingleItemEncoder {
+ encoder: self,
+ value_written: &mut value_written,
+ });
+
+ self.state.latch_err(ret)?;
+
+ if !value_written {
+ return self
+ .state
+ .latch_err(Err(Error::from(StructureError::invalid_state(
+ "No value was emitted",
+ ))));
+ }
+
+ Ok(())
+ }
+
+ /// Emit an integer
+ pub fn emit_int<T: PrintableInteger>(&mut self, value: T) -> Result<(), Error> {
+ // This doesn't use emit_token, as that would require that I write the integer to a
+ // temporary buffer and then copy it to the output; writing it directly saves at
+ // least one memory allocation
+ self.state.check_error()?;
+ // We observe an int here, as we need something that isn't a string (and therefore
+ // possibly valid as a key) but we also want to require as few state transitions as
+ // possible (for performance)
+ self.state.observe_token(&Token::Num(""))?;
+ self.output.push(b'i');
+ self.output.extend_from_slice(value.to_string().as_bytes());
+ self.output.push(b'e');
+ Ok(())
+ }
+
+ /// Emit a string
+ pub fn emit_str(&mut self, value: &str) -> Result<(), Error> {
+ self.emit_token(Token::String(value.as_bytes()))
+ }
+
+ /// Emit a byte array
+ pub fn emit_bytes(&mut self, value: &[u8]) -> Result<(), Error> {
+ self.emit_token(Token::String(value))
+ }
+
+ /// Emit a dictionary where you know that the keys are already
+ /// sorted. The callback must emit key/value pairs to the given
+ /// encoder in sorted order. If the key/value pairs may not be
+ /// sorted, [`emit_unsorted_dict`] should be used instead.
+ ///
+ /// [`emit_unsorted_dict`]: SingleItemEncoder::emit_unsorted_dict
+ ///
+ /// Example:
+ ///
+ /// ```
+ /// # use bendy::encoding::{Encoder, Error};
+ /// #
+ /// # fn main() -> Result<(), Error>{
+ /// let mut encoder = Encoder::new();
+ /// encoder.emit_dict(|mut e| {
+ /// e.emit_pair(b"a", "foo")?;
+ /// e.emit_pair(b"b", 2)
+ /// })
+ /// # }
+ /// ```
+ pub fn emit_dict<F>(&mut self, content_cb: F) -> Result<(), Error>
+ where
+ F: FnOnce(SortedDictEncoder) -> Result<(), Error>,
+ {
+ self.emit_token(Token::Dict)?;
+ content_cb(SortedDictEncoder { encoder: self })?;
+ self.emit_token(Token::End)
+ }
+
+ /// Emit an arbitrary list. The callback should emit the contents
+ /// of the list to the given encoder.
+ ///
+ /// E.g., to emit the list `[1,2,3]`, you would write
+ ///
+ /// ```
+ /// # use bendy::encoding::{Encoder, Error};
+ /// # fn main() -> Result<(), Error> {
+ /// let mut encoder = Encoder::new();
+ /// encoder.emit_list(|e| {
+ /// e.emit_int(1)?;
+ /// e.emit_int(2)?;
+ /// e.emit_int(3)
+ /// })
+ /// # }
+ /// ```
+ pub fn emit_list<F>(&mut self, list_cb: F) -> Result<(), Error>
+ where
+ F: FnOnce(&mut Encoder) -> Result<(), Error>,
+ {
+ self.emit_token(Token::List)?;
+ list_cb(self)?;
+ self.emit_token(Token::End)
+ }
+
+ /// Emit a dictionary that may have keys out of order. This will write the dict
+ /// values to temporary memory, then sort them before adding them to the serialized
+ /// stream
+ ///
+    /// Example:
+ ///
+ /// ```
+ /// # use bendy::encoding::{Encoder, Error};
+ /// #
+ /// # fn main() -> Result<(), Error> {
+ /// let mut encoder = Encoder::new();
+ /// encoder.emit_and_sort_dict(|e| {
+ /// // Unlike in the example for Encoder::emit_dict(), these keys aren't sorted
+ /// e.emit_pair(b"b", 2)?;
+ /// e.emit_pair(b"a", "foo")
+ /// })
+ /// # }
+ /// ```
+ pub fn emit_and_sort_dict<F>(&mut self, content_cb: F) -> Result<(), Error>
+ where
+ F: FnOnce(&mut UnsortedDictEncoder) -> Result<(), Error>,
+ {
+ let mut encoder = self.begin_unsorted_dict()?;
+
+ content_cb(&mut encoder)?;
+
+ self.end_unsorted_dict(encoder)
+ }
+
+ /// Return the encoded string, if all objects written are complete
+ pub fn get_output(mut self) -> Result<Vec<u8>, Error> {
+ self.state.observe_eof()?;
+ Ok(self.output)
+ }
+
+ pub(crate) fn begin_unsorted_dict(&mut self) -> Result<UnsortedDictEncoder, Error> {
+ // emit the dict token so that a pre-existing state error is reported early
+ self.emit_token(Token::Dict)?;
+
+ Ok(UnsortedDictEncoder::new(self.state.remaining_depth()))
+ }
+
+ pub(crate) fn end_unsorted_dict(&mut self, encoder: UnsortedDictEncoder) -> Result<(), Error> {
+ let content = encoder.done()?;
+
+ for (k, v) in content {
+ self.emit_bytes(&k)?;
+ // We know that the output is a single object by construction
+ self.state.observe_token(&Token::Num(""))?;
+ self.output.extend_from_slice(&v);
+ }
+
+ self.emit_token(Token::End)?;
+
+ Ok(())
+ }
+}
+
+/// An encoder that can only encode a single item. See [`Encoder`]
+/// for usage examples; the only difference between these types is
+/// that `SingleItemEncoder` can only be used once.
+pub struct SingleItemEncoder<'a> {
+ encoder: &'a mut Encoder,
+ /// Whether we attempted to write a value to the encoder. The value
+ /// of the referent of this field is meaningless if the encode method
+ /// failed.
+ value_written: &'a mut bool,
+}
+
+impl<'a> SingleItemEncoder<'a> {
+ /// Emit an arbitrary encodable object
+ pub fn emit<E: ToBencode + ?Sized>(self, value: &E) -> Result<(), Error> {
+ value.encode(self)
+ }
+
+ /// Emit a single object using an encoder
+ pub fn emit_with<F>(self, value_cb: F) -> Result<(), Error>
+ where
+ F: FnOnce(SingleItemEncoder) -> Result<(), Error>,
+ {
+ value_cb(self)
+ }
+
+ /// Emit an integer
+ pub fn emit_int<T: PrintableInteger>(self, value: T) -> Result<(), Error> {
+ *self.value_written = true;
+ self.encoder.emit_int(value)
+ }
+
+ /// Emit a string
+ pub fn emit_str(self, value: &str) -> Result<(), Error> {
+ *self.value_written = true;
+ self.encoder.emit_str(value)
+ }
+
+ /// Emit a byte array
+ pub fn emit_bytes(self, value: &[u8]) -> Result<(), Error> {
+ *self.value_written = true;
+ self.encoder.emit_bytes(value)
+ }
+
+ /// Emit an arbitrary list
+ pub fn emit_list<F>(self, list_cb: F) -> Result<(), Error>
+ where
+ F: FnOnce(&mut Encoder) -> Result<(), Error>,
+ {
+ *self.value_written = true;
+ self.encoder.emit_list(list_cb)
+ }
+
+ /// Emit a sorted dictionary. If the input dictionary is unsorted, this will return an error.
+ pub fn emit_dict<F>(self, content_cb: F) -> Result<(), Error>
+ where
+ F: FnOnce(SortedDictEncoder) -> Result<(), Error>,
+ {
+ *self.value_written = true;
+ self.encoder.emit_dict(content_cb)
+ }
+
+ /// Emit a dictionary that may have keys out of order. This will write the dict
+ /// values to temporary memory, then sort them before adding them to the serialized
+ /// stream
+ pub fn emit_unsorted_dict<F>(self, content_cb: F) -> Result<(), Error>
+ where
+ F: FnOnce(&mut UnsortedDictEncoder) -> Result<(), Error>,
+ {
+ *self.value_written = true;
+ self.encoder.emit_and_sort_dict(content_cb)
+ }
+
+ /// Emit an arbitrary list.
+ ///
+    /// Attention: If canonical output is required, the caller needs to ensure
+    /// that the iterator yields its items in a defined order.
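+    ///
+    /// A sketch emitting a `Vec` of integers as a bencode list:
+    ///
+    /// ```
+    /// # use bendy::encoding::{Encoder, Error};
+    /// #
+    /// # fn main() -> Result<(), Error> {
+    /// let values = vec![1, 2, 3];
+    ///
+    /// let mut encoder = Encoder::new();
+    /// encoder.emit_with(|e| e.emit_unchecked_list(values.into_iter()))?;
+    ///
+    /// assert_eq!(b"li1ei2ei3ee".to_vec(), encoder.get_output()?);
+    /// # Ok(())
+    /// # }
+    /// ```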
+ pub fn emit_unchecked_list(
+ self,
+ iterable: impl Iterator<Item = impl ToBencode>,
+ ) -> Result<(), Error> {
+ self.emit_list(|e| {
+ for item in iterable {
+ e.emit(item)?;
+ }
+ Ok(())
+ })
+ }
+}
+
+/// Encodes a map with pre-sorted keys
+pub struct SortedDictEncoder<'a> {
+ encoder: &'a mut Encoder,
+}
+
+impl<'a> SortedDictEncoder<'a> {
+ /// Emit a key/value pair
+ pub fn emit_pair<E>(&mut self, key: &[u8], value: E) -> Result<(), Error>
+ where
+ E: ToBencode,
+ {
+ self.encoder.emit_token(Token::String(key))?;
+ self.encoder.emit(value)
+ }
+
+ /// Equivalent to [`SortedDictEncoder::emit_pair()`], but forces the type of the value
+ /// to be a callback
+ pub fn emit_pair_with<F>(&mut self, key: &[u8], value_cb: F) -> Result<(), Error>
+ where
+ F: FnOnce(SingleItemEncoder) -> Result<(), Error>,
+ {
+ self.encoder.emit_token(Token::String(key))?;
+ self.encoder.emit_with(value_cb)
+ }
+}
+
+/// Helper to write a dictionary that may have keys out of order. This will buffer the
+/// dict values in temporary memory, then sort them before adding them to the serialized
+/// stream
+pub struct UnsortedDictEncoder {
+ content: BTreeMap<Vec<u8>, Vec<u8>>,
+ error: Result<(), Error>,
+ remaining_depth: usize,
+}
+
+impl UnsortedDictEncoder {
+ pub(crate) fn new(remaining_depth: usize) -> Self {
+ Self {
+ content: BTreeMap::new(),
+ error: Ok(()),
+ remaining_depth,
+ }
+ }
+
+ /// Emit a key/value pair
+ pub fn emit_pair<E>(&mut self, key: &[u8], value: E) -> Result<(), Error>
+ where
+ E: ToBencode,
+ {
+ self.emit_pair_with(key, |e| value.encode(e))
+ }
+
+ /// Emit a key/value pair where the value is produced by a callback
+ pub fn emit_pair_with<F>(&mut self, key: &[u8], value_cb: F) -> Result<(), Error>
+ where
+ F: FnOnce(SingleItemEncoder) -> Result<(), Error>,
+ {
+ let mut value_written = false;
+
+ let mut encoder = Encoder::new().with_max_depth(self.remaining_depth);
+
+ let ret = value_cb(SingleItemEncoder {
+ encoder: &mut encoder,
+ value_written: &mut value_written,
+ });
+
+ if ret.is_err() {
+ self.error = ret.clone();
+ return ret;
+ }
+
+ if !value_written {
+ self.error = Err(Error::from(StructureError::InvalidState(
+ "No value was emitted".to_owned(),
+ )));
+ } else {
+ self.error = encoder.state.observe_eof().map_err(Error::from);
+ }
+
+ if self.error.is_err() {
+ return self.error.clone();
+ }
+
+ let encoded_object = encoder
+ .get_output()
+ .expect("Any errors should have been caught by observe_eof");
+
+ self.save_pair(key, encoded_object)
+ }
+
+ #[cfg(feature = "serde")]
+ pub(crate) fn remaining_depth(&self) -> usize {
+ self.remaining_depth
+ }
+
+ pub(crate) fn save_pair(
+ &mut self,
+ unencoded_key: &[u8],
+ encoded_value: Vec<u8>,
+ ) -> Result<(), Error> {
+ #[cfg(not(feature = "std"))]
+ use alloc::collections::btree_map::Entry;
+ #[cfg(feature = "std")]
+ use std::collections::btree_map::Entry;
+
+ if self.error.is_err() {
+ return self.error.clone();
+ }
+
+ let vacancy = match self.content.entry(unencoded_key.to_owned()) {
+ Entry::Vacant(vacancy) => vacancy,
+ Entry::Occupied(occupation) => {
+ self.error = Err(Error::from(StructureError::InvalidState(format!(
+ "Duplicate key {}",
+ String::from_utf8_lossy(occupation.key())
+ ))));
+ return self.error.clone();
+ },
+ };
+
+ vacancy.insert(encoded_value);
+
+ Ok(())
+ }
+
+ pub(crate) fn done(self) -> Result<BTreeMap<Vec<u8>, Vec<u8>>, Error> {
+ self.error?;
+ Ok(self.content)
+ }
+}
+
+#[cfg(test)]
+mod test {
+ use super::*;
+
+ #[test]
+ pub fn simple_encoding_works() {
+ let mut encoder = Encoder::new();
+ encoder
+ .emit_dict(|mut e| {
+ e.emit_pair(b"bar", 25)?;
+ e.emit_pair_with(b"foo", |e| {
+ e.emit_list(|e| {
+ e.emit_str("baz")?;
+ e.emit_str("qux")
+ })
+ })
+ })
+ .expect("Encoding shouldn't fail");
+ assert_eq!(
+ &encoder
+ .get_output()
+ .expect("Complete object should have been written"),
+ &b"d3:bari25e3:fool3:baz3:quxee"
+ );
+ }
+
+ #[test]
+ fn emit_cb_must_emit() {
+ let mut encoder = Encoder::new();
+ assert!(encoder.emit_with(|_| Ok(())).is_err());
+ }
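+
+    // Illustrative sketch, not part of the upstream test suite: exercises the
+    // `emit_unchecked_list` helper documented above, assuming the method is
+    // available on the single-item encoder handed to `emit_with`.
+    #[test]
+    fn unchecked_list_encoding_works() {
+        let values = [1u8, 2, 3];
+        let mut encoder = Encoder::new();
+        encoder
+            .emit_with(|e| e.emit_unchecked_list(values.iter()))
+            .expect("Encoding shouldn't fail");
+        assert_eq!(
+            &encoder
+                .get_output()
+                .expect("Complete object should have been written"),
+            &b"li1ei2ei3ee"
+        );
+    }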
+}
diff --git a/rust/vendor/bendy/src/encoding/error.rs b/rust/vendor/bendy/src/encoding/error.rs
new file mode 100644
index 0000000..cc651d8
--- /dev/null
+++ b/rust/vendor/bendy/src/encoding/error.rs
@@ -0,0 +1,56 @@
+#[cfg(feature = "std")]
+use std::sync::Arc;
+
+use failure::Fail;
+
+use crate::state_tracker::StructureError;
+
+#[derive(Debug, Clone, Fail)]
+#[fail(display = "encoding failed: {}", _0)]
+pub struct Error(#[fail(cause)] pub ErrorKind);
+
+/// An enumeration of potential errors that appear during bencode encoding.
+#[derive(Debug, Clone, Fail)]
+pub enum ErrorKind {
+ /// Error that occurs if the serialized structure contains invalid semantics.
+ #[cfg(feature = "std")]
+ #[fail(display = "malformed content discovered: {}", _0)]
+ MalformedContent(Arc<failure::Error>),
+ /// Error that occurs if the serialized structure contains invalid semantics.
+ #[cfg(not(feature = "std"))]
+ #[fail(display = "malformed content discovered")]
+ MalformedContent,
+ /// Error in the bencode structure (e.g. a missing field end separator).
+ #[fail(display = "bencode encoding corrupted")]
+ StructureError(#[fail(cause)] StructureError),
+}
+
+impl Error {
+    /// Raised when there is a general error while serializing a type.
+ /// The message should not be capitalized and should not end with a period.
+ ///
+ /// Note that, when building with no_std, this method accepts any type as
+ /// its argument.
+ #[cfg(feature = "std")]
+ pub fn malformed_content(cause: impl Into<failure::Error>) -> Error {
+ let error = Arc::new(cause.into());
+ Self(ErrorKind::MalformedContent(error))
+ }
+
+ #[cfg(not(feature = "std"))]
+ pub fn malformed_content<T>(_cause: T) -> Error {
+ Self(ErrorKind::MalformedContent)
+ }
+}
+
+impl From<StructureError> for Error {
+ fn from(error: StructureError) -> Self {
+ Self(ErrorKind::StructureError(error))
+ }
+}
+
+impl From<ErrorKind> for Error {
+ fn from(kind: ErrorKind) -> Self {
+ Self(kind)
+ }
+}
diff --git a/rust/vendor/bendy/src/encoding/printable_integer.rs b/rust/vendor/bendy/src/encoding/printable_integer.rs
new file mode 100644
index 0000000..8140dca
--- /dev/null
+++ b/rust/vendor/bendy/src/encoding/printable_integer.rs
@@ -0,0 +1,15 @@
+#[cfg(not(feature = "std"))]
+use core::fmt::Display;
+#[cfg(feature = "std")]
+use std::fmt::Display;
+
+/// A value that can be formatted as a decimal integer
+pub trait PrintableInteger: Display {}
+
+macro_rules! impl_integer {
+ ($($type:ty)*) => {$(
+ impl PrintableInteger for $type {}
+ )*}
+}
+
+impl_integer!(u8 u16 u32 u64 u128 usize i8 i16 i32 i64 i128 isize);
diff --git a/rust/vendor/bendy/src/encoding/to_bencode.rs b/rust/vendor/bendy/src/encoding/to_bencode.rs
new file mode 100644
index 0000000..a19dcb3
--- /dev/null
+++ b/rust/vendor/bendy/src/encoding/to_bencode.rs
@@ -0,0 +1,265 @@
+#[cfg(not(feature = "std"))]
+use alloc::{
+ collections::{BTreeMap, LinkedList, VecDeque},
+ rc::Rc,
+ string::String,
+ sync::Arc,
+ vec::Vec,
+};
+
+#[cfg(feature = "std")]
+use std::{
+ collections::{BTreeMap, HashMap, LinkedList, VecDeque},
+ hash::{BuildHasher, Hash},
+ rc::Rc,
+ sync::Arc,
+};
+
+use crate::encoding::{Encoder, Error, SingleItemEncoder};
+
+/// An object that can be encoded into a single bencode object
+pub trait ToBencode {
+ /// The maximum depth that this object could encode to. Leaves do not consume a level, so an
+ /// `i1e` has depth 0 and `li1ee` has depth 1.
+ const MAX_DEPTH: usize;
+
+ /// Encode this object into the bencode stream
+ fn encode(&self, encoder: SingleItemEncoder) -> Result<(), Error>;
+
+ /// Encode this object to a byte string
+ fn to_bencode(&self) -> Result<Vec<u8>, Error> {
+ let mut encoder = Encoder::new().with_max_depth(Self::MAX_DEPTH);
+ encoder.emit_with(|e| self.encode(e).map_err(Error::into))?;
+
+ let bytes = encoder.get_output()?;
+ Ok(bytes)
+ }
+}
+
+/// Wrapper to allow `Vec<u8>` encoding as bencode string element.
+#[derive(Clone, Copy, Debug, Default, Hash, Eq, PartialEq, PartialOrd, Ord)]
+pub struct AsString<I>(pub I);
+
+// Forwarding impls
+impl<'a, E: 'a + ToBencode + Sized> ToBencode for &'a E {
+ const MAX_DEPTH: usize = E::MAX_DEPTH;
+
+ fn encode(&self, encoder: SingleItemEncoder) -> Result<(), Error> {
+ E::encode(self, encoder)
+ }
+}
+
+#[cfg(feature = "std")]
+impl<E: ToBencode> ToBencode for Box<E> {
+ const MAX_DEPTH: usize = E::MAX_DEPTH;
+
+ fn encode(&self, encoder: SingleItemEncoder) -> Result<(), Error> {
+ E::encode(&*self, encoder)
+ }
+}
+
+impl<E: ToBencode> ToBencode for Rc<E> {
+ const MAX_DEPTH: usize = E::MAX_DEPTH;
+
+ fn encode(&self, encoder: SingleItemEncoder) -> Result<(), Error> {
+ E::encode(&*self, encoder)
+ }
+}
+
+impl<E: ToBencode> ToBencode for Arc<E> {
+ const MAX_DEPTH: usize = E::MAX_DEPTH;
+
+ fn encode(&self, encoder: SingleItemEncoder) -> Result<(), Error> {
+ E::encode(&*self, encoder)
+ }
+}
+
+// Base type impls
+impl<'a> ToBencode for &'a str {
+ const MAX_DEPTH: usize = 0;
+
+ fn encode(&self, encoder: SingleItemEncoder) -> Result<(), Error> {
+ encoder.emit_str(self).map_err(Error::from)
+ }
+}
+
+impl ToBencode for String {
+ const MAX_DEPTH: usize = 0;
+
+ fn encode(&self, encoder: SingleItemEncoder) -> Result<(), Error> {
+ encoder.emit_str(self).map_err(Error::from)
+ }
+}
+
+macro_rules! impl_encodable_integer {
+ ($($type:ty)*) => {$(
+ impl ToBencode for $type {
+ const MAX_DEPTH: usize = 1;
+
+ fn encode(&self, encoder: SingleItemEncoder) -> Result<(), Error> {
+ encoder.emit_int(*self).map_err(Error::from)
+ }
+ }
+ )*}
+}
+
+impl_encodable_integer!(u8 u16 u32 u64 u128 usize i8 i16 i32 i64 i128 isize);
+
+macro_rules! impl_encodable_iterable {
+ ($($type:ident)*) => {$(
+ impl <ContentT> ToBencode for $type<ContentT>
+ where
+ ContentT: ToBencode
+ {
+ const MAX_DEPTH: usize = ContentT::MAX_DEPTH + 1;
+
+ fn encode(&self, encoder: SingleItemEncoder) -> Result<(), Error> {
+ encoder.emit_list(|e| {
+ for item in self {
+ e.emit(item)?;
+ }
+ Ok(())
+ })?;
+
+ Ok(())
+ }
+ }
+ )*}
+}
+
+impl_encodable_iterable!(Vec VecDeque LinkedList);
+
+impl<'a, ContentT> ToBencode for &'a [ContentT]
+where
+ ContentT: ToBencode,
+{
+ const MAX_DEPTH: usize = ContentT::MAX_DEPTH + 1;
+
+ fn encode(&self, encoder: SingleItemEncoder) -> Result<(), Error> {
+ encoder.emit_list(|e| {
+ for item in *self {
+ e.emit(item)?;
+ }
+ Ok(())
+ })?;
+
+ Ok(())
+ }
+}
+
+impl<K: AsRef<[u8]>, V: ToBencode> ToBencode for BTreeMap<K, V> {
+ const MAX_DEPTH: usize = V::MAX_DEPTH + 1;
+
+ fn encode(&self, encoder: SingleItemEncoder) -> Result<(), Error> {
+ encoder.emit_dict(|mut e| {
+ for (k, v) in self {
+ e.emit_pair(k.as_ref(), v)?;
+ }
+ Ok(())
+ })?;
+
+ Ok(())
+ }
+}
+
+#[cfg(feature = "std")]
+impl<K, V, S> ToBencode for HashMap<K, V, S>
+where
+ K: AsRef<[u8]> + Eq + Hash,
+ V: ToBencode,
+ S: BuildHasher,
+{
+ const MAX_DEPTH: usize = V::MAX_DEPTH + 1;
+
+ fn encode(&self, encoder: SingleItemEncoder) -> Result<(), Error> {
+ encoder.emit_dict(|mut e| {
+ let mut pairs = self
+ .iter()
+ .map(|(k, v)| (k.as_ref(), v))
+ .collect::<Vec<_>>();
+ pairs.sort_by_key(|&(k, _)| k);
+ for (k, v) in pairs {
+ e.emit_pair(k, v)?;
+ }
+ Ok(())
+ })?;
+
+ Ok(())
+ }
+}
+
+impl<I> ToBencode for AsString<I>
+where
+ I: AsRef<[u8]>,
+{
+ const MAX_DEPTH: usize = 1;
+
+ fn encode(&self, encoder: SingleItemEncoder) -> Result<(), Error> {
+ encoder.emit_bytes(self.0.as_ref())?;
+ Ok(())
+ }
+}
+
+impl<I> AsRef<[u8]> for AsString<I>
+where
+ I: AsRef<[u8]>,
+{
+ fn as_ref(&self) -> &'_ [u8] {
+ self.0.as_ref()
+ }
+}
+
+impl<'a, I> From<&'a [u8]> for AsString<I>
+where
+ I: From<&'a [u8]>,
+{
+ fn from(content: &'a [u8]) -> Self {
+ AsString(I::from(content))
+ }
+}
+
+#[cfg(test)]
+mod test {
+
+ #[cfg(not(feature = "std"))]
+ use alloc::{borrow::ToOwned, vec};
+
+ use super::*;
+
+ struct Foo {
+ bar: u32,
+ baz: Vec<String>,
+ qux: Vec<u8>,
+ }
+
+ impl ToBencode for Foo {
+ const MAX_DEPTH: usize = 2;
+
+ fn encode(&self, encoder: SingleItemEncoder) -> Result<(), Error> {
+ encoder.emit_dict(|mut e| {
+ e.emit_pair(b"bar", &self.bar)?;
+ e.emit_pair(b"baz", &self.baz)?;
+ e.emit_pair(b"qux", AsString(&self.qux))?;
+ Ok(())
+ })?;
+
+ Ok(())
+ }
+ }
+
+ #[test]
+ fn simple_encodable_works() {
+ let mut encoder = Encoder::new();
+ encoder
+ .emit(Foo {
+ bar: 5,
+ baz: vec!["foo".to_owned(), "bar".to_owned()],
+ qux: b"qux".to_vec(),
+ })
+ .unwrap();
+ assert_eq!(
+ &encoder.get_output().unwrap()[..],
+ &b"d3:bari5e3:bazl3:foo3:bare3:qux3:quxe"[..]
+ );
+ }
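+
+    // Illustrative sketch, not part of the upstream test suite: the provided
+    // `to_bencode` helper wraps an `Encoder` internally, so a leaf value can be
+    // encoded in a single call.
+    #[test]
+    fn to_bencode_helper_works() {
+        let encoded = "spam".to_bencode().expect("Encoding shouldn't fail");
+        assert_eq!(encoded, b"4:spam");
+    }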
+}
diff --git a/rust/vendor/bendy/src/lib.rs b/rust/vendor/bendy/src/lib.rs
new file mode 100644
index 0000000..58b35e5
--- /dev/null
+++ b/rust/vendor/bendy/src/lib.rs
@@ -0,0 +1,23 @@
+//! Encodes and decodes bencoded structures.
+//!
+//! The decoder is explicitly designed to be zero-copy as much as possible, and to not
+//! accept any sort of invalid encoding in any mode (including non-canonical encodings).
+//!
+//! The encoder is likewise designed to ensure that it only produces valid structures.
+
+#![cfg_attr(not(feature = "std"), no_std)]
+
+extern crate alloc;
+
+#[cfg(all(test, feature = "serde"))]
+#[macro_use]
+mod assert_matches;
+
+pub mod decoding;
+pub mod encoding;
+pub mod state_tracker;
+
+#[cfg(feature = "serde")]
+pub mod serde;
+
+pub mod value;
diff --git a/rust/vendor/bendy/src/serde.rs b/rust/vendor/bendy/src/serde.rs
new file mode 100644
index 0000000..1cf6be4
--- /dev/null
+++ b/rust/vendor/bendy/src/serde.rs
@@ -0,0 +1,553 @@
+//! Serde Serialization and Deserialization
+//! =======================================
+//!
+//! Values can be serialized to bencode with `bendy::serde::to_bytes`, and
+//! deserialized from bencode with `bendy::serde::from_bytes`:
+//!
+//! ```
+//! use bendy::serde::{from_bytes, to_bytes};
+//! use serde_ as serde;
+//! use serde_derive::{Deserialize, Serialize};
+//!
+//! assert_eq!(to_bytes(&10).unwrap(), b"i10e");
+//! assert_eq!(from_bytes::<u64>(b"i10e").unwrap(), 10);
+//!
+//! #[serde(crate = "serde_")]
+//! #[derive(Serialize, Deserialize, Debug, PartialEq)]
+//! struct Foo {
+//! bar: bool,
+//! }
+//!
+//! assert_eq!(to_bytes(&Foo { bar: true }).unwrap(), b"d3:bari1ee");
+//! assert_eq!(from_bytes::<Foo>(b"d3:bari1ee").unwrap(), Foo { bar: true });
+//! ```
+//!
+//! Bencode Representations
+//! -----------------------
+//!
+//! Rust types and values are represented in bencode as follows:
+//!
+//! - `true`: The integer value `1`.
+//! - `false`: The integer value `0`.
+//! - `char`: A string containing the UTF-8 encoding of the value.
+//! - `f32`: Represented as a length-four bencode byte string containing the big-
+//! endian order bytes of the IEEE-754 representation of the value.
+//! - `f64`: Represented as a length-eight bencode byte string containing the big-
+//! endian order bytes of the IEEE-754 representation of the value.
+//! - `()`: Represented as the empty bencode list, `le`.
+//! - `Some(t)`: Represented as a list containing the bencoding of `t`.
+//! - `None`: Represented as the empty list.
+//! - maps, including BTreeMap and HashMap: bencoded dictionaries.
+//! - record structs: Represented as bencoded dictionaries with the fields of the
+//! struct represented as UTF-8 keys mapped to the bencoded serializations of the
+//! values.
+//! - tuple structs: Represented as bencoded lists containing the serialized values
+//! of the fields.
+//! - unit structs: Represented as the empty bencode list, `le`.
+//! - enum unit variants: Represented as a string containing the name of the variant.
+//! - enum newtype variants: Represented as a dict mapping the name of the variant
+//!   to the value the variant contains.
+//! - enum tuple variants: Represented as a dict mapping the name of the variant
+//!   to a list containing the fields of the variant.
+//! - enum struct variants: Represented as a dict mapping the name of the variant
+//!   to the struct representation of the fields of the variant.
+//! - untagged enums: Represented as the variant value without any surrounding dictionary.
+//!
+//! Bencode dictionary keys may only be byte strings. For this reason, map types with
+//! keys that do not serialize as byte strings are unsupported.
+//!
+//! Note that values of type `f32` and `f64` do not conform to bencode's canonical
+//! representation rules. For example, both `f32` and `f64` support negative zero
+//! values which have different bit patterns, but which represent the same logical
+//! value as positive zero.
+//!
+//! If you require bencoded values to have canonical representations, then it is best
+//! to avoid floating point values.
+//!
+//! Example Representations
+//! -----------------------
+//!
+//! ```
+//! use bendy::serde::to_bytes;
+//! use serde::Serialize;
+//! use serde_ as serde;
+//! use serde_derive::Serialize;
+//! use std::collections::HashMap;
+//!
+//! fn repr(value: impl Serialize, bencode: impl AsRef<[u8]>) {
+//! assert_eq!(to_bytes(&value).unwrap(), bencode.as_ref());
+//! }
+//!
+//! repr(true, "i1e");
+//! repr(false, "i0e");
+//! repr((), "le");
+//! repr('a', "1:a");
+//! repr('Å', b"2:\xC3\x85");
+//! repr(0, "i0e");
+//! repr(-15, "i-15e");
+//! repr(1.0f32, b"4:\x3F\x80\x00\x00");
+//! repr(1.0f64, b"8:\x3F\xF0\x00\x00\x00\x00\x00\x00");
+//!
+//! let none: Option<i32> = None;
+//! repr(none, "le");
+//! repr(Some(0), "li0ee");
+//!
+//! let mut map = HashMap::new();
+//! map.insert("foo", 1);
+//! map.insert("bar", 2);
+//! repr(map, "d3:bari2e3:fooi1ee");
+//!
+//! #[serde(crate = "serde_")]
+//! #[derive(Serialize)]
+//! struct Unit;
+//! repr(Unit, "le");
+//!
+//! #[serde(crate = "serde_")]
+//! #[derive(Serialize)]
+//! struct Newtype(String);
+//! repr(Newtype("foo".into()), "3:foo");
+//!
+//! #[serde(crate = "serde_")]
+//! #[derive(Serialize)]
+//! struct Tuple(bool, i32);
+//! repr(Tuple(false, 100), "li0ei100ee");
+//!
+//! #[serde(crate = "serde_")]
+//! #[derive(Serialize)]
+//! struct Record {
+//! a: String,
+//! b: bool,
+//! }
+//!
+//! repr(
+//! Record {
+//! a: "hello".into(),
+//! b: false,
+//! },
+//! "d1:a5:hello1:bi0ee",
+//! );
+//!
+//! #[serde(crate = "serde_")]
+//! #[derive(Serialize)]
+//! enum Enum {
+//! Unit,
+//! Newtype(i32),
+//! Tuple(bool, i32),
+//! Struct { a: char, b: bool },
+//! }
+//!
+//! repr(Enum::Unit, "4:Unit");
+//! repr(Enum::Newtype(-1), "d7:Newtypei-1ee");
+//! repr(Enum::Tuple(true, 10), "d5:Tupleli1ei10eee");
+//! repr(Enum::Struct { a: 'x', b: true }, "d6:Structd1:a1:x1:bi1eee");
+//!
+//! #[serde(untagged)]
+//! #[serde(crate = "serde_")]
+//! #[derive(Serialize)]
+//! enum Untagged {
+//! Foo { x: i32 },
+//! Bar { y: char },
+//! }
+//!
+//! repr(Untagged::Foo { x: -1 }, "d1:xi-1ee");
+//! repr(Untagged::Bar { y: 'z' }, "d1:y1:ze");
+//! ```
+
+mod common;
+
+pub mod de;
+pub mod error;
+pub mod ser;
+
+pub use de::{from_bytes, Deserializer};
+pub use error::{Error, Result};
+pub use ser::{to_bytes, Serializer};
+
+#[cfg(test)]
+mod tests {
+ use super::common::*;
+
+ use std::{collections::HashMap, fmt::Debug};
+
+ use super::{
+ de::{from_bytes, Deserializer},
+ ser::to_bytes,
+ };
+
+ use serde::{de::DeserializeOwned, ser::Serialize};
+ use serde_derive::{Deserialize, Serialize};
+
+ fn case<V, B>(value: V, want: B)
+ where
+ V: Serialize + DeserializeOwned + PartialEq + Debug,
+ B: AsRef<[u8]>,
+ {
+ let want = want.as_ref();
+
+ let encoded = match to_bytes(&value) {
+ Ok(have) => {
+ assert_eq!(
+ have,
+ want,
+ "Expected `{}` but got `{}` when serializing `{:?}`",
+ String::from_utf8_lossy(&want),
+ String::from_utf8_lossy(&have),
+ value
+ );
+ have
+ },
+ Err(err) => panic!("Failed to serialize `{:?}`: {}", value, err),
+ };
+
+ let deserialized = match from_bytes::<V>(&encoded) {
+ Ok(deserialized) => deserialized,
+ Err(error) => panic!(
+ "Failed to deserialize `{:?}` from `{}`: {}",
+ value,
+ String::from_utf8_lossy(&encoded),
+ error
+ ),
+ };
+
+ assert_eq!(
+ deserialized, value,
+ "Deserialized value != original: `{:?}` != `{:?}`",
+ deserialized, value
+ );
+ }
+
+ fn case_borrowed<V, B>(value: V, want: B)
+ where
+ V: Serialize + Debug,
+ B: AsRef<[u8]>,
+ {
+ let want = want.as_ref();
+
+ match to_bytes(&value) {
+ Ok(have) => {
+ assert_eq!(
+ have,
+ want,
+ "Expected `{}` but got `{}` when serializing `{:?}`",
+ String::from_utf8_lossy(&want),
+ String::from_utf8_lossy(&have),
+ value
+ );
+ },
+ Err(err) => panic!("Failed to serialize `{:?}`: {}", value, err),
+ }
+ }
+
+ #[test]
+ fn scalar() {
+ case(false, "i0e");
+ case(true, "i1e");
+ case(0u8, "i0e");
+ case(1u8, "i1e");
+ case(0u16, "i0e");
+ case(1u16, "i1e");
+ case(0u32, "i0e");
+ case(1u32, "i1e");
+ case(0u64, "i0e");
+ case(1u64, "i1e");
+ case(0u128, "i0e");
+ case(1u128, "i1e");
+ case(0usize, "i0e");
+ case(1usize, "i1e");
+ case(0i8, "i0e");
+ case(1i8, "i1e");
+ case(-1i8, "i-1e");
+ case(0i16, "i0e");
+ case(1i16, "i1e");
+ case(-1i16, "i-1e");
+ case(0i32, "i0e");
+ case(1i32, "i1e");
+ case(-1i32, "i-1e");
+ case(0i64, "i0e");
+ case(1i64, "i1e");
+ case(-1i64, "i-1e");
+ case(0i128, "i0e");
+ case(1i128, "i1e");
+ case(-1i128, "i-1e");
+ case(0isize, "i0e");
+ case(1isize, "i1e");
+ case(-1isize, "i-1e");
+ }
+
+ #[test]
+ fn f32() {
+ let value = 100.100f32;
+ let bytes = value.to_bits().to_be_bytes();
+ let mut bencode: Vec<u8> = Vec::new();
+ bencode.extend(b"4:");
+ bencode.extend(&bytes);
+ case(value, bencode);
+ }
+
+ #[test]
+ fn f64() {
+ let value = 100.100f64;
+ let bytes = value.to_bits().to_be_bytes();
+ let mut bencode: Vec<u8> = Vec::new();
+ bencode.extend(b"8:");
+ bencode.extend(&bytes);
+ case(value, bencode);
+ }
+
+ #[test]
+ fn unit() {
+ case((), "le");
+ }
+
+ #[test]
+ fn none() {
+ case::<Option<u8>, &str>(None, "le");
+ }
+
+ #[test]
+ fn some() {
+ case(Some(0), "li0ee");
+ }
+
+ #[test]
+ fn char() {
+ case('a', "1:a");
+ case('\u{1F9D0}', "4:\u{1F9D0}");
+ }
+
+ #[test]
+ fn str() {
+ case_borrowed("foo", "3:foo");
+ }
+
+ #[test]
+ fn string() {
+ case("foo".to_string(), "3:foo");
+ }
+
+ #[test]
+ fn bytes_default() {
+ let value: Vec<u8> = vec![1, 2, 3, 4];
+ case(value, "li1ei2ei3ei4ee");
+ }
+
+ #[test]
+ fn bytes_with_serde_bytes() {
+ #[derive(Debug, Serialize, Deserialize, PartialEq)]
+ #[serde(crate = "serde_")]
+ #[serde(transparent)]
+ struct Owned {
+ #[serde(with = "serde_bytes")]
+ bytes: Vec<u8>,
+ }
+
+ case(
+ Owned {
+ bytes: vec![1, 2, 3],
+ },
+ "3:\x01\x02\x03",
+ );
+
+ #[derive(Debug, Serialize, Deserialize, PartialEq)]
+ #[serde(crate = "serde_")]
+ #[serde(transparent)]
+ struct Borrowed<'bytes> {
+ #[serde(with = "serde_bytes")]
+ bytes: &'bytes [u8],
+ }
+
+ case_borrowed(Borrowed { bytes: &[1, 2, 3] }, b"3:\x01\x02\x03");
+ }
+
+ #[test]
+ fn map() {
+ let mut map = HashMap::new();
+ map.insert("foo".to_owned(), 1);
+ map.insert("bar".to_owned(), 2);
+ case(map, "d3:bari2e3:fooi1ee");
+ }
+
+ #[test]
+ fn map_non_byte_key() {
+ let mut map = HashMap::new();
+ map.insert(1, 1);
+ map.insert(2, 2);
+ assert_matches!(to_bytes(&map), Err(Error::ArbitraryMapKeysUnsupported));
+ }
+
+ #[test]
+ fn unit_struct() {
+ #[derive(Debug, Serialize, Deserialize, PartialEq)]
+ #[serde(crate = "serde_")]
+ struct Foo;
+ case(Foo, "le");
+ }
+
+ #[test]
+ fn newtype_struct() {
+ #[derive(Debug, Serialize, Deserialize, PartialEq)]
+ #[serde(crate = "serde_")]
+ struct Foo(u8);
+ case(Foo(1), "i1e");
+ }
+
+ #[test]
+ fn seq() {
+ case(vec![1, 0, 1], "li1ei0ei1ee");
+ }
+
+ #[test]
+ fn tuple_struct() {
+ #[derive(Serialize, Deserialize, Debug, PartialEq)]
+ #[serde(crate = "serde_")]
+ struct Foo(String, u32, i32);
+
+ case(Foo("hello".to_string(), 1, -100), "l5:helloi1ei-100ee");
+ }
+
+ #[test]
+ fn record_struct() {
+ #[derive(Serialize, Deserialize, Debug, PartialEq)]
+ #[serde(crate = "serde_")]
+ struct Foo {
+ a: u8,
+ b: String,
+ }
+
+ case(
+ Foo {
+ a: 1,
+ b: "hello".to_string(),
+ },
+ "d1:ai1e1:b5:helloe",
+ );
+ }
+
+ #[test]
+ fn struct_field_order() {
+ // Serde serializes the fields of this struct in the opposite
+ // order to that mandated by bencode. This would trigger an
+ // error if the struct serializer failed to correctly order
+ // the fields during serialization.
+ #[derive(Serialize, Deserialize, Debug, PartialEq, Default)]
+ #[serde(crate = "serde_")]
+ struct Foo {
+ fac: u8,
+ fb: u8,
+ }
+
+ case(Foo { fac: 0, fb: 1 }, "d3:faci0e2:fbi1ee");
+ }
+
+ #[test]
+ fn enum_tests() {
+ #[derive(Serialize, Deserialize, Debug, PartialEq)]
+ #[serde(crate = "serde_")]
+ enum Enum {
+ Unit,
+ Newtype(i32),
+ Tuple(bool, i32),
+ Struct { a: char, b: bool },
+ }
+
+ case(Enum::Unit, "4:Unit");
+ case(Enum::Newtype(-1), "d7:Newtypei-1ee");
+ case(Enum::Tuple(true, 10), "d5:Tupleli1ei10eee");
+ case(Enum::Struct { a: 'x', b: true }, "d6:Structd1:a1:x1:bi1eee");
+ }
+
+ #[test]
+ fn untagged_enum() {
+ #[serde(untagged)]
+ #[derive(Serialize, Deserialize, Debug, PartialEq)]
+ #[serde(crate = "serde_")]
+ enum Untagged {
+ Foo { x: i32 },
+ Bar { y: String },
+ }
+
+ case(Untagged::Foo { x: -1 }, "d1:xi-1ee");
+ case(Untagged::Bar { y: "z".into() }, "d1:y1:ze");
+ }
+
+ #[test]
+ fn flatten() {
+ #[derive(Serialize, Deserialize, Debug, PartialEq)]
+ #[serde(crate = "serde_")]
+ struct Foo {
+ #[serde(flatten)]
+ bar: Bar,
+ }
+
+ #[derive(Serialize, Deserialize, Debug, PartialEq)]
+ #[serde(crate = "serde_")]
+ struct Bar {
+ x: i32,
+ }
+
+ case(Foo { bar: Bar { x: 1 } }, "d1:xi1ee");
+ }
+
+ #[test]
+ fn invalid_bool() {
+ assert_matches!(
+ from_bytes::<bool>(b"i100e"),
+ Err(Error::InvalidBool(ref value)) if value == "100"
+ );
+ }
+
+ #[test]
+ fn invalid_f32() {
+ assert_matches!(from_bytes::<f32>(b"8:10000000"), Err(Error::InvalidF32(8)));
+ }
+
+ #[test]
+ fn invalid_f64() {
+ assert_matches!(from_bytes::<f64>(b"4:1000"), Err(Error::InvalidF64(4)));
+ }
+
+ #[test]
+ fn invalid_char() {
+ assert_matches!(from_bytes::<char>(b"2:00"), Err(Error::InvalidChar(2)));
+ }
+
+ #[test]
+ fn trailing_bytes_forbid() {
+ assert_matches!(
+ Deserializer::from_bytes(b"i1ei1e")
+ .with_forbid_trailing_bytes(true)
+ .deserialize::<u32>(),
+ Err(Error::TrailingBytes)
+ );
+ }
+
+ #[test]
+ fn trailing_bytes_allow() {
+ assert_matches!(
+ Deserializer::from_bytes(b"i1ei1e").deserialize::<u32>(),
+ Ok(1)
+ );
+ }
+
+ #[test]
+ fn borrowed_value() {
+ use crate::value::Value;
+ use std::borrow::Cow;
+
+ #[derive(Debug, Deserialize, PartialEq, Eq)]
+ #[serde(crate = "serde_")]
+ struct Dict<'a> {
+ #[serde(borrow)]
+ v: Value<'a>,
+ }
+
+ assert_eq!(
+ Deserializer::from_bytes(b"d1:v3:\x01\x02\x03e")
+ .deserialize::<Dict<'_>>()
+ .unwrap(),
+ Dict {
+ v: Value::Bytes(Cow::Owned(vec![1, 2, 3]))
+ },
+ );
+ }
+}
diff --git a/rust/vendor/bendy/src/serde/common.rs b/rust/vendor/bendy/src/serde/common.rs
new file mode 100644
index 0000000..9da688e
--- /dev/null
+++ b/rust/vendor/bendy/src/serde/common.rs
@@ -0,0 +1,30 @@
+/// Standard library
+pub(crate) use std::{
+ convert::TryInto,
+ fmt::{self, Display, Formatter},
+ iter::Peekable,
+ num::ParseIntError,
+ str::{self, Utf8Error},
+};
+
+pub(crate) use serde_ as serde;
+
+/// Dependencies
+pub(crate) use serde::{
+ de::{
+ DeserializeSeed, EnumAccess, IntoDeserializer, MapAccess, SeqAccess, VariantAccess, Visitor,
+ },
+ ser::{
+ Serialize, SerializeMap, SerializeSeq, SerializeStruct, SerializeStructVariant,
+ SerializeTuple, SerializeTupleStruct, SerializeTupleVariant,
+ },
+ Deserialize,
+};
+
+/// Structs and enums
+pub(crate) use crate::{
+ decoding::{self, Decoder, Tokens},
+ encoding::{self, Encoder, UnsortedDictEncoder},
+ serde::{ser::Serializer, Error, Result},
+ state_tracker::{StructureError, Token},
+};
diff --git a/rust/vendor/bendy/src/serde/de.rs b/rust/vendor/bendy/src/serde/de.rs
new file mode 100644
index 0000000..55fe263
--- /dev/null
+++ b/rust/vendor/bendy/src/serde/de.rs
@@ -0,0 +1,487 @@
+//! Serde bencode deserialization.
+
+use crate::serde::common::*;
+
+/// Deserialize an instance of `T` from bencode
+pub fn from_bytes<'a, T>(s: &'a [u8]) -> Result<T>
+where
+ T: Deserialize<'a>,
+{
+ Deserializer::from_bytes(s).deserialize()
+}
+
+/// Bencode deserializer
+pub struct Deserializer<'de> {
+ forbid_trailing_bytes: bool,
+ tokens: Peekable<Tokens<'de>>,
+}
+
+impl<'de> Deserializer<'de> {
+    /// Create a new `Deserializer` with the given byte slice
+ pub fn from_bytes(input: &'de [u8]) -> Self {
+ Deserializer {
+ forbid_trailing_bytes: false,
+ tokens: Decoder::new(input).tokens().peekable(),
+ }
+ }
+
+ /// Return an error if trailing bytes remain after deserialization
+ pub fn with_forbid_trailing_bytes(mut self, forbid_trailing_bytes: bool) -> Self {
+ self.forbid_trailing_bytes = forbid_trailing_bytes;
+ self
+ }
+
+ /// Consume the deserializer, producing an instance of `T`
+ pub fn deserialize<T>(mut self) -> Result<T, Error>
+ where
+ T: Deserialize<'de>,
+ {
+ let t = T::deserialize(&mut self)?;
+
+ if self.forbid_trailing_bytes {
+ if let Some(_) = self.tokens.next() {
+ return Err(Error::TrailingBytes);
+ }
+ }
+
+ Ok(t)
+ }
+}
+
+impl<'de> Deserializer<'de> {
+ fn next_token(&mut self) -> Result<Token<'de>> {
+ match self.tokens.next() {
+ Some(result) => Ok(result?),
+ None => Err(Error::Decode(StructureError::UnexpectedEof.into())),
+ }
+ }
+
+ fn next_integer(&mut self) -> Result<&'de str> {
+ match self.next_token()? {
+ Token::Num(num) => Ok(num),
+ other => Err(decoding::Error::unexpected_token("Num", other.name()).into()),
+ }
+ }
+
+ fn next_bytes(&mut self) -> Result<&'de [u8]> {
+ match self.next_token()? {
+ Token::String(bytes) => Ok(bytes),
+ other => Err(decoding::Error::unexpected_token("String", other.name()).into()),
+ }
+ }
+
+ fn next_string(&mut self) -> Result<&'de str> {
+ let bytes = self.next_bytes()?;
+ let string = str::from_utf8(bytes)?;
+ Ok(string)
+ }
+
+ fn expect_list_begin(&mut self) -> Result<()> {
+ match self.next_token()? {
+ Token::List => Ok(()),
+ other => Err(decoding::Error::unexpected_token("List", other.name()).into()),
+ }
+ }
+
+ fn expect_dict_begin(&mut self) -> Result<()> {
+ match self.next_token()? {
+ Token::Dict => Ok(()),
+ other => Err(decoding::Error::unexpected_token("Dict", other.name()).into()),
+ }
+ }
+
+ fn expect_end(&mut self) -> Result<()> {
+ match self.next_token()? {
+ Token::End => Ok(()),
+ other => Err(decoding::Error::unexpected_token("End", other.name()).into()),
+ }
+ }
+
+ fn expect_empty_list(&mut self) -> Result<()> {
+ self.expect_list_begin()?;
+ self.expect_end()?;
+ Ok(())
+ }
+
+ fn peek_end(&mut self) -> bool {
+ self.peek() == Some(Token::End)
+ }
+
+ fn peek(&mut self) -> Option<Token<'de>> {
+ if let Some(Ok(token)) = self.tokens.peek() {
+ Some(*token)
+ } else {
+ None
+ }
+ }
+}
+
+impl<'de, 'a> serde::de::Deserializer<'de> for &'a mut Deserializer<'de> {
+ type Error = Error;
+
+ fn deserialize_any<V>(self, visitor: V) -> Result<V::Value>
+ where
+ V: Visitor<'de>,
+ {
+ match self.peek() {
+ Some(Token::Dict) => self.deserialize_map(visitor),
+ Some(Token::String(_)) => self.deserialize_bytes(visitor),
+ Some(Token::List) => self.deserialize_seq(visitor),
+ Some(Token::Num(_)) => self.deserialize_i64(visitor),
+ Some(Token::End) => Err(Error::Decode(StructureError::invalid_state("End").into())),
+ None => Err(Error::Decode(StructureError::UnexpectedEof.into())),
+ }
+ }
+
+ fn deserialize_bool<V>(self, visitor: V) -> Result<V::Value>
+ where
+ V: Visitor<'de>,
+ {
+ match self.next_integer()? {
+ "0" => visitor.visit_bool(false),
+ "1" => visitor.visit_bool(true),
+ other => Err(Error::InvalidBool(other.to_owned())),
+ }
+ }
+
+ fn deserialize_i8<V>(self, visitor: V) -> Result<V::Value>
+ where
+ V: Visitor<'de>,
+ {
+ visitor.visit_i8(self.next_integer()?.parse()?)
+ }
+
+ fn deserialize_i16<V>(self, visitor: V) -> Result<V::Value>
+ where
+ V: Visitor<'de>,
+ {
+ visitor.visit_i16(self.next_integer()?.parse()?)
+ }
+
+ fn deserialize_i32<V>(self, visitor: V) -> Result<V::Value>
+ where
+ V: Visitor<'de>,
+ {
+ visitor.visit_i32(self.next_integer()?.parse()?)
+ }
+
+ fn deserialize_i64<V>(self, visitor: V) -> Result<V::Value>
+ where
+ V: Visitor<'de>,
+ {
+ visitor.visit_i64(self.next_integer()?.parse()?)
+ }
+
+ fn deserialize_i128<V>(self, visitor: V) -> Result<V::Value>
+ where
+ V: Visitor<'de>,
+ {
+ visitor.visit_i128(self.next_integer()?.parse()?)
+ }
+
+ fn deserialize_u8<V>(self, visitor: V) -> Result<V::Value>
+ where
+ V: Visitor<'de>,
+ {
+ visitor.visit_u8(self.next_integer()?.parse()?)
+ }
+
+ fn deserialize_u16<V>(self, visitor: V) -> Result<V::Value>
+ where
+ V: Visitor<'de>,
+ {
+ visitor.visit_u16(self.next_integer()?.parse()?)
+ }
+
+ fn deserialize_u32<V>(self, visitor: V) -> Result<V::Value>
+ where
+ V: Visitor<'de>,
+ {
+ visitor.visit_u32(self.next_integer()?.parse()?)
+ }
+
+ fn deserialize_u64<V>(self, visitor: V) -> Result<V::Value>
+ where
+ V: Visitor<'de>,
+ {
+ visitor.visit_u64(self.next_integer()?.parse()?)
+ }
+
+ fn deserialize_u128<V>(self, visitor: V) -> Result<V::Value>
+ where
+ V: Visitor<'de>,
+ {
+ visitor.visit_u128(self.next_integer()?.parse()?)
+ }
+
+ fn deserialize_f32<V>(self, visitor: V) -> Result<V::Value>
+ where
+ V: Visitor<'de>,
+ {
+ let bytes = self.next_bytes()?;
+ let bits = u32::from_be_bytes(
+ bytes
+ .try_into()
+ .map_err(|_| Error::InvalidF32(bytes.len()))?,
+ );
+ let value = f32::from_bits(bits);
+ visitor.visit_f32(value)
+ }
+
+ fn deserialize_f64<V>(self, visitor: V) -> Result<V::Value>
+ where
+ V: Visitor<'de>,
+ {
+ let bytes = self.next_bytes()?;
+ let bits = u64::from_be_bytes(
+ bytes
+ .try_into()
+ .map_err(|_| Error::InvalidF64(bytes.len()))?,
+ );
+ let value = f64::from_bits(bits);
+ visitor.visit_f64(value)
+ }
+
+ fn deserialize_char<V>(self, visitor: V) -> Result<V::Value>
+ where
+ V: Visitor<'de>,
+ {
+ let s: &str = self.next_string()?;
+ let count = s.chars().count();
+ if count != 1 {
+ return Err(Error::InvalidChar(count));
+ }
+ visitor.visit_char(s.chars().next().unwrap())
+ }
+
+ fn deserialize_str<V>(self, visitor: V) -> Result<V::Value>
+ where
+ V: Visitor<'de>,
+ {
+ visitor.visit_borrowed_str(self.next_string()?)
+ }
+
+ fn deserialize_string<V>(self, visitor: V) -> Result<V::Value>
+ where
+ V: Visitor<'de>,
+ {
+ self.deserialize_str(visitor)
+ }
+
+ fn deserialize_bytes<V>(self, visitor: V) -> Result<V::Value>
+ where
+ V: Visitor<'de>,
+ {
+ visitor.visit_borrowed_bytes(self.next_bytes()?)
+ }
+
+ fn deserialize_byte_buf<V>(self, visitor: V) -> Result<V::Value>
+ where
+ V: Visitor<'de>,
+ {
+ self.deserialize_bytes(visitor)
+ }
+
+ fn deserialize_option<V>(self, visitor: V) -> Result<V::Value>
+ where
+ V: Visitor<'de>,
+ {
+ self.expect_list_begin()?;
+ let value = if self.peek_end() {
+ visitor.visit_none()
+ } else {
+ visitor.visit_some(&mut *self)
+ };
+ self.expect_end()?;
+ value
+ }
+
+ fn deserialize_unit<V>(self, visitor: V) -> Result<V::Value>
+ where
+ V: Visitor<'de>,
+ {
+ self.expect_empty_list()?;
+ visitor.visit_unit()
+ }
+
+ fn deserialize_unit_struct<V>(self, _name: &'static str, visitor: V) -> Result<V::Value>
+ where
+ V: Visitor<'de>,
+ {
+ self.deserialize_unit(visitor)
+ }
+
+ fn deserialize_newtype_struct<V>(self, _name: &'static str, visitor: V) -> Result<V::Value>
+ where
+ V: Visitor<'de>,
+ {
+ visitor.visit_newtype_struct(self)
+ }
+
+ fn deserialize_seq<V>(self, visitor: V) -> Result<V::Value>
+ where
+ V: Visitor<'de>,
+ {
+ self.expect_list_begin()?;
+ let value = visitor.visit_seq(&mut *self)?;
+ self.expect_end()?;
+ Ok(value)
+ }
+
+ fn deserialize_tuple<V>(self, _len: usize, visitor: V) -> Result<V::Value>
+ where
+ V: Visitor<'de>,
+ {
+ self.deserialize_seq(visitor)
+ }
+
+ fn deserialize_tuple_struct<V>(
+ self,
+ _name: &'static str,
+ _len: usize,
+ visitor: V,
+ ) -> Result<V::Value>
+ where
+ V: Visitor<'de>,
+ {
+ self.deserialize_seq(visitor)
+ }
+
+ fn deserialize_map<V>(self, visitor: V) -> Result<V::Value>
+ where
+ V: Visitor<'de>,
+ {
+ self.expect_dict_begin()?;
+ let value = visitor.visit_map(&mut *self)?;
+ self.expect_end()?;
+ Ok(value)
+ }
+
+ fn deserialize_struct<V>(
+ self,
+ _name: &'static str,
+ _fields: &'static [&'static str],
+ visitor: V,
+ ) -> Result<V::Value>
+ where
+ V: Visitor<'de>,
+ {
+ self.expect_dict_begin()?;
+ let value = visitor.visit_map(&mut *self)?;
+ self.expect_end()?;
+ Ok(value)
+ }
+
+ fn deserialize_enum<V>(
+ self,
+ _name: &'static str,
+ _variants: &'static [&'static str],
+ visitor: V,
+ ) -> Result<V::Value>
+ where
+ V: Visitor<'de>,
+ {
+ if self.peek() == Some(Token::Dict) {
+ self.expect_dict_begin()?;
+ visitor.visit_enum(self)
+ } else {
+ visitor.visit_enum(self.next_string()?.into_deserializer())
+ }
+ }
+
+ fn deserialize_identifier<V>(self, visitor: V) -> Result<V::Value>
+ where
+ V: Visitor<'de>,
+ {
+ self.deserialize_str(visitor)
+ }
+
+ fn deserialize_ignored_any<V>(self, visitor: V) -> Result<V::Value>
+ where
+ V: Visitor<'de>,
+ {
+ self.deserialize_any(visitor)
+ }
+}
+
+impl<'de> SeqAccess<'de> for Deserializer<'de> {
+ type Error = Error;
+
+ fn next_element_seed<T>(&mut self, seed: T) -> Result<Option<T::Value>>
+ where
+ T: DeserializeSeed<'de>,
+ {
+ if self.peek_end() {
+ return Ok(None);
+ }
+ seed.deserialize(self).map(Some)
+ }
+}
+
+impl<'de> MapAccess<'de> for Deserializer<'de> {
+ type Error = Error;
+
+ fn next_key_seed<K>(&mut self, seed: K) -> Result<Option<K::Value>>
+ where
+ K: DeserializeSeed<'de>,
+ {
+ if self.peek_end() {
+ return Ok(None);
+ }
+ seed.deserialize(self).map(Some)
+ }
+
+ fn next_value_seed<V>(&mut self, seed: V) -> Result<V::Value>
+ where
+ V: DeserializeSeed<'de>,
+ {
+ seed.deserialize(self)
+ }
+}
+
+impl<'de> EnumAccess<'de> for &mut Deserializer<'de> {
+ type Error = Error;
+ type Variant = Self;
+
+ fn variant_seed<V>(self, seed: V) -> Result<(V::Value, Self)>
+ where
+ V: DeserializeSeed<'de>,
+ {
+ Ok((seed.deserialize(&mut *self)?, self))
+ }
+}
+
+impl<'de> VariantAccess<'de> for &mut Deserializer<'de> {
+ type Error = Error;
+
+ fn unit_variant(self) -> Result<()> {
+ Ok(())
+ }
+
+ fn newtype_variant_seed<T>(self, seed: T) -> Result<T::Value>
+ where
+ T: DeserializeSeed<'de>,
+ {
+ let value = seed.deserialize(&mut *self)?;
+ self.expect_end()?;
+ Ok(value)
+ }
+
+ fn tuple_variant<V>(self, _len: usize, visitor: V) -> Result<V::Value>
+ where
+ V: Visitor<'de>,
+ {
+ let value = serde::de::Deserializer::deserialize_seq(&mut *self, visitor)?;
+ self.expect_end()?;
+ Ok(value)
+ }
+
+ fn struct_variant<V>(self, _fields: &'static [&'static str], visitor: V) -> Result<V::Value>
+ where
+ V: Visitor<'de>,
+ {
+ let value = serde::de::Deserializer::deserialize_map(&mut *self, visitor)?;
+ self.expect_end()?;
+ Ok(value)
+ }
+}
diff --git a/rust/vendor/bendy/src/serde/error.rs b/rust/vendor/bendy/src/serde/error.rs
new file mode 100644
index 0000000..1203fee
--- /dev/null
+++ b/rust/vendor/bendy/src/serde/error.rs
@@ -0,0 +1,108 @@
+//! Serde error and result types.
+
+use crate::serde::common::*;
+
+pub type Result<T, E = Error> = std::result::Result<T, E>;
+
+/// An enumeration of potential errors that appear during serde serialization and
+/// deserialization
+#[derive(Debug)]
+pub enum Error {
+ /// Error that occurs if a map with a key type which does not serialize to
+ /// a byte string is encountered
+ ArbitraryMapKeysUnsupported,
+ /// Error that occurs if methods on MapSerializer are called out of order
+ MapSerializationCallOrder,
+ /// Error that occurs if a bool is deserialized from an integer value other
+ /// than `0` or `1`
+ InvalidBool(String),
+    /// Error that occurs if an f32 is deserialized from a byte string of length
+    /// other than 4
+    InvalidF32(usize),
+    /// Error that occurs if an f64 is deserialized from a byte string of length
+    /// other than 8
+    InvalidF64(usize),
+    /// Error that occurs if a char is deserialized from a string that does not
+    /// contain exactly one character
+    InvalidChar(usize),
+ /// Error that occurs if trailing bytes remain after deserialization, if the
+ /// deserializer is configured to forbid trailing bytes
+ TrailingBytes,
+ /// Error that occurs if a serde-related error occurs during serialization
+ CustomEncode(String),
+ /// Error that occurs if a serde-related error occurs during deserialization
+ CustomDecode(String),
+ /// Error that occurs if a problem is encountered during serialization
+ Encode(encoding::Error),
+ /// Error that occurs if a problem is encountered during deserialization
+ Decode(decoding::Error),
+}
+
+impl From<encoding::Error> for Error {
+ fn from(encoding_error: encoding::Error) -> Self {
+ Error::Encode(encoding_error)
+ }
+}
+
+impl From<decoding::Error> for Error {
+ fn from(decoding_error: decoding::Error) -> Self {
+ Error::Decode(decoding_error)
+ }
+}
+
+impl From<ParseIntError> for Error {
+ fn from(parse_int_error: ParseIntError) -> Self {
+ Error::Decode(parse_int_error.into())
+ }
+}
+
+impl From<Utf8Error> for Error {
+ fn from(utf8_error: Utf8Error) -> Self {
+ Error::Decode(utf8_error.into())
+ }
+}
+
+impl serde::ser::Error for Error {
+ fn custom<T>(msg: T) -> Self
+ where
+ T: Display,
+ {
+ Error::CustomEncode(msg.to_string())
+ }
+}
+
+impl serde::de::Error for Error {
+ fn custom<T: Display>(msg: T) -> Self {
+ Error::CustomDecode(msg.to_string())
+ }
+}
+
+impl Display for Error {
+ fn fmt(&self, f: &mut Formatter) -> fmt::Result {
+ match self {
+ Error::CustomEncode(message) => write!(f, "Serialization failed: {}", message),
+ Error::CustomDecode(message) => write!(f, "Deserialization failed: {}", message),
+ Error::Encode(error) => write!(f, "{}", error),
+ Error::Decode(error) => write!(f, "{}", error),
+ Error::InvalidBool(value) => write!(f, "Invalid integer value for bool: `{}`", value),
+ Error::InvalidF32(length) => {
+ write!(f, "Invalid length byte string value for f32: {}", length)
+ },
+ Error::InvalidF64(length) => {
+ write!(f, "Invalid length byte string value for f64: {}", length)
+ },
+ Error::InvalidChar(length) => {
+ write!(f, "Invalid length string value for char: {}", length)
+ },
+ Error::TrailingBytes => write!(f, "Trailing bytes remain after deserializing value"),
+ Error::ArbitraryMapKeysUnsupported => write!(
+ f,
+ "Maps with key types that do not serialize to byte strings are unsupported",
+ ),
+ Error::MapSerializationCallOrder => {
+ write!(f, "Map serialization methods called out of order")
+ },
+ }
+ }
+}
+
+impl std::error::Error for Error {}
diff --git a/rust/vendor/bendy/src/serde/ser.rs b/rust/vendor/bendy/src/serde/ser.rs
new file mode 100644
index 0000000..8d59324
--- /dev/null
+++ b/rust/vendor/bendy/src/serde/ser.rs
@@ -0,0 +1,368 @@
+//! Serde bencode serialization.
+
+use crate::serde::common::*;
+
+pub use map_serializer::MapSerializer;
+pub use struct_serializer::StructSerializer;
+
+mod map_serializer;
+mod struct_serializer;
+
+/// Serialize an instance of `T` to bencode
+pub fn to_bytes<T>(value: &T) -> Result<Vec<u8>>
+where
+ T: ?Sized + Serialize,
+{
+ let mut serializer = Serializer::new();
+ value.serialize(&mut serializer)?;
+ serializer.into_bytes()
+}
+
+/// A serde Bencode serializer
+pub struct Serializer {
+ encoder: Encoder,
+}
+
+impl Serializer {
+ /// Create a new `Serializer`
+ pub fn new() -> Self {
+ Serializer {
+ encoder: Encoder::new(),
+ }
+ }
+
+ /// Create a new `Serializer` with a given maximum serialization depth
+ pub fn with_max_depth(max_depth: usize) -> Serializer {
+ Serializer {
+ encoder: Encoder::new().with_max_depth(max_depth),
+ }
+ }
+
+ /// Consume this `Serializer`, returning the encoded bencode
+ pub fn into_bytes(self) -> Result<Vec<u8>> {
+ Ok(self.encoder.get_output()?)
+ }
+
+ fn emit_empty_list(&mut self) -> Result<()> {
+ self.encoder.emit_list(|_| Ok(()))?;
+ Ok(())
+ }
+
+ fn begin_struct(&mut self) -> Result<StructSerializer> {
+ let encoder = self.encoder.begin_unsorted_dict()?;
+ Ok(StructSerializer::new(&mut self.encoder, encoder))
+ }
+
+ fn begin_map(&mut self) -> Result<MapSerializer> {
+ let encoder = self.encoder.begin_unsorted_dict()?;
+ Ok(MapSerializer::new(&mut self.encoder, encoder))
+ }
+}
+
+impl<'a> serde::ser::Serializer for &'a mut Serializer {
+ type Error = Error;
+ type Ok = ();
+ type SerializeMap = MapSerializer<'a>;
+ type SerializeSeq = Self;
+ type SerializeStruct = StructSerializer<'a>;
+ type SerializeStructVariant = StructSerializer<'a>;
+ type SerializeTuple = Self;
+ type SerializeTupleStruct = Self;
+ type SerializeTupleVariant = Self;
+
+ fn serialize_bool(self, v: bool) -> Result<()> {
+ self.encoder.emit(if v { 1 } else { 0 })?;
+ Ok(())
+ }
+
+ fn serialize_i8(self, v: i8) -> Result<()> {
+ self.encoder.emit(v)?;
+ Ok(())
+ }
+
+ fn serialize_i16(self, v: i16) -> Result<()> {
+ self.encoder.emit(v)?;
+ Ok(())
+ }
+
+ fn serialize_i32(self, v: i32) -> Result<()> {
+ self.encoder.emit(v)?;
+ Ok(())
+ }
+
+ fn serialize_i64(self, v: i64) -> Result<()> {
+ self.encoder.emit(v)?;
+ Ok(())
+ }
+
+ fn serialize_i128(self, v: i128) -> Result<()> {
+ self.encoder.emit(v)?;
+ Ok(())
+ }
+
+ fn serialize_u8(self, v: u8) -> Result<()> {
+ self.encoder.emit(v)?;
+ Ok(())
+ }
+
+ fn serialize_u16(self, v: u16) -> Result<()> {
+ self.encoder.emit(v)?;
+ Ok(())
+ }
+
+ fn serialize_u32(self, v: u32) -> Result<()> {
+ self.encoder.emit(v)?;
+ Ok(())
+ }
+
+ fn serialize_u64(self, v: u64) -> Result<()> {
+ self.encoder.emit(v)?;
+ Ok(())
+ }
+
+ fn serialize_u128(self, v: u128) -> Result<()> {
+ self.encoder.emit(v)?;
+ Ok(())
+ }
+
+ fn serialize_f32(self, v: f32) -> Result<()> {
+ let bytes = v.to_bits().to_be_bytes();
+ self.serialize_bytes(&bytes)
+ }
+
+ fn serialize_f64(self, v: f64) -> Result<()> {
+ let bytes = v.to_bits().to_be_bytes();
+ self.serialize_bytes(&bytes)
+ }
+
+ fn serialize_char(self, v: char) -> Result<()> {
+ let mut buffer: [u8; 4] = [0; 4];
+ self.serialize_str(v.encode_utf8(&mut buffer))
+ }
+
+ fn serialize_str(self, v: &str) -> Result<()> {
+ self.serialize_bytes(v.as_bytes())
+ }
+
+ fn serialize_bytes(self, v: &[u8]) -> Result<()> {
+ self.encoder.emit_bytes(v)?;
+ Ok(())
+ }
+
+ fn serialize_none(self) -> Result<()> {
+ self.emit_empty_list()
+ }
+
+ fn serialize_some<T>(self, value: &T) -> Result<()>
+ where
+ T: ?Sized + Serialize,
+ {
+ self.encoder.emit_token(Token::List)?;
+ value.serialize(&mut *self)?;
+ self.encoder.emit_token(Token::End)?;
+ Ok(())
+ }
+
+ fn serialize_unit(self) -> Result<()> {
+ self.emit_empty_list()
+ }
+
+ fn serialize_unit_struct(self, _name: &'static str) -> Result<()> {
+ self.emit_empty_list()
+ }
+
+ fn serialize_newtype_struct<T>(self, _name: &'static str, value: &T) -> Result<()>
+ where
+ T: ?Sized + Serialize,
+ {
+ value.serialize(self)
+ }
+
+ fn serialize_seq(self, _len: Option<usize>) -> Result<Self::SerializeSeq> {
+ self.encoder.emit_token(Token::List)?;
+ Ok(self)
+ }
+
+ fn serialize_tuple(self, _len: usize) -> Result<Self::SerializeTuple> {
+ self.encoder.emit_token(Token::List)?;
+ Ok(self)
+ }
+
+ fn serialize_tuple_struct(
+ self,
+ _name: &'static str,
+ _len: usize,
+ ) -> Result<Self::SerializeTupleStruct> {
+ self.encoder.emit_token(Token::List)?;
+ Ok(self)
+ }
+
+ fn serialize_map(self, _len: Option<usize>) -> Result<Self::SerializeMap> {
+ self.begin_map()
+ }
+
+ fn serialize_unit_variant(
+ self,
+ _name: &'static str,
+ _variant_index: u32,
+ variant: &'static str,
+ ) -> Result<()> {
+ self.serialize_str(variant)
+ }
+
+ fn serialize_newtype_variant<T>(
+ self,
+ _name: &'static str,
+ _variant_index: u32,
+ variant: &'static str,
+ value: &T,
+ ) -> Result<()>
+ where
+ T: ?Sized + Serialize,
+ {
+ self.encoder.emit_token(Token::Dict)?;
+ self.serialize_str(variant)?;
+ value.serialize(&mut *self)?;
+ self.encoder.emit_token(Token::End)?;
+ Ok(())
+ }
+
+ fn serialize_struct(self, _name: &'static str, _len: usize) -> Result<Self::SerializeStruct> {
+ self.begin_struct()
+ }
+
+ fn serialize_tuple_variant(
+ self,
+ _name: &'static str,
+ _variant_index: u32,
+ variant: &'static str,
+ _len: usize,
+ ) -> Result<Self::SerializeTupleVariant> {
+ self.encoder.emit_token(Token::Dict)?;
+ self.serialize_str(variant)?;
+ self.encoder.emit_token(Token::List)?;
+ Ok(self)
+ }
+
+ fn serialize_struct_variant(
+ self,
+ _name: &'static str,
+ _variant_index: u32,
+ variant: &'static str,
+ _len: usize,
+ ) -> Result<Self::SerializeStructVariant> {
+ self.encoder.emit_token(Token::Dict)?;
+ self.serialize_str(variant)?;
+ self.begin_struct()
+ }
+}
+
+impl<'a> SerializeSeq for &'a mut Serializer {
+ type Error = Error;
+ type Ok = ();
+
+ fn serialize_element<T>(&mut self, value: &T) -> Result<()>
+ where
+ T: ?Sized + Serialize,
+ {
+ value.serialize(&mut **self)
+ }
+
+ fn end(self) -> Result<()> {
+ self.encoder.emit_token(Token::End)?;
+ Ok(())
+ }
+}
+
+impl<'a> SerializeTuple for &'a mut Serializer {
+ type Error = Error;
+ type Ok = ();
+
+ fn serialize_element<T>(&mut self, value: &T) -> Result<()>
+ where
+ T: ?Sized + Serialize,
+ {
+ value.serialize(&mut **self)
+ }
+
+ fn end(self) -> Result<()> {
+ self.encoder.emit_token(Token::End)?;
+ Ok(())
+ }
+}
+
+impl<'a> SerializeTupleStruct for &'a mut Serializer {
+ type Error = Error;
+ type Ok = ();
+
+ fn serialize_field<T>(&mut self, value: &T) -> Result<()>
+ where
+ T: ?Sized + Serialize,
+ {
+ value.serialize(&mut **self)
+ }
+
+ fn end(self) -> Result<()> {
+ self.encoder.emit_token(Token::End)?;
+ Ok(())
+ }
+}
+
+impl<'a> SerializeMap for &'a mut Serializer {
+ type Error = Error;
+ type Ok = ();
+
+ fn serialize_key<T>(&mut self, _key: &T) -> Result<()>
+ where
+ T: ?Sized + Serialize,
+ {
+ unreachable!()
+ }
+
+ fn serialize_value<T>(&mut self, _value: &T) -> Result<()>
+ where
+ T: ?Sized + Serialize,
+ {
+ unreachable!()
+ }
+
+ fn end(self) -> Result<()> {
+ unreachable!()
+ }
+}
+
+impl<'a> SerializeTupleVariant for &'a mut Serializer {
+ type Error = Error;
+ type Ok = ();
+
+ fn serialize_field<T>(&mut self, value: &T) -> Result<()>
+ where
+ T: ?Sized + Serialize,
+ {
+ value.serialize(&mut **self)
+ }
+
+ fn end(self) -> Result<()> {
+ self.encoder.emit_token(Token::End)?;
+ self.encoder.emit_token(Token::End)?;
+ Ok(())
+ }
+}
+
+impl<'a> SerializeStructVariant for &'a mut Serializer {
+ type Error = Error;
+ type Ok = ();
+
+ fn serialize_field<T>(&mut self, _key: &'static str, _value: &T) -> Result<()>
+ where
+ T: ?Sized + Serialize,
+ {
+ unreachable!()
+ }
+
+ fn end(self) -> Result<()> {
+ self.encoder.emit_token(Token::End)?;
+ self.encoder.emit_token(Token::End)?;
+ Ok(())
+ }
+}
diff --git a/rust/vendor/bendy/src/serde/ser/map_serializer.rs b/rust/vendor/bendy/src/serde/ser/map_serializer.rs
new file mode 100644
index 0000000..9b9cce3
--- /dev/null
+++ b/rust/vendor/bendy/src/serde/ser/map_serializer.rs
@@ -0,0 +1,80 @@
+use crate::serde::common::*;
+
+/// Bencode sub-serializer for maps.
+pub struct MapSerializer<'outer> {
+ pub(crate) outer: &'outer mut Encoder,
+ encoder: UnsortedDictEncoder,
+ key: Option<Vec<u8>>,
+}
+
+impl<'outer> MapSerializer<'outer> {
+ pub(crate) fn new(
+ outer: &'outer mut Encoder,
+ encoder: UnsortedDictEncoder,
+ ) -> MapSerializer<'outer> {
+ MapSerializer {
+ encoder,
+ outer,
+ key: None,
+ }
+ }
+
+ fn serialize<T>(&self, value: &T) -> Result<Vec<u8>>
+ where
+ T: ?Sized + Serialize,
+ {
+ let mut serializer = Serializer::with_max_depth(self.encoder.remaining_depth());
+ value.serialize(&mut serializer)?;
+ serializer.into_bytes()
+ }
+}
+
+impl<'outer> SerializeMap for MapSerializer<'outer> {
+ type Error = Error;
+ type Ok = ();
+
+ fn serialize_key<T>(&mut self, key: &T) -> Result<()>
+ where
+ T: ?Sized + Serialize,
+ {
+ if self.key.is_some() {
+ return Err(Error::MapSerializationCallOrder);
+ }
+
+ let mut encoded = self.serialize(key)?;
+
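+        // A bencode byte string starts with its decimal length, so any other
+        // leading byte means the key did not serialize to a byte string.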
+ match encoded.first() {
+ Some(b'0'..=b'9') => {},
+ _ => return Err(Error::ArbitraryMapKeysUnsupported),
+ }
+
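+        // Strip the `<length>:` prefix so that only the raw key bytes are
+        // passed on to `save_pair`.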
+ let colon = encoded.iter().position(|b| *b == b':').unwrap();
+ encoded.drain(0..colon + 1);
+
+ self.key = Some(encoded);
+
+ Ok(())
+ }
+
+ fn serialize_value<T>(&mut self, value: &T) -> Result<()>
+ where
+ T: ?Sized + Serialize,
+ {
+ match self.key.take() {
+ Some(bytes) => {
+ let encoded = self.serialize(value)?;
+ self.encoder.save_pair(&bytes, encoded)?;
+ Ok(())
+ },
+ None => Err(Error::MapSerializationCallOrder),
+ }
+ }
+
+ fn end(self) -> Result<()> {
+ if self.key.is_some() {
+ return Err(Error::MapSerializationCallOrder);
+ }
+ self.outer.end_unsorted_dict(self.encoder)?;
+ Ok(())
+ }
+}
diff --git a/rust/vendor/bendy/src/serde/ser/struct_serializer.rs b/rust/vendor/bendy/src/serde/ser/struct_serializer.rs
new file mode 100644
index 0000000..c0cfa96
--- /dev/null
+++ b/rust/vendor/bendy/src/serde/ser/struct_serializer.rs
@@ -0,0 +1,64 @@
+use crate::serde::common::*;
+
+/// Bencode sub-serializer for structs.
+pub struct StructSerializer<'outer> {
+ pub(crate) outer: &'outer mut Encoder,
+ encoder: UnsortedDictEncoder,
+}
+
+impl<'outer> StructSerializer<'outer> {
+ pub(crate) fn new(
+ outer: &'outer mut Encoder,
+ encoder: UnsortedDictEncoder,
+ ) -> StructSerializer<'outer> {
+ StructSerializer { encoder, outer }
+ }
+
+ fn save_field<T>(&mut self, key: &'static str, value: &T) -> Result<()>
+ where
+ T: ?Sized + Serialize,
+ {
+ let mut serializer = Serializer::with_max_depth(self.encoder.remaining_depth());
+ value.serialize(&mut serializer)?;
+ let value_bytes = serializer.into_bytes()?;
+
+ self.encoder.save_pair(key.as_bytes(), value_bytes)?;
+
+ Ok(())
+ }
+}
+
+impl<'outer> SerializeStruct for StructSerializer<'outer> {
+ type Error = Error;
+ type Ok = ();
+
+ fn serialize_field<T>(&mut self, key: &'static str, value: &T) -> Result<()>
+ where
+ T: ?Sized + Serialize,
+ {
+ self.save_field(key, value)
+ }
+
+ fn end(self) -> Result<()> {
+ self.outer.end_unsorted_dict(self.encoder)?;
+ Ok(())
+ }
+}
+
+impl<'outer> SerializeStructVariant for StructSerializer<'outer> {
+ type Error = Error;
+ type Ok = ();
+
+ fn serialize_field<T>(&mut self, key: &'static str, value: &T) -> Result<()>
+ where
+ T: ?Sized + Serialize,
+ {
+ self.save_field(key, value)
+ }
+
+ fn end(self) -> Result<()> {
+ self.outer.end_unsorted_dict(self.encoder)?;
+ self.outer.emit_token(Token::End)?;
+ Ok(())
+ }
+}
diff --git a/rust/vendor/bendy/src/state_tracker.rs b/rust/vendor/bendy/src/state_tracker.rs
new file mode 100644
index 0000000..b56ee2d
--- /dev/null
+++ b/rust/vendor/bendy/src/state_tracker.rs
@@ -0,0 +1,7 @@
+mod stack;
+mod state;
+mod structure_error;
+mod token;
+
+pub use self::token::Token;
+pub(crate) use self::{stack::Stack, state::StateTracker, structure_error::StructureError};
diff --git a/rust/vendor/bendy/src/state_tracker/stack.rs b/rust/vendor/bendy/src/state_tracker/stack.rs
new file mode 100644
index 0000000..b61c03e
--- /dev/null
+++ b/rust/vendor/bendy/src/state_tracker/stack.rs
@@ -0,0 +1,36 @@
+#[cfg(not(feature = "std"))]
+use alloc::vec::Vec;
+
+pub trait Stack<T> {
+ fn peek_mut(&mut self) -> Option<&mut T>;
+
+ fn peek(&self) -> Option<&T>;
+
+ fn replace_top(&mut self, new_value: T);
+}
+
+impl<T> Stack<T> for Vec<T> {
+ fn peek_mut(&mut self) -> Option<&mut T> {
+ let len = self.len();
+ if len == 0 {
+ None
+ } else {
+ Some(&mut self[len - 1])
+ }
+ }
+
+ fn peek(&self) -> Option<&T> {
+ let len = self.len();
+ if len == 0 {
+ None
+ } else {
+ Some(&self[len - 1])
+ }
+ }
+
+ fn replace_top(&mut self, new_value: T) {
+ self.peek_mut()
+ .map(|top| *top = new_value)
+ .expect("Shouldn't replace_top with nothing on the stack");
+ }
+}
diff --git a/rust/vendor/bendy/src/state_tracker/state.rs b/rust/vendor/bendy/src/state_tracker/state.rs
new file mode 100644
index 0000000..783fa1a
--- /dev/null
+++ b/rust/vendor/bendy/src/state_tracker/state.rs
@@ -0,0 +1,159 @@
+#[cfg(not(feature = "std"))]
+use alloc::vec::Vec;
+
+use crate::state_tracker::{Stack, StructureError, Token};
+
+/// The state of the current level of the decoder
+#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Debug)]
+enum State<S: AsRef<[u8]>, E> {
+ /// An inner list. Allows any token
+ Seq,
+ /// Inside a map, expecting a key. Contains the last key read, so sorting can be validated
+ MapKey(Option<S>),
+ /// Inside a map, expecting a value. Contains the last key read, so sorting can be validated
+ MapValue(S),
+ /// Received an error while decoding
+ Failed(E),
+}
+
+/// Used to validate that a structure is valid
+#[derive(Debug)]
+pub struct StateTracker<S: AsRef<[u8]>, E = StructureError> {
+ state: Vec<State<S, E>>,
+ max_depth: usize,
+}
+
+impl<S: AsRef<[u8]>, E> Default for StateTracker<S, E> {
+ fn default() -> Self {
+ StateTracker {
+ state: Vec::new(),
+ max_depth: 2048,
+ }
+ }
+}
+
+impl<S: AsRef<[u8]>, E> StateTracker<S, E>
+where
+ S: AsRef<[u8]>,
+ E: From<StructureError> + Clone,
+{
+ pub fn new() -> Self {
+ <Self as Default>::default()
+ }
+
+ pub fn set_max_depth(&mut self, new_max_depth: usize) {
+ self.max_depth = new_max_depth
+ }
+
+ pub fn remaining_depth(&self) -> usize {
+ self.max_depth - self.state.len()
+ }
+
+ /// Observe that an EOF was seen. This function is idempotent.
+ pub fn observe_eof(&mut self) -> Result<(), E> {
+ self.check_error()?;
+
+ if self.state.is_empty() {
+ Ok(())
+ } else {
+ self.latch_err(Err(E::from(StructureError::UnexpectedEof)))
+ }
+ }
+
+ #[allow(clippy::match_same_arms)]
+ pub fn observe_token<'a>(&mut self, token: &Token<'a>) -> Result<(), E>
+ where
+ S: From<&'a [u8]>,
+ {
+ use self::{State::*, Token::*};
+
+ match (self.state.pop(), *token) {
+ (None, End) => {
+ return self.latch_err(Err(E::from(StructureError::invalid_state(
+ "End not allowed at top level",
+ ))));
+ },
+ (Some(Seq), End) => {},
+ (Some(MapKey(_)), End) => {},
+ (Some(MapKey(None)), String(label)) => {
+ self.state.push(MapValue(S::from(label)));
+ },
+ (Some(MapKey(Some(oldlabel))), String(label)) => {
+ if oldlabel.as_ref() >= label {
+ return self.latch_err(Err(E::from(StructureError::UnsortedKeys)));
+ }
+ self.state.push(MapValue(S::from(label)));
+ },
+ (Some(oldstate @ MapKey(_)), _tok) => {
+ self.state.push(oldstate);
+ return self.latch_err(Err(E::from(StructureError::invalid_state(
+ "Map keys must be strings",
+ ))));
+ },
+ (Some(MapValue(label)), List) => {
+ self.state.push(MapKey(Some(label)));
+ if self.state.len() >= self.max_depth {
+ return self.latch_err(Err(E::from(StructureError::NestingTooDeep)));
+ }
+ self.state.push(Seq);
+ },
+ (Some(MapValue(label)), Dict) => {
+ self.state.push(MapKey(Some(label)));
+ if self.state.len() >= self.max_depth {
+ return self.latch_err(Err(E::from(StructureError::NestingTooDeep)));
+ }
+ self.state.push(MapKey(None));
+ },
+ (Some(oldstate @ MapValue(_)), End) => {
+ self.state.push(oldstate);
+ return self.latch_err(Err(E::from(StructureError::invalid_state(
+ "Missing map value",
+ ))));
+ },
+ (Some(MapValue(label)), _) => {
+ self.state.push(MapKey(Some(label)));
+ },
+ (oldstate, List) => {
+ if let Some(oldstate) = oldstate {
+ self.state.push(oldstate);
+ }
+ if self.state.len() >= self.max_depth {
+ return self.latch_err(Err(E::from(StructureError::NestingTooDeep)));
+ }
+ self.state.push(Seq);
+ },
+ (oldstate, Dict) => {
+ if let Some(oldstate) = oldstate {
+ self.state.push(oldstate);
+ }
+
+ if self.state.len() >= self.max_depth {
+ return self.latch_err(Err(E::from(StructureError::NestingTooDeep)));
+ }
+ self.state.push(MapKey(None));
+ },
+ (oldstate, _) => {
+ if let Some(oldstate) = oldstate {
+ self.state.push(oldstate);
+ }
+ },
+ }
+ Ok(())
+ }
+
+ pub fn latch_err<T>(&mut self, result: Result<T, E>) -> Result<T, E> {
+ self.check_error()?;
+ if let Err(ref err) = result {
+ self.state.push(State::Failed(err.clone()))
+ }
+ result
+ }
+
+ pub fn check_error(&self) -> Result<(), E> {
+ if let Some(&State::Failed(ref error)) = self.state.peek() {
+ Err(error.clone())
+ } else {
+ Ok(())
+ }
+ }
+}
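+
+// Illustrative sketch, not part of the upstream crate: the tracker latches an
+// error as soon as dictionary keys arrive out of ascending order.
+#[cfg(test)]
+mod sketch {
+    #[cfg(not(feature = "std"))]
+    use alloc::vec::Vec;
+
+    use super::*;
+    use crate::state_tracker::Token;
+
+    #[test]
+    fn unsorted_keys_are_rejected() {
+        let mut tracker: StateTracker<Vec<u8>> = StateTracker::new();
+        tracker.observe_token(&Token::Dict).unwrap();
+        tracker.observe_token(&Token::String(b"b")).unwrap();
+        tracker.observe_token(&Token::Num("1")).unwrap();
+        assert!(tracker.observe_token(&Token::String(b"a")).is_err());
+    }
+}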
diff --git a/rust/vendor/bendy/src/state_tracker/structure_error.rs b/rust/vendor/bendy/src/state_tracker/structure_error.rs
new file mode 100644
index 0000000..64dae70
--- /dev/null
+++ b/rust/vendor/bendy/src/state_tracker/structure_error.rs
@@ -0,0 +1,44 @@
+#[cfg(not(feature = "std"))]
+use alloc::{
+ format,
+ string::{String, ToString},
+};
+#[cfg(not(feature = "std"))]
+use core::fmt::Display;
+#[cfg(feature = "std")]
+use std::fmt::Display;
+
+use failure::Fail;
+
+/// An encoding or decoding error
+#[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Debug, Fail)]
+pub enum StructureError {
+ #[fail(display = "Saw the wrong type of token: {}", _0)]
+ /// Wrong type of token detected.
+ InvalidState(String),
+ #[fail(display = "Keys were not sorted")]
+ /// Keys were not sorted.
+ UnsortedKeys,
+ #[fail(display = "Reached EOF in the middle of a message")]
+ /// EOF reached too early.
+ UnexpectedEof,
+ #[fail(display = "Malformed number of unexpected character: {}", _0)]
+ /// Unexpected characters detected.
+ SyntaxError(String),
+ #[fail(display = "Maximum nesting depth exceeded")]
+ /// Exceeded the recursion limit.
+ NestingTooDeep,
+}
+
+impl StructureError {
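+ /// Build a `SyntaxError` reporting an unexpected character at a byte offset.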
+ pub fn unexpected(expected: impl Display, got: char, offset: usize) -> Self {
+ StructureError::SyntaxError(format!(
+ "Expected {}, got {:?} at offset {}",
+ expected, got, offset
+ ))
+ }
+
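+ /// Build an `InvalidState` error from a description of the expected state.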
+ pub fn invalid_state(expected: impl Display) -> Self {
+ StructureError::InvalidState(expected.to_string())
+ }
+}
diff --git a/rust/vendor/bendy/src/state_tracker/token.rs b/rust/vendor/bendy/src/state_tracker/token.rs
new file mode 100644
index 0000000..b26aaba
--- /dev/null
+++ b/rust/vendor/bendy/src/state_tracker/token.rs
@@ -0,0 +1,26 @@
+/// A raw bencode token
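+///
+/// For example, the bencoded dictionary `d3:fooi1ee` corresponds to the token
+/// sequence `Dict`, `String(b"foo")`, `Num("1")`, `End` (an illustrative
+/// sketch; the decoder produces the authoritative stream).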
+#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Debug)]
+pub enum Token<'a> {
+ /// The beginning of a list
+ List,
+ /// The beginning of a dictionary
+ Dict,
+ /// A byte string; may not be UTF-8
+ String(&'a [u8]),
+ /// A number; we explicitly *don't* parse it here, as it could be signed, unsigned, or a bignum
+ Num(&'a str),
+ /// The end of a list or dictionary
+ End,
+}
+
+impl<'a> Token<'a> {
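+ /// Returns a static string naming this token variant.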
+ pub fn name(&self) -> &'static str {
+ match *self {
+ Token::Dict => "Dict",
+ Token::End => "End",
+ Token::List => "List",
+ Token::Num(_) => "Num",
+ Token::String(_) => "String",
+ }
+ }
+}
diff --git a/rust/vendor/bendy/src/value.rs b/rust/vendor/bendy/src/value.rs
new file mode 100644
index 0000000..3df0338
--- /dev/null
+++ b/rust/vendor/bendy/src/value.rs
@@ -0,0 +1,312 @@
+//! `Value`s hold arbitrary borrowed or owned bencode data. Unlike `Object`s,
+//! they can be cloned and traversed multiple times.
+//!
+//! `Value` implements `FromBencode` and `ToBencode`. If the `serde` feature is
+//! enabled, it also implements `Serialize` and `Deserialize`.
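+//!
+//! A minimal round-trip sketch (assuming the crate exposes this module as
+//! `bendy::value` and relying on the `FromBencode`/`ToBencode` impls below):
+//!
+//! ```
+//! use bendy::{decoding::FromBencode, encoding::ToBencode, value::Value};
+//!
+//! let value = Value::from_bencode(b"d3:fooi1ee").unwrap();
+//! assert_eq!(value.to_bencode().unwrap(), b"d3:fooi1ee".to_vec());
+//! ```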
+
+use alloc::{
+ borrow::{Cow, ToOwned},
+ collections::BTreeMap,
+ vec::Vec,
+};
+
+#[cfg(feature = "serde")]
+use std::{
+ convert::TryInto,
+ fmt::{self, Formatter},
+ marker::PhantomData,
+};
+
+#[cfg(feature = "serde")]
+use serde_ as serde;
+
+#[cfg(feature = "serde")]
+use serde::{
+ ser::{SerializeMap, SerializeSeq},
+ Serialize,
+};
+
+use crate::{
+ decoding::{FromBencode, Object},
+ encoding::{SingleItemEncoder, ToBencode},
+};
+
+/// An owned or borrowed bencoded value.
+#[derive(PartialEq, Eq, Clone, Debug)]
+pub enum Value<'a> {
+ /// An owned or borrowed byte string
+ Bytes(Cow<'a, [u8]>),
+ /// A dictionary mapping byte strings to values
+ Dict(BTreeMap<Cow<'a, [u8]>, Value<'a>>),
+ /// A signed integer
+ Integer(i64),
+ /// A list of values
+ List(Vec<Value<'a>>),
+}
+
+impl<'a> Value<'a> {
+ /// Convert this Value into an owned Value with static lifetime
+ pub fn into_owned(self) -> Value<'static> {
+ match self {
+ Value::Bytes(bytes) => Value::Bytes(Cow::Owned(bytes.into_owned())),
+ Value::Dict(dict) => Value::Dict(
+ dict.into_iter()
+ .map(|(key, value)| (Cow::Owned(key.into_owned()), value.into_owned()))
+ .collect(),
+ ),
+ Value::Integer(integer) => Value::Integer(integer),
+ Value::List(list) => Value::List(list.into_iter().map(Value::into_owned).collect()),
+ }
+ }
+}
+
+impl<'a> ToBencode for Value<'a> {
+ // This leaves some room for external containers.
+ // TODO(#38): Change this to 0 for v0.4
+ const MAX_DEPTH: usize = usize::max_value() / 4;
+
+ fn encode(&self, encoder: SingleItemEncoder) -> Result<(), crate::encoding::Error> {
+ match self {
+ Value::Bytes(bytes) => encoder.emit_bytes(bytes),
+ Value::Dict(dict) => dict.encode(encoder),
+ Value::Integer(integer) => integer.encode(encoder),
+ Value::List(list) => list.encode(encoder),
+ }
+ }
+}
+
+impl<'a> FromBencode for Value<'a> {
+ const EXPECTED_RECURSION_DEPTH: usize = <Self as ToBencode>::MAX_DEPTH;
+
+ fn decode_bencode_object(object: Object) -> Result<Self, crate::decoding::Error> {
+ match object {
+ Object::Bytes(bytes) => Ok(Value::Bytes(Cow::Owned(bytes.to_owned()))),
+ Object::Dict(mut decoder) => {
+ let mut dict = BTreeMap::new();
+ while let Some((key, value)) = decoder.next_pair()? {
+ dict.insert(
+ Cow::Owned(key.to_owned()),
+ Value::decode_bencode_object(value)?,
+ );
+ }
+ Ok(Value::Dict(dict))
+ },
+ Object::Integer(text) => Ok(Value::Integer(text.parse()?)),
+ Object::List(mut decoder) => {
+ let mut list = Vec::new();
+ while let Some(object) = decoder.next_object()? {
+ list.push(Value::decode_bencode_object(object)?);
+ }
+ Ok(Value::List(list))
+ },
+ }
+ }
+}
+
+#[cfg(feature = "serde")]
+mod serde_impls {
+ use super::*;
+
+ use serde_bytes::Bytes;
+
+ impl<'a> Serialize for Value<'a> {
+ fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+ where
+ S: serde::ser::Serializer,
+ {
+ match self {
+ Value::Bytes(string) => serializer.serialize_bytes(string),
+ Value::Integer(int) => serializer.serialize_i64(*int),
+ Value::List(list) => {
+ let mut seed = serializer.serialize_seq(Some(list.len()))?;
+ for value in list {
+ seed.serialize_element(value)?;
+ }
+ seed.end()
+ },
+ Value::Dict(dict) => {
+ let mut seed = serializer.serialize_map(Some(dict.len()))?;
+ for (k, v) in dict {
+ let bytes = Bytes::new(k);
+ seed.serialize_entry(bytes, v)?;
+ }
+ seed.end()
+ },
+ }
+ }
+ }
+
+ impl<'de: 'a, 'a> serde::de::Deserialize<'de> for Value<'a> {
+ #[inline]
+ fn deserialize<D>(deserializer: D) -> Result<Value<'a>, D::Error>
+ where
+ D: serde::de::Deserializer<'de>,
+ {
+ deserializer.deserialize_any(Visitor(PhantomData))
+ }
+ }
+
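+ /// Serde visitor that builds a `Value`, borrowing byte strings and strings
+ /// from the input where possible and taking ownership otherwise.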
+ struct Visitor<'a>(PhantomData<&'a ()>);
+
+ impl<'de: 'a, 'a> serde::de::Visitor<'de> for Visitor<'a> {
+ type Value = Value<'a>;
+
+ fn expecting(&self, formatter: &mut Formatter) -> fmt::Result {
+ formatter.write_str("any valid BEncode value")
+ }
+
+ fn visit_i64<E>(self, value: i64) -> Result<Value<'a>, E> {
+ Ok(Value::Integer(value))
+ }
+
+ fn visit_u64<E>(self, value: u64) -> Result<Value<'a>, E> {
+ Ok(Value::Integer(value.try_into().unwrap()))
+ }
+
+ fn visit_borrowed_bytes<E>(self, value: &'de [u8]) -> Result<Value<'a>, E>
+ where
+ E: serde::de::Error,
+ {
+ Ok(Value::Bytes(Cow::Borrowed(value)))
+ }
+
+ fn visit_borrowed_str<E>(self, value: &'de str) -> Result<Value<'a>, E>
+ where
+ E: serde::de::Error,
+ {
+ Ok(Value::Bytes(Cow::Borrowed(value.as_bytes())))
+ }
+
+ fn visit_string<E>(self, value: String) -> Result<Value<'a>, E> {
+ Ok(Value::Bytes(Cow::Owned(value.into_bytes())))
+ }
+
+ fn visit_byte_buf<E>(self, value: Vec<u8>) -> Result<Value<'a>, E> {
+ Ok(Value::Bytes(Cow::Owned(value)))
+ }
+
+ fn visit_seq<V>(self, mut access: V) -> Result<Value<'a>, V::Error>
+ where
+ V: serde::de::SeqAccess<'de>,
+ {
+ let mut list = Vec::new();
+ while let Some(e) = access.next_element()? {
+ list.push(e);
+ }
+ Ok(Value::List(list))
+ }
+
+ fn visit_map<V>(self, mut access: V) -> Result<Value<'a>, V::Error>
+ where
+ V: serde::de::MapAccess<'de>,
+ {
+ let mut map = BTreeMap::new();
+ while let Some((k, v)) = access.next_entry::<&Bytes, _>()? {
+ map.insert(Cow::Borrowed(k.as_ref()), v);
+ }
+ Ok(Value::Dict(map))
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use alloc::{string::String, vec};
+
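+ // Round-trips `value` through bencode encoding/decoding (and through the
+ // serde serializer/deserializer when the `serde` feature is enabled) and
+ // asserts the encoding equals `expected`.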
+ fn case(value: Value, expected: impl AsRef<[u8]>) {
+ let expected = expected.as_ref();
+
+ let encoded = match value.to_bencode() {
+ Ok(bytes) => bytes,
+ Err(err) => panic!("Failed to encode `{:?}`: {}", value, err),
+ };
+
+ if encoded != expected {
+ panic!(
+ "Expected `{:?}` to encode as `{}`, but got `{}",
+ value,
+ String::from_utf8_lossy(expected),
+ String::from_utf8_lossy(&encoded)
+ )
+ }
+
+ let decoded = match Value::from_bencode(&encoded) {
+ Ok(decoded) => decoded,
+ Err(err) => panic!(
+ "Failed to decode value from `{}`: {}",
+ String::from_utf8_lossy(&encoded),
+ err,
+ ),
+ };
+
+ assert_eq!(decoded, value);
+
+ #[cfg(feature = "serde")]
+ {
+ let deserialized = match crate::serde::de::from_bytes::<Value>(expected) {
+ Ok(deserialized) => deserialized,
+ Err(err) => panic!(
+ "Failed to deserialize value from `{}`: {}",
+ String::from_utf8_lossy(&expected),
+ err
+ ),
+ };
+
+ if deserialized != value {
+ panic!(
+ "Deserialize Serialize produced unexpected value: `{:?}` != `{:?}`",
+ deserialized, value
+ );
+ }
+
+ let serialized = match crate::serde::ser::to_bytes(&value) {
+ Ok(serialized) => serialized,
+ Err(err) => panic!("Failed to serialize `{:?}`: {}", value, err),
+ };
+
+ if serialized != expected {
+ panic!(
+ "Serialize Serialize produced unexpected bencode: `{:?}` != `{:?}`",
+ String::from_utf8_lossy(&serialized),
+ String::from_utf8_lossy(expected)
+ );
+ }
+ }
+ }
+
+ #[test]
+ fn bytes() {
+ case(Value::Bytes(Cow::Borrowed(&[1, 2, 3])), b"3:\x01\x02\x03");
+ case(Value::Bytes(Cow::Owned(vec![1, 2, 3])), b"3:\x01\x02\x03");
+ }
+
+ #[test]
+ fn dict() {
+ case(Value::Dict(BTreeMap::new()), "de");
+
+ let mut dict = BTreeMap::new();
+ dict.insert(Cow::Borrowed("foo".as_bytes()), Value::Integer(1));
+ dict.insert(Cow::Borrowed("bar".as_bytes()), Value::Integer(2));
+ case(Value::Dict(dict), "d3:bari2e3:fooi1ee");
+ }
+
+ #[test]
+ fn integer() {
+ case(Value::Integer(0), "i0e");
+ case(Value::Integer(-1), "i-1e");
+ }
+
+ #[test]
+ fn list() {
+ case(Value::List(Vec::new()), "le");
+ case(
+ Value::List(vec![
+ Value::Integer(0),
+ Value::Bytes(Cow::Borrowed(&[1, 2, 3])),
+ ]),
+ b"li0e3:\x01\x02\x03e",
+ );
+ }
+}
diff --git a/rust/vendor/bendy/tests/core_test.rs b/rust/vendor/bendy/tests/core_test.rs
new file mode 100644
index 0000000..66fa9e5
--- /dev/null
+++ b/rust/vendor/bendy/tests/core_test.rs
@@ -0,0 +1,428 @@
+//! Port of https://github.com/jamesleonis/bencode-cljc/blob/master/test/bencode_cljc/core_test.cljc
+//!
+//! Should only use `#![no_std]`-compatible features, but still requires the
+//! `std` feature flag so that we don't have to define a global allocator.
+
+extern crate alloc;
+use alloc::collections::BTreeMap;
+
+use bendy::{
+ decoding::{Error as DecodingError, FromBencode, Object},
+ encoding::{Error as EncodingError, SingleItemEncoder, ToBencode},
+};
+
+// -----------------------------------------------------------------------------
+// Macros
+// -----------------------------------------------------------------------------
+
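+// `list![..]` and `map! {..}` build `Vec<Something>` and
+// `BTreeMap<String, Something>` literals for the test pairs below.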
+macro_rules! list(
+ {} => { Vec::<Something>::new() };
+ { $($value:expr),+ } => {
+ {
+ let mut list = Vec::new();
+ $( list.push(Something::from($value)); )+
+
+ list
+ }
+ };
+);
+
+macro_rules! map(
+ { $($key:expr => $value:expr),+ } => {
+ {
+ let mut map = BTreeMap::new();
+ $( map.insert($key.to_owned(), Something::from($value)); )+
+
+ map
+ }
+ };
+);
+
+// -----------------------------------------------------------------------------
+// Tests
+// -----------------------------------------------------------------------------
+
+#[test]
+fn string_test_pairs() -> Result<(), Error> {
+ let pairs = [
+ ("", "0:"),
+ ("hello", "5:hello"),
+ ("goodbye", "7:goodbye"),
+ ("hello world", "11:hello world"),
+ ("1-5%3~]+=\\| []>.,`??", "20:1-5%3~]+=\\| []>.,`??"),
+ ];
+
+ for (original, expected_encoding) in &pairs {
+ let encoded = original.to_bencode()?;
+ assert_eq!(expected_encoding.as_bytes(), encoded.as_slice());
+
+ let decoded = String::from_bencode(&encoded)?;
+ assert_eq!(original, &decoded);
+ }
+
+ Ok(())
+}
+
+#[test]
+fn integer_test_pairs() -> Result<(), Error> {
+ let pairs = [
+ (0, "i0e"),
+ (5, "i5e"),
+ (-5, "i-5e"),
+ (005, "i5e"),
+ (-005, "i-5e"),
+ (1234567890, "i1234567890e"),
+ (-1234567890, "i-1234567890e"),
+ (i64::max_value(), "i9223372036854775807e"),
+ (i64::min_value(), "i-9223372036854775808e"),
+ ];
+ // Bendy currently doesn't contain a big number implementation.
+ //
+ // (
+ // 123456789012345678901234567890123456789012345678901234567890,
+ // "i123456789012345678901234567890123456789012345678901234567890e"
+ // ),
+ // (
+ // -123456789012345678901234567890123456789012345678901234567890,
+ // "i-123456789012345678901234567890123456789012345678901234567890e"
+ // )
+
+ for (original, expected_encoding) in &pairs {
+ let encoded = original.to_bencode()?;
+ assert_eq!(expected_encoding.as_bytes(), encoded.as_slice());
+
+ let decoded = i64::from_bencode(&encoded)?;
+ assert_eq!(original, &decoded);
+ }
+
+ Ok(())
+}
+
+#[test]
+fn list_test_pairs() -> Result<(), Error> {
+ let pairs = [
+ (list![], "le"),
+ (list!["abra", "cadabra"], "l4:abra7:cadabrae"),
+ (list!["spam", "eggs"], "l4:spam4:eggse"),
+ (
+ list![vec!["list", "of", "lists"], vec!["like", "omygawd!"]],
+ "ll4:list2:of5:listsel4:like8:omygawd!ee",
+ ),
+ ];
+
+ for (original, expected_encoding) in &pairs {
+ let encoded = original.to_bencode()?;
+ assert_eq!(expected_encoding.as_bytes(), encoded.as_slice());
+
+ let decoded = Vec::<Something>::from_bencode(&encoded)?;
+ assert_eq!(original, &decoded);
+ }
+
+ Ok(())
+}
+
+#[test]
+fn map_test_pairs() -> Result<(), Error> {
+ let pairs = [
+ (BTreeMap::new(), "de"),
+ (
+ map! {"cow" => "moo", "spam" => "eggs"},
+ "d3:cow3:moo4:spam4:eggse",
+ ),
+ (
+ map! {"cow" => "moo", "dog" => "bark"},
+ "d3:cow3:moo3:dog4:barke",
+ ),
+ (
+ map! {"dog" => "bark", "cow" => "moo"},
+ "d3:cow3:moo3:dog4:barke",
+ ),
+ (
+ map! {"first" => "first", "2ace" => "second", "3ace" => "third"},
+ "d4:2ace6:second4:3ace5:third5:first5:firste",
+ ),
+ (
+ map! {"Goodbye" => map! {"maps" => "that don't work", "number" => 100}},
+ "d7:Goodbyed4:maps15:that don't work6:numberi100eee",
+ ),
+ (
+ map! {
+ "publisher" => "bob", "publisher-webpage" => "www.example.com",
+ "publisher.location" => "home"
+ },
+ "d9:publisher3:bob17:publisher-webpage15:www.example.com18:publisher.location4:homee",
+ ),
+ ];
+
+ for (original, expected_encoding) in &pairs {
+ let encoded = original.to_bencode()?;
+ assert_eq!(expected_encoding.as_bytes(), encoded.as_slice());
+
+ let decoded = BTreeMap::<String, Something>::from_bencode(&encoded)?;
+ assert_eq!(original, &decoded);
+ }
+
+ Ok(())
+}
+
+#[test]
+fn mixed_use_list_pairs() -> Result<(), Error> {
+ let pairs = [(
+ list![0, "heterogeneous", -5, "lists", 10, map! {"map" => "well"}],
+ "li0e13:heterogeneousi-5e5:listsi10ed3:map4:wellee",
+ )];
+
+ for (original, expected_encoding) in &pairs {
+ let encoded = original.to_bencode()?;
+ assert_eq!(expected_encoding.as_bytes(), encoded.as_slice());
+
+ let decoded = Vec::<Something>::from_bencode(&encoded)?;
+ assert_eq!(original, &decoded);
+ }
+
+ Ok(())
+}
+
+#[test]
+fn mixed_use_dict_pairs() -> Result<(), Error> {
+ let pairs = [
+ (
+ map! {
+ "hello" => list!["world!", "gaia!", "mother earth!"],
+ "Goodbye" => map! {"maps" => "that don't work", "number" => 100}
+ },
+ "d7:Goodbyed4:maps15:that don't work6:numberi100ee5:hellol6:world!5:gaia!13:mother earth!ee"
+ ),
+ (
+ map! {"hello" => list!["world!", "gaia!", "mother earth!"]},
+ "d5:hellol6:world!5:gaia!13:mother earth!ee"
+ ),
+ (
+ map! {"spam" => list!["a", "b"]},
+ "d4:spaml1:a1:bee",
+ ),
+ (
+ map! {
+ "t" => "aa", "y" => "q", "q" => "ping",
+ "a" => map! { "id" => "abcdefghij0123456789" }
+ },
+ "d1:ad2:id20:abcdefghij0123456789e1:q4:ping1:t2:aa1:y1:qe",
+ ),
+ (
+ map! {
+ "t" => "aa", "y" => "q", "q" => "find_node",
+ "a" => map! { "id" => "abcdefghij0123456789", "target" => "mnopqrstuvwxyz123456" }
+ },
+ "d1:ad2:id20:abcdefghij01234567896:target20:mnopqrstuvwxyz123456e1:q9:find_node1:t2:aa1:y1:qe"
+ ),
+ (
+ map! {
+ "t" => "aa", "y" => "q", "q" => "get_peers",
+ "a" => map! { "id" => "abcdefghij0123456789", "info_hash" => "mnopqrstuvwxyz123456" }
+ },
+ "d1:ad2:id20:abcdefghij01234567899:info_hash20:mnopqrstuvwxyz123456e1:q9:get_peers1:t2:aa1:y1:qe"
+ ),
+ (
+ map! {
+ "t" => "aa", "y" => "r",
+ "r" => map! {
+ "id" => "abcdefghij0123456789",
+ "token" => "aoeusnth", "values" => vec!["axje.u", "idhtnm"]
+ }
+ },
+ "d1:rd2:id20:abcdefghij01234567895:token8:aoeusnth6:valuesl6:axje.u6:idhtnmee1:t2:aa1:y1:re"
+ )
+ ];
+
+ for (original, expected_encoding) in &pairs {
+ let encoded = original.to_bencode()?;
+ assert_eq!(expected_encoding.as_bytes(), encoded.as_slice());
+
+ let decoded = BTreeMap::<String, Something>::from_bencode(&encoded)?;
+ assert_eq!(original, &decoded);
+ }
+
+ Ok(())
+}
+
+#[test]
+fn illegal_integer_encodings() {
+ let values = [
+ "i-0e", "i09e", "i-09e", "i-0123e", "i-00123e", "i0123e", "i00123e", "i12-345", "i-12-345",
+ "i-1", "i1",
+ ];
+ // Bendy currently doesn't fail if it encounters unused tokens
+ //
+ // "i12345ei10e5:eoeoee",
+ // "i-12345ei10e5:eoeoee"
+
+ for value in &values {
+ let error = i64::from_bencode(value.as_bytes()).unwrap_err();
+ assert!(error.to_string().contains("encoding corrupted"));
+ }
+}
+
+#[test]
+fn illegal_string_encodings() {
+ let values = [":hello", "-5:hello", "-5:", "5:", "10:hello"];
+ // Bendy currently doesn't fail if it encounters unused tokens
+ //
+ // "5:hello5:hello",
+ // "5:helloi10e",
+ // "10:hello5:hello",
+ // "10:helloi0e",
+ // "10:helloi123456789e"
+
+ for value in &values {
+ let error = String::from_bencode(value.as_bytes()).unwrap_err();
+ assert!(error.to_string().contains("encoding corrupted"));
+ }
+}
+
+#[test]
+fn illegal_list_encodings() {
+ let values = [
+ "l",
+ "lsde",
+ "li10e5hello",
+ "l10:helloi123456789ee",
+ "l10:helloi123456789e5:helloe",
+ "l5:helloi123456789e10:helloe",
+ ];
+ // Bendy currently doesn't fail if it encounters unused tokens
+ //
+ // "l5:hello5:worldei10e",
+
+ for value in &values {
+ let error = Vec::<Something>::from_bencode(value.as_bytes()).unwrap_err();
+ assert!(error.to_string().contains("encoding corrupted"));
+ }
+}
+
+#[test]
+fn illegal_dictionary_encodings() {
+ let values = [
+ "d",
+ "duuuuure",
+ "d5:hello5:world",
+ "d10:helloi123456789ee",
+ "d5:helloi123456789e5:helloe",
+ "di10e5:hello5:worldi10ee",
+ "d5:worldi10ei10e5:helloe",
+ "dle5:hello5:worldi10ee",
+ "dli10ei11ee5:hello5:worldi10ee",
+ "dde5:hello5:worldi10ee",
+ "dd8:innermapi11ee5:hello5:worldi10ee",
+ ];
+ // Bendy currently doesn't fail if it encounters unused tokens
+ //
+ // "d5:hello5:worldei10e",
+
+ for value in &values {
+ let error = BTreeMap::<String, Something>::from_bencode(value.as_bytes()).unwrap_err();
+ assert!(error.to_string().contains("encoding corrupted"));
+ }
+}
+
+// -----------------------------------------------------------------------------
+// Dynamic Typing Utility
+// -----------------------------------------------------------------------------
+
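+/// A dynamically typed bencode value used to round-trip the heterogeneous
+/// test data above.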
+#[derive(Debug, PartialEq)]
+enum Something {
+ Bytes(String),
+ Dict(BTreeMap<String, Something>),
+ Integer(i64),
+ List(Vec<Something>),
+}
+
+impl From<&str> for Something {
+ fn from(content: &str) -> Self {
+ Something::Bytes(content.to_owned())
+ }
+}
+
+impl<ContentT> From<BTreeMap<String, ContentT>> for Something
+where
+ Something: From<ContentT>,
+{
+ fn from(content: BTreeMap<String, ContentT>) -> Self {
+ let content = content
+ .into_iter()
+ .map(|(key, value)| (key, value.into()))
+ .collect();
+
+ Something::Dict(content)
+ }
+}
+
+impl From<i64> for Something {
+ fn from(content: i64) -> Self {
+ Something::Integer(content)
+ }
+}
+
+impl<ContentT> From<Vec<ContentT>> for Something
+where
+ Something: From<ContentT>,
+{
+ fn from(content: Vec<ContentT>) -> Self {
+ let content = content.into_iter().map(Into::into).collect();
+ Something::List(content)
+ }
+}
+
+impl FromBencode for Something {
+ fn decode_bencode_object(object: Object) -> Result<Self, DecodingError>
+ where
+ Self: Sized,
+ {
+ let something = match object {
+ Object::Bytes(content) => {
+ Something::Bytes(String::from_utf8_lossy(content).to_string())
+ },
+ Object::Integer(number) => Something::Integer(number.parse().unwrap()),
+ object @ Object::Dict(_) => {
+ Something::Dict(BTreeMap::decode_bencode_object(object).unwrap())
+ },
+ object @ Object::List(_) => {
+ Something::List(Vec::decode_bencode_object(object).unwrap())
+ },
+ };
+
+ Ok(something)
+ }
+}
+
+impl ToBencode for Something {
+ const MAX_DEPTH: usize = 999;
+
+ fn encode(&self, encoder: SingleItemEncoder) -> Result<(), EncodingError> {
+ match self {
+ Something::Bytes(content) => encoder.emit(content),
+ Something::Dict(content) => encoder.emit(content),
+ Something::Integer(content) => encoder.emit(content),
+ Something::List(content) => encoder.emit(content),
+ }
+ }
+}
+
+// -----------------------------------------------------------------------------
+// Error
+// -----------------------------------------------------------------------------
+
+#[derive(Debug)]
+enum Error {
+ DecodingError(DecodingError),
+ EncodingError(EncodingError),
+}
+
+impl From<DecodingError> for Error {
+ fn from(error: DecodingError) -> Self {
+ Error::DecodingError(error)
+ }
+}
+
+impl From<EncodingError> for Error {
+ fn from(error: EncodingError) -> Self {
+ Error::EncodingError(error)
+ }
+}
diff --git a/rust/vendor/bendy/tests/readme.rs b/rust/vendor/bendy/tests/readme.rs
new file mode 100644
index 0000000..35b342f
--- /dev/null
+++ b/rust/vendor/bendy/tests/readme.rs
@@ -0,0 +1,365 @@
+// Please keep the code below in sync with `README.md`.
+//
+// If `cfg(doctest)` gets stabilized or `cfg(test)` gets fixed, we can use
+// doc-comment for running tests in `README.md`.
+
+mod encoding_1 {
+ use bendy::encoding::{Error, ToBencode};
+
+ #[test]
+ fn encode_vector() -> Result<(), Error> {
+ let my_data = vec!["hello", "world"];
+ let encoded = my_data.to_bencode()?;
+
+ assert_eq!(b"l5:hello5:worlde", encoded.as_slice());
+ Ok(())
+ }
+}
+
+mod encoding_2 {
+ use bendy::encoding::{Error, SingleItemEncoder, ToBencode};
+
+ struct IntegerWrapper(i64);
+
+ impl ToBencode for IntegerWrapper {
+ const MAX_DEPTH: usize = 0;
+
+ fn encode(&self, encoder: SingleItemEncoder) -> Result<(), Error> {
+ encoder.emit_int(self.0)
+ }
+ }
+
+ #[test]
+ fn encode_integer() -> Result<(), Error> {
+ let example = IntegerWrapper(21);
+
+ let encoded = example.to_bencode()?;
+ assert_eq!(b"i21e", encoded.as_slice());
+
+ let encoded = 21.to_bencode()?;
+ assert_eq!(b"i21e", encoded.as_slice());
+
+ Ok(())
+ }
+}
+
+mod encoding_3 {
+ use bendy::encoding::{Error, SingleItemEncoder, ToBencode};
+
+ struct StringWrapper(String);
+
+ impl ToBencode for StringWrapper {
+ const MAX_DEPTH: usize = 0;
+
+ fn encode(&self, encoder: SingleItemEncoder) -> Result<(), Error> {
+ encoder.emit_str(&self.0)
+ }
+ }
+
+ #[test]
+ fn encode_string() -> Result<(), Error> {
+ let example = StringWrapper("content".to_string());
+
+ let encoded = example.to_bencode()?;
+ assert_eq!(b"7:content", encoded.as_slice());
+
+ let encoded = "content".to_bencode()?;
+ assert_eq!(b"7:content", encoded.as_slice());
+
+ Ok(())
+ }
+}
+
+mod encoding_4 {
+ use bendy::encoding::{AsString, Error, SingleItemEncoder, ToBencode};
+
+ struct ByteStringWrapper(Vec<u8>);
+
+ impl ToBencode for ByteStringWrapper {
+ const MAX_DEPTH: usize = 0;
+
+ fn encode(&self, encoder: SingleItemEncoder) -> Result<(), Error> {
+ let content = AsString(&self.0);
+ encoder.emit(&content)
+ }
+ }
+
+ #[test]
+ fn encode_byte_string() -> Result<(), Error> {
+ let example = ByteStringWrapper(b"content".to_vec());
+
+ let encoded = example.to_bencode()?;
+ assert_eq!(b"7:content", encoded.as_slice());
+
+ let encoded = AsString(b"content").to_bencode()?;
+ assert_eq!(b"7:content", encoded.as_slice());
+
+ Ok(())
+ }
+}
+
+mod encoding_5 {
+ use bendy::encoding::{Error, SingleItemEncoder, ToBencode};
+
+ struct Example {
+ label: String,
+ counter: u64,
+ }
+
+ impl ToBencode for Example {
+ const MAX_DEPTH: usize = 1;
+
+ fn encode(&self, encoder: SingleItemEncoder) -> Result<(), Error> {
+ encoder.emit_dict(|mut e| {
+ e.emit_pair(b"counter", &self.counter)?;
+ e.emit_pair(b"label", &self.label)?;
+
+ Ok(())
+ })
+ }
+ }
+
+ #[test]
+ fn encode_dictionary() -> Result<(), Error> {
+ let example = Example {
+ label: "Example".to_string(),
+ counter: 0,
+ };
+
+ let encoded = example.to_bencode()?;
+ assert_eq!(b"d7:counteri0e5:label7:Examplee", encoded.as_slice());
+
+ Ok(())
+ }
+}
+
+mod encoding_6 {
+ use bendy::encoding::{Error, SingleItemEncoder, ToBencode};
+
+ struct Location(i64, i64);
+
+ impl ToBencode for Location {
+ const MAX_DEPTH: usize = 1;
+
+ fn encode(&self, encoder: SingleItemEncoder) -> Result<(), Error> {
+ encoder.emit_list(|e| {
+ e.emit_int(self.0)?;
+ e.emit_int(self.1)
+ })
+ }
+ }
+
+ #[test]
+ fn encode_list() -> Result<(), Error> {
+ let example = Location(2, 3);
+
+ let encoded = example.to_bencode()?;
+ assert_eq!(b"li2ei3ee", encoded.as_slice());
+
+ Ok(())
+ }
+}
+
+mod decoding_1 {
+ use bendy::decoding::{Error, FromBencode};
+
+ #[test]
+ fn decode_vector() -> Result<(), Error> {
+ let encoded = b"l5:hello5:worlde".to_vec();
+ let decoded = Vec::<String>::from_bencode(&encoded)?;
+
+ assert_eq!(vec!["hello", "world"], decoded);
+ Ok(())
+ }
+}
+
+mod decoding_2 {
+ use bendy::decoding::{Error, FromBencode, Object};
+
+ #[derive(Debug, Eq, PartialEq)]
+ struct IntegerWrapper(i64);
+
+ impl FromBencode for IntegerWrapper {
+ const EXPECTED_RECURSION_DEPTH: usize = 0;
+
+ fn decode_bencode_object(object: Object) -> Result<Self, Error> {
+ // This is an example of content handling. It would also be possible
+ // to call `i64::decode_bencode_object(object)` directly.
+ let content = object.try_into_integer()?;
+ let number = content.parse::<i64>()?;
+
+ Ok(IntegerWrapper(number))
+ }
+ }
+
+ #[test]
+ fn decode_integer() -> Result<(), Error> {
+ let encoded = b"i21e".to_vec();
+
+ let example = IntegerWrapper::from_bencode(&encoded)?;
+ assert_eq!(IntegerWrapper(21), example);
+
+ let example = i64::from_bencode(&encoded)?;
+ assert_eq!(21, example);
+
+ Ok(())
+ }
+}
+
+mod decoding_3 {
+ use bendy::decoding::{Error, FromBencode, Object};
+
+ #[derive(Debug, Eq, PartialEq)]
+ struct StringWrapper(String);
+
+ impl FromBencode for StringWrapper {
+ const EXPECTED_RECURSION_DEPTH: usize = 0;
+
+ fn decode_bencode_object(object: Object) -> Result<Self, Error> {
+ // This is an example of content handling. It would also be possible
+ // to call `String::decode_bencode_object(object)` directly.
+ let content = object.try_into_bytes()?;
+ let content = String::from_utf8(content.to_vec())?;
+
+ Ok(StringWrapper(content))
+ }
+ }
+
+ #[test]
+ fn decode_string() -> Result<(), Error> {
+ let encoded = b"7:content".to_vec();
+
+ let example = StringWrapper::from_bencode(&encoded)?;
+ assert_eq!(StringWrapper("content".to_string()), example);
+
+ let example = String::from_bencode(&encoded)?;
+ assert_eq!("content".to_string(), example);
+
+ Ok(())
+ }
+}
+
+mod decoding_4 {
+ use bendy::{
+ decoding::{Error, FromBencode, Object},
+ encoding::AsString,
+ };
+
+ #[derive(Debug, Eq, PartialEq)]
+ struct ByteStringWrapper(Vec<u8>);
+
+ impl FromBencode for ByteStringWrapper {
+ const EXPECTED_RECURSION_DEPTH: usize = 0;
+
+ fn decode_bencode_object(object: Object) -> Result<Self, Error> {
+ let content = AsString::decode_bencode_object(object)?;
+ Ok(ByteStringWrapper(content.0))
+ }
+ }
+
+ #[test]
+ fn decode_byte_string() -> Result<(), Error> {
+ let encoded = b"7:content".to_vec();
+
+ let example = ByteStringWrapper::from_bencode(&encoded)?;
+ assert_eq!(ByteStringWrapper(b"content".to_vec()), example);
+
+ let example = AsString::from_bencode(&encoded)?;
+ assert_eq!(b"content".to_vec(), example.0);
+
+ Ok(())
+ }
+}
+
+mod decoding_5 {
+ use bendy::decoding::{Error, FromBencode, Object, ResultExt};
+
+ #[derive(Debug, Eq, PartialEq)]
+ struct Example {
+ label: String,
+ counter: u64,
+ }
+
+ impl FromBencode for Example {
+ const EXPECTED_RECURSION_DEPTH: usize = 1;
+
+ fn decode_bencode_object(object: Object) -> Result<Self, Error> {
+ let mut counter = None;
+ let mut label = None;
+
+ let mut dict = object.try_into_dictionary()?;
+ while let Some(pair) = dict.next_pair()? {
+ match pair {
+ (b"counter", value) => {
+ counter = u64::decode_bencode_object(value)
+ .context("counter")
+ .map(Some)?;
+ },
+ (b"label", value) => {
+ label = String::decode_bencode_object(value)
+ .context("label")
+ .map(Some)?;
+ },
+ (unknown_field, _) => {
+ return Err(Error::unexpected_field(String::from_utf8_lossy(
+ unknown_field,
+ )));
+ },
+ }
+ }
+
+ let counter = counter.ok_or_else(|| Error::missing_field("counter"))?;
+ let label = label.ok_or_else(|| Error::missing_field("label"))?;
+
+ Ok(Example { counter, label })
+ }
+ }
+
+ #[test]
+ fn decode_dictionary() -> Result<(), Error> {
+ let encoded = b"d7:counteri0e5:label7:Examplee".to_vec();
+ let expected = Example {
+ label: "Example".to_string(),
+ counter: 0,
+ };
+
+ let example = Example::from_bencode(&encoded)?;
+ assert_eq!(expected, example);
+
+ Ok(())
+ }
+}
+
+mod decoding_6 {
+ use bendy::decoding::{Error, FromBencode, Object};
+
+ #[derive(Debug, PartialEq, Eq)]
+ struct Location(i64, i64);
+
+ impl FromBencode for Location {
+ const EXPECTED_RECURSION_DEPTH: usize = 1;
+
+ fn decode_bencode_object(object: Object) -> Result<Self, Error> {
+ let mut list = object.try_into_list()?;
+
+ let x = list.next_object()?.ok_or(Error::missing_field("x"))?;
+ let x = i64::decode_bencode_object(x)?;
+
+ let y = list.next_object()?.ok_or(Error::missing_field("y"))?;
+ let y = i64::decode_bencode_object(y)?;
+
+ Ok(Location(x, y))
+ }
+ }
+
+ #[test]
+ fn decode_list() -> Result<(), Error> {
+ let encoded = b"li2ei3ee".to_vec();
+ let expected = Location(2, 3);
+
+ let example = Location::from_bencode(&encoded)?;
+ assert_eq!(expected, example);
+
+ Ok(())
+ }
+}
diff --git a/rust/vendor/bendy/tests/struct_codec.rs b/rust/vendor/bendy/tests/struct_codec.rs
new file mode 100644
index 0000000..2769545
--- /dev/null
+++ b/rust/vendor/bendy/tests/struct_codec.rs
@@ -0,0 +1,89 @@
+use bendy::{
+ decoding::{Error as DecodingError, FromBencode, Object},
+ encoding::{Error as EncodingError, SingleItemEncoder, ToBencode},
+};
+
+#[derive(PartialEq, Eq, Debug)]
+struct Example {
+ foo: Vec<i64>,
+ bar: i64,
+}
+
+impl ToBencode for Example {
+ const MAX_DEPTH: usize = 2;
+
+ fn encode(&self, encoder: SingleItemEncoder) -> Result<(), EncodingError> {
+ encoder.emit_dict(|mut dict| {
+ dict.emit_pair(b"bar", &self.bar)?;
+ dict.emit_pair(b"foo", &self.foo)
+ })
+ }
+}
+
+impl FromBencode for Example {
+ const EXPECTED_RECURSION_DEPTH: usize = 2;
+
+ fn decode_bencode_object(object: Object) -> Result<Self, DecodingError>
+ where
+ Self: Sized,
+ {
+ let mut foo = None;
+ let mut bar = None;
+
+ let mut dict = object.try_into_dictionary()?;
+ while let Some((key, value)) = dict.next_pair()? {
+ match key {
+ b"foo" => {
+ foo = Vec::decode_bencode_object(value).map(Some)?;
+ },
+ b"bar" => {
+ bar = i64::decode_bencode_object(value).map(Some)?;
+ },
+ _ => (), // ignore unknown keys
+ }
+ }
+
+ Ok(Example {
+ foo: foo.ok_or_else(|| DecodingError::missing_field("foo"))?,
+ bar: bar.ok_or_else(|| DecodingError::missing_field("bar"))?,
+ })
+ }
+}
+
+#[test]
+fn should_encode_struct() {
+ let example = Example {
+ foo: vec![2, 3],
+ bar: 1,
+ };
+ let encoded = example.to_bencode().expect("example encoding is broken");
+
+ assert_eq!(encoded, b"d3:bari1e3:fooli2ei3eee".to_vec())
+}
+
+#[test]
+fn should_decode_struct() {
+ let encoded = b"d3:bari1e3:fooli2ei3eee".to_vec();
+ let example = Example::from_bencode(&encoded).expect("example decoding is broken");
+
+ assert_eq!(
+ example,
+ Example {
+ foo: vec![2, 3],
+ bar: 1,
+ }
+ )
+}
+
+#[test]
+fn validate_round_trip() {
+ let example = Example {
+ foo: vec![2, 3],
+ bar: 1,
+ };
+
+ let encoded = example.to_bencode().expect("example encoding is broken");
+ let decoded = Example::from_bencode(&encoded).expect("example decoding is broken");
+
+ assert_eq!(example, decoded);
+}