summaryrefslogtreecommitdiffstats
path: root/third_party/rust/bumpalo
diff options
context:
space:
mode:
Diffstat (limited to 'third_party/rust/bumpalo')
-rw-r--r--third_party/rust/bumpalo/.cargo-checksum.json1
-rw-r--r--third_party/rust/bumpalo/CHANGELOG.md654
-rw-r--r--third_party/rust/bumpalo/Cargo.toml66
-rw-r--r--third_party/rust/bumpalo/LICENSE-APACHE201
-rw-r--r--third_party/rust/bumpalo/LICENSE-MIT25
-rw-r--r--third_party/rust/bumpalo/README.md216
-rw-r--r--third_party/rust/bumpalo/src/alloc.rs794
-rw-r--r--third_party/rust/bumpalo/src/boxed.rs683
-rw-r--r--third_party/rust/bumpalo/src/collections/collect_in.rs152
-rw-r--r--third_party/rust/bumpalo/src/collections/mod.rs93
-rw-r--r--third_party/rust/bumpalo/src/collections/raw_vec.rs730
-rw-r--r--third_party/rust/bumpalo/src/collections/str/lossy.rs209
-rw-r--r--third_party/rust/bumpalo/src/collections/str/mod.rs43
-rw-r--r--third_party/rust/bumpalo/src/collections/string.rs2141
-rw-r--r--third_party/rust/bumpalo/src/collections/vec.rs2614
-rw-r--r--third_party/rust/bumpalo/src/lib.rs2023
16 files changed, 10645 insertions, 0 deletions
diff --git a/third_party/rust/bumpalo/.cargo-checksum.json b/third_party/rust/bumpalo/.cargo-checksum.json
new file mode 100644
index 0000000000..d747ed0ac6
--- /dev/null
+++ b/third_party/rust/bumpalo/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"CHANGELOG.md":"8b5a7a49c720ba2678c07184f50b3608e2165fbf6704da494fba23c864e691e0","Cargo.toml":"8d5fd21d2b3ed1d7149e864d43f843fd469ccdcd9893ac3c2bef8518294a61dd","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"65f94e99ddaf4f5d1782a6dae23f35d4293a9a01444a13135a6887017d353cee","README.md":"00c9224790248ec71d1505615429699fd685b0290a0c2b6d7c0df0214e7f80eb","src/alloc.rs":"ab0f23fa11c26efdd8f0596ebdf0e3faa75d097881fb59639b0fb23340c106bc","src/boxed.rs":"5fc935f8e1a7bc1b8f6a39b2bcc4355a2be4743f2308fe3ffd557455a3a27cb2","src/collections/collect_in.rs":"0588a4ff3967a4323abb4218bbd615af4b123639ab4fae9130c6590c258b3d15","src/collections/mod.rs":"d58dc46eb4f9fcdde574f09bc5b8646f53e42d49c169561d98e0c23e5b36848a","src/collections/raw_vec.rs":"8829cc9a693fde38aa93e47a7bbbc2dac247620d07f60519f2e6cb44f5494bc5","src/collections/str/lossy.rs":"c5d62b16e01071e2a574ae41ef6693ad12f1e6c786c5d38f7a13ebd6cb23c088","src/collections/str/mod.rs":"d82a8bd417fbf52a589d89a16ea2a0ac4f6ac920c3976ab1f5b6ac0c8493c4f2","src/collections/string.rs":"388d39b999788baf5c14ccc3f5cb57da728060ea3295ddfc28f0f2e1ca5858ec","src/collections/vec.rs":"2eaf52e085e6d04767e97b224e82688dd0debd231c6536d6034f431376aa8bf0","src/lib.rs":"9eb2bdb8359b368a6f3091a66b3a5eb1216672ec1605cb18d5da28292c381cb9"},"package":"0d261e256854913907f67ed06efbc3338dfe6179796deefc1ff763fc1aee5535"} \ No newline at end of file
diff --git a/third_party/rust/bumpalo/CHANGELOG.md b/third_party/rust/bumpalo/CHANGELOG.md
new file mode 100644
index 0000000000..afc142eb90
--- /dev/null
+++ b/third_party/rust/bumpalo/CHANGELOG.md
@@ -0,0 +1,654 @@
+## Unreleased
+
+Released YYYY-MM-DD.
+
+### Added
+
+* TODO (or remove section if none)
+
+### Changed
+
+* TODO (or remove section if none)
+
+### Deprecated
+
+* TODO (or remove section if none)
+
+### Removed
+
+* TODO (or remove section if none)
+
+### Fixed
+
+* TODO (or remove section if none)
+
+### Security
+
+* TODO (or remove section if none)
+
+--------------------------------------------------------------------------------
+
+## 3.12.0
+
+Released 2023-01-17.
+
+### Added
+
+* Added the `bumpalo::boxed::Box::bump` and `bumpalo::collections::String::bump`
+ getters to get the underlying `Bump` that a string or box was allocated into.
+
+### Changed
+
+* Some uses of `Box` that MIRI did not previously consider as UB are now
+ reported as UB, and `bumpalo`'s internals have been adjusted to avoid the new
+ UB.
+
+--------------------------------------------------------------------------------
+
+## 3.11.1
+
+Released 2022-10-18.
+
+### Security
+
+* Fixed a bug where when `std::vec::IntoIter` was ported to
+ `bumpalo::collections::vec::IntoIter`, it didn't get its underlying `Bump`'s
+ lifetime threaded through. This meant that `rustc` was not checking the
+ borrows for `bumpalo::collections::IntoIter` and this could result in
+ use-after-free bugs.
+
+--------------------------------------------------------------------------------
+
+## 3.11.0
+
+Released 2022-08-17.
+
+### Added
+
+* Added support for per-`Bump` allocation limits. These are enforced only in the
+ slow path when allocating new chunks in the `Bump`, not in the bump allocation
+ hot path, and therefore impose near zero overhead.
+* Added the `bumpalo::boxed::Box::into_inner` method.
+
+### Changed
+
+* Updated to Rust 2021 edition.
+* The minimum supported Rust version (MSRV) is now 1.56.0.
+
+--------------------------------------------------------------------------------
+
+## 3.10.0
+
+Released 2022-06-01.
+
+### Added
+
+* Implement `bumpalo::collections::FromIteratorIn` for `Option` and `Result`,
+ just like `core` does for `FromIterator`.
+* Implement `bumpalo::collections::FromIteratorIn` for `bumpalo::boxed::Box<'a,
+ [T]>`.
+* Added running tests under MIRI in CI for additional confidence in unsafe code.
+* Publicly exposed `bumpalo::collections::Vec::drain_filter` since the
+ corresponding `std::vec::Vec` method has stabilized.
+
+### Changed
+
+* `Bump::new` will not allocate a backing chunk until the first allocation
+ inside the bump arena now.
+
+### Fixed
+
+* Properly account for alignment changes when growing or shrinking an existing
+ allocation.
+* Removed all internal integer-to-pointer casts, to play better with UB checkers
+ like MIRI.
+
+--------------------------------------------------------------------------------
+
+## 3.9.1
+
+Released 2022-01-06.
+
+### Fixed
+
+* Fixed link to logo in docs and README.md
+
+--------------------------------------------------------------------------------
+
+## 3.9.0
+
+Released 2022-01-05.
+
+### Changed
+
+* The minimum supported Rust version (MSRV) has been raised to Rust 1.54.0.
+
+* `bumpalo::collections::Vec<T>` implements relevant traits for all arrays of
+ any size `N` via const generics. Previously, it was just arrays up to length
+ 32. Similar for `bumpalo::boxed::Box<[T; N]>`.
+
+--------------------------------------------------------------------------------
+
+## 3.8.0
+
+Released 2021-10-19.
+
+### Added
+
+* Added the `CollectIn` and `FromIteratorIn` traits to make building a
+ collection from an iterator easier. These new traits live in the
+ `bumpalo::collections` module and are implemented by
+ `bumpalo::collections::{String,Vec}`.
+
+* Added the `Bump::iter_allocated_chunks_raw` method, which is an `unsafe`, raw
+ version of `Bump::iter_allocated_chunks`. The new method does not take an
+ exclusive borrow of the `Bump` and yields raw pointer-and-length pairs for
+ each chunk in the bump. It is the caller's responsibility to ensure that no
+ allocation happens in the `Bump` while iterating over chunks and that there
+ are no active borrows of allocated data if they want to turn any
+ pointer-and-length pairs into slices.
+
+--------------------------------------------------------------------------------
+
+## 3.7.1
+
+Released 2021-09-17.
+
+### Changed
+
+* The packaged crate uploaded to crates.io when `bumpalo` is published is now
+ smaller, thanks to excluding unnecessary files.
+
+--------------------------------------------------------------------------------
+
+## 3.7.0
+
+Released 2021-05-28.
+
+### Added
+
+* Added `Borrow` and `BorrowMut` trait implementations for
+ `bumpalo::collections::Vec` and
+ `bumpalo::collections::String`. [#108](https://github.com/fitzgen/bumpalo/pull/108)
+
+### Changed
+
+* When allocating a new chunk fails, don't immediately give up. Instead, try
+ allocating a chunk that is half that size, and if that fails, then try half of
+ *that* size, etc until either we successfully allocate a chunk or we fail to
+ allocate the minimum chunk size and then finally give
+ up. [#111](https://github.com/fitzgen/bumpalo/pull/111)
+
+--------------------------------------------------------------------------------
+
+## 3.6.1
+
+Released 2021-02-18.
+
+### Added
+
+* Improved performance of `Bump`'s `Allocator::grow_zeroed` trait method
+ implementation. [#99](https://github.com/fitzgen/bumpalo/pull/99)
+
+--------------------------------------------------------------------------------
+
+## 3.6.0
+
+Released 2021-01-29.
+
+### Added
+
+* Added a few new flavors of allocation:
+
+ * `try_alloc` for fallible, by-value allocation
+
+ * `try_alloc_with` for fallible allocation with an infallible initializer
+ function
+
+ * `alloc_try_with` for infallible allocation with a fallible initializer
+ function
+
+ * `try_alloc_try_with` method for fallible allocation with a fallible
+ initializer function
+
+ We already have infallible, by-value allocation (`alloc`) and infallible
+ allocation with an infallible initializer (`alloc_with`). With these new
+ methods, we now have every combination covered.
+
+ Thanks to [Tamme Schichler](https://github.com/Tamschi) for contributing these
+ methods!
+
+--------------------------------------------------------------------------------
+
+## 3.5.0
+
+Released 2021-01-22.
+
+### Added
+
+* Added experimental, unstable support for the unstable, nightly Rust
+ `allocator_api` feature.
+
+ The `allocator_api` feature defines an `Allocator` trait and exposes custom
+ allocators for `std` types. Bumpalo has a matching `allocator_api` cargo
+ feature to enable implementing `Allocator` and using `Bump` with `std`
+ collections.
+
+ First, enable the `allocator_api` feature in your `Cargo.toml`:
+
+ ```toml
+ [dependencies]
+ bumpalo = { version = "3.5", features = ["allocator_api"] }
+ ```
+
+ Next, enable the `allocator_api` nightly Rust feature in your `src/lib.rs` or `src/main.rs`:
+
+ ```rust
+ # #[cfg(feature = "allocator_api")]
+ # {
+ #![feature(allocator_api)]
+ # }
+ ```
+
+ Finally, use `std` collections with `Bump`, so that their internal heap
+ allocations are made within the given bump arena:
+
+ ```
+ # #![cfg_attr(feature = "allocator_api", feature(allocator_api))]
+ # #[cfg(feature = "allocator_api")]
+ # {
+ #![feature(allocator_api)]
+ use bumpalo::Bump;
+
+ // Create a new bump arena.
+ let bump = Bump::new();
+
+ // Create a `Vec` whose elements are allocated within the bump arena.
+ let mut v = Vec::new_in(&bump);
+ v.push(0);
+ v.push(1);
+ v.push(2);
+ # }
+ ```
+
+ I'm very excited to see custom allocators in `std` coming along! Thanks to
+ Arthur Gautier for implementing support for the `allocator_api` feature for
+ Bumpalo.
+
+--------------------------------------------------------------------------------
+
+## 3.4.0
+
+Released 2020-06-01.
+
+### Added
+
+* Added the `bumpalo::boxed::Box<T>` type. It is an owned pointer referencing a
+ bump-allocated value, and it runs `T`'s `Drop` implementation on the
+ referenced value when dropped. This type can be used by enabling the `"boxed"`
+ cargo feature flag.
+
+--------------------------------------------------------------------------------
+
+## 3.3.0
+
+Released 2020-05-13.
+
+### Added
+
+* Added fallible allocation methods to `Bump`: `try_new`, `try_with_capacity`,
+ and `try_alloc_layout`.
+
+* Added `Bump::chunk_capacity`
+
+* Added `bumpalo::collections::Vec::try_reserve[_exact]`
+
+--------------------------------------------------------------------------------
+
+## 3.2.1
+
+Released 2020-03-24.
+
+### Security
+
+* When `realloc`ing, if we allocate new space, we need to copy the old
+ allocation's bytes into the new space. There are `old_size` number of bytes in
+ the old allocation, but we were accidentally copying `new_size` number of
+ bytes, which could lead to copying bytes into the realloc'd space from past
+ the chunk that we're bump allocating out of, from unknown memory.
+
+  If an attacker can cause `realloc`s, and can read the `realloc`ed data back,
+ this could allow them to read things from other regions of memory that they
+ shouldn't be able to. For example, if some crypto keys happened to live in
+ memory right after a chunk we were bump allocating out of, this could allow
+ the attacker to read the crypto keys.
+
+ Beyond just fixing the bug and adding a regression test, I've also taken two
+ additional steps:
+
+ 1. While we were already running the testsuite under `valgrind` in CI, because
+ `valgrind` exits with the same code that the program did, if there are
+ invalid reads/writes that happen not to trigger a segfault, the program can
+ still exit OK and we will be none the wiser. I've enabled the
+ `--error-exitcode=1` flag for `valgrind` in CI so that tests eagerly fail
+ in these scenarios.
+
+ 2. I've written a quickcheck test to exercise `realloc`. Without the bug fix
+ in this patch, this quickcheck immediately triggers invalid reads when run
+ under `valgrind`. We didn't previously have quickchecks that exercised
+ `realloc` because `realloc` isn't publicly exposed directly, and instead
+ can only be indirectly called. This new quickcheck test exercises `realloc`
+ via `bumpalo::collections::Vec::resize` and
+ `bumpalo::collections::Vec::shrink_to_fit` calls.
+
+ This bug was introduced in version 3.0.0.
+
+ See [#69](https://github.com/fitzgen/bumpalo/issues/69) for details.
+
+--------------------------------------------------------------------------------
+
+## 3.2.0
+
+Released 2020-02-07.
+
+### Added
+
+* Added the `bumpalo::collections::Vec::into_bump_slice_mut` method to turn a
+ `bumpalo::collections::Vec<'bump, T>` into a `&'bump mut [T]`.
+
+--------------------------------------------------------------------------------
+
+## 3.1.2
+
+Released 2020-01-07.
+
+### Fixed
+
+* The `bumpalo::collections::format!` macro previously did not accept a trailing
+  comma like `format!(in bump; "{}", 1,)`, but it does now.
+
+--------------------------------------------------------------------------------
+
+## 3.1.1
+
+Released 2020-01-03.
+
+### Fixed
+
+* The `bumpalo::collections::vec!` macro previously did not accept a trailing
+  comma like `vec![in bump; 1, 2,]`, but it does now.
+
+--------------------------------------------------------------------------------
+
+## 3.1.0
+
+Released 2019-12-27.
+
+### Added
+
+* Added the `Bump::allocated_bytes` diagnostic method for counting the total
+ number of bytes a `Bump` has allocated.
+
+--------------------------------------------------------------------------------
+
+# 3.0.0
+
+Released 2019-12-20.
+
+### Added
+
+* Added `Bump::alloc_str` for copying string slices into a `Bump`.
+
+* Added `Bump::alloc_slice_copy` and `Bump::alloc_slice_clone` for copying or
+ cloning slices into a `Bump`.
+
+* Added `Bump::alloc_slice_fill_iter` for allocating a slice in the `Bump` from
+ an iterator.
+
+* Added `Bump::alloc_slice_fill_copy` and `Bump::alloc_slice_fill_clone` for
+ creating slices of length `n` that are filled with copies or clones of an
+ initial element.
+
+* Added `Bump::alloc_slice_fill_default` for creating slices of length `n` with
+ the element type's default instance.
+
+* Added `Bump::alloc_slice_fill_with` for creating slices of length `n` whose
+ elements are initialized with a function or closure.
+
+* Added `Bump::iter_allocated_chunks` as a replacement for the old
+ `Bump::each_allocated_chunk`. The `iter_allocated_chunks` version returns an
+ iterator, which is more idiomatic than its old, callback-taking counterpart.
+ Additionally, `iter_allocated_chunks` exposes the chunks as `MaybeUninit`s
+ instead of slices, which makes it usable in more situations without triggering
+ undefined behavior. See also the note about bump direction in the "changed"
+ section; if you're iterating chunks, you're likely affected by that change!
+
+* Added `Bump::with_capacity` so that you can pre-allocate a chunk with the
+ requested space.
+
+### Changed
+
+* **BREAKING:** The direction we allocate within a chunk has changed. It used to
+ be "upwards", from low addresses within a chunk towards high addresses. It is
+ now "downwards", from high addresses towards lower addresses.
+
+ Additionally, the order in which we iterate over allocated chunks has changed!
+ We used to iterate over chunks from oldest chunk to youngest chunk, and now we
+ do the opposite: the youngest chunks are iterated over first, and the oldest
+ chunks are iterated over last.
+
+ If you were using `Bump::each_allocated_chunk` to iterate over data that you
+ had previously allocated, and *you want to iterate in order of
+ oldest-to-youngest allocation*, you need to reverse the chunks iterator and
+ also reverse the order in which you loop through the data within a chunk!
+
+ For example, if you had this code:
+
+ ```rust
+ unsafe {
+ bump.each_allocated_chunk(|chunk| {
+ for byte in chunk {
+ // Touch each byte in oldest-to-youngest allocation order...
+ }
+ });
+ }
+ ```
+
+ It should become this code:
+
+ ```rust
+ let mut chunks: Vec<_> = bump.iter_allocated_chunks().collect();
+ chunks.reverse();
+ for chunk in chunks {
+ for byte in chunk.iter().rev() {
+ let byte = unsafe { byte.assume_init() };
+ // Touch each byte in oldest-to-youngest allocation order...
+ }
+ }
+ ```
+
+ The good news is that this change yielded a *speed up in allocation throughput
+ of 3-19%!*
+
+ See https://github.com/fitzgen/bumpalo/pull/37 and
+ https://fitzgeraldnick.com/2019/11/01/always-bump-downwards.html for details.
+
+* **BREAKING:** The `collections` cargo feature is no longer on by default. You
+ must explicitly turn it on if you intend to use the `bumpalo::collections`
+ module.
+
+* `Bump::reset` will now retain only the last allocated chunk (the biggest),
+ rather than only the first allocated chunk (the smallest). This should enable
+ `Bump` to better adapt to workload sizes and quickly reach a steady state
+ where new chunks are not requested from the global allocator.
+
+### Removed
+
+* The `Bump::each_allocated_chunk` method is removed in favor of
+ `Bump::iter_allocated_chunks`. Note that its safety requirements for reading
+ from the allocated chunks are slightly different from the old
+ `each_allocated_chunk`: only up to 16-byte alignment is supported now. If you
+ allocate anything with greater alignment than that into the bump arena, there
+  might be uninitialized padding inserted in the chunks, and therefore it is no
+ longer safe to read them via `MaybeUninit::assume_init`. See also the note
+ about bump direction in the "changed" section; if you're iterating chunks,
+ you're likely affected by that change!
+
+* The `std` cargo feature has been removed, since this crate is now always
+ no-std.
+
+### Fixed
+
+* Fixed a bug involving potential integer overflows with large requested
+ allocation sizes.
+
+--------------------------------------------------------------------------------
+
+# 2.6.0
+
+Released 2019-08-19.
+
+* Implement `Send` for `Bump`.
+
+--------------------------------------------------------------------------------
+
+# 2.5.0
+
+Released 2019-07-01.
+
+* Add `alloc_slice_copy` and `alloc_slice_clone` methods that allocate space for
+ slices and either copy (with bound `T: Copy`) or clone (with bound `T: Clone`)
+ the provided slice's data into the newly allocated space.
+
+--------------------------------------------------------------------------------
+
+# 2.4.3
+
+Released 2019-05-20.
+
+* Fixed a bug where chunks were always deallocated with the default chunk
+ layout, not the layout that the chunk was actually allocated with (i.e. if we
+  started growing larger chunks with larger layouts, we would deallocate those
+ chunks with an incorrect layout).
+
+--------------------------------------------------------------------------------
+
+# 2.4.2
+
+Released 2019-05-17.
+
+* Added an implementation of `Default` for `Bump`.
+* Made it so that if bump allocation within a chunk overflows, we still try to
+ allocate a new chunk to bump out of for the requested allocation. This can
+ avoid some OOMs in scenarios where the chunk we are currently allocating out
+ of is very near the high end of the address space, and there is still
+ available address space lower down for new chunks.
+
+--------------------------------------------------------------------------------
+
+# 2.4.1
+
+Released 2019-04-19.
+
+* Added readme metadata to Cargo.toml so it shows up on crates.io
+
+--------------------------------------------------------------------------------
+
+# 2.4.0
+
+Released 2019-04-19.
+
+* Added support for `realloc`ing in-place when the pointer being `realloc`ed is
+ the last allocation made from the bump arena. This should speed up various
+ `String`, `Vec`, and `format!` operations in many cases.
+
+--------------------------------------------------------------------------------
+
+# 2.3.0
+
+Released 2019-03-26.
+
+* Add the `alloc_with` method, that (usually) avoids stack-allocating the
+ allocated value and then moving it into the bump arena. This avoids potential
+ stack overflows in release mode when allocating very large objects, and also
+ some `memcpy` calls. This is similar to the `copyless` crate. Read [the
+ `alloc_with` doc comments][alloc-with-doc-comments] and [the original issue
+ proposing this API][issue-proposing-alloc-with] for more.
+
+[alloc-with-doc-comments]: https://github.com/fitzgen/bumpalo/blob/9f47aee8a6839ba65c073b9ad5372aacbbd02352/src/lib.rs#L436-L475
+[issue-proposing-alloc-with]: https://github.com/fitzgen/bumpalo/issues/10
+
+--------------------------------------------------------------------------------
+
+# 2.2.2
+
+Released 2019-03-18.
+
+* Fix a regression from 2.2.1 where chunks were not always aligned to the chunk
+ footer's alignment.
+
+--------------------------------------------------------------------------------
+
+# 2.2.1
+
+Released 2019-03-18.
+
+* Fix a regression in 2.2.0 where newly allocated bump chunks could fail to have
+ capacity for a large requested bump allocation in some corner cases.
+
+--------------------------------------------------------------------------------
+
+# 2.2.0
+
+Released 2019-03-15.
+
+* Chunks in an arena now start out small, and double in size as more chunks are
+ requested.
+
+--------------------------------------------------------------------------------
+
+# 2.1.0
+
+Released 2019-02-12.
+
+* Added the `into_bump_slice` method on `bumpalo::collections::Vec<T>`.
+
+--------------------------------------------------------------------------------
+
+# 2.0.0
+
+Released 2019-02-11.
+
+* Removed the `BumpAllocSafe` trait.
+* Correctly detect overflows from large allocations and panic.
+
+--------------------------------------------------------------------------------
+
+# 1.2.0
+
+Released 2019-01-15.
+
+* Fixed an overly-aggressive `debug_assert!` that had false positives.
+* Ported to Rust 2018 edition.
+
+--------------------------------------------------------------------------------
+
+# 1.1.0
+
+Released 2018-11-28.
+
+* Added the `collections` module, which contains ports of `std`'s collection
+ types that are compatible with backing their storage in `Bump` arenas.
+* Lifted the limits on size and alignment of allocations.
+
+--------------------------------------------------------------------------------
+
+# 1.0.2
+
+--------------------------------------------------------------------------------
+
+# 1.0.1
+
+--------------------------------------------------------------------------------
+
+# 1.0.0
diff --git a/third_party/rust/bumpalo/Cargo.toml b/third_party/rust/bumpalo/Cargo.toml
new file mode 100644
index 0000000000..02ec679c2b
--- /dev/null
+++ b/third_party/rust/bumpalo/Cargo.toml
@@ -0,0 +1,66 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies.
+#
+# If you are reading this file be aware that the original Cargo.toml
+# will likely look very different (and much more reasonable).
+# See Cargo.toml.orig for the original contents.
+
+[package]
+edition = "2021"
+name = "bumpalo"
+version = "3.12.0"
+authors = ["Nick Fitzgerald <fitzgen@gmail.com>"]
+exclude = [
+ "/.github/*",
+ "/benches",
+ "/tests",
+ "valgrind.supp",
+ "bumpalo.png",
+]
+description = "A fast bump allocation arena for Rust."
+documentation = "https://docs.rs/bumpalo"
+readme = "README.md"
+categories = [
+ "memory-management",
+ "rust-patterns",
+ "no-std",
+]
+license = "MIT/Apache-2.0"
+repository = "https://github.com/fitzgen/bumpalo"
+
+[package.metadata.docs.rs]
+all-features = true
+
+[lib]
+path = "src/lib.rs"
+bench = false
+
+[[test]]
+name = "try_alloc"
+path = "tests/try_alloc.rs"
+harness = false
+
+[[bench]]
+name = "benches"
+path = "benches/benches.rs"
+harness = false
+required-features = ["collections"]
+
+[dev-dependencies.criterion]
+version = "0.3.6"
+
+[dev-dependencies.quickcheck]
+version = "1.0.3"
+
+[dev-dependencies.rand]
+version = "0.8.5"
+
+[features]
+allocator_api = []
+boxed = []
+collections = []
+default = []
diff --git a/third_party/rust/bumpalo/LICENSE-APACHE b/third_party/rust/bumpalo/LICENSE-APACHE
new file mode 100644
index 0000000000..16fe87b06e
--- /dev/null
+++ b/third_party/rust/bumpalo/LICENSE-APACHE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+Copyright [yyyy] [name of copyright owner]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/third_party/rust/bumpalo/LICENSE-MIT b/third_party/rust/bumpalo/LICENSE-MIT
new file mode 100644
index 0000000000..bac6fb98de
--- /dev/null
+++ b/third_party/rust/bumpalo/LICENSE-MIT
@@ -0,0 +1,25 @@
+Copyright (c) 2019 Nick Fitzgerald
+
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
diff --git a/third_party/rust/bumpalo/README.md b/third_party/rust/bumpalo/README.md
new file mode 100644
index 0000000000..3d73e2967e
--- /dev/null
+++ b/third_party/rust/bumpalo/README.md
@@ -0,0 +1,216 @@
+# `bumpalo`
+
+**A fast bump allocation arena for Rust.**
+
+[![](https://docs.rs/bumpalo/badge.svg)](https://docs.rs/bumpalo/)
+[![](https://img.shields.io/crates/v/bumpalo.svg)](https://crates.io/crates/bumpalo)
+[![](https://img.shields.io/crates/d/bumpalo.svg)](https://crates.io/crates/bumpalo)
+[![Build Status](https://github.com/fitzgen/bumpalo/workflows/Rust/badge.svg)](https://github.com/fitzgen/bumpalo/actions?query=workflow%3ARust)
+
+![](https://github.com/fitzgen/bumpalo/raw/main/bumpalo.png)
+
+### Bump Allocation
+
+Bump allocation is a fast, but limited approach to allocation. We have a chunk
+of memory, and we maintain a pointer within that memory. Whenever we allocate an
+object, we do a quick check that we have enough capacity left in our chunk to
+allocate the object and then update the pointer by the object's size. *That's
+it!*
+
+The disadvantage of bump allocation is that there is no general way to
+deallocate individual objects or reclaim the memory region for a
+no-longer-in-use object.
+
+These trade offs make bump allocation well-suited for *phase-oriented*
+allocations. That is, a group of objects that will all be allocated during the
+same program phase, used, and then can all be deallocated together as a group.
+
+### Deallocation en Masse, but no `Drop`
+
+To deallocate all the objects in the arena at once, we can simply reset the bump
+pointer back to the start of the arena's memory chunk. This makes mass
+deallocation *extremely* fast, but allocated objects' [`Drop`] implementations are
+not invoked.
+
+> **However:** [`bumpalo::boxed::Box<T>`][box] can be used to wrap
+> `T` values allocated in the `Bump` arena, and calls `T`'s `Drop`
+> implementation when the `Box<T>` wrapper goes out of scope. This is similar to
+> how [`std::boxed::Box`] works, except without deallocating its backing memory.
+
+[`Drop`]: https://doc.rust-lang.org/std/ops/trait.Drop.html
+[box]: https://docs.rs/bumpalo/latest/bumpalo/boxed/struct.Box.html
+[`std::boxed::Box`]: https://doc.rust-lang.org/std/boxed/struct.Box.html
+
+### What happens when the memory chunk is full?
+
+This implementation will allocate a new memory chunk from the global allocator
+and then start bump allocating into this new memory chunk.
+
+### Example
+
+```rust
+use bumpalo::Bump;
+use std::u64;
+
+struct Doggo {
+ cuteness: u64,
+ age: u8,
+ scritches_required: bool,
+}
+
+// Create a new arena to bump allocate into.
+let bump = Bump::new();
+
+// Allocate values into the arena.
+let scooter = bump.alloc(Doggo {
+ cuteness: u64::max_value(),
+ age: 8,
+ scritches_required: true,
+});
+
+// Exclusive, mutable references to the just-allocated value are returned.
+assert!(scooter.scritches_required);
+scooter.age += 1;
+```
+
+### Collections
+
+When the `"collections"` cargo feature is enabled, a fork of some of the `std`
+library's collections are available in the [`collections`] module. These
+collection types are modified to allocate their space inside `bumpalo::Bump`
+arenas.
+
+[`collections`]: https://docs.rs/bumpalo/latest/bumpalo/collections/index.html
+
+```rust
+#[cfg(feature = "collections")]
+{
+ use bumpalo::{Bump, collections::Vec};
+
+ // Create a new bump arena.
+ let bump = Bump::new();
+
+ // Create a vector of integers whose storage is backed by the bump arena. The
+ // vector cannot outlive its backing arena, and this property is enforced with
+ // Rust's lifetime rules.
+ let mut v = Vec::new_in(&bump);
+
+ // Push a bunch of integers onto `v`!
+ for i in 0..100 {
+ v.push(i);
+ }
+}
+```
+
+Eventually [all `std` collection types will be parameterized by an
+allocator](https://github.com/rust-lang/rust/issues/42774) and we can remove
+this `collections` module and use the `std` versions.
+
+For unstable, nightly-only support for custom allocators in `std`, see the
+`allocator_api` section below.
+
+### `bumpalo::boxed::Box`
+
+When the `"boxed"` cargo feature is enabled, a fork of `std::boxed::Box`
+is available in the `boxed` module. This `Box` type is modified to allocate its
+space inside `bumpalo::Bump` arenas.
+
+**A `Box<T>` runs `T`'s drop implementation when the `Box<T>` is dropped.** You
+can use this to work around the fact that `Bump` does not drop values allocated
+in its space itself.
+
+```rust
+#[cfg(feature = "boxed")]
+{
+ use bumpalo::{Bump, boxed::Box};
+ use std::sync::atomic::{AtomicUsize, Ordering};
+
+ static NUM_DROPPED: AtomicUsize = AtomicUsize::new(0);
+
+ struct CountDrops;
+
+ impl Drop for CountDrops {
+ fn drop(&mut self) {
+ NUM_DROPPED.fetch_add(1, Ordering::SeqCst);
+ }
+ }
+
+ // Create a new bump arena.
+ let bump = Bump::new();
+
+ // Create a `CountDrops` inside the bump arena.
+ let mut c = Box::new_in(CountDrops, &bump);
+
+ // No `CountDrops` have been dropped yet.
+ assert_eq!(NUM_DROPPED.load(Ordering::SeqCst), 0);
+
+ // Drop our `Box<CountDrops>`.
+ drop(c);
+
+ // Its `Drop` implementation was run, and so `NUM_DROPS` has been
+ // incremented.
+ assert_eq!(NUM_DROPPED.load(Ordering::SeqCst), 1);
+}
+```
+
+### `#![no_std]` Support
+
+Bumpalo is a `no_std` crate. It depends only on the `alloc` and `core` crates.
+
+### Thread support
+
+The `Bump` is `!Sync`, which makes it hard to use in certain situations around
+threads ‒ for example in `rayon`.
+
+The [`bumpalo-herd`](https://crates.io/crates/bumpalo-herd) crate provides a
+pool of `Bump` allocators for use in such situations.
+
+### Nightly Rust `allocator_api` Support
+
+The unstable, nightly-only Rust `allocator_api` feature defines an [`Allocator`]
+trait and exposes custom allocators for `std` types. Bumpalo has a matching
+`allocator_api` cargo feature to enable implementing `Allocator` and using
+`Bump` with `std` collections. Note that, as `feature(allocator_api)` is
+unstable and only in nightly Rust, Bumpalo's matching `allocator_api` cargo
+feature should be considered unstable, and will not follow the semver
+conventions that the rest of the crate does.
+
+First, enable the `allocator_api` feature in your `Cargo.toml`:
+
+```toml
+[dependencies]
+bumpalo = { version = "3.9", features = ["allocator_api"] }
+```
+
+Next, enable the `allocator_api` nightly Rust feature in your `src/lib.rs` or
+`src/main.rs`:
+
+```rust,ignore
+#![feature(allocator_api)]
+```
+
+Finally, use `std` collections with `Bump`, so that their internal heap
+allocations are made within the given bump arena:
+
+```rust,ignore
+use bumpalo::Bump;
+
+// Create a new bump arena.
+let bump = Bump::new();
+
+// Create a `Vec` whose elements are allocated within the bump arena.
+let mut v = Vec::new_in(&bump);
+v.push(0);
+v.push(1);
+v.push(2);
+```
+
+[`Allocator`]: https://doc.rust-lang.org/std/alloc/trait.Allocator.html
+
+#### Minimum Supported Rust Version (MSRV)
+
+This crate is guaranteed to compile on stable Rust **1.56** and up. It might
+compile with older versions but that may change in any new patch release.
+
+We reserve the right to increment the MSRV on minor releases, however we will
+strive to only do it deliberately and for good reasons.
diff --git a/third_party/rust/bumpalo/src/alloc.rs b/third_party/rust/bumpalo/src/alloc.rs
new file mode 100644
index 0000000000..0bcc21f22c
--- /dev/null
+++ b/third_party/rust/bumpalo/src/alloc.rs
@@ -0,0 +1,794 @@
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![allow(unstable_name_collisions)]
+#![allow(dead_code)]
+#![allow(deprecated)]
+
+//! Memory allocation APIs
+
+use core::cmp;
+use core::fmt;
+use core::mem;
+use core::ptr::{self, NonNull};
+use core::usize;
+
+pub use core::alloc::{Layout, LayoutErr};
+
+fn new_layout_err() -> LayoutErr {
+ Layout::from_size_align(1, 3).unwrap_err()
+}
+
+pub fn handle_alloc_error(layout: Layout) -> ! {
+ panic!("encountered allocation error: {:?}", layout)
+}
+
+pub trait UnstableLayoutMethods {
+ fn padding_needed_for(&self, align: usize) -> usize;
+ fn repeat(&self, n: usize) -> Result<(Layout, usize), LayoutErr>;
+ fn array<T>(n: usize) -> Result<Layout, LayoutErr>;
+}
+
+impl UnstableLayoutMethods for Layout {
+ fn padding_needed_for(&self, align: usize) -> usize {
+ let len = self.size();
+
+ // Rounded up value is:
+ // len_rounded_up = (len + align - 1) & !(align - 1);
+ // and then we return the padding difference: `len_rounded_up - len`.
+ //
+ // We use modular arithmetic throughout:
+ //
+ // 1. align is guaranteed to be > 0, so align - 1 is always
+ // valid.
+ //
+ // 2. `len + align - 1` can overflow by at most `align - 1`,
+ // so the &-mask with `!(align - 1)` will ensure that in the
+ // case of overflow, `len_rounded_up` will itself be 0.
+ // Thus the returned padding, when added to `len`, yields 0,
+ // which trivially satisfies the alignment `align`.
+ //
+ // (Of course, attempts to allocate blocks of memory whose
+ // size and padding overflow in the above manner should cause
+ // the allocator to yield an error anyway.)
+
+ let len_rounded_up = len.wrapping_add(align).wrapping_sub(1) & !align.wrapping_sub(1);
+ len_rounded_up.wrapping_sub(len)
+ }
+
+ fn repeat(&self, n: usize) -> Result<(Layout, usize), LayoutErr> {
+ let padded_size = self
+ .size()
+ .checked_add(self.padding_needed_for(self.align()))
+ .ok_or_else(new_layout_err)?;
+ let alloc_size = padded_size.checked_mul(n).ok_or_else(new_layout_err)?;
+
+ unsafe {
+ // self.align is already known to be valid and alloc_size has been
+ // padded already.
+ Ok((
+ Layout::from_size_align_unchecked(alloc_size, self.align()),
+ padded_size,
+ ))
+ }
+ }
+
+ fn array<T>(n: usize) -> Result<Layout, LayoutErr> {
+ Layout::new::<T>().repeat(n).map(|(k, offs)| {
+ debug_assert!(offs == mem::size_of::<T>());
+ k
+ })
+ }
+}
+
+/// Represents the combination of a starting address and
+/// a total capacity of the returned block.
+// #[unstable(feature = "allocator_api", issue = "32838")]
+#[derive(Debug)]
+pub struct Excess(pub NonNull<u8>, pub usize);
+
+fn size_align<T>() -> (usize, usize) {
+ (mem::size_of::<T>(), mem::align_of::<T>())
+}
+
+/// The `AllocErr` error indicates an allocation failure
+/// that may be due to resource exhaustion or to
+/// something wrong when combining the given input arguments with this
+/// allocator.
+// #[unstable(feature = "allocator_api", issue = "32838")]
+#[derive(Clone, PartialEq, Eq, Debug)]
+pub struct AllocErr;
+
+// (we need this for downstream impl of trait Error)
+// #[unstable(feature = "allocator_api", issue = "32838")]
+impl fmt::Display for AllocErr {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.write_str("memory allocation failed")
+ }
+}
+
+/// The `CannotReallocInPlace` error is used when `grow_in_place` or
+/// `shrink_in_place` were unable to reuse the given memory block for
+/// a requested layout.
+// #[unstable(feature = "allocator_api", issue = "32838")]
+#[derive(Clone, PartialEq, Eq, Debug)]
+pub struct CannotReallocInPlace;
+
+// #[unstable(feature = "allocator_api", issue = "32838")]
+impl CannotReallocInPlace {
+ pub fn description(&self) -> &str {
+ "cannot reallocate allocator's memory in place"
+ }
+}
+
+// (we need this for downstream impl of trait Error)
+// #[unstable(feature = "allocator_api", issue = "32838")]
+impl fmt::Display for CannotReallocInPlace {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ write!(f, "{}", self.description())
+ }
+}
+
+/// An implementation of `Alloc` can allocate, reallocate, and
+/// deallocate arbitrary blocks of data described via `Layout`.
+///
+/// Some of the methods require that a memory block be *currently
+/// allocated* via an allocator. This means that:
+///
+/// * the starting address for that memory block was previously
+/// returned by a previous call to an allocation method (`alloc`,
+/// `alloc_zeroed`, `alloc_excess`, `alloc_one`, `alloc_array`) or
+/// reallocation method (`realloc`, `realloc_excess`, or
+/// `realloc_array`), and
+///
+/// * the memory block has not been subsequently deallocated, where
+/// blocks are deallocated either by being passed to a deallocation
+/// method (`dealloc`, `dealloc_one`, `dealloc_array`) or by being
+/// passed to a reallocation method (see above) that returns `Ok`.
+///
+/// A note regarding zero-sized types and zero-sized layouts: many
+/// methods in the `Alloc` trait state that allocation requests
+/// must be non-zero size, or else undefined behavior can result.
+///
+/// * However, some higher-level allocation methods (`alloc_one`,
+/// `alloc_array`) are well-defined on zero-sized types and can
+/// optionally support them: it is left up to the implementor
+/// whether to return `Err`, or to return `Ok` with some pointer.
+///
+/// * If an `Alloc` implementation chooses to return `Ok` in this
+/// case (i.e. the pointer denotes a zero-sized inaccessible block)
+/// then that returned pointer must be considered "currently
+/// allocated". On such an allocator, *all* methods that take
+/// currently-allocated pointers as inputs must accept these
+/// zero-sized pointers, *without* causing undefined behavior.
+///
+/// * In other words, if a zero-sized pointer can flow out of an
+/// allocator, then that allocator must likewise accept that pointer
+/// flowing back into its deallocation and reallocation methods.
+///
+/// Some of the methods require that a layout *fit* a memory block.
+/// What it means for a layout to "fit" a memory block (or
+/// equivalently, for a memory block to "fit" a layout) is that the
+/// following two conditions must hold:
+///
+/// 1. The block's starting address must be aligned to `layout.align()`.
+///
+/// 2. The block's size must fall in the range `[use_min, use_max]`, where:
+///
+/// * `use_min` is `self.usable_size(layout).0`, and
+///
+/// * `use_max` is the capacity that was (or would have been)
+/// returned when (if) the block was allocated via a call to
+/// `alloc_excess` or `realloc_excess`.
+///
+/// Note that:
+///
+/// * the size of the layout most recently used to allocate the block
+/// is guaranteed to be in the range `[use_min, use_max]`, and
+///
+/// * a lower-bound on `use_max` can be safely approximated by a call to
+/// `usable_size`.
+///
+/// * if a layout `k` fits a memory block (denoted by `ptr`)
+/// currently allocated via an allocator `a`, then it is legal to
+/// use that layout to deallocate it, i.e. `a.dealloc(ptr, k);`.
+///
+/// # Unsafety
+///
+/// The `Alloc` trait is an `unsafe` trait for a number of reasons, and
+/// implementors must ensure that they adhere to these contracts:
+///
+/// * Pointers returned from allocation functions must point to valid memory and
+/// retain their validity until at least the instance of `Alloc` is dropped
+/// itself.
+///
+/// * `Layout` queries and calculations in general must be correct. Callers of
+/// this trait are allowed to rely on the contracts defined on each method,
+/// and implementors must ensure such contracts remain true.
+///
+/// Note that this list may get tweaked over time as clarifications are made in
+/// the future.
+// #[unstable(feature = "allocator_api", issue = "32838")]
+pub unsafe trait Alloc {
+ // (Note: some existing allocators have unspecified but well-defined
+    // behavior in response to a zero size allocation request;
+ // e.g. in C, `malloc` of 0 will either return a null pointer or a
+ // unique pointer, but will not have arbitrary undefined
+ // behavior.
+ // However in jemalloc for example,
+ // `mallocx(0)` is documented as undefined behavior.)
+
+ /// Returns a pointer meeting the size and alignment guarantees of
+ /// `layout`.
+ ///
+ /// If this method returns an `Ok(addr)`, then the `addr` returned
+ /// will be non-null address pointing to a block of storage
+ /// suitable for holding an instance of `layout`.
+ ///
+ /// The returned block of storage may or may not have its contents
+ /// initialized. (Extension subtraits might restrict this
+ /// behavior, e.g. to ensure initialization to particular sets of
+ /// bit patterns.)
+ ///
+ /// # Safety
+ ///
+ /// This function is unsafe because undefined behavior can result
+ /// if the caller does not ensure that `layout` has non-zero size.
+ ///
+ /// (Extension subtraits might provide more specific bounds on
+ /// behavior, e.g. guarantee a sentinel address or a null pointer
+ /// in response to a zero-size allocation request.)
+ ///
+ /// # Errors
+ ///
+ /// Returning `Err` indicates that either memory is exhausted or
+ /// `layout` does not meet allocator's size or alignment
+ /// constraints.
+ ///
+ /// Implementations are encouraged to return `Err` on memory
+ /// exhaustion rather than panicking or aborting, but this is not
+ /// a strict requirement. (Specifically: it is *legal* to
+ /// implement this trait atop an underlying native allocation
+ /// library that aborts on memory exhaustion.)
+ ///
+ /// Clients wishing to abort computation in response to an
+ /// allocation error are encouraged to call the [`handle_alloc_error`] function,
+ /// rather than directly invoking `panic!` or similar.
+ ///
+ /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
+ unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocErr>;
+
+ /// Deallocate the memory referenced by `ptr`.
+ ///
+ /// # Safety
+ ///
+ /// This function is unsafe because undefined behavior can result
+ /// if the caller does not ensure all of the following:
+ ///
+ /// * `ptr` must denote a block of memory currently allocated via
+ /// this allocator,
+ ///
+ /// * `layout` must *fit* that block of memory,
+ ///
+ /// * In addition to fitting the block of memory `layout`, the
+ /// alignment of the `layout` must match the alignment used
+ /// to allocate that block of memory.
+ unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout);
+
+ // == ALLOCATOR-SPECIFIC QUANTITIES AND LIMITS ==
+ // usable_size
+
+ /// Returns bounds on the guaranteed usable size of a successful
+ /// allocation created with the specified `layout`.
+ ///
+ /// In particular, if one has a memory block allocated via a given
+ /// allocator `a` and layout `k` where `a.usable_size(k)` returns
+ /// `(l, u)`, then one can pass that block to `a.dealloc()` with a
+ /// layout in the size range [l, u].
+ ///
+ /// (All implementors of `usable_size` must ensure that
+ /// `l <= k.size() <= u`)
+ ///
+ /// Both the lower- and upper-bounds (`l` and `u` respectively)
+ /// are provided, because an allocator based on size classes could
+ /// misbehave if one attempts to deallocate a block without
+ /// providing a correct value for its size (i.e., one within the
+ /// range `[l, u]`).
+ ///
+ /// Clients who wish to make use of excess capacity are encouraged
+ /// to use the `alloc_excess` and `realloc_excess` instead, as
+ /// this method is constrained to report conservative values that
+ /// serve as valid bounds for *all possible* allocation method
+ /// calls.
+ ///
+ /// However, for clients that do not wish to track the capacity
+ /// returned by `alloc_excess` locally, this method is likely to
+ /// produce useful results.
+ #[inline]
+ fn usable_size(&self, layout: &Layout) -> (usize, usize) {
+ (layout.size(), layout.size())
+ }
+
+ // == METHODS FOR MEMORY REUSE ==
+ // realloc. alloc_excess, realloc_excess
+
+ /// Returns a pointer suitable for holding data described by
+ /// a new layout with `layout`’s alignment and a size given
+ /// by `new_size`. To
+    /// accomplish this, it may extend or shrink the allocation
+ /// referenced by `ptr` to fit the new layout.
+ ///
+ /// If this returns `Ok`, then ownership of the memory block
+ /// referenced by `ptr` has been transferred to this
+ /// allocator. The memory may or may not have been freed, and
+ /// should be considered unusable (unless of course it was
+ /// transferred back to the caller again via the return value of
+ /// this method).
+ ///
+ /// If this method returns `Err`, then ownership of the memory
+ /// block has not been transferred to this allocator, and the
+ /// contents of the memory block are unaltered.
+ ///
+ /// # Safety
+ ///
+ /// This function is unsafe because undefined behavior can result
+ /// if the caller does not ensure all of the following:
+ ///
+ /// * `ptr` must be currently allocated via this allocator,
+ ///
+ /// * `layout` must *fit* the `ptr` (see above). (The `new_size`
+ /// argument need not fit it.)
+ ///
+ /// * `new_size` must be greater than zero.
+ ///
+ /// * `new_size`, when rounded up to the nearest multiple of `layout.align()`,
+ /// must not overflow (i.e. the rounded value must be less than `usize::MAX`).
+ ///
+ /// (Extension subtraits might provide more specific bounds on
+ /// behavior, e.g. guarantee a sentinel address or a null pointer
+ /// in response to a zero-size allocation request.)
+ ///
+ /// # Errors
+ ///
+ /// Returns `Err` only if the new layout
+ /// does not meet the allocator's size
+    /// and alignment constraints, or if reallocation
+ /// otherwise fails.
+ ///
+ /// Implementations are encouraged to return `Err` on memory
+ /// exhaustion rather than panicking or aborting, but this is not
+ /// a strict requirement. (Specifically: it is *legal* to
+ /// implement this trait atop an underlying native allocation
+ /// library that aborts on memory exhaustion.)
+ ///
+ /// Clients wishing to abort computation in response to a
+ /// reallocation error are encouraged to call the [`handle_alloc_error`] function,
+ /// rather than directly invoking `panic!` or similar.
+ ///
+ /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
+ unsafe fn realloc(
+ &mut self,
+ ptr: NonNull<u8>,
+ layout: Layout,
+ new_size: usize,
+ ) -> Result<NonNull<u8>, AllocErr> {
+ let old_size = layout.size();
+
+ if new_size >= old_size {
+ if let Ok(()) = self.grow_in_place(ptr, layout, new_size) {
+ return Ok(ptr);
+ }
+ } else if new_size < old_size {
+ if let Ok(()) = self.shrink_in_place(ptr, layout, new_size) {
+ return Ok(ptr);
+ }
+ }
+
+ // otherwise, fall back on alloc + copy + dealloc.
+ let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
+ let result = self.alloc(new_layout);
+ if let Ok(new_ptr) = result {
+ ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr(), cmp::min(old_size, new_size));
+ self.dealloc(ptr, layout);
+ }
+ result
+ }
+
+ /// Behaves like `alloc`, but also ensures that the contents
+ /// are set to zero before being returned.
+ ///
+ /// # Safety
+ ///
+ /// This function is unsafe for the same reasons that `alloc` is.
+ ///
+ /// # Errors
+ ///
+ /// Returning `Err` indicates that either memory is exhausted or
+ /// `layout` does not meet allocator's size or alignment
+ /// constraints, just as in `alloc`.
+ ///
+ /// Clients wishing to abort computation in response to an
+ /// allocation error are encouraged to call the [`handle_alloc_error`] function,
+ /// rather than directly invoking `panic!` or similar.
+ ///
+ /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
+ unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocErr> {
+ let size = layout.size();
+ let p = self.alloc(layout);
+ if let Ok(p) = p {
+ ptr::write_bytes(p.as_ptr(), 0, size);
+ }
+ p
+ }
+
+ /// Behaves like `alloc`, but also returns the whole size of
+ /// the returned block. For some `layout` inputs, like arrays, this
+ /// may include extra storage usable for additional data.
+ ///
+ /// # Safety
+ ///
+ /// This function is unsafe for the same reasons that `alloc` is.
+ ///
+ /// # Errors
+ ///
+ /// Returning `Err` indicates that either memory is exhausted or
+ /// `layout` does not meet allocator's size or alignment
+ /// constraints, just as in `alloc`.
+ ///
+ /// Clients wishing to abort computation in response to an
+ /// allocation error are encouraged to call the [`handle_alloc_error`] function,
+ /// rather than directly invoking `panic!` or similar.
+ ///
+ /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
+ unsafe fn alloc_excess(&mut self, layout: Layout) -> Result<Excess, AllocErr> {
+ let usable_size = self.usable_size(&layout);
+ self.alloc(layout).map(|p| Excess(p, usable_size.1))
+ }
+
+ /// Behaves like `realloc`, but also returns the whole size of
+ /// the returned block. For some `layout` inputs, like arrays, this
+ /// may include extra storage usable for additional data.
+ ///
+ /// # Safety
+ ///
+ /// This function is unsafe for the same reasons that `realloc` is.
+ ///
+ /// # Errors
+ ///
+ /// Returning `Err` indicates that either memory is exhausted or
+ /// `layout` does not meet allocator's size or alignment
+ /// constraints, just as in `realloc`.
+ ///
+ /// Clients wishing to abort computation in response to a
+ /// reallocation error are encouraged to call the [`handle_alloc_error`] function,
+ /// rather than directly invoking `panic!` or similar.
+ ///
+ /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
+ unsafe fn realloc_excess(
+ &mut self,
+ ptr: NonNull<u8>,
+ layout: Layout,
+ new_size: usize,
+ ) -> Result<Excess, AllocErr> {
+ let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
+ let usable_size = self.usable_size(&new_layout);
+ self.realloc(ptr, layout, new_size)
+ .map(|p| Excess(p, usable_size.1))
+ }
+
+ /// Attempts to extend the allocation referenced by `ptr` to fit `new_size`.
+ ///
+ /// If this returns `Ok`, then the allocator has asserted that the
+ /// memory block referenced by `ptr` now fits `new_size`, and thus can
+ /// be used to carry data of a layout of that size and same alignment as
+ /// `layout`. (The allocator is allowed to
+ /// expend effort to accomplish this, such as extending the memory block to
+ /// include successor blocks, or virtual memory tricks.)
+ ///
+ /// Regardless of what this method returns, ownership of the
+ /// memory block referenced by `ptr` has not been transferred, and
+ /// the contents of the memory block are unaltered.
+ ///
+ /// # Safety
+ ///
+ /// This function is unsafe because undefined behavior can result
+ /// if the caller does not ensure all of the following:
+ ///
+ /// * `ptr` must be currently allocated via this allocator,
+ ///
+ /// * `layout` must *fit* the `ptr` (see above); note the
+ /// `new_size` argument need not fit it,
+ ///
+ /// * `new_size` must not be less than `layout.size()`,
+ ///
+ /// # Errors
+ ///
+ /// Returns `Err(CannotReallocInPlace)` when the allocator is
+ /// unable to assert that the memory block referenced by `ptr`
+ /// could fit `layout`.
+ ///
+ /// Note that one cannot pass `CannotReallocInPlace` to the `handle_alloc_error`
+ /// function; clients are expected either to be able to recover from
+ /// `grow_in_place` failures without aborting, or to fall back on
+ /// another reallocation method before resorting to an abort.
+ unsafe fn grow_in_place(
+ &mut self,
+ ptr: NonNull<u8>,
+ layout: Layout,
+ new_size: usize,
+ ) -> Result<(), CannotReallocInPlace> {
+ let _ = ptr; // this default implementation doesn't care about the actual address.
+ debug_assert!(new_size >= layout.size());
+ let (_l, u) = self.usable_size(&layout);
+ // _l <= layout.size() [guaranteed by usable_size()]
+ // layout.size() <= new_layout.size() [required by this method]
+ // Growth succeeds in place only when the requested size still fits within
+ // the upper usable bound reported for the original layout.
+ if new_size <= u {
+ Ok(())
+ } else {
+ Err(CannotReallocInPlace)
+ }
+ }
+
+ /// Attempts to shrink the allocation referenced by `ptr` to fit `new_size`.
+ ///
+ /// If this returns `Ok`, then the allocator has asserted that the
+ /// memory block referenced by `ptr` now fits `new_size`, and
+ /// thus can only be used to carry data of that smaller
+ /// layout. (The allocator is allowed to take advantage of this,
+ /// carving off portions of the block for reuse elsewhere.) The
+ /// truncated contents of the block within the smaller layout are
+ /// unaltered, and ownership of block has not been transferred.
+ ///
+ /// If this returns `Err`, then the memory block is considered to
+ /// still represent the original (larger) `layout`. None of the
+ /// block has been carved off for reuse elsewhere, ownership of
+ /// the memory block has not been transferred, and the contents of
+ /// the memory block are unaltered.
+ ///
+ /// # Safety
+ ///
+ /// This function is unsafe because undefined behavior can result
+ /// if the caller does not ensure all of the following:
+ ///
+ /// * `ptr` must be currently allocated via this allocator,
+ ///
+ /// * `layout` must *fit* the `ptr` (see above); note the
+ /// `new_size` argument need not fit it,
+ ///
+ /// * `new_size` must not be greater than `layout.size()`
+ /// (and must be greater than zero),
+ ///
+ /// # Errors
+ ///
+ /// Returns `Err(CannotReallocInPlace)` when the allocator is
+ /// unable to assert that the memory block referenced by `ptr`
+ /// could fit `layout`.
+ ///
+ /// Note that one cannot pass `CannotReallocInPlace` to the `handle_alloc_error`
+ /// function; clients are expected either to be able to recover from
+ /// `shrink_in_place` failures without aborting, or to fall back
+ /// on another reallocation method before resorting to an abort.
+ unsafe fn shrink_in_place(
+ &mut self,
+ ptr: NonNull<u8>,
+ layout: Layout,
+ new_size: usize,
+ ) -> Result<(), CannotReallocInPlace> {
+ let _ = ptr; // this default implementation doesn't care about the actual address.
+ debug_assert!(new_size <= layout.size());
+ let (l, _u) = self.usable_size(&layout);
+ // layout.size() <= _u [guaranteed by usable_size()]
+ // new_layout.size() <= layout.size() [required by this method]
+ // Shrinking succeeds in place only when the target size stays at or above
+ // the lower usable bound for the original layout.
+ if l <= new_size {
+ Ok(())
+ } else {
+ Err(CannotReallocInPlace)
+ }
+ }
+
+ // == COMMON USAGE PATTERNS ==
+ // alloc_one, dealloc_one, alloc_array, realloc_array. dealloc_array
+
+ /// Allocates a block suitable for holding an instance of `T`.
+ ///
+ /// Captures a common usage pattern for allocators.
+ ///
+ /// The returned block is suitable for passing to the
+ /// `alloc`/`realloc` methods of this allocator.
+ ///
+ /// Note to implementors: If this returns `Ok(ptr)`, then `ptr`
+ /// must be considered "currently allocated" and must be
+ /// acceptable input to methods such as `realloc` or `dealloc`,
+ /// *even if* `T` is a zero-sized type. In other words, if your
+ /// `Alloc` implementation overrides this method in a manner
+ /// that can return a zero-sized `ptr`, then all reallocation and
+ /// deallocation methods need to be similarly overridden to accept
+ /// such values as input.
+ ///
+ /// # Errors
+ ///
+ /// Returning `Err` indicates that either memory is exhausted or
+ /// `T` does not meet allocator's size or alignment constraints.
+ ///
+ /// For zero-sized `T`, may return either of `Ok` or `Err`, but
+ /// will *not* yield undefined behavior.
+ ///
+ /// Clients wishing to abort computation in response to an
+ /// allocation error are encouraged to call the [`handle_alloc_error`] function,
+ /// rather than directly invoking `panic!` or similar.
+ ///
+ /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
+ fn alloc_one<T>(&mut self) -> Result<NonNull<T>, AllocErr>
+ where
+ Self: Sized,
+ {
+ // This default refuses zero-sized types: nothing is allocated for them,
+ // so they are reported as `Err` rather than handed a dangling pointer.
+ match Layout::new::<T>() {
+ layout if layout.size() > 0 => unsafe { self.alloc(layout).map(NonNull::cast) },
+ _ => Err(AllocErr),
+ }
+ }
+
+ /// Deallocates a block suitable for holding an instance of `T`.
+ ///
+ /// The given block must have been produced by this allocator,
+ /// and must be suitable for storing a `T` (in terms of alignment
+ /// as well as minimum and maximum size); otherwise yields
+ /// undefined behavior.
+ ///
+ /// Captures a common usage pattern for allocators.
+ ///
+ /// # Safety
+ ///
+ /// This function is unsafe because undefined behavior can result
+ /// if the caller does not ensure both:
+ ///
+ /// * `ptr` must denote a block of memory currently allocated via this allocator
+ ///
+ /// * the layout of `T` must *fit* that block of memory.
+ unsafe fn dealloc_one<T>(&mut self, ptr: NonNull<T>)
+ where
+ Self: Sized,
+ {
+ let layout = Layout::new::<T>();
+ // Zero-sized types were never actually allocated, so there is nothing to free.
+ if layout.size() != 0 {
+ self.dealloc(ptr.cast(), layout);
+ }
+ }
+
+ /// Allocates a block suitable for holding `n` instances of `T`.
+ ///
+ /// Captures a common usage pattern for allocators.
+ ///
+ /// The returned block is suitable for passing to the
+ /// `alloc`/`realloc` methods of this allocator.
+ ///
+ /// Note to implementors: If this returns `Ok(ptr)`, then `ptr`
+ /// must be considered "currently allocated" and must be
+ /// acceptable input to methods such as `realloc` or `dealloc`,
+ /// *even if* `T` is a zero-sized type. In other words, if your
+ /// `Alloc` implementation overrides this method in a manner
+ /// that can return a zero-sized `ptr`, then all reallocation and
+ /// deallocation methods need to be similarly overridden to accept
+ /// such values as input.
+ ///
+ /// # Errors
+ ///
+ /// Returning `Err` indicates that either memory is exhausted or
+ /// `[T; n]` does not meet allocator's size or alignment
+ /// constraints.
+ ///
+ /// For zero-sized `T` or `n == 0`, may return either of `Ok` or
+ /// `Err`, but will *not* yield undefined behavior.
+ ///
+ /// Always returns `Err` on arithmetic overflow.
+ ///
+ /// Clients wishing to abort computation in response to an
+ /// allocation error are encouraged to call the [`handle_alloc_error`] function,
+ /// rather than directly invoking `panic!` or similar.
+ ///
+ /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
+ fn alloc_array<T>(&mut self, n: usize) -> Result<NonNull<T>, AllocErr>
+ where
+ Self: Sized,
+ {
+ // `Layout::array` fails on arithmetic overflow; a successful but empty
+ // layout (zero-sized `T` or `n == 0`) is also rejected by this default.
+ if let Ok(layout) = Layout::array::<T>(n) {
+ if layout.size() > 0 {
+ return unsafe { self.alloc(layout).map(NonNull::cast) };
+ }
+ }
+ Err(AllocErr)
+ }
+
+ /// Reallocates a block previously suitable for holding `n_old`
+ /// instances of `T`, returning a block suitable for holding
+ /// `n_new` instances of `T`.
+ ///
+ /// Captures a common usage pattern for allocators.
+ ///
+ /// The returned block is suitable for passing to the
+ /// `alloc`/`realloc` methods of this allocator.
+ ///
+ /// # Safety
+ ///
+ /// This function is unsafe because undefined behavior can result
+ /// if the caller does not ensure all of the following:
+ ///
+ /// * `ptr` must be currently allocated via this allocator,
+ ///
+ /// * the layout of `[T; n_old]` must *fit* that block of memory.
+ ///
+ /// # Errors
+ ///
+ /// Returning `Err` indicates that either memory is exhausted or
+ /// `[T; n_new]` does not meet allocator's size or alignment
+ /// constraints.
+ ///
+ /// For zero-sized `T` or `n_new == 0`, may return either of `Ok` or
+ /// `Err`, but will *not* yield undefined behavior.
+ ///
+ /// Always returns `Err` on arithmetic overflow.
+ ///
+ /// Clients wishing to abort computation in response to a
+ /// reallocation error are encouraged to call the [`handle_alloc_error`] function,
+ /// rather than directly invoking `panic!` or similar.
+ ///
+ /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
+ unsafe fn realloc_array<T>(
+ &mut self,
+ ptr: NonNull<T>,
+ n_old: usize,
+ n_new: usize,
+ ) -> Result<NonNull<T>, AllocErr>
+ where
+ Self: Sized,
+ {
+ // Both array layouts must be computable without overflow and non-empty
+ // before the underlying `realloc` is attempted.
+ match (Layout::array::<T>(n_old), Layout::array::<T>(n_new)) {
+ (Ok(ref k_old), Ok(ref k_new)) if k_old.size() > 0 && k_new.size() > 0 => {
+ // Both layouts describe arrays of the same `T`, so their
+ // alignments must agree.
+ debug_assert!(k_old.align() == k_new.align());
+ self.realloc(ptr.cast(), k_old.clone(), k_new.size())
+ .map(NonNull::cast)
+ }
+ _ => Err(AllocErr),
+ }
+ }
+
+ /// Deallocates a block suitable for holding `n` instances of `T`.
+ ///
+ /// Captures a common usage pattern for allocators.
+ ///
+ /// # Safety
+ ///
+ /// This function is unsafe because undefined behavior can result
+ /// if the caller does not ensure both:
+ ///
+ /// * `ptr` must denote a block of memory currently allocated via this allocator
+ ///
+ /// * the layout of `[T; n]` must *fit* that block of memory.
+ ///
+ /// # Errors
+ ///
+ /// Returning `Err` indicates that either `[T; n]` or the given
+ /// memory block does not meet allocator's size or alignment
+ /// constraints.
+ ///
+ /// Always returns `Err` on arithmetic overflow.
+ unsafe fn dealloc_array<T>(&mut self, ptr: NonNull<T>, n: usize) -> Result<(), AllocErr>
+ where
+ Self: Sized,
+ {
+ // Overflowing or empty array layouts are rejected; only a non-empty
+ // layout corresponds to a block that was actually allocated.
+ match Layout::array::<T>(n) {
+ Ok(layout) if layout.size() != 0 => {
+ self.dealloc(ptr.cast(), layout);
+ Ok(())
+ }
+ _ => Err(AllocErr),
+ }
+ }
+}
diff --git a/third_party/rust/bumpalo/src/boxed.rs b/third_party/rust/bumpalo/src/boxed.rs
new file mode 100644
index 0000000000..af0737cfbb
--- /dev/null
+++ b/third_party/rust/bumpalo/src/boxed.rs
@@ -0,0 +1,683 @@
+//! A pointer type for bump allocation.
+//!
+//! [`Box<'a, T>`] provides the simplest form of
+//! bump allocation in `bumpalo`. Boxes provide ownership for this allocation, and
+//! drop their contents when they go out of scope.
+//!
+//! # Examples
+//!
+//! Move a value from the stack to the heap by creating a [`Box`]:
+//!
+//! ```
+//! use bumpalo::{Bump, boxed::Box};
+//!
+//! let b = Bump::new();
+//!
+//! let val: u8 = 5;
+//! let boxed: Box<u8> = Box::new_in(val, &b);
+//! ```
+//!
+//! Move a value from a [`Box`] back to the stack by [dereferencing]:
+//!
+//! ```
+//! use bumpalo::{Bump, boxed::Box};
+//!
+//! let b = Bump::new();
+//!
+//! let boxed: Box<u8> = Box::new_in(5, &b);
+//! let val: u8 = *boxed;
+//! ```
+//!
+//! Running [`Drop`] implementations on bump-allocated values:
+//!
+//! ```
+//! use bumpalo::{Bump, boxed::Box};
+//! use std::sync::atomic::{AtomicUsize, Ordering};
+//!
+//! static NUM_DROPPED: AtomicUsize = AtomicUsize::new(0);
+//!
+//! struct CountDrops;
+//!
+//! impl Drop for CountDrops {
+//! fn drop(&mut self) {
+//! NUM_DROPPED.fetch_add(1, Ordering::SeqCst);
+//! }
+//! }
+//!
+//! // Create a new bump arena.
+//! let bump = Bump::new();
+//!
+//! // Create a `CountDrops` inside the bump arena.
+//! let mut c = Box::new_in(CountDrops, &bump);
+//!
+//! // No `CountDrops` have been dropped yet.
+//! assert_eq!(NUM_DROPPED.load(Ordering::SeqCst), 0);
+//!
+//! // Drop our `Box<CountDrops>`.
+//! drop(c);
+//!
+//! // Its `Drop` implementation was run, and so `NUM_DROPPED` has been incremented.
+//! assert_eq!(NUM_DROPPED.load(Ordering::SeqCst), 1);
+//! ```
+//!
+//! Creating a recursive data structure:
+//!
+//! ```
+//! use bumpalo::{Bump, boxed::Box};
+//!
+//! let b = Bump::new();
+//!
+//! #[derive(Debug)]
+//! enum List<'a, T> {
+//! Cons(T, Box<'a, List<'a, T>>),
+//! Nil,
+//! }
+//!
+//! let list: List<i32> = List::Cons(1, Box::new_in(List::Cons(2, Box::new_in(List::Nil, &b)), &b));
+//! println!("{:?}", list);
+//! ```
+//!
+//! This will print `Cons(1, Cons(2, Nil))`.
+//!
+//! Recursive structures must be boxed, because if the definition of `Cons`
+//! looked like this:
+//!
+//! ```compile_fail,E0072
+//! # enum List<T> {
+//! Cons(T, List<T>),
+//! # }
+//! ```
+//!
+//! It wouldn't work. This is because the size of a `List` depends on how many
+//! elements are in the list, and so we don't know how much memory to allocate
+//! for a `Cons`. By introducing a [`Box<'a, T>`], which has a defined size, we know how
+//! big `Cons` needs to be.
+//!
+//! # Memory layout
+//!
+//! For non-zero-sized values, a [`Box`] will use the provided [`Bump`] allocator for
+//! its allocation. It is valid to convert both ways between a [`Box`] and a
+//! pointer allocated with the [`Bump`] allocator, given that the
+//! [`Layout`] used with the allocator is correct for the type. More precisely,
+//! a `value: *mut T` that has been allocated with the [`Bump`] allocator
+//! with `Layout::for_value(&*value)` may be converted into a box using
+//! [`Box::<T>::from_raw(value)`]. Conversely, the memory backing a `value: *mut
+//! T` obtained from [`Box::<T>::into_raw`] will be deallocated by the
+//! [`Bump`] allocator with [`Layout::for_value(&*value)`].
+//!
+//! Note that the roundtrip `Box::from_raw(Box::into_raw(b))` loses the lifetime bound to the
+//! [`Bump`] immutable borrow which guarantees that the allocator will not be reset
+//! and memory will not be freed.
+//!
+//! [dereferencing]: https://doc.rust-lang.org/std/ops/trait.Deref.html
+//! [`Box`]: struct.Box.html
+//! [`Box<'a, T>`]: struct.Box.html
+//! [`Box::<T>::from_raw(value)`]: struct.Box.html#method.from_raw
+//! [`Box::<T>::into_raw`]: struct.Box.html#method.into_raw
+//! [`Bump`]: ../struct.Bump.html
+//! [`Drop`]: https://doc.rust-lang.org/std/ops/trait.Drop.html
+//! [`Layout`]: https://doc.rust-lang.org/std/alloc/struct.Layout.html
+//! [`Layout::for_value(&*value)`]: https://doc.rust-lang.org/std/alloc/struct.Layout.html#method.for_value
+
+use {
+ crate::Bump,
+ {
+ core::{
+ any::Any,
+ borrow,
+ cmp::Ordering,
+ convert::TryFrom,
+ future::Future,
+ hash::{Hash, Hasher},
+ iter::FusedIterator,
+ mem::ManuallyDrop,
+ ops::{Deref, DerefMut},
+ pin::Pin,
+ task::{Context, Poll},
+ },
+ core_alloc::fmt,
+ },
+};
+
+/// An owned pointer to a bump-allocated `T` value, that runs `Drop`
+/// implementations.
+///
+/// See the [module-level documentation][crate::boxed] for more details.
+#[repr(transparent)]
+// Invariant: the inner `&'a mut T` is an exclusive reference to a value this
+// `Box` logically owns. Dropping the `Box` runs `T`'s destructor in place but
+// never frees the backing memory — the arena reclaims it wholesale.
+pub struct Box<'a, T: ?Sized>(&'a mut T);
+
+impl<'a, T> Box<'a, T> {
+ /// Allocates memory on the heap and then places `x` into it.
+ ///
+ /// This doesn't actually allocate if `T` is zero-sized.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bumpalo::{Bump, boxed::Box};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// let five = Box::new_in(5, &b);
+ /// ```
+ #[inline(always)]
+ pub fn new_in(x: T, a: &'a Bump) -> Box<'a, T> {
+ // `Bump::alloc` hands back `&'a mut T`, which is exactly the invariant
+ // the `Box` wrapper maintains.
+ Box(a.alloc(x))
+ }
+
+ /// Constructs a new `Pin<Box<T>>`. If `T` does not implement `Unpin`, then
+ /// `x` will be pinned in memory and unable to be moved.
+ #[inline(always)]
+ pub fn pin_in(x: T, a: &'a Bump) -> Pin<Box<'a, T>> {
+ // Pinning goes through the `From<Box> for Pin<Box>` impl below.
+ Box(a.alloc(x)).into()
+ }
+
+ /// Consumes the `Box`, returning the wrapped value.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bumpalo::{Bump, boxed::Box};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// let hello = Box::new_in("hello".to_owned(), &b);
+ /// assert_eq!(Box::into_inner(hello), "hello");
+ /// ```
+ pub fn into_inner(b: Box<'a, T>) -> T {
+ // `Box::into_raw` returns a pointer that is properly aligned and non-null.
+ // The underlying `Bump` only frees the memory, but won't call the destructor.
+ // `into_raw` also disarms the `Box`'s own `Drop`, so the value is moved
+ // out exactly once; the arena bytes are simply abandoned.
+ unsafe { core::ptr::read(Box::into_raw(b)) }
+ }
+}
+
+impl<'a, T: ?Sized> Box<'a, T> {
+ /// Constructs a box from a raw pointer.
+ ///
+ /// After calling this function, the raw pointer is owned by the
+ /// resulting `Box`. Specifically, the `Box` destructor will call
+ /// the destructor of `T` and free the allocated memory. For this
+ /// to be safe, the memory must have been allocated in accordance
+ /// with the memory layout used by `Box` .
+ ///
+ /// # Safety
+ ///
+ /// This function is unsafe because improper use may lead to
+ /// memory problems. For example, a double-free may occur if the
+ /// function is called twice on the same raw pointer.
+ ///
+ /// # Examples
+ ///
+ /// Recreate a `Box` which was previously converted to a raw pointer
+ /// using [`Box::into_raw`]:
+ /// ```
+ /// use bumpalo::{Bump, boxed::Box};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// let x = Box::new_in(5, &b);
+ /// let ptr = Box::into_raw(x);
+ /// let x = unsafe { Box::from_raw(ptr) }; // Note that new `x`'s lifetime is unbound. It must be bound to the `b` immutable borrow before `b` is reset.
+ /// ```
+ /// Manually create a `Box` from scratch by using the bump allocator:
+ /// ```
+ /// use std::alloc::{alloc, Layout};
+ /// use bumpalo::{Bump, boxed::Box};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// unsafe {
+ /// let ptr = b.alloc_layout(Layout::new::<i32>()).as_ptr() as *mut i32;
+ /// *ptr = 5;
+ /// let x = Box::from_raw(ptr); // Note that `x`'s lifetime is unbound. It must be bound to the `b` immutable borrow before `b` is reset.
+ /// }
+ /// ```
+ #[inline]
+ pub unsafe fn from_raw(raw: *mut T) -> Self {
+ // Re-borrowing `*raw` as `&mut` asserts exclusive ownership of the
+ // pointee for the (caller-chosen, unbound) lifetime `'a`.
+ Box(&mut *raw)
+ }
+
+ /// Consumes the `Box`, returning a wrapped raw pointer.
+ ///
+ /// The pointer will be properly aligned and non-null.
+ ///
+ /// After calling this function, the caller is responsible for the
+ /// value previously managed by the `Box`. In particular, the
+ /// caller should properly destroy `T`. The easiest way to
+ /// do this is to convert the raw pointer back into a `Box` with the
+ /// [`Box::from_raw`] function, allowing the `Box` destructor to perform
+ /// the cleanup.
+ ///
+ /// Note: this is an associated function, which means that you have
+ /// to call it as `Box::into_raw(b)` instead of `b.into_raw()`. This
+ /// is so that there is no conflict with a method on the inner type.
+ ///
+ /// # Examples
+ ///
+ /// Converting the raw pointer back into a `Box` with [`Box::from_raw`]
+ /// for automatic cleanup:
+ /// ```
+ /// use bumpalo::{Bump, boxed::Box};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// let x = Box::new_in(String::from("Hello"), &b);
+ /// let ptr = Box::into_raw(x);
+ /// let x = unsafe { Box::from_raw(ptr) }; // Note that new `x`'s lifetime is unbound. It must be bound to the `b` immutable borrow before `b` is reset.
+ /// ```
+ /// Manual cleanup by explicitly running the destructor:
+ /// ```
+ /// use std::ptr;
+ /// use bumpalo::{Bump, boxed::Box};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// let mut x = Box::new_in(String::from("Hello"), &b);
+ /// let p = Box::into_raw(x);
+ /// unsafe {
+ /// ptr::drop_in_place(p);
+ /// }
+ /// ```
+ #[inline]
+ pub fn into_raw(b: Box<'a, T>) -> *mut T {
+ // `ManuallyDrop` suppresses this `Box`'s own `Drop`, so the value's
+ // destructor will NOT run here — ownership passes to the caller.
+ let mut b = ManuallyDrop::new(b);
+ b.deref_mut().0 as *mut T
+ }
+
+ /// Consumes and leaks the `Box`, returning a mutable reference,
+ /// `&'a mut T`. Note that the type `T` must outlive the chosen lifetime
+ /// `'a`. If the type has only static references, or none at all, then this
+ /// may be chosen to be `'static`.
+ ///
+ /// This function is mainly useful for data that lives for the remainder of
+ /// the program's life. Dropping the returned reference will cause a memory
+ /// leak. If this is not acceptable, the reference should first be wrapped
+ /// with the [`Box::from_raw`] function producing a `Box`. This `Box` can
+ /// then be dropped which will properly destroy `T` and release the
+ /// allocated memory.
+ ///
+ /// Note: this is an associated function, which means that you have
+ /// to call it as `Box::leak(b)` instead of `b.leak()`. This
+ /// is so that there is no conflict with a method on the inner type.
+ ///
+ /// # Examples
+ ///
+ /// Simple usage:
+ ///
+ /// ```
+ /// use bumpalo::{Bump, boxed::Box};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// let x = Box::new_in(41, &b);
+ /// let reference: &mut usize = Box::leak(x);
+ /// *reference += 1;
+ /// assert_eq!(*reference, 42);
+ /// ```
+ ///
+ ///```
+ /// # #[cfg(feature = "collections")]
+ /// # {
+ /// use bumpalo::{Bump, boxed::Box, vec};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// let x = vec![in &b; 1, 2, 3].into_boxed_slice();
+ /// let reference = Box::leak(x);
+ /// reference[0] = 4;
+ /// assert_eq!(*reference, [4, 2, 3]);
+ /// # }
+ ///```
+ #[inline]
+ pub fn leak(b: Box<'a, T>) -> &'a mut T {
+ // SAFETY: `into_raw` relinquished ownership and the pointer is valid
+ // for `'a`, so handing out `&'a mut T` is sound; the destructor is
+ // intentionally never run.
+ unsafe { &mut *Box::into_raw(b) }
+ }
+}
+
+impl<'a, T: ?Sized> Drop for Box<'a, T> {
+ fn drop(&mut self) {
+ unsafe {
+ // `Box` owns value of `T`, but not memory behind it.
+ // SAFETY: `self.0` is an exclusive reference to a live, initialized
+ // value that nothing else will drop; the arena reclaims the bytes
+ // separately, so only the destructor runs here.
+ core::ptr::drop_in_place(self.0);
+ }
+ }
+}
+
+impl<'a, T> Default for Box<'a, [T]> {
+ fn default() -> Box<'a, [T]> {
+ // It should be OK to `drop_in_place` empty slice of anything.
+ // An empty slice borrows no arena memory, so no `Bump` is required here.
+ Box(&mut [])
+ }
+}
+
+impl<'a> Default for Box<'a, str> {
+ fn default() -> Box<'a, str> {
+ // Empty slice is valid string.
+ // It should be OK to `drop_in_place` empty str.
+ // SAFETY: casting an empty `*mut [u8]` to `*mut str` is sound because
+ // the empty byte sequence is valid UTF-8.
+ unsafe { Box::from_raw(Box::into_raw(Box::<[u8]>::default()) as *mut str) }
+ }
+}
+
+impl<'a, 'b, T: ?Sized + PartialEq> PartialEq<Box<'b, T>> for Box<'a, T> {
+ // Equality compares the pointed-to values, never the addresses.
+ #[inline]
+ fn eq(&self, other: &Box<'b, T>) -> bool {
+ (**self).eq(&**other)
+ }
+ #[inline]
+ fn ne(&self, other: &Box<'b, T>) -> bool {
+ (**self).ne(&**other)
+ }
+}
+
+impl<'a, 'b, T: ?Sized + PartialOrd> PartialOrd<Box<'b, T>> for Box<'a, T> {
+ // Every comparison is delegated to the pointed-to values.
+ #[inline]
+ fn partial_cmp(&self, other: &Box<'b, T>) -> Option<Ordering> {
+ (**self).partial_cmp(&**other)
+ }
+ #[inline]
+ fn lt(&self, other: &Box<'b, T>) -> bool {
+ (**self).lt(&**other)
+ }
+ #[inline]
+ fn le(&self, other: &Box<'b, T>) -> bool {
+ (**self).le(&**other)
+ }
+ #[inline]
+ fn ge(&self, other: &Box<'b, T>) -> bool {
+ (**self).ge(&**other)
+ }
+ #[inline]
+ fn gt(&self, other: &Box<'b, T>) -> bool {
+ (**self).gt(&**other)
+ }
+}
+
+impl<'a, T: ?Sized + Ord> Ord for Box<'a, T> {
+ // The total order on boxes is the total order on their contents.
+ #[inline]
+ fn cmp(&self, other: &Box<'a, T>) -> Ordering {
+ (**self).cmp(&**other)
+ }
+}
+
+// Marker only: `PartialEq` already delegates to `T`, and `T: Eq` guarantees
+// that relation is a total equivalence.
+impl<'a, T: ?Sized + Eq> Eq for Box<'a, T> {}
+
+impl<'a, T: ?Sized + Hash> Hash for Box<'a, T> {
+ // Hash the pointed-to value (not the pointer), so a boxed value hashes
+ // identically to an unboxed one.
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ (**self).hash(state);
+ }
+}
+
+// Forward every `Hasher` method to the boxed hasher, so wrapping a hasher in
+// a `Box` does not change the hash stream it produces.
+impl<'a, T: ?Sized + Hasher> Hasher for Box<'a, T> {
+ fn finish(&self) -> u64 {
+ (**self).finish()
+ }
+ fn write(&mut self, bytes: &[u8]) {
+ (**self).write(bytes)
+ }
+ fn write_u8(&mut self, i: u8) {
+ (**self).write_u8(i)
+ }
+ fn write_u16(&mut self, i: u16) {
+ (**self).write_u16(i)
+ }
+ fn write_u32(&mut self, i: u32) {
+ (**self).write_u32(i)
+ }
+ fn write_u64(&mut self, i: u64) {
+ (**self).write_u64(i)
+ }
+ fn write_u128(&mut self, i: u128) {
+ (**self).write_u128(i)
+ }
+ fn write_usize(&mut self, i: usize) {
+ (**self).write_usize(i)
+ }
+ fn write_i8(&mut self, i: i8) {
+ (**self).write_i8(i)
+ }
+ fn write_i16(&mut self, i: i16) {
+ (**self).write_i16(i)
+ }
+ fn write_i32(&mut self, i: i32) {
+ (**self).write_i32(i)
+ }
+ fn write_i64(&mut self, i: i64) {
+ (**self).write_i64(i)
+ }
+ fn write_i128(&mut self, i: i128) {
+ (**self).write_i128(i)
+ }
+ fn write_isize(&mut self, i: isize) {
+ (**self).write_isize(i)
+ }
+}
+
+impl<'a, T: ?Sized> From<Box<'a, T>> for Pin<Box<'a, T>> {
+ /// Converts a `Box<T>` into a `Pin<Box<T>>`.
+ ///
+ /// This conversion does not allocate on the heap and happens in place.
+ fn from(boxed: Box<'a, T>) -> Self {
+ // It's not possible to move or replace the insides of a `Pin<Box<T>>`
+ // when `T: !Unpin`, so it's safe to pin it directly without any
+ // additional requirements.
+ // (The `Box` itself remains movable; only the pointee is pinned.)
+ unsafe { Pin::new_unchecked(boxed) }
+ }
+}
+
+impl<'a> Box<'a, dyn Any> {
+ #[inline]
+ /// Attempt to downcast the box to a concrete type.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::any::Any;
+ ///
+ /// fn print_if_string(value: Box<dyn Any>) {
+ /// if let Ok(string) = value.downcast::<String>() {
+ /// println!("String ({}): {}", string.len(), string);
+ /// }
+ /// }
+ ///
+ /// let my_string = "Hello World".to_string();
+ /// print_if_string(Box::new(my_string));
+ /// print_if_string(Box::new(0i8));
+ /// ```
+ pub fn downcast<T: Any>(self) -> Result<Box<'a, T>, Box<'a, dyn Any>> {
+ if self.is::<T>() {
+ unsafe {
+ // SAFETY: `is::<T>()` just confirmed the erased type is `T`,
+ // so casting the thin-ned pointer back to `*mut T` is sound.
+ let raw: *mut dyn Any = Box::into_raw(self);
+ Ok(Box::from_raw(raw as *mut T))
+ }
+ } else {
+ Err(self)
+ }
+ }
+}
+
+impl<'a> Box<'a, dyn Any + Send> {
+ #[inline]
+ /// Attempt to downcast the box to a concrete type.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::any::Any;
+ ///
+ /// fn print_if_string(value: Box<dyn Any + Send>) {
+ /// if let Ok(string) = value.downcast::<String>() {
+ /// println!("String ({}): {}", string.len(), string);
+ /// }
+ /// }
+ ///
+ /// let my_string = "Hello World".to_string();
+ /// print_if_string(Box::new(my_string));
+ /// print_if_string(Box::new(0i8));
+ /// ```
+ pub fn downcast<T: Any>(self) -> Result<Box<'a, T>, Box<'a, dyn Any + Send>> {
+ if self.is::<T>() {
+ unsafe {
+ // SAFETY: `is::<T>()` just confirmed the erased type is `T`,
+ // so casting the pointer back to `*mut T` is sound.
+ let raw: *mut (dyn Any + Send) = Box::into_raw(self);
+ Ok(Box::from_raw(raw as *mut T))
+ }
+ } else {
+ Err(self)
+ }
+ }
+}
+
+impl<'a, T: fmt::Display + ?Sized> fmt::Display for Box<'a, T> {
+ // A box displays exactly like the value it owns.
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ <T as fmt::Display>::fmt(&**self, f)
+ }
+}
+
+impl<'a, T: fmt::Debug + ?Sized> fmt::Debug for Box<'a, T> {
+ // Debug-format the owned value, not the wrapper.
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ <T as fmt::Debug>::fmt(&**self, f)
+ }
+}
+
+impl<'a, T: ?Sized> fmt::Pointer for Box<'a, T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ // It's not possible to extract the inner Uniq directly from the Box,
+ // instead we cast it to a *const which aliases the Unique
+ // (i.e. we print the address of the boxed value, not the wrapper).
+ let ptr: *const T = &**self;
+ fmt::Pointer::fmt(&ptr, f)
+ }
+}
+
+impl<'a, T: ?Sized> Deref for Box<'a, T> {
+ type Target = T;
+
+ // Reborrow the owned `&mut T` as a shared reference.
+ fn deref(&self) -> &T {
+ &*self.0
+ }
+}
+
+impl<'a, T: ?Sized> DerefMut for Box<'a, T> {
+ // `&mut self` guarantees exclusivity, so the inner `&mut T` can be
+ // handed out directly.
+ fn deref_mut(&mut self) -> &mut T {
+ self.0
+ }
+}
+
+// Forward iteration to the boxed iterator.
+impl<'a, I: Iterator + ?Sized> Iterator for Box<'a, I> {
+ type Item = I::Item;
+ fn next(&mut self) -> Option<I::Item> {
+ (**self).next()
+ }
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (**self).size_hint()
+ }
+ fn nth(&mut self, n: usize) -> Option<I::Item> {
+ (**self).nth(n)
+ }
+ // `last` takes `self` by value, which `?Sized` delegation cannot forward;
+ // instead the box is consumed by folding, keeping only the latest item.
+ fn last(self) -> Option<I::Item> {
+ #[inline]
+ fn some<T>(_: Option<T>, x: T) -> Option<T> {
+ Some(x)
+ }
+ self.fold(None, some)
+ }
+}
+
+// Forward reverse iteration to the boxed iterator.
+impl<'a, I: DoubleEndedIterator + ?Sized> DoubleEndedIterator for Box<'a, I> {
+ fn next_back(&mut self) -> Option<I::Item> {
+ (**self).next_back()
+ }
+ fn nth_back(&mut self, n: usize) -> Option<I::Item> {
+ (**self).nth_back(n)
+ }
+}
+// Forward the exact length to the boxed iterator.
+impl<'a, I: ExactSizeIterator + ?Sized> ExactSizeIterator for Box<'a, I> {
+ fn len(&self) -> usize {
+ (**self).len()
+ }
+}
+
+// Marker only: forwarding `next` preserves the fused-ness of the inner iterator.
+impl<'a, I: FusedIterator + ?Sized> FusedIterator for Box<'a, I> {}
+
+#[cfg(feature = "collections")]
+impl<'a, A> Box<'a, [A]> {
+ /// Creates a value from an iterator.
+ /// This method is an adapted version of [`FromIterator::from_iter`][from_iter].
+ /// It cannot be made as that trait implementation given different signature.
+ ///
+ /// [from_iter]: https://doc.rust-lang.org/std/iter/trait.FromIterator.html#tymethod.from_iter
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ /// ```
+ /// use bumpalo::{Bump, boxed::Box, vec};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// let five_fives = std::iter::repeat(5).take(5);
+ /// let slice = Box::from_iter_in(five_fives, &b);
+ /// assert_eq!(vec![in &b; 5, 5, 5, 5, 5], &*slice);
+ /// ```
+ pub fn from_iter_in<T: IntoIterator<Item = A>>(iter: T, a: &'a Bump) -> Self {
+ use crate::collections::Vec;
+ // Collect into an arena-allocated `Vec` first, then convert it into a
+ // boxed slice owned by the same arena.
+ let mut vec = Vec::new_in(a);
+ vec.extend(iter);
+ vec.into_boxed_slice()
+ }
+}
+
+impl<'a, T: ?Sized> borrow::Borrow<T> for Box<'a, T> {
+ // Borrowing a box yields a shared reference to its contents.
+ fn borrow(&self) -> &T {
+ &*self.0
+ }
+}
+
+impl<'a, T: ?Sized> borrow::BorrowMut<T> for Box<'a, T> {
+ // Mutably borrowing a box reborrows its owned reference.
+ fn borrow_mut(&mut self) -> &mut T {
+ &mut *self.0
+ }
+}
+
+impl<'a, T: ?Sized> AsRef<T> for Box<'a, T> {
+ // A box can always be viewed as its contents.
+ fn as_ref(&self) -> &T {
+ &*self.0
+ }
+}
+
+impl<'a, T: ?Sized> AsMut<T> for Box<'a, T> {
+ // A box can always be viewed mutably as its contents.
+ fn as_mut(&mut self) -> &mut T {
+ &mut *self.0
+ }
+}
+
+// The `Box` wrapper itself (a reference) can always be moved; whether the
+// *pointee* may move is governed separately by `Pin<Box<T>>` above.
+impl<'a, T: ?Sized> Unpin for Box<'a, T> {}
+
+impl<'a, F: ?Sized + Future + Unpin> Future for Box<'a, F> {
+ type Output = F::Output;
+
+ fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
+ // `F: Unpin` lets us take `&mut F` out of the pinned box and safely
+ // re-wrap it with `Pin::new` before delegating the poll.
+ F::poll(Pin::new(&mut *self), cx)
+ }
+}
+
+/// This impl replaces unsize coercion.
+impl<'a, T, const N: usize> From<Box<'a, [T; N]>> for Box<'a, [T]> {
+ fn from(arr: Box<'a, [T; N]>) -> Box<'a, [T]> {
+ // `ManuallyDrop` prevents a double-drop: ownership of the `N` elements
+ // transfers to the new slice box built from the raw parts.
+ let mut arr = ManuallyDrop::new(arr);
+ let ptr = core::ptr::slice_from_raw_parts_mut(arr.as_mut_ptr(), N);
+ unsafe { Box::from_raw(ptr) }
+ }
+}
+
+/// This impl replaces unsize coercion.
+impl<'a, T, const N: usize> TryFrom<Box<'a, [T]>> for Box<'a, [T; N]> {
+ type Error = Box<'a, [T]>;
+ fn try_from(slice: Box<'a, [T]>) -> Result<Box<'a, [T; N]>, Box<'a, [T]>> {
+ // The lengths must match exactly for the slice-to-array cast to be
+ // sound; otherwise the original box is returned untouched.
+ if slice.len() == N {
+ let mut slice = ManuallyDrop::new(slice);
+ let ptr = slice.as_mut_ptr() as *mut [T; N];
+ Ok(unsafe { Box::from_raw(ptr) })
+ } else {
+ Err(slice)
+ }
+ }
+}
diff --git a/third_party/rust/bumpalo/src/collections/collect_in.rs b/third_party/rust/bumpalo/src/collections/collect_in.rs
new file mode 100644
index 0000000000..3e1adeaeaa
--- /dev/null
+++ b/third_party/rust/bumpalo/src/collections/collect_in.rs
@@ -0,0 +1,152 @@
+#[cfg(feature = "boxed")]
+use crate::boxed::Box;
+use crate::collections::{String, Vec};
+use crate::Bump;
+
+/// A trait for types that support being constructed from an iterator, parameterized by an allocator.
+pub trait FromIteratorIn<A> {
+ /// The allocator type
+ type Alloc;
+
+ /// Similar to [`FromIterator::from_iter`][from_iter], but with a given allocator.
+ ///
+ /// [from_iter]: https://doc.rust-lang.org/std/iter/trait.FromIterator.html#tymethod.from_iter
+ ///
+ /// ```
+ /// # use bumpalo::collections::{FromIteratorIn, Vec};
+ /// # use bumpalo::Bump;
+ /// #
+ /// let five_fives = std::iter::repeat(5).take(5);
+ /// let bump = Bump::new();
+ ///
+ /// let v = Vec::from_iter_in(five_fives, &bump);
+ ///
+ /// assert_eq!(v, [5, 5, 5, 5, 5]);
+ /// ```
+ fn from_iter_in<I>(iter: I, alloc: Self::Alloc) -> Self
+ where
+ I: IntoIterator<Item = A>;
+}
+
+#[cfg(feature = "boxed")]
+impl<'bump, T> FromIteratorIn<T> for Box<'bump, [T]> {
+ type Alloc = &'bump Bump;
+
+ fn from_iter_in<I>(iter: I, alloc: Self::Alloc) -> Self
+ where
+ I: IntoIterator<Item = T>,
+ {
+ Box::from_iter_in(iter, alloc)
+ }
+}
+
+impl<'bump, T> FromIteratorIn<T> for Vec<'bump, T> {
+ type Alloc = &'bump Bump;
+
+ fn from_iter_in<I>(iter: I, alloc: Self::Alloc) -> Self
+ where
+ I: IntoIterator<Item = T>,
+ {
+ Vec::from_iter_in(iter, alloc)
+ }
+}
+
+impl<T, V: FromIteratorIn<T>> FromIteratorIn<Option<T>> for Option<V> {
+ type Alloc = V::Alloc;
+ fn from_iter_in<I>(iter: I, alloc: Self::Alloc) -> Self
+ where
+ I: IntoIterator<Item = Option<T>>,
+ {
+ iter.into_iter()
+ .map(|x| x.ok_or(()))
+ .collect_in::<Result<_, _>>(alloc)
+ .ok()
+ }
+}
+
+impl<T, E, V: FromIteratorIn<T>> FromIteratorIn<Result<T, E>> for Result<V, E> {
+ type Alloc = V::Alloc;
+ /// Takes each element in the `Iterator`: if it is an `Err`, no further
+ /// elements are taken, and the `Err` is returned. Should no `Err` occur, a
+ /// container with the values of each `Result` is returned.
+ ///
+ /// Here is an example which increments every integer in a vector,
+ /// checking for overflow:
+ ///
+ /// ```
+ /// # use bumpalo::collections::{FromIteratorIn, CollectIn, Vec, String};
+ /// # use bumpalo::Bump;
+ /// #
+ /// let bump = Bump::new();
+ ///
+ /// let v = vec![1, 2, u32::MAX];
+ /// let res: Result<Vec<u32>, &'static str> = v.iter().take(2).map(|x: &u32|
+ /// x.checked_add(1).ok_or("Overflow!")
+ /// ).collect_in(&bump);
+ /// assert_eq!(res, Ok(bumpalo::vec![in &bump; 2, 3]));
+ ///
+ /// let res: Result<Vec<u32>, &'static str> = v.iter().map(|x: &u32|
+ /// x.checked_add(1).ok_or("Overflow!")
+ /// ).collect_in(&bump);
+ /// assert_eq!(res, Err("Overflow!"));
+ /// ```
+ fn from_iter_in<I>(iter: I, alloc: Self::Alloc) -> Self
+ where
+ I: IntoIterator<Item = Result<T, E>>,
+ {
+ let mut iter = iter.into_iter();
+ let mut error = None;
+ let container = core::iter::from_fn(|| match iter.next() {
+ Some(Ok(x)) => Some(x),
+ Some(Err(e)) => {
+ error = Some(e);
+ None
+ }
+ None => None,
+ })
+ .collect_in(alloc);
+
+ match error {
+ Some(e) => Err(e),
+ None => Ok(container),
+ }
+ }
+}
+
+impl<'bump> FromIteratorIn<char> for String<'bump> {
+ type Alloc = &'bump Bump;
+
+ fn from_iter_in<I>(iter: I, alloc: Self::Alloc) -> Self
+ where
+ I: IntoIterator<Item = char>,
+ {
+ String::from_iter_in(iter, alloc)
+ }
+}
+
+/// Extension trait for iterators, in order to allow allocator-parameterized collections to be constructed more easily.
+pub trait CollectIn: Iterator + Sized {
+ /// Collect all items from an iterator, into a collection parameterized by an allocator.
+ /// Similar to [`Iterator::collect`][collect].
+ ///
+ /// [collect]: https://doc.rust-lang.org/std/iter/trait.Iterator.html#method.collect
+ ///
+ /// ```
+ /// # use bumpalo::collections::{FromIteratorIn, CollectIn, Vec, String};
+ /// # use bumpalo::Bump;
+ /// #
+ /// let bump = Bump::new();
+ ///
+ /// let str = "hello, world!".to_owned();
+ /// let bump_str: String = str.chars().collect_in(&bump);
+ /// assert_eq!(&bump_str, &str);
+ ///
+ /// let nums: Vec<i32> = (0..=3).collect_in::<Vec<_>>(&bump);
+ /// assert_eq!(&nums, &[0,1,2,3]);
+ /// ```
+ fn collect_in<C: FromIteratorIn<Self::Item>>(self, alloc: C::Alloc) -> C {
+ C::from_iter_in(self, alloc)
+ }
+}
+
+impl<I: Iterator> CollectIn for I {}
diff --git a/third_party/rust/bumpalo/src/collections/mod.rs b/third_party/rust/bumpalo/src/collections/mod.rs
new file mode 100644
index 0000000000..218636c320
--- /dev/null
+++ b/third_party/rust/bumpalo/src/collections/mod.rs
@@ -0,0 +1,93 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Collection types that allocate inside a [`Bump`] arena.
+//!
+//! [`Bump`]: ../struct.Bump.html
+
+#![allow(deprecated)]
+
+mod raw_vec;
+
+pub mod vec;
+pub use self::vec::Vec;
+
+mod str;
+pub mod string;
+pub use self::string::String;
+
+mod collect_in;
+pub use collect_in::{CollectIn, FromIteratorIn};
+
+// pub mod binary_heap;
+// mod btree;
+// pub mod linked_list;
+// pub mod vec_deque;
+
+// pub mod btree_map {
+// //! A map based on a B-Tree.
+// pub use super::btree::map::*;
+// }
+
+// pub mod btree_set {
+// //! A set based on a B-Tree.
+// pub use super::btree::set::*;
+// }
+
+// #[doc(no_inline)]
+// pub use self::binary_heap::BinaryHeap;
+
+// #[doc(no_inline)]
+// pub use self::btree_map::BTreeMap;
+
+// #[doc(no_inline)]
+// pub use self::btree_set::BTreeSet;
+
+// #[doc(no_inline)]
+// pub use self::linked_list::LinkedList;
+
+// #[doc(no_inline)]
+// pub use self::vec_deque::VecDeque;
+
+use crate::alloc::{AllocErr, LayoutErr};
+
+/// Augments `AllocErr` with a `CapacityOverflow` variant.
+#[derive(Clone, PartialEq, Eq, Debug)]
+// #[unstable(feature = "try_reserve", reason = "new API", issue="48043")]
+pub enum CollectionAllocErr {
+ /// Error due to the computed capacity exceeding the collection's maximum
+ /// (usually `isize::MAX` bytes).
+ CapacityOverflow,
+ /// Error due to the allocator (see the documentation for the [`AllocErr`] type).
+ AllocErr,
+}
+
+// #[unstable(feature = "try_reserve", reason = "new API", issue="48043")]
+impl From<AllocErr> for CollectionAllocErr {
+ #[inline]
+ fn from(AllocErr: AllocErr) -> Self {
+ CollectionAllocErr::AllocErr
+ }
+}
+
+// #[unstable(feature = "try_reserve", reason = "new API", issue="48043")]
+impl From<LayoutErr> for CollectionAllocErr {
+ #[inline]
+ fn from(_: LayoutErr) -> Self {
+ CollectionAllocErr::CapacityOverflow
+ }
+}
+
+// /// An intermediate trait for specialization of `Extend`.
+// #[doc(hidden)]
+// trait SpecExtend<I: IntoIterator> {
+// /// Extends `self` with the contents of the given iterator.
+// fn spec_extend(&mut self, iter: I);
+// }
diff --git a/third_party/rust/bumpalo/src/collections/raw_vec.rs b/third_party/rust/bumpalo/src/collections/raw_vec.rs
new file mode 100644
index 0000000000..ac3bd0758c
--- /dev/null
+++ b/third_party/rust/bumpalo/src/collections/raw_vec.rs
@@ -0,0 +1,730 @@
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![allow(unstable_name_collisions)]
+#![allow(dead_code)]
+
+use crate::Bump;
+
+use core::cmp;
+use core::mem;
+use core::ptr::{self, NonNull};
+
+use crate::alloc::{handle_alloc_error, Alloc, Layout, UnstableLayoutMethods};
+use crate::collections::CollectionAllocErr;
+use crate::collections::CollectionAllocErr::*;
+// use boxed::Box;
+
+/// A low-level utility for more ergonomically allocating, reallocating, and deallocating
+/// a buffer of memory on the heap without having to worry about all the corner cases
+/// involved. This type is excellent for building your own data structures like Vec and VecDeque.
+/// In particular:
+///
+/// * Produces Unique::empty() on zero-sized types
+/// * Produces Unique::empty() on zero-length allocations
+/// * Catches all overflows in capacity computations (promotes them to "capacity overflow" panics)
+/// * Guards against 32-bit systems allocating more than isize::MAX bytes
+/// * Guards against overflowing your length
+/// * Aborts on OOM
+/// * Avoids freeing Unique::empty()
+/// * Contains a ptr::Unique and thus endows the user with all related benefits
+///
+/// This type does not in any way inspect the memory that it manages. When dropped it *will*
+/// free its memory, but it *won't* try to Drop its contents. It is up to the user of RawVec
+/// to handle the actual things *stored* inside of a RawVec.
+///
+/// Note that a RawVec always forces its capacity to be usize::MAX for zero-sized types.
+/// This enables you to use capacity growing logic to catch the overflows in your length
+/// that might occur with zero-sized types.
+///
+/// However this means that you need to be careful when round-tripping this type
+/// with a `Box<[T]>`: `cap()` won't yield the len. However `with_capacity`,
+/// `shrink_to_fit`, and `from_box` will actually set RawVec's private capacity
+/// field. This allows zero-sized types to not be special-cased by consumers of
+/// this type.
+#[allow(missing_debug_implementations)]
+pub struct RawVec<'a, T> {
+ ptr: NonNull<T>,
+ cap: usize,
+ a: &'a Bump,
+}
+
+impl<'a, T> RawVec<'a, T> {
+ /// Like `new` but parameterized over the choice of allocator for
+ /// the returned RawVec.
+ pub fn new_in(a: &'a Bump) -> Self {
+ // `cap: 0` means "unallocated". zero-sized types are ignored.
+ RawVec {
+ ptr: NonNull::dangling(),
+ cap: 0,
+ a,
+ }
+ }
+
+ /// Like `with_capacity` but parameterized over the choice of
+ /// allocator for the returned RawVec.
+ #[inline]
+ pub fn with_capacity_in(cap: usize, a: &'a Bump) -> Self {
+ RawVec::allocate_in(cap, false, a)
+ }
+
+ /// Like `with_capacity_zeroed` but parameterized over the choice
+ /// of allocator for the returned RawVec.
+ #[inline]
+ pub fn with_capacity_zeroed_in(cap: usize, a: &'a Bump) -> Self {
+ RawVec::allocate_in(cap, true, a)
+ }
+
+ fn allocate_in(cap: usize, zeroed: bool, mut a: &'a Bump) -> Self {
+ unsafe {
+ let elem_size = mem::size_of::<T>();
+
+ let alloc_size = cap
+ .checked_mul(elem_size)
+ .unwrap_or_else(|| capacity_overflow());
+ alloc_guard(alloc_size).unwrap_or_else(|_| capacity_overflow());
+
+ // handles ZSTs and `cap = 0` alike
+ let ptr = if alloc_size == 0 {
+ NonNull::<T>::dangling()
+ } else {
+ let align = mem::align_of::<T>();
+ let layout = Layout::from_size_align(alloc_size, align).unwrap();
+ let result = if zeroed {
+ a.alloc_zeroed(layout)
+ } else {
+ Alloc::alloc(&mut a, layout)
+ };
+ match result {
+ Ok(ptr) => ptr.cast(),
+ Err(_) => handle_alloc_error(layout),
+ }
+ };
+
+ RawVec { ptr, cap, a }
+ }
+ }
+}
+
+impl<'a, T> RawVec<'a, T> {
+ /// Reconstitutes a RawVec from a pointer, capacity, and allocator.
+ ///
+ /// # Undefined Behavior
+ ///
+ /// The ptr must be allocated (via the given allocator `a`), and with the given capacity. The
+ /// capacity cannot exceed `isize::MAX` (only a concern on 32-bit systems).
+ /// If the ptr and capacity come from a RawVec created via `a`, then this is guaranteed.
+ pub unsafe fn from_raw_parts_in(ptr: *mut T, cap: usize, a: &'a Bump) -> Self {
+ RawVec {
+ ptr: NonNull::new_unchecked(ptr),
+ cap,
+ a,
+ }
+ }
+}
+
+impl<'a, T> RawVec<'a, T> {
+ /// Gets a raw pointer to the start of the allocation. Note that this is
+ /// Unique::empty() if `cap = 0` or T is zero-sized. In the former case, you must
+ /// be careful.
+ pub fn ptr(&self) -> *mut T {
+ self.ptr.as_ptr()
+ }
+
+ /// Gets the capacity of the allocation.
+ ///
+ /// This will always be `usize::MAX` if `T` is zero-sized.
+ #[inline(always)]
+ pub fn cap(&self) -> usize {
+ if mem::size_of::<T>() == 0 {
+ !0
+ } else {
+ self.cap
+ }
+ }
+
+ /// Returns a shared reference to the allocator backing this RawVec.
+ pub fn bump(&self) -> &'a Bump {
+ self.a
+ }
+
+ fn current_layout(&self) -> Option<Layout> {
+ if self.cap == 0 {
+ None
+ } else {
+ // We have an allocated chunk of memory, so we can bypass runtime
+ // checks to get our current layout.
+ unsafe {
+ let align = mem::align_of::<T>();
+ let size = mem::size_of::<T>() * self.cap;
+ Some(Layout::from_size_align_unchecked(size, align))
+ }
+ }
+ }
+
+ /// Doubles the size of the type's backing allocation. This is common enough
+ /// to want to do that it's easiest to just have a dedicated method. Slightly
+ /// more efficient logic can be provided for this than the general case.
+ ///
+ /// This function is ideal for when pushing elements one-at-a-time because
+ /// you don't need to incur the costs of the more general computations
+ /// reserve needs to do to guard against overflow. You do however need to
+ /// manually check if your `len == cap`.
+ ///
+ /// # Panics
+ ///
+ /// * Panics if T is zero-sized on the assumption that you managed to exhaust
+ /// all `usize::MAX` slots in your imaginary buffer.
+ /// * Panics on 32-bit platforms if the requested capacity exceeds
+ /// `isize::MAX` bytes.
+ ///
+ /// # Aborts
+ ///
+ /// Aborts on OOM
+ ///
+ /// # Examples
+ ///
+ /// ```ignore
+ /// # #![feature(alloc, raw_vec_internals)]
+ /// # extern crate alloc;
+ /// # use std::ptr;
+ /// # use alloc::raw_vec::RawVec;
+ /// struct MyVec<T> {
+ /// buf: RawVec<T>,
+ /// len: usize,
+ /// }
+ ///
+ /// impl<T> MyVec<T> {
+ /// pub fn push(&mut self, elem: T) {
+ /// if self.len == self.buf.cap() { self.buf.double(); }
+ /// // double would have aborted or panicked if the len exceeded
+ /// // `isize::MAX` so this is safe to do unchecked now.
+ /// unsafe {
+ /// ptr::write(self.buf.ptr().add(self.len), elem);
+ /// }
+ /// self.len += 1;
+ /// }
+ /// }
+ /// # fn main() {
+ /// # let mut vec = MyVec { buf: RawVec::new(), len: 0 };
+ /// # vec.push(1);
+ /// # }
+ /// ```
+ #[inline(never)]
+ #[cold]
+ pub fn double(&mut self) {
+ unsafe {
+ let elem_size = mem::size_of::<T>();
+
+ // since we set the capacity to usize::MAX when elem_size is
+ // 0, getting to here necessarily means the RawVec is overfull.
+ assert!(elem_size != 0, "capacity overflow");
+
+ let (new_cap, uniq) = match self.current_layout() {
+ Some(cur) => {
+ // Since we guarantee that we never allocate more than
+ // isize::MAX bytes, `elem_size * self.cap <= isize::MAX` as
+ // a precondition, so this can't overflow. Additionally the
+ // alignment will never be too large as to "not be
+ // satisfiable", so `Layout::from_size_align` will always
+ // return `Some`.
+ //
+ // tl;dr; we bypass runtime checks due to dynamic assertions
+ // in this module, allowing us to use
+ // `from_size_align_unchecked`.
+ let new_cap = 2 * self.cap;
+ let new_size = new_cap * elem_size;
+ alloc_guard(new_size).unwrap_or_else(|_| capacity_overflow());
+ let ptr_res = self.a.realloc(self.ptr.cast(), cur, new_size);
+ match ptr_res {
+ Ok(ptr) => (new_cap, ptr.cast()),
+ Err(_) => handle_alloc_error(Layout::from_size_align_unchecked(
+ new_size,
+ cur.align(),
+ )),
+ }
+ }
+ None => {
+ // skip to 4 because tiny Vec's are dumb; but not if that
+ // would cause overflow
+ let new_cap = if elem_size > (!0) / 8 { 1 } else { 4 };
+ match self.a.alloc_array::<T>(new_cap) {
+ Ok(ptr) => (new_cap, ptr),
+ Err(_) => handle_alloc_error(Layout::array::<T>(new_cap).unwrap()),
+ }
+ }
+ };
+ self.ptr = uniq;
+ self.cap = new_cap;
+ }
+ }
+
+ /// Attempts to double the size of the type's backing allocation in place. This is common
+ /// enough to want to do that it's easiest to just have a dedicated method. Slightly
+ /// more efficient logic can be provided for this than the general case.
+ ///
+ /// Returns true if the reallocation attempt has succeeded, or false otherwise.
+ ///
+ /// # Panics
+ ///
+ /// * Panics if T is zero-sized on the assumption that you managed to exhaust
+ /// all `usize::MAX` slots in your imaginary buffer.
+ /// * Panics on 32-bit platforms if the requested capacity exceeds
+ /// `isize::MAX` bytes.
+ #[inline(never)]
+ #[cold]
+ pub fn double_in_place(&mut self) -> bool {
+ unsafe {
+ let elem_size = mem::size_of::<T>();
+ let old_layout = match self.current_layout() {
+ Some(layout) => layout,
+ None => return false, // nothing to double
+ };
+
+ // since we set the capacity to usize::MAX when elem_size is
+ // 0, getting to here necessarily means the RawVec is overfull.
+ assert!(elem_size != 0, "capacity overflow");
+
+ // Since we guarantee that we never allocate more than isize::MAX
+ // bytes, `elem_size * self.cap <= isize::MAX` as a precondition, so
+ // this can't overflow.
+ //
+ // Similarly like with `double` above we can go straight to
+ // `Layout::from_size_align_unchecked` as we know this won't
+ // overflow and the alignment is sufficiently small.
+ let new_cap = 2 * self.cap;
+ let new_size = new_cap * elem_size;
+ alloc_guard(new_size).unwrap_or_else(|_| capacity_overflow());
+ match self.a.grow_in_place(self.ptr.cast(), old_layout, new_size) {
+ Ok(_) => {
+ // We can't directly divide `size`.
+ self.cap = new_cap;
+ true
+ }
+ Err(_) => false,
+ }
+ }
+ }
+
+ /// The same as `reserve_exact`, but returns on errors instead of panicking or aborting.
+ pub fn try_reserve_exact(
+ &mut self,
+ used_cap: usize,
+ needed_extra_cap: usize,
+ ) -> Result<(), CollectionAllocErr> {
+ self.reserve_internal(used_cap, needed_extra_cap, Fallible, Exact)
+ }
+
+ /// Ensures that the buffer contains at least enough space to hold
+ /// `used_cap + needed_extra_cap` elements. If it doesn't already,
+ /// will reallocate the minimum possible amount of memory necessary.
+ /// Generally this will be exactly the amount of memory necessary,
+ /// but in principle the allocator is free to give back more than
+ /// we asked for.
+ ///
+ /// If `used_cap` exceeds `self.cap()`, this may fail to actually allocate
+ /// the requested space. This is not really unsafe, but the unsafe
+ /// code *you* write that relies on the behavior of this function may break.
+ ///
+ /// # Panics
+ ///
+ /// * Panics if the requested capacity exceeds `usize::MAX` bytes.
+ /// * Panics on 32-bit platforms if the requested capacity exceeds
+ /// `isize::MAX` bytes.
+ ///
+ /// # Aborts
+ ///
+ /// Aborts on OOM
+ pub fn reserve_exact(&mut self, used_cap: usize, needed_extra_cap: usize) {
+ match self.reserve_internal(used_cap, needed_extra_cap, Infallible, Exact) {
+ Err(CapacityOverflow) => capacity_overflow(),
+ Err(AllocErr) => unreachable!(),
+ Ok(()) => { /* yay */ }
+ }
+ }
+
+ /// Calculates the buffer's new size given that it'll hold `used_cap +
+ /// needed_extra_cap` elements. This logic is used in amortized reserve methods.
+ /// Returns `(new_capacity, new_alloc_size)`.
+ fn amortized_new_size(
+ &self,
+ used_cap: usize,
+ needed_extra_cap: usize,
+ ) -> Result<usize, CollectionAllocErr> {
+ // Nothing we can really do about these checks :(
+ let required_cap = used_cap
+ .checked_add(needed_extra_cap)
+ .ok_or(CapacityOverflow)?;
+ // Cannot overflow, because `cap <= isize::MAX`, and type of `cap` is `usize`.
+ let double_cap = self.cap * 2;
+ // `double_cap` guarantees exponential growth.
+ Ok(cmp::max(double_cap, required_cap))
+ }
+
+ /// The same as `reserve`, but returns on errors instead of panicking or aborting.
+ pub fn try_reserve(
+ &mut self,
+ used_cap: usize,
+ needed_extra_cap: usize,
+ ) -> Result<(), CollectionAllocErr> {
+ self.reserve_internal(used_cap, needed_extra_cap, Fallible, Amortized)
+ }
+
+ /// Ensures that the buffer contains at least enough space to hold
+ /// `used_cap + needed_extra_cap` elements. If it doesn't already have
+ /// enough capacity, will reallocate enough space plus comfortable slack
+ /// space to get amortized `O(1)` behavior. Will limit this behavior
+ /// if it would needlessly cause itself to panic.
+ ///
+ /// If `used_cap` exceeds `self.cap()`, this may fail to actually allocate
+ /// the requested space. This is not really unsafe, but the unsafe
+ /// code *you* write that relies on the behavior of this function may break.
+ ///
+ /// This is ideal for implementing a bulk-push operation like `extend`.
+ ///
+ /// # Panics
+ ///
+ /// * Panics if the requested capacity exceeds `usize::MAX` bytes.
+ /// * Panics on 32-bit platforms if the requested capacity exceeds
+ /// `isize::MAX` bytes.
+ ///
+ /// # Aborts
+ ///
+ /// Aborts on OOM
+ ///
+ /// # Examples
+ ///
+ /// ```ignore
+ /// # #![feature(alloc, raw_vec_internals)]
+ /// # extern crate alloc;
+ /// # use std::ptr;
+ /// # use alloc::raw_vec::RawVec;
+ /// struct MyVec<T> {
+ /// buf: RawVec<T>,
+ /// len: usize,
+ /// }
+ ///
+ /// impl<T: Clone> MyVec<T> {
+ /// pub fn push_all(&mut self, elems: &[T]) {
+ /// self.buf.reserve(self.len, elems.len());
+ /// // reserve would have aborted or panicked if the len exceeded
+ /// // `isize::MAX` so this is safe to do unchecked now.
+ /// for x in elems {
+ /// unsafe {
+ /// ptr::write(self.buf.ptr().add(self.len), x.clone());
+ /// }
+ /// self.len += 1;
+ /// }
+ /// }
+ /// }
+ /// # fn main() {
+ /// # let mut vector = MyVec { buf: RawVec::new(), len: 0 };
+ /// # vector.push_all(&[1, 3, 5, 7, 9]);
+ /// # }
+ /// ```
+ pub fn reserve(&mut self, used_cap: usize, needed_extra_cap: usize) {
+ match self.reserve_internal(used_cap, needed_extra_cap, Infallible, Amortized) {
+ Err(CapacityOverflow) => capacity_overflow(),
+ Err(AllocErr) => unreachable!(),
+ Ok(()) => { /* yay */ }
+ }
+ }
+ /// Attempts to ensure that the buffer contains at least enough space to hold
+ /// `used_cap + needed_extra_cap` elements. If it doesn't already have
+ /// enough capacity, will reallocate in place enough space plus comfortable slack
+ /// space to get amortized `O(1)` behavior. Will limit this behaviour
+ /// if it would needlessly cause itself to panic.
+ ///
+ /// If `used_cap` exceeds `self.cap()`, this may fail to actually allocate
+ /// the requested space. This is not really unsafe, but the unsafe
+ /// code *you* write that relies on the behavior of this function may break.
+ ///
+ /// Returns true if the reallocation attempt has succeeded, or false otherwise.
+ ///
+ /// # Panics
+ ///
+ /// * Panics if the requested capacity exceeds `usize::MAX` bytes.
+ /// * Panics on 32-bit platforms if the requested capacity exceeds
+ /// `isize::MAX` bytes.
+ pub fn reserve_in_place(&mut self, used_cap: usize, needed_extra_cap: usize) -> bool {
+ unsafe {
+ // NOTE: we don't early branch on ZSTs here because we want this
+ // to actually catch "asking for more than usize::MAX" in that case.
+ // If we make it past the first branch then we are guaranteed to
+ // panic.
+
+ // Don't actually need any more capacity. If the current `cap` is 0, we can't
+ // reallocate in place.
+ // Wrapping in case they give a bad `used_cap`
+ let old_layout = match self.current_layout() {
+ Some(layout) => layout,
+ None => return false,
+ };
+ if self.cap().wrapping_sub(used_cap) >= needed_extra_cap {
+ return false;
+ }
+
+ let new_cap = self
+ .amortized_new_size(used_cap, needed_extra_cap)
+ .unwrap_or_else(|_| capacity_overflow());
+
+ // Here, `cap < used_cap + needed_extra_cap <= new_cap`
+ // (regardless of whether `self.cap - used_cap` wrapped).
+ // Therefore we can safely call grow_in_place.
+
+ let new_layout = Layout::new::<T>().repeat(new_cap).unwrap().0;
+ // FIXME: may crash and burn on over-reserve
+ alloc_guard(new_layout.size()).unwrap_or_else(|_| capacity_overflow());
+ match self
+ .a
+ .grow_in_place(self.ptr.cast(), old_layout, new_layout.size())
+ {
+ Ok(_) => {
+ self.cap = new_cap;
+ true
+ }
+ Err(_) => false,
+ }
+ }
+ }
+
+ /// Shrinks the allocation down to the specified amount. If the given amount
+ /// is 0, actually completely deallocates.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the given amount is *larger* than the current capacity.
+ ///
+ /// # Aborts
+ ///
+ /// Aborts on OOM.
+ pub fn shrink_to_fit(&mut self, amount: usize) {
+ let elem_size = mem::size_of::<T>();
+
+ // Set the `cap` because they might be about to promote to a `Box<[T]>`
+ if elem_size == 0 {
+ self.cap = amount;
+ return;
+ }
+
+ // This check is my waterloo; it's the only thing Vec wouldn't have to do.
+ assert!(self.cap >= amount, "Tried to shrink to a larger capacity");
+
+ if amount == 0 {
+ // We want to create a new zero-length vector within the
+ // same allocator. We use ptr::write to avoid an
+ // erroneous attempt to drop the contents, and we use
+ // ptr::read to sidestep condition against destructuring
+ // types that implement Drop.
+
+ unsafe {
+ let a = self.a;
+ self.dealloc_buffer();
+ ptr::write(self, RawVec::new_in(a));
+ }
+ } else if self.cap != amount {
+ unsafe {
+ // We know here that our `amount` is greater than zero. This
+ // implies, via the assert above, that capacity is also greater
+ // than zero, which means that we've got a current layout that
+ // "fits"
+ //
+ // We also know that `self.cap` is greater than `amount`, and
+ // consequently we don't need runtime checks for creating either
+ // layout
+ let old_size = elem_size * self.cap;
+ let new_size = elem_size * amount;
+ let align = mem::align_of::<T>();
+ let old_layout = Layout::from_size_align_unchecked(old_size, align);
+ match self.a.realloc(self.ptr.cast(), old_layout, new_size) {
+ Ok(p) => self.ptr = p.cast(),
+ Err(_) => {
+ handle_alloc_error(Layout::from_size_align_unchecked(new_size, align))
+ }
+ }
+ }
+ self.cap = amount;
+ }
+ }
+}
+
+#[cfg(feature = "boxed")]
+impl<'a, T> RawVec<'a, T> {
+ /// Converts the entire buffer into `Box<[T]>`.
+ ///
+ /// Note that this will correctly reconstitute any `cap` changes
+ /// that may have been performed. (See description of type for details.)
+ ///
+ /// # Undefined Behavior
+ ///
+ /// All elements of `RawVec<T>` must be initialized. Notice that
+ /// the rules around uninitialized boxed values are not finalized yet,
+ /// but until they are, it is advisable to avoid them.
+ pub unsafe fn into_box(self) -> crate::boxed::Box<'a, [T]> {
+ use crate::boxed::Box;
+
+ // NOTE: not calling `cap()` here; actually using the real `cap` field!
+ let slice = core::slice::from_raw_parts_mut(self.ptr(), self.cap);
+ let output: Box<'a, [T]> = Box::from_raw(slice);
+ mem::forget(self);
+ output
+ }
+}
+
+enum Fallibility {
+ Fallible,
+ Infallible,
+}
+
+use self::Fallibility::*;
+
+enum ReserveStrategy {
+ Exact,
+ Amortized,
+}
+
+use self::ReserveStrategy::*;
+
+impl<'a, T> RawVec<'a, T> {
+ fn reserve_internal(
+ &mut self,
+ used_cap: usize,
+ needed_extra_cap: usize,
+ fallibility: Fallibility,
+ strategy: ReserveStrategy,
+ ) -> Result<(), CollectionAllocErr> {
+ unsafe {
+ use crate::AllocErr;
+
+ // NOTE: we don't early branch on ZSTs here because we want this
+ // to actually catch "asking for more than usize::MAX" in that case.
+ // If we make it past the first branch then we are guaranteed to
+ // panic.
+
+ // Don't actually need any more capacity.
+ // Wrapping in case they gave a bad `used_cap`.
+ if self.cap().wrapping_sub(used_cap) >= needed_extra_cap {
+ return Ok(());
+ }
+
+ // Nothing we can really do about these checks :(
+ let new_cap = match strategy {
+ Exact => used_cap
+ .checked_add(needed_extra_cap)
+ .ok_or(CapacityOverflow)?,
+ Amortized => self.amortized_new_size(used_cap, needed_extra_cap)?,
+ };
+ let new_layout = Layout::array::<T>(new_cap).map_err(|_| CapacityOverflow)?;
+
+ alloc_guard(new_layout.size())?;
+
+ let res = match self.current_layout() {
+ Some(layout) => {
+ debug_assert!(new_layout.align() == layout.align());
+ self.a.realloc(self.ptr.cast(), layout, new_layout.size())
+ }
+ None => Alloc::alloc(&mut self.a, new_layout),
+ };
+
+ if let (Err(AllocErr), Infallible) = (&res, fallibility) {
+ handle_alloc_error(new_layout);
+ }
+
+ self.ptr = res?.cast();
+ self.cap = new_cap;
+
+ Ok(())
+ }
+ }
+}
+
+impl<'a, T> RawVec<'a, T> {
+ /// Frees the memory owned by the RawVec *without* trying to Drop its contents.
+ pub unsafe fn dealloc_buffer(&mut self) {
+ let elem_size = mem::size_of::<T>();
+ if elem_size != 0 {
+ if let Some(layout) = self.current_layout() {
+ self.a.dealloc(self.ptr.cast(), layout);
+ }
+ }
+ }
+}
+
+impl<'a, T> Drop for RawVec<'a, T> {
+ /// Frees the memory owned by the RawVec *without* trying to Drop its contents.
+ fn drop(&mut self) {
+ unsafe {
+ self.dealloc_buffer();
+ }
+ }
+}
+
+// We need to guarantee the following:
+// * We don't ever allocate `> isize::MAX` byte-size objects
+// * We don't overflow `usize::MAX` and actually allocate too little
+//
+// On 64-bit we just need to check for overflow since trying to allocate
+// `> isize::MAX` bytes will surely fail. On 32-bit and 16-bit we need to add
+// an extra guard for this in case we're running on a platform which can use
+// all 4GB in user-space. e.g. PAE or x32
+
+#[inline]
+fn alloc_guard(alloc_size: usize) -> Result<(), CollectionAllocErr> {
+ if mem::size_of::<usize>() < 8 && alloc_size > ::core::isize::MAX as usize {
+ Err(CapacityOverflow)
+ } else {
+ Ok(())
+ }
+}
+
+// One central function responsible for reporting capacity overflows. This'll
+// ensure that the code generation related to these panics is minimal as there's
+// only one location which panics rather than a bunch throughout the module.
+fn capacity_overflow() -> ! {
+ panic!("capacity overflow")
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn reserve_does_not_overallocate() {
+ let bump = Bump::new();
+ {
+ let mut v: RawVec<u32> = RawVec::new_in(&bump);
+ // First `reserve` allocates like `reserve_exact`
+ v.reserve(0, 9);
+ assert_eq!(9, v.cap());
+ }
+
+ {
+ let mut v: RawVec<u32> = RawVec::new_in(&bump);
+ v.reserve(0, 7);
+ assert_eq!(7, v.cap());
+            // 97 is more than double of 7, so `reserve` should work
+ // like `reserve_exact`.
+ v.reserve(7, 90);
+ assert_eq!(97, v.cap());
+ }
+
+ {
+ let mut v: RawVec<u32> = RawVec::new_in(&bump);
+ v.reserve(0, 12);
+ assert_eq!(12, v.cap());
+ v.reserve(12, 3);
+ // 3 is less than half of 12, so `reserve` must grow
+ // exponentially. At the time of writing this test grow
+ // factor is 2, so new capacity is 24, however, grow factor
+ // of 1.5 is OK too. Hence `>= 18` in assert.
+ assert!(v.cap() >= 12 + 12 / 2);
+ }
+ }
+}
diff --git a/third_party/rust/bumpalo/src/collections/str/lossy.rs b/third_party/rust/bumpalo/src/collections/str/lossy.rs
new file mode 100644
index 0000000000..b1012a4129
--- /dev/null
+++ b/third_party/rust/bumpalo/src/collections/str/lossy.rs
@@ -0,0 +1,209 @@
+// Copyright 2012-2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use crate::collections::str as core_str;
+use core::char;
+use core::fmt;
+use core::fmt::Write;
+use core::str;
+
+/// Lossy UTF-8 string.
+pub struct Utf8Lossy<'a> {
+ bytes: &'a [u8],
+}
+
+impl<'a> Utf8Lossy<'a> {
+ pub fn from_bytes(bytes: &'a [u8]) -> Utf8Lossy<'a> {
+ Utf8Lossy { bytes }
+ }
+
+ pub fn chunks(&self) -> Utf8LossyChunksIter<'a> {
+ Utf8LossyChunksIter {
+ source: &self.bytes,
+ }
+ }
+}
+
+/// Iterator over lossy UTF-8 string.
+#[allow(missing_debug_implementations)]
+pub struct Utf8LossyChunksIter<'a> {
+ source: &'a [u8],
+}
+
+#[derive(PartialEq, Eq, Debug)]
+pub struct Utf8LossyChunk<'a> {
+ /// Sequence of valid chars.
+ /// Can be empty between broken UTF-8 chars.
+ pub valid: &'a str,
+ /// Single broken char, empty if none.
+ /// Empty iff iterator item is last.
+ pub broken: &'a [u8],
+}
+
+impl<'a> Iterator for Utf8LossyChunksIter<'a> {
+ type Item = Utf8LossyChunk<'a>;
+
+ fn next(&mut self) -> Option<Utf8LossyChunk<'a>> {
+ if self.source.is_empty() {
+ return None;
+ }
+
+ const TAG_CONT_U8: u8 = 128;
+ fn unsafe_get(xs: &[u8], i: usize) -> u8 {
+ unsafe { *xs.get_unchecked(i) }
+ }
+ fn safe_get(xs: &[u8], i: usize) -> u8 {
+ if i >= xs.len() {
+ 0
+ } else {
+ unsafe_get(xs, i)
+ }
+ }
+
+ let mut i = 0;
+ while i < self.source.len() {
+ let i_ = i;
+
+ let byte = unsafe_get(self.source, i);
+ i += 1;
+
+ if byte < 128 {
+ } else {
+ let w = core_str::utf8_char_width(byte);
+
+ macro_rules! error {
+ () => {{
+ unsafe {
+ let r = Utf8LossyChunk {
+ valid: str::from_utf8_unchecked(&self.source[0..i_]),
+ broken: &self.source[i_..i],
+ };
+ self.source = &self.source[i..];
+ return Some(r);
+ }
+ }};
+ }
+
+ match w {
+ 2 => {
+ if safe_get(self.source, i) & 192 != TAG_CONT_U8 {
+ error!();
+ }
+ i += 1;
+ }
+ 3 => {
+ match (byte, safe_get(self.source, i)) {
+ (0xE0, 0xA0..=0xBF) => (),
+ (0xE1..=0xEC, 0x80..=0xBF) => (),
+ (0xED, 0x80..=0x9F) => (),
+ (0xEE..=0xEF, 0x80..=0xBF) => (),
+ _ => {
+ error!();
+ }
+ }
+ i += 1;
+ if safe_get(self.source, i) & 192 != TAG_CONT_U8 {
+ error!();
+ }
+ i += 1;
+ }
+ 4 => {
+ match (byte, safe_get(self.source, i)) {
+ (0xF0, 0x90..=0xBF) => (),
+ (0xF1..=0xF3, 0x80..=0xBF) => (),
+ (0xF4, 0x80..=0x8F) => (),
+ _ => {
+ error!();
+ }
+ }
+ i += 1;
+ if safe_get(self.source, i) & 192 != TAG_CONT_U8 {
+ error!();
+ }
+ i += 1;
+ if safe_get(self.source, i) & 192 != TAG_CONT_U8 {
+ error!();
+ }
+ i += 1;
+ }
+ _ => {
+ error!();
+ }
+ }
+ }
+ }
+
+ let r = Utf8LossyChunk {
+ valid: unsafe { str::from_utf8_unchecked(self.source) },
+ broken: &[],
+ };
+ self.source = &[];
+ Some(r)
+ }
+}
+
+impl<'a> fmt::Display for Utf8Lossy<'a> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ // If we're the empty string then our iterator won't actually yield
+ // anything, so perform the formatting manually
+ if self.bytes.is_empty() {
+ return "".fmt(f);
+ }
+
+ for Utf8LossyChunk { valid, broken } in self.chunks() {
+ // If we successfully decoded the whole chunk as a valid string then
+ // we can return a direct formatting of the string which will also
+ // respect various formatting flags if possible.
+ if valid.len() == self.bytes.len() {
+ assert!(broken.is_empty());
+ return valid.fmt(f);
+ }
+
+ f.write_str(valid)?;
+ if !broken.is_empty() {
+ f.write_char(char::REPLACEMENT_CHARACTER)?;
+ }
+ }
+ Ok(())
+ }
+}
+
+impl<'a> fmt::Debug for Utf8Lossy<'a> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.write_char('"')?;
+
+ for Utf8LossyChunk { valid, broken } in self.chunks() {
+ // Valid part.
+ // Here we partially parse UTF-8 again which is suboptimal.
+ {
+ let mut from = 0;
+ for (i, c) in valid.char_indices() {
+ let esc = c.escape_debug();
+ // If char needs escaping, flush backlog so far and write, else skip
+ if esc.len() != 1 {
+ f.write_str(&valid[from..i])?;
+ for c in esc {
+ f.write_char(c)?;
+ }
+ from = i + c.len_utf8();
+ }
+ }
+ f.write_str(&valid[from..])?;
+ }
+
+ // Broken parts of string as hex escape.
+ for &b in broken {
+ write!(f, "\\x{:02x}", b)?;
+ }
+ }
+
+ f.write_char('"')
+ }
+}
diff --git a/third_party/rust/bumpalo/src/collections/str/mod.rs b/third_party/rust/bumpalo/src/collections/str/mod.rs
new file mode 100644
index 0000000000..29f4c6be06
--- /dev/null
+++ b/third_party/rust/bumpalo/src/collections/str/mod.rs
@@ -0,0 +1,43 @@
+// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! String manipulation
+//!
+//! For more details, see std::str
+
+#[allow(missing_docs)]
+pub mod lossy;
+
+// https://tools.ietf.org/html/rfc3629
+#[rustfmt::skip]
+static UTF8_CHAR_WIDTH: [u8; 256] = [
+1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, // 0x1F
+1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, // 0x3F
+1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, // 0x5F
+1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, // 0x7F
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, // 0x9F
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, // 0xBF
+0,0,2,2,2,2,2,2,2,2,2,2,2,2,2,2,
+2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2, // 0xDF
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3, // 0xEF
+4,4,4,4,4,0,0,0,0,0,0,0,0,0,0,0, // 0xFF
+];
+
+/// Given a first byte, determines how many bytes are in this UTF-8 character.
+#[inline]
+pub fn utf8_char_width(b: u8) -> usize {
+ UTF8_CHAR_WIDTH[b as usize] as usize
+}
diff --git a/third_party/rust/bumpalo/src/collections/string.rs b/third_party/rust/bumpalo/src/collections/string.rs
new file mode 100644
index 0000000000..ffd1db92de
--- /dev/null
+++ b/third_party/rust/bumpalo/src/collections/string.rs
@@ -0,0 +1,2141 @@
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! A UTF-8 encoded, growable string.
+//!
+//! This module contains the [`String`] type and several error types that may
+//! result from working with [`String`]s.
+//!
+//! This module is a fork of the [`std::string`] module, that uses a bump allocator.
+//!
+//! [`std::string`]: https://doc.rust-lang.org/std/string/index.html
+//!
+//! # Examples
+//!
+//! You can create a new [`String`] from a string literal with [`String::from_str_in`]:
+//!
+//! ```
+//! use bumpalo::{Bump, collections::String};
+//!
+//! let b = Bump::new();
+//!
+//! let s = String::from_str_in("world", &b);
+//! ```
+//!
+//! [`String`]: struct.String.html
+//! [`String::from_str_in`]: struct.String.html#method.from_str_in
+//!
+//! If you have a vector of valid UTF-8 bytes, you can make a [`String`] out of
+//! it. You can do the reverse too.
+//!
+//! ```
+//! use bumpalo::{Bump, collections::String};
+//!
+//! let b = Bump::new();
+//!
+//! let sparkle_heart = bumpalo::vec![in &b; 240, 159, 146, 150];
+//!
+//! // We know these bytes are valid, so we'll use `unwrap()`.
+//! let sparkle_heart = String::from_utf8(sparkle_heart).unwrap();
+//!
+//! assert_eq!("💖", sparkle_heart);
+//!
+//! let bytes = sparkle_heart.into_bytes();
+//!
+//! assert_eq!(bytes, [240, 159, 146, 150]);
+//! ```
+
+use crate::collections::str::lossy;
+use crate::collections::vec::Vec;
+use crate::Bump;
+use core::borrow::{Borrow, BorrowMut};
+use core::char::decode_utf16;
+use core::fmt;
+use core::hash;
+use core::iter::FusedIterator;
+use core::mem;
+use core::ops::Bound::{Excluded, Included, Unbounded};
+use core::ops::{self, Add, AddAssign, Index, IndexMut, RangeBounds};
+use core::ptr;
+use core::str::{self, Chars, Utf8Error};
+use core_alloc::borrow::Cow;
+
+/// Like the [`format!`] macro, but for creating [`bumpalo::collections::String`]s.
+///
+/// [`format!`]: https://doc.rust-lang.org/std/macro.format.html
+/// [`bumpalo::collections::String`]: collections/string/struct.String.html
+///
+/// # Examples
+///
+/// ```
+/// use bumpalo::Bump;
+///
+/// let b = Bump::new();
+///
+/// let who = "World";
+/// let s = bumpalo::format!(in &b, "Hello, {}!", who);
+/// assert_eq!(s, "Hello, World!")
+/// ```
+#[macro_export]
+macro_rules! format {
+ ( in $bump:expr, $fmt:expr, $($args:expr),* ) => {{
+ use $crate::core_alloc::fmt::Write;
+ let bump = $bump;
+ let mut s = $crate::collections::String::new_in(bump);
+ let _ = write!(&mut s, $fmt, $($args),*);
+ s
+ }};
+
+ ( in $bump:expr, $fmt:expr, $($args:expr,)* ) => {
+ $crate::format!(in $bump, $fmt, $($args),*)
+ };
+}
+
+/// A UTF-8 encoded, growable string.
+///
+/// The `String` type is the most common string type that has ownership over the
+/// contents of the string. It has a close relationship with its borrowed
+/// counterpart, the primitive [`str`].
+///
+/// [`str`]: https://doc.rust-lang.org/std/primitive.str.html
+///
+/// # Examples
+///
+/// You can create a `String` from a literal string with [`String::from_str_in`]:
+///
+/// ```
+/// use bumpalo::{Bump, collections::String};
+///
+/// let b = Bump::new();
+///
+/// let hello = String::from_str_in("Hello, world!", &b);
+/// ```
+///
+/// You can append a [`char`] to a `String` with the [`push`] method, and
+/// append a [`&str`] with the [`push_str`] method:
+///
+/// ```
+/// use bumpalo::{Bump, collections::String};
+///
+/// let b = Bump::new();
+///
+/// let mut hello = String::from_str_in("Hello, ", &b);
+///
+/// hello.push('w');
+/// hello.push_str("orld!");
+/// ```
+///
+/// [`char`]: https://doc.rust-lang.org/std/primitive.char.html
+/// [`push`]: #method.push
+/// [`push_str`]: #method.push_str
+///
+/// If you have a vector of UTF-8 bytes, you can create a `String` from it with
+/// the [`from_utf8`] method:
+///
+/// ```
+/// use bumpalo::{Bump, collections::String};
+///
+/// let b = Bump::new();
+///
+/// // some bytes, in a vector
+/// let sparkle_heart = bumpalo::vec![in &b; 240, 159, 146, 150];
+///
+/// // We know these bytes are valid, so we'll use `unwrap()`.
+/// let sparkle_heart = String::from_utf8(sparkle_heart).unwrap();
+///
+/// assert_eq!("💖", sparkle_heart);
+/// ```
+///
+/// [`from_utf8`]: #method.from_utf8
+///
+/// # Deref
+///
+/// `String`s implement <code>[`Deref`]<Target = [`str`]></code>, and so inherit all of [`str`]'s
+/// methods. In addition, this means that you can pass a `String` to a
+/// function which takes a [`&str`] by using an ampersand (`&`):
+///
+/// ```
+/// use bumpalo::{Bump, collections::String};
+///
+/// let b = Bump::new();
+///
+/// fn takes_str(s: &str) { }
+///
+/// let s = String::from_str_in("Hello", &b);
+///
+/// takes_str(&s);
+/// ```
+///
+/// This will create a [`&str`] from the `String` and pass it in. This
+/// conversion is very inexpensive, and so generally, functions will accept
+/// [`&str`]s as arguments unless they need a `String` for some specific
+/// reason.
+///
+/// In certain cases Rust doesn't have enough information to make this
+/// conversion, known as [`Deref`] coercion. In the following example a string
+/// slice [`&'a str`][`&str`] implements the trait `TraitExample`, and the function
+/// `example_func` takes anything that implements the trait. In this case Rust
+/// would need to make two implicit conversions, which Rust doesn't have the
+/// means to do. For that reason, the following example will not compile.
+///
+/// ```compile_fail,E0277
+/// use bumpalo::{Bump, collections::String};
+///
+/// trait TraitExample {}
+///
+/// impl<'a> TraitExample for &'a str {}
+///
+/// fn example_func<A: TraitExample>(example_arg: A) {}
+///
+/// let b = Bump::new();
+/// let example_string = String::from_str_in("example_string", &b);
+/// example_func(&example_string);
+/// ```
+///
+/// There are two options that would work instead. The first would be to
+/// change the line `example_func(&example_string);` to
+/// `example_func(example_string.as_str());`, using the method [`as_str()`]
+/// to explicitly extract the string slice containing the string. The second
+/// way changes `example_func(&example_string);` to
+/// `example_func(&*example_string);`. In this case we are dereferencing a
+/// `String` to a [`str`][`&str`], then referencing the [`str`][`&str`] back to
+/// [`&str`]. The second way is more idiomatic, however both work to do the
+/// conversion explicitly rather than relying on the implicit conversion.
+///
+/// # Representation
+///
+/// A `String` is made up of three components: a pointer to some bytes, a
+/// length, and a capacity. The pointer points to an internal buffer `String`
+/// uses to store its data. The length is the number of bytes currently stored
+/// in the buffer, and the capacity is the size of the buffer in bytes. As such,
+/// the length will always be less than or equal to the capacity.
+///
+/// This buffer is always stored on the heap.
+///
+/// You can look at these with the [`as_ptr`], [`len`], and [`capacity`]
+/// methods:
+///
+/// ```
+/// use bumpalo::{Bump, collections::String};
+/// use std::mem;
+///
+/// let b = Bump::new();
+///
+/// let mut story = String::from_str_in("Once upon a time...", &b);
+///
+/// let ptr = story.as_mut_ptr();
+/// let len = story.len();
+/// let capacity = story.capacity();
+///
+/// // story has nineteen bytes
+/// assert_eq!(19, len);
+///
+/// // Now that we have our parts, we throw the story away.
+/// mem::forget(story);
+///
+/// // We can re-build a String out of ptr, len, and capacity. This is all
+/// // unsafe because we are responsible for making sure the components are
+/// // valid:
+/// let s = unsafe { String::from_raw_parts_in(ptr, len, capacity, &b) } ;
+///
+/// assert_eq!(String::from_str_in("Once upon a time...", &b), s);
+/// ```
+///
+/// [`as_ptr`]: https://doc.rust-lang.org/std/primitive.str.html#method.as_ptr
+/// [`len`]: #method.len
+/// [`capacity`]: #method.capacity
+///
+/// If a `String` has enough capacity, adding elements to it will not
+/// re-allocate. For example, consider this program:
+///
+/// ```
+/// use bumpalo::{Bump, collections::String};
+///
+/// let b = Bump::new();
+///
+/// let mut s = String::new_in(&b);
+///
+/// println!("{}", s.capacity());
+///
+/// for _ in 0..5 {
+/// s.push_str("hello");
+/// println!("{}", s.capacity());
+/// }
+/// ```
+///
+/// This will output the following:
+///
+/// ```text
+/// 0
+/// 5
+/// 10
+/// 20
+/// 20
+/// 40
+/// ```
+///
+/// At first, we have no memory allocated at all, but as we append to the
+/// string, it increases its capacity appropriately. If we instead use the
+/// [`with_capacity_in`] method to allocate the correct capacity initially:
+///
+/// ```
+/// use bumpalo::{Bump, collections::String};
+///
+/// let b = Bump::new();
+///
+/// let mut s = String::with_capacity_in(25, &b);
+///
+/// println!("{}", s.capacity());
+///
+/// for _ in 0..5 {
+/// s.push_str("hello");
+/// println!("{}", s.capacity());
+/// }
+/// ```
+///
+/// [`with_capacity_in`]: #method.with_capacity_in
+///
+/// We end up with a different output:
+///
+/// ```text
+/// 25
+/// 25
+/// 25
+/// 25
+/// 25
+/// 25
+/// ```
+///
+/// Here, there's no need to allocate more memory inside the loop.
+///
+/// [`&str`]: https://doc.rust-lang.org/std/primitive.str.html
+/// [`Deref`]: https://doc.rust-lang.org/std/ops/trait.Deref.html
+/// [`as_str()`]: struct.String.html#method.as_str
+#[derive(PartialOrd, Eq, Ord)]
+pub struct String<'bump> {
+ vec: Vec<'bump, u8>,
+}
+
+/// A possible error value when converting a `String` from a UTF-8 byte vector.
+///
+/// This type is the error type for the [`from_utf8`] method on [`String`]. It
+/// is designed in such a way to carefully avoid reallocations: the
+/// [`into_bytes`] method will give back the byte vector that was used in the
+/// conversion attempt.
+///
+/// [`from_utf8`]: struct.String.html#method.from_utf8
+/// [`String`]: struct.String.html
+/// [`into_bytes`]: struct.FromUtf8Error.html#method.into_bytes
+///
+/// The [`Utf8Error`] type provided by [`std::str`] represents an error that may
+/// occur when converting a slice of [`u8`]s to a [`&str`]. In this sense, it's
+/// an analogue to `FromUtf8Error`, and you can get one from a `FromUtf8Error`
+/// through the [`utf8_error`] method.
+///
+/// [`Utf8Error`]: https://doc.rust-lang.org/std/str/struct.Utf8Error.html
+/// [`std::str`]: https://doc.rust-lang.org/std/str/index.html
+/// [`u8`]: https://doc.rust-lang.org/std/primitive.u8.html
+/// [`&str`]: https://doc.rust-lang.org/std/primitive.str.html
+/// [`utf8_error`]: #method.utf8_error
+///
+/// # Examples
+///
+/// Basic usage:
+///
+/// ```
+/// use bumpalo::{Bump, collections::String};
+///
+/// let b = Bump::new();
+///
+/// // some invalid bytes, in a vector
+/// let bytes = bumpalo::vec![in &b; 0, 159];
+///
+/// let value = String::from_utf8(bytes);
+///
+/// assert!(value.is_err());
+/// assert_eq!(bumpalo::vec![in &b; 0, 159], value.unwrap_err().into_bytes());
+/// ```
+#[derive(Debug)]
+pub struct FromUtf8Error<'bump> {
+ bytes: Vec<'bump, u8>,
+ error: Utf8Error,
+}
+
+/// A possible error value when converting a `String` from a UTF-16 byte slice.
+///
+/// This type is the error type for the [`from_utf16_in`] method on [`String`].
+///
+/// [`from_utf16_in`]: struct.String.html#method.from_utf16_in
+/// [`String`]: struct.String.html
+///
+/// # Examples
+///
+/// Basic usage:
+///
+/// ```
+/// use bumpalo::{Bump, collections::String};
+///
+/// let b = Bump::new();
+///
+/// // 𝄞mu<invalid>ic
+/// let v = &[0xD834, 0xDD1E, 0x006d, 0x0075, 0xD800, 0x0069, 0x0063];
+///
+/// assert!(String::from_utf16_in(v, &b).is_err());
+/// ```
+#[derive(Debug)]
+pub struct FromUtf16Error(());
+
+impl<'bump> String<'bump> {
+ /// Creates a new empty `String`.
+ ///
+ /// Given that the `String` is empty, this will not allocate any initial
+ /// buffer. While that means that this initial operation is very
+ /// inexpensive, it may cause excessive allocation later when you add
+ /// data. If you have an idea of how much data the `String` will hold,
+ /// consider the [`with_capacity_in`] method to prevent excessive
+ /// re-allocation.
+ ///
+ /// [`with_capacity_in`]: #method.with_capacity_in
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::String};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// let s = String::new_in(&b);
+ /// ```
+ #[inline]
+ pub fn new_in(bump: &'bump Bump) -> String<'bump> {
+ String {
+ vec: Vec::new_in(bump),
+ }
+ }
+
+ /// Creates a new empty `String` with a particular capacity.
+ ///
+ /// `String`s have an internal buffer to hold their data. The capacity is
+ /// the length of that buffer, and can be queried with the [`capacity`]
+ /// method. This method creates an empty `String`, but one with an initial
+ /// buffer that can hold `capacity` bytes. This is useful when you may be
+ /// appending a bunch of data to the `String`, reducing the number of
+ /// reallocations it needs to do.
+ ///
+ /// [`capacity`]: #method.capacity
+ ///
+ /// If the given capacity is `0`, no allocation will occur, and this method
+ /// is identical to the [`new_in`] method.
+ ///
+ /// [`new_in`]: #method.new_in
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::String};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// let mut s = String::with_capacity_in(10, &b);
+ ///
+ /// // The String contains no chars, even though it has capacity for more
+ /// assert_eq!(s.len(), 0);
+ ///
+ /// // These are all done without reallocating...
+ /// let cap = s.capacity();
+ /// for _ in 0..10 {
+ /// s.push('a');
+ /// }
+ ///
+ /// assert_eq!(s.capacity(), cap);
+ ///
+ /// // ...but this may make the vector reallocate
+ /// s.push('a');
+ /// ```
+ #[inline]
+ pub fn with_capacity_in(capacity: usize, bump: &'bump Bump) -> String<'bump> {
+ String {
+ vec: Vec::with_capacity_in(capacity, bump),
+ }
+ }
+
+ /// Converts a vector of bytes to a `String`.
+ ///
+ /// A string (`String`) is made of bytes ([`u8`]), and a vector of bytes
+ /// ([`Vec<u8>`]) is made of bytes, so this function converts between the
+ /// two. Not all byte slices are valid `String`s, however: `String`
+ /// requires that it is valid UTF-8. `from_utf8()` checks to ensure that
+ /// the bytes are valid UTF-8, and then does the conversion.
+ ///
+ /// If you are sure that the byte slice is valid UTF-8, and you don't want
+ /// to incur the overhead of the validity check, there is an unsafe version
+ /// of this function, [`from_utf8_unchecked`], which has the same behavior
+ /// but skips the check.
+ ///
+ /// This method will take care to not copy the vector, for efficiency's
+ /// sake.
+ ///
+ /// If you need a [`&str`] instead of a `String`, consider
+ /// [`str::from_utf8`].
+ ///
+ /// The inverse of this method is [`into_bytes`].
+ ///
+ /// # Errors
+ ///
+ /// Returns [`Err`] if the slice is not UTF-8 with a description as to why the
+ /// provided bytes are not UTF-8. The vector you moved in is also included.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::String};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// // some bytes, in a vector
+ /// let sparkle_heart = bumpalo::vec![in &b; 240, 159, 146, 150];
+ ///
+ /// // We know these bytes are valid, so we'll use `unwrap()`.
+ /// let sparkle_heart = String::from_utf8(sparkle_heart).unwrap();
+ ///
+ /// assert_eq!("💖", sparkle_heart);
+ /// ```
+ ///
+ /// Incorrect bytes:
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::String};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// // some invalid bytes, in a vector
+ /// let sparkle_heart = bumpalo::vec![in &b; 0, 159, 146, 150];
+ ///
+ /// assert!(String::from_utf8(sparkle_heart).is_err());
+ /// ```
+ ///
+ /// See the docs for [`FromUtf8Error`] for more details on what you can do
+ /// with this error.
+ ///
+ /// [`from_utf8_unchecked`]: struct.String.html#method.from_utf8_unchecked
+ /// [`&str`]: https://doc.rust-lang.org/std/primitive.str.html
+ /// [`u8`]: https://doc.rust-lang.org/std/primitive.u8.html
+ /// [`Vec<u8>`]: ../vec/struct.Vec.html
+ /// [`str::from_utf8`]: https://doc.rust-lang.org/std/str/fn.from_utf8.html
+ /// [`into_bytes`]: struct.String.html#method.into_bytes
+ /// [`FromUtf8Error`]: struct.FromUtf8Error.html
+ /// [`Err`]: https://doc.rust-lang.org/std/result/enum.Result.html#variant.Err
+ #[inline]
+ pub fn from_utf8(vec: Vec<'bump, u8>) -> Result<String<'bump>, FromUtf8Error<'bump>> {
+ match str::from_utf8(&vec) {
+ Ok(..) => Ok(String { vec }),
+ Err(e) => Err(FromUtf8Error {
+ bytes: vec,
+ error: e,
+ }),
+ }
+ }
+
+ /// Converts a slice of bytes to a string, including invalid characters.
+ ///
+ /// Strings are made of bytes ([`u8`]), and a slice of bytes
+ /// ([`&[u8]`][slice]) is made of bytes, so this function converts
+ /// between the two. Not all byte slices are valid strings, however: strings
+ /// are required to be valid UTF-8. During this conversion,
+ /// `from_utf8_lossy_in()` will replace any invalid UTF-8 sequences with
+ /// [`U+FFFD REPLACEMENT CHARACTER`][U+FFFD], which looks like this: �
+ ///
+ /// [`u8`]: https://doc.rust-lang.org/std/primitive.u8.html
+ /// [slice]: https://doc.rust-lang.org/std/primitive.slice.html
+ /// [U+FFFD]: https://doc.rust-lang.org/std/char/constant.REPLACEMENT_CHARACTER.html
+ ///
+ /// If you are sure that the byte slice is valid UTF-8, and you don't want
+ /// to incur the overhead of the conversion, there is an unsafe version
+ /// of this function, [`from_utf8_unchecked`], which has the same behavior
+ /// but skips the checks.
+ ///
+ /// [`from_utf8_unchecked`]: struct.String.html#method.from_utf8_unchecked
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use bumpalo::{collections::String, Bump, vec};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// // some bytes, in a vector
+ /// let sparkle_heart = bumpalo::vec![in &b; 240, 159, 146, 150];
+ ///
+ /// let sparkle_heart = String::from_utf8_lossy_in(&sparkle_heart, &b);
+ ///
+ /// assert_eq!("💖", sparkle_heart);
+ /// ```
+ ///
+ /// Incorrect bytes:
+ ///
+ /// ```
+ /// use bumpalo::{collections::String, Bump, vec};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// // some invalid bytes
+ /// let input = b"Hello \xF0\x90\x80World";
+ /// let output = String::from_utf8_lossy_in(input, &b);
+ ///
+ /// assert_eq!("Hello �World", output);
+ /// ```
+ pub fn from_utf8_lossy_in(v: &[u8], bump: &'bump Bump) -> String<'bump> {
+ let mut iter = lossy::Utf8Lossy::from_bytes(v).chunks();
+
+ let (first_valid, first_broken) = if let Some(chunk) = iter.next() {
+ let lossy::Utf8LossyChunk { valid, broken } = chunk;
+ if valid.len() == v.len() {
+ debug_assert!(broken.is_empty());
+ unsafe {
+ return String::from_utf8_unchecked(Vec::from_iter_in(v.iter().cloned(), bump));
+ }
+ }
+ (valid, broken)
+ } else {
+ return String::from_str_in("", bump);
+ };
+
+ const REPLACEMENT: &str = "\u{FFFD}";
+
+ let mut res = String::with_capacity_in(v.len(), bump);
+ res.push_str(first_valid);
+ if !first_broken.is_empty() {
+ res.push_str(REPLACEMENT);
+ }
+
+ for lossy::Utf8LossyChunk { valid, broken } in iter {
+ res.push_str(valid);
+ if !broken.is_empty() {
+ res.push_str(REPLACEMENT);
+ }
+ }
+
+ res
+ }
+
+ /// Decode a UTF-16 encoded slice `v` into a `String`, returning [`Err`]
+ /// if `v` contains any invalid data.
+ ///
+ /// [`Err`]: https://doc.rust-lang.org/std/result/enum.Result.html#variant.Err
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::String};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// // 𝄞music
+ /// let v = &[0xD834, 0xDD1E, 0x006d, 0x0075, 0x0073, 0x0069, 0x0063];
+ /// assert_eq!(String::from_str_in("𝄞music", &b), String::from_utf16_in(v, &b).unwrap());
+ ///
+ /// // 𝄞mu<invalid>ic
+ /// let v = &[0xD834, 0xDD1E, 0x006d, 0x0075, 0xD800, 0x0069, 0x0063];
+ /// assert!(String::from_utf16_in(v, &b).is_err());
+ /// ```
+ pub fn from_utf16_in(v: &[u16], bump: &'bump Bump) -> Result<String<'bump>, FromUtf16Error> {
+ let mut ret = String::with_capacity_in(v.len(), bump);
+ for c in decode_utf16(v.iter().cloned()) {
+ if let Ok(c) = c {
+ ret.push(c);
+ } else {
+ return Err(FromUtf16Error(()));
+ }
+ }
+ Ok(ret)
+ }
+
+ /// Construct a new `String<'bump>` from a string slice.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::String};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// let s = String::from_str_in("hello", &b);
+ /// assert_eq!(s, "hello");
+ /// ```
+ pub fn from_str_in(s: &str, bump: &'bump Bump) -> String<'bump> {
+ let mut t = String::with_capacity_in(s.len(), bump);
+ t.push_str(s);
+ t
+ }
+
+ /// Construct a new `String<'bump>` from an iterator of `char`s.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::String};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// let s = String::from_iter_in(['h', 'e', 'l', 'l', 'o'].iter().cloned(), &b);
+ /// assert_eq!(s, "hello");
+ /// ```
+ pub fn from_iter_in<I: IntoIterator<Item = char>>(iter: I, bump: &'bump Bump) -> String<'bump> {
+ let mut s = String::new_in(bump);
+ for c in iter {
+ s.push(c);
+ }
+ s
+ }
+
+ /// Creates a new `String` from a length, capacity, and pointer.
+ ///
+ /// # Safety
+ ///
+ /// This is highly unsafe, due to the number of invariants that aren't
+ /// checked:
+ ///
+ /// * The memory at `ptr` needs to have been previously allocated by the
+ /// same allocator the standard library uses.
+ /// * `length` needs to be less than or equal to `capacity`.
+ /// * `capacity` needs to be the correct value.
+ ///
+ /// Violating these may cause problems like corrupting the allocator's
+ /// internal data structures.
+ ///
+ /// The ownership of `ptr` is effectively transferred to the
+ /// `String` which may then deallocate, reallocate or change the
+ /// contents of memory pointed to by the pointer at will. Ensure
+ /// that nothing else uses the pointer after calling this
+ /// function.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::String};
+ /// use std::mem;
+ ///
+ /// let b = Bump::new();
+ ///
+ /// unsafe {
+ /// let mut s = String::from_str_in("hello", &b);
+ /// let ptr = s.as_mut_ptr();
+ /// let len = s.len();
+ /// let capacity = s.capacity();
+ ///
+ /// mem::forget(s);
+ ///
+ /// let s = String::from_raw_parts_in(ptr, len, capacity, &b);
+ ///
+ /// assert_eq!(s, "hello");
+ /// }
+ /// ```
+ #[inline]
+ pub unsafe fn from_raw_parts_in(
+ buf: *mut u8,
+ length: usize,
+ capacity: usize,
+ bump: &'bump Bump,
+ ) -> String<'bump> {
+ String {
+ vec: Vec::from_raw_parts_in(buf, length, capacity, bump),
+ }
+ }
+
+ /// Converts a vector of bytes to a `String` without checking that the
+ /// string contains valid UTF-8.
+ ///
+ /// See the safe version, [`from_utf8`], for more details.
+ ///
+ /// [`from_utf8`]: struct.String.html#method.from_utf8
+ ///
+ /// # Safety
+ ///
+ /// This function is unsafe because it does not check that the bytes passed
+ /// to it are valid UTF-8. If this constraint is violated, it may cause
+ /// memory unsafety issues with future users of the `String`,
+ /// as it is assumed that `String`s are valid UTF-8.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::String};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// // some bytes, in a vector
+ /// let sparkle_heart = bumpalo::vec![in &b; 240, 159, 146, 150];
+ ///
+ /// let sparkle_heart = unsafe {
+ /// String::from_utf8_unchecked(sparkle_heart)
+ /// };
+ ///
+ /// assert_eq!("💖", sparkle_heart);
+ /// ```
+ #[inline]
+ pub unsafe fn from_utf8_unchecked(bytes: Vec<'bump, u8>) -> String<'bump> {
+ String { vec: bytes }
+ }
+
+ /// Returns a shared reference to the allocator backing this `String`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::String};
+ ///
+ /// // uses the same allocator as the provided `String`
+ /// fn copy_string<'bump>(s: &String<'bump>) -> &'bump str {
+ /// s.bump().alloc_str(s.as_str())
+ /// }
+ /// ```
+ #[inline]
+ #[must_use]
+ pub fn bump(&self) -> &'bump Bump {
+ self.vec.bump()
+ }
+
+ /// Converts a `String` into a byte vector.
+ ///
+ /// This consumes the `String`, so we do not need to copy its contents.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::String};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// let s = String::from_str_in("hello", &b);
+ ///
+ /// assert_eq!(s.into_bytes(), [104, 101, 108, 108, 111]);
+ /// ```
+ #[inline]
+ pub fn into_bytes(self) -> Vec<'bump, u8> {
+ self.vec
+ }
+
+    /// Convert this `String<'bump>` into a `&'bump str`. This is analogous to
+    /// [`std::string::String::into_boxed_str`][into_boxed_str].
+    ///
+    /// [into_boxed_str]: https://doc.rust-lang.org/std/string/struct.String.html#method.into_boxed_str
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// use bumpalo::{Bump, collections::String};
+    ///
+    /// let b = Bump::new();
+    ///
+    /// let s = String::from_str_in("foo", &b);
+    ///
+    /// assert_eq!(s.into_bump_str(), "foo");
+    /// ```
+    pub fn into_bump_str(self) -> &'bump str {
+        let s = unsafe {
+            let s = self.as_str();
+            // SAFETY: this transmute only extends the `&str` borrow's
+            // lifetime from `&self` to `'bump`. The bytes themselves live in
+            // the bump arena, and `mem::forget(self)` below guarantees `self`
+            // is never touched (or dropped) again, so no aliasing mutable
+            // access can follow. NOTE(review): confirm this matches
+            // upstream's soundness argument.
+            mem::transmute(s)
+        };
+        // Skip the destructor so the borrow handed out above stays valid.
+        mem::forget(self);
+        s
+    }
+
+    /// Extracts a string slice containing the entire `String`.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use bumpalo::{Bump, collections::String};
+    ///
+    /// let b = Bump::new();
+    ///
+    /// let s = String::from_str_in("foo", &b);
+    ///
+    /// assert_eq!("foo", s.as_str());
+    /// ```
+    #[inline]
+    pub fn as_str(&self) -> &str {
+        // `Deref<Target = str>` performs the actual conversion.
+        self
+    }
+
+    /// Converts a `String` into a mutable string slice.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use bumpalo::{Bump, collections::String};
+    ///
+    /// let b = Bump::new();
+    ///
+    /// let mut s = String::from_str_in("foobar", &b);
+    /// let s_mut_str = s.as_mut_str();
+    ///
+    /// s_mut_str.make_ascii_uppercase();
+    ///
+    /// assert_eq!("FOOBAR", s_mut_str);
+    /// ```
+    #[inline]
+    pub fn as_mut_str(&mut self) -> &mut str {
+        // `DerefMut` performs the actual conversion.
+        self
+    }
+
+    /// Appends a given string slice onto the end of this `String`.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use bumpalo::{Bump, collections::String};
+    ///
+    /// let b = Bump::new();
+    ///
+    /// let mut s = String::from_str_in("foo", &b);
+    ///
+    /// s.push_str("bar");
+    ///
+    /// assert_eq!("foobar", s);
+    /// ```
+    #[inline]
+    pub fn push_str(&mut self, string: &str) {
+        // Appending the complete bytes of a `&str` (already valid UTF-8)
+        // preserves this `String`'s UTF-8 invariant.
+        self.vec.extend_from_slice(string.as_bytes())
+    }
+
+    /// Returns this `String`'s capacity, in bytes.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use bumpalo::{Bump, collections::String};
+    ///
+    /// let b = Bump::new();
+    ///
+    /// let s = String::with_capacity_in(10, &b);
+    ///
+    /// assert!(s.capacity() >= 10);
+    /// ```
+    #[inline]
+    pub fn capacity(&self) -> usize {
+        // Capacity bookkeeping lives in the inner byte vector.
+        self.vec.capacity()
+    }
+
+    /// Ensures that this `String`'s capacity is at least `additional` bytes
+    /// larger than its length.
+    ///
+    /// The capacity may be increased by more than `additional` bytes if it
+    /// chooses, to prevent frequent reallocations.
+    ///
+    /// If you do not want this "at least" behavior, see the [`reserve_exact`]
+    /// method.
+    ///
+    /// # Panics
+    ///
+    /// Panics if the new capacity overflows [`usize`].
+    ///
+    /// [`reserve_exact`]: struct.String.html#method.reserve_exact
+    /// [`usize`]: https://doc.rust-lang.org/std/primitive.usize.html
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use bumpalo::{Bump, collections::String};
+    ///
+    /// let b = Bump::new();
+    ///
+    /// let mut s = String::new_in(&b);
+    ///
+    /// s.reserve(10);
+    ///
+    /// assert!(s.capacity() >= 10);
+    /// ```
+    ///
+    /// This may not actually increase the capacity:
+    ///
+    /// ```
+    /// use bumpalo::{Bump, collections::String};
+    ///
+    /// let b = Bump::new();
+    ///
+    /// let mut s = String::with_capacity_in(10, &b);
+    /// s.push('a');
+    /// s.push('b');
+    ///
+    /// // s now has a length of 2 and a capacity of 10
+    /// assert_eq!(2, s.len());
+    /// assert_eq!(10, s.capacity());
+    ///
+    /// // Since we already have an extra 8 capacity, calling this...
+    /// s.reserve(8);
+    ///
+    /// // ... doesn't actually increase.
+    /// assert_eq!(10, s.capacity());
+    /// ```
+    #[inline]
+    pub fn reserve(&mut self, additional: usize) {
+        // Delegates to the inner `Vec<u8>`; the growth strategy lives there.
+        self.vec.reserve(additional)
+    }
+
+    /// Ensures that this `String`'s capacity is `additional` bytes
+    /// larger than its length.
+    ///
+    /// Consider using the [`reserve`] method unless you absolutely know
+    /// better than the allocator.
+    ///
+    /// [`reserve`]: #method.reserve
+    ///
+    /// # Panics
+    ///
+    /// Panics if the new capacity overflows `usize`.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use bumpalo::{Bump, collections::String};
+    ///
+    /// let b = Bump::new();
+    ///
+    /// let mut s = String::new_in(&b);
+    ///
+    /// s.reserve_exact(10);
+    ///
+    /// assert!(s.capacity() >= 10);
+    /// ```
+    ///
+    /// This may not actually increase the capacity:
+    ///
+    /// ```
+    /// use bumpalo::{Bump, collections::String};
+    ///
+    /// let b = Bump::new();
+    ///
+    /// let mut s = String::with_capacity_in(10, &b);
+    /// s.push('a');
+    /// s.push('b');
+    ///
+    /// // s now has a length of 2 and a capacity of 10
+    /// assert_eq!(2, s.len());
+    /// assert_eq!(10, s.capacity());
+    ///
+    /// // Since we already have an extra 8 capacity, calling this...
+    /// s.reserve_exact(8);
+    ///
+    /// // ... doesn't actually increase.
+    /// assert_eq!(10, s.capacity());
+    /// ```
+    #[inline]
+    pub fn reserve_exact(&mut self, additional: usize) {
+        // Delegates to the inner `Vec<u8>`'s exact-reservation path.
+        self.vec.reserve_exact(additional)
+    }
+
+    /// Shrinks the capacity of this `String` to match its length.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use bumpalo::{Bump, collections::String};
+    ///
+    /// let b = Bump::new();
+    ///
+    /// let mut s = String::from_str_in("foo", &b);
+    ///
+    /// s.reserve(100);
+    /// assert!(s.capacity() >= 100);
+    ///
+    /// s.shrink_to_fit();
+    /// assert_eq!(3, s.capacity());
+    /// ```
+    #[inline]
+    pub fn shrink_to_fit(&mut self) {
+        // Delegates to the inner vector's shrink logic.
+        self.vec.shrink_to_fit()
+    }
+
+    /// Appends the given [`char`] to the end of this `String`.
+    ///
+    /// [`char`]: https://doc.rust-lang.org/std/primitive.char.html
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use bumpalo::{Bump, collections::String};
+    ///
+    /// let b = Bump::new();
+    ///
+    /// let mut s = String::from_str_in("abc", &b);
+    ///
+    /// s.push('1');
+    /// s.push('2');
+    /// s.push('3');
+    ///
+    /// assert_eq!("abc123", s);
+    /// ```
+    #[inline]
+    pub fn push(&mut self, ch: char) {
+        match ch.len_utf8() {
+            // Fast path: a 1-byte (ASCII) char is pushed directly.
+            1 => self.vec.push(ch as u8),
+            // Otherwise encode into a 4-byte stack buffer (the maximum
+            // UTF-8 width) and append the encoded bytes.
+            _ => self
+                .vec
+                .extend_from_slice(ch.encode_utf8(&mut [0; 4]).as_bytes()),
+        }
+    }
+
+    /// Returns a byte slice of this `String`'s contents.
+    ///
+    /// The inverse of this method is [`from_utf8`].
+    ///
+    /// [`from_utf8`]: #method.from_utf8
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use bumpalo::{Bump, collections::String};
+    ///
+    /// let b = Bump::new();
+    ///
+    /// let s = String::from_str_in("hello", &b);
+    ///
+    /// assert_eq!(&[104, 101, 108, 108, 111], s.as_bytes());
+    /// ```
+    #[inline]
+    pub fn as_bytes(&self) -> &[u8] {
+        // `Vec` derefs to `&[u8]`; only a shared (read-only) view is
+        // exposed, so the UTF-8 invariant cannot be violated through it.
+        &self.vec
+    }
+
+    /// Shortens this `String` to the specified length.
+    ///
+    /// If `new_len` is greater than the string's current length, this has no
+    /// effect.
+    ///
+    /// Note that this method has no effect on the allocated capacity
+    /// of the string.
+    ///
+    /// # Panics
+    ///
+    /// Panics if `new_len` does not lie on a [`char`] boundary.
+    ///
+    /// [`char`]: https://doc.rust-lang.org/std/primitive.char.html
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use bumpalo::{Bump, collections::String};
+    ///
+    /// let b = Bump::new();
+    ///
+    /// let mut s = String::from_str_in("hello", &b);
+    ///
+    /// s.truncate(2);
+    ///
+    /// assert_eq!("he", s);
+    /// ```
+    #[inline]
+    pub fn truncate(&mut self, new_len: usize) {
+        // A `new_len` beyond the current length is a silent no-op (not a
+        // panic); the char-boundary assertion only applies when we actually
+        // shrink the string.
+        if new_len <= self.len() {
+            assert!(self.is_char_boundary(new_len));
+            self.vec.truncate(new_len)
+        }
+    }
+
+    /// Removes the last character from the string buffer and returns it.
+    ///
+    /// Returns [`None`] if this `String` is empty.
+    ///
+    /// [`None`]: https://doc.rust-lang.org/std/option/enum.Option.html#variant.None
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use bumpalo::{Bump, collections::String};
+    ///
+    /// let b = Bump::new();
+    ///
+    /// let mut s = String::from_str_in("foo", &b);
+    ///
+    /// assert_eq!(s.pop(), Some('o'));
+    /// assert_eq!(s.pop(), Some('o'));
+    /// assert_eq!(s.pop(), Some('f'));
+    ///
+    /// assert_eq!(s.pop(), None);
+    /// ```
+    #[inline]
+    pub fn pop(&mut self) -> Option<char> {
+        // `next_back()` is the idiomatic (clippy-preferred) equivalent of
+        // `rev().next()` on a double-ended iterator; `Rev::next` is defined
+        // as exactly this call.
+        let ch = self.chars().next_back()?;
+        let newlen = self.len() - ch.len_utf8();
+        // SAFETY: `newlen` drops exactly the final char's complete UTF-8
+        // encoding, so `newlen <= len` and the remaining prefix is still
+        // valid UTF-8.
+        unsafe {
+            self.vec.set_len(newlen);
+        }
+        Some(ch)
+    }
+
+    /// Removes a [`char`] from this `String` at a byte position and returns it.
+    ///
+    /// This is an `O(n)` operation, as it requires copying every element in the
+    /// buffer.
+    ///
+    /// # Panics
+    ///
+    /// Panics if `idx` is larger than or equal to the `String`'s length,
+    /// or if it does not lie on a [`char`] boundary.
+    ///
+    /// [`char`]: https://doc.rust-lang.org/std/primitive.char.html
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use bumpalo::{Bump, collections::String};
+    ///
+    /// let b = Bump::new();
+    ///
+    /// let mut s = String::from_str_in("foo", &b);
+    ///
+    /// assert_eq!(s.remove(0), 'f');
+    /// assert_eq!(s.remove(1), 'o');
+    /// assert_eq!(s.remove(0), 'o');
+    /// ```
+    #[inline]
+    pub fn remove(&mut self, idx: usize) -> char {
+        // Slicing `self[idx..]` performs the bounds and char-boundary checks;
+        // an empty tail means `idx == len`, which is an error.
+        let ch = match self[idx..].chars().next() {
+            Some(ch) => ch,
+            None => panic!("cannot remove a char from the end of a string"),
+        };
+
+        let next = idx + ch.len_utf8();
+        let len = self.len();
+        // SAFETY: shift the tail `[next, len)` left over the removed char's
+        // bytes (`ptr::copy` handles the overlap), then shrink the length by
+        // the char's width. Whole-char moves keep the buffer valid UTF-8.
+        unsafe {
+            ptr::copy(
+                self.vec.as_ptr().add(next),
+                self.vec.as_mut_ptr().add(idx),
+                len - next,
+            );
+            self.vec.set_len(len - (next - idx));
+        }
+        ch
+    }
+
+    /// Retains only the characters specified by the predicate.
+    ///
+    /// In other words, remove all characters `c` such that `f(c)` returns `false`.
+    /// This method operates in place and preserves the order of the retained
+    /// characters.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use bumpalo::{Bump, collections::String};
+    ///
+    /// let b = Bump::new();
+    ///
+    /// let mut s = String::from_str_in("f_o_ob_ar", &b);
+    ///
+    /// s.retain(|c| c != '_');
+    ///
+    /// assert_eq!(s, "foobar");
+    /// ```
+    #[inline]
+    pub fn retain<F>(&mut self, mut f: F)
+    where
+        F: FnMut(char) -> bool,
+    {
+        let len = self.len();
+        // Number of bytes dropped so far; also the distance each retained
+        // char is shifted leftwards.
+        let mut del_bytes = 0;
+        let mut idx = 0;
+
+        while idx < len {
+            // SAFETY: `idx` starts at 0 and only advances by whole-char
+            // widths, so it is always a char boundary within `len`.
+            let ch = unsafe { self.get_unchecked(idx..len).chars().next().unwrap() };
+            let ch_len = ch.len_utf8();
+
+            if !f(ch) {
+                del_bytes += ch_len;
+            } else if del_bytes > 0 {
+                // SAFETY: compact the retained char down by `del_bytes`;
+                // source and destination are both within the buffer and
+                // whole chars are moved, so the bytes stay valid UTF-8.
+                unsafe {
+                    ptr::copy(
+                        self.vec.as_ptr().add(idx),
+                        self.vec.as_mut_ptr().add(idx - del_bytes),
+                        ch_len,
+                    );
+                }
+            }
+
+            // Point idx to the next char
+            idx += ch_len;
+        }
+
+        // Commit the new length once, after the scan.
+        if del_bytes > 0 {
+            // SAFETY: `len - del_bytes` counts exactly the retained chars'
+            // bytes, all of which have been compacted to the front.
+            unsafe {
+                self.vec.set_len(len - del_bytes);
+            }
+        }
+    }
+
+    /// Inserts a character into this `String` at a byte position.
+    ///
+    /// This is an `O(n)` operation as it requires copying every element in the
+    /// buffer.
+    ///
+    /// # Panics
+    ///
+    /// Panics if `idx` is larger than the `String`'s length, or if it does not
+    /// lie on a [`char`] boundary.
+    ///
+    /// [`char`]: https://doc.rust-lang.org/std/primitive.char.html
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use bumpalo::{Bump, collections::String};
+    ///
+    /// let b = Bump::new();
+    ///
+    /// let mut s = String::with_capacity_in(3, &b);
+    ///
+    /// s.insert(0, 'f');
+    /// s.insert(1, 'o');
+    /// s.insert(2, 'o');
+    ///
+    /// assert_eq!("foo", s);
+    /// ```
+    #[inline]
+    pub fn insert(&mut self, idx: usize, ch: char) {
+        assert!(self.is_char_boundary(idx));
+        // Encode the char into a 4-byte stack buffer (max UTF-8 width).
+        let mut bits = [0; 4];
+        let bits = ch.encode_utf8(&mut bits).as_bytes();
+
+        // SAFETY: `idx` was asserted to be a char boundary (hence <= len)
+        // and `bits` is the valid UTF-8 encoding of `ch`.
+        unsafe {
+            self.insert_bytes(idx, bits);
+        }
+    }
+
+    /// Shifts the tail of the buffer right and copies `bytes` in at `idx`.
+    ///
+    /// # Safety
+    ///
+    /// `idx` must be `<= self.len()` and lie on a char boundary, and `bytes`
+    /// must be valid UTF-8 (callers pass `str::as_bytes` or
+    /// `char::encode_utf8` output), or the `String`'s UTF-8 invariant is
+    /// broken.
+    unsafe fn insert_bytes(&mut self, idx: usize, bytes: &[u8]) {
+        let len = self.len();
+        let amt = bytes.len();
+        // Ensure room for `amt` extra bytes before taking raw pointers.
+        self.vec.reserve(amt);
+
+        // Shift the tail `[idx, len)` right by `amt` (overlap-safe copy),
+        // then write the new bytes into the gap and commit the new length.
+        ptr::copy(
+            self.vec.as_ptr().add(idx),
+            self.vec.as_mut_ptr().add(idx + amt),
+            len - idx,
+        );
+        ptr::copy(bytes.as_ptr(), self.vec.as_mut_ptr().add(idx), amt);
+        self.vec.set_len(len + amt);
+    }
+
+    /// Inserts a string slice into this `String` at a byte position.
+    ///
+    /// This is an `O(n)` operation as it requires copying every element in the
+    /// buffer.
+    ///
+    /// # Panics
+    ///
+    /// Panics if `idx` is larger than the `String`'s length, or if it does not
+    /// lie on a [`char`] boundary.
+    ///
+    /// [`char`]: https://doc.rust-lang.org/std/primitive.char.html
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use bumpalo::{Bump, collections::String};
+    ///
+    /// let b = Bump::new();
+    ///
+    /// let mut s = String::from_str_in("bar", &b);
+    ///
+    /// s.insert_str(0, "foo");
+    ///
+    /// assert_eq!("foobar", s);
+    /// ```
+    #[inline]
+    pub fn insert_str(&mut self, idx: usize, string: &str) {
+        assert!(self.is_char_boundary(idx));
+
+        // SAFETY: `idx` was asserted to be a char boundary (hence <= len)
+        // and a `&str`'s bytes are valid UTF-8 by definition.
+        unsafe {
+            self.insert_bytes(idx, string.as_bytes());
+        }
+    }
+
+    /// Returns a mutable reference to the contents of this `String`.
+    ///
+    /// # Safety
+    ///
+    /// This function is unsafe because the returned `&mut Vec` allows writing
+    /// bytes which are not valid UTF-8. If this constraint is violated, using
+    /// the original `String` after dropping the `&mut Vec` may violate memory
+    /// safety, as it is assumed that `String`s are valid UTF-8.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use bumpalo::{Bump, collections::String};
+    ///
+    /// let b = Bump::new();
+    ///
+    /// let mut s = String::from_str_in("hello", &b);
+    ///
+    /// unsafe {
+    ///     let vec = s.as_mut_vec();
+    ///     assert_eq!(vec, &[104, 101, 108, 108, 111]);
+    ///
+    ///     vec.reverse();
+    /// }
+    /// assert_eq!(s, "olleh");
+    /// ```
+    #[inline]
+    pub unsafe fn as_mut_vec(&mut self) -> &mut Vec<'bump, u8> {
+        // Hands out the raw byte vector; upholding the UTF-8 invariant is
+        // the caller's obligation (see `# Safety` above).
+        &mut self.vec
+    }
+
+    /// Returns the length of this `String`, in bytes.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use bumpalo::{Bump, collections::String};
+    ///
+    /// let b = Bump::new();
+    ///
+    /// let a = String::from_str_in("foo", &b);
+    ///
+    /// assert_eq!(a.len(), 3);
+    /// ```
+    #[inline]
+    pub fn len(&self) -> usize {
+        // Byte length, not the number of chars.
+        self.vec.len()
+    }
+
+    /// Returns `true` if this `String` has a length of zero.
+    ///
+    /// Returns `false` otherwise.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use bumpalo::{Bump, collections::String};
+    ///
+    /// let b = Bump::new();
+    ///
+    /// let mut v = String::new_in(&b);
+    /// assert!(v.is_empty());
+    ///
+    /// v.push('a');
+    /// assert!(!v.is_empty());
+    /// ```
+    #[inline]
+    pub fn is_empty(&self) -> bool {
+        // Zero bytes implies zero chars, so this is exact.
+        self.len() == 0
+    }
+
+    /// Splits the string into two at the given index.
+    ///
+    /// Returns a newly allocated `String`. `self` contains bytes `[0, at)`, and
+    /// the returned `String` contains bytes `[at, len)`. `at` must be on the
+    /// boundary of a UTF-8 code point.
+    ///
+    /// Note that the capacity of `self` does not change.
+    ///
+    /// # Panics
+    ///
+    /// Panics if `at` is not on a UTF-8 code point boundary, or if it is beyond the last
+    /// code point of the string.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use bumpalo::{Bump, collections::String};
+    ///
+    /// let b = Bump::new();
+    ///
+    /// let mut hello = String::from_str_in("Hello, World!", &b);
+    /// let world = hello.split_off(7);
+    /// assert_eq!(hello, "Hello, ");
+    /// assert_eq!(world, "World!");
+    /// ```
+    #[inline]
+    pub fn split_off(&mut self, at: usize) -> String<'bump> {
+        assert!(self.is_char_boundary(at));
+        let other = self.vec.split_off(at);
+        // SAFETY: splitting valid UTF-8 at a char boundary leaves both
+        // halves valid UTF-8.
+        unsafe { String::from_utf8_unchecked(other) }
+    }
+
+    /// Truncates this `String`, removing all contents.
+    ///
+    /// While this means the `String` will have a length of zero, it does not
+    /// touch its capacity.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use bumpalo::{Bump, collections::String};
+    ///
+    /// let b = Bump::new();
+    ///
+    /// let mut s = String::from_str_in("foo", &b);
+    ///
+    /// s.clear();
+    ///
+    /// assert!(s.is_empty());
+    /// assert_eq!(0, s.len());
+    /// assert_eq!(3, s.capacity());
+    /// ```
+    #[inline]
+    pub fn clear(&mut self) {
+        // Resets the length to zero; the allocation is retained.
+        self.vec.clear()
+    }
+
+    /// Creates a draining iterator that removes the specified range in the `String`
+    /// and yields the removed `chars`.
+    ///
+    /// Note: The element range is removed even if the iterator is not
+    /// consumed until the end.
+    ///
+    /// # Panics
+    ///
+    /// Panics if the starting point or end point do not lie on a [`char`]
+    /// boundary, or if they're out of bounds.
+    ///
+    /// [`char`]: https://doc.rust-lang.org/std/primitive.char.html
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use bumpalo::{Bump, collections::String};
+    ///
+    /// let b = Bump::new();
+    ///
+    /// let mut s = String::from_str_in("α is alpha, β is beta", &b);
+    /// let beta_offset = s.find('β').unwrap_or(s.len());
+    ///
+    /// // Remove the range up until the β from the string
+    /// let t = String::from_iter_in(s.drain(..beta_offset), &b);
+    /// assert_eq!(t, "α is alpha, ");
+    /// assert_eq!(s, "β is beta");
+    ///
+    /// // A full range clears the string
+    /// drop(s.drain(..));
+    /// assert_eq!(s, "");
+    /// ```
+    pub fn drain<'a, R>(&'a mut self, range: R) -> Drain<'a, 'bump>
+    where
+        R: RangeBounds<usize>,
+    {
+        // Memory safety
+        //
+        // The String version of Drain does not have the memory safety issues
+        // of the vector version. The data is just plain bytes.
+        // Because the range removal happens in Drop, if the Drain iterator is leaked,
+        // the removal will not happen.
+        let len = self.len();
+        // Normalize the generic range bounds into concrete byte offsets.
+        let start = match range.start_bound() {
+            Included(&n) => n,
+            Excluded(&n) => n + 1,
+            Unbounded => 0,
+        };
+        let end = match range.end_bound() {
+            Included(&n) => n + 1,
+            Excluded(&n) => n,
+            Unbounded => len,
+        };
+
+        // Take out two simultaneous borrows. The &mut String won't be accessed
+        // until iteration is over, in Drop.
+        let self_ptr = self as *mut _;
+        // slicing does the appropriate bounds checks
+        // (out-of-range offsets or non-char-boundaries panic here).
+        let chars_iter = self[start..end].chars();
+
+        Drain {
+            start,
+            end,
+            iter: chars_iter,
+            string: self_ptr,
+        }
+    }
+
+    /// Removes the specified range in the string,
+    /// and replaces it with the given string.
+    /// The given string doesn't need to be the same length as the range.
+    ///
+    /// # Panics
+    ///
+    /// Panics if the starting point or end point do not lie on a [`char`]
+    /// boundary, or if they're out of bounds.
+    ///
+    /// [`char`]: https://doc.rust-lang.org/std/primitive.char.html
+    /// [`Vec::splice`]: ../vec/struct.Vec.html#method.splice
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use bumpalo::{Bump, collections::String};
+    ///
+    /// let b = Bump::new();
+    ///
+    /// let mut s = String::from_str_in("α is alpha, β is beta", &b);
+    /// let beta_offset = s.find('β').unwrap_or(s.len());
+    ///
+    /// // Replace the range up until the β from the string
+    /// s.replace_range(..beta_offset, "Α is capital alpha; ");
+    /// assert_eq!(s, "Α is capital alpha; β is beta");
+    /// ```
+    pub fn replace_range<R>(&mut self, range: R, replace_with: &str)
+    where
+        R: RangeBounds<usize>,
+    {
+        // Memory safety
+        //
+        // `replace_range` does not have the memory safety issues of the
+        // vector `Splice` iterator. The data is just plain bytes.
+
+        // Validate that both effective endpoints are char boundaries before
+        // touching the bytes.
+        match range.start_bound() {
+            Included(&n) => assert!(self.is_char_boundary(n)),
+            Excluded(&n) => assert!(self.is_char_boundary(n + 1)),
+            Unbounded => {}
+        };
+        match range.end_bound() {
+            Included(&n) => assert!(self.is_char_boundary(n + 1)),
+            Excluded(&n) => assert!(self.is_char_boundary(n)),
+            Unbounded => {}
+        };
+
+        // The replacement bytes come from a `&str`, so splicing them in at
+        // char boundaries keeps the buffer valid UTF-8.
+        unsafe { self.as_mut_vec() }.splice(range, replace_with.bytes());
+    }
+}
+
+// Accessors for the invalid bytes and the underlying UTF-8 error carried by
+// a failed `String::from_utf8` conversion.
+impl<'bump> FromUtf8Error<'bump> {
+    /// Returns a slice of bytes that were attempted to convert to a `String`.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use bumpalo::{Bump, collections::String};
+    ///
+    /// let b = Bump::new();
+    ///
+    /// // some invalid bytes, in a vector
+    /// let bytes = bumpalo::vec![in &b; 0, 159];
+    ///
+    /// let value = String::from_utf8(bytes);
+    ///
+    /// assert_eq!(&[0, 159], value.unwrap_err().as_bytes());
+    /// ```
+    pub fn as_bytes(&self) -> &[u8] {
+        &self.bytes[..]
+    }
+
+    /// Returns the bytes that were attempted to convert to a `String`.
+    ///
+    /// This method is carefully constructed to avoid allocation. It will
+    /// consume the error, moving out the bytes, so that a copy of the bytes
+    /// does not need to be made.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use bumpalo::{Bump, collections::String};
+    ///
+    /// let b = Bump::new();
+    ///
+    /// // some invalid bytes, in a vector
+    /// let bytes = bumpalo::vec![in &b; 0, 159];
+    ///
+    /// let value = String::from_utf8(bytes);
+    ///
+    /// assert_eq!(bumpalo::vec![in &b; 0, 159], value.unwrap_err().into_bytes());
+    /// ```
+    pub fn into_bytes(self) -> Vec<'bump, u8> {
+        // Moves the vector out of the error; no copy.
+        self.bytes
+    }
+
+    /// Fetch a `Utf8Error` to get more details about the conversion failure.
+    ///
+    /// The [`Utf8Error`] type provided by [`std::str`] represents an error that may
+    /// occur when converting a slice of [`u8`]s to a [`&str`]. In this sense, it's
+    /// an analogue to `FromUtf8Error`. See its documentation for more details
+    /// on using it.
+    ///
+    /// [`Utf8Error`]: https://doc.rust-lang.org/std/str/struct.Utf8Error.html
+    /// [`std::str`]: https://doc.rust-lang.org/std/str/index.html
+    /// [`u8`]: https://doc.rust-lang.org/std/primitive.u8.html
+    /// [`&str`]: https://doc.rust-lang.org/std/primitive.str.html
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use bumpalo::{Bump, collections::String};
+    ///
+    /// let b = Bump::new();
+    ///
+    /// // some invalid bytes, in a vector
+    /// let bytes = bumpalo::vec![in &b; 0, 159];
+    ///
+    /// let error = String::from_utf8(bytes).unwrap_err().utf8_error();
+    ///
+    /// // the first byte is invalid here
+    /// assert_eq!(1, error.valid_up_to());
+    /// ```
+    pub fn utf8_error(&self) -> Utf8Error {
+        // `Utf8Error` is `Copy`, so this returns it by value.
+        self.error
+    }
+}
+
+// User-facing rendering for the UTF-8 error: defer to the inner `Utf8Error`.
+impl<'bump> fmt::Display for FromUtf8Error<'bump> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        fmt::Display::fmt(&self.error, f)
+    }
+}
+
+// `FromUtf16Error` carries no detail beyond the fact of failure, so the
+// message is a fixed string.
+impl fmt::Display for FromUtf16Error {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        fmt::Display::fmt("invalid utf-16: lone surrogate found", f)
+    }
+}
+
+impl<'bump> Clone for String<'bump> {
+    fn clone(&self) -> Self {
+        String {
+            vec: self.vec.clone(),
+        }
+    }
+
+    // Overridden so the destination's existing allocation can be reused by
+    // the inner vector's `clone_from`.
+    fn clone_from(&mut self, source: &Self) {
+        self.vec.clone_from(&source.vec);
+    }
+}
+
+impl<'bump> Extend<char> for String<'bump> {
+    fn extend<I: IntoIterator<Item = char>>(&mut self, iter: I) {
+        let iterator = iter.into_iter();
+        // Reserve at least the size hint's lower bound up front; each char
+        // takes >= 1 byte, so this is a valid minimum.
+        let (lower_bound, _) = iterator.size_hint();
+        self.reserve(lower_bound);
+        for ch in iterator {
+            self.push(ch)
+        }
+    }
+}
+
+// Forwards to the by-value `Extend<char>` impl above.
+impl<'a, 'bump> Extend<&'a char> for String<'bump> {
+    fn extend<I: IntoIterator<Item = &'a char>>(&mut self, iter: I) {
+        self.extend(iter.into_iter().cloned());
+    }
+}
+
+// The remaining impls all append each item via `push_str`.
+impl<'a, 'bump> Extend<&'a str> for String<'bump> {
+    fn extend<I: IntoIterator<Item = &'a str>>(&mut self, iter: I) {
+        for s in iter {
+            self.push_str(s)
+        }
+    }
+}
+
+impl<'bump> Extend<String<'bump>> for String<'bump> {
+    fn extend<I: IntoIterator<Item = String<'bump>>>(&mut self, iter: I) {
+        for s in iter {
+            self.push_str(&s)
+        }
+    }
+}
+
+impl<'bump> Extend<core_alloc::string::String> for String<'bump> {
+    fn extend<I: IntoIterator<Item = core_alloc::string::String>>(&mut self, iter: I) {
+        for s in iter {
+            self.push_str(&s)
+        }
+    }
+}
+
+impl<'a, 'bump> Extend<Cow<'a, str>> for String<'bump> {
+    fn extend<I: IntoIterator<Item = Cow<'a, str>>>(&mut self, iter: I) {
+        for s in iter {
+            self.push_str(&s)
+        }
+    }
+}
+
+impl<'bump> PartialEq for String<'bump> {
+    #[inline]
+    fn eq(&self, other: &String) -> bool {
+        // Compare as `str` slices (`self[..]` goes through `Index<RangeFull>`).
+        PartialEq::eq(&self[..], &other[..])
+    }
+}
+
+// Generates the symmetric pair of `PartialEq` impls between a `String`-like
+// type and another string type; both directions compare as `str` slices.
+macro_rules! impl_eq {
+    ($lhs:ty, $rhs: ty) => {
+        impl<'a, 'bump> PartialEq<$rhs> for $lhs {
+            #[inline]
+            fn eq(&self, other: &$rhs) -> bool {
+                PartialEq::eq(&self[..], &other[..])
+            }
+        }
+
+        impl<'a, 'b, 'bump> PartialEq<$lhs> for $rhs {
+            #[inline]
+            fn eq(&self, other: &$lhs) -> bool {
+                PartialEq::eq(&self[..], &other[..])
+            }
+        }
+    };
+}
+
+impl_eq! { String<'bump>, str }
+impl_eq! { String<'bump>, &'a str }
+impl_eq! { Cow<'a, str>, String<'bump> }
+impl_eq! { core_alloc::string::String, String<'bump> }
+
+// Display, Debug, and Hash all delegate to `str` through `Deref` (`&**self`),
+// so a `String<'bump>` formats and hashes exactly like the `&str` it contains.
+impl<'bump> fmt::Display for String<'bump> {
+    #[inline]
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        fmt::Display::fmt(&**self, f)
+    }
+}
+
+impl<'bump> fmt::Debug for String<'bump> {
+    #[inline]
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        fmt::Debug::fmt(&**self, f)
+    }
+}
+
+impl<'bump> hash::Hash for String<'bump> {
+    #[inline]
+    fn hash<H: hash::Hasher>(&self, hasher: &mut H) {
+        (**self).hash(hasher)
+    }
+}
+
+/// Implements the `+` operator for concatenating two strings.
+///
+/// This consumes the `String<'bump>` on the left-hand side and re-uses its buffer (growing it if
+/// necessary). This is done to avoid allocating a new `String<'bump>` and copying the entire contents on
+/// every operation, which would lead to `O(n^2)` running time when building an `n`-byte string by
+/// repeated concatenation.
+///
+/// The string on the right-hand side is only borrowed; its contents are copied into the returned
+/// `String<'bump>`.
+///
+/// # Examples
+///
+/// Concatenating two `String<'bump>`s takes the first by value and borrows the second:
+///
+/// ```
+/// use bumpalo::{Bump, collections::String};
+///
+/// let bump = Bump::new();
+///
+/// let a = String::from_str_in("hello", &bump);
+/// let b = String::from_str_in(" world", &bump);
+/// let c = a + &b;
+/// // `a` is moved and can no longer be used here.
+/// ```
+///
+/// If you want to keep using the first `String`, you can clone it and append to the clone instead:
+///
+/// ```
+/// use bumpalo::{Bump, collections::String};
+///
+/// let bump = Bump::new();
+///
+/// let a = String::from_str_in("hello", &bump);
+/// let b = String::from_str_in(" world", &bump);
+/// let c = a.clone() + &b;
+/// // `a` is still valid here.
+/// ```
+///
+/// Concatenating `&str` slices can be done by converting the first to a `String`:
+///
+/// ```
+/// use bumpalo::{Bump, collections::String};
+///
+/// let bump = Bump::new();
+///
+/// let a = "hello";
+/// let b = " world";
+/// let c = String::from_str_in(a, &bump) + b;
+/// ```
+impl<'a, 'bump> Add<&'a str> for String<'bump> {
+    type Output = String<'bump>;
+
+    #[inline]
+    fn add(mut self, other: &str) -> String<'bump> {
+        // Append in place and return the (moved) left-hand side buffer.
+        self.push_str(other);
+        self
+    }
+}
+
+/// Implements the `+=` operator for appending to a `String<'bump>`.
+///
+/// This has the same behavior as the [`push_str`][String::push_str] method.
+impl<'a, 'bump> AddAssign<&'a str> for String<'bump> {
+    #[inline]
+    fn add_assign(&mut self, other: &str) {
+        // Identical to `push_str`; provided for operator ergonomics.
+        self.push_str(other);
+    }
+}
+
+// Range indexing: `RangeFull` is the base case (reinterprets the raw bytes
+// as `str`); every other range kind re-slices `&self[..]`, so `str`'s own
+// indexing performs the bounds and char-boundary checks.
+impl<'bump> ops::Index<ops::Range<usize>> for String<'bump> {
+    type Output = str;
+
+    #[inline]
+    fn index(&self, index: ops::Range<usize>) -> &str {
+        &self[..][index]
+    }
+}
+impl<'bump> ops::Index<ops::RangeTo<usize>> for String<'bump> {
+    type Output = str;
+
+    #[inline]
+    fn index(&self, index: ops::RangeTo<usize>) -> &str {
+        &self[..][index]
+    }
+}
+impl<'bump> ops::Index<ops::RangeFrom<usize>> for String<'bump> {
+    type Output = str;
+
+    #[inline]
+    fn index(&self, index: ops::RangeFrom<usize>) -> &str {
+        &self[..][index]
+    }
+}
+impl<'bump> ops::Index<ops::RangeFull> for String<'bump> {
+    type Output = str;
+
+    #[inline]
+    fn index(&self, _index: ops::RangeFull) -> &str {
+        // SAFETY: the type invariant guarantees `self.vec` is valid UTF-8.
+        unsafe { str::from_utf8_unchecked(&self.vec) }
+    }
+}
+impl<'bump> ops::Index<ops::RangeInclusive<usize>> for String<'bump> {
+    type Output = str;
+
+    #[inline]
+    fn index(&self, index: ops::RangeInclusive<usize>) -> &str {
+        Index::index(&**self, index)
+    }
+}
+impl<'bump> ops::Index<ops::RangeToInclusive<usize>> for String<'bump> {
+    type Output = str;
+
+    #[inline]
+    fn index(&self, index: ops::RangeToInclusive<usize>) -> &str {
+        Index::index(&**self, index)
+    }
+}
+
+// Mutable range indexing mirrors the immutable impls above: `RangeFull` is
+// the base case, the rest re-slice through `&mut self[..]`.
+impl<'bump> ops::IndexMut<ops::Range<usize>> for String<'bump> {
+    #[inline]
+    fn index_mut(&mut self, index: ops::Range<usize>) -> &mut str {
+        &mut self[..][index]
+    }
+}
+impl<'bump> ops::IndexMut<ops::RangeTo<usize>> for String<'bump> {
+    #[inline]
+    fn index_mut(&mut self, index: ops::RangeTo<usize>) -> &mut str {
+        &mut self[..][index]
+    }
+}
+impl<'bump> ops::IndexMut<ops::RangeFrom<usize>> for String<'bump> {
+    #[inline]
+    fn index_mut(&mut self, index: ops::RangeFrom<usize>) -> &mut str {
+        &mut self[..][index]
+    }
+}
+impl<'bump> ops::IndexMut<ops::RangeFull> for String<'bump> {
+    #[inline]
+    fn index_mut(&mut self, _index: ops::RangeFull) -> &mut str {
+        // SAFETY: the type invariant guarantees the bytes are valid UTF-8;
+        // `&mut str` only permits UTF-8-preserving mutation.
+        unsafe { str::from_utf8_unchecked_mut(&mut *self.vec) }
+    }
+}
+impl<'bump> ops::IndexMut<ops::RangeInclusive<usize>> for String<'bump> {
+    #[inline]
+    fn index_mut(&mut self, index: ops::RangeInclusive<usize>) -> &mut str {
+        IndexMut::index_mut(&mut **self, index)
+    }
+}
+impl<'bump> ops::IndexMut<ops::RangeToInclusive<usize>> for String<'bump> {
+    #[inline]
+    fn index_mut(&mut self, index: ops::RangeToInclusive<usize>) -> &mut str {
+        IndexMut::index_mut(&mut **self, index)
+    }
+}
+
+impl<'bump> ops::Deref for String<'bump> {
+    type Target = str;
+
+    #[inline]
+    fn deref(&self) -> &str {
+        // SAFETY: the type invariant guarantees `self.vec` always holds
+        // valid UTF-8.
+        unsafe { str::from_utf8_unchecked(&self.vec) }
+    }
+}
+
+impl<'bump> ops::DerefMut for String<'bump> {
+    #[inline]
+    fn deref_mut(&mut self) -> &mut str {
+        // SAFETY: the bytes are valid UTF-8 by the type invariant, and
+        // `&mut str` only allows UTF-8-preserving mutation.
+        unsafe { str::from_utf8_unchecked_mut(&mut *self.vec) }
+    }
+}
+
+// Cheap reference conversions: view as a `str` (via `Deref`) or as the raw
+// UTF-8 bytes.
+impl<'bump> AsRef<str> for String<'bump> {
+    #[inline]
+    fn as_ref(&self) -> &str {
+        self
+    }
+}
+
+impl<'bump> AsRef<[u8]> for String<'bump> {
+    #[inline]
+    fn as_ref(&self) -> &[u8] {
+        self.as_bytes()
+    }
+}
+
+// Lets `write!`/`writeln!` target a bump-allocated `String`. Appending can
+// never fail here, so both methods always return `Ok(())`.
+impl<'bump> fmt::Write for String<'bump> {
+    #[inline]
+    fn write_str(&mut self, s: &str) -> fmt::Result {
+        self.push_str(s);
+        Ok(())
+    }
+
+    #[inline]
+    fn write_char(&mut self, c: char) -> fmt::Result {
+        self.push(c);
+        Ok(())
+    }
+}
+
+// Borrow as `str`, e.g. for map lookups keyed by string slices.
+impl<'bump> Borrow<str> for String<'bump> {
+    #[inline]
+    fn borrow(&self) -> &str {
+        &self[..]
+    }
+}
+
+impl<'bump> BorrowMut<str> for String<'bump> {
+    #[inline]
+    fn borrow_mut(&mut self) -> &mut str {
+        &mut self[..]
+    }
+}
+
+/// A draining iterator for `String`.
+///
+/// This struct is created by the [`String::drain`] method. See its
+/// documentation for more information.
+pub struct Drain<'a, 'bump> {
+    /// Will be used as &'a mut String in the destructor.
+    /// A raw pointer (rather than a `&'a mut String`) so the `Chars`
+    /// iterator below can borrow the string's bytes simultaneously; it is
+    /// only dereferenced in `Drop`.
+    string: *mut String<'bump>,
+    /// Start of part to remove (byte offset)
+    start: usize,
+    /// End of part to remove (byte offset)
+    end: usize,
+    /// Current remaining range to remove
+    iter: Chars<'a>,
+}
+
+impl<'a, 'bump> fmt::Debug for Drain<'a, 'bump> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        // Opaque representation; the raw pointer field is not shown.
+        f.pad("Drain { .. }")
+    }
+}
+
+// The raw `*mut String` field makes `Drain` !Send/!Sync by default.
+// NOTE(review): these unsafe impls mirror std's `string::Drain`, which is
+// Send/Sync because the pointer originates from an exclusive `&mut String`
+// borrow and is only used in `Drop` — confirm against upstream bumpalo.
+unsafe impl<'a, 'bump> Sync for Drain<'a, 'bump> {}
+unsafe impl<'a, 'bump> Send for Drain<'a, 'bump> {}
+
+impl<'a, 'bump> Drop for Drain<'a, 'bump> {
+    fn drop(&mut self) {
+        // SAFETY: `self.string` was created from a live `&'a mut String` in
+        // `String::drain`, and that exclusive borrow is still in force for
+        // the whole lifetime of this `Drain`.
+        unsafe {
+            // Use Vec::drain. "Reaffirm" the bounds checks to avoid
+            // panic code being inserted again.
+            let self_vec = (*self.string).as_mut_vec();
+            if self.start <= self.end && self.end <= self_vec.len() {
+                self_vec.drain(self.start..self.end);
+            }
+        }
+    }
+}
+
+// TODO: implement `AsRef<str/[u8]>` and `as_str`
+
+// Iteration simply delegates to the inner `Chars` iterator over the drained
+// range; the actual removal of bytes happens later, in `Drop`.
+impl<'a, 'bump> Iterator for Drain<'a, 'bump> {
+    type Item = char;
+
+    #[inline]
+    fn next(&mut self) -> Option<char> {
+        self.iter.next()
+    }
+
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.iter.size_hint()
+    }
+}
+
+impl<'a, 'bump> DoubleEndedIterator for Drain<'a, 'bump> {
+    #[inline]
+    fn next_back(&mut self) -> Option<char> {
+        self.iter.next_back()
+    }
+}
+
+// `Chars` is fused, so the delegating iterator is too.
+impl<'a, 'bump> FusedIterator for Drain<'a, 'bump> {}
diff --git a/third_party/rust/bumpalo/src/collections/vec.rs b/third_party/rust/bumpalo/src/collections/vec.rs
new file mode 100644
index 0000000000..312aa055b9
--- /dev/null
+++ b/third_party/rust/bumpalo/src/collections/vec.rs
@@ -0,0 +1,2614 @@
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! A contiguous growable array type with heap-allocated contents, written
+//! [`Vec<'bump, T>`].
+//!
+//! Vectors have `O(1)` indexing, amortized `O(1)` push (to the end) and
+//! `O(1)` pop (from the end).
+//!
+//! This module is a fork of the [`std::vec`] module, that uses a bump allocator.
+//!
+//! [`std::vec`]: https://doc.rust-lang.org/std/vec/index.html
+//!
+//! # Examples
+//!
+//! You can explicitly create a [`Vec<'bump, T>`] with [`new_in`]:
+//!
+//! ```
+//! use bumpalo::{Bump, collections::Vec};
+//!
+//! let b = Bump::new();
+//! let v: Vec<i32> = Vec::new_in(&b);
+//! ```
+//!
+//! ... or by using the [`vec!`] macro:
+//!
+//! ```
+//! use bumpalo::{Bump, collections::Vec};
+//!
+//! let b = Bump::new();
+//!
+//! let v: Vec<i32> = bumpalo::vec![in &b];
+//!
+//! let v = bumpalo::vec![in &b; 1, 2, 3, 4, 5];
+//!
+//! let v = bumpalo::vec![in &b; 0; 10]; // ten zeroes
+//! ```
+//!
+//! You can [`push`] values onto the end of a vector (which will grow the vector
+//! as needed):
+//!
+//! ```
+//! use bumpalo::{Bump, collections::Vec};
+//!
+//! let b = Bump::new();
+//!
+//! let mut v = bumpalo::vec![in &b; 1, 2];
+//!
+//! v.push(3);
+//! ```
+//!
+//! Popping values works in much the same way:
+//!
+//! ```
+//! use bumpalo::{Bump, collections::Vec};
+//!
+//! let b = Bump::new();
+//!
+//! let mut v = bumpalo::vec![in &b; 1, 2];
+//!
+//! assert_eq!(v.pop(), Some(2));
+//! ```
+//!
+//! Vectors also support indexing (through the [`Index`] and [`IndexMut`] traits):
+//!
+//! ```
+//! use bumpalo::{Bump, collections::Vec};
+//!
+//! let b = Bump::new();
+//!
+//! let mut v = bumpalo::vec![in &b; 1, 2, 3];
+//! assert_eq!(v[2], 3);
+//! v[1] += 5;
+//! assert_eq!(v, [1, 7, 3]);
+//! ```
+//!
+//! [`Vec<'bump, T>`]: struct.Vec.html
+//! [`new_in`]: struct.Vec.html#method.new_in
+//! [`push`]: struct.Vec.html#method.push
+//! [`Index`]: https://doc.rust-lang.org/std/ops/trait.Index.html
+//! [`IndexMut`]: https://doc.rust-lang.org/std/ops/trait.IndexMut.html
+//! [`vec!`]: ../../macro.vec.html
+
+use super::raw_vec::RawVec;
+use crate::collections::CollectionAllocErr;
+use crate::Bump;
+use core::borrow::{Borrow, BorrowMut};
+use core::cmp::Ordering;
+use core::fmt;
+use core::hash::{self, Hash};
+use core::iter::FusedIterator;
+use core::marker::PhantomData;
+use core::mem;
+use core::ops;
+use core::ops::Bound::{Excluded, Included, Unbounded};
+use core::ops::{Index, IndexMut, RangeBounds};
+use core::ptr;
+use core::ptr::NonNull;
+use core::slice;
+
/// Offsets `p` by `offset` elements of `T`.
///
/// NOTE(review): named after the unstable `arith_offset` intrinsic, but
/// implemented with ordinary `offset`, so the stricter in-bounds rules
/// below apply.
///
/// # Safety
///
/// Same contract as [`pointer::offset`]: the starting and resulting
/// pointers must be within bounds of (or one past the end of) the same
/// allocated object, and the offset in bytes must not overflow an `isize`.
unsafe fn arith_offset<T>(p: *const T, offset: isize) -> *const T {
    p.offset(offset)
}
+
/// Partitions `s` in place so that the elements to keep are compacted at
/// the front (in their original relative order) and every element that
/// `same_bucket` classifies as a duplicate of the previously kept element
/// ends up in the tail. Returns `(kept, duplicates)`.
fn partition_dedup_by<T, F>(s: &mut [T], mut same_bucket: F) -> (&mut [T], &mut [T])
where
    F: FnMut(&mut T, &mut T) -> bool,
{
    // `same_bucket` may panic, so the slice must remain fully valid at
    // every step. We therefore never move anything out of the slice; we
    // only `mem::swap` pairs of elements, which keeps every slot
    // initialized no matter where a panic unwinds from.
    //
    // The pass is a single O(n) sweep with two cursors:
    //
    // * `read` visits every element exactly once, and
    // * `write` marks the end of the kept prefix (`write <= read` always).
    //
    // Each newly read element is compared against the *last kept* element
    // at `write - 1`. Distinct elements are swapped into slot `write`;
    // duplicates are left behind and drift toward the tail as later
    // keepers displace them.

    let len = s.len();
    if len <= 1 {
        // Nothing can be a duplicate of anything; keep the whole slice.
        return (s, &mut []);
    }

    let base = s.as_mut_ptr();
    let mut read: usize = 1;
    let mut write: usize = 1;

    unsafe {
        // SAFETY: the loop maintains `1 <= write <= read < len`, so every
        // pointer formed below is in bounds of `s`. Raw pointers avoid
        // per-access bounds checks.
        while read < len {
            let cur = base.add(read);
            let last_kept = base.add(write - 1);
            if !same_bucket(&mut *cur, &mut *last_kept) {
                if read != write {
                    // `read > write` here, so `cur` and `base.add(write)`
                    // are distinct slots and the swap aliases nothing.
                    mem::swap(&mut *cur, &mut *base.add(write));
                }
                write += 1;
            }
            read += 1;
        }
    }

    s.split_at_mut(write)
}
+
/// Number of `T`-sized elements between `origin` and `p` (i.e. `p - origin`
/// measured in units of `T`), computed the way Clang lowers pointer
/// subtraction.
unsafe fn offset_from<T>(p: *const T, origin: *const T) -> isize
where
    T: Sized,
{
    let pointee_size = mem::size_of::<T>();
    // Zero-sized types would divide by zero below, and oversized types
    // could not have a representable element distance.
    assert!(0 < pointee_size && pointee_size <= isize::max_value() as usize);

    // Byte distance first. Wrapping arithmetic is deliberate: the operands
    // are treated as unsigned addresses but the result is signed, so
    // neither `nsw` nor `nuw` semantics would be correct.
    let byte_distance = (p as isize).wrapping_sub(origin as isize);
    byte_distance / (pointee_size as isize)
}
+
/// Creates a [`Vec`] containing the arguments.
///
/// `vec!` allows `Vec`s to be defined with the same syntax as array expressions.
/// There are two forms of this macro:
///
/// - Create a [`Vec`] containing a given list of elements:
///
/// ```
/// use bumpalo::Bump;
///
/// let b = Bump::new();
/// let v = bumpalo::vec![in &b; 1, 2, 3];
/// assert_eq!(v, [1, 2, 3]);
/// ```
///
/// - Create a [`Vec`] from a given element and size:
///
/// ```
/// use bumpalo::Bump;
///
/// let b = Bump::new();
/// let v = bumpalo::vec![in &b; 1; 3];
/// assert_eq!(v, [1, 1, 1]);
/// ```
///
/// Note that unlike array expressions, this syntax supports all elements
/// which implement [`Clone`] and the number of elements doesn't have to be
/// a constant.
///
/// This will use `clone` to duplicate an expression, so one should be careful
/// using this with types having a non-standard `Clone` implementation. For
/// example, `bumpalo::vec![in &bump; Rc::new(1); 5]` will create a vector of five references
/// to the same boxed integer value, not five references pointing to independently
/// boxed integers.
///
/// [`Vec`]: collections/vec/struct.Vec.html
/// [`Clone`]: https://doc.rust-lang.org/std/clone/trait.Clone.html
#[macro_export]
macro_rules! vec {
    (in $bump:expr; $elem:expr; $n:expr) => {{
        let n = $n;
        let mut v = $crate::collections::Vec::with_capacity_in(n, $bump);
        if n > 0 {
            let elem = $elem;
            // Clone for all but the last slot so that `elem` itself can
            // fill the final one, saving one clone.
            for _ in 0..n - 1 {
                v.push(elem.clone());
            }
            v.push(elem);
        }
        v
    }};
    (in $bump:expr) => { $crate::collections::Vec::new_in($bump) };
    (in $bump:expr; $($x:expr),*) => {{
        let mut v = $crate::collections::Vec::new_in($bump);
        $( v.push($x); )*
        v
    }};
    // Recurse via `$crate::vec!` (not the literal crate name `bumpalo`) so
    // the trailing-comma form keeps working when the dependent crate
    // renames `bumpalo` in its Cargo.toml.
    (in $bump:expr; $($x:expr,)*) => ($crate::vec![in $bump; $($x),*])
}
+
+/// A contiguous growable array type, written `Vec<'bump, T>` but pronounced 'vector'.
+///
+/// # Examples
+///
+/// ```
+/// use bumpalo::{Bump, collections::Vec};
+///
+/// let b = Bump::new();
+///
+/// let mut vec = Vec::new_in(&b);
+/// vec.push(1);
+/// vec.push(2);
+///
+/// assert_eq!(vec.len(), 2);
+/// assert_eq!(vec[0], 1);
+///
+/// assert_eq!(vec.pop(), Some(2));
+/// assert_eq!(vec.len(), 1);
+///
+/// vec[0] = 7;
+/// assert_eq!(vec[0], 7);
+///
+/// vec.extend([1, 2, 3].iter().cloned());
+///
+/// for x in &vec {
+/// println!("{}", x);
+/// }
+/// assert_eq!(vec, [7, 1, 2, 3]);
+/// ```
+///
+/// The [`vec!`] macro is provided to make initialization more convenient:
+///
+/// ```
+/// use bumpalo::{Bump, collections::Vec};
+///
+/// let b = Bump::new();
+///
+/// let mut vec = bumpalo::vec![in &b; 1, 2, 3];
+/// vec.push(4);
+/// assert_eq!(vec, [1, 2, 3, 4]);
+/// ```
+///
+/// It can also initialize each element of a `Vec<'bump, T>` with a given value.
+/// This may be more efficient than performing allocation and initialization
+/// in separate steps, especially when initializing a vector of zeros:
+///
+/// ```
+/// use bumpalo::{Bump, collections::Vec};
+///
+/// let b = Bump::new();
+///
+/// let vec = bumpalo::vec![in &b; 0; 5];
+/// assert_eq!(vec, [0, 0, 0, 0, 0]);
+///
+/// // The following is equivalent, but potentially slower:
+/// let mut vec1 = Vec::with_capacity_in(5, &b);
+/// vec1.resize(5, 0);
+/// ```
+///
+/// Use a `Vec<'bump, T>` as an efficient stack:
+///
+/// ```
+/// use bumpalo::{Bump, collections::Vec};
+///
+/// let b = Bump::new();
+///
+/// let mut stack = Vec::new_in(&b);
+///
+/// stack.push(1);
+/// stack.push(2);
+/// stack.push(3);
+///
+/// while let Some(top) = stack.pop() {
+/// // Prints 3, 2, 1
+/// println!("{}", top);
+/// }
+/// ```
+///
+/// # Indexing
+///
+/// The `Vec` type allows to access values by index, because it implements the
+/// [`Index`] trait. An example will be more explicit:
+///
+/// ```
+/// use bumpalo::{Bump, collections::Vec};
+///
+/// let b = Bump::new();
+///
+/// let v = bumpalo::vec![in &b; 0, 2, 4, 6];
+/// println!("{}", v[1]); // it will display '2'
+/// ```
+///
+/// However be careful: if you try to access an index which isn't in the `Vec`,
+/// your software will panic! You cannot do this:
+///
+/// ```should_panic
+/// use bumpalo::{Bump, collections::Vec};
+///
+/// let b = Bump::new();
+///
+/// let v = bumpalo::vec![in &b; 0, 2, 4, 6];
+/// println!("{}", v[6]); // it will panic!
+/// ```
+///
+/// In conclusion: always check if the index you want to get really exists
+/// before doing it.
+///
+/// # Slicing
+///
+/// A `Vec` can be mutable. Slices, on the other hand, are read-only objects.
+/// To get a slice, use `&`. Example:
+///
+/// ```
+/// use bumpalo::{Bump, collections::Vec};
+///
+/// fn read_slice(slice: &[usize]) {
+/// // ...
+/// }
+///
+/// let b = Bump::new();
+///
+/// let v = bumpalo::vec![in &b; 0, 1];
+/// read_slice(&v);
+///
+/// // ... and that's all!
+/// // you can also do it like this:
+/// let x : &[usize] = &v;
+/// ```
+///
+/// In Rust, it's more common to pass slices as arguments rather than vectors
+/// when you just want to provide a read access. The same goes for [`String`] and
+/// [`&str`].
+///
+/// # Capacity and reallocation
+///
+/// The capacity of a vector is the amount of space allocated for any future
+/// elements that will be added onto the vector. This is not to be confused with
+/// the *length* of a vector, which specifies the number of actual elements
+/// within the vector. If a vector's length exceeds its capacity, its capacity
+/// will automatically be increased, but its elements will have to be
+/// reallocated.
+///
+/// For example, a vector with capacity 10 and length 0 would be an empty vector
+/// with space for 10 more elements. Pushing 10 or fewer elements onto the
+/// vector will not change its capacity or cause reallocation to occur. However,
+/// if the vector's length is increased to 11, it will have to reallocate, which
+/// can be slow. For this reason, it is recommended to use [`Vec::with_capacity_in`]
+/// whenever possible to specify how big the vector is expected to get.
+///
+/// # Guarantees
+///
+/// Due to its incredibly fundamental nature, `Vec` makes a lot of guarantees
+/// about its design. This ensures that it's as low-overhead as possible in
+/// the general case, and can be correctly manipulated in primitive ways
+/// by unsafe code. Note that these guarantees refer to an unqualified `Vec<'bump, T>`.
+/// If additional type parameters are added (e.g. to support custom allocators),
+/// overriding their defaults may change the behavior.
+///
+/// Most fundamentally, `Vec` is and always will be a (pointer, capacity, length)
+/// triplet. No more, no less. The order of these fields is completely
+/// unspecified, and you should use the appropriate methods to modify these.
+/// The pointer will never be null, so this type is null-pointer-optimized.
+///
+/// However, the pointer may not actually point to allocated memory. In particular,
+/// if you construct a `Vec` with capacity 0 via [`Vec::new_in`], [`bumpalo::vec![in bump]`][`vec!`],
+/// [`Vec::with_capacity_in(0)`][`Vec::with_capacity_in`], or by calling [`shrink_to_fit`]
+/// on an empty Vec, it will not allocate memory. Similarly, if you store zero-sized
+/// types inside a `Vec`, it will not allocate space for them. *Note that in this case
+/// the `Vec` may not report a [`capacity`] of 0*. `Vec` will allocate if and only
+/// if <code>[`mem::size_of::<T>`]\() * capacity() > 0</code>. In general, `Vec`'s allocation
+/// details are very subtle &mdash; if you intend to allocate memory using a `Vec`
+/// and use it for something else (either to pass to unsafe code, or to build your
+/// own memory-backed collection), be sure to deallocate this memory by using
+/// `from_raw_parts` to recover the `Vec` and then dropping it.
+///
+/// If a `Vec` *has* allocated memory, then the memory it points to is
+/// in the [`Bump`] arena used to construct it, and its
+/// pointer points to [`len`] initialized, contiguous elements in order (what
+/// you would see if you coerced it to a slice), followed by <code>[`capacity`] -
+/// [`len`]</code> logically uninitialized, contiguous elements.
+///
+/// `Vec` will never perform a "small optimization" where elements are actually
+/// stored on the stack for two reasons:
+///
+/// * It would make it more difficult for unsafe code to correctly manipulate
+/// a `Vec`. The contents of a `Vec` wouldn't have a stable address if it were
+/// only moved, and it would be more difficult to determine if a `Vec` had
+/// actually allocated memory.
+///
+/// * It would penalize the general case, incurring an additional branch
+/// on every access.
+///
+/// `Vec` will never automatically shrink itself, even if completely empty. This
+/// ensures no unnecessary allocations or deallocations occur. Emptying a `Vec`
+/// and then filling it back up to the same [`len`] should incur no calls to
+/// the allocator. If you wish to free up unused memory, use
+/// [`shrink_to_fit`][`shrink_to_fit`].
+///
+/// [`push`] and [`insert`] will never (re)allocate if the reported capacity is
+/// sufficient. [`push`] and [`insert`] *will* (re)allocate if
+/// <code>[`len`] == [`capacity`]</code>. That is, the reported capacity is completely
+/// accurate, and can be relied on. It can even be used to manually free the memory
+/// allocated by a `Vec` if desired. Bulk insertion methods *may* reallocate, even
+/// when not necessary.
+///
+/// `Vec` does not guarantee any particular growth strategy when reallocating
+/// when full, nor when [`reserve`] is called. The current strategy is basic
+/// and it may prove desirable to use a non-constant growth factor. Whatever
+/// strategy is used will of course guarantee `O(1)` amortized [`push`].
+///
+/// `bumpalo::vec![in bump; x; n]`, `bumpalo::vec![in bump; a, b, c, d]`, and
+/// [`Vec::with_capacity_in(n)`][`Vec::with_capacity_in`], will all produce a
+/// `Vec` with exactly the requested capacity. If <code>[`len`] == [`capacity`]</code>, (as
+/// is the case for the [`vec!`] macro), then a `Vec<'bump, T>` can be converted
+/// to and from a [`Box<[T]>`][owned slice] without reallocating or moving the
+/// elements.
+///
+/// `Vec` will not specifically overwrite any data that is removed from it,
+/// but also won't specifically preserve it. Its uninitialized memory is
+/// scratch space that it may use however it wants. It will generally just do
+/// whatever is most efficient or otherwise easy to implement. Do not rely on
+/// removed data to be erased for security purposes. Even if you drop a `Vec`, its
+/// buffer may simply be reused by another `Vec`. Even if you zero a `Vec`'s memory
+/// first, that may not actually happen because the optimizer does not consider
+/// this a side-effect that must be preserved. There is one case which we will
+/// not break, however: using `unsafe` code to write to the excess capacity,
+/// and then increasing the length to match, is always valid.
+///
+/// `Vec` does not currently guarantee the order in which elements are dropped.
+/// The order has changed in the past and may change again.
+///
+/// [`vec!`]: ../../macro.vec.html
+/// [`Index`]: https://doc.rust-lang.org/std/ops/trait.Index.html
+/// [`String`]: ../string/struct.String.html
+/// [`&str`]: https://doc.rust-lang.org/std/primitive.str.html
+/// [`Vec::with_capacity_in`]: struct.Vec.html#method.with_capacity_in
+/// [`Vec::new_in`]: struct.Vec.html#method.new_in
+/// [`shrink_to_fit`]: struct.Vec.html#method.shrink_to_fit
+/// [`capacity`]: struct.Vec.html#method.capacity
+/// [`mem::size_of::<T>`]: https://doc.rust-lang.org/std/mem/fn.size_of.html
+/// [`len`]: struct.Vec.html#method.len
+/// [`push`]: struct.Vec.html#method.push
+/// [`insert`]: struct.Vec.html#method.insert
+/// [`reserve`]: struct.Vec.html#method.reserve
+/// [owned slice]: https://doc.rust-lang.org/std/boxed/struct.Box.html
pub struct Vec<'bump, T: 'bump> {
    // Pointer + capacity pair; the backing memory lives in a `Bump` arena
    // for the `'bump` lifetime.
    buf: RawVec<'bump, T>,
    // Number of initialized elements; invariant: `len <= buf.cap()`.
    len: usize,
}
+
+////////////////////////////////////////////////////////////////////////////////
+// Inherent methods
+////////////////////////////////////////////////////////////////////////////////
+
+impl<'bump, T: 'bump> Vec<'bump, T> {
+ /// Constructs a new, empty `Vec<'bump, T>`.
+ ///
+ /// The vector will not allocate until elements are pushed onto it.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #![allow(unused_mut)]
+ /// use bumpalo::{Bump, collections::Vec};
+ ///
+ /// let b = Bump::new();
+ /// let mut vec: Vec<i32> = Vec::new_in(&b);
+ /// ```
    #[inline]
    pub fn new_in(bump: &'bump Bump) -> Vec<'bump, T> {
        Vec {
            // An empty `RawVec` defers any allocation until the first push.
            buf: RawVec::new_in(bump),
            len: 0,
        }
    }
+
+ /// Constructs a new, empty `Vec<'bump, T>` with the specified capacity.
+ ///
+ /// The vector will be able to hold exactly `capacity` elements without
+ /// reallocating. If `capacity` is 0, the vector will not allocate.
+ ///
+ /// It is important to note that although the returned vector has the
+ /// *capacity* specified, the vector will have a zero *length*. For an
+ /// explanation of the difference between length and capacity, see
+ /// *[Capacity and reallocation]*.
+ ///
+ /// [Capacity and reallocation]: #capacity-and-reallocation
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::Vec};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// let mut vec = Vec::with_capacity_in(10, &b);
+ ///
+ /// // The vector contains no items, even though it has capacity for more
+ /// assert_eq!(vec.len(), 0);
+ ///
+ /// // These are all done without reallocating...
+ /// for i in 0..10 {
+ /// vec.push(i);
+ /// }
+ ///
+ /// // ...but this may make the vector reallocate
+ /// vec.push(11);
+ /// ```
    #[inline]
    pub fn with_capacity_in(capacity: usize, bump: &'bump Bump) -> Vec<'bump, T> {
        Vec {
            // Room for exactly `capacity` elements up front (no allocation
            // at all when `capacity` is 0 — see the docs above).
            buf: RawVec::with_capacity_in(capacity, bump),
            len: 0,
        }
    }
+
+ /// Construct a new `Vec` from the given iterator's items.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::Vec};
+ /// use std::iter;
+ ///
+ /// let b = Bump::new();
+ /// let v = Vec::from_iter_in(iter::repeat(7).take(3), &b);
+ /// assert_eq!(v, [7, 7, 7]);
+ /// ```
    pub fn from_iter_in<I: IntoIterator<Item = T>>(iter: I, bump: &'bump Bump) -> Vec<'bump, T> {
        // Start empty and let `extend` grow the buffer as items arrive.
        let mut v = Vec::new_in(bump);
        v.extend(iter);
        v
    }
+
+ /// Creates a `Vec<'bump, T>` directly from the raw components of another vector.
+ ///
+ /// # Safety
+ ///
+ /// This is highly unsafe, due to the number of invariants that aren't
+ /// checked:
+ ///
+ /// * `ptr` needs to have been previously allocated via [`String`]/`Vec<'bump, T>`
+ /// (at least, it's highly likely to be incorrect if it wasn't).
+ /// * `ptr`'s `T` needs to have the same size and alignment as it was allocated with.
+ /// * `length` needs to be less than or equal to `capacity`.
+ /// * `capacity` needs to be the capacity that the pointer was allocated with.
+ ///
+ /// Violating these may cause problems like corrupting the allocator's
+ /// internal data structures. For example it is **not** safe
+ /// to build a `Vec<u8>` from a pointer to a C `char` array and a `size_t`.
+ ///
+ /// The ownership of `ptr` is effectively transferred to the
+ /// `Vec<'bump, T>` which may then deallocate, reallocate or change the
+ /// contents of memory pointed to by the pointer at will. Ensure
+ /// that nothing else uses the pointer after calling this
+ /// function.
+ ///
+ /// [`String`]: ../string/struct.String.html
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::Vec};
+ ///
+ /// use std::ptr;
+ /// use std::mem;
+ ///
+ /// let b = Bump::new();
+ ///
+ /// let mut v = bumpalo::vec![in &b; 1, 2, 3];
+ ///
+ /// // Pull out the various important pieces of information about `v`
+ /// let p = v.as_mut_ptr();
+ /// let len = v.len();
+ /// let cap = v.capacity();
+ ///
+ /// unsafe {
+ /// // Cast `v` into the void: no destructor run, so we are in
+ /// // complete control of the allocation to which `p` points.
+ /// mem::forget(v);
+ ///
+ /// // Overwrite memory with 4, 5, 6
+ /// for i in 0..len as isize {
+ /// ptr::write(p.offset(i), 4 + i);
+ /// }
+ ///
+ /// // Put everything back together into a Vec
+ /// let rebuilt = Vec::from_raw_parts_in(p, len, cap, &b);
+ /// assert_eq!(rebuilt, [4, 5, 6]);
+ /// }
+ /// ```
    pub unsafe fn from_raw_parts_in(
        ptr: *mut T,
        length: usize,
        capacity: usize,
        bump: &'bump Bump,
    ) -> Vec<'bump, T> {
        // No validation is possible here; the caller vouches for every
        // invariant listed in the safety docs above.
        Vec {
            buf: RawVec::from_raw_parts_in(ptr, capacity, bump),
            len: length,
        }
    }
+
+ /// Returns a shared reference to the allocator backing this `Vec`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::Vec};
+ ///
+ /// // uses the same allocator as the provided `Vec`
+ /// fn add_strings<'bump>(vec: &mut Vec<'bump, &'bump str>) {
+ /// for string in ["foo", "bar", "baz"] {
+ /// vec.push(vec.bump().alloc_str(string));
+ /// }
+ /// }
+ /// ```
    #[inline]
    #[must_use]
    pub fn bump(&self) -> &'bump Bump {
        // The arena reference is stored inside the `RawVec`.
        self.buf.bump()
    }
+
+ /// Returns the number of elements the vector can hold without
+ /// reallocating.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::Vec};
+ ///
+ /// let b = Bump::new();
+ /// let vec: Vec<i32> = Vec::with_capacity_in(10, &b);
+ /// assert_eq!(vec.capacity(), 10);
+ /// ```
    #[inline]
    pub fn capacity(&self) -> usize {
        // Capacity is tracked by the `RawVec`, not by `Vec` itself.
        self.buf.cap()
    }
+
+ /// Reserves capacity for at least `additional` more elements to be inserted
+ /// in the given `Vec<'bump, T>`. The collection may reserve more space to avoid
+ /// frequent reallocations. After calling `reserve`, capacity will be
+ /// greater than or equal to `self.len() + additional`. Does nothing if
+ /// capacity is already sufficient.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the new capacity overflows `usize`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::Vec};
+ ///
+ /// let b = Bump::new();
+ /// let mut vec = bumpalo::vec![in &b; 1];
+ /// vec.reserve(10);
+ /// assert!(vec.capacity() >= 11);
+ /// ```
    pub fn reserve(&mut self, additional: usize) {
        // `RawVec` needs the live length to compute the required capacity.
        self.buf.reserve(self.len, additional);
    }
+
+ /// Reserves the minimum capacity for exactly `additional` more elements to
+ /// be inserted in the given `Vec<'bump, T>`. After calling `reserve_exact`,
+ /// capacity will be greater than or equal to `self.len() + additional`.
+ /// Does nothing if the capacity is already sufficient.
+ ///
+ /// Note that the allocator may give the collection more space than it
+ /// requests. Therefore capacity can not be relied upon to be precisely
+ /// minimal. Prefer `reserve` if future insertions are expected.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the new capacity overflows `usize`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::Vec};
+ ///
+ /// let b = Bump::new();
+ /// let mut vec = bumpalo::vec![in &b; 1];
+ /// vec.reserve_exact(10);
+ /// assert!(vec.capacity() >= 11);
+ /// ```
    pub fn reserve_exact(&mut self, additional: usize) {
        // Exact variant: requests `len + additional` with no growth slack.
        self.buf.reserve_exact(self.len, additional);
    }
+
+ /// Attempts to reserve capacity for at least `additional` more elements to be inserted
+ /// in the given `Vec<'bump, T>`. The collection may reserve more space to avoid
+ /// frequent reallocations. After calling `try_reserve`, capacity will be
+ /// greater than or equal to `self.len() + additional`. Does nothing if
+ /// capacity is already sufficient.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the new capacity overflows `usize`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::Vec};
+ ///
+ /// let b = Bump::new();
+ /// let mut vec = bumpalo::vec![in &b; 1];
+ /// vec.try_reserve(10).unwrap();
+ /// assert!(vec.capacity() >= 11);
+ /// ```
    pub fn try_reserve(&mut self, additional: usize) -> Result<(), CollectionAllocErr> {
        // Fallible counterpart of `reserve`; failures are surfaced as
        // `CollectionAllocErr` instead of aborting/panicking in RawVec.
        self.buf.try_reserve(self.len, additional)
    }
+
+ /// Attempts to reserve the minimum capacity for exactly `additional` more elements to
+ /// be inserted in the given `Vec<'bump, T>`. After calling `try_reserve_exact`,
+ /// capacity will be greater than or equal to `self.len() + additional`.
+ /// Does nothing if the capacity is already sufficient.
+ ///
+ /// Note that the allocator may give the collection more space than it
+ /// requests. Therefore capacity can not be relied upon to be precisely
+ /// minimal. Prefer `try_reserve` if future insertions are expected.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the new capacity overflows `usize`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::Vec};
+ ///
+ /// let b = Bump::new();
+ /// let mut vec = bumpalo::vec![in &b; 1];
+ /// vec.try_reserve_exact(10).unwrap();
+ /// assert!(vec.capacity() >= 11);
+ /// ```
    pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), CollectionAllocErr> {
        // Fallible counterpart of `reserve_exact`; failures are surfaced
        // as `CollectionAllocErr`.
        self.buf.try_reserve_exact(self.len, additional)
    }
+
+ /// Shrinks the capacity of the vector as much as possible.
+ ///
+ /// It will drop down as close as possible to the length but the allocator
+ /// may still inform the vector that there is space for a few more elements.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::Vec};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// let mut vec = Vec::with_capacity_in(10, &b);
+ /// vec.extend([1, 2, 3].iter().cloned());
+ /// assert_eq!(vec.capacity(), 10);
+ /// vec.shrink_to_fit();
+ /// assert!(vec.capacity() >= 3);
+ /// ```
+ pub fn shrink_to_fit(&mut self) {
+ if self.capacity() != self.len {
+ self.buf.shrink_to_fit(self.len);
+ }
+ }
+
+ /// Converts the vector into `&'bump [T]`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::Vec};
+ ///
+ /// let b = Bump::new();
+ /// let v = bumpalo::vec![in &b; 1, 2, 3];
+ ///
+ /// let slice = v.into_bump_slice();
+ /// assert_eq!(slice, [1, 2, 3]);
+ /// ```
+ pub fn into_bump_slice(self) -> &'bump [T] {
+ unsafe {
+ let ptr = self.as_ptr();
+ let len = self.len();
+ mem::forget(self);
+ slice::from_raw_parts(ptr, len)
+ }
+ }
+
+ /// Converts the vector into `&'bump mut [T]`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::Vec};
+ ///
+ /// let b = Bump::new();
+ /// let v = bumpalo::vec![in &b; 1, 2, 3];
+ ///
+ /// let mut slice = v.into_bump_slice_mut();
+ ///
+ /// slice[0] = 3;
+ /// slice[2] = 1;
+ ///
+ /// assert_eq!(slice, [3, 2, 1]);
+ /// ```
    pub fn into_bump_slice_mut(mut self) -> &'bump mut [T] {
        let ptr = self.as_mut_ptr();
        let len = self.len();
        // Suppress `Vec`'s drop so the elements stay alive; the arena
        // keeps owning the memory for `'bump`.
        mem::forget(self);

        unsafe { slice::from_raw_parts_mut(ptr, len) }
    }
+
+ /// Shortens the vector, keeping the first `len` elements and dropping
+ /// the rest.
+ ///
+ /// If `len` is greater than the vector's current length, this has no
+ /// effect.
+ ///
+ /// The [`drain`] method can emulate `truncate`, but causes the excess
+ /// elements to be returned instead of dropped.
+ ///
+ /// Note that this method has no effect on the allocated capacity
+ /// of the vector.
+ ///
+ /// # Examples
+ ///
+ /// Truncating a five element vector to two elements:
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::Vec};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// let mut vec = bumpalo::vec![in &b; 1, 2, 3, 4, 5];
+ /// vec.truncate(2);
+ /// assert_eq!(vec, [1, 2]);
+ /// ```
+ ///
+ /// No truncation occurs when `len` is greater than the vector's current
+ /// length:
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::Vec};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// let mut vec = bumpalo::vec![in &b; 1, 2, 3];
+ /// vec.truncate(8);
+ /// assert_eq!(vec, [1, 2, 3]);
+ /// ```
+ ///
+ /// Truncating when `len == 0` is equivalent to calling the [`clear`]
+ /// method.
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::Vec};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// let mut vec = bumpalo::vec![in &b; 1, 2, 3];
+ /// vec.truncate(0);
+ /// assert_eq!(vec, []);
+ /// ```
+ ///
+ /// [`clear`]: #method.clear
+ /// [`drain`]: #method.drain
    pub fn truncate(&mut self, len: usize) {
        let current_len = self.len;
        unsafe {
            // Walk backwards from the current end, dropping one element at
            // a time.
            let mut ptr = self.as_mut_ptr().add(self.len);
            // Set the final length at the end, keeping in mind that
            // dropping an element might panic. Works around a missed
            // optimization, as seen in the following issue:
            // https://github.com/rust-lang/rust/issues/51802
            // (`SetLenOnDrop` commits the shortened length from its own
            // destructor, so it is written even if a drop below unwinds.)
            let mut local_len = SetLenOnDrop::new(&mut self.len);

            // drop any extra elements; when `len >= current_len` the range
            // is empty and this whole call is a no-op, as documented.
            for _ in len..current_len {
                local_len.decrement_len(1);
                ptr = ptr.offset(-1);
                ptr::drop_in_place(ptr);
            }
        }
    }
+
+ /// Extracts a slice containing the entire vector.
+ ///
+ /// Equivalent to `&s[..]`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::Vec};
+ /// use std::io::{self, Write};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// let buffer = bumpalo::vec![in &b; 1, 2, 3, 5, 8];
+ /// io::sink().write(buffer.as_slice()).unwrap();
+ /// ```
    #[inline]
    pub fn as_slice(&self) -> &[T] {
        // Coerces via the `Deref<Target = [T]>` impl.
        self
    }
+
+ /// Extracts a mutable slice of the entire vector.
+ ///
+ /// Equivalent to `&mut s[..]`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::Vec};
+ /// use std::io::{self, Read};
+ ///
+ /// let b = Bump::new();
+ /// let mut buffer = bumpalo::vec![in &b; 0; 3];
+ /// io::repeat(0b101).read_exact(buffer.as_mut_slice()).unwrap();
+ /// ```
    #[inline]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        // Coerces via the `DerefMut` impl.
        self
    }
+
+ /// Returns a raw pointer to the vector's buffer, or a dangling raw pointer
+ /// valid for zero sized reads if the vector didn't allocate.
+ ///
+ /// The caller must ensure that the vector outlives the pointer this
+ /// function returns, or else it will end up pointing to garbage.
+ /// Modifying the vector may cause its buffer to be reallocated,
+ /// which would also make any pointers to it invalid.
+ ///
+ /// The caller must also ensure that the memory the pointer (non-transitively) points to
+ /// is never written to (except inside an `UnsafeCell`) using this pointer or any pointer
+ /// derived from it. If you need to mutate the contents of the slice, use [`as_mut_ptr`].
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::Vec};
+ ///
+ /// let bump = Bump::new();
+ ///
+ /// let x = bumpalo::vec![in &bump; 1, 2, 4];
+ /// let x_ptr = x.as_ptr();
+ ///
+ /// unsafe {
+ /// for i in 0..x.len() {
+ /// assert_eq!(*x_ptr.add(i), 1 << i);
+ /// }
+ /// }
+ /// ```
+ ///
+ /// [`as_mut_ptr`]: Vec::as_mut_ptr
+ #[inline]
+ pub fn as_ptr(&self) -> *const T {
+ // We shadow the slice method of the same name to avoid going through
+ // `deref`, which creates an intermediate reference.
+ let ptr = self.buf.ptr();
+ unsafe {
+ // SAFETY(hint): as documented above, the buffer pointer is dangling
+ // (not null) when nothing was allocated, so null is impossible.
+ // The hint lets the optimizer elide downstream null checks.
+ if ptr.is_null() {
+ core::hint::unreachable_unchecked();
+ }
+ }
+ ptr
+ }
+
+ /// Returns an unsafe mutable pointer to the vector's buffer, or a dangling
+ /// raw pointer valid for zero sized reads if the vector didn't allocate.
+ ///
+ /// The caller must ensure that the vector outlives the pointer this
+ /// function returns, or else it will end up pointing to garbage.
+ /// Modifying the vector may cause its buffer to be reallocated,
+ /// which would also make any pointers to it invalid.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::Vec};
+ ///
+ /// let bump = Bump::new();
+ ///
+ /// // Allocate vector big enough for 4 elements.
+ /// let size = 4;
+ /// let mut x: Vec<i32> = Vec::with_capacity_in(size, &bump);
+ /// let x_ptr = x.as_mut_ptr();
+ ///
+ /// // Initialize elements via raw pointer writes, then set length.
+ /// unsafe {
+ /// for i in 0..size {
+ /// x_ptr.add(i).write(i as i32);
+ /// }
+ /// x.set_len(size);
+ /// }
+ /// assert_eq!(&*x, &[0, 1, 2, 3]);
+ /// ```
+ #[inline]
+ pub fn as_mut_ptr(&mut self) -> *mut T {
+ // We shadow the slice method of the same name to avoid going through
+ // `deref_mut`, which creates an intermediate reference.
+ let ptr = self.buf.ptr();
+ unsafe {
+ // SAFETY(hint): the buffer pointer is dangling rather than null when
+ // nothing was allocated (see the doc above), so null is impossible.
+ if ptr.is_null() {
+ core::hint::unreachable_unchecked();
+ }
+ }
+ ptr
+ }
+
+ /// Sets the length of a vector.
+ ///
+ /// This will explicitly set the size of the vector, without actually
+ /// modifying its buffers, so it is up to the caller to ensure that the
+ /// vector is actually the specified size.
+ ///
+ /// # Safety
+ ///
+ /// - `new_len` must be less than or equal to [`capacity()`].
+ /// - The elements at `old_len..new_len` must be initialized.
+ ///
+ /// [`capacity()`]: struct.Vec.html#method.capacity
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::Vec};
+ ///
+ /// use std::ptr;
+ ///
+ /// let b = Bump::new();
+ ///
+ /// let mut vec = bumpalo::vec![in &b; 'r', 'u', 's', 't'];
+ ///
+ /// unsafe {
+ /// ptr::drop_in_place(&mut vec[3]);
+ /// vec.set_len(3);
+ /// }
+ /// assert_eq!(vec, ['r', 'u', 's']);
+ /// ```
+ ///
+ /// In this example, there is a memory leak since the memory locations
+ /// owned by the inner vectors were not freed prior to the `set_len` call:
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::Vec};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// let mut vec = bumpalo::vec![in &b;
+ /// bumpalo::vec![in &b; 1, 0, 0],
+ /// bumpalo::vec![in &b; 0, 1, 0],
+ /// bumpalo::vec![in &b; 0, 0, 1]];
+ /// unsafe {
+ /// vec.set_len(0);
+ /// }
+ /// ```
+ ///
+ /// In this example, the vector gets expanded from zero to four items
+ /// but we directly initialize uninitialized memory:
+ ///
+ // TODO: rely upon `spare_capacity_mut`
+ /// ```
+ /// use bumpalo::{Bump, collections::Vec};
+ ///
+ /// let len = 4;
+ /// let b = Bump::new();
+ ///
+ /// let mut vec: Vec<u8> = Vec::with_capacity_in(len, &b);
+ ///
+ /// for i in 0..len {
+ /// // SAFETY: we initialize memory via `pointer::write`
+ /// unsafe { vec.as_mut_ptr().add(i).write(b'a') }
+ /// }
+ ///
+ /// unsafe {
+ /// vec.set_len(len);
+ /// }
+ ///
+ /// assert_eq!(b"aaaa", &*vec);
+ /// ```
+ #[inline]
+ pub unsafe fn set_len(&mut self, new_len: usize) {
+ // Just store the new length; the caller upholds the invariants listed
+ // under `# Safety` above (no drops, no buffer changes happen here).
+ self.len = new_len;
+ }
+
+ /// Removes an element from the vector and returns it.
+ ///
+ /// The removed element is replaced by the last element of the vector.
+ ///
+ /// This does not preserve ordering, but is O(1).
+ ///
+ /// # Panics
+ ///
+ /// Panics if `index` is out of bounds.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::Vec};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// let mut v = bumpalo::vec![in &b; "foo", "bar", "baz", "qux"];
+ ///
+ /// assert_eq!(v.swap_remove(1), "bar");
+ /// assert_eq!(v, ["foo", "qux", "baz"]);
+ ///
+ /// assert_eq!(v.swap_remove(0), "foo");
+ /// assert_eq!(v, ["baz", "qux"]);
+ /// ```
+ #[inline]
+ pub fn swap_remove(&mut self, index: usize) -> T {
+ unsafe {
+ // We replace self[index] with the last element. Note that if the
+ // bounds check on hole succeeds there must be a last element (which
+ // can be self[index] itself).
+ // The indexing `self[index]` is what raises the documented panic.
+ let hole: *mut T = &mut self[index];
+ let last = ptr::read(self.get_unchecked(self.len - 1));
+ // Shrink first so the tail slot is no longer reachable through the
+ // vector while `last` duplicates it on the stack.
+ self.len -= 1;
+ // Moves `last` into the hole and returns the old `self[index]`.
+ ptr::replace(hole, last)
+ }
+ }
+
+ /// Inserts an element at position `index` within the vector, shifting all
+ /// elements after it to the right.
+ ///
+ /// # Panics
+ ///
+ /// Panics if `index > len`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::Vec};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// let mut vec = bumpalo::vec![in &b; 1, 2, 3];
+ /// vec.insert(1, 4);
+ /// assert_eq!(vec, [1, 4, 2, 3]);
+ /// vec.insert(4, 5);
+ /// assert_eq!(vec, [1, 4, 2, 3, 5]);
+ /// ```
+ pub fn insert(&mut self, index: usize, element: T) {
+ let len = self.len();
+ assert!(index <= len);
+
+ // space for the new element
+ if len == self.buf.cap() {
+ self.reserve(1);
+ }
+
+ unsafe {
+ // infallible
+ // The spot to put the new value
+ {
+ let p = self.as_mut_ptr().add(index);
+ // Shift everything over to make space. (Duplicating the
+ // `index`th element into two consecutive places.)
+ // `ptr::copy` is memmove-like: the ranges may overlap.
+ ptr::copy(p, p.offset(1), len - index);
+ // Write it in, overwriting the first copy of the `index`th
+ // element.
+ ptr::write(p, element);
+ }
+ self.set_len(len + 1);
+ }
+ }
+
+ /// Removes and returns the element at position `index` within the vector,
+ /// shifting all elements after it to the left.
+ ///
+ /// # Panics
+ ///
+ /// Panics if `index` is out of bounds.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::Vec};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// let mut v = bumpalo::vec![in &b; 1, 2, 3];
+ /// assert_eq!(v.remove(1), 2);
+ /// assert_eq!(v, [1, 3]);
+ /// ```
+ pub fn remove(&mut self, index: usize) -> T {
+ let len = self.len();
+ assert!(index < len);
+ unsafe {
+ // infallible
+ let ret;
+ {
+ // the place we are taking from.
+ let ptr = self.as_mut_ptr().add(index);
+ // copy it out, unsafely having a copy of the value on
+ // the stack and in the vector at the same time.
+ ret = ptr::read(ptr);
+
+ // Shift everything down to fill in that spot.
+ // Overlapping ranges are fine: `ptr::copy` behaves like memmove.
+ ptr::copy(ptr.offset(1), ptr, len - index - 1);
+ }
+ // Dropping the length un-duplicates the last element.
+ self.set_len(len - 1);
+ ret
+ }
+ }
+
+ /// Retains only the elements specified by the predicate.
+ ///
+ /// Every element `e` for which `f(&e)` returns `false` is removed; the
+ /// surviving elements keep their original order, and the work happens
+ /// in place.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::Vec};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// let mut vec = bumpalo::vec![in &b; 1, 2, 3, 4];
+ /// vec.retain(|&x| x % 2 == 0);
+ /// assert_eq!(vec, [2, 4]);
+ /// ```
+ pub fn retain<F>(&mut self, f: F)
+ where
+ F: FnMut(&T) -> bool,
+ {
+ // Keeping an element is exactly *not* draining it out.
+ let mut keep = f;
+ self.drain_filter(|element| !keep(element));
+ }
+
+ /// Creates an iterator that removes the elements in the vector
+ /// for which the predicate returns `true` and yields the removed items.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bumpalo::Bump;
+ /// use bumpalo::collections::{CollectIn, Vec};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// let mut numbers = bumpalo::vec![in &b; 1, 2, 3, 4, 5];
+ ///
+ /// let evens: Vec<_> = numbers.drain_filter(|x| *x % 2 == 0).collect_in(&b);
+ ///
+ /// assert_eq!(numbers, &[1, 3, 5]);
+ /// assert_eq!(evens, &[2, 4]);
+ /// ```
+ pub fn drain_filter<'a, F>(&'a mut self, filter: F) -> DrainFilter<'a, 'bump, T, F>
+ where
+ F: FnMut(&mut T) -> bool,
+ {
+ let old_len = self.len();
+
+ // Guard against us getting leaked (leak amplification)
+ // With the length zeroed, a leaked iterator can at worst leak the
+ // elements — it can never expose moved-from slots. The real length is
+ // presumably restored by `DrainFilter`'s `Drop` impl (defined
+ // elsewhere in this file) — verify there.
+ unsafe {
+ self.set_len(0);
+ }
+
+ DrainFilter {
+ vec: self,
+ // Next index to examine.
+ idx: 0,
+ // Count of elements removed so far.
+ del: 0,
+ old_len,
+ pred: filter,
+ }
+ }
+
+ /// Removes all but the first of consecutive elements in the vector that
+ /// resolve to the same key.
+ ///
+ /// If the vector is sorted, this removes all duplicates.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::Vec};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// let mut vec = bumpalo::vec![in &b; 10, 20, 21, 30, 20];
+ ///
+ /// vec.dedup_by_key(|i| *i / 10);
+ ///
+ /// assert_eq!(vec, [10, 20, 30, 20]);
+ /// ```
+ #[inline]
+ pub fn dedup_by_key<F, K>(&mut self, mut key: F)
+ where
+ F: FnMut(&mut T) -> K,
+ K: PartialEq,
+ {
+ // Two neighbors belong to the same bucket when their derived keys
+ // compare equal.
+ self.dedup_by(|a, b| {
+ let key_a = key(a);
+ let key_b = key(b);
+ key_a == key_b
+ })
+ }
+
+ /// Removes all but the first of consecutive elements in the vector
+ /// satisfying a given equality relation.
+ ///
+ /// The `same_bucket` function receives references to two neighboring
+ /// elements and decides whether they compare equal. The arguments arrive
+ /// in opposite order from the slice, so when `same_bucket(a, b)` returns
+ /// `true`, `a` is the one removed.
+ ///
+ /// If the vector is sorted, this removes all duplicates.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::Vec};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// let mut vec = bumpalo::vec![in &b; "foo", "bar", "Bar", "baz", "bar"];
+ ///
+ /// vec.dedup_by(|a, b| a.eq_ignore_ascii_case(b));
+ ///
+ /// assert_eq!(vec, ["foo", "bar", "baz", "bar"]);
+ /// ```
+ pub fn dedup_by<F>(&mut self, same_bucket: F)
+ where
+ F: FnMut(&mut T, &mut T) -> bool,
+ {
+ // Partition the slice into a prefix of unique elements and a tail of
+ // duplicates, then chop the tail off. The inner block ends the mutable
+ // borrow of the slice before `truncate` re-borrows `self`.
+ let retained = {
+ let (unique, _duplicates) = partition_dedup_by(self.as_mut_slice(), same_bucket);
+ unique.len()
+ };
+ self.truncate(retained);
+ }
+
+ /// Appends an element to the back of a vector.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the number of elements in the vector overflows a `usize`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::Vec};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// let mut vec = bumpalo::vec![in &b; 1, 2];
+ /// vec.push(3);
+ /// assert_eq!(vec, [1, 2, 3]);
+ /// ```
+ #[inline]
+ pub fn push(&mut self, value: T) {
+ // This will panic or abort if we would allocate > isize::MAX bytes
+ // or if the length increment would overflow for zero-sized types.
+ // Grow only when completely full; `reserve` chooses the growth policy.
+ if self.len == self.buf.cap() {
+ self.reserve(1);
+ }
+ unsafe {
+ // Write into the first free slot, then publish it by bumping `len`.
+ let end = self.buf.ptr().add(self.len);
+ ptr::write(end, value);
+ self.len += 1;
+ }
+ }
+
+ /// Removes the last element from a vector and returns it, or [`None`] if it
+ /// is empty.
+ ///
+ /// [`None`]: https://doc.rust-lang.org/std/option/enum.Option.html#variant.None
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::Vec};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// let mut vec = bumpalo::vec![in &b; 1, 2, 3];
+ /// assert_eq!(vec.pop(), Some(3));
+ /// assert_eq!(vec, [1, 2]);
+ /// ```
+ #[inline]
+ pub fn pop(&mut self) -> Option<T> {
+ if self.len == 0 {
+ return None;
+ }
+ // Shrink first so the slot is no longer reachable through the vector,
+ // then move its (still initialized) contents out.
+ self.len -= 1;
+ unsafe { Some(ptr::read(self.as_ptr().add(self.len))) }
+ }
+
+ /// Moves all the elements of `other` into `Self`, leaving `other` empty.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the number of elements in the vector overflows a `usize`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::Vec};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// let mut vec = bumpalo::vec![in &b; 1, 2, 3];
+ /// let mut vec2 = bumpalo::vec![in &b; 4, 5, 6];
+ /// vec.append(&mut vec2);
+ /// assert_eq!(vec, [1, 2, 3, 4, 5, 6]);
+ /// assert_eq!(vec2, []);
+ /// ```
+ #[inline]
+ pub fn append(&mut self, other: &mut Self) {
+ unsafe {
+ // Bitwise-copy `other`'s elements onto our tail, then mark `other`
+ // empty so it no longer owns (and will never drop) those elements.
+ self.append_elements(other.as_slice() as _);
+ other.set_len(0);
+ }
+ }
+
+ /// Appends elements to `Self` from other buffer.
+ ///
+ /// # Safety
+ ///
+ /// `other` must point to `(*other).len()` initialized elements, and the
+ /// caller must subsequently relinquish ownership of them (as `append`
+ /// does via `set_len(0)`) — otherwise they would be dropped twice.
+ #[inline]
+ unsafe fn append_elements(&mut self, other: *const [T]) {
+ let count = (*other).len();
+ self.reserve(count);
+ let len = self.len();
+ // The two buffers are distinct, so a nonoverlapping copy is used.
+ ptr::copy_nonoverlapping(other as *const T, self.as_mut_ptr().add(len), count);
+ self.len += count;
+ }
+
+ /// Creates a draining iterator that removes the specified range in the vector
+ /// and yields the removed items.
+ ///
+ /// Note 1: The element range is removed even if the iterator is only
+ /// partially consumed or not consumed at all.
+ ///
+ /// Note 2: It is unspecified how many elements are removed from the vector
+ /// if the `Drain` value is leaked.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the starting point is greater than the end point or if
+ /// the end point is greater than the length of the vector.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bumpalo::Bump;
+ /// use bumpalo::collections::{CollectIn, Vec};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// let mut v = bumpalo::vec![in &b; 1, 2, 3];
+ ///
+ /// let u: Vec<_> = v.drain(1..).collect_in(&b);
+ ///
+ /// assert_eq!(v, &[1]);
+ /// assert_eq!(u, &[2, 3]);
+ ///
+ /// // A full range clears the vector
+ /// v.drain(..);
+ /// assert_eq!(v, &[]);
+ /// ```
+ pub fn drain<R>(&mut self, range: R) -> Drain<T>
+ where
+ R: RangeBounds<usize>,
+ {
+ // Memory safety
+ //
+ // When the Drain is first created, it shortens the length of
+ // the source vector to make sure no uninitialized or moved-from elements
+ // are accessible at all if the Drain's destructor never gets to run.
+ //
+ // Drain will ptr::read out the values to remove.
+ // When finished, remaining tail of the vec is copied back to cover
+ // the hole, and the vector length is restored to the new length.
+ //
+ // Resolve the generic `RangeBounds` into a concrete half-open
+ // `start..end` pair before validating it.
+ let len = self.len();
+ let start = match range.start_bound() {
+ Included(&n) => n,
+ Excluded(&n) => n + 1,
+ Unbounded => 0,
+ };
+ let end = match range.end_bound() {
+ Included(&n) => n + 1,
+ Excluded(&n) => n,
+ Unbounded => len,
+ };
+ // These asserts produce the panics documented above.
+ assert!(start <= end);
+ assert!(end <= len);
+
+ unsafe {
+ // set self.vec length's to start, to be safe in case Drain is leaked
+ self.set_len(start);
+ // Use the borrow in the IterMut to indicate borrowing behavior of the
+ // whole Drain iterator (like &mut T).
+ let range_slice = slice::from_raw_parts_mut(self.as_mut_ptr().add(start), end - start);
+ Drain {
+ // Elements after the drained range; shifted back down when the
+ // `Drain` is dropped.
+ tail_start: end,
+ tail_len: len - end,
+ iter: range_slice.iter(),
+ vec: NonNull::from(self),
+ }
+ }
+ }
+
+ /// Clears the vector, removing all values.
+ ///
+ /// Note that this method has no effect on the allocated capacity
+ /// of the vector.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::Vec};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// let mut v = bumpalo::vec![in &b; 1, 2, 3];
+ ///
+ /// v.clear();
+ ///
+ /// assert!(v.is_empty());
+ /// ```
+ #[inline]
+ pub fn clear(&mut self) {
+ // Delegates to `truncate`, which drops every element (panic-safely)
+ // while leaving the buffer's capacity intact.
+ self.truncate(0)
+ }
+
+ /// Returns the number of elements in the vector, also referred to
+ /// as its 'length'.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::Vec};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// let a = bumpalo::vec![in &b; 1, 2, 3];
+ /// assert_eq!(a.len(), 3);
+ /// ```
+ #[inline]
+ pub fn len(&self) -> usize {
+ // The length is tracked directly in the `len` field.
+ self.len
+ }
+
+ /// Returns `true` if the vector contains no elements.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::Vec};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// let mut v = Vec::new_in(&b);
+ /// assert!(v.is_empty());
+ ///
+ /// v.push(1);
+ /// assert!(!v.is_empty());
+ /// ```
+ pub fn is_empty(&self) -> bool {
+ // Emptiness is purely a zero length; capacity plays no part.
+ self.len == 0
+ }
+
+ /// Splits the collection into two at the given index.
+ ///
+ /// Returns a newly allocated vector. `self` contains elements `[0, at)`,
+ /// and the returned vector contains elements `[at, len)`.
+ ///
+ /// Note that the capacity of `self` does not change.
+ ///
+ /// # Panics
+ ///
+ /// Panics if `at > len`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::Vec};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// let mut vec = bumpalo::vec![in &b; 1, 2, 3];
+ /// let vec2 = vec.split_off(1);
+ /// assert_eq!(vec, [1]);
+ /// assert_eq!(vec2, [2, 3]);
+ /// ```
+ #[inline]
+ pub fn split_off(&mut self, at: usize) -> Self {
+ assert!(at <= self.len(), "`at` out of bounds");
+
+ // The new vector lives in the same bump arena as `self`.
+ let other_len = self.len - at;
+ let mut other = Vec::with_capacity_in(other_len, self.buf.bump());
+
+ // Unsafely `set_len` and copy items to `other`.
+ unsafe {
+ self.set_len(at);
+ other.set_len(other_len);
+
+ // Distinct allocations, so a nonoverlapping copy is sound; after
+ // the two `set_len`s each element is owned by exactly one vector.
+ ptr::copy_nonoverlapping(self.as_ptr().add(at), other.as_mut_ptr(), other.len());
+ }
+ other
+ }
+}
+
+#[cfg(feature = "boxed")]
+impl<'bump, T> Vec<'bump, T> {
+ /// Converts the vector into [`Box<[T]>`][owned slice].
+ ///
+ /// Note that this will drop any excess capacity.
+ ///
+ /// [owned slice]: ../../boxed/struct.Box.html
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::Vec, vec};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// let v = vec![in &b; 1, 2, 3];
+ ///
+ /// let slice = v.into_boxed_slice();
+ /// ```
+ pub fn into_boxed_slice(mut self) -> crate::boxed::Box<'bump, [T]> {
+ use crate::boxed::Box;
+
+ // Unlike `alloc::vec::Vec` shrinking here isn't necessary as `bumpalo::boxed::Box` doesn't own memory.
+ unsafe {
+ // The first `len` slots are initialized; `mem::forget` disarms
+ // `self`'s destructor so the box has sole ownership of them.
+ let slice = slice::from_raw_parts_mut(self.as_mut_ptr(), self.len);
+ let output: Box<'bump, [T]> = Box::from_raw(slice);
+ mem::forget(self);
+ output
+ }
+ }
+}
+
+impl<'bump, T: 'bump + Clone> Vec<'bump, T> {
+ /// Resizes the `Vec` in-place so that `len` is equal to `new_len`.
+ ///
+ /// When `new_len` exceeds the current length the vector grows, filling
+ /// every new slot with a clone of `value`; when `new_len` is smaller the
+ /// vector is simply truncated.
+ ///
+ /// This method requires [`Clone`] to be able clone the passed value. If
+ /// you need more flexibility (or want to rely on [`Default`] instead of
+ /// [`Clone`]), use [`resize_with`].
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::Vec};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// let mut vec = bumpalo::vec![in &b; "hello"];
+ /// vec.resize(3, "world");
+ /// assert_eq!(vec, ["hello", "world", "world"]);
+ ///
+ /// let mut vec = bumpalo::vec![in &b; 1, 2, 3, 4];
+ /// vec.resize(2, 0);
+ /// assert_eq!(vec, [1, 2]);
+ /// ```
+ ///
+ /// [`Clone`]: https://doc.rust-lang.org/std/clone/trait.Clone.html
+ /// [`Default`]: https://doc.rust-lang.org/std/default/trait.Default.html
+ /// [`resize_with`]: #method.resize_with
+ pub fn resize(&mut self, new_len: usize, value: T) {
+ let old_len = self.len();
+
+ if new_len <= old_len {
+ // Shrinking (or no-op): drop the surplus elements.
+ self.truncate(new_len);
+ } else {
+ // Growing: fill the fresh slots with clones of `value`.
+ self.extend_with(new_len - old_len, ExtendElement(value))
+ }
+ }
+
+ /// Clones and appends all elements in a slice to the `Vec`.
+ ///
+ /// Iterates over the slice `other`, clones each element, and then appends
+ /// it to this `Vec`. The `other` vector is traversed in-order.
+ ///
+ /// Note that this function is same as [`extend`] except that it is
+ /// specialized to work with slices instead. If and when Rust gets
+ /// specialization this function will likely be deprecated (but still
+ /// available).
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::Vec};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// let mut vec = bumpalo::vec![in &b; 1];
+ /// vec.extend_from_slice(&[2, 3, 4]);
+ /// assert_eq!(vec, [1, 2, 3, 4]);
+ /// ```
+ ///
+ /// [`extend`]: #method.extend
+ pub fn extend_from_slice(&mut self, other: &[T]) {
+ self.extend(other.iter().map(Clone::clone))
+ }
+}
+
+// This code generalises `extend_with_{element,default}`.
+// Strategy object used by `extend_with`: `next` yields a value for each new
+// slot, while `last` consumes the generator so the final slot can take
+// ownership of the source value without an extra clone.
+trait ExtendWith<T> {
+ fn next(&mut self) -> T;
+ fn last(self) -> T;
+}
+
+// Fills every slot with clones of a single element; the last slot receives
+// the element itself.
+struct ExtendElement<T>(T);
+impl<T: Clone> ExtendWith<T> for ExtendElement<T> {
+ fn next(&mut self) -> T {
+ self.0.clone()
+ }
+ fn last(self) -> T {
+ self.0
+ }
+}
+
+impl<'bump, T: 'bump> Vec<'bump, T> {
+ /// Extend the vector by `n` values, using the given generator.
+ fn extend_with<E: ExtendWith<T>>(&mut self, n: usize, mut value: E) {
+ self.reserve(n);
+
+ unsafe {
+ let mut ptr = self.as_mut_ptr().add(self.len());
+ // Use SetLenOnDrop to work around bug where compiler
+ // may not realize the store through `ptr` through self.set_len()
+ // don't alias.
+ let mut local_len = SetLenOnDrop::new(&mut self.len);
+
+ // Write all elements except the last one
+ // (`n - 1` clones via `next`, then one move via `last` below).
+ for _ in 1..n {
+ ptr::write(ptr, value.next());
+ ptr = ptr.offset(1);
+ // Increment the length in every step in case next() panics
+ local_len.increment_len(1);
+ }
+
+ if n > 0 {
+ // We can write the last element directly without cloning needlessly
+ ptr::write(ptr, value.last());
+ local_len.increment_len(1);
+ }
+
+ // len set by scope guard
+ }
+ }
+}
+
+// Set the length of the vec when the `SetLenOnDrop` value goes out of scope.
+//
+// The idea is: The length field in SetLenOnDrop is a local variable
+// that the optimizer will see does not alias with any stores through the Vec's data
+// pointer. This is a workaround for alias analysis issue #32155
+struct SetLenOnDrop<'a> {
+ // Destination: the vector's real `len` field, written once on drop.
+ len: &'a mut usize,
+ // Shadow length mutated during the operation.
+ local_len: usize,
+}
+
+impl<'a> SetLenOnDrop<'a> {
+ #[inline]
+ fn new(len: &'a mut usize) -> Self {
+ SetLenOnDrop {
+ // Start the shadow from the current length.
+ local_len: *len,
+ len,
+ }
+ }
+
+ #[inline]
+ fn increment_len(&mut self, increment: usize) {
+ self.local_len += increment;
+ }
+
+ #[inline]
+ fn decrement_len(&mut self, decrement: usize) {
+ self.local_len -= decrement;
+ }
+}
+
+impl<'a> Drop for SetLenOnDrop<'a> {
+ #[inline]
+ fn drop(&mut self) {
+ // Commit the shadow length — runs on normal exit *and* during
+ // unwinding, which is what makes callers panic-safe.
+ *self.len = self.local_len;
+ }
+}
+
+impl<'bump, T: 'bump + PartialEq> Vec<'bump, T> {
+ /// Removes consecutive repeated elements in the vector according to the
+ /// [`PartialEq`] trait implementation.
+ ///
+ /// If the vector is sorted, this removes all duplicates.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::Vec};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// let mut vec = bumpalo::vec![in &b; 1, 2, 2, 3, 2];
+ ///
+ /// vec.dedup();
+ ///
+ /// assert_eq!(vec, [1, 2, 3, 2]);
+ /// ```
+ #[inline]
+ pub fn dedup(&mut self) {
+ // Neighbors are duplicates exactly when they compare equal.
+ self.dedup_by(|lhs, rhs| *lhs == *rhs)
+ }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+// Common trait implementations for Vec
+////////////////////////////////////////////////////////////////////////////////
+
+impl<'bump, T: 'bump + Clone> Clone for Vec<'bump, T> {
+ #[cfg(not(test))]
+ fn clone(&self) -> Vec<'bump, T> {
+ // The clone is allocated in the *same* bump arena as the source.
+ let mut v = Vec::with_capacity_in(self.len(), self.buf.bump());
+ v.extend(self.iter().cloned());
+ v
+ }
+
+ // HACK(japaric): with cfg(test) the inherent `[T]::to_vec` method, which is
+ // required for this method definition, is not available. Instead use the
+ // `slice::to_vec` function which is only available with cfg(test)
+ // NB see the slice::hack module in slice.rs for more information
+ #[cfg(test)]
+ fn clone(&self) -> Vec<'bump, T> {
+ // Same behavior as above, minus the up-front capacity reservation.
+ let mut v = Vec::new_in(self.buf.bump());
+ v.extend(self.iter().cloned());
+ v
+ }
+}
+
+impl<'bump, T: 'bump + Hash> Hash for Vec<'bump, T> {
+ #[inline]
+ fn hash<H: hash::Hasher>(&self, state: &mut H) {
+ // Hash exactly like the underlying slice, so a `Vec` and a `[T]`
+ // with the same contents produce the same hash.
+ (**self).hash(state)
+ }
+}
+
+impl<'bump, T, I> Index<I> for Vec<'bump, T>
+where
+ I: ::core::slice::SliceIndex<[T]>,
+{
+ type Output = I::Output;
+
+ #[inline]
+ fn index(&self, index: I) -> &Self::Output {
+ // Defer to slice indexing, including its out-of-bounds panics.
+ (**self).index(index)
+ }
+}
+
+impl<'bump, T, I> IndexMut<I> for Vec<'bump, T>
+where
+ I: ::core::slice::SliceIndex<[T]>,
+{
+ #[inline]
+ fn index_mut(&mut self, index: I) -> &mut Self::Output {
+ // Defer to mutable slice indexing, including its panics.
+ (**self).index_mut(index)
+ }
+}
+
+impl<'bump, T: 'bump> ops::Deref for Vec<'bump, T> {
+ type Target = [T];
+
+ fn deref(&self) -> &[T] {
+ let data = self.buf.ptr();
+ let count = self.len;
+ // SAFETY: the first `len` slots of the buffer hold initialized
+ // elements, and the shared borrow of `self` covers the slice.
+ unsafe { slice::from_raw_parts(data, count) }
+ }
+}
+
+impl<'bump, T: 'bump> ops::DerefMut for Vec<'bump, T> {
+ fn deref_mut(&mut self) -> &mut [T] {
+ let data = self.buf.ptr();
+ let count = self.len;
+ // SAFETY: the first `len` slots hold initialized elements, and the
+ // exclusive borrow of `self` guarantees unique access.
+ unsafe { slice::from_raw_parts_mut(data, count) }
+ }
+}
+
+impl<'bump, T: 'bump> IntoIterator for Vec<'bump, T> {
+ type Item = T;
+ type IntoIter = IntoIter<'bump, T>;
+
+ /// Creates a consuming iterator, that is, one that moves each value out of
+ /// the vector (from start to end). The vector cannot be used after calling
+ /// this.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::Vec};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// let v = bumpalo::vec![in &b; "a".to_string(), "b".to_string()];
+ /// for s in v.into_iter() {
+ /// // s has type String, not &String
+ /// println!("{}", s);
+ /// }
+ /// ```
+ #[inline]
+ fn into_iter(mut self) -> IntoIter<'bump, T> {
+ unsafe {
+ let begin = self.as_mut_ptr();
+ // assume(!begin.is_null());
+ // For zero-sized `T` the `end` pointer encodes the element *count*
+ // (begin + len as an address) rather than a real past-the-end
+ // location, since all ZST "elements" share one address.
+ let end = if mem::size_of::<T>() == 0 {
+ arith_offset(begin as *const i8, self.len() as isize) as *const T
+ } else {
+ begin.add(self.len()) as *const T
+ };
+ // Disarm the vector's destructor: ownership of the elements has
+ // moved into the returned iterator.
+ mem::forget(self);
+ IntoIter {
+ phantom: PhantomData,
+ ptr: begin,
+ end,
+ }
+ }
+ }
+}
+
+impl<'a, 'bump, T> IntoIterator for &'a Vec<'bump, T> {
+ type Item = &'a T;
+ type IntoIter = slice::Iter<'a, T>;
+
+ fn into_iter(self) -> slice::Iter<'a, T> {
+ // Borrowing iteration is just iteration over the backing slice.
+ self.as_slice().iter()
+ }
+}
+
+impl<'a, 'bump, T> IntoIterator for &'a mut Vec<'bump, T> {
+ type Item = &'a mut T;
+ type IntoIter = slice::IterMut<'a, T>;
+
+ fn into_iter(self) -> slice::IterMut<'a, T> {
+ // Mutable borrowing iteration over the backing slice.
+ self.as_mut_slice().iter_mut()
+ }
+}
+
+impl<'bump, T: 'bump> Extend<T> for Vec<'bump, T> {
+ #[inline]
+ fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
+ let items = iter.into_iter();
+ // Reserve the iterator's lower size bound up front to cut down on
+ // reallocations; `push` still grows as needed beyond that.
+ let (lower_bound, _) = items.size_hint();
+ self.reserve(lower_bound);
+
+ for item in items {
+ self.push(item);
+ }
+ }
+}
+
+impl<'bump, T: 'bump> Vec<'bump, T> {
+ /// Creates a splicing iterator that replaces the specified range in the vector
+ /// with the given `replace_with` iterator and yields the removed items.
+ /// `replace_with` does not need to be the same length as `range`.
+ ///
+ /// Note 1: The element range is removed even if the iterator is not
+ /// consumed until the end.
+ ///
+ /// Note 2: It is unspecified how many elements are removed from the vector,
+ /// if the `Splice` value is leaked.
+ ///
+ /// Note 3: The input iterator `replace_with` is only consumed
+ /// when the `Splice` value is dropped.
+ ///
+ /// Note 4: This is optimal if:
+ ///
+ /// * The tail (elements in the vector after `range`) is empty,
+ /// * or `replace_with` yields fewer elements than `range`’s length
+ /// * or the lower bound of its `size_hint()` is exact.
+ ///
+ /// Otherwise, a temporary vector is allocated and the tail is moved twice.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the starting point is greater than the end point or if
+ /// the end point is greater than the length of the vector.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::Vec};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// let mut v = bumpalo::vec![in &b; 1, 2, 3];
+ /// let new = [7, 8];
+ /// let u: Vec<_> = Vec::from_iter_in(v.splice(..2, new.iter().cloned()), &b);
+ /// assert_eq!(v, &[7, 8, 3]);
+ /// assert_eq!(u, &[1, 2]);
+ /// ```
+ #[inline]
+ pub fn splice<R, I>(&mut self, range: R, replace_with: I) -> Splice<I::IntoIter>
+ where
+ R: RangeBounds<usize>,
+ I: IntoIterator<Item = T>,
+ {
+ // `drain` validates the range (and panics as documented); the
+ // replacement is only consumed later, when the `Splice` drops.
+ let drain = self.drain(range);
+ let replace_with = replace_with.into_iter();
+ Splice { drain, replace_with }
+ }
+}
+
+/// Extend implementation that copies elements out of references before pushing them onto the Vec.
+///
+/// This implementation is specialized for slice iterators, where it uses [`copy_from_slice`] to
+/// append the entire slice at once.
+///
+/// [`copy_from_slice`]: https://doc.rust-lang.org/std/primitive.slice.html#method.copy_from_slice
+impl<'a, 'bump, T: 'a + Copy> Extend<&'a T> for Vec<'bump, T> {
+ fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
+ // `T: Copy`, so copying out of the references is enough.
+ self.extend(iter.into_iter().copied())
+ }
+}
+
+// Generates `PartialEq` between vector-like and slice-like types by comparing
+// their contents as slices (`self[..] == other[..]`).
+macro_rules! __impl_slice_eq1 {
+ ($Lhs: ty, $Rhs: ty) => {
+ __impl_slice_eq1! { $Lhs, $Rhs, Sized }
+ };
+ ($Lhs: ty, $Rhs: ty, $Bound: ident) => {
+ impl<'a, 'b, A: $Bound, B> PartialEq<$Rhs> for $Lhs
+ where
+ A: PartialEq<B>,
+ {
+ #[inline]
+ fn eq(&self, other: &$Rhs) -> bool {
+ self[..] == other[..]
+ }
+ }
+ };
+}
+
+__impl_slice_eq1! { Vec<'a, A>, Vec<'b, B> }
+__impl_slice_eq1! { Vec<'a, A>, &'b [B] }
+__impl_slice_eq1! { Vec<'a, A>, &'b mut [B] }
+// __impl_slice_eq1! { Cow<'a, [A]>, Vec<'b, B>, Clone }
+
+// Same idea, but against fixed-size arrays of any length `N` (const generics).
+macro_rules! __impl_slice_eq1_array {
+ ($Lhs: ty, $Rhs: ty) => {
+ impl<'a, 'b, A, B, const N: usize> PartialEq<$Rhs> for $Lhs
+ where
+ A: PartialEq<B>,
+ {
+ #[inline]
+ fn eq(&self, other: &$Rhs) -> bool {
+ self[..] == other[..]
+ }
+ }
+ };
+}
+
+__impl_slice_eq1_array! { Vec<'a, A>, [B; N] }
+__impl_slice_eq1_array! { Vec<'a, A>, &'b [B; N] }
+__impl_slice_eq1_array! { Vec<'a, A>, &'b mut [B; N] }
+
+/// Implements comparison of vectors, lexicographically.
+impl<'bump, T: 'bump + PartialOrd> PartialOrd for Vec<'bump, T> {
+ #[inline]
+ fn partial_cmp(&self, other: &Vec<'bump, T>) -> Option<Ordering> {
+ // Compare as slices.
+ (**self).partial_cmp(&**other)
+ }
+}
+
+impl<'bump, T: 'bump + Eq> Eq for Vec<'bump, T> {}
+
+/// Implements ordering of vectors, lexicographically.
+impl<'bump, T: 'bump + Ord> Ord for Vec<'bump, T> {
+ #[inline]
+ fn cmp(&self, other: &Vec<'bump, T>) -> Ordering {
+ // Compare as slices.
+ (**self).cmp(&**other)
+ }
+}
+
+impl<'bump, T: 'bump + fmt::Debug> fmt::Debug for Vec<'bump, T> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ // Format exactly like the underlying slice, e.g. `[1, 2, 3]`.
+ fmt::Debug::fmt(self.as_slice(), f)
+ }
+}
+
+impl<'bump, T: 'bump> AsRef<Vec<'bump, T>> for Vec<'bump, T> {
+ fn as_ref(&self) -> &Vec<'bump, T> {
+ // Identity conversion.
+ self
+ }
+}
+
+impl<'bump, T: 'bump> AsMut<Vec<'bump, T>> for Vec<'bump, T> {
+ fn as_mut(&mut self) -> &mut Vec<'bump, T> {
+ // Identity conversion.
+ self
+ }
+}
+
+impl<'bump, T: 'bump> AsRef<[T]> for Vec<'bump, T> {
+ fn as_ref(&self) -> &[T] {
+ // View the vector as its backing slice.
+ self.as_slice()
+ }
+}
+
+impl<'bump, T: 'bump> AsMut<[T]> for Vec<'bump, T> {
+ fn as_mut(&mut self) -> &mut [T] {
+ // Mutable view of the backing slice.
+ self.as_mut_slice()
+ }
+}
+
+#[cfg(feature = "boxed")]
+impl<'bump, T: 'bump> From<Vec<'bump, T>> for crate::boxed::Box<'bump, [T]> {
+ fn from(vec: Vec<'bump, T>) -> crate::boxed::Box<'bump, [T]> {
+ // Hand the initialized prefix over as an owned slice in the same arena.
+ vec.into_boxed_slice()
+ }
+}
+
+impl<'bump, T: 'bump> Borrow<[T]> for Vec<'bump, T> {
+ #[inline]
+ fn borrow(&self) -> &[T] {
+ // Borrow the vector as its backing slice.
+ self.as_slice()
+ }
+}
+
+impl<'bump, T: 'bump> BorrowMut<[T]> for Vec<'bump, T> {
+ #[inline]
+ fn borrow_mut(&mut self) -> &mut [T] {
+ // Mutably borrow the vector as its backing slice.
+ self.as_mut_slice()
+ }
+}
+
+impl<'bump, T> Drop for Vec<'bump, T> {
+ fn drop(&mut self) {
+ unsafe {
+ // use drop for [T]
+ // use a raw slice to refer to the elements of the vector as weakest necessary type;
+ // could avoid questions of validity in certain cases
+ // Only the first `len` slots are dropped; spare capacity is
+ // untouched.
+ ptr::drop_in_place(ptr::slice_from_raw_parts_mut(self.as_mut_ptr(), self.len))
+ }
+ // RawVec handles deallocation
+ }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+// Clone-on-write
+////////////////////////////////////////////////////////////////////////////////
+
+// impl<'a, 'bump, T: Clone> From<Vec<'bump, T>> for Cow<'a, [T]> {
+// fn from(v: Vec<'bump, T>) -> Cow<'a, [T]> {
+// Cow::Owned(v)
+// }
+// }
+
+// impl<'a, 'bump, T: Clone> From<&'a Vec<'bump, T>> for Cow<'a, [T]> {
+// fn from(v: &'a Vec<'bump, T>) -> Cow<'a, [T]> {
+// Cow::Borrowed(v.as_slice())
+// }
+// }
+
+////////////////////////////////////////////////////////////////////////////////
+// Iterators
+////////////////////////////////////////////////////////////////////////////////
+
+/// An iterator that moves out of a vector.
+///
+/// This `struct` is created by the [`Vec::into_iter`] method
+/// (provided by the [`IntoIterator`] trait).
+///
+/// [`IntoIterator`]: https://doc.rust-lang.org/std/iter/trait.IntoIterator.html
+pub struct IntoIter<'bump, T> {
+ // Ties the iterator to the bump arena that owns the backing buffer.
+ phantom: PhantomData<&'bump [T]>,
+ // Next element to yield.
+ ptr: *const T,
+ // One past the last element. For zero-sized `T` this encodes the
+ // remaining count as an address instead (see `Vec::into_iter`).
+ end: *const T,
+}
+
+impl<'bump, T: fmt::Debug> fmt::Debug for IntoIter<'bump, T> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        // Show only the elements that have not been yielded yet.
+        let remaining = self.as_slice();
+        f.debug_tuple("IntoIter").field(&remaining).finish()
+    }
+}
+
+impl<'bump, T: 'bump> IntoIter<'bump, T> {
+    /// Returns the remaining items of this iterator as a slice.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use bumpalo::{Bump, collections::Vec};
+    ///
+    /// let b = Bump::new();
+    ///
+    /// let vec = bumpalo::vec![in &b; 'a', 'b', 'c'];
+    /// let mut into_iter = vec.into_iter();
+    /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
+    /// let _ = into_iter.next().unwrap();
+    /// assert_eq!(into_iter.as_slice(), &['b', 'c']);
+    /// ```
+    pub fn as_slice(&self) -> &[T] {
+        // SAFETY: `ptr..end` always brackets the not-yet-yielded elements,
+        // and `len()` is derived from those same pointers.
+        unsafe { slice::from_raw_parts(self.ptr, self.len()) }
+    }
+
+    /// Returns the remaining items of this iterator as a mutable slice.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use bumpalo::{Bump, collections::Vec};
+    ///
+    /// let b = Bump::new();
+    ///
+    /// let vec = bumpalo::vec![in &b; 'a', 'b', 'c'];
+    /// let mut into_iter = vec.into_iter();
+    /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
+    /// into_iter.as_mut_slice()[2] = 'z';
+    /// assert_eq!(into_iter.next().unwrap(), 'a');
+    /// assert_eq!(into_iter.next().unwrap(), 'b');
+    /// assert_eq!(into_iter.next().unwrap(), 'z');
+    /// ```
+    pub fn as_mut_slice(&mut self) -> &mut [T] {
+        // SAFETY: same bounds as `as_slice`; `&mut self` guarantees the
+        // returned unique slice cannot alias another live reference.
+        unsafe { slice::from_raw_parts_mut(self.ptr as *mut T, self.len()) }
+    }
+}
+
+// SAFETY: `IntoIter` logically owns its remaining elements (it moves them out
+// with `ptr::read`), so despite holding raw pointers it may be sent/shared
+// exactly when `T` allows it.
+unsafe impl<'bump, T: Send> Send for IntoIter<'bump, T> {}
+unsafe impl<'bump, T: Sync> Sync for IntoIter<'bump, T> {}
+
+impl<'bump, T: 'bump> Iterator for IntoIter<'bump, T> {
+    type Item = T;
+
+    #[inline]
+    fn next(&mut self) -> Option<T> {
+        unsafe {
+            if self.ptr as *const _ == self.end {
+                None
+            } else if mem::size_of::<T>() == 0 {
+                // purposefully don't use 'ptr.offset' because for
+                // vectors with 0-size elements this would return the
+                // same pointer.
+                self.ptr = arith_offset(self.ptr as *const i8, 1) as *mut T;
+
+                // Make up a value of this ZST.
+                Some(mem::zeroed())
+            } else {
+                // Move the front element out by value; `old` is never read again.
+                let old = self.ptr;
+                self.ptr = self.ptr.offset(1);
+
+                Some(ptr::read(old))
+            }
+        }
+    }
+
+    #[inline]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        let exact = if mem::size_of::<T>() == 0 {
+            // For ZSTs, `next` advances `ptr` by one byte per element, so the
+            // raw address difference *is* the remaining element count.
+            (self.end as usize).wrapping_sub(self.ptr as usize)
+        } else {
+            unsafe { offset_from(self.end, self.ptr) as usize }
+        };
+        // Lower and upper bound coincide: the exact length is always known.
+        (exact, Some(exact))
+    }
+
+    #[inline]
+    fn count(self) -> usize {
+        // O(1): uses the exact length instead of walking the iterator.
+        self.len()
+    }
+}
+
+impl<'bump, T: 'bump> DoubleEndedIterator for IntoIter<'bump, T> {
+    #[inline]
+    fn next_back(&mut self) -> Option<T> {
+        unsafe {
+            if self.end == self.ptr {
+                None
+            } else if mem::size_of::<T>() == 0 {
+                // See above for why 'ptr.offset' isn't used
+                self.end = arith_offset(self.end as *const i8, -1) as *mut T;
+
+                // Make up a value of this ZST.
+                Some(mem::zeroed())
+            } else {
+                // Step `end` back first, then move out the now-excluded
+                // last element.
+                self.end = self.end.offset(-1);
+
+                Some(ptr::read(self.end))
+            }
+        }
+    }
+}
+
+// `size_hint` is exact, so advertising `ExactSizeIterator` is sound.
+impl<'bump, T: 'bump> ExactSizeIterator for IntoIter<'bump, T> {}
+
+// Once `ptr == end`, `next` keeps returning `None`, so the iterator is fused.
+impl<'bump, T: 'bump> FusedIterator for IntoIter<'bump, T> {}
+
+impl<'bump, T> Drop for IntoIter<'bump, T> {
+    fn drop(&mut self) {
+        // Drop every element that was never yielded. The backing storage is
+        // not freed here.
+        while let Some(item) = self.next() {
+            drop(item);
+        }
+    }
+}
+
+/// A draining iterator for `Vec<'bump, T>`.
+///
+/// This `struct` is created by the [`Vec::drain`] method.
+pub struct Drain<'a, 'bump, T: 'a + 'bump> {
+    /// Index of tail to preserve
+    tail_start: usize,
+    /// Length of tail
+    tail_len: usize,
+    /// Current remaining range to remove
+    iter: slice::Iter<'a, T>,
+    /// Pointer back to the source vector; used by `Drop` to move the
+    /// preserved tail into place and restore the vector's length.
+    vec: NonNull<Vec<'bump, T>>,
+}
+
+impl<'a, 'bump, T: 'a + 'bump + fmt::Debug> fmt::Debug for Drain<'a, 'bump, T> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        // Show only the elements still waiting to be drained.
+        let remaining = self.iter.as_slice();
+        f.debug_tuple("Drain").field(&remaining).finish()
+    }
+}
+
+// SAFETY: `Drain` reads elements out by value with `ptr::read`, acting as the
+// owner of the drained elements, so `Send`/`Sync` follow `T`'s bounds.
+unsafe impl<'a, 'bump, T: Sync> Sync for Drain<'a, 'bump, T> {}
+unsafe impl<'a, 'bump, T: Send> Send for Drain<'a, 'bump, T> {}
+
+impl<'a, 'bump, T> Iterator for Drain<'a, 'bump, T> {
+    type Item = T;
+
+    #[inline]
+    fn next(&mut self) -> Option<T> {
+        // Move each drained element out by value; `Drain::drop` later repairs
+        // the source vector's tail and length.
+        self.iter
+            .next()
+            .map(|elt| unsafe { ptr::read(elt as *const _) })
+    }
+
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.iter.size_hint()
+    }
+}
+
+impl<'a, 'bump, T> DoubleEndedIterator for Drain<'a, 'bump, T> {
+    #[inline]
+    fn next_back(&mut self) -> Option<T> {
+        // Same as `next`, but consumes the drained range from the back.
+        self.iter
+            .next_back()
+            .map(|elt| unsafe { ptr::read(elt as *const _) })
+    }
+}
+
+impl<'a, 'bump, T> Drop for Drain<'a, 'bump, T> {
+    fn drop(&mut self) {
+        // exhaust self first, dropping any elements the caller never consumed
+        self.for_each(drop);
+
+        if self.tail_len > 0 {
+            unsafe {
+                let source_vec = self.vec.as_mut();
+                // memmove back untouched tail, update to new length
+                let start = source_vec.len();
+                let tail = self.tail_start;
+                if tail != start {
+                    // `ptr::copy` (not `copy_nonoverlapping`): the regions may
+                    // overlap when the drained gap is shorter than the tail.
+                    let src = source_vec.as_ptr().add(tail);
+                    let dst = source_vec.as_mut_ptr().add(start);
+                    ptr::copy(src, dst, self.tail_len);
+                }
+                source_vec.set_len(start + self.tail_len);
+            }
+        }
+    }
+}
+
+// The underlying `slice::Iter`'s size hint is exact, so `Drain`'s is too.
+impl<'a, 'bump, T> ExactSizeIterator for Drain<'a, 'bump, T> {}
+
+impl<'a, 'bump, T> FusedIterator for Drain<'a, 'bump, T> {}
+
+/// A splicing iterator for `Vec`.
+///
+/// This struct is created by the [`Vec::splice`] method. See its
+/// documentation for more information.
+#[derive(Debug)]
+pub struct Splice<'a, 'bump, I: Iterator + 'a + 'bump> {
+    /// Drains the replaced range, yielding the removed elements.
+    drain: Drain<'a, 'bump, I::Item>,
+    /// Produces the replacement elements; consumed by `Splice::drop`.
+    replace_with: I,
+}
+
+impl<'a, 'bump, I: Iterator> Iterator for Splice<'a, 'bump, I> {
+    type Item = I::Item;
+
+    /// Yields the next element removed from the spliced-out range.
+    fn next(&mut self) -> Option<Self::Item> {
+        Iterator::next(&mut self.drain)
+    }
+
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        Iterator::size_hint(&self.drain)
+    }
+}
+
+impl<'a, 'bump, I: Iterator> DoubleEndedIterator for Splice<'a, 'bump, I> {
+    /// Yields the next removed element from the back of the spliced-out range.
+    fn next_back(&mut self) -> Option<Self::Item> {
+        DoubleEndedIterator::next_back(&mut self.drain)
+    }
+}
+
+// `Drain`'s size hint is exact, so `Splice`'s (which forwards to it) is too.
+impl<'a, 'bump, I: Iterator> ExactSizeIterator for Splice<'a, 'bump, I> {}
+
+impl<'a, 'bump, I: Iterator> Drop for Splice<'a, 'bump, I> {
+    fn drop(&mut self) {
+        // Finish removing the drained range, then splice `replace_with` into
+        // the resulting gap. `Drain::drop` (which runs after this body) moves
+        // the tail back and restores the vector's length.
+        self.drain.by_ref().for_each(drop);
+
+        unsafe {
+            if self.drain.tail_len == 0 {
+                // No tail to preserve: simply append the replacements.
+                self.drain.vec.as_mut().extend(self.replace_with.by_ref());
+                return;
+            }
+
+            // First fill the range left by drain().
+            if !self.drain.fill(&mut self.replace_with) {
+                return;
+            }
+
+            // There may be more elements. Use the lower bound as an estimate.
+            // FIXME: Is the upper bound a better guess? Or something else?
+            let (lower_bound, _upper_bound) = self.replace_with.size_hint();
+            if lower_bound > 0 {
+                self.drain.move_tail(lower_bound);
+                if !self.drain.fill(&mut self.replace_with) {
+                    return;
+                }
+            }
+
+            // Collect any remaining elements.
+            // This is a zero-length vector which does not allocate if `lower_bound` was exact.
+            let mut collected = Vec::new_in(self.drain.vec.as_ref().buf.bump());
+            collected.extend(self.replace_with.by_ref());
+            let mut collected = collected.into_iter();
+            // Now we have an exact count.
+            if collected.len() > 0 {
+                self.drain.move_tail(collected.len());
+                let filled = self.drain.fill(&mut collected);
+                debug_assert!(filled);
+                debug_assert_eq!(collected.len(), 0);
+            }
+        }
+        // Let `Drain::drop` move the tail back if necessary and restore `vec.len`.
+    }
+}
+
+/// Private helper methods for `Splice::drop`
+impl<'a, 'bump, T> Drain<'a, 'bump, T> {
+    /// The range from `self.vec.len` to `self.tail_start` contains elements
+    /// that have been moved out.
+    /// Fill that range as much as possible with new elements from the `replace_with` iterator.
+    /// Return whether we filled the entire range. (`replace_with.next()` didn’t return `None`.)
+    unsafe fn fill<I: Iterator<Item = T>>(&mut self, replace_with: &mut I) -> bool {
+        let vec = self.vec.as_mut();
+        let range_start = vec.len;
+        let range_end = self.tail_start;
+        let range_slice =
+            slice::from_raw_parts_mut(vec.as_mut_ptr().add(range_start), range_end - range_start);
+
+        for place in range_slice {
+            if let Some(new_item) = replace_with.next() {
+                ptr::write(place, new_item);
+                // Grow `len` one element at a time so that a panic in the
+                // iterator leaves only initialized elements within `len`.
+                vec.len += 1;
+            } else {
+                return false;
+            }
+        }
+        true
+    }
+
+    /// Make room for inserting more elements before the tail.
+    ///
+    /// SAFETY: caller must have exclusive access to `self.vec`, and
+    /// `tail_start + tail_len + extra_capacity` must not overflow.
+    unsafe fn move_tail(&mut self, extra_capacity: usize) {
+        let vec = self.vec.as_mut();
+        let used_capacity = self.tail_start + self.tail_len;
+        vec.buf.reserve(used_capacity, extra_capacity);
+
+        let new_tail_start = self.tail_start + extra_capacity;
+        // `ptr::copy` handles the overlapping source/destination ranges.
+        let src = vec.as_ptr().add(self.tail_start);
+        let dst = vec.as_mut_ptr().add(new_tail_start);
+        ptr::copy(src, dst, self.tail_len);
+        self.tail_start = new_tail_start;
+    }
+}
+
+/// An iterator produced by calling [`Vec::drain_filter`].
+#[derive(Debug)]
+pub struct DrainFilter<'a, 'bump: 'a, T: 'a + 'bump, F>
+where
+    F: FnMut(&mut T) -> bool,
+{
+    /// The vector being drained.
+    vec: &'a mut Vec<'bump, T>,
+    /// Index of the next element to test against `pred`.
+    idx: usize,
+    /// Number of elements drained (yielded) so far.
+    del: usize,
+    /// The vector's length before draining began; `drop` restores
+    /// `old_len - del`.
+    old_len: usize,
+    /// The filter predicate; elements for which it returns `true` are yielded.
+    pred: F,
+}
+
+impl<'a, 'bump, T, F> Iterator for DrainFilter<'a, 'bump, T, F>
+where
+    F: FnMut(&mut T) -> bool,
+{
+    type Item = T;
+
+    fn next(&mut self) -> Option<T> {
+        unsafe {
+            while self.idx != self.old_len {
+                let i = self.idx;
+                self.idx += 1;
+                // View the first `old_len` elements regardless of the
+                // vector's current `len` field.
+                let v = slice::from_raw_parts_mut(self.vec.as_mut_ptr(), self.old_len);
+                if (self.pred)(&mut v[i]) {
+                    self.del += 1;
+                    return Some(ptr::read(&v[i]));
+                } else if self.del > 0 {
+                    // Shift the kept element left, over the hole left by the
+                    // previously drained elements.
+                    let del = self.del;
+                    let src: *const T = &v[i];
+                    let dst: *mut T = &mut v[i - del];
+                    // This is safe because self.vec has length 0
+                    // thus its elements will not have Drop::drop
+                    // called on them in the event of a panic.
+                    // NOTE(review): this assumes `Vec::drain_filter` set
+                    // `self.vec.len` to 0 before iteration — confirm there
+                    // (not visible in this part of the file).
+                    ptr::copy_nonoverlapping(src, dst, 1);
+                }
+            }
+            None
+        }
+    }
+
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        // Anywhere from zero to all of the untested elements may match.
+        (0, Some(self.old_len - self.idx))
+    }
+}
+
+impl<'a, 'bump, T, F> Drop for DrainFilter<'a, 'bump, T, F>
+where
+    F: FnMut(&mut T) -> bool,
+{
+    fn drop(&mut self) {
+        // Finish iterating so every remaining element is either drained
+        // (and dropped here) or compacted toward the front, then shrink the
+        // vector to exactly the kept elements.
+        self.for_each(drop);
+        unsafe {
+            self.vec.set_len(self.old_len - self.del);
+        }
+    }
+}
diff --git a/third_party/rust/bumpalo/src/lib.rs b/third_party/rust/bumpalo/src/lib.rs
new file mode 100644
index 0000000000..74dfcd4361
--- /dev/null
+++ b/third_party/rust/bumpalo/src/lib.rs
@@ -0,0 +1,2023 @@
+#![doc = include_str!("../README.md")]
+#![deny(missing_debug_implementations)]
+#![deny(missing_docs)]
+#![no_std]
+#![cfg_attr(
+ feature = "allocator_api",
+ feature(allocator_api, nonnull_slice_from_raw_parts)
+)]
+
+#[doc(hidden)]
+pub extern crate alloc as core_alloc;
+
+#[cfg(feature = "boxed")]
+pub mod boxed;
+#[cfg(feature = "collections")]
+pub mod collections;
+
+mod alloc;
+
+use core::cell::Cell;
+use core::fmt::Display;
+use core::iter;
+use core::marker::PhantomData;
+use core::mem;
+use core::ptr::{self, NonNull};
+use core::slice;
+use core::str;
+use core_alloc::alloc::{alloc, dealloc, Layout};
+#[cfg(feature = "allocator_api")]
+use core_alloc::alloc::{AllocError, Allocator};
+
+pub use alloc::AllocErr;
+
+/// An error returned from [`Bump::try_alloc_try_with`].
+///
+/// Distinguishes a failure of the allocation itself from a failure of the
+/// user-supplied initializer that runs after allocation succeeds.
+#[derive(Clone, PartialEq, Eq, Debug)]
+pub enum AllocOrInitError<E> {
+    /// Indicates that the initial allocation failed.
+    Alloc(AllocErr),
+    /// Indicates that the initializer failed with the contained error after
+    /// allocation.
+    ///
+    /// It is possible but not guaranteed that the allocated memory has been
+    /// released back to the allocator at this point.
+    Init(E),
+}
+impl<E> From<AllocErr> for AllocOrInitError<E> {
+    /// Wraps an allocation failure in [`AllocOrInitError::Alloc`].
+    fn from(e: AllocErr) -> Self {
+        AllocOrInitError::Alloc(e)
+    }
+}
+impl<E: Display> Display for AllocOrInitError<E> {
+    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        match self {
+            // Allocation failures render exactly as the inner `AllocErr` does.
+            Self::Alloc(err) => Display::fmt(err, f),
+            Self::Init(err) => write!(f, "initialization failed: {}", err),
+        }
+    }
+}
+
+/// An arena to bump allocate into.
+///
+/// ## No `Drop`s
+///
+/// Objects that are bump-allocated will never have their [`Drop`] implementation
+/// called &mdash; unless you do it manually yourself. This makes it relatively
+/// easy to leak memory or other resources.
+///
+/// If you have a type which internally manages
+///
+/// * an allocation from the global heap (e.g. [`Vec<T>`]),
+/// * open file descriptors (e.g. [`std::fs::File`]), or
+/// * any other resource that must be cleaned up (e.g. an `mmap`)
+///
+/// and relies on its `Drop` implementation to clean up the internal resource,
+/// then if you allocate that type with a `Bump`, you need to find a new way to
+/// clean up after it yourself.
+///
+/// Potential solutions are:
+///
+/// * Using [`bumpalo::boxed::Box::new_in`] instead of [`Bump::alloc`], that
+/// will drop wrapped values similarly to [`std::boxed::Box`]. Note that this
+/// requires enabling the `"boxed"` Cargo feature for this crate. **This is
+/// often the easiest solution.**
+///
+/// * Calling [`drop_in_place`][drop_in_place] or using
+/// [`std::mem::ManuallyDrop`][manuallydrop] to manually drop these types.
+///
+/// * Using [`bumpalo::collections::Vec`] instead of [`std::vec::Vec`].
+///
+/// * Avoiding allocating these problematic types within a `Bump`.
+///
+/// Note that not calling `Drop` is memory safe! Destructors are never
+/// guaranteed to run in Rust, you can't rely on them for enforcing memory
+/// safety.
+///
+/// [`Drop`]: https://doc.rust-lang.org/std/ops/trait.Drop.html
+/// [`Vec<T>`]: https://doc.rust-lang.org/std/vec/struct.Vec.html
+/// [`std::fs::File`]: https://doc.rust-lang.org/std/fs/struct.File.html
+/// [drop_in_place]: https://doc.rust-lang.org/std/ptr/fn.drop_in_place.html
+/// [manuallydrop]: https://doc.rust-lang.org/std/mem/struct.ManuallyDrop.html
+/// [`bumpalo::collections::Vec`]: collections/vec/struct.Vec.html
+/// [`std::vec::Vec`]: https://doc.rust-lang.org/std/vec/struct.Vec.html
+/// [`bumpalo::boxed::Box::new_in`]: boxed/struct.Box.html#method.new_in
+/// [`std::boxed::Box`]: https://doc.rust-lang.org/std/boxed/struct.Box.html
+///
+/// ## Example
+///
+/// ```
+/// use bumpalo::Bump;
+///
+/// // Create a new bump arena.
+/// let bump = Bump::new();
+///
+/// // Allocate values into the arena.
+/// let forty_two = bump.alloc(42);
+/// assert_eq!(*forty_two, 42);
+///
+/// // Mutable references are returned from allocation.
+/// let mut s = bump.alloc("bumpalo");
+/// *s = "the bump allocator; and also is a buffalo";
+/// ```
+///
+/// ## Allocation Methods Come in Many Flavors
+///
+/// There are various allocation methods on `Bump`, the simplest being
+/// [`alloc`][Bump::alloc]. The others exist to satisfy some combination of
+/// fallible allocation and initialization. The allocation methods are
+/// summarized in the following table:
+///
+/// <table>
+/// <thead>
+/// <tr>
+/// <th></th>
+/// <th>Infallible Allocation</th>
+/// <th>Fallible Allocation</th>
+/// </tr>
+/// </thead>
+/// <tr>
+/// <th>By Value</th>
+/// <td><a href="#method.alloc"><code>alloc</code></a></td>
+/// <td><a href="#method.try_alloc"><code>try_alloc</code></a></td>
+/// </tr>
+/// <tr>
+/// <th>Infallible Initializer Function</th>
+/// <td><a href="#method.alloc_with"><code>alloc_with</code></a></td>
+/// <td><a href="#method.try_alloc_with"><code>try_alloc_with</code></a></td>
+/// </tr>
+/// <tr>
+/// <th>Fallible Initializer Function</th>
+/// <td><a href="#method.alloc_try_with"><code>alloc_try_with</code></a></td>
+/// <td><a href="#method.try_alloc_try_with"><code>try_alloc_try_with</code></a></td>
+/// </tr>
+/// <tbody>
+/// </tbody>
+/// </table>
+///
+/// ### Fallible Allocation: The `try_alloc_` Method Prefix
+///
+/// These allocation methods let you recover from out-of-memory (OOM)
+/// scenarios, rather than raising a panic on OOM.
+///
+/// ```
+/// use bumpalo::Bump;
+///
+/// let bump = Bump::new();
+///
+/// match bump.try_alloc(MyStruct {
+/// // ...
+/// }) {
+/// Ok(my_struct) => {
+/// // Allocation succeeded.
+/// }
+/// Err(e) => {
+/// // Out of memory.
+/// }
+/// }
+///
+/// struct MyStruct {
+/// // ...
+/// }
+/// ```
+///
+/// ### Initializer Functions: The `_with` Method Suffix
+///
+/// Calling one of the generic `…alloc(x)` methods is essentially equivalent to
+/// the matching [`…alloc_with(|| x)`](?search=alloc_with). However if you use
+/// `…alloc_with`, then the closure will not be invoked until after allocating
+/// space for storing `x` on the heap.
+///
+/// This can be useful in certain edge-cases related to compiler optimizations.
+/// When evaluating for example `bump.alloc(x)`, semantically `x` is first put
+/// on the stack and then moved onto the heap. In some cases, the compiler is
+/// able to optimize this into constructing `x` directly on the heap, however
+/// in many cases it does not.
+///
+/// The `…alloc_with` functions try to help the compiler be smarter. In most
+/// cases doing for example `bump.try_alloc_with(|| x)` on release mode will be
+/// enough to help the compiler realize that this optimization is valid and
+/// to construct `x` directly onto the heap.
+///
+/// #### Warning
+///
+/// These functions critically depend on compiler optimizations to achieve their
+/// desired effect. This means that it is not an effective tool when compiling
+/// without optimizations on.
+///
+/// Even when optimizations are on, these functions do not **guarantee** that
+/// the value is constructed on the heap. To the best of our knowledge no such
+/// guarantee can be made in stable Rust as of 1.54.
+///
+/// ### Fallible Initialization: The `_try_with` Method Suffix
+///
+/// The generic [`…alloc_try_with(|| x)`](?search=_try_with) methods behave
+/// like the purely `_with` suffixed methods explained above. However, they
+/// allow for fallible initialization by accepting a closure that returns a
+/// [`Result`] and will attempt to undo the initial allocation if this closure
+/// returns [`Err`].
+///
+/// #### Warning
+///
+/// If the inner closure returns [`Ok`], space for the entire [`Result`] remains
+/// allocated inside `self`. This can be a problem especially if the [`Err`]
+/// variant is larger, but even otherwise there may be overhead for the
+/// [`Result`]'s discriminant.
+///
+/// <p><details><summary>Undoing the allocation in the <code>Err</code> case
+/// always fails if <code>f</code> successfully made any additional allocations
+/// in <code>self</code>.</summary>
+///
+/// For example, the following will always leak also space for the [`Result`]
+/// into this `Bump`, even though the inner reference isn't kept and the [`Err`]
+/// payload is returned semantically by value:
+///
+/// ```rust
+/// let bump = bumpalo::Bump::new();
+///
+/// let r: Result<&mut [u8; 1000], ()> = bump.alloc_try_with(|| {
+/// let _ = bump.alloc(0_u8);
+/// Err(())
+/// });
+///
+/// assert!(r.is_err());
+/// ```
+///
+///</details></p>
+///
+/// Since [`Err`] payloads are first placed on the heap and then moved to the
+/// stack, `bump.…alloc_try_with(|| x)?` is likely to execute more slowly than
+/// the matching `bump.…alloc(x?)` in case of initialization failure. If this
+/// happens frequently, using the plain un-suffixed method may perform better.
+///
+/// [`Result`]: https://doc.rust-lang.org/std/result/enum.Result.html
+/// [`Ok`]: https://doc.rust-lang.org/std/result/enum.Result.html#variant.Ok
+/// [`Err`]: https://doc.rust-lang.org/std/result/enum.Result.html#variant.Err
+///
+/// ### `Bump` Allocation Limits
+///
+/// `bumpalo` supports setting a limit on the maximum bytes of memory that can
+/// be allocated for use in a particular `Bump` arena. This limit can be set and removed with
+/// [`set_allocation_limit`][Bump::set_allocation_limit].
+/// The allocation limit is only enforced when allocating new backing chunks for
+/// a `Bump`. Updating the allocation limit will not affect existing allocations
+/// or any future allocations within the `Bump`'s current chunk.
+///
+/// #### Example
+///
+/// ```
+/// let bump = bumpalo::Bump::new();
+///
+/// assert_eq!(bump.allocation_limit(), None);
+/// bump.set_allocation_limit(Some(0));
+///
+/// assert!(bump.try_alloc(5).is_err());
+///
+/// bump.set_allocation_limit(Some(6));
+///
+/// assert_eq!(bump.allocation_limit(), Some(6));
+///
+/// bump.set_allocation_limit(None);
+///
+/// assert_eq!(bump.allocation_limit(), None);
+/// ```
+///
+/// #### Warning
+///
+/// Because of backwards compatibility, allocations that fail
+/// due to allocation limits will not present differently than
+/// errors due to resource exhaustion.
+
+#[derive(Debug)]
+pub struct Bump {
+    // The current chunk we are bump allocating within.
+    current_chunk_footer: Cell<NonNull<ChunkFooter>>,
+    // The arena's allocation limit in bytes, if any; `None` means unlimited.
+    // Only consulted when a new backing chunk must be allocated.
+    allocation_limit: Cell<Option<usize>>,
+}
+
+// Metadata for a single chunk of arena memory. The footer lives at the end of
+// its chunk's allocation, and chunks form a singly linked list via `prev`.
+#[repr(C)]
+#[derive(Debug)]
+struct ChunkFooter {
+    // Pointer to the start of this chunk allocation. This footer is always at
+    // the end of the chunk.
+    data: NonNull<u8>,
+
+    // The layout of this chunk's allocation.
+    layout: Layout,
+
+    // Link to the previous chunk.
+    //
+    // Note that the last node in the `prev` linked list is the canonical empty
+    // chunk, whose `prev` link points to itself.
+    prev: Cell<NonNull<ChunkFooter>>,
+
+    // Bump allocation finger that is always in the range `self.data..=self`.
+    ptr: Cell<NonNull<u8>>,
+
+    // The bytes allocated in all chunks so far, the canonical empty chunk has
+    // a size of 0 and for all other chunks, `allocated_bytes` will be
+    // the allocated_bytes of the current chunk plus the allocated bytes
+    // of the `prev` chunk.
+    allocated_bytes: usize,
+}
+
+/// A wrapper type for the canonical, statically allocated empty chunk.
+///
+/// For the canonical empty chunk to be `static`, its type must be `Sync`, which
+/// is the purpose of this wrapper type. This is safe because the empty chunk is
+/// immutable and never actually modified.
+#[repr(transparent)]
+struct EmptyChunkFooter(ChunkFooter);
+
+// SAFETY: see the type documentation above — the shared empty chunk is never
+// actually mutated, so sharing it across threads is sound.
+unsafe impl Sync for EmptyChunkFooter {}
+
+// The canonical empty chunk: `data` and `ptr` both point at the footer itself,
+// so it offers zero usable bytes; fresh `Bump`s start out pointing here.
+static EMPTY_CHUNK: EmptyChunkFooter = EmptyChunkFooter(ChunkFooter {
+    // This chunk is empty (except the foot itself).
+    layout: Layout::new::<ChunkFooter>(),
+
+    // The start of the (empty) allocatable region for this chunk is itself.
+    data: unsafe { NonNull::new_unchecked(&EMPTY_CHUNK as *const EmptyChunkFooter as *mut u8) },
+
+    // The end of the (empty) allocatable region for this chunk is also itself.
+    ptr: Cell::new(unsafe {
+        NonNull::new_unchecked(&EMPTY_CHUNK as *const EmptyChunkFooter as *mut u8)
+    }),
+
+    // Invariant: the last chunk footer in all `ChunkFooter::prev` linked lists
+    // is the empty chunk footer, whose `prev` points to itself.
+    prev: Cell::new(unsafe {
+        NonNull::new_unchecked(&EMPTY_CHUNK as *const EmptyChunkFooter as *mut ChunkFooter)
+    }),
+
+    // Empty chunks count as 0 allocated bytes in an arena.
+    allocated_bytes: 0,
+});
+
+impl EmptyChunkFooter {
+    /// Returns a `NonNull` pointer to the canonical empty chunk footer.
+    fn get(&'static self) -> NonNull<ChunkFooter> {
+        // A reference is never null, so this conversion cannot fail.
+        NonNull::from(&self.0)
+    }
+}
+
+impl ChunkFooter {
+    // Returns the start and length of the currently allocated region of this
+    // chunk.
+    //
+    // The bump finger starts at the footer and moves toward `data`, so the
+    // live region is `ptr..footer` and its length is the distance between
+    // the finger and the footer.
+    fn as_raw_parts(&self) -> (*const u8, usize) {
+        let data = self.data.as_ptr() as *const u8;
+        let ptr = self.ptr.get().as_ptr() as *const u8;
+        debug_assert!(data <= ptr);
+        debug_assert!(ptr <= self as *const ChunkFooter as *const u8);
+        // SAFETY: both pointers lie within the same chunk allocation, as the
+        // asserts above check in debug builds.
+        let len = unsafe { (self as *const ChunkFooter as *const u8).offset_from(ptr) as usize };
+        (ptr, len)
+    }
+
+    /// Is this chunk the last empty chunk?
+    fn is_empty(&self) -> bool {
+        // Identity comparison against the canonical static empty chunk.
+        ptr::eq(self, EMPTY_CHUNK.get().as_ptr())
+    }
+}
+
+impl Default for Bump {
+    /// Equivalent to [`Bump::new`].
+    fn default() -> Bump {
+        Self::new()
+    }
+}
+
+impl Drop for Bump {
+    fn drop(&mut self) {
+        unsafe {
+            // SAFETY: we have `&mut self`, so no borrows of the chunks can be
+            // outstanding, and the footer list is valid by construction.
+            dealloc_chunk_list(self.current_chunk_footer.get());
+        }
+    }
+}
+
+// Walk the chunk list and return every chunk to the global allocator, stopping
+// at the canonical (statically allocated) empty chunk.
+//
+// SAFETY: `footer` must head a valid chunk list that is never used again, and
+// each footer's `layout` must match the layout its chunk was allocated with.
+#[inline]
+unsafe fn dealloc_chunk_list(mut footer: NonNull<ChunkFooter>) {
+    while !footer.as_ref().is_empty() {
+        // Read `prev` before freeing: the footer lives inside the chunk
+        // that is about to be deallocated.
+        let f = footer;
+        footer = f.as_ref().prev.get();
+        dealloc(f.as_ref().data.as_ptr(), f.as_ref().layout);
+    }
+}
+
+// `Bump`s are safe to send between threads because nothing aliases its owned
+// chunks until you start allocating from it. But by the time you allocate from
+// it, the returned references to allocations borrow the `Bump` and therefore
+// prevent sending the `Bump` across threads until the borrows end.
+//
+// Note that `Bump` is deliberately not `Sync`: its `Cell` fields already make
+// it `!Sync` automatically, since allocating through `&Bump` mutates them.
+unsafe impl Send for Bump {}
+
+/// Rounds `n` up to the nearest multiple of `divisor` (which must be a
+/// nonzero power of two), returning `None` if the addition overflows.
+#[inline]
+pub(crate) fn round_up_to(n: usize, divisor: usize) -> Option<usize> {
+    debug_assert!(divisor > 0);
+    debug_assert!(divisor.is_power_of_two());
+    let mask = divisor - 1;
+    n.checked_add(mask).map(|sum| sum & !mask)
+}
+
+/// Rounds `n` down to the nearest multiple of `divisor` (which must be a
+/// nonzero power of two). Cannot overflow.
+#[inline]
+pub(crate) fn round_down_to(n: usize, divisor: usize) -> usize {
+    debug_assert!(divisor > 0);
+    debug_assert!(divisor.is_power_of_two());
+    let mask = divisor - 1;
+    n & !mask
+}
+
+// After this point, we try to hit page boundaries instead of powers of 2
+const PAGE_STRATEGY_CUTOFF: usize = 0x1000;
+
+// We only support alignments of up to 16 bytes for iter_allocated_chunks.
+const SUPPORTED_ITER_ALIGNMENT: usize = 16;
+const CHUNK_ALIGN: usize = SUPPORTED_ITER_ALIGNMENT;
+const FOOTER_SIZE: usize = mem::size_of::<ChunkFooter>();
+
+// Assert that ChunkFooter is at most the supported alignment. This will give a compile time error if it is not the case
+const _FOOTER_ALIGN_ASSERTION: bool = mem::align_of::<ChunkFooter>() <= CHUNK_ALIGN;
+const _: [(); _FOOTER_ALIGN_ASSERTION as usize] = [()];
+
+// Maximum typical overhead per allocation imposed by allocators.
+const MALLOC_OVERHEAD: usize = 16;
+
+// This is the overhead from malloc, footer and alignment. For instance, if
+// we want to request a chunk of memory that has at least X bytes usable for
+// allocations (where X is aligned to CHUNK_ALIGN), then we expect that the
+// after adding a footer, malloc overhead and alignment, the chunk of memory
+// the allocator actually sets aside for us is X+OVERHEAD rounded up to the
+// nearest suitable size boundary.
+const OVERHEAD: usize = (MALLOC_OVERHEAD + FOOTER_SIZE + (CHUNK_ALIGN - 1)) & !(CHUNK_ALIGN - 1);
+
+// Choose a relatively small default initial chunk size, since we double chunk
+// sizes as we grow bump arenas to amortize costs of hitting the global
+// allocator.
+const FIRST_ALLOCATION_GOAL: usize = 1 << 9;
+
+// The actual size of the first allocation is going to be a bit smaller
+// than the goal. We need to make room for the footer, and we also need
+// take the alignment into account.
+const DEFAULT_CHUNK_SIZE_WITHOUT_FOOTER: usize = FIRST_ALLOCATION_GOAL - OVERHEAD;
+
+/// The memory size and alignment details for a potential new chunk
+/// allocation.
+#[derive(Debug, Clone, Copy)]
+struct NewChunkMemoryDetails {
+    // Usable bytes in the chunk, excluding the trailing `ChunkFooter`.
+    new_size_without_footer: usize,
+    // Alignment of the whole chunk allocation.
+    align: usize,
+    // Total allocation size: `new_size_without_footer + FOOTER_SIZE`.
+    size: usize,
+}
+
+/// Wrapper around `Layout::from_size_align` that adds debug assertions.
+///
+/// SAFETY: callers must uphold `Layout::from_size_align`'s preconditions
+/// (`align` is a nonzero power of two; `size`, rounded up to `align`, does
+/// not overflow `isize`) — release builds skip the check entirely.
+#[inline]
+unsafe fn layout_from_size_align(size: usize, align: usize) -> Layout {
+    if cfg!(debug_assertions) {
+        Layout::from_size_align(size, align).unwrap()
+    } else {
+        Layout::from_size_align_unchecked(size, align)
+    }
+}
+
+// Cold path: report that a requested allocation size overflowed. Marked
+// `#[inline(never)]` to keep the panic machinery out of callers' hot paths.
+#[inline(never)]
+fn allocation_size_overflow<T>() -> T {
+    panic!("requested allocation size overflowed")
+}
+
+// This can be migrated to directly use `usize::abs_diff` when the MSRV
+// reaches `1.60`
+fn abs_diff(a: usize, b: usize) -> usize {
+    // Subtract the smaller from the larger; never underflows.
+    if a >= b {
+        a - b
+    } else {
+        b - a
+    }
+}
+
+impl Bump {
+    /// Construct a new arena to bump allocate into.
+    ///
+    /// ## Example
+    ///
+    /// ```
+    /// let bump = bumpalo::Bump::new();
+    /// # let _ = bump;
+    /// ```
+    pub fn new() -> Bump {
+        // A fresh arena starts with no backing chunk at all.
+        Bump::with_capacity(0)
+    }
+
+    /// Attempt to construct a new arena to bump allocate into.
+    ///
+    /// ## Example
+    ///
+    /// ```
+    /// let bump = bumpalo::Bump::try_new();
+    /// # let _ = bump.unwrap();
+    /// ```
+    pub fn try_new() -> Result<Bump, AllocErr> {
+        // Fallible twin of `new`; starts with no backing chunk.
+        Self::try_with_capacity(0)
+    }
+
+    /// Construct a new arena with the specified byte capacity to bump allocate into.
+    ///
+    /// Panics (via the global OOM handler) if the initial chunk cannot be
+    /// allocated.
+    ///
+    /// ## Example
+    ///
+    /// ```
+    /// let bump = bumpalo::Bump::with_capacity(100);
+    /// # let _ = bump;
+    /// ```
+    pub fn with_capacity(capacity: usize) -> Bump {
+        match Bump::try_with_capacity(capacity) {
+            Ok(bump) => bump,
+            Err(_) => oom(),
+        }
+    }
+
+    /// Attempt to construct a new arena with the specified byte capacity to bump allocate into.
+    ///
+    /// ## Example
+    ///
+    /// ```
+    /// let bump = bumpalo::Bump::try_with_capacity(100);
+    /// # let _ = bump.unwrap();
+    /// ```
+    pub fn try_with_capacity(capacity: usize) -> Result<Self, AllocErr> {
+        if capacity == 0 {
+            // Defer any real allocation: start at the shared, statically
+            // allocated empty chunk and grow on first use.
+            return Ok(Bump {
+                current_chunk_footer: Cell::new(EMPTY_CHUNK.get()),
+                allocation_limit: Cell::new(None),
+            });
+        }
+
+        let layout = unsafe { layout_from_size_align(capacity, 1) };
+
+        let chunk_footer = unsafe {
+            Self::new_chunk(
+                // Sizing overflow and allocator failure both map to `AllocErr`.
+                Bump::new_chunk_memory_details(None, layout).ok_or(AllocErr)?,
+                layout,
+                EMPTY_CHUNK.get(),
+            )
+            .ok_or(AllocErr)?
+        };
+
+        Ok(Bump {
+            current_chunk_footer: Cell::new(chunk_footer),
+            allocation_limit: Cell::new(None),
+        })
+    }
+
+    /// The allocation limit for this arena in bytes.
+    ///
+    /// Returns `None` if no limit is currently set.
+    ///
+    /// ## Example
+    ///
+    /// ```
+    /// let bump = bumpalo::Bump::with_capacity(0);
+    ///
+    /// assert_eq!(bump.allocation_limit(), None);
+    ///
+    /// bump.set_allocation_limit(Some(6));
+    ///
+    /// assert_eq!(bump.allocation_limit(), Some(6));
+    ///
+    /// bump.set_allocation_limit(None);
+    ///
+    /// assert_eq!(bump.allocation_limit(), None);
+    /// ```
+    pub fn allocation_limit(&self) -> Option<usize> {
+        self.allocation_limit.get()
+    }
+
+    /// Set the allocation limit in bytes for this arena.
+    ///
+    /// Passing `None` removes any previously set limit.
+    ///
+    /// The allocation limit is only enforced when allocating new backing chunks for
+    /// a `Bump`. Updating the allocation limit will not affect existing allocations
+    /// or any future allocations within the `Bump`'s current chunk.
+    ///
+    /// ## Example
+    ///
+    /// ```
+    /// let bump = bumpalo::Bump::with_capacity(0);
+    ///
+    /// bump.set_allocation_limit(Some(0));
+    ///
+    /// assert!(bump.try_alloc(5).is_err());
+    /// ```
+    pub fn set_allocation_limit(&self, limit: Option<usize>) {
+        self.allocation_limit.set(limit)
+    }
+
+    /// How much headroom an arena has before it hits its allocation
+    /// limit.
+    ///
+    /// Returns `None` both when no limit is configured and when the arena has
+    /// already allocated past the limit.
+    fn allocation_limit_remaining(&self) -> Option<usize> {
+        let limit = self.allocation_limit.get()?;
+        // `checked_sub` yields `None` exactly when we have already allocated
+        // more than the limit allows.
+        limit.checked_sub(self.allocated_bytes())
+    }
+
+    /// Whether a request to allocate a new chunk with a given size for a given
+    /// requested layout will fit under the allocation limit set on a `Bump`.
+    fn chunk_fits_under_limit(
+        allocation_limit_remaining: Option<usize>,
+        new_chunk_memory_details: NewChunkMemoryDetails,
+    ) -> bool {
+        match allocation_limit_remaining {
+            // Only the usable chunk size is charged against the limit;
+            // the footer is not counted.
+            Some(remaining) => remaining >= new_chunk_memory_details.new_size_without_footer,
+            // No limit configured: every chunk fits.
+            None => true,
+        }
+    }
+
+    /// Determine the memory details including final size, alignment and
+    /// final size without footer for a new chunk that would be allocated
+    /// to fulfill an allocation request.
+    ///
+    /// Returns `None` if rounding up to a page boundary overflows; other
+    /// size overflows panic via `allocation_size_overflow`.
+    fn new_chunk_memory_details(
+        new_size_without_footer: Option<usize>,
+        requested_layout: Layout,
+    ) -> Option<NewChunkMemoryDetails> {
+        let mut new_size_without_footer =
+            new_size_without_footer.unwrap_or(DEFAULT_CHUNK_SIZE_WITHOUT_FOOTER);
+
+        // We want to have CHUNK_ALIGN or better alignment
+        let mut align = CHUNK_ALIGN;
+
+        // If we already know we need to fulfill some request,
+        // make sure we allocate at least enough to satisfy it
+        align = align.max(requested_layout.align());
+        let requested_size =
+            round_up_to(requested_layout.size(), align).unwrap_or_else(allocation_size_overflow);
+        new_size_without_footer = new_size_without_footer.max(requested_size);
+
+        // We want our allocations to play nice with the memory allocator,
+        // and waste as little memory as possible.
+        // For small allocations, this means that the entire allocation
+        // including the chunk footer and mallocs internal overhead is
+        // as close to a power of two as we can go without going over.
+        // For larger allocations, we only need to get close to a page
+        // boundary without going over.
+        if new_size_without_footer < PAGE_STRATEGY_CUTOFF {
+            new_size_without_footer =
+                (new_size_without_footer + OVERHEAD).next_power_of_two() - OVERHEAD;
+        } else {
+            new_size_without_footer =
+                round_up_to(new_size_without_footer + OVERHEAD, 0x1000)? - OVERHEAD;
+        }
+
+        debug_assert_eq!(align % CHUNK_ALIGN, 0);
+        debug_assert_eq!(new_size_without_footer % CHUNK_ALIGN, 0);
+        let size = new_size_without_footer
+            .checked_add(FOOTER_SIZE)
+            .unwrap_or_else(allocation_size_overflow);
+
+        Some(NewChunkMemoryDetails {
+            new_size_without_footer,
+            size,
+            align,
+        })
+    }
+
/// Allocate a new chunk and return its initialized footer.
///
/// `requested_layout` is the layout of the allocation request that caused
/// us to fall back to allocating a new chunk of memory, and
/// `new_chunk_memory_details` describes the size and alignment of the new
/// chunk that will satisfy it. Returns `None` if the global allocator
/// refuses to provide the memory.
///
/// # Safety
///
/// `new_chunk_memory_details` must be internally consistent (as produced by
/// `Bump::new_chunk_memory_details` for `requested_layout`), and `prev`
/// must point to a valid `ChunkFooter`.
unsafe fn new_chunk(
    new_chunk_memory_details: NewChunkMemoryDetails,
    requested_layout: Layout,
    prev: NonNull<ChunkFooter>,
) -> Option<NonNull<ChunkFooter>> {
    let NewChunkMemoryDetails {
        new_size_without_footer,
        align,
        size,
    } = new_chunk_memory_details;

    let layout = layout_from_size_align(size, align);

    debug_assert!(size >= requested_layout.size());

    // Request the chunk from the global allocator; a null return is
    // surfaced as `None` rather than a panic.
    let data = alloc(layout);
    let data = NonNull::new(data)?;

    // The `ChunkFooter` is at the end of the chunk.
    let footer_ptr = data.as_ptr().add(new_size_without_footer);
    debug_assert_eq!((data.as_ptr() as usize) % align, 0);
    debug_assert_eq!(footer_ptr as usize % CHUNK_ALIGN, 0);
    let footer_ptr = footer_ptr as *mut ChunkFooter;

    // The bump pointer is initialized to the end of the range we will
    // bump out of (allocation moves downward from the footer toward `data`).
    let ptr = Cell::new(NonNull::new_unchecked(footer_ptr as *mut u8));

    // The `allocated_bytes` of a new chunk counts the total size
    // of the chunks, not how much of the chunks are used.
    let allocated_bytes = prev.as_ref().allocated_bytes + new_size_without_footer;

    ptr::write(
        footer_ptr,
        ChunkFooter {
            data,
            layout,
            prev: Cell::new(prev),
            ptr,
            allocated_bytes,
        },
    );

    Some(NonNull::new_unchecked(footer_ptr))
}
+
/// Reset this bump allocator.
///
/// Performs mass deallocation on everything allocated in this arena by
/// resetting the pointer into the underlying chunk of memory to the start
/// of the chunk. Does not run any `Drop` implementations on deallocated
/// objects; see [the top-level documentation](struct.Bump.html) for details.
///
/// If this arena has allocated multiple chunks to bump allocate into, then
/// the excess chunks are returned to the global allocator.
///
/// ## Example
///
/// ```
/// let mut bump = bumpalo::Bump::new();
///
/// // Allocate a bunch of things.
/// {
///     for i in 0..100 {
///         bump.alloc(i);
///     }
/// }
///
/// // Reset the arena.
/// bump.reset();
///
/// // Allocate some new things in the space previously occupied by the
/// // original things.
/// for j in 200..400 {
///     bump.alloc(j);
/// }
///```
pub fn reset(&mut self) {
    // Takes `&mut self` so `self` must be unique and there can't be any
    // borrows active that would get invalidated by resetting.
    unsafe {
        // Still on the shared empty chunk: nothing was ever allocated,
        // so there is nothing to free or rewind.
        if self.current_chunk_footer.get().as_ref().is_empty() {
            return;
        }

        let mut cur_chunk = self.current_chunk_footer.get();

        // Deallocate all chunks except the current one
        let prev_chunk = cur_chunk.as_ref().prev.replace(EMPTY_CHUNK.get());
        dealloc_chunk_list(prev_chunk);

        // Reset the bump finger to the end of the chunk.
        // (Allocation bumps downward from the footer toward `data`.)
        cur_chunk.as_ref().ptr.set(cur_chunk.cast());

        // Reset the allocated size of the chunk.
        // NOTE(review): `layout.size()` includes `FOOTER_SIZE`, while
        // `new_chunk` accumulates `allocated_bytes` *without* the footer
        // and the `allocated_bytes` docs say metadata is excluded — after
        // a reset this figure appears to over-count by the footer size;
        // confirm intended semantics against upstream.
        cur_chunk.as_mut().allocated_bytes = cur_chunk.as_ref().layout.size();

        debug_assert!(
            self.current_chunk_footer
                .get()
                .as_ref()
                .prev
                .get()
                .as_ref()
                .is_empty(),
            "We should only have a single chunk"
        );
        debug_assert_eq!(
            self.current_chunk_footer.get().as_ref().ptr.get(),
            self.current_chunk_footer.get().cast(),
            "Our chunk's bump finger should be reset to the start of its allocation"
        );
    }
}
+
+ /// Allocate an object in this `Bump` and return an exclusive reference to
+ /// it.
+ ///
+ /// ## Panics
+ ///
+ /// Panics if reserving space for `T` fails.
+ ///
+ /// ## Example
+ ///
+ /// ```
+ /// let bump = bumpalo::Bump::new();
+ /// let x = bump.alloc("hello");
+ /// assert_eq!(*x, "hello");
+ /// ```
+ #[inline(always)]
+ #[allow(clippy::mut_from_ref)]
+ pub fn alloc<T>(&self, val: T) -> &mut T {
+ self.alloc_with(|| val)
+ }
+
+ /// Try to allocate an object in this `Bump` and return an exclusive
+ /// reference to it.
+ ///
+ /// ## Errors
+ ///
+ /// Errors if reserving space for `T` fails.
+ ///
+ /// ## Example
+ ///
+ /// ```
+ /// let bump = bumpalo::Bump::new();
+ /// let x = bump.try_alloc("hello");
+ /// assert_eq!(x, Ok(&mut "hello"));
+ /// ```
+ #[inline(always)]
+ #[allow(clippy::mut_from_ref)]
+ pub fn try_alloc<T>(&self, val: T) -> Result<&mut T, AllocErr> {
+ self.try_alloc_with(|| val)
+ }
+
/// Pre-allocate space for an object in this `Bump`, initializes it using
/// the closure, then returns an exclusive reference to it.
///
/// See [The `_with` Method Suffix](#initializer-functions-the-_with-method-suffix) for a
/// discussion on the differences between the `_with` suffixed methods and
/// those methods without it, their performance characteristics, and when
/// you might or might not choose a `_with` suffixed method.
///
/// ## Panics
///
/// Panics if reserving space for `T` fails.
///
/// ## Example
///
/// ```
/// let bump = bumpalo::Bump::new();
/// let x = bump.alloc_with(|| "hello");
/// assert_eq!(*x, "hello");
/// ```
#[inline(always)]
#[allow(clippy::mut_from_ref)]
pub fn alloc_with<F, T>(&self, f: F) -> &mut T
where
    F: FnOnce() -> T,
{
    #[inline(always)]
    unsafe fn inner_writer<T, F>(ptr: *mut T, f: F)
    where
        F: FnOnce() -> T,
    {
        // This function is translated as:
        // - allocate space for a T on the stack
        // - call f() with the return value being put onto this stack space
        // - memcpy from the stack to the heap
        //
        // Ideally we want LLVM to always realize that doing a stack
        // allocation is unnecessary and optimize the code so it writes
        // directly into the heap instead. It seems we get it to realize
        // this most consistently if we put this critical line into its
        // own function instead of inlining it into the surrounding code.
        ptr::write(ptr, f())
    }

    let layout = Layout::new::<T>();

    unsafe {
        // SAFETY: `alloc_layout` returns memory suitable for a `T`, and
        // `inner_writer` fully initializes it before the reference is
        // handed out.
        let p = self.alloc_layout(layout);
        let p = p.as_ptr() as *mut T;
        inner_writer(p, f);
        &mut *p
    }
}
+
/// Tries to pre-allocate space for an object in this `Bump`, initializes
/// it using the closure, then returns an exclusive reference to it.
///
/// See [The `_with` Method Suffix](#initializer-functions-the-_with-method-suffix) for a
/// discussion on the differences between the `_with` suffixed methods and
/// those methods without it, their performance characteristics, and when
/// you might or might not choose a `_with` suffixed method.
///
/// ## Errors
///
/// Errors if reserving space for `T` fails.
///
/// ## Example
///
/// ```
/// let bump = bumpalo::Bump::new();
/// let x = bump.try_alloc_with(|| "hello");
/// assert_eq!(x, Ok(&mut "hello"));
/// ```
#[inline(always)]
#[allow(clippy::mut_from_ref)]
pub fn try_alloc_with<F, T>(&self, f: F) -> Result<&mut T, AllocErr>
where
    F: FnOnce() -> T,
{
    #[inline(always)]
    unsafe fn inner_writer<T, F>(ptr: *mut T, f: F)
    where
        F: FnOnce() -> T,
    {
        // This function is translated as:
        // - allocate space for a T on the stack
        // - call f() with the return value being put onto this stack space
        // - memcpy from the stack to the heap
        //
        // Ideally we want LLVM to always realize that doing a stack
        // allocation is unnecessary and optimize the code so it writes
        // directly into the heap instead. It seems we get it to realize
        // this most consistently if we put this critical line into its
        // own function instead of inlining it into the surrounding code.
        ptr::write(ptr, f())
    }

    //SAFETY: Self-contained:
    // `p` is allocated for `T` and then a `T` is written.
    let layout = Layout::new::<T>();
    // Unlike `alloc_with`, an allocation failure is propagated as `Err`
    // via `?` before the closure is ever run.
    let p = self.try_alloc_layout(layout)?;
    let p = p.as_ptr() as *mut T;

    unsafe {
        inner_writer(p, f);
        Ok(&mut *p)
    }
}
+
/// Pre-allocates space for a [`Result`] in this `Bump`, initializes it using
/// the closure, then returns an exclusive reference to its `T` if [`Ok`].
///
/// Iff the allocation fails, the closure is not run.
///
/// Iff [`Err`], an allocator rewind is *attempted* and the `E` instance is
/// moved out of the allocator to be consumed or dropped as normal.
///
/// See [The `_with` Method Suffix](#initializer-functions-the-_with-method-suffix) for a
/// discussion on the differences between the `_with` suffixed methods and
/// those methods without it, their performance characteristics, and when
/// you might or might not choose a `_with` suffixed method.
///
/// For caveats specific to fallible initialization, see
/// [The `_try_with` Method Suffix](#fallible-initialization-the-_try_with-method-suffix).
///
/// [`Result`]: https://doc.rust-lang.org/std/result/enum.Result.html
/// [`Ok`]: https://doc.rust-lang.org/std/result/enum.Result.html#variant.Ok
/// [`Err`]: https://doc.rust-lang.org/std/result/enum.Result.html#variant.Err
///
/// ## Errors
///
/// Iff the allocation succeeds but `f` fails, that error is forwarded by value.
///
/// ## Panics
///
/// Panics if reserving space for `Result<T, E>` fails.
///
/// ## Example
///
/// ```
/// let bump = bumpalo::Bump::new();
/// let x = bump.alloc_try_with(|| Ok("hello"))?;
/// assert_eq!(*x, "hello");
/// # Result::<_, ()>::Ok(())
/// ```
#[inline(always)]
#[allow(clippy::mut_from_ref)]
pub fn alloc_try_with<F, T, E>(&self, f: F) -> Result<&mut T, E>
where
    F: FnOnce() -> Result<T, E>,
{
    // Snapshot the current chunk and its bump pointer so that, on `Err`,
    // we can attempt to rewind and reclaim the space reserved for the
    // `Result`.
    let rewind_footer = self.current_chunk_footer.get();
    let rewind_ptr = unsafe { rewind_footer.as_ref() }.ptr.get();
    let mut inner_result_ptr = NonNull::from(self.alloc_with(f));
    match unsafe { inner_result_ptr.as_mut() } {
        Ok(t) => Ok(unsafe {
            //SAFETY:
            // The `&mut Result<T, E>` returned by `alloc_with` may be
            // lifetime-limited by `E`, but the derived `&mut T` still has
            // the same validity as in `alloc_with` since the error variant
            // is already ruled out here.

            // We could conditionally truncate the allocation here, but
            // since it grows backwards, it seems unlikely that we'd get
            // any more than the `Result`'s discriminant this way, if
            // anything at all.
            &mut *(t as *mut _)
        }),
        Err(e) => unsafe {
            // If this result was the last allocation in this arena, we can
            // reclaim its space. In fact, sometimes we can do even better
            // than simply calling `dealloc` on the result pointer: we can
            // reclaim any alignment padding we might have added (which
            // `dealloc` cannot do) if we didn't allocate a new chunk for
            // this result.
            if self.is_last_allocation(inner_result_ptr.cast()) {
                let current_footer_p = self.current_chunk_footer.get();
                let current_ptr = &current_footer_p.as_ref().ptr;
                if current_footer_p == rewind_footer {
                    // It's still the same chunk, so reset the bump pointer
                    // to its original value upon entry to this method
                    // (reclaiming any alignment padding we may have
                    // added).
                    current_ptr.set(rewind_ptr);
                } else {
                    // We allocated a new chunk for this result.
                    //
                    // We know the result is the only allocation in this
                    // chunk: Any additional allocations since the start of
                    // this method could only have happened when running
                    // the initializer function, which is called *after*
                    // reserving space for this result. Therefore, since we
                    // already determined via the check above that this
                    // result was the last allocation, there must not have
                    // been any other allocations, and this result is the
                    // only allocation in this chunk.
                    //
                    // Because this is the only allocation in this chunk,
                    // we can reset the chunk's bump finger to the start of
                    // the chunk.
                    current_ptr.set(current_footer_p.as_ref().data);
                }
            }
            //SAFETY:
            // As we received `E` semantically by value from `f`, we can
            // just copy that value here as long as we avoid a double-drop
            // (which can't happen as any specific references to the `E`'s
            // data in `self` are destroyed when this function returns).
            //
            // The order between this and the deallocation doesn't matter
            // because `Self: !Sync`.
            Err(ptr::read(e as *const _))
        },
    }
}
+
/// Tries to pre-allocates space for a [`Result`] in this `Bump`,
/// initializes it using the closure, then returns an exclusive reference
/// to its `T` if all [`Ok`].
///
/// Iff the allocation fails, the closure is not run.
///
/// Iff the closure returns [`Err`], an allocator rewind is *attempted* and
/// the `E` instance is moved out of the allocator to be consumed or dropped
/// as normal.
///
/// See [The `_with` Method Suffix](#initializer-functions-the-_with-method-suffix) for a
/// discussion on the differences between the `_with` suffixed methods and
/// those methods without it, their performance characteristics, and when
/// you might or might not choose a `_with` suffixed method.
///
/// For caveats specific to fallible initialization, see
/// [The `_try_with` Method Suffix](#fallible-initialization-the-_try_with-method-suffix).
///
/// [`Result`]: https://doc.rust-lang.org/std/result/enum.Result.html
/// [`Ok`]: https://doc.rust-lang.org/std/result/enum.Result.html#variant.Ok
/// [`Err`]: https://doc.rust-lang.org/std/result/enum.Result.html#variant.Err
///
/// ## Errors
///
/// Errors with the [`Alloc`](`AllocOrInitError::Alloc`) variant iff
/// reserving space for `Result<T, E>` fails.
///
/// Iff the allocation succeeds but `f` fails, that error is forwarded by
/// value inside the [`Init`](`AllocOrInitError::Init`) variant.
///
/// ## Example
///
/// ```
/// let bump = bumpalo::Bump::new();
/// let x = bump.try_alloc_try_with(|| Ok("hello"))?;
/// assert_eq!(*x, "hello");
/// # Result::<_, bumpalo::AllocOrInitError<()>>::Ok(())
/// ```
#[inline(always)]
#[allow(clippy::mut_from_ref)]
pub fn try_alloc_try_with<F, T, E>(&self, f: F) -> Result<&mut T, AllocOrInitError<E>>
where
    F: FnOnce() -> Result<T, E>,
{
    // Snapshot the current chunk and its bump pointer so that, on `Err`,
    // we can attempt to rewind and reclaim the space reserved for the
    // `Result`.
    let rewind_footer = self.current_chunk_footer.get();
    let rewind_ptr = unsafe { rewind_footer.as_ref() }.ptr.get();
    let mut inner_result_ptr = NonNull::from(self.try_alloc_with(f)?);
    match unsafe { inner_result_ptr.as_mut() } {
        Ok(t) => Ok(unsafe {
            //SAFETY:
            // The `&mut Result<T, E>` returned by `try_alloc_with` may be
            // lifetime-limited by `E`, but the derived `&mut T` still has
            // the same validity as in `try_alloc_with` since the error
            // variant is already ruled out here.

            // We could conditionally truncate the allocation here, but
            // since it grows backwards, it seems unlikely that we'd get
            // any more than the `Result`'s discriminant this way, if
            // anything at all.
            &mut *(t as *mut _)
        }),
        Err(e) => unsafe {
            // If this result was the last allocation in this arena, we can
            // reclaim its space. In fact, sometimes we can do even better
            // than simply calling `dealloc` on the result pointer: we can
            // reclaim any alignment padding we might have added (which
            // `dealloc` cannot do) if we didn't allocate a new chunk for
            // this result.
            if self.is_last_allocation(inner_result_ptr.cast()) {
                let current_footer_p = self.current_chunk_footer.get();
                let current_ptr = &current_footer_p.as_ref().ptr;
                if current_footer_p == rewind_footer {
                    // It's still the same chunk, so reset the bump pointer
                    // to its original value upon entry to this method
                    // (reclaiming any alignment padding we may have
                    // added).
                    current_ptr.set(rewind_ptr);
                } else {
                    // We allocated a new chunk for this result.
                    //
                    // We know the result is the only allocation in this
                    // chunk: Any additional allocations since the start of
                    // this method could only have happened when running
                    // the initializer function, which is called *after*
                    // reserving space for this result. Therefore, since we
                    // already determined via the check above that this
                    // result was the last allocation, there must not have
                    // been any other allocations, and this result is the
                    // only allocation in this chunk.
                    //
                    // Because this is the only allocation in this chunk,
                    // we can reset the chunk's bump finger to the start of
                    // the chunk.
                    current_ptr.set(current_footer_p.as_ref().data);
                }
            }
            //SAFETY:
            // As we received `E` semantically by value from `f`, we can
            // just copy that value here as long as we avoid a double-drop
            // (which can't happen as any specific references to the `E`'s
            // data in `self` are destroyed when this function returns).
            //
            // The order between this and the deallocation doesn't matter
            // because `Self: !Sync`.
            Err(AllocOrInitError::Init(ptr::read(e as *const _)))
        },
    }
}
+
+ /// `Copy` a slice into this `Bump` and return an exclusive reference to
+ /// the copy.
+ ///
+ /// ## Panics
+ ///
+ /// Panics if reserving space for the slice fails.
+ ///
+ /// ## Example
+ ///
+ /// ```
+ /// let bump = bumpalo::Bump::new();
+ /// let x = bump.alloc_slice_copy(&[1, 2, 3]);
+ /// assert_eq!(x, &[1, 2, 3]);
+ /// ```
+ #[inline(always)]
+ #[allow(clippy::mut_from_ref)]
+ pub fn alloc_slice_copy<T>(&self, src: &[T]) -> &mut [T]
+ where
+ T: Copy,
+ {
+ let layout = Layout::for_value(src);
+ let dst = self.alloc_layout(layout).cast::<T>();
+
+ unsafe {
+ ptr::copy_nonoverlapping(src.as_ptr(), dst.as_ptr(), src.len());
+ slice::from_raw_parts_mut(dst.as_ptr(), src.len())
+ }
+ }
+
/// `Clone` a slice into this `Bump` and return an exclusive reference to
/// the clone. Prefer [`alloc_slice_copy`](#method.alloc_slice_copy) if `T` is `Copy`.
///
/// ## Panics
///
/// Panics if reserving space for the slice fails.
///
/// ## Example
///
/// ```
/// #[derive(Clone, Debug, Eq, PartialEq)]
/// struct Sheep {
///     name: String,
/// }
///
/// let originals = [
///     Sheep { name: "Alice".into() },
///     Sheep { name: "Bob".into() },
///     Sheep { name: "Cathy".into() },
/// ];
///
/// let bump = bumpalo::Bump::new();
/// let clones = bump.alloc_slice_clone(&originals);
/// assert_eq!(originals, clones);
/// ```
#[inline(always)]
#[allow(clippy::mut_from_ref)]
pub fn alloc_slice_clone<T>(&self, src: &[T]) -> &mut [T]
where
    T: Clone,
{
    let layout = Layout::for_value(src);
    let dst = self.alloc_layout(layout).cast::<T>();

    unsafe {
        // Clone element by element into the freshly reserved region.
        // NOTE(review): if a `clone()` panics part-way through, the
        // elements already written stay in the arena without `Drop`
        // running (memory, not safety, is leaked) — presumably consistent
        // with the crate's no-`Drop` policy; confirm.
        for (i, val) in src.iter().cloned().enumerate() {
            ptr::write(dst.as_ptr().add(i), val);
        }

        slice::from_raw_parts_mut(dst.as_ptr(), src.len())
    }
}
+
+ /// `Copy` a string slice into this `Bump` and return an exclusive reference to it.
+ ///
+ /// ## Panics
+ ///
+ /// Panics if reserving space for the string fails.
+ ///
+ /// ## Example
+ ///
+ /// ```
+ /// let bump = bumpalo::Bump::new();
+ /// let hello = bump.alloc_str("hello world");
+ /// assert_eq!("hello world", hello);
+ /// ```
+ #[inline(always)]
+ #[allow(clippy::mut_from_ref)]
+ pub fn alloc_str(&self, src: &str) -> &mut str {
+ let buffer = self.alloc_slice_copy(src.as_bytes());
+ unsafe {
+ // This is OK, because it already came in as str, so it is guaranteed to be utf8
+ str::from_utf8_unchecked_mut(buffer)
+ }
+ }
+
/// Allocates a new slice of size `len` into this `Bump` and returns an
/// exclusive reference to the copy.
///
/// The elements of the slice are initialized using the supplied closure.
/// The closure argument is the position in the slice.
///
/// ## Panics
///
/// Panics if reserving space for the slice fails.
///
/// ## Example
///
/// ```
/// let bump = bumpalo::Bump::new();
/// let x = bump.alloc_slice_fill_with(5, |i| 5 * (i + 1));
/// assert_eq!(x, &[5, 10, 15, 20, 25]);
/// ```
#[inline(always)]
#[allow(clippy::mut_from_ref)]
pub fn alloc_slice_fill_with<T, F>(&self, len: usize, mut f: F) -> &mut [T]
where
    F: FnMut(usize) -> T,
{
    // An arithmetic overflow while computing the array layout is treated
    // the same as an allocation failure.
    let layout = Layout::array::<T>(len).unwrap_or_else(|_| oom());
    let dst = self.alloc_layout(layout).cast::<T>();

    unsafe {
        // Initialize each slot in order; the closure receives the index.
        for i in 0..len {
            ptr::write(dst.as_ptr().add(i), f(i));
        }

        let result = slice::from_raw_parts_mut(dst.as_ptr(), len);
        debug_assert_eq!(Layout::for_value(result), layout);
        result
    }
}
+
+ /// Allocates a new slice of size `len` into this `Bump` and returns an
+ /// exclusive reference to the copy.
+ ///
+ /// All elements of the slice are initialized to `value`.
+ ///
+ /// ## Panics
+ ///
+ /// Panics if reserving space for the slice fails.
+ ///
+ /// ## Example
+ ///
+ /// ```
+ /// let bump = bumpalo::Bump::new();
+ /// let x = bump.alloc_slice_fill_copy(5, 42);
+ /// assert_eq!(x, &[42, 42, 42, 42, 42]);
+ /// ```
+ #[inline(always)]
+ #[allow(clippy::mut_from_ref)]
+ pub fn alloc_slice_fill_copy<T: Copy>(&self, len: usize, value: T) -> &mut [T] {
+ self.alloc_slice_fill_with(len, |_| value)
+ }
+
+ /// Allocates a new slice of size `len` slice into this `Bump` and return an
+ /// exclusive reference to the copy.
+ ///
+ /// All elements of the slice are initialized to `value.clone()`.
+ ///
+ /// ## Panics
+ ///
+ /// Panics if reserving space for the slice fails.
+ ///
+ /// ## Example
+ ///
+ /// ```
+ /// let bump = bumpalo::Bump::new();
+ /// let s: String = "Hello Bump!".to_string();
+ /// let x: &[String] = bump.alloc_slice_fill_clone(2, &s);
+ /// assert_eq!(x.len(), 2);
+ /// assert_eq!(&x[0], &s);
+ /// assert_eq!(&x[1], &s);
+ /// ```
+ #[inline(always)]
+ #[allow(clippy::mut_from_ref)]
+ pub fn alloc_slice_fill_clone<T: Clone>(&self, len: usize, value: &T) -> &mut [T] {
+ self.alloc_slice_fill_with(len, |_| value.clone())
+ }
+
+ /// Allocates a new slice of size `len` slice into this `Bump` and return an
+ /// exclusive reference to the copy.
+ ///
+ /// The elements are initialized using the supplied iterator.
+ ///
+ /// ## Panics
+ ///
+ /// Panics if reserving space for the slice fails, or if the supplied
+ /// iterator returns fewer elements than it promised.
+ ///
+ /// ## Example
+ ///
+ /// ```
+ /// let bump = bumpalo::Bump::new();
+ /// let x: &[i32] = bump.alloc_slice_fill_iter([2, 3, 5].iter().cloned().map(|i| i * i));
+ /// assert_eq!(x, [4, 9, 25]);
+ /// ```
+ #[inline(always)]
+ #[allow(clippy::mut_from_ref)]
+ pub fn alloc_slice_fill_iter<T, I>(&self, iter: I) -> &mut [T]
+ where
+ I: IntoIterator<Item = T>,
+ I::IntoIter: ExactSizeIterator,
+ {
+ let mut iter = iter.into_iter();
+ self.alloc_slice_fill_with(iter.len(), |_| {
+ iter.next().expect("Iterator supplied too few elements")
+ })
+ }
+
+ /// Allocates a new slice of size `len` slice into this `Bump` and return an
+ /// exclusive reference to the copy.
+ ///
+ /// All elements of the slice are initialized to [`T::default()`].
+ ///
+ /// [`T::default()`]: https://doc.rust-lang.org/std/default/trait.Default.html#tymethod.default
+ ///
+ /// ## Panics
+ ///
+ /// Panics if reserving space for the slice fails.
+ ///
+ /// ## Example
+ ///
+ /// ```
+ /// let bump = bumpalo::Bump::new();
+ /// let x = bump.alloc_slice_fill_default::<u32>(5);
+ /// assert_eq!(x, &[0, 0, 0, 0, 0]);
+ /// ```
+ #[inline(always)]
+ #[allow(clippy::mut_from_ref)]
+ pub fn alloc_slice_fill_default<T: Default>(&self, len: usize) -> &mut [T] {
+ self.alloc_slice_fill_with(len, |_| T::default())
+ }
+
+ /// Allocate space for an object with the given `Layout`.
+ ///
+ /// The returned pointer points at uninitialized memory, and should be
+ /// initialized with
+ /// [`std::ptr::write`](https://doc.rust-lang.org/std/ptr/fn.write.html).
+ ///
+ /// # Panics
+ ///
+ /// Panics if reserving space matching `layout` fails.
+ #[inline(always)]
+ pub fn alloc_layout(&self, layout: Layout) -> NonNull<u8> {
+ self.try_alloc_layout(layout).unwrap_or_else(|_| oom())
+ }
+
+ /// Attempts to allocate space for an object with the given `Layout` or else returns
+ /// an `Err`.
+ ///
+ /// The returned pointer points at uninitialized memory, and should be
+ /// initialized with
+ /// [`std::ptr::write`](https://doc.rust-lang.org/std/ptr/fn.write.html).
+ ///
+ /// # Errors
+ ///
+ /// Errors if reserving space matching `layout` fails.
+ #[inline(always)]
+ pub fn try_alloc_layout(&self, layout: Layout) -> Result<NonNull<u8>, AllocErr> {
+ if let Some(p) = self.try_alloc_layout_fast(layout) {
+ Ok(p)
+ } else {
+ self.alloc_layout_slow(layout).ok_or(AllocErr)
+ }
+ }
+
/// Fast-path allocation: bump downward inside the current chunk, or
/// return `None` if the request does not fit (the caller then takes the
/// slow path).
#[inline(always)]
fn try_alloc_layout_fast(&self, layout: Layout) -> Option<NonNull<u8>> {
    // We don't need to check for ZSTs here since they will automatically
    // be handled properly: the pointer will be bumped by zero bytes,
    // modulo alignment. This keeps the fast path optimized for non-ZSTs,
    // which are much more common.
    unsafe {
        let footer = self.current_chunk_footer.get();
        let footer = footer.as_ref();
        let ptr = footer.ptr.get().as_ptr();
        let start = footer.data.as_ptr();
        debug_assert!(start <= ptr);
        debug_assert!(ptr as *const u8 <= footer as *const _ as *const u8);

        // Guard the subtraction below: if the bump pointer's address is
        // smaller than the requested size, subtracting would wrap around
        // the address space.
        if (ptr as usize) < layout.size() {
            return None;
        }

        // Bump downward by the requested size, then round down to the
        // requested alignment (alignment padding widens the gap).
        let ptr = ptr.wrapping_sub(layout.size());
        let rem = ptr as usize % layout.align();
        let aligned_ptr = ptr.wrapping_sub(rem);

        if aligned_ptr >= start {
            // The allocation still fits inside this chunk: commit the
            // new bump pointer and hand it out.
            let aligned_ptr = NonNull::new_unchecked(aligned_ptr as *mut u8);
            footer.ptr.set(aligned_ptr);
            Some(aligned_ptr)
        } else {
            None
        }
    }
}
+
/// Gets the remaining capacity in the current chunk (in bytes).
///
/// ## Example
///
/// ```
/// use bumpalo::Bump;
///
/// let bump = Bump::with_capacity(100);
///
/// let capacity = bump.chunk_capacity();
/// assert!(capacity >= 100);
/// ```
pub fn chunk_capacity(&self) -> usize {
    let current_footer = self.current_chunk_footer.get();
    let current_footer = unsafe { current_footer.as_ref() };

    // NOTE(review): this is the distance from the chunk's data start to
    // its footer, i.e. the chunk's *total* usable size. Allocations bump
    // `footer.ptr` downward from the footer, so this value does not
    // shrink as the chunk fills; "remaining" in the doc above only holds
    // while the current chunk is empty — confirm intended semantics
    // against upstream.
    current_footer as *const _ as usize - current_footer.data.as_ptr() as usize
}
+
/// Slow path allocation for when we need to allocate a new chunk from the
/// parent bump set because there isn't enough room in our current chunk.
///
/// Returns `None` when no acceptable chunk size can be allocated (global
/// allocator failure, size overflow, or the allocation limit would be
/// exceeded); the caller maps that to `AllocErr`.
#[inline(never)]
fn alloc_layout_slow(&self, layout: Layout) -> Option<NonNull<u8>> {
    unsafe {
        let size = layout.size();
        let allocation_limit_remaining = self.allocation_limit_remaining();

        // Get a new chunk from the global allocator.
        let current_footer = self.current_chunk_footer.get();
        let current_layout = current_footer.as_ref().layout;

        // By default, we want our new chunk to be about twice as big
        // as the previous chunk. If the global allocator refuses it,
        // we try to divide it by half until it works or the requested
        // size is smaller than the default footer size.
        let min_new_chunk_size = layout.size().max(DEFAULT_CHUNK_SIZE_WITHOUT_FOOTER);
        let mut base_size = (current_layout.size() - FOOTER_SIZE)
            .checked_mul(2)?
            .max(min_new_chunk_size);
        // Lazily generate candidate chunk sizes: `base_size`, then half
        // of it, and so on, until the minimum is undercut.
        let chunk_memory_details = iter::from_fn(|| {
            // With a small allocation limit (below the default chunk
            // size) on a bump that has allocated nothing yet, allow
            // chunks smaller than the usual minimum so tiny limits can
            // still be honored.
            let bypass_min_chunk_size_for_small_limits = match self.allocation_limit() {
                Some(limit)
                    if layout.size() < limit
                        && base_size >= layout.size()
                        && limit < DEFAULT_CHUNK_SIZE_WITHOUT_FOOTER
                        && self.allocated_bytes() == 0 =>
                {
                    true
                }
                _ => false,
            };

            if base_size >= min_new_chunk_size || bypass_min_chunk_size_for_small_limits {
                let size = base_size;
                base_size = base_size / 2;
                Bump::new_chunk_memory_details(Some(size), layout)
            } else {
                None
            }
        });

        // Take the first candidate that both fits under the allocation
        // limit and is accepted by the global allocator.
        let new_footer = chunk_memory_details
            .filter_map(|chunk_memory_details| {
                if Bump::chunk_fits_under_limit(
                    allocation_limit_remaining,
                    chunk_memory_details,
                ) {
                    Bump::new_chunk(chunk_memory_details, layout, current_footer)
                } else {
                    None
                }
            })
            .next()?;

        debug_assert_eq!(
            new_footer.as_ref().data.as_ptr() as usize % layout.align(),
            0
        );

        // Set the new chunk as our new current chunk.
        self.current_chunk_footer.set(new_footer);

        let new_footer = new_footer.as_ref();

        // Move the bump ptr finger down to allocate room for `val`. We know
        // this can't overflow because we successfully allocated a chunk of
        // at least the requested size.
        let mut ptr = new_footer.ptr.get().as_ptr().sub(size);
        // Round the pointer down to the requested alignment.
        ptr = ptr.sub(ptr as usize % layout.align());
        debug_assert!(
            ptr as *const _ <= new_footer,
            "{:p} <= {:p}",
            ptr,
            new_footer
        );
        let ptr = NonNull::new_unchecked(ptr as *mut u8);
        new_footer.ptr.set(ptr);

        // Return a pointer to the freshly allocated region in this chunk.
        Some(ptr)
    }
}
+
/// Returns an iterator over each chunk of allocated memory that
/// this arena has bump allocated into.
///
/// The chunks are returned ordered by allocation time, with the most
/// recently allocated chunk being returned first, and the least recently
/// allocated chunk being returned last.
///
/// The values inside each chunk are also ordered by allocation time, with
/// the most recent allocation being earlier in the slice, and the least
/// recent allocation being towards the end of the slice.
///
/// ## Safety
///
/// Because this method takes `&mut self`, we know that the bump arena
/// reference is unique and therefore there aren't any active references to
/// any of the objects we've allocated in it either. This potential aliasing
/// of exclusive references is one common footgun for unsafe code that we
/// don't need to worry about here.
///
/// However, there could be regions of uninitialized memory used as padding
/// between allocations, which is why this iterator has items of type
/// `[MaybeUninit<u8>]`, instead of simply `[u8]`.
///
/// The only way to guarantee that there is no padding between allocations
/// or within allocated objects is if all of these properties hold:
///
/// 1. Every object allocated in this arena has the same alignment,
///    and that alignment is at most 16.
/// 2. Every object's size is a multiple of its alignment.
/// 3. None of the objects allocated in this arena contain any internal
///    padding.
///
/// If you want to use this `iter_allocated_chunks` method, it is *your*
/// responsibility to ensure that these properties hold before calling
/// `MaybeUninit::assume_init` or otherwise reading the returned values.
///
/// Finally, you must also ensure that any values allocated into the bump
/// arena have not had their `Drop` implementations called on them,
/// e.g. after dropping a [`bumpalo::boxed::Box<T>`][crate::boxed::Box].
///
/// ## Example
///
/// ```
/// let mut bump = bumpalo::Bump::new();
///
/// // Allocate a bunch of `i32`s in this bump arena, potentially causing
/// // additional memory chunks to be reserved.
/// for i in 0..10000 {
///     bump.alloc(i);
/// }
///
/// // Iterate over each chunk we've bump allocated into. This is safe
/// // because we have only allocated `i32`s in this arena, which fulfills
/// // the above requirements.
/// for ch in bump.iter_allocated_chunks() {
///     println!("Used a chunk that is {} bytes long", ch.len());
///     println!("The first byte is {:?}", unsafe {
///         ch[0].assume_init()
///     });
/// }
///
/// // Within a chunk, allocations are ordered from most recent to least
/// // recent. If we allocated 'a', then 'b', then 'c', when we iterate
/// // through the chunk's data, we get them in the order 'c', then 'b',
/// // then 'a'.
///
/// bump.reset();
/// bump.alloc(b'a');
/// bump.alloc(b'b');
/// bump.alloc(b'c');
///
/// assert_eq!(bump.iter_allocated_chunks().count(), 1);
/// let chunk = bump.iter_allocated_chunks().nth(0).unwrap();
/// assert_eq!(chunk.len(), 3);
///
/// // Safe because we've only allocated `u8`s in this arena, which
/// // fulfills the above requirements.
/// unsafe {
///     assert_eq!(chunk[0].assume_init(), b'c');
///     assert_eq!(chunk[1].assume_init(), b'b');
///     assert_eq!(chunk[2].assume_init(), b'a');
/// }
/// ```
pub fn iter_allocated_chunks(&mut self) -> ChunkIter<'_> {
    // SAFETY: the exclusive `&mut self` borrow guarantees no allocation
    // can happen (and no aliasing reference can exist) while the
    // returned iterator is alive, which is exactly what
    // `iter_allocated_chunks_raw` requires.
    let raw = unsafe { self.iter_allocated_chunks_raw() };
    ChunkIter {
        raw,
        bump: PhantomData,
    }
}
+
+    /// Returns an iterator over raw pointers to chunks of allocated memory that
+    /// this arena has bump allocated into.
+    ///
+    /// This is an unsafe version of [`iter_allocated_chunks()`](Bump::iter_allocated_chunks),
+    /// with the caller responsible for safe usage of the returned pointers as
+    /// well as ensuring that the iterator is not invalidated by new
+    /// allocations.
+    ///
+    /// ## Safety
+    ///
+    /// Allocations from this arena must not be performed while the returned
+    /// iterator is alive. If reading the chunk data (or casting to a reference)
+    /// the caller must ensure that there exist no mutable references to
+    /// previously allocated data.
+    ///
+    /// In addition, all of the caveats when reading the chunk data from
+    /// [`iter_allocated_chunks()`](Bump::iter_allocated_chunks) still apply.
+    pub unsafe fn iter_allocated_chunks_raw(&self) -> ChunkRawIter<'_> {
+        ChunkRawIter {
+            // Start at the current (most recently allocated) chunk; the
+            // iterator follows each footer's `prev` link backwards from here.
+            footer: self.current_chunk_footer.get(),
+            bump: PhantomData,
+        }
+    }
+
+    /// Calculates the number of bytes currently allocated across all chunks in
+    /// this bump arena.
+    ///
+    /// If you allocate types of different alignments or types with
+    /// larger-than-typical alignment in the same arena, some padding
+    /// bytes might get allocated in the bump arena. Note that those padding
+    /// bytes will add to this method's resulting sum, so you cannot rely
+    /// on it only counting the sum of the sizes of the things
+    /// you've allocated in the arena.
+    ///
+    /// The allocated bytes do not include the size of bumpalo's metadata,
+    /// so the amount of memory requested from the Rust allocator is higher
+    /// than the returned value.
+    ///
+    /// ## Example
+    ///
+    /// ```
+    /// let bump = bumpalo::Bump::new();
+    /// let _x = bump.alloc_slice_fill_default::<u32>(5);
+    /// let bytes = bump.allocated_bytes();
+    /// assert!(bytes >= core::mem::size_of::<u32>() * 5);
+    /// ```
+    pub fn allocated_bytes(&self) -> usize {
+        let footer = self.current_chunk_footer.get();
+
+        // SAFETY: `current_chunk_footer` always points at a live, valid
+        // `ChunkFooter` for as long as this `Bump` exists.
+        unsafe { footer.as_ref().allocated_bytes }
+    }
+
+    /// Returns whether `ptr` is the start of the most recent allocation in
+    /// the current chunk, i.e. whether it equals the chunk's current bump
+    /// pointer.
+    ///
+    /// # Safety
+    ///
+    /// `current_chunk_footer` must point to a valid `ChunkFooter` (an
+    /// invariant `Bump` maintains at all times).
+    #[inline]
+    unsafe fn is_last_allocation(&self, ptr: NonNull<u8>) -> bool {
+        let footer = self.current_chunk_footer.get();
+        let footer = footer.as_ref();
+        footer.ptr.get() == ptr
+    }
+
+    /// "Deallocate" `ptr`. A bump arena cannot free arbitrary allocations,
+    /// so this only reclaims the bytes when `ptr` is the most recent
+    /// allocation; everything else stays allocated until `reset()`.
+    #[inline]
+    unsafe fn dealloc(&self, ptr: NonNull<u8>, layout: Layout) {
+        // If the pointer is the last allocation we made, we can reuse the bytes,
+        // otherwise they are simply leaked -- at least until somebody calls reset().
+        if self.is_last_allocation(ptr) {
+            // Move the bump pointer back up past this allocation (allocation
+            // proceeds downward, so freeing adds the size back), making the
+            // bytes available again.
+            let ptr = NonNull::new_unchecked(ptr.as_ptr().add(layout.size()));
+            self.current_chunk_footer.get().as_ref().ptr.set(ptr);
+        }
+    }
+
+    /// Shrink the allocation at `ptr` from `old_layout` down to `new_layout`,
+    /// returning the (possibly moved) pointer to the shrunken allocation.
+    ///
+    /// Errors only if `new_layout` requires stricter alignment than
+    /// `old_layout` provided. If `ptr` is not the most recent allocation, or
+    /// reclaiming the excess is not worth a copy, `ptr` is returned unchanged
+    /// and the extra bytes simply remain allocated.
+    #[inline]
+    unsafe fn shrink(
+        &self,
+        ptr: NonNull<u8>,
+        old_layout: Layout,
+        new_layout: Layout,
+    ) -> Result<NonNull<u8>, AllocErr> {
+        let old_size = old_layout.size();
+        let new_size = new_layout.size();
+        // A more-aligned old allocation satisfies any weaker new alignment
+        // in place; the reverse cannot be guaranteed.
+        let align_is_compatible = old_layout.align() >= new_layout.align();
+
+        if !align_is_compatible {
+            return Err(AllocErr);
+        }
+
+        // This is how much space we would *actually* reclaim while satisfying
+        // the requested alignment.
+        let delta = round_down_to(old_size - new_size, new_layout.align());
+
+        if self.is_last_allocation(ptr)
+                // Only reclaim the excess space (which requires a copy) if it
+                // is worth it: we are actually going to recover "enough" space
+                // and we can do a non-overlapping copy.
+                && delta >= old_size / 2
+        {
+            let footer = self.current_chunk_footer.get();
+            let footer = footer.as_ref();
+
+            // NB: new_ptr is aligned, because ptr *has to* be aligned, and we
+            // made sure delta is aligned.
+            let new_ptr = NonNull::new_unchecked(footer.ptr.get().as_ptr().add(delta));
+            footer.ptr.set(new_ptr);
+
+            // NB: we know it is non-overlapping because of the size check
+            // in the `if` condition.
+            ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr(), new_size);
+
+            return Ok(new_ptr);
+        } else {
+            return Ok(ptr);
+        }
+    }
+
+    /// Grow the allocation at `ptr` from `old_layout` up to `new_layout`,
+    /// returning the pointer to the (moved) larger allocation.
+    ///
+    /// Fast path: when `ptr` is the most recent allocation and the old
+    /// alignment satisfies the new one, only the extra `delta` bytes are
+    /// bump-allocated and the old bytes are shifted into the new position.
+    /// Otherwise a fresh allocation is made and the bytes copied over.
+    #[inline]
+    unsafe fn grow(
+        &self,
+        ptr: NonNull<u8>,
+        old_layout: Layout,
+        new_layout: Layout,
+    ) -> Result<NonNull<u8>, AllocErr> {
+        let old_size = old_layout.size();
+        let new_size = new_layout.size();
+        let align_is_compatible = old_layout.align() >= new_layout.align();
+
+        if align_is_compatible && self.is_last_allocation(ptr) {
+            // Try to allocate the delta size within this same block so we can
+            // reuse the currently allocated space.
+            let delta = new_size - old_size;
+            if let Some(p) =
+                self.try_alloc_layout_fast(layout_from_size_align(delta, old_layout.align()))
+            {
+                // NB: the delta allocation can abut the old bytes, so the two
+                // regions may overlap; use the memmove-style `ptr::copy`.
+                ptr::copy(ptr.as_ptr(), p.as_ptr(), old_size);
+                return Ok(p);
+            }
+        }
+
+        // Fallback: do a fresh allocation and copy the existing data into it.
+        let new_ptr = self.try_alloc_layout(new_layout)?;
+        ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr(), old_size);
+        Ok(new_ptr)
+    }
+}
+
+/// An iterator over each chunk of allocated memory that
+/// an arena has bump allocated into.
+///
+/// The chunks are returned ordered by allocation time, with the most recently
+/// allocated chunk being returned first.
+///
+/// The values inside each chunk are also ordered by allocation time, with the most
+/// recent allocation being earlier in the slice.
+///
+/// This struct is created by the [`iter_allocated_chunks`] method on
+/// [`Bump`]. See that function for a safety description regarding reading from the returned items.
+///
+/// [`Bump`]: struct.Bump.html
+/// [`iter_allocated_chunks`]: struct.Bump.html#method.iter_allocated_chunks
+#[derive(Debug)]
+pub struct ChunkIter<'a> {
+    // The underlying raw-pointer iterator that actually walks the chunk list.
+    raw: ChunkRawIter<'a>,
+    // Ties this iterator to the `&'a mut Bump` borrow so no allocations can
+    // be made (and invalidate the chunks) while it is alive.
+    bump: PhantomData<&'a mut Bump>,
+}
+
+impl<'a> Iterator for ChunkIter<'a> {
+    type Item = &'a [mem::MaybeUninit<u8>];
+
+    /// Yields the next chunk as a slice of possibly-uninitialized bytes.
+    fn next(&mut self) -> Option<&'a [mem::MaybeUninit<u8>]> {
+        let (ptr, len) = self.raw.next()?;
+        // SAFETY: the pointer/length pair describes a live chunk owned by the
+        // arena, and the `&'a mut Bump` borrow held via `self.bump` keeps the
+        // memory valid and un-reallocated for the whole lifetime `'a`.
+        let chunk = unsafe { slice::from_raw_parts(ptr as *const mem::MaybeUninit<u8>, len) };
+        Some(chunk)
+    }
+}
+
+impl<'a> iter::FusedIterator for ChunkIter<'a> {}
+
+/// An iterator over raw pointers to chunks of allocated memory that this
+/// arena has bump allocated into.
+///
+/// See [`ChunkIter`] for details regarding the returned chunks.
+///
+/// This struct is created by the [`iter_allocated_chunks_raw`] method on
+/// [`Bump`]. See that function for a safety description regarding reading from
+/// the returned items.
+///
+/// [`Bump`]: struct.Bump.html
+/// [`iter_allocated_chunks_raw`]: struct.Bump.html#method.iter_allocated_chunks_raw
+#[derive(Debug)]
+pub struct ChunkRawIter<'a> {
+    // Footer of the next chunk to yield; iteration stops at an empty footer.
+    footer: NonNull<ChunkFooter>,
+    // Borrow of the arena, preventing it from being dropped while this
+    // iterator is alive (the footers live inside the arena's chunks).
+    bump: PhantomData<&'a Bump>,
+}
+
+impl Iterator for ChunkRawIter<'_> {
+    type Item = (*mut u8, usize);
+
+    /// Yields `(data pointer, length in bytes)` for the next chunk, walking
+    /// from the most recently allocated chunk back towards the oldest.
+    fn next(&mut self) -> Option<(*mut u8, usize)> {
+        // SAFETY: `self.footer` always points at a valid `ChunkFooter`; the
+        // list is terminated by a footer for which `is_empty()` is true.
+        let current = unsafe { self.footer.as_ref() };
+        if current.is_empty() {
+            return None;
+        }
+        let (data, len) = current.as_raw_parts();
+        // Advance to the previous (older) chunk for the next call.
+        self.footer = current.prev.get();
+        Some((data as *mut u8, len))
+    }
+}
+
+impl iter::FusedIterator for ChunkRawIter<'_> {}
+
+/// Allocation-failure handler: panics with a fixed message.
+///
+/// Kept out-of-line (`#[inline(never)]`) and marked `#[cold]` so this
+/// failure path does not bloat the hot allocation paths that call it.
+#[inline(never)]
+#[cold]
+fn oom() -> ! {
+    panic!("out of memory")
+}
+
+unsafe impl<'a> alloc::Alloc for &'a Bump {
+    /// Allocate a block described by `layout` from the bump arena.
+    #[inline(always)]
+    unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocErr> {
+        self.try_alloc_layout(layout)
+    }
+
+    /// Best-effort deallocation: only the most recent allocation is actually
+    /// reclaimed; see `Bump::dealloc`.
+    #[inline]
+    unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
+        Bump::dealloc(self, ptr, layout)
+    }
+
+    /// Resize the allocation at `ptr` from `layout.size()` to `new_size`
+    /// bytes (keeping `layout.align()`), returning the possibly-moved
+    /// pointer.
+    #[inline]
+    unsafe fn realloc(
+        &mut self,
+        ptr: NonNull<u8>,
+        layout: Layout,
+        new_size: usize,
+    ) -> Result<NonNull<u8>, AllocErr> {
+        let old_size = layout.size();
+        let new_layout = layout_from_size_align(new_size, layout.align());
+
+        if old_size == 0 {
+            // There is nothing to copy or reclaim from a zero-sized
+            // allocation; just make a fresh allocation of the *requested*
+            // size. (Allocating with the old zero-sized `layout` here, as
+            // this code previously did, returned a block with zero usable
+            // bytes even when `new_size > 0`, violating the `realloc`
+            // contract.)
+            return self.try_alloc_layout(new_layout);
+        }
+
+        if new_size <= old_size {
+            self.shrink(ptr, layout, new_layout)
+        } else {
+            self.grow(ptr, layout, new_layout)
+        }
+    }
+}
+
+#[cfg(feature = "allocator_api")]
+unsafe impl<'a> Allocator for &'a Bump {
+    /// Allocate a block described by `layout`, returning a fat pointer that
+    /// carries the allocated length (`layout.size()`).
+    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
+        self.try_alloc_layout(layout)
+            .map(|p| NonNull::slice_from_raw_parts(p, layout.size()))
+            .map_err(|_| AllocError)
+    }
+
+    /// Best-effort deallocation; see `Bump::dealloc`.
+    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
+        Bump::dealloc(self, ptr, layout)
+    }
+
+    /// Delegates to `Bump::shrink`, re-wrapping the result as a slice
+    /// pointer of the new length.
+    unsafe fn shrink(
+        &self,
+        ptr: NonNull<u8>,
+        old_layout: Layout,
+        new_layout: Layout,
+    ) -> Result<NonNull<[u8]>, AllocError> {
+        Bump::shrink(self, ptr, old_layout, new_layout)
+            .map(|p| NonNull::slice_from_raw_parts(p, new_layout.size()))
+            .map_err(|_| AllocError)
+    }
+
+    /// Delegates to `Bump::grow`, re-wrapping the result as a slice pointer
+    /// of the new length.
+    unsafe fn grow(
+        &self,
+        ptr: NonNull<u8>,
+        old_layout: Layout,
+        new_layout: Layout,
+    ) -> Result<NonNull<[u8]>, AllocError> {
+        Bump::grow(self, ptr, old_layout, new_layout)
+            .map(|p| NonNull::slice_from_raw_parts(p, new_layout.size()))
+            .map_err(|_| AllocError)
+    }
+
+    /// Grow, then zero the newly added tail. The first `old_layout.size()`
+    /// bytes were copied from the old allocation by `grow`, so only the
+    /// bytes past that offset need zeroing.
+    unsafe fn grow_zeroed(
+        &self,
+        ptr: NonNull<u8>,
+        old_layout: Layout,
+        new_layout: Layout,
+    ) -> Result<NonNull<[u8]>, AllocError> {
+        let mut ptr = self.grow(ptr, old_layout, new_layout)?;
+        ptr.as_mut()[old_layout.size()..].fill(0);
+        Ok(ptr)
+    }
+}
+
+// NB: Only tests which require private types, fields, or methods should be in
+// here. Anything that can just be tested via public API surface should be in
+// `bumpalo/tests/all/*`.
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    // Uses private type `ChunkFooter`.
+    //
+    // NOTE: renamed from `chunk_footer_is_five_words`, whose name
+    // contradicted the size it actually asserts (six words).
+    #[test]
+    fn chunk_footer_is_six_words() {
+        assert_eq!(mem::size_of::<ChunkFooter>(), mem::size_of::<usize>() * 6);
+    }
+
+    // Uses private `alloc` module.
+    #[test]
+    #[allow(clippy::cognitive_complexity)]
+    fn test_realloc() {
+        use crate::alloc::Alloc;
+
+        unsafe {
+            const CAPACITY: usize = 1024 - OVERHEAD;
+            let mut b = Bump::with_capacity(CAPACITY);
+
+            // `realloc` doesn't shrink allocations that aren't "worth it".
+            let layout = Layout::from_size_align(100, 1).unwrap();
+            let p = b.alloc_layout(layout);
+            let q = (&b).realloc(p, layout, 51).unwrap();
+            assert_eq!(p, q);
+            b.reset();
+
+            // `realloc` will shrink allocations that are "worth it".
+            let layout = Layout::from_size_align(100, 1).unwrap();
+            let p = b.alloc_layout(layout);
+            let q = (&b).realloc(p, layout, 50).unwrap();
+            assert!(p != q);
+            b.reset();
+
+            // `realloc` will reuse the last allocation when growing.
+            let layout = Layout::from_size_align(10, 1).unwrap();
+            let p = b.alloc_layout(layout);
+            let q = (&b).realloc(p, layout, 11).unwrap();
+            assert_eq!(q.as_ptr() as usize, p.as_ptr() as usize - 1);
+            b.reset();
+
+            // `realloc` will allocate a new chunk when growing the last
+            // allocation, if need be.
+            let layout = Layout::from_size_align(1, 1).unwrap();
+            let p = b.alloc_layout(layout);
+            let q = (&b).realloc(p, layout, CAPACITY + 1).unwrap();
+            assert!(q.as_ptr() as usize != p.as_ptr() as usize - CAPACITY);
+            b = Bump::with_capacity(CAPACITY);
+
+            // `realloc` will allocate and copy when reallocating anything that
+            // wasn't the last allocation.
+            let layout = Layout::from_size_align(1, 1).unwrap();
+            let p = b.alloc_layout(layout);
+            let _ = b.alloc_layout(layout);
+            let q = (&b).realloc(p, layout, 2).unwrap();
+            assert!(q.as_ptr() as usize != p.as_ptr() as usize - 1);
+            b.reset();
+        }
+    }
+
+    // Uses our private `alloc` module.
+    #[test]
+    fn invalid_read() {
+        use alloc::Alloc;
+
+        let mut b = &Bump::new();
+
+        unsafe {
+            let l1 = Layout::from_size_align(12000, 4).unwrap();
+            let p1 = Alloc::alloc(&mut b, l1).unwrap();
+
+            let l2 = Layout::from_size_align(1000, 4).unwrap();
+            Alloc::alloc(&mut b, l2).unwrap();
+
+            let p1 = b.realloc(p1, l1, 24000).unwrap();
+            let l3 = Layout::from_size_align(24000, 4).unwrap();
+            b.realloc(p1, l3, 48000).unwrap();
+        }
+    }
+}