author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-05-30 03:59:24 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-05-30 03:59:24 +0000
commit     023939b627b7dc93b01471f7d41fb8553ddb4ffa (patch)
tree       60fc59477c605c72b0a1051409062ddecc43f877 /compiler/rustc_arena
parent     Adding debian version 1.72.1+dfsg1-1. (diff)
Merging upstream version 1.73.0+dfsg1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'compiler/rustc_arena')
-rw-r--r--  compiler/rustc_arena/src/lib.rs  |  88
1 file changed, 71 insertions(+), 17 deletions(-)
diff --git a/compiler/rustc_arena/src/lib.rs b/compiler/rustc_arena/src/lib.rs
index ba47ebd68..e45b7c154 100644
--- a/compiler/rustc_arena/src/lib.rs
+++ b/compiler/rustc_arena/src/lib.rs
@@ -11,6 +11,7 @@
html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/",
test(no_crate_inject, attr(deny(warnings)))
)]
+#![feature(core_intrinsics)]
#![feature(dropck_eyepatch)]
#![feature(new_uninit)]
#![feature(maybe_uninit_slice)]
@@ -23,17 +24,18 @@
#![deny(unsafe_op_in_unsafe_fn)]
#![deny(rustc::untranslatable_diagnostic)]
#![deny(rustc::diagnostic_outside_of_impl)]
+#![cfg_attr(not(bootstrap), allow(internal_features))]
#![allow(clippy::mut_from_ref)] // Arena allocators are one of the places where this pattern is fine.
use smallvec::SmallVec;
use std::alloc::Layout;
use std::cell::{Cell, RefCell};
-use std::cmp;
use std::marker::PhantomData;
use std::mem::{self, MaybeUninit};
use std::ptr::{self, NonNull};
use std::slice;
+use std::{cmp, intrinsics};
#[inline(never)]
#[cold]
@@ -362,6 +364,22 @@ unsafe impl<#[may_dangle] T> Drop for TypedArena<T> {
unsafe impl<T: Send> Send for TypedArena<T> {}
+#[inline(always)]
+fn align_down(val: usize, align: usize) -> usize {
+ debug_assert!(align.is_power_of_two());
+ val & !(align - 1)
+}
+
+#[inline(always)]
+fn align_up(val: usize, align: usize) -> usize {
+ debug_assert!(align.is_power_of_two());
+ (val + align - 1) & !(align - 1)
+}
+
+// Pointer alignment is common in compiler types, so keep `DroplessArena` aligned to them
+// to optimize away alignment code.
+const DROPLESS_ALIGNMENT: usize = mem::align_of::<usize>();
+
/// An arena that can hold objects of multiple different types that impl `Copy`
/// and/or satisfy `!mem::needs_drop`.
pub struct DroplessArena {
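Editor's aside on the hunk above: `align_down`/`align_up` use the usual power-of-two mask trick. The standalone snippet below (illustrative only, not part of the patch; `main` and the test values are made up for the example) exercises the same arithmetic:

fn align_down(val: usize, align: usize) -> usize {
    debug_assert!(align.is_power_of_two());
    // Clearing the low bits rounds down to a multiple of `align`.
    val & !(align - 1)
}

fn align_up(val: usize, align: usize) -> usize {
    debug_assert!(align.is_power_of_two());
    // Adding `align - 1` first turns the round-down into a round-up.
    (val + align - 1) & !(align - 1)
}

fn main() {
    assert_eq!(align_down(37, 8), 32); // low three bits cleared
    assert_eq!(align_up(37, 8), 40);   // bumped to the next multiple of 8
    assert_eq!(align_down(40, 8), 40); // already-aligned values are unchanged
    assert_eq!(align_up(40, 8), 40);
}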
@@ -374,6 +392,8 @@ pub struct DroplessArena {
/// start. (This is slightly simpler and faster than allocating upwards,
/// see <https://fitzgeraldnick.com/2019/11/01/always-bump-downwards.html>.)
/// When this pointer crosses the start pointer, a new chunk is allocated.
+ ///
+ /// This is kept aligned to DROPLESS_ALIGNMENT.
end: Cell<*mut u8>,
/// A vector of arena chunks.
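The doc comment above (and the linked post) describes bumping downwards: `end` moves toward `start`, and crossing `start` triggers a new chunk. A minimal sketch of that scheme, using plain offsets instead of raw pointers (illustrative only; `Chunk`, its fields, and the numbers are invented for this example):

struct Chunk {
    start: usize, // lowest usable offset; non-zero in general
    end: usize,   // current bump position, moves toward `start`
}

impl Chunk {
    fn new(cap: usize) -> Self {
        Chunk { start: 0, end: cap }
    }

    /// Try to carve `size` bytes at a power-of-two `align`ment.
    /// `None` means the chunk is exhausted and a new one is needed.
    fn alloc(&mut self, size: usize, align: usize) -> Option<usize> {
        debug_assert!(align.is_power_of_two());
        // Bump downwards: subtract the size, then round down to `align`.
        // `checked_sub` is the only overflow check needed.
        let new_end = self.end.checked_sub(size)? & !(align - 1);
        if self.start <= new_end {
            self.end = new_end;
            Some(new_end)
        } else {
            None
        }
    }
}

fn main() {
    let mut c = Chunk::new(64);
    assert_eq!(c.alloc(10, 8), Some(48)); // 64 - 10 = 54, rounded down to 48
    assert_eq!(c.alloc(40, 8), Some(8));  // 48 - 40 = 8, already aligned
    assert_eq!(c.alloc(16, 8), None);     // 8 - 16 underflows: chunk is full
}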
@@ -394,9 +414,11 @@ impl Default for DroplessArena {
}
impl DroplessArena {
- #[inline(never)]
- #[cold]
- fn grow(&self, additional: usize) {
+ fn grow(&self, layout: Layout) {
+ // Add some padding so we can align `self.end` while
+ // still fitting in a `layout` allocation.
+ let additional = layout.size() + cmp::max(DROPLESS_ALIGNMENT, layout.align()) - 1;
+
unsafe {
let mut chunks = self.chunks.borrow_mut();
let mut new_cap;
@@ -415,13 +437,35 @@ impl DroplessArena {
// Also ensure that this chunk can fit `additional`.
new_cap = cmp::max(additional, new_cap);
- let mut chunk = ArenaChunk::new(new_cap);
+ let mut chunk = ArenaChunk::new(align_up(new_cap, PAGE));
self.start.set(chunk.start());
- self.end.set(chunk.end());
+
+ // Align the end to DROPLESS_ALIGNMENT
+ let end = align_down(chunk.end().addr(), DROPLESS_ALIGNMENT);
+
+ // Make sure we don't go past `start`. This should not happen since the allocation
+ // should be at least DROPLESS_ALIGNMENT - 1 bytes.
+ debug_assert!(chunk.start().addr() <= end);
+
+ self.end.set(chunk.end().with_addr(end));
+
chunks.push(chunk);
}
}
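The `additional` computation in `grow` above over-allocates by `max(DROPLESS_ALIGNMENT, layout.align()) - 1` bytes so the end can be re-aligned without losing room for the request. A quick brute-force illustration of the underlying over-allocate-and-align trick follows (illustrative only, not a proof of the arena's exact invariant, which also depends on the `PAGE` rounding and the chunk's own alignment; all names are local to the example):

fn align_up(val: usize, align: usize) -> usize {
    (val + align - 1) & !(align - 1)
}

fn main() {
    // In any region of `size + align - 1` bytes there is an `align`-aligned
    // sub-block of `size` bytes, wherever the region happens to start.
    for align in [1usize, 2, 4, 8, 16, 32] {
        for size in 1..128usize {
            for start in 0..256usize {
                let region_end = start + size + align - 1;
                let aligned_start = align_up(start, align);
                assert!(aligned_start + size <= region_end);
            }
        }
    }
    println!("align - 1 bytes of padding always suffice");
}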
+ #[inline(never)]
+ #[cold]
+ fn grow_and_alloc_raw(&self, layout: Layout) -> *mut u8 {
+ self.grow(layout);
+ self.alloc_raw_without_grow(layout).unwrap()
+ }
+
+ #[inline(never)]
+ #[cold]
+ fn grow_and_alloc<T>(&self) -> *mut u8 {
+ self.grow_and_alloc_raw(Layout::new::<T>())
+ }
+
/// Allocates a byte slice with specified layout from the current memory
/// chunk. Returns `None` if there is no free space left to satisfy the
/// request.
@@ -431,12 +475,17 @@ impl DroplessArena {
let old_end = self.end.get();
let end = old_end.addr();
- let align = layout.align();
- let bytes = layout.size();
+ // Align allocated bytes so that `self.end` stays aligned to DROPLESS_ALIGNMENT
+ let bytes = align_up(layout.size(), DROPLESS_ALIGNMENT);
+
+ // Tell LLVM that `end` is aligned to DROPLESS_ALIGNMENT
+ unsafe { intrinsics::assume(end == align_down(end, DROPLESS_ALIGNMENT)) };
- let new_end = end.checked_sub(bytes)? & !(align - 1);
+ let new_end = align_down(end.checked_sub(bytes)?, layout.align());
if start <= new_end {
let new_end = old_end.with_addr(new_end);
+ // `new_end` is aligned to DROPLESS_ALIGNMENT as `align_down` preserves alignment
+ // as both `end` and `bytes` are already aligned to DROPLESS_ALIGNMENT.
self.end.set(new_end);
Some(new_end)
} else {
None
}
}
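The comments in this hunk claim that `self.end` stays `DROPLESS_ALIGNMENT`-aligned across allocations because the requested size is rounded up to that alignment and `align_down` preserves it. A small simulation of just that arithmetic (illustrative only; `A` stands in for `DROPLESS_ALIGNMENT`, and the request list is arbitrary):

const A: usize = std::mem::align_of::<usize>(); // stand-in for DROPLESS_ALIGNMENT

fn align_down(val: usize, align: usize) -> usize {
    val & !(align - 1)
}

fn align_up(val: usize, align: usize) -> usize {
    (val + align - 1) & !(align - 1)
}

fn main() {
    let mut end: usize = 1 << 20; // starts out aligned, as `grow` arranges
    for (size, align) in [(3usize, 1usize), (24, 8), (7, 4), (100, 16), (1, 2)] {
        let bytes = align_up(size, A);        // size rounded up to A
        end = align_down(end - bytes, align); // the fast-path bump
        // `end` stays a multiple of A: subtracting a multiple of A keeps it,
        // and aligning down is either a no-op (align <= A) or lands on a
        // coarser multiple (align >= A).
        assert_eq!(end % A, 0);
    }
}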
@@ -447,21 +496,26 @@ impl DroplessArena {
#[inline]
pub fn alloc_raw(&self, layout: Layout) -> *mut u8 {
assert!(layout.size() != 0);
- loop {
- if let Some(a) = self.alloc_raw_without_grow(layout) {
- break a;
- }
- // No free space left. Allocate a new chunk to satisfy the request.
- // On failure the grow will panic or abort.
- self.grow(layout.size());
+ if let Some(a) = self.alloc_raw_without_grow(layout) {
+ return a;
}
+ // No free space left. Allocate a new chunk to satisfy the request.
+ // On failure the grow will panic or abort.
+ self.grow_and_alloc_raw(layout)
}
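The restructuring of `alloc_raw` above keeps the hot path as one inlinable check and pushes the rare refill into `grow_and_alloc_raw`, marked `#[inline(never)]` and `#[cold]`. A generic sketch of that fast/slow split (not the arena's code; `Bump`, `try_alloc`, and `refill_and_alloc` are invented names):

use std::cell::Cell;

struct Bump {
    start: Cell<usize>,
    end: Cell<usize>,
}

impl Bump {
    #[inline]
    fn alloc(&self, bytes: usize) -> usize {
        // Hot path: inlined into the caller, a subtraction and a compare.
        if let Some(a) = self.try_alloc(bytes) {
            return a;
        }
        // Cold path: a single outlined call, kept out of the caller's code.
        self.refill_and_alloc(bytes)
    }

    #[inline]
    fn try_alloc(&self, bytes: usize) -> Option<usize> {
        let new_end = self.end.get().checked_sub(bytes)?;
        if self.start.get() <= new_end {
            self.end.set(new_end);
            Some(new_end)
        } else {
            None
        }
    }

    #[inline(never)]
    #[cold]
    fn refill_and_alloc(&self, bytes: usize) -> usize {
        // A real arena would allocate a new chunk here; the sketch just
        // resets the region so the retry below cannot fail.
        self.start.set(0);
        self.end.set(usize::MAX / 2);
        self.try_alloc(bytes).unwrap()
    }
}

fn main() {
    let b = Bump { start: Cell::new(0), end: Cell::new(16) };
    let first = b.alloc(8);   // served from the current region
    let second = b.alloc(64); // forces the cold refill path
    assert!(first == 8 && second > 0);
}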
#[inline]
pub fn alloc<T>(&self, object: T) -> &mut T {
assert!(!mem::needs_drop::<T>());
+ assert!(mem::size_of::<T>() != 0);
- let mem = self.alloc_raw(Layout::for_value::<T>(&object)) as *mut T;
+ let mem = if let Some(a) = self.alloc_raw_without_grow(Layout::for_value::<T>(&object)) {
+ a
+ } else {
+ // No free space left. Allocate a new chunk to satisfy the request.
+ // On failure the grow will panic or abort.
+ self.grow_and_alloc::<T>()
+ } as *mut T;
unsafe {
// Write into uninitialized memory.