path: root/tests/run-make/wasm-exceptions-nostd/src/arena_alloc.rs
use core::alloc::{GlobalAlloc, Layout};
use core::cell::UnsafeCell;

// As the registered global allocator, this arena backs every allocation made
// through the `alloc` crate, which a no_std wasm binary otherwise lacks.
#[global_allocator]
static ALLOCATOR: ArenaAllocator = ArenaAllocator::new();

/// A very simple bump allocator which never deallocates memory.
///
/// Based on the example from
/// <https://doc.rust-lang.org/stable/std/alloc/trait.GlobalAlloc.html>
pub struct ArenaAllocator {
    arena: UnsafeCell<Arena>,
}

impl ArenaAllocator {
    pub const fn new() -> Self {
        Self {
            arena: UnsafeCell::new(Arena::new()),
        }
    }
}

/// Safe because we are single-threaded.
unsafe impl Sync for ArenaAllocator {}

unsafe impl GlobalAlloc for ArenaAllocator {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
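        // SAFETY: the test is single-threaded (see the `Sync` impl above), so
        // this is the only live reference into the arena.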
        let arena = &mut *self.arena.get();
        arena.alloc(layout)
    }

    // Deliberately a no-op: this arena never reclaims memory.
    unsafe fn dealloc(&self, _ptr: *mut u8, _layout: Layout) {}
}

const ARENA_SIZE: usize = 64 * 1024; // more than enough

#[repr(C, align(4096))]
struct Arena {
    buf: [u8; ARENA_SIZE], // at offset 0 thanks to repr(C), so 4096-aligned
    allocated: usize,
}

impl Arena {
    pub const fn new() -> Self {
        Self {
            buf: [0x55; ARENA_SIZE], // arbitrary fill pattern
            allocated: 0,
        }
    }

    pub unsafe fn alloc(&mut self, layout: Layout) -> *mut u8 {
        // Reject requests the arena can never satisfy: alignments above the
        // buffer's 4096-byte alignment, or sizes larger than the whole arena.
        if layout.align() > 4096 || layout.size() > ARENA_SIZE {
            return core::ptr::null_mut();
        }

        // `Layout` guarantees a nonzero power-of-two alignment, so this mask
        // rounds the cursor up to the next multiple of `layout.align()`.
        let align_minus_one = layout.align() - 1;
        let start = (self.allocated + align_minus_one) & !align_minus_one; // round up
        let new_cursor = start + layout.size();

        // An allocation may end exactly at ARENA_SIZE, so only reject
        // requests that would run past the end of the buffer.
        if new_cursor > ARENA_SIZE {
            return core::ptr::null_mut();
        }

        self.allocated = new_cursor;
        self.buf.as_mut_ptr().add(start)
    }
}
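
For reference, the bump arithmetic in `Arena::alloc` can be exercised in
isolation. The sketch below is a standalone illustration, not part of the
test file: the `bump` helper and the example offsets are ours, and it runs
on any host with plain `rustc`.

// Standalone sketch of the cursor logic in `Arena::alloc` (illustrative only).
const ARENA_SIZE: usize = 64 * 1024;

fn bump(cursor: usize, size: usize, align: usize) -> Option<(usize, usize)> {
    // `align` must be a nonzero power of two, as `core::alloc::Layout` guarantees.
    let align_minus_one = align - 1;
    let start = (cursor + align_minus_one) & !align_minus_one; // round up
    let new_cursor = start + size;
    if new_cursor > ARENA_SIZE {
        None // `Arena::alloc` returns a null pointer in this case
    } else {
        Some((start, new_cursor))
    }
}

fn main() {
    // 5 bytes already allocated; an 8-byte, 8-aligned request starts at offset 8.
    assert_eq!(bump(5, 8, 8), Some((8, 16)));
    // An allocation may end exactly at the arena boundary.
    assert_eq!(bump(0, ARENA_SIZE, 1), Some((0, ARENA_SIZE)));
    // A request that would overrun the arena is rejected.
    assert_eq!(bump(0, ARENA_SIZE + 1, 1), None);
}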