use core::mem::{self, MaybeUninit};
use core::ops::{Deref, DerefMut};
use core::{ptr, slice};

use crate::Result;
use crate::{PartialAllocStrategy, PhysallocFlags};

/// An RAII guard of a physical memory allocation. Currently, all physically allocated memory is
/// page-aligned and takes up at least 4 KiB of space (on x86_64).
#[derive(Debug)]
pub struct PhysBox {
    address: usize,
    size: usize,
}

impl PhysBox {
    /// Construct a PhysBox from an address and a size.
    ///
    /// # Safety
    /// This function is unsafe because when the returned `PhysBox` is dropped, it has to refer to
    /// a valid allocation.
    pub unsafe fn from_raw_parts(address: usize, size: usize) -> Self {
        Self { address, size }
    }

    /// Retrieve the byte address in physical memory of this allocation.
    pub fn address(&self) -> usize {
        self.address
    }

    /// Retrieve the size of this allocation, in bytes.
    pub fn size(&self) -> usize {
        self.size
    }

    /// Allocate physical memory that must reside in 32-bit space.
    pub fn new_in_32bit_space(size: usize) -> Result<Self> {
        Self::new_with_flags(size, PhysallocFlags::SPACE_32)
    }

    /// Allocate physical memory with the given flags. Partial allocations are not allowed here;
    /// use `new_partial_allocation` for those.
    pub fn new_with_flags(size: usize, flags: PhysallocFlags) -> Result<Self> {
        assert!(!flags.contains(PhysallocFlags::PARTIAL_ALLOC));

        let address = unsafe { crate::physalloc2(size, flags.bits())? };
        Ok(Self { address, size })
    }

    /// "Partially" allocate physical memory, in the sense that the allocation may be smaller than
    /// expected, but still with a minimum limit. This is particularly useful when the physical
    /// memory space is fragmented, and a device supports scatter-gather I/O. In that case, the
    /// driver can optimistically request e.g. one allocation of 1 MiB, with a minimum of 512 KiB.
    /// If that first allocation only returns half the size, the driver can do another allocation
    /// and then let the device use both buffers (see the sketch at the end of this module).
    pub fn new_partial_allocation(
        size: usize,
        flags: PhysallocFlags,
        strategy: Option<PartialAllocStrategy>,
        mut min: usize,
    ) -> Result<Self> {
        debug_assert!(!(flags.contains(PhysallocFlags::PARTIAL_ALLOC) && strategy.is_none()));

        let address = unsafe {
            crate::physalloc3(
                size,
                flags.bits() | strategy.map(|s| s as usize).unwrap_or(0),
                &mut min,
            )?
        };
        Ok(Self {
            address,
            size: min,
        })
    }

    /// Allocate `size` bytes of physical memory with the default flags.
    pub fn new(size: usize) -> Result<Self> {
        let address = unsafe { crate::physalloc(size)? };
        Ok(Self { address, size })
    }
}

impl Drop for PhysBox {
    fn drop(&mut self) {
        let _ = unsafe { crate::physfree(self.address, self.size) };
    }
}
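/// A mapping of a physical memory allocation into virtual memory, suitable for DMA: the driver
/// accesses the memory through `Deref`/`DerefMut`, while the device is handed the physical
/// address. Dropping the `Dma` drops the contained value, unmaps it, and frees the underlying
/// `PhysBox`.
///
/// A minimal usage sketch; the `Command` descriptor type is hypothetical, purely for
/// illustration:
///
/// ```ignore
/// #[repr(C)]
/// #[derive(Default)]
/// struct Command {
///     opcode: u8,
///     _rsvd: [u8; 63],
/// }
///
/// let cmd: Dma<Command> = Dma::new(Command::default())?;
/// let addr = cmd.physical(); // hand this address to the device
/// ```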
pub struct Dma<T: ?Sized> {
    phys: PhysBox,
    virt: *mut T,
}

impl<T> Dma<T> {
    /// Map a physical allocation into virtual memory, without initializing the contents.
    pub fn from_physbox_uninit(phys: PhysBox) -> Result<Dma<MaybeUninit<T>>> {
        let virt = unsafe { crate::physmap(phys.address, phys.size, crate::PHYSMAP_WRITE)? }
            as *mut MaybeUninit<T>;

        Ok(Dma { phys, virt })
    }

    /// Map a physical allocation into virtual memory, zeroing the contents.
    pub fn from_physbox_zeroed(phys: PhysBox) -> Result<Dma<MaybeUninit<T>>> {
        let this = Self::from_physbox_uninit(phys)?;
        unsafe { ptr::write_bytes(this.virt as *mut MaybeUninit<u8>, 0, this.phys.size) }
        Ok(this)
    }

    /// Map a physical allocation into virtual memory, initializing it with `value`.
    pub fn from_physbox(phys: PhysBox, value: T) -> Result<Self> {
        let this = Self::from_physbox_uninit(phys)?;

        Ok(unsafe {
            ptr::write(this.virt, MaybeUninit::new(value));
            this.assume_init()
        })
    }

    /// Allocate and map physical memory for `value`.
    pub fn new(value: T) -> Result<Self> {
        let phys = PhysBox::new(mem::size_of::<T>())?;
        Self::from_physbox(phys, value)
    }

    /// Allocate and map zeroed physical memory large enough for a `T`.
    pub fn zeroed() -> Result<Dma<MaybeUninit<T>>> {
        let phys = PhysBox::new(mem::size_of::<T>())?;
        Self::from_physbox_zeroed(phys)
    }
}

impl<T> Dma<MaybeUninit<T>> {
    /// Assume that the contents have been initialized.
    ///
    /// # Safety
    /// The caller must guarantee that the memory really is initialized as a valid `T`.
    pub unsafe fn assume_init(self) -> Dma<T> {
        let &Dma { phys: PhysBox { address, size }, virt } = &self;
        mem::forget(self);

        Dma {
            phys: PhysBox { address, size },
            virt: virt as *mut T,
        }
    }
}

impl<T: ?Sized> Dma<T> {
    /// The physical address of the allocation, to be handed to devices.
    pub fn physical(&self) -> usize {
        self.phys.address()
    }
    /// The size of the allocation, in bytes.
    pub fn size(&self) -> usize {
        self.phys.size()
    }
    /// A reference to the underlying physical allocation.
    pub fn phys(&self) -> &PhysBox {
        &self.phys
    }
}

impl<T> Dma<[T]> {
    /// Map a physical allocation as a slice of `len` uninitialized elements. Panics if `len`
    /// elements would not fit within the allocation.
    pub fn from_physbox_uninit_unsized(phys: PhysBox, len: usize) -> Result<Dma<[MaybeUninit<T>]>> {
        let max_len = phys.size() / mem::size_of::<T>();
        assert!(len <= max_len);

        Ok(Dma {
            virt: unsafe {
                slice::from_raw_parts_mut(
                    crate::physmap(phys.address, phys.size, crate::PHYSMAP_WRITE)?
                        as *mut MaybeUninit<T>,
                    len,
                )
            } as *mut [MaybeUninit<T>],
            phys,
        })
    }

    /// Map a physical allocation as a slice of `len` zeroed elements.
    pub fn from_physbox_zeroed_unsized(phys: PhysBox, len: usize) -> Result<Dma<[MaybeUninit<T>]>> {
        let this = Self::from_physbox_uninit_unsized(phys, len)?;
        unsafe { ptr::write_bytes(this.virt as *mut MaybeUninit<u8>, 0, this.phys.size()) }
        Ok(this)
    }

    /// Creates a new DMA buffer with a size only known at runtime.
    ///
    /// # Safety
    /// * `T` must be properly aligned.
    /// * `T` must be valid as zeroed (i.e. no `NonNull` pointers).
    pub unsafe fn zeroed_unsized(count: usize) -> Result<Self> {
        let phys = PhysBox::new(mem::size_of::<T>() * count)?;
        Ok(Self::from_physbox_zeroed_unsized(phys, count)?.assume_init())
    }
}

impl<T> Dma<[MaybeUninit<T>]> {
    /// Assume that the contents have been initialized.
    ///
    /// # Safety
    /// The caller must guarantee that every element really is initialized as a valid `T`.
    pub unsafe fn assume_init(self) -> Dma<[T]> {
        let &Dma { phys: PhysBox { address, size }, virt } = &self;
        mem::forget(self);

        Dma {
            phys: PhysBox { address, size },
            virt: virt as *mut [T],
        }
    }
}

impl<T: ?Sized> Deref for Dma<T> {
    type Target = T;
    fn deref(&self) -> &T {
        unsafe { &*self.virt }
    }
}

impl<T: ?Sized> DerefMut for Dma<T> {
    fn deref_mut(&mut self) -> &mut T {
        unsafe { &mut *self.virt }
    }
}

impl<T: ?Sized> Drop for Dma<T> {
    fn drop(&mut self) {
        unsafe { ptr::drop_in_place(self.virt) }
        let _ = unsafe { crate::physunmap(self.virt as *mut u8 as usize) };
    }
}
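
// A minimal, illustrative sketch (not part of the original API) of the scatter-gather pattern
// described on `PhysBox::new_partial_allocation`: optimistically request 1 MiB, accept as little
// as 512 KiB, and hand the resulting buffer's physical address to a device. The
// `PartialAllocStrategy::Optimal` variant and the sizes here are assumptions for illustration.
#[allow(dead_code)]
fn scatter_gather_sketch() -> Result<()> {
    let phys = PhysBox::new_partial_allocation(
        1024 * 1024,
        PhysallocFlags::PARTIAL_ALLOC,
        Some(PartialAllocStrategy::Optimal),
        512 * 1024,
    )?;

    // The allocation may have come back smaller than requested; `phys.size()` is authoritative.
    let len = phys.size();

    // SAFETY: zeroed bytes are trivially valid `u8`s, so `assume_init` is sound here.
    let buffer: Dma<[u8]> =
        unsafe { Dma::<[u8]>::from_physbox_zeroed_unsized(phys, len)?.assume_init() };

    // A driver would program the device with this address; if the allocation was truncated, it
    // can allocate a second buffer and let the device chain both.
    let _device_address = buffer.physical();
    Ok(())
}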