1use core::{alloc::AllocError, ptr::NonNull};
6
7use super::{slot::HeapSlot, slot_list::SlabSlotList};
8use crate::mm::{
9 FrameAllocOptions, HasPaddr, HasSize, PAGE_SIZE, UniqueFrame,
10 frame::{linked_list::Link, meta::AnyFrameMeta},
11 paddr_to_vaddr,
12};
13
/// A slab for allocating objects of a fixed `SLOT_SIZE`.
///
/// A slab is a single uniquely-owned frame whose metadata
/// ([`SlabMeta`]) tracks the free slots carved out of that frame.
pub type Slab<const SLOT_SIZE: usize> = UniqueFrame<Link<SlabMeta<SLOT_SIZE>>>;
26
/// Frame metadata for a [`Slab`] with slot size `SLOT_SIZE`.
#[derive(Debug)]
pub struct SlabMeta<const SLOT_SIZE: usize> {
    // The intrusive list of free slots inside the slab's frame.
    free_list: SlabSlotList<SLOT_SIZE>,

    // The number of slots handed out by `alloc` and not yet returned
    // via `Slab::dealloc`. Fits in `u16` because `Slab::new` statically
    // asserts `PAGE_SIZE / SLOT_SIZE <= u16::MAX`.
    nr_allocated: u16,
}
43
// SAFETY: `SlabMeta` presumably fails the auto traits only because
// `SlabSlotList` stores raw pointers into the slab's own frame.
// NOTE(review): soundness rests on the metadata only being reachable
// through the `UniqueFrame` owner (exclusive access) — confirm against
// `SlabSlotList`'s invariants before relying on these impls.
unsafe impl<const SLOT_SIZE: usize> Send for SlabMeta<SLOT_SIZE> {}
unsafe impl<const SLOT_SIZE: usize> Sync for SlabMeta<SLOT_SIZE> {}
46
unsafe impl<const SLOT_SIZE: usize> AnyFrameMeta for SlabMeta<SLOT_SIZE> {
    /// Checks that the slab is empty when its frame is dropped.
    ///
    /// # Panics
    ///
    /// Panics if any slots are still allocated: freeing the frame while
    /// slots are live would leave dangling pointers into it.
    fn on_drop(&mut self, _reader: &mut crate::mm::VmReader<crate::mm::Infallible>) {
        if self.nr_allocated != 0 {
            panic!("{} slots allocated when dropping a slab", self.nr_allocated);
        }
    }

    /// Slab frames hold typed heap objects, so they are not untyped memory.
    fn is_untyped(&self) -> bool {
        false
    }
}
60
61impl<const SLOT_SIZE: usize> SlabMeta<SLOT_SIZE> {
62 pub const fn capacity(&self) -> u16 {
64 (PAGE_SIZE / SLOT_SIZE) as u16
65 }
66
67 pub fn nr_allocated(&self) -> u16 {
69 self.nr_allocated
70 }
71
72 pub fn alloc(&mut self) -> Result<HeapSlot, AllocError> {
74 let Some(allocated) = self.free_list.pop() else {
75 log::error!("Allocating a slot from a full slab");
76 return Err(AllocError);
77 };
78 self.nr_allocated += 1;
79 Ok(allocated)
80 }
81}
82
impl<const SLOT_SIZE: usize> Slab<SLOT_SIZE> {
    /// Allocates a new slab frame and threads every slot onto its free list.
    ///
    /// The slot size is validated at compile time: it must fit within a
    /// page, be at least pointer-sized (the free list links through the
    /// slot memory itself — see the `size_of::<usize>()` assertion), and
    /// yield a slot count representable in `nr_allocated`'s `u16`.
    ///
    /// # Errors
    ///
    /// Returns an error if the underlying frame allocation fails.
    pub fn new() -> crate::prelude::Result<Self> {
        const { assert!(SLOT_SIZE <= PAGE_SIZE) };
        const { assert!(SLOT_SIZE >= size_of::<usize>()) };
        const { assert!(PAGE_SIZE / SLOT_SIZE <= u16::MAX as usize) };

        // `zeroed(false)`: zeroing is unnecessary since every slot is
        // immediately linked into the free list below.
        let mut slab: Slab<SLOT_SIZE> = FrameAllocOptions::new()
            .zeroed(false)
            .alloc_frame_with(Link::new(SlabMeta::<SLOT_SIZE> {
                free_list: SlabSlotList::new(),
                nr_allocated: 0,
            }))?
            .try_into()
            .unwrap();

        let head_paddr = slab.paddr();
        let head_vaddr = paddr_to_vaddr(head_paddr);

        // Carve the frame into `PAGE_SIZE / SLOT_SIZE` slots.
        for slot_offset in (0..PAGE_SIZE).step_by(SLOT_SIZE) {
            // SAFETY: `head_vaddr` is the mapped address of the frame just
            // allocated and `slot_offset < PAGE_SIZE`, so the resulting
            // address is non-null and lies within the frame.
            let slot_ptr = unsafe { NonNull::new_unchecked((head_vaddr + slot_offset) as *mut u8) };
            // SAFETY: `slot_ptr` points to `SLOT_SIZE` bytes of unused
            // memory inside this slab's frame.
            // NOTE(review): exact contract of `HeapSlot::new` is defined
            // elsewhere — confirm it matches this justification.
            slab.meta_mut()
                .free_list
                .push(unsafe { HeapSlot::new(slot_ptr, super::SlotInfo::SlabSlot(SLOT_SIZE)) });
        }

        Ok(slab)
    }

    /// Returns a slot to this slab's free list.
    ///
    /// # Errors
    ///
    /// Returns [`AllocError`] if the slot's physical address does not lie
    /// within this slab's frame (i.e., the slot is not owned by this slab).
    pub fn dealloc(&mut self, slot: HeapSlot) -> Result<(), AllocError> {
        if !(self.paddr()..self.paddr() + self.size()).contains(&slot.paddr()) {
            log::error!("Deallocating a slot to a slab that does not own the slot");
            return Err(AllocError);
        }
        debug_assert_eq!(slot.size(), SLOT_SIZE);
        self.meta_mut().free_list.push(slot);
        // NOTE(review): this underflows (panics in debug, wraps in release)
        // if deallocations outnumber allocations; callers must keep the
        // alloc/dealloc pairing invariant.
        self.meta_mut().nr_allocated -= 1;

        Ok(())
    }
}