ostd/mm/heap/
slab.rs

1// SPDX-License-Identifier: MPL-2.0
2
3//! Slabs for implementing the slab allocator.
4
5use core::{alloc::AllocError, ptr::NonNull};
6
7use super::{slot::HeapSlot, slot_list::SlabSlotList};
8use crate::mm::{
9    FrameAllocOptions, HasPaddr, HasSize, PAGE_SIZE, UniqueFrame,
10    frame::{linked_list::Link, meta::AnyFrameMeta},
11    paddr_to_vaddr,
12};
13
/// A slab.
///
/// The slot size is the maximum size of objects that can be allocated from the
/// slab. The slab is densely divided into slots of this size.
///
/// The `SLOT_SIZE` const parameter is the size of the slots in bytes. It
/// cannot be smaller than the size of [`usize`] (each free slot must be able
/// to hold a pointer for the free list) and must be smaller than or equal to
/// [`PAGE_SIZE`]. Both bounds are enforced by compile-time assertions in
/// [`Slab::new`].
///
/// A slab should have the size of one basic page. This restriction may be
/// lifted in the future.
pub type Slab<const SLOT_SIZE: usize> = UniqueFrame<Link<SlabMeta<SLOT_SIZE>>>;
26
/// Frame metadata of a slab.
///
/// Each slab is backed by a [`UniqueFrame`].
#[derive(Debug)]
pub struct SlabMeta<const SLOT_SIZE: usize> {
    /// The list of free slots inside the slab.
    ///
    /// Slots not inside the slab should not be in the list.
    free_list: SlabSlotList<SLOT_SIZE>,

    /// The number of allocated slots in the slab.
    ///
    /// Even if a slot is free, as long as it does not stay in the
    /// [`Self::free_list`], it is considered allocated.
    ///
    /// A `u16` is wide enough: the slot count is `PAGE_SIZE / SLOT_SIZE`,
    /// which [`Slab::new`] asserts at compile time to fit in a `u16`.
    nr_allocated: u16,
}
43
// SAFETY: `SlabMeta` holds only the free-slot list and a counter. The manual
// impls are presumably needed because `SlabSlotList` stores raw pointers into
// the slab frame, which suppresses the auto traits; the metadata itself has no
// thread-affine state. NOTE(review): soundness ultimately depends on
// `SlabSlotList`'s internals — confirm against `slot_list.rs`.
unsafe impl<const SLOT_SIZE: usize> Send for SlabMeta<SLOT_SIZE> {}
unsafe impl<const SLOT_SIZE: usize> Sync for SlabMeta<SLOT_SIZE> {}
46
47unsafe impl<const SLOT_SIZE: usize> AnyFrameMeta for SlabMeta<SLOT_SIZE> {
48    fn on_drop(&mut self, _reader: &mut crate::mm::VmReader<crate::mm::Infallible>) {
49        if self.nr_allocated != 0 {
50            // FIXME: We have no mechanisms to forget the slab once we are here,
51            // so we require the user to deallocate all slots before dropping.
52            panic!("{} slots allocated when dropping a slab", self.nr_allocated);
53        }
54    }
55
56    fn is_untyped(&self) -> bool {
57        false
58    }
59}
60
61impl<const SLOT_SIZE: usize> SlabMeta<SLOT_SIZE> {
62    /// Gets the capacity of the slab (regardless of the number of allocated slots).
63    pub const fn capacity(&self) -> u16 {
64        (PAGE_SIZE / SLOT_SIZE) as u16
65    }
66
67    /// Gets the number of allocated slots.
68    pub fn nr_allocated(&self) -> u16 {
69        self.nr_allocated
70    }
71
72    /// Allocates a slot from the slab.
73    pub fn alloc(&mut self) -> Result<HeapSlot, AllocError> {
74        let Some(allocated) = self.free_list.pop() else {
75            log::error!("Allocating a slot from a full slab");
76            return Err(AllocError);
77        };
78        self.nr_allocated += 1;
79        Ok(allocated)
80    }
81}
82
impl<const SLOT_SIZE: usize> Slab<SLOT_SIZE> {
    /// Allocates a new slab backed by exactly one frame.
    ///
    /// The whole frame ([`PAGE_SIZE`] bytes) is divided into
    /// `PAGE_SIZE / SLOT_SIZE` slots, all of which start out on the free
    /// list.
    ///
    /// # Errors
    ///
    /// Returns an error if the underlying frame allocation fails.
    pub fn new() -> crate::prelude::Result<Self> {
        // Slots must fit within the single-frame slab.
        const { assert!(SLOT_SIZE <= PAGE_SIZE) };
        // To ensure we can store a pointer in each slot.
        const { assert!(SLOT_SIZE >= size_of::<usize>()) };
        // To ensure `nr_allocated` can be stored in a `u16`.
        const { assert!(PAGE_SIZE / SLOT_SIZE <= u16::MAX as usize) };

        // `zeroed(false)`: clearing is unnecessary — presumably the free list
        // links through the slot bytes themselves, overwriting them below
        // (confirm against `slot_list.rs`). The `try_into().unwrap()` turns
        // the frame into a `UniqueFrame`; the freshly allocated frame has no
        // other owners, so the conversion is expected to succeed.
        let mut slab: Slab<SLOT_SIZE> = FrameAllocOptions::new()
            .zeroed(false)
            .alloc_frame_with(Link::new(SlabMeta::<SLOT_SIZE> {
                free_list: SlabSlotList::new(),
                nr_allocated: 0,
            }))?
            .try_into()
            .unwrap();

        let head_paddr = slab.paddr();
        let head_vaddr = paddr_to_vaddr(head_paddr);

        // Push each slot to the free list.
        for slot_offset in (0..PAGE_SIZE).step_by(SLOT_SIZE) {
            // SAFETY: The slot is within the slab so it can't be NULL.
            let slot_ptr = unsafe { NonNull::new_unchecked((head_vaddr + slot_offset) as *mut u8) };
            // SAFETY: The slot is newly allocated in the slab.
            slab.meta_mut()
                .free_list
                .push(unsafe { HeapSlot::new(slot_ptr, super::SlotInfo::SlabSlot(SLOT_SIZE)) });
        }

        Ok(slab)
    }

    /// Deallocates a slot to the slab.
    ///
    /// If the slot does not belong to the slab (its physical address lies
    /// outside the slab frame) it returns [`AllocError`].
    ///
    /// NOTE(review): a double free is not detected here — deallocating a slot
    /// that is already free would underflow `nr_allocated` (panicking in
    /// debug builds, wrapping in release). Callers must deallocate each
    /// allocated slot exactly once.
    pub fn dealloc(&mut self, slot: HeapSlot) -> Result<(), AllocError> {
        // Ownership check: the slot's physical address must fall inside this
        // slab's frame.
        if !(self.paddr()..self.paddr() + self.size()).contains(&slot.paddr()) {
            log::error!("Deallocating a slot to a slab that does not own the slot");
            return Err(AllocError);
        }
        debug_assert_eq!(slot.size(), SLOT_SIZE);
        self.meta_mut().free_list.push(slot);
        self.meta_mut().nr_allocated -= 1;

        Ok(())
    }
}
134}