// ostd/mm/frame/segment.rs

1// SPDX-License-Identifier: MPL-2.0
2
3//! A contiguous range of frames.
4
5use core::{fmt::Debug, mem::ManuallyDrop, ops::Range};
6
7use super::{
8    Frame, inc_frame_ref_count,
9    meta::{AnyFrameMeta, GetFrameError},
10};
11use crate::mm::{AnyUFrameMeta, HasPaddr, HasSize, PAGE_SIZE, Paddr};
12
/// A contiguous range of homogeneous physical memory frames.
///
/// This is a handle to multiple contiguous frames. It will be more lightweight
/// than owning an array of frame handles.
///
/// The ownership is achieved by the reference counting mechanism of frames.
/// When constructing a [`Segment`], the frame handles are created then
/// forgotten, leaving the reference count. When dropping it, the frame
/// handles are restored and dropped, decrementing the reference count.
///
/// All the metadata of the frames are homogeneous, i.e., they are of the same
/// type.
#[repr(transparent)]
pub struct Segment<M: AnyFrameMeta + ?Sized> {
    // The physical address range `[start, end)` covered by this segment.
    // Both ends are page-aligned (debug-asserted in `from_raw`).
    range: Range<Paddr>,
    // Compile-time tag for the frames' metadata type; no `M` value is stored.
    _marker: core::marker::PhantomData<M>,
}
30
31impl<M: AnyFrameMeta + ?Sized> Debug for Segment<M> {
32    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
33        write!(f, "Segment({:#x}..{:#x})", self.range.start, self.range.end)
34    }
35}
36
/// A contiguous range of homogeneous untyped physical memory frames that have any metadata.
///
/// In other words, the metadata of the frames are of the same type, and they
/// are untyped, but the type of metadata is not known at compile time. A
/// [`USegment`] as a parameter accepts any untyped segments.
///
/// The usage of this frame will not be changed while this object is alive.
pub type USegment = Segment<dyn AnyUFrameMeta>;
45
46impl<M: AnyFrameMeta + ?Sized> Drop for Segment<M> {
47    fn drop(&mut self) {
48        for paddr in self.range.clone().step_by(PAGE_SIZE) {
49            // SAFETY: for each frame there would be a forgotten handle
50            // when creating the `Segment` object.
51            drop(unsafe { Frame::<M>::from_raw(paddr) });
52        }
53    }
54}
55
56impl<M: AnyFrameMeta + ?Sized> Clone for Segment<M> {
57    fn clone(&self) -> Self {
58        for paddr in self.range.clone().step_by(PAGE_SIZE) {
59            // SAFETY: for each frame there would be a forgotten handle
60            // when creating the `Segment` object, so we already have
61            // reference counts for the frames.
62            unsafe { inc_frame_ref_count(paddr) };
63        }
64        Self {
65            range: self.range.clone(),
66            _marker: core::marker::PhantomData,
67        }
68    }
69}
70
impl<M: AnyFrameMeta> Segment<M> {
    /// Creates a new [`Segment`] from unused frames.
    ///
    /// The caller must provide a closure to initialize metadata for all the frames.
    /// The closure receives the physical address of the frame and returns the
    /// metadata, which is similar to [`core::array::from_fn`].
    ///
    /// It returns an error if:
    ///  - the physical address is invalid or not aligned;
    ///  - any of the frames cannot be created with a specific reason.
    ///
    /// # Panics
    ///
    /// It panics if the range is empty.
    pub fn from_unused<F>(range: Range<Paddr>, mut metadata_fn: F) -> Result<Self, GetFrameError>
    where
        F: FnMut(Paddr) -> M,
    {
        // Both ends must fall on page boundaries.
        if !range.start.is_multiple_of(PAGE_SIZE) || !range.end.is_multiple_of(PAGE_SIZE) {
            return Err(GetFrameError::NotAligned);
        }
        if range.end > super::max_paddr() {
            return Err(GetFrameError::OutOfBound);
        }
        assert!(range.start < range.end);
        // Construct a segment early to recycle previously forgotten frames if
        // the subsequent operations fails in the middle.
        let mut segment = Self {
            range: range.start..range.start,
            _marker: core::marker::PhantomData,
        };
        for paddr in range.step_by(PAGE_SIZE) {
            let frame = Frame::<M>::from_unused(paddr, metadata_fn(paddr))?;
            // Forget the handle; `segment` now owns this frame's reference count.
            let _ = ManuallyDrop::new(frame);
            // Grow the segment one page at a time so that, on an early `?`
            // return above, dropping `segment` releases exactly the frames
            // acquired so far and no more.
            segment.range.end = paddr + PAGE_SIZE;
        }
        Ok(segment)
    }

    /// Restores the [`Segment`] from the raw physical address range.
    ///
    /// # Safety
    ///
    /// The range must be a forgotten [`Segment`] that matches the type `M`.
    /// It could be manually forgotten by [`core::mem::forget`],
    /// [`ManuallyDrop`], or [`Self::into_raw`].
    pub(crate) unsafe fn from_raw(range: Range<Paddr>) -> Self {
        debug_assert_eq!(range.start % PAGE_SIZE, 0);
        debug_assert_eq!(range.end % PAGE_SIZE, 0);
        Self {
            range,
            _marker: core::marker::PhantomData,
        }
    }
}
126
127impl<M: AnyFrameMeta + ?Sized> Segment<M> {
128    /// Splits the frames into two at the given byte offset from the start.
129    ///
130    /// The resulting frames cannot be empty. So the offset cannot be neither
131    /// zero nor the length of the frames.
132    ///
133    /// # Panics
134    ///
135    /// The function panics if the offset is out of bounds, at either ends, or
136    /// not base-page-aligned.
137    pub fn split(self, offset: usize) -> (Self, Self) {
138        assert!(offset.is_multiple_of(PAGE_SIZE));
139        assert!(0 < offset && offset < self.size());
140
141        let old = ManuallyDrop::new(self);
142        let at = old.range.start + offset;
143
144        (
145            Self {
146                range: old.range.start..at,
147                _marker: core::marker::PhantomData,
148            },
149            Self {
150                range: at..old.range.end,
151                _marker: core::marker::PhantomData,
152            },
153        )
154    }
155
156    /// Gets an extra handle to the frames in the byte offset range.
157    ///
158    /// The sliced byte offset range in indexed by the offset from the start of
159    /// the contiguous frames. The resulting frames holds extra reference counts.
160    ///
161    /// # Panics
162    ///
163    /// The function panics if the byte offset range is out of bounds, or if
164    /// any of the ends of the byte offset range is not base-page aligned.
165    pub fn slice(&self, range: &Range<usize>) -> Self {
166        assert!(range.start.is_multiple_of(PAGE_SIZE) && range.end.is_multiple_of(PAGE_SIZE));
167        let start = self.range.start + range.start;
168        let end = self.range.start + range.end;
169        assert!(start <= end && end <= self.range.end);
170
171        for paddr in (start..end).step_by(PAGE_SIZE) {
172            // SAFETY: We already have reference counts for the frames since
173            // for each frame there would be a forgotten handle when creating
174            // the `Segment` object.
175            unsafe { inc_frame_ref_count(paddr) };
176        }
177
178        Self {
179            range: start..end,
180            _marker: core::marker::PhantomData,
181        }
182    }
183
184    /// Forgets the [`Segment`] and gets a raw range of physical addresses.
185    pub(crate) fn into_raw(self) -> Range<Paddr> {
186        let range = self.range.clone();
187        let _ = ManuallyDrop::new(self);
188        range
189    }
190}
191
192impl<M: AnyFrameMeta + ?Sized> HasPaddr for Segment<M> {
193    fn paddr(&self) -> Paddr {
194        self.range.start
195    }
196}
197
198impl<M: AnyFrameMeta + ?Sized> HasSize for Segment<M> {
199    fn size(&self) -> usize {
200        self.range.end - self.range.start
201    }
202}
203
204impl<M: AnyFrameMeta + ?Sized> From<Frame<M>> for Segment<M> {
205    fn from(frame: Frame<M>) -> Self {
206        let pa = frame.paddr();
207        let _ = ManuallyDrop::new(frame);
208        Self {
209            range: pa..pa + PAGE_SIZE,
210            _marker: core::marker::PhantomData,
211        }
212    }
213}
214
215impl<M: AnyFrameMeta + ?Sized> Iterator for Segment<M> {
216    type Item = Frame<M>;
217
218    fn next(&mut self) -> Option<Self::Item> {
219        if self.range.start < self.range.end {
220            // SAFETY: each frame in the range would be a handle forgotten
221            // when creating the `Segment` object.
222            let frame = unsafe { Frame::<M>::from_raw(self.range.start) };
223            self.range.start += PAGE_SIZE;
224            // The end cannot be non-page-aligned.
225            debug_assert!(self.range.start <= self.range.end);
226            Some(frame)
227        } else {
228            None
229        }
230    }
231}
232
233impl<M: AnyFrameMeta> From<Segment<M>> for Segment<dyn AnyFrameMeta> {
234    fn from(seg: Segment<M>) -> Self {
235        let seg = ManuallyDrop::new(seg);
236        Self {
237            range: seg.range.clone(),
238            _marker: core::marker::PhantomData,
239        }
240    }
241}
242
impl<M: AnyFrameMeta> TryFrom<Segment<dyn AnyFrameMeta>> for Segment<M> {
    type Error = Segment<dyn AnyFrameMeta>;

    /// Tries to downcast a type-erased segment into one with the concrete
    /// metadata type `M`, returning the segment unchanged on a type mismatch.
    fn try_from(seg: Segment<dyn AnyFrameMeta>) -> core::result::Result<Self, Self::Error> {
        // Only the first frame is inspected: segments are homogeneous, so its
        // metadata type stands for every frame in the range.
        // SAFETY: for each page there would be a forgotten handle
        // when creating the `Segment` object.
        let first_frame = unsafe { Frame::<dyn AnyFrameMeta>::from_raw(seg.range.start) };
        // `ManuallyDrop` keeps this peek from consuming the reference count
        // that is still owned by `seg`.
        let first_frame = ManuallyDrop::new(first_frame);
        if !(first_frame.dyn_meta() as &dyn core::any::Any).is::<M>() {
            return Err(seg);
        }
        // Since segments are homogeneous, we can safely assume that the rest
        // of the frames are of the same type. We just debug-check here.
        #[cfg(debug_assertions)]
        {
            for paddr in seg.range.clone().step_by(PAGE_SIZE) {
                let frame = unsafe { Frame::<dyn AnyFrameMeta>::from_raw(paddr) };
                let frame = ManuallyDrop::new(frame);
                debug_assert!((frame.dyn_meta() as &dyn core::any::Any).is::<M>());
            }
        }
        // SAFETY: The metadata is coerceable and the struct is transmutable.
        Ok(unsafe { core::mem::transmute::<Segment<dyn AnyFrameMeta>, Segment<M>>(seg) })
    }
}
268
269impl<M: AnyUFrameMeta> From<Segment<M>> for USegment {
270    fn from(seg: Segment<M>) -> Self {
271        // SAFETY: The metadata is coerceable and the struct is transmutable.
272        unsafe { core::mem::transmute(seg) }
273    }
274}
275
impl TryFrom<Segment<dyn AnyFrameMeta>> for USegment {
    type Error = Segment<dyn AnyFrameMeta>;

    /// Try converting a [`Segment<dyn AnyFrameMeta>`] into [`USegment`].
    ///
    /// If the usage of the page is not the same as the expected usage, it will
    /// return the dynamic page itself as is.
    fn try_from(seg: Segment<dyn AnyFrameMeta>) -> core::result::Result<Self, Self::Error> {
        // Only the first frame is inspected: segments are homogeneous, so its
        // untyped-ness stands for every frame in the range.
        // SAFETY: for each page there would be a forgotten handle
        // when creating the `Segment` object.
        let first_frame = unsafe { Frame::<dyn AnyFrameMeta>::from_raw(seg.range.start) };
        // `ManuallyDrop` keeps this peek from consuming the reference count
        // that is still owned by `seg`.
        let first_frame = ManuallyDrop::new(first_frame);
        if !first_frame.dyn_meta().is_untyped() {
            return Err(seg);
        }
        // Since segments are homogeneous, we can safely assume that the rest
        // of the frames are of the same type. We just debug-check here.
        #[cfg(debug_assertions)]
        {
            for paddr in seg.range.clone().step_by(PAGE_SIZE) {
                let frame = unsafe { Frame::<dyn AnyFrameMeta>::from_raw(paddr) };
                let frame = ManuallyDrop::new(frame);
                debug_assert!(frame.dyn_meta().is_untyped());
            }
        }
        // SAFETY: The metadata is coerceable and the struct is transmutable.
        Ok(unsafe { core::mem::transmute::<Segment<dyn AnyFrameMeta>, USegment>(seg) })
    }
}