//! Information of memory regions in the boot phase.

use core::ops::Deref;

use align_ext::AlignExt;

use crate::mm::{PAGE_SIZE, Paddr, Vaddr, kspace::kernel_loaded_offset};

/// The type of initial memory regions that are needed for the kernel.
///
/// The variants are ordered by priority: when overlapping regions are
/// coalesced, the type with the smaller discriminant wins (see
/// [`MemoryRegionArray::into_non_overlapping`]).
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
pub enum MemoryRegionType {
    /// Bad memory that must not be used.
    BadMemory = 0,
    /// Memory of unknown type, conservatively treated as unusable.
    Unknown = 1,
    /// Memory that must be preserved when sleeping, such as ACPI NVS regions.
    NonVolatileSleep = 2,
    /// Memory reserved by the firmware or the bootloader.
    Reserved = 3,
    /// Memory occupied by the loaded kernel image.
    Kernel = 4,
    /// Memory holding bootloader-loaded modules.
    Module = 5,
    /// Memory mapped as the framebuffer.
    Framebuffer = 6,
    /// Memory used in the boot phase that can be reclaimed afterwards.
    Reclaimable = 7,
    /// Memory directly usable by the frame allocator.
    Usable = 8,
}

impl MemoryRegionType {
    /// Returns whether regions of this type are backed by physical memory
    /// (i.e., DRAM), rather than being bad, unknown, reserved, or
    /// device-backed like the framebuffer.
    pub fn is_physical(self) -> bool {
        !matches!(
            self,
            Self::BadMemory | Self::Unknown | Self::Reserved | Self::Framebuffer
        )
    }
}

/// A memory region, described by its base physical address, length in bytes,
/// and type.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
pub struct MemoryRegion {
    base: usize,
    len: usize,
    typ: MemoryRegionType,
}

impl MemoryRegion {
    /// Constructs a new memory region.
    pub const fn new(base: Paddr, len: usize, typ: MemoryRegionType) -> Self {
        MemoryRegion { base, len, typ }
    }

    /// Constructs an empty region of bad memory, which serves as a
    /// placeholder in [`MemoryRegionArray`].
    pub const fn bad() -> Self {
        MemoryRegion {
            base: 0,
            len: 0,
            typ: MemoryRegionType::BadMemory,
        }
    }

    /// Returns the region where the kernel image itself is loaded.
    ///
    /// The physical base is recovered by subtracting the kernel's load
    /// offset from the virtual addresses of the linker-provided symbols.
    pub fn kernel() -> Self {
        // These symbols are defined in the linker script.
        unsafe extern "C" {
            fn __kernel_start();
            fn __kernel_end();
        }
        MemoryRegion {
            base: __kernel_start as *const () as usize - kernel_loaded_offset(),
            len: __kernel_end as *const () as usize - __kernel_start as *const () as usize,
            typ: MemoryRegionType::Kernel,
        }
    }

    /// Returns the region occupied by the framebuffer described by `fb`.
    pub fn framebuffer(fb: &crate::boot::BootloaderFramebufferArg) -> Self {
        Self {
            base: fb.address,
            // `bpp` is in bits per pixel, so round the total number of bits
            // up to whole bytes.
            len: (fb.width * fb.height * fb.bpp).div_ceil(8),
            typ: MemoryRegionType::Framebuffer,
        }
    }

    /// Returns the region occupied by a bootloader-loaded module.
    ///
    /// The module must reside in the linear mapping, so that its physical
    /// address can be recovered by subtracting the linear mapping base from
    /// its virtual address.
    ///
    /// # Panics
    ///
    /// Panics if the module's bytes are not within the linear mapping.
    pub fn module(bytes: &[u8]) -> Self {
        let vaddr = bytes.as_ptr() as Vaddr;
        assert!(crate::mm::kspace::LINEAR_MAPPING_VADDR_RANGE.contains(&vaddr));

        Self {
            base: vaddr - crate::mm::kspace::LINEAR_MAPPING_BASE_VADDR,
            len: bytes.len(),
            typ: MemoryRegionType::Reclaimable,
        }
    }

    /// Returns the base physical address of the region.
    pub fn base(&self) -> Paddr {
        self.base
    }

    /// Returns the length of the region in bytes.
    pub fn len(&self) -> usize {
        self.len
    }

    /// Returns the exclusive end physical address of the region.
    pub fn end(&self) -> Paddr {
        self.base + self.len
    }

    /// Returns whether the region is empty.
    pub fn is_empty(&self) -> bool {
        self.len == 0
    }

    /// Returns the type of the region.
    pub fn typ(&self) -> MemoryRegionType {
        self.typ
    }

    /// Aligns the region to page boundaries.
    ///
    /// Usable regions are shrunk inward so that they cover only whole pages,
    /// while all other regions are expanded outward so that any page they
    /// partially cover is not treated as usable.
    fn as_aligned(&self) -> Self {
        let (base, end) = match self.typ() {
            MemoryRegionType::Usable => (
                self.base().align_up(PAGE_SIZE),
                self.end().align_down(PAGE_SIZE),
            ),
            _ => (
                self.base().align_down(PAGE_SIZE),
                self.end().align_up(PAGE_SIZE),
            ),
        };
        MemoryRegion {
            base,
            // A usable region smaller than a page shrinks to zero length
            // instead of underflowing.
            len: end.saturating_sub(base),
            typ: self.typ,
        }
    }
}

/// The maximum number of regions that can be recorded.
const MAX_REGIONS: usize = 512;

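/// A fixed-capacity array of memory regions.
///
/// The capacity is fixed so that the array can be built and manipulated in
/// the early boot phase, before any allocator is available.
///
/// A sketch of typical usage (illustrative only, not compiled as a doctest):
///
/// ```ignore
/// let mut regions = MemoryRegionArray::<64>::new();
/// regions
///     .push(MemoryRegion::new(0, PAGE_SIZE, MemoryRegionType::Usable))
///     .unwrap();
/// // `MemoryRegionArray` dereferences to a slice of the pushed regions.
/// assert_eq!(regions.len(), 1);
/// ```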
pub(crate) struct MemoryRegionArray<const LEN: usize = MAX_REGIONS> {
    /// The fixed-capacity storage for the regions.
    regions: [MemoryRegion; LEN],
    /// The number of regions that have been pushed so far.
    count: usize,
}

impl<const LEN: usize> Default for MemoryRegionArray<LEN> {
    fn default() -> Self {
        Self::new()
    }
}

impl<const LEN: usize> Deref for MemoryRegionArray<LEN> {
    type Target = [MemoryRegion];

    fn deref(&self) -> &Self::Target {
        &self.regions[..self.count]
    }
}

/// The error returned when a [`MemoryRegionArray`] is full.
#[derive(Debug)]
pub(crate) struct ArrayFullError;

impl<const LEN: usize> MemoryRegionArray<LEN> {
    /// Constructs an empty array.
    pub(crate) const fn new() -> Self {
        Self {
            regions: [MemoryRegion::bad(); LEN],
            count: 0,
        }
    }

    /// Appends a region to the array.
    ///
    /// If the array is full, an error is returned.
    pub(crate) fn push(&mut self, region: MemoryRegion) -> Result<(), ArrayFullError> {
        if self.count >= self.regions.len() {
            return Err(ArrayFullError);
        }

        self.regions[self.count] = region;
        self.count += 1;

        Ok(())
    }
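    /// Returns a page-aligned, non-overlapping copy of the regions.
    ///
    /// Usable regions are shrunk to page boundaries while all other regions
    /// are expanded to page boundaries. Where regions overlap, the type with
    /// the smaller discriminant (i.e., the higher priority) wins; uncovered
    /// gaps between regions are reported as [`MemoryRegionType::Unknown`].
    /// Adjacent regions of the same type are then merged.
    ///
    /// A sketch of the expected behavior (illustrative only, not compiled as
    /// a doctest):
    ///
    /// ```ignore
    /// let mut regions = MemoryRegionArray::<4>::new();
    /// regions
    ///     .push(MemoryRegion::new(0, 2 * PAGE_SIZE, MemoryRegionType::Usable))
    ///     .unwrap();
    /// regions
    ///     .push(MemoryRegion::new(PAGE_SIZE, PAGE_SIZE, MemoryRegionType::Reserved))
    ///     .unwrap();
    ///
    /// let regions = regions.into_non_overlapping();
    ///
    /// // The reserved region wins where the two regions overlap.
    /// assert_eq!(regions[0].typ(), MemoryRegionType::Usable); // [0, PAGE_SIZE)
    /// assert_eq!(regions[1].typ(), MemoryRegionType::Reserved); // [PAGE_SIZE, 2 * PAGE_SIZE)
    /// ```
    ///
    /// # Panics
    ///
    /// Panics if the coalesced regions do not fit in an array of length `LEN`.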
    pub(crate) fn into_non_overlapping(mut self) -> Self {
        // Regions are processed page by page, up to the last page-aligned
        // address covered by any region.
        let max_addr = self
            .iter()
            .map(|r| r.end())
            .max()
            .unwrap_or(0)
            .align_down(PAGE_SIZE);
        // Align all regions to page boundaries first.
        self.regions.iter_mut().for_each(|r| *r = r.as_aligned());

        let mut result = MemoryRegionArray::<LEN>::new();

        let mut cur_right = 0;

        while cur_right < max_addr {
            // The type at `cur_right` is the highest-priority (i.e.,
            // smallest) type among all regions covering it; uncovered gaps
            // are reported as `Unknown`.
            let typ = self
                .iter()
                .filter(|region| (region.base()..region.end()).contains(&cur_right))
                .map(|region| region.typ())
                .min()
                .unwrap_or(MemoryRegionType::Unknown);

            // The produced region extends to the nearest region boundary
            // after `cur_right`.
            let right = self
                .iter()
                .filter_map(|region| {
                    if region.base() > cur_right {
                        Some(region.base())
                    } else if region.end() > cur_right {
                        Some(region.end())
                    } else {
                        None
                    }
                })
                .min()
                .unwrap();

            result
                .push(MemoryRegion::new(cur_right, right - cur_right, typ))
                .unwrap();

            cur_right = right;
        }

        // Merge adjacent regions that have the same type. If nothing was
        // produced, the result stays empty.
        let mut merged_count = result.count.min(1);
        for i in 1..result.count {
            if result[i].typ() == result.regions[merged_count - 1].typ() {
                result.regions[merged_count - 1] = MemoryRegion::new(
                    result.regions[merged_count - 1].base(),
                    result.regions[merged_count - 1].len() + result[i].len(),
                    result.regions[merged_count - 1].typ(),
                );
            } else {
                result.regions[merged_count] = result[i];
                merged_count += 1;
            }
        }
        result.count = merged_count;

        result
    }
}

#[cfg(ktest)]
mod test {
    use super::*;
    use crate::prelude::ktest;

    #[ktest]
    fn sort_full_non_overlapping() {
        let mut regions = MemoryRegionArray::<64>::new();

        // Three usable regions that overlap or touch each other.
        regions
            .push(MemoryRegion::new(
                0,
                PAGE_SIZE + 1,
                MemoryRegionType::Usable,
            ))
            .unwrap();
        regions
            .push(MemoryRegion::new(
                PAGE_SIZE - 1,
                PAGE_SIZE + 2,
                MemoryRegionType::Usable,
            ))
            .unwrap();
        regions
            .push(MemoryRegion::new(
                PAGE_SIZE * 2,
                PAGE_SIZE * 5,
                MemoryRegionType::Usable,
            ))
            .unwrap();
        // A bad memory region contained within the usable regions above.
        regions
            .push(MemoryRegion::new(
                PAGE_SIZE * 3 + 1,
                PAGE_SIZE - 2,
                MemoryRegionType::BadMemory,
            ))
            .unwrap();
        // A usable region that leaves a gap after the regions above.
        regions
            .push(MemoryRegion::new(
                PAGE_SIZE * 9,
                PAGE_SIZE * 2,
                MemoryRegionType::Usable,
            ))
            .unwrap();

        let regions = regions.into_non_overlapping();

        assert_eq!(regions.count, 5);
        assert_eq!(regions[0].base(), 0);
        assert_eq!(regions[0].len(), PAGE_SIZE * 3);
        assert_eq!(regions[0].typ(), MemoryRegionType::Usable);

        assert_eq!(regions[1].base(), PAGE_SIZE * 3);
        assert_eq!(regions[1].len(), PAGE_SIZE);
        assert_eq!(regions[1].typ(), MemoryRegionType::BadMemory);

        assert_eq!(regions[2].base(), PAGE_SIZE * 4);
        assert_eq!(regions[2].len(), PAGE_SIZE * 3);
        assert_eq!(regions[2].typ(), MemoryRegionType::Usable);

        assert_eq!(regions[3].base(), PAGE_SIZE * 7);
        assert_eq!(regions[3].len(), PAGE_SIZE * 2);
        assert_eq!(regions[3].typ(), MemoryRegionType::Unknown);

        assert_eq!(regions[4].base(), PAGE_SIZE * 9);
        assert_eq!(regions[4].len(), PAGE_SIZE * 2);
        assert_eq!(regions[4].typ(), MemoryRegionType::Usable);
    }
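
    // The tests below are illustrative additions: they check properties that
    // `into_non_overlapping` and `push` are expected to uphold, with expected
    // values derived from the coalescing algorithm above.

    #[ktest]
    fn merges_adjacent_same_type() {
        let mut regions = MemoryRegionArray::<8>::new();
        regions
            .push(MemoryRegion::new(0, PAGE_SIZE, MemoryRegionType::Usable))
            .unwrap();
        regions
            .push(MemoryRegion::new(
                PAGE_SIZE,
                PAGE_SIZE,
                MemoryRegionType::Usable,
            ))
            .unwrap();

        let regions = regions.into_non_overlapping();

        // Two adjacent usable regions coalesce into a single region.
        assert_eq!(regions.count, 1);
        assert_eq!(regions[0].base(), 0);
        assert_eq!(regions[0].len(), PAGE_SIZE * 2);
        assert_eq!(regions[0].typ(), MemoryRegionType::Usable);
    }

    #[ktest]
    fn empty_array_stays_empty() {
        let regions = MemoryRegionArray::<8>::new().into_non_overlapping();
        assert_eq!(regions.count, 0);
    }

    #[ktest]
    fn push_to_full_array_fails() {
        let mut regions = MemoryRegionArray::<1>::new();
        regions
            .push(MemoryRegion::new(0, PAGE_SIZE, MemoryRegionType::Usable))
            .unwrap();
        assert!(regions.push(MemoryRegion::bad()).is_err());
    }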
}