1pub use uefi_raw::table::boot::MemoryAttribute as EFIMemoryAttribute;
5pub use uefi_raw::table::boot::MemoryDescriptor as EFIMemoryDesc;
6pub use uefi_raw::table::boot::MemoryType as EFIMemoryAreaType;
7
8use crate::tag::TagHeader;
9use crate::{TagType, TagTypeId};
10use core::fmt::{Debug, Formatter};
11use core::marker::PhantomData;
12use core::mem;
13use multiboot2_common::{MaybeDynSized, Tag};
14#[cfg(feature = "builder")]
15use {alloc::boxed::Box, core::slice, multiboot2_common::new_boxed};
16
/// The memory map tag of the Multiboot2 boot information.
///
/// Provides the bootloader-supplied memory map as a list of
/// [`MemoryArea`]s. This is a dynamically sized type (DST): `areas` is a
/// trailing slice whose element count is derived from the tag header's
/// `size` field (see `MaybeDynSized::dst_len`).
#[derive(ptr_meta::Pointee, Debug, PartialEq, Eq)]
#[repr(C, align(8))]
pub struct MemoryMapTag {
    /// Common Multiboot2 tag header (tag type + total size in bytes).
    header: TagHeader,
    /// Size in bytes of one entry of `areas`.
    entry_size: u32,
    /// Format version of the entries; set to `0` by the builder.
    entry_version: u32,
    /// The actual memory areas (DST tail).
    areas: [MemoryArea],
}
35
impl MemoryMapTag {
    /// Constructs a new boxed tag from the given memory areas.
    ///
    /// The header is created with a size of `0`; the real total size is
    /// computed by [`new_boxed`] from the payload byte slices.
    #[cfg(feature = "builder")]
    #[must_use]
    pub fn new(areas: &[MemoryArea]) -> Box<Self> {
        let header = TagHeader::new(Self::ID, 0);
        let entry_size = (mem::size_of::<MemoryArea>() as u32).to_ne_bytes();
        let entry_version = 0_u32.to_ne_bytes();
        let areas = {
            let ptr = areas.as_ptr().cast::<u8>();
            let len = mem::size_of_val(areas);
            // SAFETY: `ptr` and `len` describe exactly the bytes backing the
            // `areas` slice, which stays alive and borrowed for this scope.
            unsafe { slice::from_raw_parts(ptr, len) }
        };
        new_boxed(header, &[&entry_size, &entry_version, areas])
    }

    /// Returns the size in bytes of one entry of the memory map.
    #[must_use]
    pub const fn entry_size(&self) -> u32 {
        self.entry_size
    }

    /// Returns the format version of the memory map entries.
    #[must_use]
    pub const fn entry_version(&self) -> u32 {
        self.entry_version
    }

    /// Returns the memory areas of the memory map.
    ///
    /// # Panics
    /// Panics if `entry_size` does not equal `size_of::<MemoryArea>()`,
    /// since the DST tail can only be interpreted with that exact stride.
    #[must_use]
    pub fn memory_areas(&self) -> &[MemoryArea] {
        assert_eq!(self.entry_size as usize, mem::size_of::<MemoryArea>());
        &self.areas
    }
}
75
impl MaybeDynSized for MemoryMapTag {
    type Header = TagHeader;

    /// Statically known prefix: header + `entry_size` + `entry_version`.
    const BASE_SIZE: usize = mem::size_of::<TagHeader>() + 2 * mem::size_of::<u32>();

    /// Computes the number of `MemoryArea` elements in the DST tail from
    /// the total tag size reported in the header.
    fn dst_len(header: &TagHeader) -> usize {
        assert!(header.size as usize >= Self::BASE_SIZE);
        let size = header.size as usize - Self::BASE_SIZE;
        // The payload must consist of whole entries; anything else means
        // the boot information is malformed.
        assert_eq!(size % mem::size_of::<MemoryArea>(), 0);
        size / mem::size_of::<MemoryArea>()
    }
}
88
// Associates this type with the Multiboot2 memory map tag id.
impl Tag for MemoryMapTag {
    type IDType = TagType;

    const ID: TagType = TagType::Mmap;
}
94
/// One entry of the Multiboot2 memory map: a contiguous region of
/// physical memory with a start address, length, and type id.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[repr(C)]
pub struct MemoryArea {
    /// Physical start address of the area.
    base_addr: u64,
    /// Length of the area in bytes.
    length: u64,
    /// Raw type id of the area (see [`MemoryAreaType`]).
    typ: MemoryAreaTypeId,
    /// Reserved field; always written as `0` by [`MemoryArea::new`].
    _reserved: u32,
}
104
impl MemoryArea {
    /// Creates a new memory area descriptor from a start address, a
    /// length in bytes, and a type (either a raw id or a
    /// [`MemoryAreaType`] variant, via `Into`).
    pub fn new(base_addr: u64, length: u64, typ: impl Into<MemoryAreaTypeId>) -> Self {
        Self {
            base_addr,
            length,
            typ: typ.into(),
            // Reserved by the spec; always zero.
            _reserved: 0,
        }
    }

    /// Returns the start address (inclusive) of the area.
    #[must_use]
    pub const fn start_address(&self) -> u64 {
        self.base_addr
    }

    /// Returns the end address (exclusive) of the area.
    ///
    /// Note: computed as `base_addr + length`, which overflows (panics
    /// in debug builds) for an area reaching the top of the u64 range.
    #[must_use]
    pub const fn end_address(&self) -> u64 {
        self.base_addr + self.length
    }

    /// Returns the size of the area in bytes.
    #[must_use]
    pub const fn size(&self) -> u64 {
        self.length
    }

    /// Returns the raw type id of the area.
    #[must_use]
    pub const fn typ(&self) -> MemoryAreaTypeId {
        self.typ
    }
}
140
impl Debug for MemoryArea {
    fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
        // `_reserved` carries no information and is deliberately omitted.
        f.debug_struct("MemoryArea")
            .field("base_addr", &self.base_addr)
            .field("length", &self.length)
            .field("typ", &self.typ)
            .finish()
    }
}
150
/// ABI-friendly raw memory-area type id as it appears in the binary
/// boot information. Convertible to/from the high-level
/// [`MemoryAreaType`] enum.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[repr(C)]
pub struct MemoryAreaTypeId(u32);
155
// Wraps a raw u32 into the newtype without validation; unknown values
// map to `MemoryAreaType::Custom` when interpreted.
impl From<u32> for MemoryAreaTypeId {
    fn from(value: u32) -> Self {
        Self(value)
    }
}
161
// Unwraps the newtype back into the raw u32 id.
impl From<MemoryAreaTypeId> for u32 {
    fn from(value: MemoryAreaTypeId) -> Self {
        value.0
    }
}
167
impl Debug for MemoryAreaTypeId {
    fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
        // Delegate to the high-level enum for a readable representation
        // (e.g. `Available` instead of a bare number).
        let mt = MemoryAreaType::from(*self);
        Debug::fmt(&mt, f)
    }
}
174
/// High-level representation of a memory-area type. The numeric mapping
/// is defined by the `From` conversions to/from [`MemoryAreaTypeId`].
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum MemoryAreaType {
    /// Memory available for general use (id `1`).
    Available,
    /// Reserved memory, not available for use (id `2`).
    Reserved,
    /// Usable memory holding ACPI information (id `3`).
    AcpiAvailable,
    /// Memory that must be preserved on hibernation (id `4`).
    ReservedHibernate,
    /// Memory occupied by defective RAM (id `5`).
    Defective,
    /// Any other id not covered by the variants above.
    Custom(u32),
}
203
204impl From<MemoryAreaTypeId> for MemoryAreaType {
205 fn from(value: MemoryAreaTypeId) -> Self {
206 match value.0 {
207 1 => Self::Available,
208 2 => Self::Reserved,
209 3 => Self::AcpiAvailable,
210 4 => Self::ReservedHibernate,
211 5 => Self::Defective,
212 val => Self::Custom(val),
213 }
214 }
215}
216
217impl From<MemoryAreaType> for MemoryAreaTypeId {
218 fn from(value: MemoryAreaType) -> Self {
219 let integer = match value {
220 MemoryAreaType::Available => 1,
221 MemoryAreaType::Reserved => 2,
222 MemoryAreaType::AcpiAvailable => 3,
223 MemoryAreaType::ReservedHibernate => 4,
224 MemoryAreaType::Defective => 5,
225 MemoryAreaType::Custom(val) => val,
226 };
227 integer.into()
228 }
229}
230
231impl PartialEq<MemoryAreaType> for MemoryAreaTypeId {
232 fn eq(&self, other: &MemoryAreaType) -> bool {
233 let val: Self = (*other).into();
234 let val: u32 = val.0;
235 self.0.eq(&val)
236 }
237}
238
239impl PartialEq<MemoryAreaTypeId> for MemoryAreaType {
240 fn eq(&self, other: &MemoryAreaTypeId) -> bool {
241 let val: MemoryAreaTypeId = (*self).into();
242 let val: u32 = val.0;
243 other.0.eq(&val)
244 }
245}
246
/// The basic memory info tag: amounts of lower and upper memory as
/// reported by the bootloader. Fixed-size (not a DST).
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[repr(C)]
pub struct BasicMemoryInfoTag {
    /// Common Multiboot2 tag header.
    header: TagHeader,
    /// Amount of lower memory (per the Multiboot spec, in KiB starting
    /// at address 0 — TODO confirm against the spec revision in use).
    memory_lower: u32,
    /// Amount of upper memory (per the Multiboot spec, in KiB starting
    /// at 1 MiB — TODO confirm against the spec revision in use).
    memory_upper: u32,
}
268
impl BasicMemoryInfoTag {
    /// Creates a new tag from the given lower/upper memory amounts.
    #[must_use]
    pub fn new(memory_lower: u32, memory_upper: u32) -> Self {
        Self {
            // Fixed-size tag, so the header size is simply `size_of::<Self>()`.
            header: TagHeader::new(Self::ID, mem::size_of::<Self>().try_into().unwrap()),
            memory_lower,
            memory_upper,
        }
    }

    /// Returns the amount of lower memory.
    #[must_use]
    pub const fn memory_lower(&self) -> u32 {
        self.memory_lower
    }

    /// Returns the amount of upper memory.
    #[must_use]
    pub const fn memory_upper(&self) -> u32 {
        self.memory_upper
    }
}
292
impl MaybeDynSized for BasicMemoryInfoTag {
    type Header = TagHeader;

    // Statically sized tag: the base size is the whole struct.
    const BASE_SIZE: usize = mem::size_of::<Self>();

    // No DST tail, hence no length to compute.
    fn dst_len(_: &TagHeader) {}
}
300
// Associates this type with the Multiboot2 basic meminfo tag id.
impl Tag for BasicMemoryInfoTag {
    type IDType = TagType;

    const ID: TagType = TagType::BasicMeminfo;
}
306
/// The EFI memory map tag: the raw UEFI memory map as passed through by
/// the bootloader. A dynamically sized type (DST).
#[derive(ptr_meta::Pointee, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[repr(C)]
pub struct EFIMemoryMapTag {
    /// Common Multiboot2 tag header.
    header: TagHeader,
    /// Size in bytes of one descriptor. May exceed
    /// `size_of::<EFIMemoryDesc>()`, which is why the map is stored as
    /// raw bytes and strided manually by [`EFIMemoryAreaIter`].
    desc_size: u32,
    /// Descriptor format version (compared against `EFIMemoryDesc::VERSION`).
    desc_version: u32,
    /// Raw bytes of the EFI memory map (DST tail).
    memory_map: [u8],
}
331
impl EFIMemoryMapTag {
    /// Creates a new boxed tag from a slice of EFI memory descriptors,
    /// using `size_of::<EFIMemoryDesc>()` as the descriptor stride and
    /// `EFIMemoryDesc::VERSION` as the descriptor version.
    #[cfg(feature = "builder")]
    #[must_use]
    pub fn new_from_descs(descs: &[EFIMemoryDesc]) -> Box<Self> {
        let efi_mmap = {
            let ptr = descs.as_ptr().cast::<u8>();
            let len = mem::size_of_val(descs);
            // SAFETY: `ptr` and `len` describe exactly the bytes backing
            // the `descs` slice, valid for the duration of this borrow.
            unsafe { slice::from_raw_parts(ptr, len) }
        };

        Self::new_from_map(
            mem::size_of::<EFIMemoryDesc>() as u32,
            EFIMemoryDesc::VERSION,
            efi_mmap,
        )
    }

    /// Creates a new boxed tag from a raw EFI memory map buffer plus its
    /// descriptor stride and version.
    ///
    /// # Panics
    /// Panics if `desc_size` is zero.
    #[cfg(feature = "builder")]
    #[must_use]
    pub fn new_from_map(desc_size: u32, desc_version: u32, efi_mmap: &[u8]) -> Box<Self> {
        // Size 0 is a placeholder; `new_boxed` computes the real total.
        let header = TagHeader::new(Self::ID, 0);
        assert_ne!(desc_size, 0);
        let desc_size = desc_size.to_ne_bytes();
        let desc_version = desc_version.to_ne_bytes();
        new_boxed(header, &[&desc_size, &desc_version, efi_mmap])
    }

    /// Returns an iterator over the EFI memory map descriptors.
    ///
    /// # Panics
    /// Panics if the descriptor version differs from
    /// `EFIMemoryDesc::VERSION`, or if the buffer is not aligned for
    /// `EFIMemoryDesc` (the iterator dereferences descriptors in place).
    #[must_use]
    pub fn memory_areas(&self) -> EFIMemoryAreaIter {
        assert_eq!(self.desc_version, EFIMemoryDesc::VERSION);
        assert_eq!(
            self.memory_map
                .as_ptr()
                .align_offset(mem::align_of::<EFIMemoryDesc>()),
            0
        );

        EFIMemoryAreaIter::new(self)
    }
}
380
impl Debug for EFIMemoryMapTag {
    fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
        f.debug_struct("EFIMemoryMapTag")
            .field("typ", &self.header.typ)
            .field("size", &self.header.size)
            .field("desc_size", &self.desc_size)
            .field("desc_version", &self.desc_version)
            // Print pointer and length of the raw buffer, not its bytes,
            // then the parsed descriptors via the iterator.
            .field("buf", &self.memory_map.as_ptr())
            .field("buf_len", &self.memory_map.len())
            .field("entries", &self.memory_areas())
            .finish()
    }
}
394
395impl MaybeDynSized for EFIMemoryMapTag {
396 type Header = TagHeader;
397
398 const BASE_SIZE: usize = mem::size_of::<TagTypeId>() + 3 * mem::size_of::<u32>();
399
400 fn dst_len(header: &TagHeader) -> usize {
401 assert!(header.size as usize >= Self::BASE_SIZE);
402 header.size as usize - Self::BASE_SIZE
403 }
404}
405
// Associates this type with the Multiboot2 EFI memory map tag id.
impl Tag for EFIMemoryMapTag {
    type IDType = TagType;

    const ID: TagType = TagType::EfiMmap;
}
411
/// Iterator over the descriptors of an [`EFIMemoryMapTag`].
///
/// Strides through the raw byte buffer in steps of the tag's
/// `desc_size`, which may be larger than `size_of::<EFIMemoryDesc>()`.
#[derive(Clone)]
pub struct EFIMemoryAreaIter<'a> {
    mmap_tag: &'a EFIMemoryMapTag,
    /// Index of the next descriptor to yield.
    i: usize,
    /// Total number of descriptors in the map.
    entries: usize,
    phantom: PhantomData<&'a EFIMemoryDesc>,
}
420
421impl<'a> EFIMemoryAreaIter<'a> {
422 fn new(mmap_tag: &'a EFIMemoryMapTag) -> Self {
423 let desc_size = mmap_tag.desc_size as usize;
424 let mmap_len = mmap_tag.memory_map.len();
425 assert_eq!(
426 mmap_len % desc_size,
427 0,
428 "memory map length must be a multiple of `desc_size` by definition. The MBI seems to be corrupt."
429 );
430 Self {
431 mmap_tag,
432 i: 0,
433 entries: mmap_len / desc_size,
434 phantom: PhantomData,
435 }
436 }
437}
438
439impl<'a> Iterator for EFIMemoryAreaIter<'a> {
440 type Item = &'a EFIMemoryDesc;
441 fn next(&mut self) -> Option<&'a EFIMemoryDesc> {
442 if self.i >= self.entries {
443 return None;
444 }
445
446 let desc = unsafe {
447 self.mmap_tag
448 .memory_map
449 .as_ptr()
450 .add(self.i * self.mmap_tag.desc_size as usize)
451 .cast::<EFIMemoryDesc>()
452 .as_ref()
453 .unwrap()
454 };
455
456 self.i += 1;
457
458 Some(desc)
459 }
460}
461
462impl ExactSizeIterator for EFIMemoryAreaIter<'_> {
463 fn len(&self) -> usize {
464 self.entries
465 }
466}
467
468impl Debug for EFIMemoryAreaIter<'_> {
469 fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
470 let mut debug = f.debug_list();
471 let iter = self.clone();
472 for elem in iter {
473 debug.entry(elem);
474 }
475 debug.finish()
476 }
477}
478
#[cfg(all(test, feature = "builder"))]
mod tests {
    use super::*;
    use std::mem::size_of;

    /// Builds (empty and non-empty) Multiboot2 memory map tags and
    /// debug-prints the result; a smoke test for the DST builder path.
    #[test]
    fn test_create_old_mmap() {
        let _mmap = MemoryMapTag::new(&[]);
        let mmap = MemoryMapTag::new(&[
            MemoryArea::new(0x1000, 0x2000, MemoryAreaType::Available),
            MemoryArea::new(0x2000, 0x3000, MemoryAreaType::Available),
        ]);
        dbg!(mmap);
    }

    /// Round-trip: build an EFI memory map tag from typed descriptors
    /// and check the iterator yields exactly those descriptors.
    #[test]
    fn efi_construct_and_parse() {
        let descs = [
            EFIMemoryDesc {
                ty: EFIMemoryAreaType::CONVENTIONAL,
                phys_start: 0x1000,
                virt_start: 0x1000,
                page_count: 1,
                att: Default::default(),
            },
            EFIMemoryDesc {
                ty: EFIMemoryAreaType::LOADER_DATA,
                phys_start: 0x2000,
                virt_start: 0x2000,
                page_count: 3,
                att: Default::default(),
            },
        ];
        let efi_mmap_tag = EFIMemoryMapTag::new_from_descs(&descs);

        let mut iter = efi_mmap_tag.memory_areas();

        assert_eq!(iter.next(), Some(&descs[0]));
        assert_eq!(iter.next(), Some(&descs[1]));

        assert_eq!(iter.next(), None);
    }

    /// Parses a raw memory map dump (u64 words to guarantee suitable
    /// alignment) with `DESC_SIZE` = 48 and checks the decoded entries.
    #[test]
    fn efi_test_real_data() {
        const DESC_SIZE: u32 = 48;
        const DESC_VERSION: u32 = 1;
        // Raw dump: each descriptor is 6 u64 words (48 bytes), 10 entries.
        const MMAP_RAW: [u64; 60] = [
            3, 0, 0, 1, 15, 0, 7, 4096, 0, 134, 15, 0, 4, 552960, 0, 1, 15, 0, 7, 557056, 0, 24,
            15, 0, 7, 1048576, 0, 1792, 15, 0, 10, 8388608, 0, 8, 15, 0, 7, 8421376, 0, 3, 15, 0,
            10, 8433664, 0, 1, 15, 0, 7, 8437760, 0, 4, 15, 0, 10, 8454144, 0, 240, 15, 0,
        ];
        let buf = MMAP_RAW;
        let buf = unsafe {
            core::slice::from_raw_parts(buf.as_ptr().cast::<u8>(), buf.len() * size_of::<u64>())
        };
        let tag = EFIMemoryMapTag::new_from_map(DESC_SIZE, DESC_VERSION, buf);
        let entries = tag.memory_areas().copied().collect::<alloc::vec::Vec<_>>();
        let expected = [
            EFIMemoryDesc {
                ty: EFIMemoryAreaType::BOOT_SERVICES_CODE,
                phys_start: 0x0,
                virt_start: 0x0,
                page_count: 0x1,
                att: EFIMemoryAttribute::UNCACHEABLE
                    | EFIMemoryAttribute::WRITE_COMBINE
                    | EFIMemoryAttribute::WRITE_THROUGH
                    | EFIMemoryAttribute::WRITE_BACK,
            },
            EFIMemoryDesc {
                ty: EFIMemoryAreaType::CONVENTIONAL,
                phys_start: 0x1000,
                virt_start: 0x0,
                page_count: 0x86,
                att: EFIMemoryAttribute::UNCACHEABLE
                    | EFIMemoryAttribute::WRITE_COMBINE
                    | EFIMemoryAttribute::WRITE_THROUGH
                    | EFIMemoryAttribute::WRITE_BACK,
            },
            EFIMemoryDesc {
                ty: EFIMemoryAreaType::BOOT_SERVICES_DATA,
                phys_start: 0x87000,
                virt_start: 0x0,
                page_count: 0x1,
                att: EFIMemoryAttribute::UNCACHEABLE
                    | EFIMemoryAttribute::WRITE_COMBINE
                    | EFIMemoryAttribute::WRITE_THROUGH
                    | EFIMemoryAttribute::WRITE_BACK,
            },
            EFIMemoryDesc {
                ty: EFIMemoryAreaType::CONVENTIONAL,
                phys_start: 0x88000,
                virt_start: 0x0,
                page_count: 0x18,
                att: EFIMemoryAttribute::UNCACHEABLE
                    | EFIMemoryAttribute::WRITE_COMBINE
                    | EFIMemoryAttribute::WRITE_THROUGH
                    | EFIMemoryAttribute::WRITE_BACK,
            },
            EFIMemoryDesc {
                ty: EFIMemoryAreaType::CONVENTIONAL,
                phys_start: 0x100000,
                virt_start: 0x0,
                page_count: 0x700,
                att: EFIMemoryAttribute::UNCACHEABLE
                    | EFIMemoryAttribute::WRITE_COMBINE
                    | EFIMemoryAttribute::WRITE_THROUGH
                    | EFIMemoryAttribute::WRITE_BACK,
            },
            EFIMemoryDesc {
                ty: EFIMemoryAreaType::ACPI_NON_VOLATILE,
                phys_start: 0x800000,
                virt_start: 0x0,
                page_count: 0x8,
                att: EFIMemoryAttribute::UNCACHEABLE
                    | EFIMemoryAttribute::WRITE_COMBINE
                    | EFIMemoryAttribute::WRITE_THROUGH
                    | EFIMemoryAttribute::WRITE_BACK,
            },
            EFIMemoryDesc {
                ty: EFIMemoryAreaType::CONVENTIONAL,
                phys_start: 0x808000,
                virt_start: 0x0,
                page_count: 0x3,
                att: EFIMemoryAttribute::UNCACHEABLE
                    | EFIMemoryAttribute::WRITE_COMBINE
                    | EFIMemoryAttribute::WRITE_THROUGH
                    | EFIMemoryAttribute::WRITE_BACK,
            },
            EFIMemoryDesc {
                ty: EFIMemoryAreaType::ACPI_NON_VOLATILE,
                phys_start: 0x80b000,
                virt_start: 0x0,
                page_count: 0x1,
                att: EFIMemoryAttribute::UNCACHEABLE
                    | EFIMemoryAttribute::WRITE_COMBINE
                    | EFIMemoryAttribute::WRITE_THROUGH
                    | EFIMemoryAttribute::WRITE_BACK,
            },
            EFIMemoryDesc {
                ty: EFIMemoryAreaType::CONVENTIONAL,
                phys_start: 0x80c000,
                virt_start: 0x0,
                page_count: 0x4,
                att: EFIMemoryAttribute::UNCACHEABLE
                    | EFIMemoryAttribute::WRITE_COMBINE
                    | EFIMemoryAttribute::WRITE_THROUGH
                    | EFIMemoryAttribute::WRITE_BACK,
            },
            EFIMemoryDesc {
                ty: EFIMemoryAreaType::ACPI_NON_VOLATILE,
                phys_start: 0x810000,
                virt_start: 0x0,
                page_count: 0xf0,
                att: EFIMemoryAttribute::UNCACHEABLE
                    | EFIMemoryAttribute::WRITE_COMBINE
                    | EFIMemoryAttribute::WRITE_THROUGH
                    | EFIMemoryAttribute::WRITE_BACK,
            },
        ];
        assert_eq!(entries.as_slice(), &expected);
    }
}
646}