pub use uefi_raw::table::boot::MemoryAttribute as EFIMemoryAttribute;
pub use uefi_raw::table::boot::MemoryDescriptor as EFIMemoryDesc;
pub use uefi_raw::table::boot::MemoryType as EFIMemoryAreaType;
use crate::{Tag, TagTrait, TagType, TagTypeId};
use core::fmt::{Debug, Formatter};
use core::marker::PhantomData;
use core::mem;
#[cfg(feature = "builder")]
use {crate::builder::AsBytes, crate::builder::BoxedDst};
/// Size in bytes of the fixed header of a [`MemoryMapTag`] preceding its
/// dynamic `areas` payload: the tag type id plus the `size`, `entry_size`,
/// and `entry_version` fields.
const METADATA_SIZE: usize = mem::size_of::<TagTypeId>() + 3 * mem::size_of::<u32>();
/// Memory map tag: a fixed header followed by a variable number of
/// [`MemoryArea`] entries (a dynamically sized type, hence `ptr_meta::Pointee`).
#[derive(ptr_meta::Pointee, Debug, PartialEq, Eq)]
#[repr(C)]
pub struct MemoryMapTag {
    typ: TagTypeId,      // tag type id (TagType::Mmap)
    size: u32,           // total size of the tag in bytes, header included
    entry_size: u32,     // size in bytes of one entry in `areas`
    entry_version: u32,  // entry format version (0 when built by `new`)
    areas: [MemoryArea], // dynamically sized entry payload
}
impl MemoryMapTag {
    /// Constructs a new memory map tag from the given memory areas.
    ///
    /// The serialized payload is `entry_size` and `entry_version` (both
    /// little-endian) followed by the raw bytes of every area.
    #[cfg(feature = "builder")]
    pub fn new(areas: &[MemoryArea]) -> BoxedDst<Self> {
        let entry_size: u32 = mem::size_of::<MemoryArea>().try_into().unwrap();
        let entry_version: u32 = 0;
        let mut payload = alloc::vec::Vec::new();
        payload.extend_from_slice(&entry_size.to_le_bytes());
        payload.extend_from_slice(&entry_version.to_le_bytes());
        for area in areas {
            payload.extend_from_slice(area.as_bytes());
        }
        BoxedDst::new(&payload)
    }

    /// Returns the size, in bytes, of one memory map entry.
    pub fn entry_size(&self) -> u32 {
        self.entry_size
    }

    /// Returns the version of the memory map entry format.
    pub fn entry_version(&self) -> u32 {
        self.entry_version
    }

    /// Returns the memory areas described by this tag.
    ///
    /// # Panics
    /// Panics if the entries do not have the layout of [`MemoryArea`]
    /// (i.e. `entry_size` differs from `size_of::<MemoryArea>()`).
    pub fn memory_areas(&self) -> &[MemoryArea] {
        assert_eq!(self.entry_size as usize, mem::size_of::<MemoryArea>());
        &self.areas
    }
}
impl TagTrait for MemoryMapTag {
    const ID: TagType = TagType::Mmap;

    /// Number of `MemoryArea` elements in the DST tail, derived from the
    /// total tag size reported in the base tag header.
    fn dst_size(base_tag: &Tag) -> usize {
        let total = base_tag.size as usize;
        assert!(total >= METADATA_SIZE);
        let payload = total - METADATA_SIZE;
        let elem = mem::size_of::<MemoryArea>();
        // The payload must consist of whole entries.
        assert_eq!(payload % elem, 0);
        payload / elem
    }
}
/// One entry of a [`MemoryMapTag`]: a region of physical memory and its type.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[repr(C)]
pub struct MemoryArea {
    base_addr: u64,        // physical start address of the region
    length: u64,           // length of the region in bytes
    typ: MemoryAreaTypeId, // raw memory area type id
    _reserved: u32,        // reserved; set to 0 by `new`
}
impl MemoryArea {
    /// Creates a new memory area from its start address, byte length,
    /// and area type (anything convertible into [`MemoryAreaTypeId`]).
    pub fn new(base_addr: u64, length: u64, typ: impl Into<MemoryAreaTypeId>) -> Self {
        let typ = typ.into();
        Self {
            base_addr,
            length,
            typ,
            _reserved: 0,
        }
    }

    /// The physical start address of the area.
    pub fn start_address(&self) -> u64 {
        self.base_addr
    }

    /// The exclusive physical end address (start address plus size).
    pub fn end_address(&self) -> u64 {
        self.base_addr + self.length
    }

    /// The size of the area in bytes.
    pub fn size(&self) -> u64 {
        self.length
    }

    /// The raw type id of the area.
    pub fn typ(&self) -> MemoryAreaTypeId {
        self.typ
    }
}
impl Debug for MemoryArea {
    /// Custom `Debug` that omits the `_reserved` field.
    fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
        let mut out = f.debug_struct("MemoryArea");
        out.field("base_addr", &self.base_addr);
        out.field("length", &self.length);
        out.field("typ", &self.typ);
        out.finish()
    }
}
// `repr(C)` struct with no padding requirements beyond its fields:
// raw-byte serialization for the builder is sound.
#[cfg(feature = "builder")]
impl AsBytes for MemoryArea {}
/// ABI-stable newtype over the raw `u32` memory area type field.
/// Use [`MemoryAreaType`] for the semantic, high-level view.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[repr(C)]
pub struct MemoryAreaTypeId(u32);
impl From<u32> for MemoryAreaTypeId {
fn from(value: u32) -> Self {
Self(value)
}
}
impl From<MemoryAreaTypeId> for u32 {
fn from(value: MemoryAreaTypeId) -> Self {
value.0
}
}
impl Debug for MemoryAreaTypeId {
    /// Renders via the semantic [`MemoryAreaType`] for readable output.
    fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
        let semantic = MemoryAreaType::from(*self);
        Debug::fmt(&semantic, f)
    }
}
/// High-level, semantic view of a memory area type
/// (see the Multiboot2 specification for the numeric encoding).
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum MemoryAreaType {
    /// Available (usable) RAM (raw id 1).
    Available,
    /// Reserved memory, not usable by the OS (raw id 2).
    Reserved,
    /// Usable memory holding ACPI information (raw id 3).
    AcpiAvailable,
    /// Reserved memory that must be preserved on hibernation (raw id 4).
    ReservedHibernate,
    /// Memory occupied by defective RAM modules (raw id 5).
    Defective,
    /// Any other, non-standard raw type id.
    Custom(u32),
}
impl From<MemoryAreaTypeId> for MemoryAreaType {
fn from(value: MemoryAreaTypeId) -> Self {
match value.0 {
1 => Self::Available,
2 => Self::Reserved,
3 => Self::AcpiAvailable,
4 => Self::ReservedHibernate,
5 => Self::Defective,
val => Self::Custom(val),
}
}
}
impl From<MemoryAreaType> for MemoryAreaTypeId {
fn from(value: MemoryAreaType) -> Self {
let integer = match value {
MemoryAreaType::Available => 1,
MemoryAreaType::Reserved => 2,
MemoryAreaType::AcpiAvailable => 3,
MemoryAreaType::ReservedHibernate => 4,
MemoryAreaType::Defective => 5,
MemoryAreaType::Custom(val) => val,
};
integer.into()
}
}
impl PartialEq<MemoryAreaType> for MemoryAreaTypeId {
fn eq(&self, other: &MemoryAreaType) -> bool {
let val: MemoryAreaTypeId = (*other).into();
let val: u32 = val.0;
self.0.eq(&val)
}
}
impl PartialEq<MemoryAreaTypeId> for MemoryAreaType {
fn eq(&self, other: &MemoryAreaTypeId) -> bool {
let val: MemoryAreaTypeId = (*self).into();
let val: u32 = val.0;
other.0.eq(&val)
}
}
/// Basic memory information tag: amount of lower and upper memory.
/// Per the Multiboot2 spec these values are in kibibytes — TODO confirm
/// against the bootloader in use.
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[repr(C)]
pub struct BasicMemoryInfoTag {
    typ: TagTypeId,    // tag type id (TagType::BasicMeminfo)
    size: u32,         // total tag size in bytes (fixed for this sized tag)
    memory_lower: u32, // amount of lower memory
    memory_upper: u32, // amount of upper memory
}
impl BasicMemoryInfoTag {
pub fn new(memory_lower: u32, memory_upper: u32) -> Self {
Self {
typ: Self::ID.into(),
size: mem::size_of::<BasicMemoryInfoTag>().try_into().unwrap(),
memory_lower,
memory_upper,
}
}
pub fn memory_lower(&self) -> u32 {
self.memory_lower
}
pub fn memory_upper(&self) -> u32 {
self.memory_upper
}
}
impl TagTrait for BasicMemoryInfoTag {
    const ID: TagType = TagType::BasicMeminfo;

    // Sized tag (no DST tail), so the pointer metadata is `()`.
    fn dst_size(_base_tag: &Tag) {}
}
/// Size in bytes of the fixed header of an [`EFIMemoryMapTag`] preceding the
/// raw memory map: the tag type id plus `size`, `desc_size`, and `desc_version`.
const EFI_METADATA_SIZE: usize = mem::size_of::<TagTypeId>() + 3 * mem::size_of::<u32>();
// Raw-byte serialization of EFI memory descriptors for the builder.
#[cfg(feature = "builder")]
impl AsBytes for EFIMemoryDesc {}
/// EFI memory map tag (DST): a fixed header followed by the raw bytes of the
/// UEFI memory map.
///
/// The map is stored as raw bytes because the per-descriptor stride is given
/// by `desc_size`, which may exceed `size_of::<EFIMemoryDesc>()` (the
/// iterator strides by `desc_size`, see [`EFIMemoryAreaIter`]).
#[derive(ptr_meta::Pointee, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[repr(C)]
pub struct EFIMemoryMapTag {
    typ: TagTypeId,    // tag type id (TagType::EfiMmap)
    size: u32,         // total tag size in bytes, header included
    desc_size: u32,    // stride in bytes of one descriptor in `memory_map`
    desc_version: u32, // descriptor format version
    memory_map: [u8],  // raw EFI memory map bytes
}
impl EFIMemoryMapTag {
    /// Builds the tag from typed descriptors, padding every descriptor up to
    /// an 8-byte stride boundary.
    ///
    /// NOTE(review): when `size_of::<EFIMemoryDesc>()` is already a multiple
    /// of 8, `desc_size_diff` evaluates to 8 (not 0), so a full extra 8 bytes
    /// of zero padding is appended per descriptor. This matches the
    /// `desc_size == 48` value observed from real firmware (see the tests
    /// below), but confirm it is intentional rather than an off-by-stride.
    #[cfg(feature = "builder")]
    pub fn new_from_descs(descs: &[EFIMemoryDesc]) -> BoxedDst<Self> {
        let size_base = mem::size_of::<EFIMemoryDesc>();
        // Padding that brings each entry up to the next 8-byte boundary.
        let desc_size_diff = mem::size_of::<u64>() - size_base % mem::size_of::<u64>();
        let desc_size = size_base + desc_size_diff;
        assert!(desc_size >= size_base);
        let mut efi_mmap = alloc::vec::Vec::with_capacity(descs.len() * desc_size);
        for desc in descs {
            efi_mmap.extend(desc.as_bytes());
            // Zero-fill the padding after every descriptor.
            efi_mmap.extend([0].repeat(desc_size_diff));
        }
        Self::new_from_map(
            desc_size as u32,
            EFIMemoryDesc::VERSION,
            efi_mmap.as_slice(),
        )
    }

    /// Builds the tag from an already-serialized EFI memory map.
    ///
    /// # Panics
    /// Panics if `desc_size` is zero, if the map length is not a multiple of
    /// `desc_size`, or if the map is not aligned for `EFIMemoryDesc`.
    #[cfg(feature = "builder")]
    pub fn new_from_map(desc_size: u32, desc_version: u32, efi_mmap: &[u8]) -> BoxedDst<Self> {
        assert!(desc_size > 0);
        assert_eq!(efi_mmap.len() % desc_size as usize, 0);
        assert_eq!(
            efi_mmap
                .as_ptr()
                .align_offset(mem::align_of::<EFIMemoryDesc>()),
            0
        );
        // Payload layout: desc_size, desc_version (little-endian), then the map.
        let bytes = [
            &desc_size.to_le_bytes(),
            &desc_version.to_le_bytes(),
            efi_mmap,
        ]
        .concat();
        BoxedDst::new(&bytes)
    }

    /// Returns an iterator over the EFI memory descriptors.
    ///
    /// # Panics
    /// Panics if the descriptor version is not `EFIMemoryDesc::VERSION` or
    /// the raw map is misaligned for `EFIMemoryDesc`.
    pub fn memory_areas(&self) -> EFIMemoryAreaIter {
        assert_eq!(self.desc_version, EFIMemoryDesc::VERSION);
        // The iterator casts raw bytes to `&EFIMemoryDesc`; verify alignment
        // once up front so the unsafe reads in `next()` stay sound.
        assert_eq!(
            self.memory_map
                .as_ptr()
                .align_offset(mem::align_of::<EFIMemoryDesc>()),
            0
        );
        EFIMemoryAreaIter::new(self)
    }
}
impl Debug for EFIMemoryMapTag {
    /// Debug output summarizing the raw map (pointer, length, entry count)
    /// instead of dumping every byte.
    fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
        let mut out = f.debug_struct("EFIMemoryMapTag");
        out.field("typ", &self.typ);
        out.field("size", &self.size);
        out.field("desc_size", &self.desc_size);
        out.field("buf", &self.memory_map.as_ptr());
        out.field("buf_len", &self.memory_map.len());
        out.field("entries", &self.memory_areas().len());
        out.finish()
    }
}
impl TagTrait for EFIMemoryMapTag {
    const ID: TagType = TagType::EfiMmap;

    /// Length in bytes of the raw `memory_map` tail, derived from the total
    /// tag size reported in the base tag header.
    fn dst_size(base_tag: &Tag) -> usize {
        let total = base_tag.size as usize;
        assert!(total >= EFI_METADATA_SIZE);
        total - EFI_METADATA_SIZE
    }
}
/// Iterator over the descriptors of an [`EFIMemoryMapTag`], striding through
/// the raw map by the tag's `desc_size`.
#[derive(Clone, Debug)]
pub struct EFIMemoryAreaIter<'a> {
    mmap_tag: &'a EFIMemoryMapTag, // tag whose raw map is being walked
    i: usize,                      // index of the next descriptor to yield
    entries: usize,                // total number of descriptors in the map
    phantom: PhantomData<&'a EFIMemoryDesc>, // ties yielded refs to the tag's lifetime
}
impl<'a> EFIMemoryAreaIter<'a> {
    /// Creates an iterator over the descriptors of `mmap_tag`.
    ///
    /// # Panics
    /// Panics if the tag reports a zero `desc_size` or a map length that is
    /// not a multiple of `desc_size` — both indicate a corrupt MBI.
    fn new(mmap_tag: &'a EFIMemoryMapTag) -> Self {
        let desc_size = mmap_tag.desc_size as usize;
        let mmap_len = mmap_tag.memory_map.len();
        // Guard explicitly: without this, a corrupt tag with desc_size == 0
        // would panic below with an unhelpful divide-by-zero message.
        assert!(desc_size > 0, "desc_size must be non-zero. The MBI seems to be corrupt.");
        assert_eq!(mmap_len % desc_size, 0, "memory map length must be a multiple of `desc_size` by definition. The MBI seems to be corrupt.");
        Self {
            mmap_tag,
            i: 0,
            entries: mmap_len / desc_size,
            phantom: PhantomData,
        }
    }
}
impl<'a> Iterator for EFIMemoryAreaIter<'a> {
    type Item = &'a EFIMemoryDesc;

    /// Yields the next descriptor, striding by `desc_size` bytes (which may
    /// exceed `size_of::<EFIMemoryDesc>()` on real firmware).
    fn next(&mut self) -> Option<&'a EFIMemoryDesc> {
        if self.i >= self.entries {
            return None;
        }
        // SAFETY: `i < entries` and `entries * desc_size == memory_map.len()`
        // (checked in `EFIMemoryAreaIter::new`), so the read stays inside the
        // tag's byte buffer; `memory_areas()` verified the buffer's alignment
        // for `EFIMemoryDesc` before constructing this iterator.
        let desc = unsafe {
            self.mmap_tag
                .memory_map
                .as_ptr()
                .add(self.i * self.mmap_tag.desc_size as usize)
                .cast::<EFIMemoryDesc>()
                .as_ref()
                .unwrap()
        };
        self.i += 1;
        Some(desc)
    }

    /// Exact bounds over the remaining descriptors. Required because this
    /// iterator implements `ExactSizeIterator`, whose contract demands that
    /// `size_hint` return exact bounds (the default `(0, None)` violates it).
    fn size_hint(&self) -> (usize, Option<usize>) {
        let remaining = self.entries - self.i;
        (remaining, Some(remaining))
    }
}
impl<'a> ExactSizeIterator for EFIMemoryAreaIter<'a> {
    /// Number of descriptors *remaining*, per the `ExactSizeIterator`
    /// contract. The previous implementation returned the total entry count
    /// even after elements had been consumed, which over-reported the length
    /// of a partially advanced iterator. Fresh iterators (including the one
    /// used by `EFIMemoryMapTag`'s `Debug` impl) are unaffected since `i == 0`.
    fn len(&self) -> usize {
        self.entries - self.i
    }
}
#[cfg(all(test, feature = "builder", not(miri)))]
mod tests {
use super::*;
use std::mem::size_of;
#[test]
fn construction_and_parsing() {
let descs = [
EFIMemoryDesc {
ty: EFIMemoryAreaType::CONVENTIONAL,
phys_start: 0x1000,
virt_start: 0x1000,
page_count: 1,
att: Default::default(),
},
EFIMemoryDesc {
ty: EFIMemoryAreaType::LOADER_DATA,
phys_start: 0x2000,
virt_start: 0x2000,
page_count: 3,
att: Default::default(),
},
];
let efi_mmap_tag = EFIMemoryMapTag::new_from_descs(&descs);
assert_eq!(efi_mmap_tag.desc_size, 48 );
let mut iter = efi_mmap_tag.memory_areas();
assert_eq!(iter.next(), Some(&descs[0]));
assert_eq!(iter.next(), Some(&descs[1]));
assert_eq!(iter.next(), None);
}
#[test]
fn test_real_data() {
const DESC_SIZE: u32 = 48;
const DESC_VERSION: u32 = 1;
const MMAP_RAW: [u64; 60] = [
3, 0, 0, 1, 15, 0, 7, 4096, 0, 134, 15, 0, 4, 552960, 0, 1, 15, 0, 7, 557056, 0, 24,
15, 0, 7, 1048576, 0, 1792, 15, 0, 10, 8388608, 0, 8, 15, 0, 7, 8421376, 0, 3, 15, 0,
10, 8433664, 0, 1, 15, 0, 7, 8437760, 0, 4, 15, 0, 10, 8454144, 0, 240, 15, 0,
];
let buf = MMAP_RAW;
let buf = unsafe {
core::slice::from_raw_parts(buf.as_ptr().cast::<u8>(), buf.len() * size_of::<u64>())
};
let tag = EFIMemoryMapTag::new_from_map(DESC_SIZE, DESC_VERSION, buf);
let entries = tag.memory_areas().copied().collect::<alloc::vec::Vec<_>>();
let expected = [
EFIMemoryDesc {
ty: EFIMemoryAreaType::BOOT_SERVICES_CODE,
phys_start: 0x0,
virt_start: 0x0,
page_count: 0x1,
att: EFIMemoryAttribute::UNCACHEABLE
| EFIMemoryAttribute::WRITE_COMBINE
| EFIMemoryAttribute::WRITE_THROUGH
| EFIMemoryAttribute::WRITE_BACK,
},
EFIMemoryDesc {
ty: EFIMemoryAreaType::CONVENTIONAL,
phys_start: 0x1000,
virt_start: 0x0,
page_count: 0x86,
att: EFIMemoryAttribute::UNCACHEABLE
| EFIMemoryAttribute::WRITE_COMBINE
| EFIMemoryAttribute::WRITE_THROUGH
| EFIMemoryAttribute::WRITE_BACK,
},
EFIMemoryDesc {
ty: EFIMemoryAreaType::BOOT_SERVICES_DATA,
phys_start: 0x87000,
virt_start: 0x0,
page_count: 0x1,
att: EFIMemoryAttribute::UNCACHEABLE
| EFIMemoryAttribute::WRITE_COMBINE
| EFIMemoryAttribute::WRITE_THROUGH
| EFIMemoryAttribute::WRITE_BACK,
},
EFIMemoryDesc {
ty: EFIMemoryAreaType::CONVENTIONAL,
phys_start: 0x88000,
virt_start: 0x0,
page_count: 0x18,
att: EFIMemoryAttribute::UNCACHEABLE
| EFIMemoryAttribute::WRITE_COMBINE
| EFIMemoryAttribute::WRITE_THROUGH
| EFIMemoryAttribute::WRITE_BACK,
},
EFIMemoryDesc {
ty: EFIMemoryAreaType::CONVENTIONAL,
phys_start: 0x100000,
virt_start: 0x0,
page_count: 0x700,
att: EFIMemoryAttribute::UNCACHEABLE
| EFIMemoryAttribute::WRITE_COMBINE
| EFIMemoryAttribute::WRITE_THROUGH
| EFIMemoryAttribute::WRITE_BACK,
},
EFIMemoryDesc {
ty: EFIMemoryAreaType::ACPI_NON_VOLATILE,
phys_start: 0x800000,
virt_start: 0x0,
page_count: 0x8,
att: EFIMemoryAttribute::UNCACHEABLE
| EFIMemoryAttribute::WRITE_COMBINE
| EFIMemoryAttribute::WRITE_THROUGH
| EFIMemoryAttribute::WRITE_BACK,
},
EFIMemoryDesc {
ty: EFIMemoryAreaType::CONVENTIONAL,
phys_start: 0x808000,
virt_start: 0x0,
page_count: 0x3,
att: EFIMemoryAttribute::UNCACHEABLE
| EFIMemoryAttribute::WRITE_COMBINE
| EFIMemoryAttribute::WRITE_THROUGH
| EFIMemoryAttribute::WRITE_BACK,
},
EFIMemoryDesc {
ty: EFIMemoryAreaType::ACPI_NON_VOLATILE,
phys_start: 0x80b000,
virt_start: 0x0,
page_count: 0x1,
att: EFIMemoryAttribute::UNCACHEABLE
| EFIMemoryAttribute::WRITE_COMBINE
| EFIMemoryAttribute::WRITE_THROUGH
| EFIMemoryAttribute::WRITE_BACK,
},
EFIMemoryDesc {
ty: EFIMemoryAreaType::CONVENTIONAL,
phys_start: 0x80c000,
virt_start: 0x0,
page_count: 0x4,
att: EFIMemoryAttribute::UNCACHEABLE
| EFIMemoryAttribute::WRITE_COMBINE
| EFIMemoryAttribute::WRITE_THROUGH
| EFIMemoryAttribute::WRITE_BACK,
},
EFIMemoryDesc {
ty: EFIMemoryAreaType::ACPI_NON_VOLATILE,
phys_start: 0x810000,
virt_start: 0x0,
page_count: 0xf0,
att: EFIMemoryAttribute::UNCACHEABLE
| EFIMemoryAttribute::WRITE_COMBINE
| EFIMemoryAttribute::WRITE_THROUGH
| EFIMemoryAttribute::WRITE_BACK,
},
];
assert_eq!(entries.as_slice(), &expected);
}
}