use crate::{TagHeader, TagType};
use core::fmt::{Debug, Formatter};
use core::marker::PhantomData;
use core::mem;
use core::str::Utf8Error;
use bitflags::bitflags;
use multiboot2_common::{MaybeDynSized, Tag};
#[cfg(feature = "builder")]
use {alloc::boxed::Box, multiboot2_common::new_boxed};
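/// This tag contains the section header table from an ELF binary.
///
/// The individual sections are accessed through the iterator returned by
/// [`ElfSectionsTag::sections`].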
#[derive(ptr_meta::Pointee, PartialEq, Eq)]
#[repr(C, align(8))]
pub struct ElfSectionsTag {
header: TagHeader,
number_of_sections: u32,
entry_size: u32,
shndx: u32,
sections: [u8],
}
impl ElfSectionsTag {
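/// Constructs a new tag (requires the `builder` feature). `sections` must
/// contain the raw section header table: `number_of_sections` entries of
/// `entry_size` bytes each, with `shndx` being the index of the entry that
/// describes the section name string table.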
#[cfg(feature = "builder")]
#[must_use]
pub fn new(number_of_sections: u32, entry_size: u32, shndx: u32, sections: &[u8]) -> Box<Self> {
let header = TagHeader::new(Self::ID, 0);
let number_of_sections = number_of_sections.to_ne_bytes();
let entry_size = entry_size.to_ne_bytes();
let shndx = shndx.to_ne_bytes();
new_boxed(
header,
&[&number_of_sections, &entry_size, &shndx, sections],
)
}
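/// Returns an iterator over the ELF sections described by this tag.
///
/// A minimal sketch with hypothetical input data (building the tag this way
/// assumes the `builder` feature):
///
/// ```ignore
/// // Two zeroed 64-byte entries, i.e. 64-bit section headers of type
/// // `Unused`; the iterator skips such entries, so the loop never runs here.
/// let raw = [0u8; 2 * 64];
/// let tag = ElfSectionsTag::new(2, 64, 0, &raw);
/// for section in tag.sections() {
///     let _ = (section.section_type(), section.size());
/// }
/// ```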
#[must_use]
pub const fn sections(&self) -> ElfSectionIter<'_> {
let string_section_offset = (self.shndx * self.entry_size) as isize;
let string_section_ptr =
unsafe { self.sections.as_ptr().offset(string_section_offset) as *const _ };
ElfSectionIter {
current_section: self.sections.as_ptr(),
remaining_sections: self.number_of_sections,
entry_size: self.entry_size,
string_section: string_section_ptr,
_phantom_data: PhantomData,
}
}
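/// Returns the number of section header entries.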
#[must_use]
pub const fn number_of_sections(&self) -> u32 {
self.number_of_sections
}
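/// Returns the size in bytes of a single section header entry.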
#[must_use]
pub const fn entry_size(&self) -> u32 {
self.entry_size
}
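/// Returns the index of the section header that describes the section name
/// string table.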
#[must_use]
pub const fn shndx(&self) -> u32 {
self.shndx
}
}
impl MaybeDynSized for ElfSectionsTag {
type Header = TagHeader;
const BASE_SIZE: usize = mem::size_of::<TagHeader>() + 3 * mem::size_of::<u32>();
fn dst_len(header: &TagHeader) -> usize {
assert!(header.size as usize >= Self::BASE_SIZE);
header.size as usize - Self::BASE_SIZE
}
}
impl Tag for ElfSectionsTag {
type IDType = TagType;
const ID: TagType = TagType::ElfSections;
}
impl Debug for ElfSectionsTag {
fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
f.debug_struct("ElfSectionsTag")
.field("typ", &self.header.typ)
.field("size", &self.header.size)
.field("number_of_sections", &self.number_of_sections)
.field("entry_size", &self.entry_size)
.field("shndx", &self.shndx)
.field("sections", &self.sections())
.finish()
}
}
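/// An iterator over the ELF sections of an [`ElfSectionsTag`]. Entries of
/// type [`ElfSectionType::Unused`] are skipped.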
#[derive(Clone)]
pub struct ElfSectionIter<'a> {
current_section: *const u8,
remaining_sections: u32,
entry_size: u32,
string_section: *const u8,
_phantom_data: PhantomData<&'a ()>,
}
impl<'a> Iterator for ElfSectionIter<'a> {
type Item = ElfSection<'a>;
fn next(&mut self) -> Option<ElfSection<'a>> {
while self.remaining_sections != 0 {
let section = ElfSection {
inner: self.current_section,
string_section: self.string_section,
entry_size: self.entry_size,
_phantom: PhantomData,
};
self.current_section = unsafe { self.current_section.offset(self.entry_size as isize) };
self.remaining_sections -= 1;
if section.section_type() != ElfSectionType::Unused {
return Some(section);
}
}
None
}
fn size_hint(&self) -> (usize, Option<usize>) {
(
self.remaining_sections as usize,
Some(self.remaining_sections as usize),
)
}
}
impl ExactSizeIterator for ElfSectionIter<'_> {
fn len(&self) -> usize {
self.remaining_sections as usize
}
}
impl Debug for ElfSectionIter<'_> {
fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
const ELF_SECTIONS_LIMIT: usize = 7;
let mut debug = f.debug_list();
self.clone().take(ELF_SECTIONS_LIMIT).for_each(|ref e| {
debug.entry(e);
});
if self.len() > ELF_SECTIONS_LIMIT {
debug.entry(&"...");
}
debug.finish()
}
}
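/// A single ELF section, backed by either a 32-bit or a 64-bit section
/// header entry.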
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct ElfSection<'a> {
inner: *const u8,
string_section: *const u8,
entry_size: u32,
_phantom: PhantomData<&'a ()>,
}
impl Debug for ElfSection<'_> {
fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
let inner = self.get();
f.debug_struct("ElfSection")
.field("inner", &inner)
.field("string_section_ptr", &self.string_section)
.finish()
}
}
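/// Raw 32-bit ELF section header (the layout of `Elf32_Shdr`).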
#[derive(Clone, Copy, Debug)]
#[repr(C, packed)]
struct ElfSectionInner32 {
name_index: u32,
typ: u32,
flags: u32,
addr: u32,
offset: u32,
size: u32,
link: u32,
info: u32,
addralign: u32,
entry_size: u32,
}
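/// Raw 64-bit ELF section header (the layout of `Elf64_Shdr`).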
#[derive(Clone, Copy, Debug)]
#[repr(C, packed)]
struct ElfSectionInner64 {
name_index: u32,
typ: u32,
flags: u64,
addr: u64,
offset: u64,
size: u64,
link: u32,
info: u32,
addralign: u64,
entry_size: u64,
}
impl ElfSection<'_> {
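/// Returns the section type, mapping unknown values to
/// [`ElfSectionType::Unused`] with a warning.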
#[must_use]
pub fn section_type(&self) -> ElfSectionType {
match self.get().typ() {
0 => ElfSectionType::Unused,
1 => ElfSectionType::ProgramSection,
2 => ElfSectionType::LinkerSymbolTable,
3 => ElfSectionType::StringTable,
4 => ElfSectionType::RelaRelocation,
5 => ElfSectionType::SymbolHashTable,
6 => ElfSectionType::DynamicLinkingTable,
7 => ElfSectionType::Note,
8 => ElfSectionType::Uninitialized,
9 => ElfSectionType::RelRelocation,
10 => ElfSectionType::Reserved,
11 => ElfSectionType::DynamicLoaderSymbolTable,
0x6000_0000..=0x6FFF_FFFF => ElfSectionType::EnvironmentSpecific,
0x7000_0000..=0x7FFF_FFFF => ElfSectionType::ProcessorSpecific,
e => {
log::warn!("Unknown section type {e:x}. Treating as ElfSectionType::Unused");
ElfSectionType::Unused
}
}
}
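/// Returns the raw section type value (`sh_type`).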
#[must_use]
pub fn section_type_raw(&self) -> u32 {
self.get().typ()
}
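/// Reads the section's name from the section name string table.
///
/// # Errors
/// Returns a [`Utf8Error`] if the name is not valid UTF-8.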
pub fn name(&self) -> Result<&str, Utf8Error> {
use core::{slice, str};
let name_ptr = unsafe { self.string_table().offset(self.get().name_index() as isize) };
let strlen = {
let mut len = 0;
while unsafe { *name_ptr.offset(len) } != 0 {
len += 1;
}
len as usize
};
str::from_utf8(unsafe { slice::from_raw_parts(name_ptr, strlen) })
}
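/// Returns the address at which the section resides in memory.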
#[must_use]
pub fn start_address(&self) -> u64 {
self.get().addr()
}
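/// Returns the end address of the section (start address plus size).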
#[must_use]
pub fn end_address(&self) -> u64 {
self.get().addr() + self.get().size()
}
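/// Returns the size of the section in bytes.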
#[must_use]
pub fn size(&self) -> u64 {
self.get().size()
}
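/// Returns the required address alignment of the section.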
#[must_use]
pub fn addralign(&self) -> u64 {
self.get().addralign()
}
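/// Returns the section flags; bits not covered by [`ElfSectionFlags`] are
/// dropped.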
#[must_use]
pub fn flags(&self) -> ElfSectionFlags {
ElfSectionFlags::from_bits_truncate(self.get().flags())
}
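/// Returns whether the section occupies memory at runtime, i.e. whether
/// [`ElfSectionFlags::ALLOCATED`] is set.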
#[must_use]
pub fn is_allocated(&self) -> bool {
self.flags().contains(ElfSectionFlags::ALLOCATED)
}
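/// Interprets the raw entry as a 32-bit or 64-bit section header, based on
/// the entry size (40 or 64 bytes).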
fn get(&self) -> &dyn ElfSectionInner {
match self.entry_size {
40 => unsafe { &*(self.inner as *const ElfSectionInner32) },
64 => unsafe { &*(self.inner as *const ElfSectionInner64) },
s => panic!("Unexpected entry size: {s}"),
}
}
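/// Returns a pointer to the start of the section name string table, read
/// from the `addr` field of the string table's own section header.
///
/// # Safety
/// `self.string_section` must point to a valid section header entry of the
/// size given by `self.entry_size`.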
unsafe fn string_table(&self) -> *const u8 {
match self.entry_size {
40 => {
let ptr = self.string_section.cast::<ElfSectionInner32>();
let reference = unsafe { ptr.as_ref().unwrap() };
reference.addr() as *const u8
}
64 => {
let ptr = self.string_section.cast::<ElfSectionInner64>();
let reference = unsafe { ptr.as_ref().unwrap() };
reference.addr() as *const u8
}
s => panic!("Unexpected entry size: {s}"),
}
}
}
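/// Common accessors over the 32-bit and 64-bit section header layouts,
/// widening 32-bit fields to `u64` where necessary.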
trait ElfSectionInner: Debug {
fn name_index(&self) -> u32;
fn typ(&self) -> u32;
fn flags(&self) -> u64;
fn addr(&self) -> u64;
fn size(&self) -> u64;
fn addralign(&self) -> u64;
}
impl ElfSectionInner for ElfSectionInner32 {
fn name_index(&self) -> u32 {
self.name_index
}
fn typ(&self) -> u32 {
self.typ
}
fn flags(&self) -> u64 {
self.flags.into()
}
fn addr(&self) -> u64 {
self.addr.into()
}
fn size(&self) -> u64 {
self.size.into()
}
fn addralign(&self) -> u64 {
self.addralign.into()
}
}
impl ElfSectionInner for ElfSectionInner64 {
fn name_index(&self) -> u32 {
self.name_index
}
fn typ(&self) -> u32 {
self.typ
}
fn flags(&self) -> u64 {
self.flags
}
fn addr(&self) -> u64 {
self.addr
}
fn size(&self) -> u64 {
self.size
}
fn addralign(&self) -> u64 {
self.addralign
}
}
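/// An enum abstraction over raw ELF section types (`sh_type` values).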
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[repr(u32)]
pub enum ElfSectionType {
Unused = 0,
ProgramSection = 1,
LinkerSymbolTable = 2,
StringTable = 3,
RelaRelocation = 4,
SymbolHashTable = 5,
DynamicLinkingTable = 6,
Note = 7,
Uninitialized = 8,
RelRelocation = 9,
Reserved = 10,
DynamicLoaderSymbolTable = 11,
EnvironmentSpecific = 0x6000_0000,
ProcessorSpecific = 0x7000_0000,
}
bitflags! {
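/// ELF section flags (`sh_flags`). Only the bits listed below are
/// represented; [`ElfSection::flags`] silently drops any others.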
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord)]
#[repr(transparent)]
pub struct ElfSectionFlags: u64 {
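/// The section contains data that is writable during program execution.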
const WRITABLE = 0x1;
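/// The section occupies memory during program execution.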
const ALLOCATED = 0x2;
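/// The section contains executable machine instructions.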
const EXECUTABLE = 0x4;
}
}