EntryOwner

Struct EntryOwner 

Source
pub struct EntryOwner<C: PageTableConfig> {
    pub node: Option<NodeOwner<C>>,
    pub frame: Option<FrameEntryOwner>,
    pub locked: Option<Ghost<Seq<FrameView<C>>>>,
    pub absent: bool,
    pub in_scope: bool,
    pub path: TreePath<NR_ENTRIES>,
    pub parent_level: PagingLevel,
}

Fields§

- node: Option&lt;NodeOwner&lt;C&gt;&gt;
- frame: Option&lt;FrameEntryOwner&gt;
- locked: Option&lt;Ghost&lt;Seq&lt;FrameView&lt;C&gt;&gt;&gt;&gt;
- absent: bool
- in_scope: bool
- path: TreePath&lt;NR_ENTRIES&gt;
- parent_level: PagingLevel

Implementations§

Source§

impl<C: PageTableConfig> EntryOwner<C>

Source

/// Ghost effect on the meta-region map when this entry owner is reconstructed
/// from a raw PTE: for a node entry, the node's meta-slot `PointsTo` is
/// returned to `regions.slots` and its `raw_count` drops to 0 (the count an
/// in-scope entry is expected to have, per `expected_raw_count`).
pub open spec fn from_pte_regions_spec( self, regions: MetaRegionOwners, ) -> MetaRegionOwners

{
    if self.is_node() {
        // Index of the meta slot backing this node's frame.
        let index = frame_to_index(self.meta_slot_paddr().unwrap());
        let old_slot = regions.slot_owners[index];
        // Only raw_count changes; all other slot-owner fields are kept.
        let new_slot = MetaSlotOwner {
            raw_count: 0usize,
            ..old_slot
        };
        MetaRegionOwners {
            slots: regions.slots.insert(index, self.node.unwrap().meta_perm.points_to),
            slot_owners: regions.slot_owners.insert(index, new_slot),
        }
    } else {
        // Frame / absent / locked entries reference no node meta slot: no-op.
        regions
    }
}
Source

/// Ghost effect on the meta-region map when this entry owner is dissolved into
/// a raw PTE: for a node entry, the meta slot's `raw_count` is incremented by
/// one (the PTE now holds an untracked reference). `regions.slots` is left
/// unchanged, in contrast to `from_pte_regions_spec`.
pub open spec fn into_pte_regions_spec( self, regions: MetaRegionOwners, ) -> MetaRegionOwners

{
    if self.is_node() {
        let index = frame_to_index(self.meta_slot_paddr().unwrap());
        let old_slot = regions.slot_owners[index];
        // Bump the raw count; the `as usize` cast keeps the field type.
        let new_slot = MetaSlotOwner {
            raw_count: (old_slot.raw_count + 1) as usize,
            ..old_slot
        };
        MetaRegionOwners {
            // NOTE(review): `slots: regions.slots` is redundant given `..regions`
            // below — harmless, but one of the two could be dropped.
            slots: regions.slots,
            slot_owners: regions.slot_owners.insert(index, new_slot),
            ..regions
        }
    } else {
        // Non-node entries do not affect the region map.
        regions
    }
}
Source

/// The owner after being stored back into the tree as a PTE: identical to
/// `self` except `in_scope` becomes false. Inverse of `from_pte_owner_spec`.
pub open spec fn into_pte_owner_spec(self) -> EntryOwner<C>

{
    EntryOwner {
        in_scope: false,
        ..self
    }
}
Source

/// The owner after being taken out of the tree from a PTE: identical to
/// `self` except `in_scope` becomes true. Inverse of `into_pte_owner_spec`.
pub open spec fn from_pte_owner_spec(self) -> EntryOwner<C>

{
    EntryOwner {
        in_scope: true,
        ..self
    }
}
Source

/// Combined invariant for an entry stored as a PTE in the tree: both objects'
/// own invariants, agreement between the owner and the concrete PTE bits at
/// this owner's `parent_level`, the owner/region relation, and the fact that
/// a stored entry is not in scope.
pub open spec fn pte_invariants(self, pte: C::E, regions: MetaRegionOwners) -> bool

{
    &&& self.inv()
    &&& regions.inv()
    &&& self.match_pte(pte, self.parent_level)
    &&& self.relate_region(regions)
    &&& !self.in_scope

}

This is equivalent to the other invariant relations: it combines the inv predicate of each object with the well-formedness relations between them.

Source§

impl<C: PageTableConfig> EntryOwner<C>

Source

/// True iff this entry owns an intermediate page-table node.
pub open spec fn is_node(self) -> bool

{ self.node is Some }
Source

/// True iff this entry owns a mapped leaf frame.
pub open spec fn is_frame(self) -> bool

{ self.frame is Some }
Source

/// True iff this entry holds the ghost views of a locked subtree.
pub open spec fn is_locked(self) -> bool

{ self.locked is Some }
Source

/// True iff this entry represents an absent (unmapped) PTE.
pub open spec fn is_absent(self) -> bool

{ self.absent }
Source

/// Spec constructor for an absent entry at the given path and parent level.
/// The entry starts in scope (held by the caller, not yet stored as a PTE).
pub open spec fn new_absent_spec( path: TreePath<NR_ENTRIES>, parent_level: PagingLevel, ) -> Self

{
    EntryOwner {
        node: None,
        frame: None,
        locked: None,
        absent: true,
        in_scope: true,
        path,
        parent_level,
    }
}
Source

/// Spec constructor for a leaf-frame entry mapping `paddr` at `parent_level`,
/// carrying the frame's meta-slot permission. The mapped size is the page size
/// of the parent level; the entry starts in scope.
pub open spec fn new_frame_spec( paddr: Paddr, path: TreePath<NR_ENTRIES>, parent_level: PagingLevel, prop: PageProperty, slot_perm: PointsTo<MetaSlot>, ) -> Self

{
    EntryOwner {
        node: None,
        frame: Some(FrameEntryOwner {
            mapped_pa: paddr,
            size: page_size(parent_level),
            prop,
            slot_perm,
        }),
        locked: None,
        absent: false,
        in_scope: true,
        path,
        parent_level,
    }
}
Source

/// Spec constructor for an intermediate-node entry. The parent level is one
/// above the child node's own level; the entry starts in scope.
pub open spec fn new_node_spec(node: NodeOwner<C>, path: TreePath<NR_ENTRIES>) -> Self

{
    EntryOwner {
        node: Some(node),
        frame: None,
        locked: None,
        absent: false,
        in_scope: true,
        path,
        parent_level: (node.level + 1) as PagingLevel,
    }
}
Source

pub proof fn new_absent(path: TreePath<NR_ENTRIES>, parent_level: PagingLevel) -> tracked Self

returns
Self::new_absent_spec(path, parent_level),
Source

pub proof fn new_frame( paddr: Paddr, path: TreePath<NR_ENTRIES>, parent_level: PagingLevel, prop: PageProperty, tracked slot_perm: PointsTo<MetaSlot>, ) -> tracked Self

returns
Self::new_frame_spec(paddr, path, parent_level, prop, slot_perm),
Source

pub proof fn placeholder_slot_perm( paddr: Paddr, tracked regions: &MetaRegionOwners, ) -> tracked res : PointsTo<MetaSlot>

requires
regions.inv(),
paddr % PAGE_SIZE == 0,
paddr < MAX_PADDR,
ensures
res.addr() == meta_addr(frame_to_index(paddr)),
res.is_init(),
res.value().wf(regions.slot_owners[frame_to_index(paddr)]),
regions.slot_owners[frame_to_index(paddr)].inner_perms.ref_count.value()
    != REF_COUNT_UNUSED,
regions.slot_owners[frame_to_index(paddr)].path_if_in_pt is None,

Produces a slot permission for a frame address without requiring it from regions.slots. Used as a placeholder in cases (e.g. huge-page split) where the sub-frame slot perms are not yet fully tracked. Replace with real perm threading when the split path is verified.

Source

pub proof fn new_node(node: NodeOwner<C>, path: TreePath<NR_ENTRIES>) -> tracked Self

returns
Self::new_node_spec(node, path),
Source

pub proof fn new_untracked_frame( paddr: Paddr, parent_level: PagingLevel, prop: PageProperty, ) -> tracked res : Self

requires
paddr % PAGE_SIZE == 0,
paddr < MAX_PADDR,
1 <= parent_level,
parent_level <= NR_LEVELS,
ensures
res.is_frame(),
res.frame.unwrap().mapped_pa == paddr,
res.frame.unwrap().prop == prop,
res.frame.unwrap().size == page_size(parent_level),
res.parent_level == parent_level,
res.path.inv(),
res.in_scope,
res.inv(),
crate::mm::page_table::Child::<C>::Frame(paddr, parent_level, prop).wf(res),

Creates a ghost entry owner for mapping an untracked (device memory) frame. Unlike new_frame, this does not consume a slot permission from the meta region, since device memory PAs are outside the tracked frame allocator. The actual mapping correctness is guaranteed by the caller’s unsafe contract.

The requires reflect properties guaranteed by collect_largest_pages postconditions, so this axiom is only ever called with values that satisfy them.

Source

/// Agreement between this ghost owner and the concrete PTE bits, interpreted
/// at `parent_level`. Three cases, keyed on presence and `is_last`:
/// absent PTE -> absent owner; present non-last -> node owner whose meta-slot
/// address maps back to the PTE's paddr; present last -> frame owner with
/// matching paddr and page property. The paddr is always page-aligned and
/// below MAX_PADDR.
pub open spec fn match_pte(self, pte: C::E, parent_level: PagingLevel) -> bool

{
    &&& pte.paddr() % PAGE_SIZE == 0
    &&& pte.paddr() < MAX_PADDR
    &&& !pte.is_present()
        ==> {
            &&& self.is_absent()
            // An absent PTE above level 1 must not look like a huge-page leaf.
            &&& parent_level > 1 ==> !pte.is_last(parent_level)

        }
    &&& pte.is_present() && !pte.is_last(parent_level)
        ==> {
            &&& self.is_node()
            // The node's meta slot corresponds to the child frame the PTE points at.
            &&& meta_to_frame(self.node.unwrap().meta_perm.addr()) == pte.paddr()

        }
    &&& pte.is_present() && pte.is_last(parent_level)
        ==> {
            &&& self.is_frame()
            &&& self.frame.unwrap().mapped_pa == pte.paddr()
            &&& self.frame.unwrap().prop == pte.prop()

        }

}
Source

pub proof fn absent_match_pte(owner: Self, pte: C::E, parent_level: PagingLevel)

requires
owner.is_absent(),
pte == C::E::new_absent_spec(),
pte.paddr() % PAGE_SIZE == 0,
pte.paddr() < MAX_PADDR,
ensures
owner.match_pte(pte, parent_level),

When owner is absent and pte is the absent PTE with valid paddr, match_pte holds.

Source

pub proof fn last_pte_implies_frame_match(self, pte: C::E, parent_level: PagingLevel)

requires
self.inv(),
self.match_pte(pte, parent_level),
1 < parent_level,
pte.is_last(parent_level),
ensures
self.is_frame(),
self.frame.unwrap().mapped_pa == pte.paddr(),
self.frame.unwrap().prop == pte.prop(),
Source

pub proof fn huge_frame_split_child_at(self, regions: MetaRegionOwners, idx: usize)

requires
self.inv(),
self.is_frame(),
regions.inv(),
1 < self.parent_level < NR_LEVELS,
idx < NR_ENTRIES,
ensures
self.frame.unwrap().mapped_pa + idx * page_size((self.parent_level - 1) as PagingLevel)
    < MAX_PADDR,
((self.frame.unwrap().mapped_pa
    + idx * page_size((self.parent_level - 1) as PagingLevel)) as Paddr)
    % page_size((self.parent_level - 1) as PagingLevel) == 0,
((self.frame.unwrap().mapped_pa
    + idx * page_size((self.parent_level - 1) as PagingLevel)) as Paddr)
    + page_size((self.parent_level - 1) as PagingLevel) <= MAX_PADDR,
((self.frame.unwrap().mapped_pa
    + idx * page_size((self.parent_level - 1) as PagingLevel)) as Paddr) % PAGE_SIZE == 0,
Source

/// Expected meta-slot raw count for this entry: 0 while the owner is in scope
/// (held directly), 1 once it has been stored back as a PTE (one untracked
/// reference), matching the from/into_pte_regions_spec transitions.
pub open spec fn expected_raw_count(self) -> usize

{ if self.in_scope { 0 } else { 1 } }
Source

/// Relation between this entry and the meta-region owners, at the slot index
/// derived from the entry's paddr. Only `regions.slot_owners` is consulted
/// (never `regions.slots`) — see `relate_region_slot_owners_only`. Absent and
/// locked entries are unconstrained.
pub open spec fn relate_region(self, regions: MetaRegionOwners) -> bool

{
    if self.is_node() {
        let idx = frame_to_index(self.meta_slot_paddr().unwrap());
        // The slot is allocated, its raw count matches the entry's scope state,
        // and its recorded self-address/contents agree with the held permission.
        &&& regions.slot_owners[idx].inner_perms.ref_count.value() != REF_COUNT_UNUSED
        &&& regions.slot_owners[idx].raw_count == self.expected_raw_count()
        &&& regions.slot_owners[idx].self_addr == self.node.unwrap().meta_perm.addr()
        &&& self.node.unwrap().meta_perm.points_to.value().wf(regions.slot_owners[idx])
        // If the slot records a page-table path, it is this entry's path.
        &&& regions.slot_owners[idx].path_if_in_pt is Some
            ==> regions.slot_owners[idx].path_if_in_pt.unwrap() == self.path

    } else if self.is_frame() {
        let idx = frame_to_index(self.meta_slot_paddr().unwrap());
        // The frame's slot permission points at the right meta address, is
        // initialized, and its value is well-formed w.r.t. the slot owner.
        &&& self.frame.unwrap().slot_perm.addr() == meta_addr(idx)
        &&& self.frame.unwrap().slot_perm.is_init()
        &&& self.frame.unwrap().slot_perm.value().wf(regions.slot_owners[idx])
        &&& regions.slot_owners[idx].inner_perms.ref_count.value() != REF_COUNT_UNUSED
        &&& regions.slot_owners[idx].path_if_in_pt is Some
            ==> regions.slot_owners[idx].path_if_in_pt.unwrap() == self.path

    } else {
        true
    }
}
Source

pub proof fn active_entry_not_in_free_pool( entry: Self, regions: MetaRegionOwners, free_idx: usize, )

requires
regions.inv(),
entry.inv(),
entry.relate_region(regions),
regions.slots.contains_key(free_idx),
entry.meta_slot_paddr() is Some,
ensures
frame_to_index(entry.meta_slot_paddr().unwrap()) != free_idx,

PointsTo uniqueness: if meta slot free_idx is in the free pool (regions.slots), no active page table entry can own a PointsTo at the same slot address. Justified by Verus’s linear ownership of PointsTo<MetaSlot>: the slot’s PointsTo is either in regions.slots OR held by an active entry, never both.

Source

pub proof fn get_path(self) -> tracked TreePath<NR_ENTRIES>

returns
self.path,
Source

/// Physical address whose meta slot this entry references: for a node, the
/// frame address recovered from the node's meta-perm address; for a frame,
/// the mapped paddr itself; None for absent/locked entries.
pub open spec fn meta_slot_paddr(self) -> Option<Paddr>

{
    if self.is_node() {
        Some(meta_to_frame(self.node.unwrap().meta_perm.addr()))
    } else if self.is_frame() {
        Some(self.frame.unwrap().mapped_pa)
    } else {
        None
    }
}
Source

/// Disjointness of meta-slot paddrs: whenever both entries reference a meta
/// slot, those paddrs differ. Vacuously true if either side is None.
pub open spec fn meta_slot_paddr_neq(self, other: Self) -> bool

{
    self.meta_slot_paddr() is Some
        ==> (other.meta_slot_paddr() is Some
            ==> self.meta_slot_paddr().unwrap() != other.meta_slot_paddr().unwrap())
}
Source

pub proof fn relate_region_slot_owners_only( self, r0: MetaRegionOwners, r1: MetaRegionOwners, )

requires
self.relate_region(r0),
r0.slot_owners == r1.slot_owners,
ensures
self.relate_region(r1),

relate_region only uses regions.slot_owners, not regions.slots. So if two MetaRegionOwners have the same slot_owners, relate_region transfers.

Source

pub proof fn relate_region_one_slot_changed( self, r0: MetaRegionOwners, r1: MetaRegionOwners, changed_idx: usize, )

requires
self.relate_region(r0),
forall |i: usize| i != changed_idx ==> r0.slot_owners[i] == r1.slot_owners[i],
r0.slot_owners.dom() =~= r1.slot_owners.dom(),
self.meta_slot_paddr() is Some
    ==> frame_to_index(self.meta_slot_paddr().unwrap()) != changed_idx,
ensures
self.relate_region(r1),

If relate_region(r0) holds and r1 differs from r0 only at one slot index that this entry does not reference, then relate_region(r1) also holds.

Source

pub proof fn same_paddr_implies_same_path( self, other: Self, regions: MetaRegionOwners, )

requires
self.meta_slot_paddr() is Some,
self.meta_slot_paddr() == other.meta_slot_paddr(),
self.relate_region(regions),
other.relate_region(regions),
regions
    .slot_owners[frame_to_index(self.meta_slot_paddr().unwrap())]
    .path_if_in_pt is Some,
ensures
self.path == other.path,

Under relate_region + path_if_in_pt is Some, two entries with the same physical address must have the same path. Contrapositively, entries with different paths must have different paddrs. This is the fundamental paddr-uniqueness invariant: path_if_in_pt encodes the unique path for each physical address in the page table.

Source

pub proof fn relate_region_rc_value_changed( self, r0: MetaRegionOwners, r1: MetaRegionOwners, )

requires
self.relate_region(r0),
self.meta_slot_paddr() is Some,
({
    let idx = frame_to_index(self.meta_slot_paddr().unwrap());
    &&& r1.slot_owners[idx].inner_perms.ref_count.id()
        == r0.slot_owners[idx].inner_perms.ref_count.id()
    &&& r1.slot_owners[idx].inner_perms.ref_count.value()
        != crate::specs::mm::frame::meta_owners::REF_COUNT_UNUSED
    &&& r1.slot_owners[idx].inner_perms.storage
        == r0.slot_owners[idx].inner_perms.storage
    &&& r1.slot_owners[idx].inner_perms.vtable_ptr
        == r0.slot_owners[idx].inner_perms.vtable_ptr
    &&& r1.slot_owners[idx].inner_perms.in_list
        == r0.slot_owners[idx].inner_perms.in_list
    &&& r1.slot_owners[idx].path_if_in_pt == r0.slot_owners[idx].path_if_in_pt
    &&& r1.slot_owners[idx].self_addr == r0.slot_owners[idx].self_addr
    &&& r1.slot_owners[idx].raw_count == r0.slot_owners[idx].raw_count

}),
ensures
self.relate_region(r1),

relate_region is preserved when only ref_count.value() changes at this entry’s slot.

Source

pub proof fn nodes_different_paths_different_addrs( self, other: Self, regions: MetaRegionOwners, )

requires
self.is_node(),
other.is_node(),
self.relate_region(regions),
self.meta_slot_paddr() is Some
    ==> regions
        .slot_owners[frame_to_index(self.meta_slot_paddr().unwrap())]
        .path_if_in_pt is Some,
other.meta_slot_paddr() is Some
    ==> regions
        .slot_owners[frame_to_index(other.meta_slot_paddr().unwrap())]
        .path_if_in_pt is Some,
other.relate_region(regions),
self.path != other.path,
ensures
self.node.unwrap().meta_perm.addr() != other.node.unwrap().meta_perm.addr(),

Two nodes satisfying relate_region with the same regions and with different paths must have different meta-slot addresses (the contrapositive of paddr-uniqueness).

Source§

impl<C: PageTableConfig> EntryOwner<C>

Source

/// Structural invariant, without the `!in_scope` requirement of `inv`.
/// The three payload variants (node / frame / locked) are mutually exclusive
/// and each excludes `absent`; a frame payload additionally carries alignment
/// and bounds facts for its mapped range; the path is well-formed.
pub open spec fn inv_base(self) -> bool

{
    &&& self.node is Some
        ==> {
            &&& self.frame is None
            &&& self.locked is None
            &&& self.node.unwrap().inv()
            &&& !self.absent

        }
    &&& self.frame is Some
        ==> {
            &&& self.node is None
            &&& self.locked is None
            &&& !self.absent
            // Frame is page-aligned, sized for the parent level, aligned to
            // that size, and the whole mapped range stays below MAX_PADDR.
            &&& self.frame.unwrap().mapped_pa % PAGE_SIZE == 0
            &&& self.frame.unwrap().mapped_pa < MAX_PADDR
            &&& self.frame.unwrap().size == page_size(self.parent_level)
            &&& self.frame.unwrap().mapped_pa % page_size(self.parent_level) == 0
            &&& self.frame.unwrap().mapped_pa + page_size(self.parent_level) <= MAX_PADDR

        }
    &&& self.locked is Some
        ==> {
            &&& self.frame is None
            &&& self.node is None
            &&& !self.absent

        }
    &&& self.path.inv()

}

Structural invariant without !in_scope. Used by Child::invariants for entries that have been taken out of the tree (in_scope == true).

Trait Implementations§

Source§

impl<C: PageTableConfig> Inv for EntryOwner<C>

Source§

/// Full invariant: the structural invariant plus `!in_scope`, i.e. it holds
/// for entries currently stored in the tree. Entries taken out of the tree
/// satisfy only `inv_base`.
open spec fn inv(self) -> bool

{
    &&& !self.in_scope
    &&& self.inv_base()

}
Source§

impl<C: PageTableConfig> InvView for EntryOwner<C>

Source§

impl<C: PageTableConfig, const L: usize> TreeNodeValue<L> for EntryOwner<C>

Source§

/// Default tree-node value at tree level `lv`: an absent, out-of-scope entry
/// with an empty path. The parent paging level is derived from the tree
/// level as `INC_LEVELS - lv` — presumably tree levels count downward from
/// the root while paging levels count upward; TODO confirm against INC_LEVELS.
open spec fn default(lv: nat) -> Self

{
    Self {
        in_scope: false,
        path: TreePath::new(Seq::empty()),
        parent_level: (INC_LEVELS - lv) as PagingLevel,
        node: None,
        frame: None,
        locked: None,
        absent: true,
    }
}
Source§

proof fn default_preserves_inv()

Source§

/// Level-aware invariant: node entries may only appear strictly above the
/// last tree level (NOTE(review): assumes `L` is the total number of tree
/// levels — confirm against the TreeNodeValue trait).
open spec fn la_inv(self, lv: nat) -> bool

{ self.is_node() ==> lv < L - 1 }
Source§

proof fn default_preserves_la_inv()

Source§

/// Parent/child relation in the ghost tree: a node entry must have a child at
/// every index `i`, whose path extends this entry's path by `i`, whose path
/// length is one more than the node's tree level, and which matches the i-th
/// concrete child PTE at the node's paging level. Non-node entries have no
/// children.
open spec fn rel_children(self, i: int, child: Option<Self>) -> bool

{
    if self.is_node() {
        &&& child is Some
        &&& child.unwrap().path.len() == self.node.unwrap().tree_level + 1
        &&& child
            .unwrap()
            .match_pte(
                self.node.unwrap().children_perm.value()[i],
                self.node.unwrap().level,
            )
        &&& child.unwrap().path == self.path.push_tail(i as usize)

    } else {
        &&& child is None

    }
}
Source§

proof fn default_preserves_rel_children(self, lv: nat)

Source§

impl<C: PageTableConfig> View for EntryOwner<C>

Source§

/// Abstract view of this entry, checked in order: frame -> Leaf, node ->
/// Intermediate, locked -> LockedSubtree, otherwise Absent. The mapped VA is
/// derived from the tree path and the level from the path length.
open spec fn view(&self) -> <Self as View>::V

{
    if let Some(frame) = self.frame {
        EntryView::Leaf {
            leaf: LeafPageTableEntryView {
                map_va: vaddr(self.path) as int,
                map_to_pa: frame.mapped_pa as int,
                level: self.path.len() as u8,
                prop: frame.prop,
                phantom: PhantomData,
            },
        }
    } else if let Some(node) = self.node {
        EntryView::Intermediate {
            node: IntermediatePageTableEntryView {
                // The node's target PA is recovered from its meta-slot address.
                map_va: vaddr(self.path) as int,
                map_to_pa: meta_to_frame(node.meta_perm.addr()) as int,
                level: self.path.len() as u8,
                phantom: PhantomData,
            },
        }
    } else if let Some(view) = self.locked {
        EntryView::LockedSubtree {
            views: view@,
        }
    } else {
        EntryView::Absent
    }
}
Source§

type V = EntryView<C>

Auto Trait Implementations§

§

impl<C> Freeze for EntryOwner<C>

§

impl<C> !RefUnwindSafe for EntryOwner<C>

§

impl<C> Send for EntryOwner<C>

§

impl<C> Sync for EntryOwner<C>

§

impl<C> Unpin for EntryOwner<C>
where <C as PageTableConfig>::E: Unpin, C: Unpin,

§

impl<C> UnwindSafe for EntryOwner<C>

Blanket Implementations§

Source§

impl<T> Any for T
where T: 'static + ?Sized,

Source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
Source§

impl<T> Borrow<T> for T
where T: ?Sized,

Source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
Source§

impl<T> BorrowMut<T> for T
where T: ?Sized,

Source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
Source§

impl<T> From<T> for T

Source§

fn from(t: T) -> T

Returns the argument unchanged.

§

impl<T, VERUS_SPEC__A> FromSpec<T> for VERUS_SPEC__A
where VERUS_SPEC__A: From<T>,

§

fn obeys_from_spec() -> bool

§

fn from_spec(v: T) -> VERUS_SPEC__A

Source§

impl<T, U> Into<U> for T
where U: From<T>,

Source§

fn into(self) -> U

Calls U::from(self).

That is, this conversion is whatever the implementation of From<T> for U chooses to do.

§

impl<T, VERUS_SPEC__A> IntoSpec<T> for VERUS_SPEC__A
where VERUS_SPEC__A: Into<T>,

§

fn obeys_into_spec() -> bool

§

fn into_spec(self) -> T

§

impl<T, U> IntoSpecImpl<U> for T
where U: From<T>,

§

fn obeys_into_spec() -> bool

§

fn into_spec(self) -> U

Source§

impl<T, U> TryFrom<U> for T
where U: Into<T>,

Source§

type Error = Infallible

The type returned in the event of a conversion error.
Source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
§

impl<T, VERUS_SPEC__A> TryFromSpec<T> for VERUS_SPEC__A
where VERUS_SPEC__A: TryFrom<T>,

§

fn obeys_try_from_spec() -> bool

§

fn try_from_spec( v: T, ) -> Result<VERUS_SPEC__A, <VERUS_SPEC__A as TryFrom<T>>::Error>

Source§

impl<T, U> TryInto<U> for T
where U: TryFrom<T>,

Source§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
Source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
§

impl<T, VERUS_SPEC__A> TryIntoSpec<T> for VERUS_SPEC__A
where VERUS_SPEC__A: TryInto<T>,

§

fn obeys_try_into_spec() -> bool

§

fn try_into_spec(self) -> Result<T, <VERUS_SPEC__A as TryInto<T>>::Error>

§

impl<T, U> TryIntoSpecImpl<U> for T
where U: TryFrom<T>,

§

fn obeys_try_into_spec() -> bool

§

fn try_into_spec(self) -> Result<U, <U as TryFrom<T>>::Error>