Skip to main content

EntryOwner

Struct EntryOwner 

Source
// Ghost ownership record for a single page-table entry slot (Verus spec type).
// Exactly one of `node` / `frame` / `locked` / `absent` describes the entry
// (mutual exclusion is enforced by `inv_base`, not by the type itself).
pub struct EntryOwner<C: PageTableConfig> {
    // Present iff this entry points to a child page-table node.
    pub node: Option<NodeOwner<C>>,
    // Present iff this entry maps a (possibly huge) frame.
    pub frame: Option<FrameEntryOwner>,
    // Present iff the subtree under this entry is locked; carries its ghost views.
    pub locked: Option<Ghost<Seq<FrameView<C>>>>,
    // True iff the entry is absent (no node, frame, or locked subtree).
    pub absent: bool,
    // True while the owner has been taken out of the tree; `inv()` requires
    // `!in_scope` for entries that still sit in the tree.
    pub in_scope: bool,
    // Tree path from the root to this entry.
    pub path: TreePath<NR_ENTRIES>,
    // Paging level of the parent node containing this entry
    // (for node entries, `node.level + 1` — see `new_node_spec`).
    pub parent_level: PagingLevel,
}

Fields§

- `node: Option<NodeOwner<C>>`
- `frame: Option<FrameEntryOwner>`
- `locked: Option<Ghost<Seq<FrameView<C>>>>`
- `absent: bool`
- `in_scope: bool`
- `path: TreePath<NR_ENTRIES>`
- `parent_level: PagingLevel`

Implementations§

Source§

impl<C: PageTableConfig> EntryOwner<C>

Source

pub open spec fn from_pte_regions_spec( self, regions: MetaRegionOwners, ) -> MetaRegionOwners

// Spec of the meta-region transition when this owner is reconstructed from a
// raw PTE: for a node entry, the node's meta-slot PointsTo is deposited back
// into `regions.slots` and the slot's `raw_count` is reset to 0 (the expected
// count for an in-scope owner, cf. `expected_raw_count`). Non-node entries
// leave `regions` unchanged.
{
    if self.is_node() {
        // Index of this node's meta slot in the region maps.
        let index = frame_to_index(self.meta_slot_paddr().unwrap());
        let old_slot = regions.slot_owners[index];
        // Keep every other field of the slot owner; only zero raw_count.
        let new_slot = MetaSlotOwner {
            raw_count: 0usize,
            ..old_slot
        };
        MetaRegionOwners {
            slots: regions.slots.insert(index, self.node.unwrap().meta_perm.points_to),
            slot_owners: regions.slot_owners.insert(index, new_slot),
        }
    } else {
        // Frames / absent / locked entries hold no meta-slot PointsTo to return.
        regions
    }
}
Source

pub open spec fn into_pte_regions_spec( self, regions: MetaRegionOwners, ) -> MetaRegionOwners

// Dual of `from_pte_regions_spec`: when this owner is lowered into a raw PTE,
// a node entry's slot gets its `raw_count` bumped by one; `regions.slots` is
// left untouched. Non-node entries leave `regions` unchanged.
{
    if self.is_node() {
        let index = frame_to_index(self.meta_slot_paddr().unwrap());
        let old_slot = regions.slot_owners[index];
        // Increment only raw_count; all other slot-owner fields are preserved.
        let new_slot = MetaSlotOwner {
            raw_count: (old_slot.raw_count + 1) as usize,
            ..old_slot
        };
        // NOTE(review): `..regions` is redundant if MetaRegionOwners has only
        // these two fields (the sibling `from_pte_regions_spec` constructor
        // omits it) — harmless either way.
        MetaRegionOwners {
            slots: regions.slots,
            slot_owners: regions.slot_owners.insert(index, new_slot),
            ..regions
        }
    } else {
        regions
    }
}
Source

pub open spec fn into_pte_owner_spec(self) -> EntryOwner<C>

// The owner after being lowered into a raw PTE: identical except it is no
// longer in scope (back in the tree). Dual of `from_pte_owner_spec`.
{
    EntryOwner {
        in_scope: false,
        ..self
    }
}
Source

pub open spec fn from_pte_owner_spec(self) -> EntryOwner<C>

// The owner after being reconstructed from a raw PTE: identical except it is
// marked in scope (taken out of the tree). Dual of `into_pte_owner_spec`.
{
    EntryOwner {
        in_scope: true,
        ..self
    }
}
Source

pub open spec fn pte_invariants(self, pte: C::E, regions: MetaRegionOwners) -> bool

// Combined well-formedness of the (owner, PTE, regions) triple: the owner's
// own invariant, the regions invariant, agreement with the concrete PTE at
// this entry's parent level, soundness against the meta regions, and the
// entry not being taken out of the tree (`!in_scope`).
{
    &&& self.inv()
    &&& regions.inv()
    &&& self.match_pte(pte, self.parent_level)
    &&& self.metaregion_sound(regions)
    &&& !self.in_scope

}

This is equivalent to the other invariant relations, combining the `inv` predicates for each object with the well-formedness relations between them.

Source§

impl<C: PageTableConfig> EntryOwner<C>

Source

// True iff this entry owns a child page-table node.
pub open spec fn is_node(self) -> bool

{ self.node is Some }
Source

// True iff this entry maps a frame.
pub open spec fn is_frame(self) -> bool

{ self.frame is Some }
Source

// True iff this entry holds a locked subtree's ghost views.
pub open spec fn is_locked(self) -> bool

{ self.locked is Some }
Source

// True iff this entry is marked absent.
pub open spec fn is_absent(self) -> bool

{ self.absent }
Source

pub open spec fn new_absent_spec( path: TreePath<NR_ENTRIES>, parent_level: PagingLevel, ) -> Self

// A fresh absent entry at `path` under `parent_level`, created in scope.
{
    EntryOwner {
        node: None,
        frame: None,
        locked: None,
        absent: true,
        in_scope: true,
        path,
        parent_level,
    }
}
Source

pub open spec fn new_frame_spec( paddr: Paddr, path: TreePath<NR_ENTRIES>, parent_level: PagingLevel, prop: PageProperty, ) -> Self

// A fresh frame entry mapping `paddr` with properties `prop`, created in
// scope. The frame size is determined by the parent level (huge pages when
// parent_level > 1).
{
    EntryOwner {
        node: None,
        frame: Some(FrameEntryOwner {
            mapped_pa: paddr,
            size: page_size(parent_level),
            prop,
        }),
        locked: None,
        absent: false,
        in_scope: true,
        path,
        parent_level,
    }
}
Source

pub open spec fn new_node_spec(node: NodeOwner<C>, path: TreePath<NR_ENTRIES>) -> Self

// A fresh node entry owning `node`, created in scope. The parent level is one
// above the child node's own level.
{
    EntryOwner {
        node: Some(node),
        frame: None,
        locked: None,
        absent: false,
        in_scope: true,
        path,
        parent_level: (node.level + 1) as PagingLevel,
    }
}
Source

pub proof fn new_absent(path: TreePath<NR_ENTRIES>, parent_level: PagingLevel) -> tracked Self

returns
Self::new_absent_spec(path, parent_level),
Source

pub proof fn new_frame( paddr: Paddr, path: TreePath<NR_ENTRIES>, parent_level: PagingLevel, prop: PageProperty, ) -> tracked Self

returns
Self::new_frame_spec(paddr, path, parent_level, prop),
Source

pub proof fn new_node(node: NodeOwner<C>, path: TreePath<NR_ENTRIES>) -> tracked Self

returns
Self::new_node_spec(node, path),
Source

pub proof fn new_untracked_frame( paddr: Paddr, parent_level: PagingLevel, prop: PageProperty, ) -> tracked res : Self

requires
paddr % PAGE_SIZE == 0,
paddr < MAX_PADDR,
1 <= parent_level,
parent_level <= NR_LEVELS,
ensures
res.is_frame(),
res.frame.unwrap().mapped_pa == paddr,
res.frame.unwrap().prop == prop,
res.frame.unwrap().size == page_size(parent_level),
res.parent_level == parent_level,
res.path.inv(),
res.in_scope,
res.inv_base(),
crate::mm::page_table::Child::<C>::Frame(paddr, parent_level, prop).wf(res),

Creates a ghost entry owner for mapping an untracked (device memory) frame. Unlike new_frame, this does not consume a slot permission from the meta region, since device memory PAs are outside the tracked frame allocator. The actual mapping correctness is guaranteed by the caller’s unsafe contract.

The requires reflect properties guaranteed by collect_largest_pages postconditions, so this axiom is only ever called with values that satisfy them.

Source

pub open spec fn match_pte(self, pte: C::E, parent_level: PagingLevel) -> bool

// Agreement between this ghost owner and the concrete PTE at `parent_level`.
// Each `&&&` bullet below (after the two alignment facts) is an implication
// guarded by the PTE's present/last status, covering the three entry kinds.
{
    // The PTE's target address is page-aligned and within the tracked range.
    &&& pte.paddr() % PAGE_SIZE == 0
    &&& pte.paddr() < MAX_PADDR
    // Non-present PTE implies an absent owner (and, above level 1, the PTE
    // must not claim to be a leaf).
    &&& !pte.is_present()
        ==> {
            &&& self.is_absent()
            &&& parent_level > 1 ==> !pte.is_last(parent_level)

        }
    // Present non-leaf PTE implies a node owner whose meta-slot address maps
    // back to the PTE's target frame.
    &&& pte.is_present() && !pte.is_last(parent_level)
        ==> {
            &&& self.is_node()
            &&& meta_to_frame(self.node.unwrap().meta_perm.addr()) == pte.paddr()

        }
    // Present leaf PTE implies a frame owner with matching PA and properties.
    &&& pte.is_present() && pte.is_last(parent_level)
        ==> {
            &&& self.is_frame()
            &&& self.frame.unwrap().mapped_pa == pte.paddr()
            &&& self.frame.unwrap().prop == pte.prop()

        }

}
Source

pub proof fn absent_match_pte(owner: Self, pte: C::E, parent_level: PagingLevel)

requires
owner.is_absent(),
pte == C::E::new_absent_spec(),
pte.paddr() % PAGE_SIZE == 0,
pte.paddr() < MAX_PADDR,
ensures
owner.match_pte(pte, parent_level),

When owner is absent and pte is the absent PTE with valid paddr, match_pte holds.

Source

pub proof fn last_pte_implies_frame_match(self, pte: C::E, parent_level: PagingLevel)

requires
self.inv(),
self.match_pte(pte, parent_level),
1 < parent_level,
pte.is_last(parent_level),
ensures
self.is_frame(),
self.frame.unwrap().mapped_pa == pte.paddr(),
self.frame.unwrap().prop == pte.prop(),
Source

pub proof fn huge_frame_split_child_at(self, regions: MetaRegionOwners, idx: usize)

requires
self.inv(),
self.is_frame(),
regions.inv(),
1 < self.parent_level < NR_LEVELS,
idx < NR_ENTRIES,
ensures
self.frame.unwrap().mapped_pa + idx * page_size((self.parent_level - 1) as PagingLevel)
    < MAX_PADDR,
((self.frame.unwrap().mapped_pa
    + idx * page_size((self.parent_level - 1) as PagingLevel)) as Paddr)
    % page_size((self.parent_level - 1) as PagingLevel) == 0,
((self.frame.unwrap().mapped_pa
    + idx * page_size((self.parent_level - 1) as PagingLevel)) as Paddr)
    + page_size((self.parent_level - 1) as PagingLevel) <= MAX_PADDR,
((self.frame.unwrap().mapped_pa
    + idx * page_size((self.parent_level - 1) as PagingLevel)) as Paddr) % PAGE_SIZE == 0,
Source

// Raw count expected in this entry's meta slot: 0 while the owner is in scope
// (taken out of the tree), 1 while it sits in the tree. Used by
// `metaregion_sound` for node entries.
pub open spec fn expected_raw_count(self) -> usize

{ if self.in_scope { 0 } else { 1 } }
Source

pub proof fn frame_sub_pages_valid_preserved_at_own_slot( self, r0: MetaRegionOwners, r1: MetaRegionOwners, )

requires
self.inv(),
r0.inv(),
self.is_frame(),
self.parent_level <= NR_LEVELS,
self.frame_sub_pages_valid(r0),
r0.slots == r1.slots,
r0.slot_owners.dom() =~= r1.slot_owners.dom(),
forall |i: usize| {
    i != frame_to_index(self.meta_slot_paddr().unwrap())
        && r0.slot_owners.contains_key(i) ==> r0.slot_owners[i] == r1.slot_owners[i]
},
ensures
self.frame_sub_pages_valid(r1),

Helper: sub-page validity is preserved when the only slot that changed is the frame’s own slot (and slots map and other slot owners are unchanged).

Source

pub open spec fn frame_sub_pages_valid(self, regions: MetaRegionOwners) -> bool

// For huge frames (parent_level > 1): every 4KB sub-page of the mapped range
// except the first (j = 0, which coincides with the frame's own slot) must
// have its slot present in `regions.slots` with an in-use, positive ref count.
// Vacuously true for non-frames and level-1 frames.
{
    self.is_frame() && self.parent_level > 1
        ==> {
            let pa = self.frame.unwrap().mapped_pa;
            // Number of 4KB pages covered by this huge frame.
            let nr_pages = page_size(self.parent_level) / PAGE_SIZE;
            forall |j: usize| {
                0 < j < nr_pages
                    ==> {
                        let sub_idx = frame_to_index((pa + j * PAGE_SIZE) as usize);
                        &&& regions.slots.contains_key(sub_idx)
                        &&& regions.slot_owners[sub_idx].inner_perms.ref_count.value()
                            != REF_COUNT_UNUSED
                        &&& regions.slot_owners[sub_idx].inner_perms.ref_count.value()
                            > 0

                    }
            }
        }
}

Sub-page slot validity for huge frames (fine-grained: all 4KB pages within).

When a frame at this entry has parent_level > 1, it is a huge page covering page_size(parent_level) bytes. Every 4KB sub-page within this range (excluding the j = 0 case which coincides with the frame’s own slot) must be allocated (in the free pool) with rc != UNUSED.

The fine-grained form (over j * PAGE_SIZE rather than j * page_size(L-1)) is what enables the recursive split case in split_if_mapped_huge: when splitting a 1GB frame into 2MB sub-frames, each 2MB sub-frame’s own sub-page validity (over its 511 4KB sub-sub-pages) follows from the 1GB frame’s fine-grained validity over the corresponding subrange of indices.

Source

pub open spec fn metaregion_sound(self, regions: MetaRegionOwners) -> bool

// Soundness of this entry against the meta-region ghost state, split by kind.
{
    if self.is_node() {
        let idx = frame_to_index(self.meta_slot_paddr().unwrap());
        // Node slot: in use, raw_count matching our scope state, back-pointer
        // equal to our PointsTo address, slot value well-formed, and exactly
        // our path (a singleton set) recorded for this slot.
        &&& regions.slot_owners[idx].inner_perms.ref_count.value() != REF_COUNT_UNUSED
        &&& regions.slot_owners[idx].raw_count == self.expected_raw_count()
        &&& regions.slot_owners[idx].self_addr == self.node.unwrap().meta_perm.addr()
        &&& self.node.unwrap().meta_perm.points_to.value().wf(regions.slot_owners[idx])
        &&& regions.slot_owners[idx].paths_in_pt == set![self.path]

    } else if self.is_frame() {
        let idx = frame_to_index(self.meta_slot_paddr().unwrap());
        // Frame slot: read from the shared pool (`regions.slots`) rather than
        // owned — present, at the right address, initialized and well-formed,
        // in use with a positive ref count, our path recorded (possibly among
        // others), plus sub-page validity for huge frames.
        &&& regions.slots.contains_key(idx)
        &&& regions.slots[idx].addr() == meta_addr(idx)
        &&& regions.slots[idx].is_init()
        &&& regions.slots[idx].value().wf(regions.slot_owners[idx])
        &&& regions.slot_owners[idx].inner_perms.ref_count.value() != REF_COUNT_UNUSED
        &&& regions.slot_owners[idx].inner_perms.ref_count.value() > 0
        &&& regions.slot_owners[idx].paths_in_pt.contains(self.path)
        &&& self.frame_sub_pages_valid(regions)

    } else {
        // Absent / locked entries impose no meta-region obligations.
        true
    }
}
Source

pub proof fn active_entry_not_in_free_pool( entry: Self, regions: MetaRegionOwners, free_idx: usize, )

requires
regions.inv(),
entry.inv(),
entry.is_node(),
entry.metaregion_sound(regions),
regions.slots.contains_key(free_idx),
ensures
frame_to_index(entry.meta_slot_paddr().unwrap()) != free_idx,

PointsTo uniqueness: if meta slot free_idx is in the free pool (regions.slots), no active page table NODE entry can own a PointsTo at the same slot address. Justified by Verus’s linear ownership of PointsTo<MetaSlot>: the node’s PointsTo is either in regions.slots OR held by the node, never both. (Frames no longer own their PointsTo — they read from regions.slots directly.)

Source

pub proof fn get_path(self) -> tracked TreePath<NR_ENTRIES>

returns
self.path,
Source

pub open spec fn meta_slot_paddr(self) -> Option<Paddr>

// Physical address keying this entry's meta slot: for nodes, derived from the
// meta PointsTo address; for frames, the mapped PA; None for absent/locked.
{
    if self.is_node() {
        Some(meta_to_frame(self.node.unwrap().meta_perm.addr()))
    } else if self.is_frame() {
        Some(self.frame.unwrap().mapped_pa)
    } else {
        None
    }
}
Source

pub open spec fn meta_slot_paddr_neq(self, other: Self) -> bool

// The two entries key distinct meta slots. Vacuously true when either side
// has no meta slot paddr (absent/locked entries).
{
    self.meta_slot_paddr() is Some
        ==> (other.meta_slot_paddr() is Some
            ==> self.meta_slot_paddr().unwrap() != other.meta_slot_paddr().unwrap())
}
Source

pub proof fn metaregion_sound_slot_owners_only( self, r0: MetaRegionOwners, r1: MetaRegionOwners, )

requires
self.metaregion_sound(r0),
r0.slot_owners == r1.slot_owners,
forall |k: usize| r0.slots.contains_key(k) ==> #[trigger] r1.slots.contains_key(k),
forall |k: usize| r0.slots.contains_key(k) ==> r0.slots[k] == #[trigger] r1.slots[k],
ensures
self.metaregion_sound(r1),

metaregion_sound transfers when slot_owners matches and slots is a superset. For nodes: only slot_owners matters. For frames: slots.contains_key and slots[idx] must be preserved, which holds when slots is a superset with values unchanged.

Source

pub proof fn metaregion_sound_one_slot_changed( self, r0: MetaRegionOwners, r1: MetaRegionOwners, changed_idx: usize, )

requires
self.metaregion_sound(r0),
forall |i: usize| i != changed_idx ==> r0.slot_owners[i] == r1.slot_owners[i],
r0.slot_owners.dom() =~= r1.slot_owners.dom(),
forall |k: usize| r0.slots.contains_key(k) ==> #[trigger] r1.slots.contains_key(k),
forall |k: usize| r0.slots.contains_key(k) ==> r0.slots[k] == #[trigger] r1.slots[k],
self.meta_slot_paddr() is Some
    ==> frame_to_index(self.meta_slot_paddr().unwrap()) != changed_idx,
self.is_frame() && self.parent_level > 1
    ==> {
        let pa = self.frame.unwrap().mapped_pa;
        let nr_pages = page_size(self.parent_level) / PAGE_SIZE;
        forall |j: usize| {
            0 < j < nr_pages
                ==> {
                    let sub_idx = #[trigger]
                    frame_to_index((pa + j * PAGE_SIZE) as usize);
                    sub_idx != changed_idx
                        || (r1.slots.contains_key(sub_idx)
                            && r1.slot_owners[sub_idx].inner_perms.ref_count.value()
                                != REF_COUNT_UNUSED
                            && r1.slot_owners[sub_idx].inner_perms.ref_count.value() > 0)
                }
        }
    },
ensures
self.metaregion_sound(r1),

If metaregion_sound(r0) holds and r1 differs from r0 only at one slot index that this entry does not reference, then metaregion_sound(r1) also holds.

Source

pub proof fn metaregion_sound_paths_in_pt_changed( self, r0: MetaRegionOwners, r1: MetaRegionOwners, changed_idx: usize, )

requires
self.inv(),
r0.inv(),
self.metaregion_sound(r0),
r0.slots == r1.slots,
r0.slot_owners.dom() =~= r1.slot_owners.dom(),
forall |i: usize| i != changed_idx ==> r0.slot_owners[i] == r1.slot_owners[i],
r1.slot_owners[changed_idx].inner_perms == r0.slot_owners[changed_idx].inner_perms,
r1.slot_owners[changed_idx].self_addr == r0.slot_owners[changed_idx].self_addr,
r1.slot_owners[changed_idx].raw_count == r0.slot_owners[changed_idx].raw_count,
r1.slot_owners[changed_idx].usage == r0.slot_owners[changed_idx].usage,
self.is_node() && self.meta_slot_paddr() is Some
    && frame_to_index(self.meta_slot_paddr().unwrap()) == changed_idx
    ==> r1.slot_owners[changed_idx].paths_in_pt == set![self.path],
self.is_frame() && self.meta_slot_paddr() is Some
    && frame_to_index(self.meta_slot_paddr().unwrap()) == changed_idx
    ==> r1.slot_owners[changed_idx].paths_in_pt.contains(self.path),
self.is_frame() && self.parent_level > 1
    ==> {
        let pa = self.frame.unwrap().mapped_pa;
        let sub_level = (self.parent_level - 1) as PagingLevel;
        forall |j: usize| {
            0 < j < NR_ENTRIES
                ==> {
                    let sub_idx = #[trigger]
                    frame_to_index((pa + j * page_size(sub_level)) as usize);
                    sub_idx != changed_idx
                        || r1.slot_owners[changed_idx].paths_in_pt.is_empty()
                }
        }
    },
ensures
self.metaregion_sound(r1),

metaregion_sound is preserved when only paths_in_pt changes at a slot, slots is unchanged, and the new paths_in_pt is correct for any node at that index.

Source

pub proof fn same_paddr_implies_same_path( self, other: Self, regions: MetaRegionOwners, )

requires
self.meta_slot_paddr() is Some,
self.meta_slot_paddr() == other.meta_slot_paddr(),
regions.slot_owners[frame_to_index(self.meta_slot_paddr().unwrap())].paths_in_pt
    == set![self.path],
regions.slot_owners[frame_to_index(self.meta_slot_paddr().unwrap())].paths_in_pt
    == set![other.path],
ensures
self.path == other.path,

Two entries with the same physical address whose paths_in_pt matches their respective paths must have the same path.

Source

pub proof fn metaregion_sound_rc_value_changed( self, r0: MetaRegionOwners, r1: MetaRegionOwners, )

requires
self.inv(),
r0.inv(),
self.metaregion_sound(r0),
self.meta_slot_paddr() is Some,
r0.slots == r1.slots,
({
    let idx = frame_to_index(self.meta_slot_paddr().unwrap());
    &&& r1.slot_owners[idx].inner_perms.ref_count.id()
        == r0.slot_owners[idx].inner_perms.ref_count.id()
    &&& r1.slot_owners[idx].inner_perms.ref_count.value()
        != crate::specs::mm::frame::meta_owners::REF_COUNT_UNUSED
    &&& r1.slot_owners[idx].inner_perms.ref_count.value() > 0
    &&& r1.slot_owners[idx].inner_perms.storage
        == r0.slot_owners[idx].inner_perms.storage
    &&& r1.slot_owners[idx].inner_perms.vtable_ptr
        == r0.slot_owners[idx].inner_perms.vtable_ptr
    &&& r1.slot_owners[idx].inner_perms.in_list
        == r0.slot_owners[idx].inner_perms.in_list
    &&& r1.slot_owners[idx].self_addr == r0.slot_owners[idx].self_addr
    &&& r1.slot_owners[idx].raw_count == r0.slot_owners[idx].raw_count
    &&& r1.slot_owners[idx].paths_in_pt == r0.slot_owners[idx].paths_in_pt

}),
forall |i: usize| {
    i != frame_to_index(self.meta_slot_paddr().unwrap())
        && r0.slot_owners.contains_key(i) ==> r0.slot_owners[i] == r1.slot_owners[i]
},
ensures
self.metaregion_sound(r1),

metaregion_sound is preserved when only ref_count.value() changes at this entry’s slot and slots is unchanged.

Source

pub proof fn nodes_different_paths_different_addrs( self, other: Self, regions: MetaRegionOwners, )

requires
self.is_node(),
other.is_node(),
self.meta_slot_paddr() is Some
    ==> regions.slot_owners[frame_to_index(self.meta_slot_paddr().unwrap())].paths_in_pt
        == set![self.path],
other.meta_slot_paddr() is Some
    ==> regions.slot_owners[frame_to_index(other.meta_slot_paddr().unwrap())].paths_in_pt
        == set![other.path],
self.path != other.path,
ensures
self.node.unwrap().meta_perm.addr() != other.node.unwrap().meta_perm.addr(),

Two nodes whose paths_in_pt matches their paths have different addresses if they have different paths.

Source

pub proof fn nodes_different_path_lengths_neq_slot( self, other: Self, regions: MetaRegionOwners, )

requires
self.is_node(),
other.is_node(),
self.metaregion_sound(regions),
other.metaregion_sound(regions),
self.path.len() != other.path.len(),
ensures
self.meta_slot_paddr_neq(other),

Two node entries with metaregion_sound under the same regions cannot share a meta slot paddr if their paths have different lengths.

For nodes, metaregion_sound requires paths_in_pt == set![path] (singleton). Equal slot indices would force equal singleton sets, hence equal paths — contradicting the length difference.

Source§

impl<C: PageTableConfig> EntryOwner<C>

Source

pub open spec fn inv_base(self) -> bool

// Structural invariant, independent of `in_scope` (cf. `inv`, which adds
// `!in_scope`). Enforces mutual exclusion of the node/frame/locked variants
// and per-variant well-formedness.
{
    // Node entries: exclusive, not absent, node well-formed, and the parent
    // sits exactly one level above the child node.
    &&& self.node is Some
        ==> {
            &&& self.frame is None
            &&& self.locked is None
            &&& self.node.unwrap().inv()
            &&& !self.absent
            &&& self.parent_level == self.node.unwrap().level + 1

        }
    // Frame entries: exclusive, not absent, level in range, and the mapped
    // range page-aligned, naturally aligned to its own size, and fully within
    // the tracked physical address space.
    &&& self.frame is Some
        ==> {
            &&& self.node is None
            &&& self.locked is None
            &&& !self.absent
            &&& 1 <= self.parent_level < NR_LEVELS
            &&& self.frame.unwrap().mapped_pa % PAGE_SIZE == 0
            &&& self.frame.unwrap().mapped_pa < MAX_PADDR
            &&& self.frame.unwrap().size == page_size(self.parent_level)
            &&& self.frame.unwrap().mapped_pa % page_size(self.parent_level) == 0
            &&& self.frame.unwrap().mapped_pa + page_size(self.parent_level) <= MAX_PADDR

        }
    // Locked entries: exclusive and not absent.
    &&& self.locked is Some
        ==> {
            &&& self.frame is None
            &&& self.node is None
            &&& !self.absent

        }
    // The path itself is well-formed in every case.
    &&& self.path.inv()

}

Structural invariant without !in_scope. Used by Child::invariants for entries that have been taken out of the tree (in_scope == true).

Source

// Copy of this entry with only the `in_scope` flag replaced.
pub open spec fn set_in_scope(self, in_scope: bool) -> Self

{ EntryOwner { in_scope, ..self } }

Trait Implementations§

Source§

impl<C: PageTableConfig> Inv for EntryOwner<C>

Source§

// Full invariant: the structural invariant plus the entry not being taken
// out of the tree.
open spec fn inv(self) -> bool

{
    &&& !self.in_scope
    &&& self.inv_base()

}
Source§

impl<C: PageTableConfig> InvView for EntryOwner<C>

Source§

impl<C: PageTableConfig, const L: usize> TreeNodeValue<L> for EntryOwner<C>

Source§

// Default (absent, out-of-scope) entry for tree level `lv`; the parent level
// is derived from the tree level via INC_LEVELS.
open spec fn default(lv: nat) -> Self

{
    Self {
        in_scope: false,
        path: TreePath::new(Seq::empty()),
        parent_level: (INC_LEVELS - lv) as PagingLevel,
        node: None,
        frame: None,
        locked: None,
        absent: true,
    }
}
Source§

proof fn default_preserves_inv()

Source§

// Level-aware invariant: node entries may only appear strictly above the
// last tree level (lv < L - 1); leaves of the tree carry no child nodes.
open spec fn la_inv(self, lv: nat) -> bool

{ self.is_node() ==> lv < L - 1 }
Source§

proof fn default_preserves_la_inv()

Source§

// No parent/child relation is imposed by this type; trivially true.
open spec fn rel_children(self, i: int, child: Option<Self>) -> bool

{ true }
Source§

proof fn default_preserves_rel_children(self, lv: nat)

Source§

impl<C: PageTableConfig> View for EntryOwner<C>

Source§

open spec fn view(&self) -> <Self as View>::V

// Abstract view of the entry, checked in order: frame leaf, intermediate
// node, locked subtree, then absent. The order is safe because `inv_base`
// makes the variants mutually exclusive.
{
    if let Some(frame) = self.frame {
        EntryView::Leaf {
            leaf: LeafPageTableEntryView {
                // Virtual address and level are both derived from the path.
                map_va: vaddr(self.path) as int,
                map_to_pa: frame.mapped_pa as int,
                level: self.path.len() as u8,
                prop: frame.prop,
                phantom: PhantomData,
            },
        }
    } else if let Some(node) = self.node {
        EntryView::Intermediate {
            node: IntermediatePageTableEntryView {
                map_va: vaddr(self.path) as int,
                // The node's target frame is recovered from its meta address.
                map_to_pa: meta_to_frame(node.meta_perm.addr()) as int,
                level: self.path.len() as u8,
                phantom: PhantomData,
            },
        }
    } else if let Some(view) = self.locked {
        EntryView::LockedSubtree {
            views: view@,
        }
    } else {
        EntryView::Absent
    }
}
Source§

type V = EntryView<C>

Auto Trait Implementations§

§

impl<C> Freeze for EntryOwner<C>

§

impl<C> !RefUnwindSafe for EntryOwner<C>

§

impl<C> Send for EntryOwner<C>

§

impl<C> Sync for EntryOwner<C>

§

impl<C> Unpin for EntryOwner<C>
where <C as PageTableConfig>::E: Unpin, C: Unpin,

§

impl<C> UnsafeUnpin for EntryOwner<C>

§

impl<C> UnwindSafe for EntryOwner<C>

Blanket Implementations§

Source§

impl<T> Any for T
where T: 'static + ?Sized,

Source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
Source§

impl<T> Borrow<T> for T
where T: ?Sized,

Source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
Source§

impl<T> BorrowMut<T> for T
where T: ?Sized,

Source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
Source§

impl<T> From<T> for T

Source§

fn from(t: T) -> T

Returns the argument unchanged.

§

impl<T, VERUS_SPEC__A> FromSpec<T> for VERUS_SPEC__A
where VERUS_SPEC__A: From<T>,

§

fn obeys_from_spec() -> bool

§

fn from_spec(v: T) -> VERUS_SPEC__A

Source§

impl<T, U> Into<U> for T
where U: From<T>,

Source§

fn into(self) -> U

Calls U::from(self).

That is, this conversion is whatever the implementation of From<T> for U chooses to do.

§

impl<T, VERUS_SPEC__A> IntoSpec<T> for VERUS_SPEC__A
where VERUS_SPEC__A: Into<T>,

§

fn obeys_into_spec() -> bool

§

fn into_spec(self) -> T

§

impl<T, U> IntoSpecImpl<U> for T
where U: From<T>,

§

fn obeys_into_spec() -> bool

§

fn into_spec(self) -> U

Source§

impl<T, U> TryFrom<U> for T
where U: Into<T>,

Source§

type Error = Infallible

The type returned in the event of a conversion error.
Source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
§

impl<T, VERUS_SPEC__A> TryFromSpec<T> for VERUS_SPEC__A
where VERUS_SPEC__A: TryFrom<T>,

§

fn obeys_try_from_spec() -> bool

§

fn try_from_spec( v: T, ) -> Result<VERUS_SPEC__A, <VERUS_SPEC__A as TryFrom<T>>::Error>

Source§

impl<T, U> TryInto<U> for T
where U: TryFrom<T>,

Source§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
Source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
§

impl<T, VERUS_SPEC__A> TryIntoSpec<T> for VERUS_SPEC__A
where VERUS_SPEC__A: TryInto<T>,

§

fn obeys_try_into_spec() -> bool

§

fn try_into_spec(self) -> Result<T, <VERUS_SPEC__A as TryInto<T>>::Error>

§

impl<T, U> TryIntoSpecImpl<U> for T
where U: TryFrom<T>,

§

fn obeys_try_into_spec() -> bool

§

fn try_into_spec(self) -> Result<U, <U as TryFrom<T>>::Error>