CursorOwner

Struct CursorOwner 

Source
pub struct CursorOwner<'rcu, C: PageTableConfig> {
    pub level: PagingLevel,
    pub continuations: Map<int, CursorContinuation<'rcu, C>>,
    pub va: AbstractVaddr,
    pub guard_level: PagingLevel,
    pub prefix: AbstractVaddr,
    pub popped_too_high: bool,
}

Fields§

- `level: PagingLevel`
- `continuations: Map<int, CursorContinuation<'rcu, C>>`
- `va: AbstractVaddr`
- `guard_level: PagingLevel`
- `prefix: AbstractVaddr`
- `popped_too_high: bool`

Implementations§

Source§

impl<'rcu, C: PageTableConfig> CursorOwner<'rcu, C>

Source

pub open spec fn max_steps_subtree(level: usize) -> usize

{
    if level <= 1 {
        NR_ENTRIES
    } else {
        (NR_ENTRIES * (Self::max_steps_subtree((level - 1) as usize) + 1)) as usize
    }
}

The number of steps it will take to walk through every node of a full page table subtree at level `level`.

Source

pub open spec fn max_steps_partial(self, level: usize) -> usize

{
    if level == NR_LEVELS {
        0
    } else {
        let cont = self.continuations[(level - 1) as int];
        let steps = Self::max_steps_subtree(level) * (NR_ENTRIES - cont.idx);
        let remaining_steps = self.max_steps_partial((level + 1) as usize);
        (steps + remaining_steps) as usize
    }
}

The number of steps it will take to walk through the remaining entries of the page table, starting at the given `level`.

Source

pub open spec fn max_steps(self) -> usize

{ self.max_steps_partial(self.level as usize) }
Source

pub proof fn max_steps_partial_inv(self, other: Self, level: usize)

requires
self.inv(),
other.inv(),
self.level == other.level,
self.level <= level <= NR_LEVELS,
forall |i: int| {
    self.level - 1 <= i < NR_LEVELS
        ==> self.continuations[i].idx == other.continuations[i].idx
},
ensures
self.max_steps_partial(level) == other.max_steps_partial(level),
Source

pub open spec fn push_level_owner_spec(self, guard_perm: GuardPerm<'rcu, C>) -> Self

{
    let cont = self.continuations[self.level - 1];
    let (child, cont) = cont
        .make_cont_spec(self.va.index[self.level - 2] as usize, guard_perm);
    let new_continuations = self.continuations.insert(self.level - 1, cont);
    let new_continuations = new_continuations.insert(self.level - 2, child);
    let new_level = (self.level - 1) as u8;
    Self {
        continuations: new_continuations,
        level: new_level,
        popped_too_high: false,
        ..self
    }
}
Source

pub proof fn push_level_owner_decreases_steps(self, guard_perm: GuardPerm<'rcu, C>)

requires
self.inv(),
self.level > 0,
ensures
self.push_level_owner_spec(guard_perm).max_steps() < self.max_steps(),
Source

pub proof fn push_level_owner_preserves_va(self, guard_perm: GuardPerm<'rcu, C>)

requires
self.inv(),
self.level > 1,
ensures
self.push_level_owner_spec(guard_perm).va == self.va,
self.push_level_owner_spec(guard_perm).continuations[self.level - 2].idx
    == self.va.index[self.level - 2],
Source

pub proof fn push_level_owner_preserves_mappings(self, guard_perm: GuardPerm<'rcu, C>)

requires
self.inv(),
self.level > 1,
ensures
self.push_level_owner_spec(guard_perm)@.mappings == self@.mappings,
Source

pub proof fn push_level_owner_preserves_inv(self, guard_perm: GuardPerm<'rcu, C>)

requires
self.inv(),
self.level > 1,
forall |i: int| {
    self.level - 1 <= i < NR_LEVELS
        ==> self.continuations[i].guard_perm.addr() != guard_perm.addr()
},
ensures
self.push_level_owner_spec(guard_perm).inv(),
Source

pub proof fn push_level_owner_preserves_invs( self, guard_perm: GuardPerm<'rcu, C>, regions: MetaRegionOwners, guards: Guards<'rcu, C>, )

requires
self.inv(),
self.level > 1,
self.only_current_locked(guards),
self.nodes_locked(guards),
self.relate_region(regions),
guards.lock_held(guard_perm.value().inner.inner@.ptr.addr()),
ensures
self.push_level_owner_spec(guard_perm).inv(),
self.push_level_owner_spec(guard_perm).children_not_locked(guards),
self.push_level_owner_spec(guard_perm).nodes_locked(guards),
self.push_level_owner_spec(guard_perm).relate_region(regions),
Source

pub proof fn push_level_owner(tracked &mut self, tracked guard_perm: Tracked<GuardPerm<'rcu, C>>)

requires
old(self).inv(),
old(self).level > 1,
ensures
*self == old(self).push_level_owner_spec(guard_perm@),
Source

pub open spec fn pop_level_owner_spec(self) -> (Self, GuardPerm<'rcu, C>)

{
    let child = self.continuations[self.level - 1];
    let cont = self.continuations[self.level as int];
    let (new_cont, guard_perm) = cont.restore_spec(child);
    let new_continuations = self.continuations.insert(self.level as int, new_cont);
    let new_continuations = new_continuations.remove(self.level - 1);
    let new_level = (self.level + 1) as u8;
    let popped_too_high = if new_level >= self.guard_level { true } else { false };
    (
        Self {
            continuations: new_continuations,
            level: new_level,
            popped_too_high: popped_too_high,
            ..self
        },
        guard_perm,
    )
}
Source

pub proof fn pop_level_owner_preserves_inv(self)

requires
self.inv(),
self.level < NR_LEVELS,
self.in_locked_range(),
ensures
self.pop_level_owner_spec().0.inv(),
Source

pub proof fn pop_level_owner_preserves_invs( self, guards: Guards<'rcu, C>, regions: MetaRegionOwners, )

requires
self.inv(),
self.level < NR_LEVELS,
self.in_locked_range(),
self.children_not_locked(guards),
self.nodes_locked(guards),
self.relate_region(regions),
ensures
self.pop_level_owner_spec().0.in_locked_range(),
self.pop_level_owner_spec().0.inv(),
self.pop_level_owner_spec().0.only_current_locked(guards),
self.pop_level_owner_spec().0.nodes_locked(guards),
self.pop_level_owner_spec().0.relate_region(regions),
Source

pub proof fn set_va_preserves_inv(self, new_va: AbstractVaddr)

requires
self.inv(),
new_va.inv(),
forall |i: int| self.level - 1 <= i < NR_LEVELS ==> new_va.index[i] == self.va.index[i],
forall |i: int| {
    self.guard_level - 1 <= i < NR_LEVELS ==> new_va.index[i] == self.prefix.index[i]
},
ensures
self.set_va_spec(new_va).inv(),

Update `va` to a new value that shares the same indices at levels `>= self.level`. This preserves the invariants because:

  1. The new va satisfies va.inv()
  2. The indices at levels >= level match the continuation indices
  3. in_locked_range/above_locked_range depend on va but the preconditions ensure consistency
Source

pub proof fn pop_level_owner(tracked &mut self) -> tracked guard_perm : GuardPerm<'rcu, C>

requires
old(self).inv(),
old(self).level < NR_LEVELS,
ensures
*self == old(self).pop_level_owner_spec().0,
guard_perm == old(self).pop_level_owner_spec().1,
Source

pub open spec fn move_forward_owner_spec(self) -> Self

recommends
self.inv(),
self.level < NR_LEVELS,
self.in_locked_range(),
{
    if self.index() + 1 < NR_ENTRIES {
        self.inc_index().zero_below_level()
    } else if self.level < NR_LEVELS {
        self.pop_level_owner_spec().0.move_forward_owner_spec()
    } else {
        Self {
            popped_too_high: false,
            ..self
        }
    }
}
Source

pub proof fn move_forward_increases_va(self)

requires
self.inv(),
self.level <= NR_LEVELS,
self.in_locked_range(),
ensures
self.move_forward_owner_spec().va.to_vaddr() > self.va.to_vaddr(),
Source

pub proof fn move_forward_not_popped_too_high(self)

requires
self.inv(),
self.level <= NR_LEVELS,
self.in_locked_range(),
ensures
!self.move_forward_owner_spec().popped_too_high,
Source

pub proof fn move_forward_owner_decreases_steps(self)

requires
self.inv(),
self.level <= NR_LEVELS,
ensures
self.move_forward_owner_spec().max_steps() < self.max_steps(),
Source

pub proof fn move_forward_va_is_align_up(self)

requires
self.inv(),
self.level <= NR_LEVELS,
self.in_locked_range(),
ensures
self.move_forward_owner_spec().va == self.va.align_up(self.level as int),
Source

pub proof fn move_forward_owner_preserves_mappings(self)

requires
self.inv(),
self.level > 1,
ensures
self.move_forward_owner_spec()@.mappings == self@.mappings,
Source§

impl<'rcu, C: PageTableConfig> CursorOwner<'rcu, C>

Source

pub open spec fn node_unlocked( guards: Guards<'rcu, C>, ) -> FnSpec<(EntryOwner<C>, TreePath<NR_ENTRIES>), bool>

{
    |owner: EntryOwner<C>, path: TreePath<NR_ENTRIES>| {
        owner.is_node() ==> guards.unlocked(owner.node.unwrap().meta_perm.addr())
    }
}
Source

pub open spec fn node_unlocked_except( guards: Guards<'rcu, C>, addr: usize, ) -> FnSpec<(EntryOwner<C>, TreePath<NR_ENTRIES>), bool>

{
    |owner: EntryOwner<C>, path: TreePath<NR_ENTRIES>| {
        owner.is_node()
            ==> (owner.node.unwrap().meta_perm.addr() != addr
                ==> guards.unlocked(owner.node.unwrap().meta_perm.addr()))
    }
}
Source

pub open spec fn map_full_tree( self, f: FnSpec<(EntryOwner<C>, TreePath<NR_ENTRIES>), bool>, ) -> bool

{
    forall |i: int| {
        self.level - 1 <= i < NR_LEVELS ==> { self.continuations[i].map_children(f) }
    }
}
Source

pub open spec fn map_only_children( self, f: FnSpec<(EntryOwner<C>, TreePath<NR_ENTRIES>), bool>, ) -> bool

{
    forall |i: int| {
        self.level - 1 <= i < NR_LEVELS ==> self.continuations[i].map_children(f)
    }
}
Source

pub open spec fn children_not_locked(self, guards: Guards<'rcu, C>) -> bool

{ self.map_only_children(Self::node_unlocked(guards)) }
Source

pub open spec fn only_current_locked(self, guards: Guards<'rcu, C>) -> bool

{
    self.map_only_children(
        Self::node_unlocked_except(
            guards,
            self.cur_entry_owner().node.unwrap().meta_perm.addr(),
        ),
    )
}
Source

pub proof fn never_drop_restores_children_not_locked( self, guard: PageTableGuard<'rcu, C>, guards0: Guards<'rcu, C>, guards1: Guards<'rcu, C>, )

requires
self.inv(),
self.only_current_locked(guards0),
<PageTableGuard<'rcu, C> as TrackDrop>::constructor_requires(guard, guards0),
<PageTableGuard<'rcu, C> as TrackDrop>::constructor_ensures(guard, guards0, guards1),
ensures
self.children_not_locked(guards1),
Source

pub proof fn map_children_implies( self, f: FnSpec<(EntryOwner<C>, TreePath<NR_ENTRIES>), bool>, g: FnSpec<(EntryOwner<C>, TreePath<NR_ENTRIES>), bool>, )

requires
self.inv(),
OwnerSubtree::implies(f, g),
forall |i: int| self.level - 1 <= i < NR_LEVELS ==> self.continuations[i].map_children(f),
ensures
forall |i: int| self.level - 1 <= i < NR_LEVELS ==> self.continuations[i].map_children(g),
Source

pub open spec fn nodes_locked(self, guards: Guards<'rcu, C>) -> bool

{
    forall |i: int| {
        self.level - 1 <= i < NR_LEVELS ==> { self.continuations[i].node_locked(guards) }
    }
}
Source

pub open spec fn index(self) -> usize

{ self.continuations[self.level - 1].idx }
Source

pub open spec fn inc_index(self) -> Self

{
    Self {
        continuations: self
            .continuations
            .insert(self.level - 1, self.continuations[self.level - 1].inc_index()),
        va: AbstractVaddr {
            offset: self.va.offset,
            index: self
                .va
                .index
                .insert(
                    self.level - 1,
                    self.continuations[self.level - 1].inc_index().idx as int,
                ),
        },
        popped_too_high: false,
        ..self
    }
}
Source

pub open spec fn zero_below_level_rec(self, level: PagingLevel) -> Self

{
    if self.level <= level {
        self
    } else {
        Self {
            va: AbstractVaddr {
                offset: self.va.offset,
                index: self.va.index.insert(level - 1, 0),
            },
            ..self.zero_below_level_rec((level + 1) as u8)
        }
    }
}
Source

pub open spec fn zero_below_level(self) -> Self

{ self.zero_below_level_rec(1u8) }
Source

pub proof fn zero_below_level_rec_preserves_above(self, level: PagingLevel)

ensures
forall |lv: int| {
    lv >= self.level
        ==> self.zero_below_level_rec(level).va.index[lv] == #[trigger] self.va.index[lv]
},
Source

pub proof fn zero_preserves_above(self)

ensures
forall |lv: int| {
    lv >= self.level
        ==> self.zero_below_level().va.index[lv] == #[trigger] self.va.index[lv]
},
Source

pub proof fn do_zero_below_level(tracked &mut self)

requires
old(self).inv(),
ensures
*self == old(self).zero_below_level(),
Source

pub proof fn do_inc_index(tracked &mut self)

requires
old(self).inv(),
old(self).continuations[old(self).level - 1].idx + 1 < NR_ENTRIES,
old(self).in_locked_range(),
ensures
self.inv(),
*self == old(self).inc_index(),
Source

pub proof fn inc_and_zero_increases_va(self)

requires
self.inv(),
ensures
self.inc_index().zero_below_level().va.to_vaddr() > self.va.to_vaddr(),
Source

pub proof fn zero_rec_preserves_all_but_va(self, level: PagingLevel)

ensures
self.zero_below_level_rec(level).level == self.level,
self.zero_below_level_rec(level).continuations == self.continuations,
self.zero_below_level_rec(level).guard_level == self.guard_level,
self.zero_below_level_rec(level).prefix == self.prefix,
self.zero_below_level_rec(level).popped_too_high == self.popped_too_high,
Source

pub proof fn zero_preserves_all_but_va(self)

ensures
self.zero_below_level().level == self.level,
self.zero_below_level().continuations == self.continuations,
self.zero_below_level().guard_level == self.guard_level,
self.zero_below_level().prefix == self.prefix,
self.zero_below_level().popped_too_high == self.popped_too_high,
Source

pub open spec fn cur_va(self) -> Vaddr

{ self.va.to_vaddr() }
Source

pub open spec fn cur_va_range(self) -> Range<AbstractVaddr>

{
    let start = self.va.align_down(self.level as int);
    let end = self.va.align_up(self.level as int);
    Range { start, end }
}
Source

pub proof fn cur_va_range_reflects_view(self)

requires
self.inv(),
ensures
self.cur_va_range().start.reflect(self@.query_range().start),
self.cur_va_range().end.reflect(self@.query_range().end),
Source

pub proof fn cur_va_in_subtree_range(self)

requires
self.inv(),
ensures
vaddr(self.cur_subtree().value.path) <= self.cur_va()
    < vaddr(self.cur_subtree().value.path) + page_size((self.level - 1) as PagingLevel),

The current virtual address falls within the VA range of the current subtree’s path. This follows from the invariant that `va.index` matches the continuation indices.

Source

pub proof fn cur_subtree_eq_filtered_mappings(self)

requires
self.inv(),
ensures
PageTableOwner(self.cur_subtree())@.mappings
    == self@
        .mappings
        .filter(|m: Mapping| (
            self@.cur_va <= m.va_range.start < self@.cur_va + page_size(self.level)
        )),

The mappings in the current subtree are exactly those mappings whose VA range starts within `[cur_va, cur_va + page_size(level))`. This connects the page table representation to the cursor view’s `remove_subtree` operation. At cursor level `L`, the current entry manages a VA region of size `page_size(L)`.

Source

pub proof fn subtree_va_ranges_disjoint(self, j: int)

requires
self.inv(),
0 <= j < NR_ENTRIES,
j != self.index(),
self.continuations[self.level - 1].children[j] is Some,
ensures
vaddr(self.continuations[self.level - 1].path().push_tail(j as usize))
    + page_size((self.level - 1) as PagingLevel) <= self.cur_va()
    || self.cur_va()
        < vaddr(self.continuations[self.level - 1].path().push_tail(j as usize)),

Subtrees at different indices have disjoint VA ranges.

Source

pub proof fn higher_level_children_disjoint(self, i: int, j: int)

requires
self.inv(),
self.level - 1 < i < NR_LEVELS - 1,
0 <= j < NR_ENTRIES,
j != self.continuations[i].idx,
self.continuations[i].children[j] is Some,
ensures
vaddr(self.continuations[i].path().push_tail(j as usize)) + page_size(i as PagingLevel)
    <= self.cur_va()
    || self.cur_va() < vaddr(self.continuations[i].path().push_tail(j as usize)),

Children of higher-level continuations have VA ranges that don’t include cur_va, because cur_va’s indices at those levels match the path to the current position.

Source

pub proof fn mapping_covering_cur_va_from_cur_subtree(self, m: Mapping)

requires
self.inv(),
self.view_mappings().contains(m),
m.va_range.start <= self.cur_va() < m.va_range.end,
ensures
PageTableOwner(self.cur_subtree()).view_rec(self.cur_subtree().value.path).contains(m),

Any mapping that covers cur_va must come from the current subtree. This follows from the disjointness of VA ranges and the fact that cur_va falls within the current subtree’s VA range.

Source

pub proof fn inv_continuation(self, i: int)

requires
self.inv(),
self.level - 1 <= i <= NR_LEVELS - 1,
ensures
self.continuations.contains_key(i),
self.continuations[i].inv(),
self.continuations[i].children.len() == NR_ENTRIES,
Source

pub open spec fn view_mappings(self) -> Set<Mapping>

{
    Set::new(|m: Mapping| {
        exists |i: int| {
            self.level - 1 <= i < NR_LEVELS - 1
                && self.continuations[i].view_mappings().contains(m)
        }
    })
}
Source

pub proof fn view_mappings_take_lowest(self, new: Self)

requires
self.inv(),
new.continuations == self.continuations.remove(self.level - 1),
ensures
new.view_mappings()
    == self.view_mappings() - self.continuations[self.level - 1].view_mappings(),
Source

pub proof fn view_mappings_put_lowest( self, new: Self, cont: CursorContinuation<'_, C>, )

requires
cont.inv(),
new.inv(),
new.continuations == self.continuations.insert(self.level - 1, cont),
ensures
new.view_mappings() == self.view_mappings() + cont.view_mappings(),
Source

pub open spec fn as_page_table_owner(self) -> PageTableOwner<C>

{
    if self.level == 1 {
        let l1 = self.continuations[0];
        let l2 = self.continuations[1].restore_spec(l1).0;
        let l3 = self.continuations[2].restore_spec(l2).0;
        let l4 = self.continuations[3].restore_spec(l3).0;
        l4.as_page_table_owner()
    } else if self.level == 2 {
        let l2 = self.continuations[1];
        let l3 = self.continuations[2].restore_spec(l2).0;
        let l4 = self.continuations[3].restore_spec(l3).0;
        l4.as_page_table_owner()
    } else if self.level == 3 {
        let l3 = self.continuations[2];
        let l4 = self.continuations[3].restore_spec(l3).0;
        l4.as_page_table_owner()
    } else {
        let l4 = self.continuations[3];
        l4.as_page_table_owner()
    }
}
Source

pub proof fn as_page_table_owner_preserves_view_mappings(self)

requires
self.inv(),
ensures
self.as_page_table_owner().view_rec(self.continuations[3].path()) == self.view_mappings(),
Source

pub open spec fn cur_entry_owner(self) -> EntryOwner<C>

{ self.cur_subtree().value }
Source

pub open spec fn cur_subtree(self) -> OwnerSubtree<C>

{ self.continuations[self.level - 1].children[self.index() as int].unwrap() }
Source

pub open spec fn locked_range(self) -> Range<Vaddr>

{
    let start = self.prefix.align_down(self.guard_level as int).to_vaddr();
    let end = self.prefix.align_up(self.guard_level as int).to_vaddr();
    Range { start, end }
}
Source

pub open spec fn in_locked_range(self) -> bool

{ self.locked_range().start <= self.va.to_vaddr() < self.locked_range().end }
Source

pub open spec fn above_locked_range(self) -> bool

{ self.va.to_vaddr() >= self.locked_range().end }
Source

pub proof fn prefix_in_locked_range(self)

requires
forall |i: int| i >= self.guard_level ==> self.va.index[i] == self.prefix.index[i],
ensures
self.in_locked_range(),
Source

pub proof fn in_locked_range_level_lt_nr_levels(self)

requires
self.inv(),
self.in_locked_range(),
!self.popped_too_high,
ensures
self.level < NR_LEVELS,

When `in_locked_range()` holds and `popped_too_high` is false, the invariant gives `level < guard_level`, hence `level < NR_LEVELS`.

Source

pub proof fn node_within_locked_range(self, level: PagingLevel)

requires
self.in_locked_range(),
1 <= level < self.guard_level,
self.va.inv(),
ensures
self.locked_range().start
    <= nat_align_down(
        self.va.to_vaddr() as nat,
        page_size((level + 1) as PagingLevel) as nat,
    ) as usize,
nat_align_down(self.va.to_vaddr() as nat, page_size((level + 1) as PagingLevel) as nat)
    as usize + page_size((level + 1) as PagingLevel) <= self.locked_range().end,

If in_locked_range() and level < guard_level, then:

  • va.align_down(page_size(level+1)) >= locked_range().start
  • va.align_down(page_size(level+1)) + page_size(level+1) <= locked_range().end

This follows from the fact that locked_range().start is aligned to page_size(guard_level), and page_size(guard_level) >= page_size(level+1) when guard_level > level. Therefore locked_range().start is also aligned to page_size(level+1).

Source

pub proof fn cur_entry_absent_not_present(self)

requires
self.inv(),
self.cur_entry_owner().is_absent(),
ensures
!self@.present(),

Proves that if the current entry is absent, then there is no mapping at the current virtual address. This follows from the page table structure:

  • cur_va falls within the VA range of cur_subtree()
  • An absent entry contributes no mappings (view_rec returns empty set)
  • Mappings from other subtrees have disjoint VA ranges
Source

pub proof fn cur_entry_frame_present(self)

requires
self.inv(),
self.cur_entry_owner().is_frame(),
ensures
self@.present(),
self@
    .query(
        self.cur_entry_owner().frame.unwrap().mapped_pa,
        self.cur_entry_owner().frame.unwrap().size,
        self.cur_entry_owner().frame.unwrap().prop,
    ),
Source

pub open spec fn relate_region(self, regions: MetaRegionOwners) -> bool

{
    &&& self
        .map_full_tree(|entry_owner: EntryOwner<C>, path: TreePath<NR_ENTRIES>| {
            entry_owner.relate_region(regions)
        })
    &&& self.map_full_tree(PageTableOwner::<C>::path_tracked_pred(regions))

}
Source

pub open spec fn not_in_tree(self, owner: EntryOwner<C>) -> bool

{
    self.map_full_tree(|owner0: EntryOwner<C>, path: TreePath<NR_ENTRIES>| {
        owner0.meta_slot_paddr_neq(owner)
    })
}
Source

pub proof fn absent_not_in_tree(self, owner: EntryOwner<C>)

requires
self.inv(),
owner.is_absent(),
ensures
self.not_in_tree(owner),
Source

pub proof fn relate_region_preserved( self, other: Self, regions0: MetaRegionOwners, regions1: MetaRegionOwners, )

requires
self.inv(),
self.relate_region(regions0),
self.level == other.level,
self.continuations =~= other.continuations,
OwnerSubtree::implies(
    PageTableOwner::<C>::relate_region_pred(regions0),
    PageTableOwner::<C>::relate_region_pred(regions1),
),
OwnerSubtree::implies(
    PageTableOwner::<C>::path_tracked_pred(regions0),
    PageTableOwner::<C>::path_tracked_pred(regions1),
),
ensures
other.relate_region(regions1),
Source

pub open spec fn set_va_spec(self, new_va: AbstractVaddr) -> Self

{ Self { va: new_va, ..self } }
Source

pub proof fn set_va(tracked &mut self, new_va: AbstractVaddr)

requires
forall |i: int| {
    old(self).level - 1 <= i < NR_LEVELS ==> new_va.index[i] == old(self).va.index[i]
},
forall |i: int| {
    old(self).guard_level - 1 <= i < NR_LEVELS
        ==> new_va.index[i] == old(self).prefix.index[i]
},
ensures
*self == old(self).set_va_spec(new_va),

Trait Implementations§

Source§

impl<'rcu, C: PageTableConfig> Inv for CursorOwner<'rcu, C>

Source§

open spec fn inv(self) -> bool

{
    &&& self.va.inv()
    &&& 1 <= self.level <= NR_LEVELS
    &&& self.guard_level <= NR_LEVELS
    &&& self.popped_too_high ==> self.level >= self.guard_level && self.in_locked_range()
    &&& !self.popped_too_high
        ==> self.level < self.guard_level || self.above_locked_range()
    &&& self.continuations[self.level - 1].all_some()
    &&& forall |i: int| {
        self.level <= i < NR_LEVELS
            ==> { (#[trigger] self.continuations[i]).all_but_index_some() }
    }
    &&& self.prefix.inv()
    &&& forall |i: int| i < self.guard_level ==> self.prefix.index[i] == 0
    &&& self.level <= 4
        ==> {
            &&& self.continuations.contains_key(3)
            &&& self.continuations[3].inv()
            &&& self.continuations[3].level() == 4
            &&& self.continuations[3].entry_own.parent_level == 5
            &&& self.va.index[3] == self.continuations[3].idx

        }
    &&& self.level <= 3
        ==> {
            &&& self.continuations.contains_key(2)
            &&& self.continuations[2].inv()
            &&& self.continuations[2].level() == 3
            &&& self.continuations[2].entry_own.parent_level == 4
            &&& self.va.index[2] == self.continuations[2].idx
            &&& self.continuations[2].guard_perm.value().inner.inner@.ptr.addr()
                != self.continuations[3].guard_perm.value().inner.inner@.ptr.addr()

        }
    &&& self.level <= 2
        ==> {
            &&& self.continuations.contains_key(1)
            &&& self.continuations[1].inv()
            &&& self.continuations[1].level() == 2
            &&& self.continuations[1].entry_own.parent_level == 3
            &&& self.va.index[1] == self.continuations[1].idx
            &&& self.continuations[1].guard_perm.value().inner.inner@.ptr.addr()
                != self.continuations[2].guard_perm.value().inner.inner@.ptr.addr()
            &&& self.continuations[1].guard_perm.value().inner.inner@.ptr.addr()
                != self.continuations[3].guard_perm.value().inner.inner@.ptr.addr()

        }
    &&& self.level == 1
        ==> {
            &&& self.continuations.contains_key(0)
            &&& self.continuations[0].inv()
            &&& self.continuations[0].level() == 1
            &&& self.continuations[0].entry_own.parent_level == 2
            &&& self.va.index[0] == self.continuations[0].idx
            &&& self.continuations[0].guard_perm.value().inner.inner@.ptr.addr()
                != self.continuations[1].guard_perm.value().inner.inner@.ptr.addr()

        }

}
Source§

impl<'rcu, C: PageTableConfig> InvView for CursorOwner<'rcu, C>

Source§

impl<'rcu, C: PageTableConfig> View for CursorOwner<'rcu, C>

Source§

open spec fn view(&self) -> Self::V

{
    CursorView {
        cur_va: self.cur_va(),
        mappings: self.view_mappings(),
        phantom: PhantomData,
    }
}
Source§

type V = CursorView<C>

Auto Trait Implementations§

§

impl<'rcu, C> Freeze for CursorOwner<'rcu, C>

§

impl<'rcu, C> !RefUnwindSafe for CursorOwner<'rcu, C>

§

impl<'rcu, C> Send for CursorOwner<'rcu, C>

§

impl<'rcu, C> Sync for CursorOwner<'rcu, C>

§

impl<'rcu, C> Unpin for CursorOwner<'rcu, C>
where <C as PageTableConfig>::E: Unpin, C: Unpin,

§

impl<'rcu, C> !UnwindSafe for CursorOwner<'rcu, C>

Blanket Implementations§

Source§

impl<T> Any for T
where T: 'static + ?Sized,

Source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
Source§

impl<T> Borrow<T> for T
where T: ?Sized,

Source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
Source§

impl<T> BorrowMut<T> for T
where T: ?Sized,

Source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
Source§

impl<T> From<T> for T

Source§

fn from(t: T) -> T

Returns the argument unchanged.

§

impl<T, VERUS_SPEC__A> FromSpec<T> for VERUS_SPEC__A
where VERUS_SPEC__A: From<T>,

§

fn obeys_from_spec() -> bool

§

fn from_spec(v: T) -> VERUS_SPEC__A

Source§

impl<T, U> Into<U> for T
where U: From<T>,

Source§

fn into(self) -> U

Calls U::from(self).

That is, this conversion is whatever the implementation of From<T> for U chooses to do.

§

impl<T, VERUS_SPEC__A> IntoSpec<T> for VERUS_SPEC__A
where VERUS_SPEC__A: Into<T>,

§

fn obeys_into_spec() -> bool

§

fn into_spec(self) -> T

§

impl<T, U> IntoSpecImpl<U> for T
where U: From<T>,

§

fn obeys_into_spec() -> bool

§

fn into_spec(self) -> U

Source§

impl<T, U> TryFrom<U> for T
where U: Into<T>,

Source§

type Error = Infallible

The type returned in the event of a conversion error.
Source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
§

impl<T, VERUS_SPEC__A> TryFromSpec<T> for VERUS_SPEC__A
where VERUS_SPEC__A: TryFrom<T>,

§

fn obeys_try_from_spec() -> bool

§

fn try_from_spec( v: T, ) -> Result<VERUS_SPEC__A, <VERUS_SPEC__A as TryFrom<T>>::Error>

Source§

impl<T, U> TryInto<U> for T
where U: TryFrom<T>,

Source§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
Source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
§

impl<T, VERUS_SPEC__A> TryIntoSpec<T> for VERUS_SPEC__A
where VERUS_SPEC__A: TryInto<T>,

§

fn obeys_try_into_spec() -> bool

§

fn try_into_spec(self) -> Result<T, <VERUS_SPEC__A as TryInto<T>>::Error>

§

impl<T, U> TryIntoSpecImpl<U> for T
where U: TryFrom<T>,

§

fn obeys_try_into_spec() -> bool

§

fn try_into_spec(self) -> Result<U, <U as TryFrom<T>>::Error>