AbstractVaddr

Struct AbstractVaddr 

Source
/// Abstract view of a virtual address: a page offset plus the
/// page-table index at each translation level.
pub struct AbstractVaddr {
    /// Byte offset within a page: the lowest 12 bits of the address
    /// (0 <= offset < PAGE_SIZE when `inv()` holds).
    pub offset: int,
    /// index[i] is the bit-field selecting an entry at paging level i
    /// (i ranges over 0..NR_LEVELS; 0 is the lowest level, NR_LEVELS-1 the root).
    pub index: Map<int, int>,
}
Expand description

An abstract representation of a virtual address as a sequence of indices, representing the values of the bit-fields that index into each level of the page table. `offset` is the lowest 12 bits (the offset into a 4096-byte page); `index[0]` is the next 9 bits (the index into the 512 entries of the first-level page table); and so on up to `index[NR_LEVELS-1]`.

Fields§

§ offset: int
§ index: Map<int, int>

Implementations§

Source§

impl AbstractVaddr

Source

/// Decompose a concrete virtual address into its abstract parts:
/// the in-page offset and the page-table index for each level.
pub open spec fn from_vaddr(va: Vaddr) -> Self

{
    AbstractVaddr {
        // Lowest 12 bits: byte offset within a PAGE_SIZE (4096-byte) page.
        offset: (va % PAGE_SIZE) as int,
        // index[i] = (va >> (12 + 9*i)) % NR_ENTRIES, written with
        // division by pow2 since this is spec-level arithmetic.
        index: Map::new(
            |i: int| 0 <= i < NR_LEVELS,
            |i: int| ((va / pow2((12 + 9 * i) as nat) as usize) % NR_ENTRIES) as int,
        ),
    }
}

Extract the AbstractVaddr components from a concrete virtual address.

  • offset = lowest 12 bits
  • index[i] = bits (12 + 9i) to (12 + 9(i+1) - 1) for each level
Source

pub proof fn from_vaddr_wf(va: Vaddr)

ensures
AbstractVaddr::from_vaddr(va).inv(),
Source

/// Reconstruct the concrete address:
/// va = offset + sum_{i in 0..NR_LEVELS} index[i] * 2^(12 + 9*i).
pub open spec fn to_vaddr(self) -> Vaddr

{ (self.offset + self.to_vaddr_indices(0)) as Vaddr }

Reconstruct the concrete virtual address from the AbstractVaddr components. va = offset + sum(index[i] * 2^(12 + 9*i)) for i in 0..NR_LEVELS

Source

/// Helper for `to_vaddr`: the weighted sum of the index fields from
/// `start` up to NR_LEVELS - 1, i.e.
/// sum_{i in start..NR_LEVELS} index[i] * 2^(12 + 9*i).
pub open spec fn to_vaddr_indices(self, start: int) -> int

{
    if start >= NR_LEVELS {
        // Past the last level: nothing left to add.
        0
    } else {
        // Weight this level's index by its bit position, then recurse upward.
        self.index[start] * pow2((12 + 9 * start) as nat) as int
            + self.to_vaddr_indices(start + 1)
    }
}

Helper: sum of index[i] * 2^(12 + 9*i) for i in start..NR_LEVELS

Source

/// `self` is the abstract representation of the concrete address `va`.
pub open spec fn reflect(self, va: Vaddr) -> bool

{ self == Self::from_vaddr(va) }

reflect(self, va) holds when self is the abstract representation of va.

Source

pub broadcast proof fn reflect_prop(self, va: Vaddr)

requires
self.inv(),
self.reflect(va),
ensures
#[trigger] self.to_vaddr() == va,
#[trigger] Self::from_vaddr(va) == self,

If self reflects va, then self.to_vaddr() == va and self == from_vaddr(va). The first ensures requires proving the round-trip property: from_vaddr(va).to_vaddr() == va.

Source

pub proof fn from_vaddr_to_vaddr_roundtrip(va: Vaddr)

ensures
Self::from_vaddr(va).to_vaddr() == va,

Round-trip property: extracting and reconstructing a VA gives back the original.

Source

pub broadcast proof fn reflect_from_vaddr(va: Vaddr)

ensures
#[trigger] Self::from_vaddr(va).reflect(va),
#[trigger] Self::from_vaddr(va).inv(),

from_vaddr(va) reflects va (by definition of reflect).

Source

pub broadcast proof fn reflect_to_vaddr(self)

requires
self.inv(),
ensures
#[trigger] self.reflect(self.to_vaddr()),

If self.inv(), then self reflects self.to_vaddr().

Source

pub proof fn to_vaddr_from_vaddr_roundtrip(abs: Self)

requires
abs.inv(),
ensures
Self::from_vaddr(abs.to_vaddr()) == abs,

Inverse round-trip: to_vaddr then from_vaddr gives back the original AbstractVaddr.

Source

pub broadcast proof fn reflect_eq(self, other: Self, va: Vaddr)

requires
self.reflect(va),
other.reflect(va),
ensures
(self == other),

If two AbstractVaddrs reflect the same va, they are equal.

Source

/// Zero out `offset` and the index fields below `level` (indices 0
/// through level - 2), leaving index[level - 1] and above unchanged:
/// this aligns the address down to a page_size(level) boundary.
pub open spec fn align_down(self, level: int) -> Self

{
    if level == 1 {
        // Base case: page alignment only clears the in-page offset.
        AbstractVaddr {
            offset: 0,
            index: self.index,
        }
    } else {
        // Align at the next-lower level, then also clear index[level - 2].
        let tmp = self.align_down(level - 1);
        AbstractVaddr {
            index: tmp.index.insert(level - 2, 0),
            ..tmp
        }
    }
}
Source

pub proof fn align_down_inv(self, level: int)

requires
1 <= level < NR_LEVELS,
self.inv(),
ensures
self.align_down(level).inv(),
forall |i: int| {
    level <= i < NR_LEVELS
        ==> #[trigger] self.index[i - 1] == self.align_down(level).index[i - 1]
},
Source

pub proof fn align_down_to_vaddr_eq_if_upper_indices_eq(self, other: Self, level: int)

requires
1 <= level <= NR_LEVELS,
self.inv(),
other.inv(),
forall |i: int| level - 1 <= i < NR_LEVELS ==> self.index[i] == other.index[i],
ensures
self.align_down(level).to_vaddr() == other.align_down(level).to_vaddr(),

If two AbstractVaddrs share the same indices at levels >= level-1 (i.e., index[level-1] and above), then aligning them down to level gives the same to_vaddr() result. This is because align_down(level) zeroes offset and indices 0 through level-2, so only indices level-1 and above affect the to_vaddr() result.

Source

pub proof fn align_down_concrete(self, level: int)

requires
1 <= level <= NR_LEVELS,
ensures
self
    .align_down(level)
    .reflect(
        nat_align_down(self.to_vaddr() as nat, page_size(level as PagingLevel) as nat)
            as Vaddr,
    ),
Source

/// Align up to the next page_size(level) boundary: first align down at
/// the level below, then advance index[level - 1] (with carry) by one.
/// NOTE(review): for level == 1 this evaluates align_down(0), whose
/// visible base case is level == 1 — confirm the level == 1 edge case
/// (a hidden decreases clause or an unstated precondition).
pub open spec fn align_up(self, level: int) -> Self

{
    let lower_aligned = self.align_down(level - 1);
    lower_aligned.next_index(level)
}
Source

pub proof fn align_up_concrete(self, level: int)

requires
1 <= level <= NR_LEVELS,
ensures
self
    .align_up(level)
    .reflect(
        nat_align_up(self.to_vaddr() as nat, page_size(level as PagingLevel) as nat)
            as Vaddr,
    ),
Source

pub proof fn align_diff(self, level: int)

requires
1 <= level <= NR_LEVELS,
ensures
nat_align_up(self.to_vaddr() as nat, page_size(level as PagingLevel) as nat)
    == nat_align_down(self.to_vaddr() as nat, page_size(level as PagingLevel) as nat)
        + page_size(level as PagingLevel),
nat_align_up(self.to_vaddr() as nat, page_size(level as PagingLevel) as nat) < usize::MAX,
Source

/// Advance index[level - 1] by one, propagating a carry into the next
/// higher level when the field wraps from NR_ENTRIES - 1 back to 0.
/// A carry out of the top level (level == NR_LEVELS) is dropped, so
/// the whole address wraps around.
pub open spec fn next_index(self, level: int) -> Self

{
    let index = self.index[level - 1];
    let next_index = index + 1;
    if next_index == NR_ENTRIES && level < NR_LEVELS {
        // Field overflows: reset it to 0 and carry into the level above.
        let next_va = Self {
            offset: self.offset,
            index: self.index.insert(level - 1, 0),
        };
        next_va.next_index(level + 1)
    } else if next_index == NR_ENTRIES && level == NR_LEVELS {
        // Overflow at the top level: wrap to 0 with no carry target.
        Self {
            offset: self.offset,
            index: self.index.insert(level - 1, 0),
        }
    } else {
        // No overflow: just store the incremented field.
        Self {
            offset: self.offset,
            index: self.index.insert(level - 1, next_index),
        }
    }
}
Source

/// Characterizes the carry chain of `next_index(start_level)` as seen
/// at `level`: the field at level - 1 ends up 0 exactly when it was at
/// its maximum (NR_ENTRIES - 1) and carried, in which case the levels
/// above satisfy the same wrapped-or-not relationship recursively.
pub open spec fn wrapped(self, start_level: int, level: int) -> bool

{
    &&& self.next_index(start_level).index[level - 1] == 0
        ==> {
            // A resulting 0 means the field overflowed and carried upward.
            &&& self.index[level - 1] + 1 == NR_ENTRIES
            &&& if level < NR_LEVELS {
                self.wrapped(start_level, level + 1)
            } else {
                true
            }

        }
    // A nonzero result means the field did not reach its maximum.
    &&& self.next_index(start_level).index[level - 1] != 0
        ==> self.index[level - 1] + 1 < NR_ENTRIES

}
Source

pub proof fn use_wrapped(self, start_level: int, level: int)

requires
1 <= start_level <= level < NR_LEVELS,
self.wrapped(start_level, level),
self.next_index(start_level).index[level - 1] == 0,
ensures
self.index[level - 1] + 1 == NR_ENTRIES,
Source

pub proof fn wrapped_unwrap(self, start_level: int, level: int)

requires
1 <= start_level <= level < NR_LEVELS,
self.wrapped(start_level, level),
self.next_index(start_level).index[level - 1] == 0,
ensures
self.wrapped(start_level, level + 1),
Source

pub proof fn next_index_wrap_condition(self, level: int)

requires
self.inv(),
1 <= level <= NR_LEVELS,
ensures
self.wrapped(level, level),
Source

/// Concrete vaddr computed from the abstract parts:
/// index[NR_LEVELS-1] * 2^(12 + 9*(NR_LEVELS-1)) + ... + index[0] * 2^12 + offset.
pub open spec fn compute_vaddr(self) -> Vaddr

{ self.rec_compute_vaddr(0) }

Computes the concrete vaddr from the abstract representation. This matches the structure: index[NR_LEVELS-1] * 2^39 + index[NR_LEVELS-2] * 2^30 + … + index[0] * 2^12 + offset

Source

/// Helper for `compute_vaddr`: sums index[j] * page_size(j + 1) for
/// j in i..NR_LEVELS, with the in-page offset at the base of the
/// recursion.
pub open spec fn rec_compute_vaddr(self, i: int) -> Vaddr

{
    if i >= NR_LEVELS {
        // All index fields consumed; only the in-page offset remains.
        self.offset as Vaddr
    } else {
        // page_size(i + 1) is the weight of index[i] — 2^(12 + 9*i)
        // per the structure documented for compute_vaddr.
        let shift = page_size((i + 1) as PagingLevel);
        (self.index[i] * shift + self.rec_compute_vaddr(i + 1)) as Vaddr
    }
}

Helper for computing vaddr recursively from level i upward.

Source

/// Project this abstract vaddr onto a TreePath from the root level
/// (index key NR_LEVELS - 1) down to `level`, yielding a path of
/// length NR_LEVELS - level whose first entry is the root index.
pub open spec fn to_path(self, level: int) -> TreePath<NR_ENTRIES>

recommends
0 <= level < NR_LEVELS,
{ TreePath(self.rec_to_path(NR_LEVELS - 1, level)) }

Extracts a TreePath from this abstract vaddr, from the root down to the given level. The path has length (NR_LEVELS - level), containing indices for paging levels NR_LEVELS..level+1.

  • level=0: full path of length NR_LEVELS with indices for all levels
  • level=3: path of length 1 with just the root index

Path index mapping:

  • path.index(0) = self.index[NR_LEVELS - 1] (root level)
  • path.index(i) = self.index[NR_LEVELS - 1 - i]
  • path.index(NR_LEVELS - level - 1) = self.index[level] (last entry)
Source

/// Helper for `to_path`: collect index[bottom_level..=abstract_level]
/// into a sequence ordered from the highest level (first element) down
/// to the lowest level (last element).
pub open spec fn rec_to_path(self, abstract_level: int, bottom_level: int) -> Seq<usize>

{
    if abstract_level < bottom_level {
        // Empty range: no levels to emit.
        seq![]
    } else if abstract_level == bottom_level {
        // Single level left in the range.
        seq![self.index[abstract_level] as usize]
    } else {
        // Higher levels first, then append this level's index.
        self.rec_to_path(abstract_level - 1, bottom_level)
            .push(self.index[abstract_level] as usize)
    }
}

Builds the path sequence from abstract_level down to bottom_level (both inclusive). abstract_level and bottom_level refer to the index keys in self.index (0 = lowest level, NR_LEVELS-1 = root). Returns indices in order from highest level (first in seq) to lowest level (last in seq).

Source

pub proof fn to_path_vaddr(self, level: int)

requires
self.inv(),
0 <= level < NR_LEVELS,
ensures
vaddr(self.to_path(level)) == self.align_down(level + 1).compute_vaddr(),

The vaddr of the path from this abstract vaddr equals the aligned vaddr at that level.

Source

pub proof fn to_vaddr_is_compute_vaddr(self)

requires
self.inv(),
ensures
self.to_vaddr() == self.compute_vaddr(),

The concrete to_vaddr() equals the computed vaddr.

Source

pub proof fn index_increment_adds_page_size(self, level: int)

requires
self.inv(),
1 <= level <= NR_LEVELS,
self.index[level - 1] + 1 < NR_ENTRIES,
ensures
(Self {
    index: self.index.insert(level - 1, self.index[level - 1] + 1),
    ..self
})
    .to_vaddr() == self.to_vaddr() + page_size(level as PagingLevel),

Incrementing index[level-1] by 1 increases to_vaddr() by page_size(level).

Source

pub proof fn to_path_len(self, level: int)

requires
0 <= level < NR_LEVELS,
ensures
self.to_path(level).len() == NR_LEVELS - level,

Path extracted from abstract vaddr has correct length.

Source

pub proof fn to_path_inv(self, level: int)

requires
self.inv(),
0 <= level < NR_LEVELS,
ensures
self.to_path(level).inv(),

Path extracted from abstract vaddr has valid indices.

Source§

impl AbstractVaddr

Connection between TreePath’s vaddr and AbstractVaddr

Source

pub proof fn path_matches_vaddr(self, path: TreePath<NR_ENTRIES>)

requires
self.inv(),
path.inv(),
path.len() <= NR_LEVELS,
forall |i: int| 0 <= i < path.len() ==> path.index(i) == self.index[NR_LEVELS - 1 - i],
ensures
vaddr(path)
    == self.align_down((NR_LEVELS - path.len() + 1) as int).compute_vaddr()
        - self.align_down((NR_LEVELS - path.len() + 1) as int).offset,

If a TreePath matches this abstract vaddr’s indices at all levels covered by the path, then vaddr(path) equals the aligned compute_vaddr at the corresponding level.

Source

pub proof fn to_path_index(self, level: int, i: int)

requires
self.inv(),
0 <= level < NR_LEVELS,
0 <= i < NR_LEVELS - level,
ensures
self.to_path(level).index(i) == self.index[NR_LEVELS - 1 - i],

The path index at position i corresponds to the abstract vaddr index at level (NR_LEVELS - 1 - i). This is the key mapping between TreePath ordering and AbstractVaddr index ordering.

Source

pub proof fn to_path_vaddr_concrete(self, level: int)

requires
self.inv(),
0 <= level < NR_LEVELS,
ensures
vaddr(self.to_path(level))
    == nat_align_down(
        self.to_vaddr() as nat,
        page_size((level + 1) as PagingLevel) as nat,
    ) as usize,

Direct connection: vaddr(to_path(level)) equals the aligned concrete vaddr. This combines to_path_vaddr with to_vaddr_is_compute_vaddr.

Source

pub proof fn vaddr_range_from_path(self, level: int)

requires
self.inv(),
0 <= level < NR_LEVELS,
ensures
vaddr(self.to_path(level)) <= self.to_vaddr()
    < vaddr(self.to_path(level)) + page_size((level + 1) as PagingLevel),

Key property: if we have a path that matches cur_va’s indices, then vaddr(path) + page_size(level) bounds the range containing cur_va.

Trait Implementations§

Source§

impl Inv for AbstractVaddr

Source§

/// Well-formedness invariant: the offset is a valid in-page offset,
/// and every level 0..NR_LEVELS has an index entry present in the map
/// and within the valid range [0, NR_ENTRIES).
open spec fn inv(self) -> bool

{
    &&& 0 <= self.offset < PAGE_SIZE
    &&& forall |i: int| {
        0 <= i < NR_LEVELS
            ==> {
                &&& self.index.contains_key(i)
                &&& 0 <= self.index[i]
                &&& self.index[i] < NR_ENTRIES

            }
    }

}

Auto Trait Implementations§

Blanket Implementations§

Source§

impl<T> Any for T
where T: 'static + ?Sized,

Source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
Source§

impl<T> Borrow<T> for T
where T: ?Sized,

Source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
Source§

impl<T> BorrowMut<T> for T
where T: ?Sized,

Source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
Source§

impl<T> From<T> for T

Source§

fn from(t: T) -> T

Returns the argument unchanged.

§

impl<T, VERUS_SPEC__A> FromSpec<T> for VERUS_SPEC__A
where VERUS_SPEC__A: From<T>,

§

fn obeys_from_spec() -> bool

§

fn from_spec(v: T) -> VERUS_SPEC__A

Source§

impl<T, U> Into<U> for T
where U: From<T>,

Source§

fn into(self) -> U

Calls U::from(self).

That is, this conversion is whatever the implementation of From<T> for U chooses to do.

§

impl<T, VERUS_SPEC__A> IntoSpec<T> for VERUS_SPEC__A
where VERUS_SPEC__A: Into<T>,

§

fn obeys_into_spec() -> bool

§

fn into_spec(self) -> T

§

impl<T, U> IntoSpecImpl<U> for T
where U: From<T>,

§

fn obeys_into_spec() -> bool

§

fn into_spec(self) -> U

Source§

impl<T, U> TryFrom<U> for T
where U: Into<T>,

Source§

type Error = Infallible

The type returned in the event of a conversion error.
Source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
§

impl<T, VERUS_SPEC__A> TryFromSpec<T> for VERUS_SPEC__A
where VERUS_SPEC__A: TryFrom<T>,

§

fn obeys_try_from_spec() -> bool

§

fn try_from_spec( v: T, ) -> Result<VERUS_SPEC__A, <VERUS_SPEC__A as TryFrom<T>>::Error>

Source§

impl<T, U> TryInto<U> for T
where U: TryFrom<T>,

Source§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
Source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
§

impl<T, VERUS_SPEC__A> TryIntoSpec<T> for VERUS_SPEC__A
where VERUS_SPEC__A: TryInto<T>,

§

fn obeys_try_into_spec() -> bool

§

fn try_into_spec(self) -> Result<T, <VERUS_SPEC__A as TryInto<T>>::Error>

§

impl<T, U> TryIntoSpecImpl<U> for T
where U: TryFrom<T>,

§

fn obeys_try_into_spec() -> bool

§

fn try_into_spec(self) -> Result<U, <U as TryFrom<T>>::Error>