pub struct VirtPtr {
pub vaddr: Vaddr,
pub range: Ghost<Range<Vaddr>>,
}

Expand description
Specification-level virtual pointer with an associated half-open range.
VirtPtr models a C-like pointer used by verification code:
`vaddr` is the current cursor position; `range` is the allocation-like bounds `[start, end)`.
Most operations require is_valid() (i.e., the current pointer is within
range and not one-past-end), while offset operations additionally require
the computed address to stay inside the same range.
Fields§
§vaddr: Vaddr — Current virtual address represented by this pointer value.
§range: Ghost<Range<Vaddr>> — Logical bounds of the pointer’s valid object/range.
Implementations§
Source§impl VirtPtr
impl VirtPtr
Sourcepub open spec fn new_spec(vaddr: Vaddr, len: usize) -> Self
pub open spec fn new_spec(vaddr: Vaddr, len: usize) -> Self
{
Self {
vaddr,
range: Ghost(Range {
start: vaddr,
end: (vaddr + len) as usize,
}),
}
}Pure constructor specification for Self::new.
Sourcepub open spec fn is_defined(self) -> bool
pub open spec fn is_defined(self) -> bool
{
&&& self.vaddr != 0
&&& self.range@.start <= self.vaddr <= self.range@.end
}Whether the pointer has a non-null address and consistent bounds.
Sourcepub open spec fn is_valid(self) -> bool
pub open spec fn is_valid(self) -> bool
{
&&& self.is_defined()
&&& self.vaddr < self.range@.end
}Whether dereferencing the current pointer position is allowed.
This excludes the one-past-end position (vaddr == range.end).
Sourcepub exec fn read(self, Tracked(mem): Tracked<&MemView>) -> u8
pub exec fn read(self, Tracked(mem): Tracked<&MemView>) -> u8
requires
    mem.addr_transl(self.vaddr) is Some,
    mem
        .memory[mem.addr_transl(self.vaddr).unwrap().0]
        .contents[mem.addr_transl(self.vaddr).unwrap().1 as int] is Init,
    self.is_valid(),
Reads one byte from self.vaddr in the provided memory view.
§Verified Properties
§Preconditions
- mem.addr_transl(self.vaddr) is Some.
- Target byte is initialized:
  mem.memory[mem.addr_transl(self.vaddr).unwrap().0].contents[mem.addr_transl(self.vaddr).unwrap().1 as int] is Init.
- self.is_valid().
§Postconditions
- Returns
mem.read(self.vaddr).value().
Sourcepub exec fn write(self, Tracked(mem): Tracked<&mut MemView>, x: u8)
pub exec fn write(self, Tracked(mem): Tracked<&mut MemView>, x: u8)
requires
    old(mem).addr_transl(self.vaddr) is Some,
    self.is_valid(),
ensures
    *mem == old(mem).write(self.vaddr, x),
Writes one byte to self.vaddr in the provided memory view.
§Verified Properties
§Preconditions
- old(mem).addr_transl(self.vaddr) is Some.
- self.is_valid().
§Postconditions
*mem == old(mem).write(self.vaddr, x).
Sourcepub open spec fn add_spec(self, n: usize) -> Self
pub open spec fn add_spec(self, n: usize) -> Self
{
VirtPtr {
vaddr: (self.vaddr + n) as usize,
range: self.range,
}
}Sourcepub exec fn add(&mut self, n: usize)
pub exec fn add(&mut self, n: usize)
requires
    0 <= old(self).vaddr + n < usize::MAX,
ensures
    *self == old(self).add_spec(n),
Advances the pointer by n bytes.
This operation only updates the cursor; it does not perform memory access.
§Verified Properties
§Preconditions
0 <= old(self).vaddr + n < usize::MAX.
§Postconditions
*self == old(self).add_spec(n).
Sourcepub open spec fn read_offset_spec(self, mem: MemView, n: usize) -> u8
pub open spec fn read_offset_spec(self, mem: MemView, n: usize) -> u8
{ mem.read((self.vaddr + n) as usize).value() }Sourcepub exec fn read_offset(&self, Tracked(mem): Tracked<&MemView>, n: usize) -> u8
pub exec fn read_offset(&self, Tracked(mem): Tracked<&MemView>, n: usize) -> u8
requires
    0 < self.vaddr + n < usize::MAX,
    self.range@.start <= self.vaddr + n < self.range@.end,
    mem.addr_transl((self.vaddr + n) as usize) is Some,
    mem
        .memory[mem.addr_transl((self.vaddr + n) as usize).unwrap().0]
        .contents[mem.addr_transl((self.vaddr + n) as usize).unwrap().1 as int] is Init,
Reads from self.vaddr + n without mutating self.
Implemented by creating a temporary pointer and calling Self::read.
When self.vaddr == self.range.start, this behaves like array indexing.
§Verified Properties
§Preconditions
- 0 < self.vaddr + n < usize::MAX.
- self.range@.start <= self.vaddr + n < self.range@.end.
- mem.addr_transl((self.vaddr + n) as usize) is Some.
- Target byte at offset is initialized.
§Postconditions
- Returns
self.read_offset_spec(*mem, n).
Sourcepub open spec fn write_offset_spec(self, mem: MemView, n: usize, x: u8) -> MemView
pub open spec fn write_offset_spec(self, mem: MemView, n: usize, x: u8) -> MemView
{ mem.write((self.vaddr + n) as usize, x) }Sourcepub exec fn write_offset(
&self,
verus_tmp_mem: Tracked<&mut MemView>,
n: usize,
x: u8,
)
pub exec fn write_offset( &self, verus_tmp_mem: Tracked<&mut MemView>, n: usize, x: u8, )
requires
    self.inv(),
    self.range@.start <= self.vaddr + n < self.range@.end,
    old(mem).addr_transl((self.vaddr + n) as usize) is Some,
Writes to self.vaddr + n without mutating self.
§Verified Properties
§Preconditions
- self.inv().
- self.range@.start <= self.vaddr + n < self.range@.end.
- old(mem).addr_transl((self.vaddr + n) as usize) is Some.
Sourcepub open spec fn copy_offset_spec(
src: Self,
dst: Self,
mem_src: MemView,
mem_dst: MemView,
n: usize,
) -> MemView
pub open spec fn copy_offset_spec( src: Self, dst: Self, mem_src: MemView, mem_dst: MemView, n: usize, ) -> MemView
{
let x = src.read_offset_spec(mem_src, n);
dst.write_offset_spec(mem_dst, n, x)
}Sourcepub exec fn copy_offset(
src: &Self,
dst: &Self,
verus_tmp_mem_src: Tracked<&MemView>,
verus_tmp_mem_dst: Tracked<&mut MemView>,
n: usize,
)
pub exec fn copy_offset( src: &Self, dst: &Self, verus_tmp_mem_src: Tracked<&MemView>, verus_tmp_mem_dst: Tracked<&mut MemView>, n: usize, )
requires
    src.inv(),
    dst.inv(),
    src.range@.start <= src.vaddr + n < src.range@.end,
    mem_src.addr_transl((src.vaddr + n) as usize) is Some,
    mem_src.memory.contains_key(mem_src.addr_transl((src.vaddr + n) as usize).unwrap().0),
    mem_src
        .memory[mem_src.addr_transl((src.vaddr + n) as usize).unwrap().0]
        .contents[mem_src.addr_transl((src.vaddr + n) as usize).unwrap().1 as int] is Init,
    dst.range@.start <= dst.vaddr + n < dst.range@.end,
    old(mem_dst).addr_transl((dst.vaddr + n) as usize) is Some,
ensures
    *mem_dst == Self::copy_offset_spec(*src, *dst, *mem_src, *old(mem_dst), n),
    mem_dst.mappings == old(mem_dst).mappings,
    mem_dst.memory.dom() == old(mem_dst).memory.dom(),
Copies one byte from src + n to dst + n.
Source and destination are reasoned about via distinct memory views.
§Verified Properties
§Preconditions
- src.inv() and dst.inv().
- Source offset and destination offset are both in-range.
- Source offset is mapped and initialized in mem_src.
- Destination offset is mapped in old(mem_dst).
§Postconditions
- *mem_dst == Self::copy_offset_spec(*src, *dst, *mem_src, *old(mem_dst), n).
- mem_dst.mappings == old(mem_dst).mappings.
- mem_dst.memory.dom() == old(mem_dst).memory.dom().
Sourcepub open spec fn memcpy_spec(
src: Self,
dst: Self,
mem_src: MemView,
mem_dst: MemView,
n: usize,
) -> MemView
pub open spec fn memcpy_spec( src: Self, dst: Self, mem_src: MemView, mem_dst: MemView, n: usize, ) -> MemView
{
if n == 0 {
mem_dst
} else {
let mem_dst_1 = Self::copy_offset_spec(
src,
dst,
mem_src,
mem_dst,
(n - 1) as usize,
);
Self::memcpy_spec(src, dst, mem_src, mem_dst_1, (n - 1) as usize)
}
}Sourcepub exec fn copy_nonoverlapping(
src: &Self,
dst: &Self,
verus_tmp_mem_src: Tracked<&MemView>,
verus_tmp_mem_dst: Tracked<&mut MemView>,
n: usize,
)
pub exec fn copy_nonoverlapping( src: &Self, dst: &Self, verus_tmp_mem_src: Tracked<&MemView>, verus_tmp_mem_dst: Tracked<&mut MemView>, n: usize, )
requires
    src.inv(),
    dst.inv(),
    src.range@.end <= dst.range@.start || dst.range@.end <= src.range@.start,
    src.range@.start <= src.vaddr,
    src.vaddr + n <= src.range@.end,
    forall |i: usize| {
        src.vaddr <= i < src.vaddr + n
        ==> {
            &&& mem_src.addr_transl(i) is Some
            &&& mem_src.memory.contains_key(mem_src.addr_transl(i).unwrap().0)
            &&& mem_src
                .memory[mem_src.addr_transl(i).unwrap().0]
                .contents[mem_src.addr_transl(i).unwrap().1 as int] is Init
        }
    },
    dst.range@.start <= dst.vaddr,
    dst.vaddr + n <= dst.range@.end,
    forall |i: usize| {
        dst.vaddr <= i < dst.vaddr + n
        ==> old(mem_dst).addr_transl(i) is Some
    },
ensures
    *mem_dst == Self::memcpy_spec(*src, *dst, *mem_src, *old(mem_dst), n),
    mem_dst.mappings == old(mem_dst).mappings,
    mem_dst.memory.dom() == old(mem_dst).memory.dom(),
    forall |i: usize| {
        dst.vaddr <= i < dst.vaddr + n
        ==> mem_dst.addr_transl(i) is Some
    },
Copies n bytes from src to dst in the given memory view.
The source and destination must not overlap.
copy_nonoverlapping is semantically equivalent to C’s memcpy,
but with the source and destination arguments swapped.
mem points to src’s owned memory regions.
§Verified Properties
§Preconditions
- src.inv() and dst.inv().
- Source and destination ranges are non-overlapping.
- Source slice [src.vaddr, src.vaddr + n) is in-range and initialized in mem_src.
- Destination slice [dst.vaddr, dst.vaddr + n) is in-range and mapped in old(mem_dst).
§Postconditions
- *mem_dst == Self::memcpy_spec(*src, *dst, *mem_src, *old(mem_dst), n).
- mem_dst.mappings == old(mem_dst).mappings.
- mem_dst.memory.dom() == old(mem_dst).memory.dom().
- Destination slice remains mapped in mem_dst.
Sourcepub exec fn from_vaddr(vaddr: usize, len: usize) -> r : Self
pub exec fn from_vaddr(vaddr: usize, len: usize) -> r : Self
requires
    vaddr != 0,
    0 < len <= usize::MAX - vaddr,
ensures
    r.is_valid(),
    r.range@.start == vaddr,
    r.range@.end == (vaddr + len) as usize,
Builds a valid VirtPtr from a concrete virtual-address range.
§Verified Properties
§Preconditions
- vaddr != 0.
- 0 < len <= usize::MAX - vaddr.
§Postconditions
- r.is_valid().
- r.range@.start == vaddr.
- r.range@.end == (vaddr + len) as usize.
Sourcepub exec fn split(self, n: usize) -> r : (Self, Self)
pub exec fn split(self, n: usize) -> r : (Self, Self)
requires
    self.is_valid(),
    0 <= n <= self.range@.end - self.range@.start,
    self.vaddr == self.range@.start,
ensures
    r.0.range@.start == self.range@.start,
    r.0.range@.end == self.range@.start + n,
    r.0.vaddr == self.range@.start,
    r.1.range@.start == self.range@.start + n,
    r.1.range@.end == self.range@.end,
    r.1.vaddr == self.range@.start + n,
Executable helper to split the VirtPtr into two contiguous ranges.
This updates ghost ranges to match a split operation over the same region.
§Verified Properties
§Preconditions
- self.is_valid().
- 0 <= n <= self.range@.end - self.range@.start.
- self.vaddr == self.range@.start.
§Postconditions
- Left pointer covers
[self.range@.start, self.range@.start + n). - Right pointer covers
[self.range@.start + n, self.range@.end).