use vstd::pervasive::arbitrary;
use vstd::prelude::*;

use vstd::layout;
use vstd::raw_ptr;
use vstd::set;
use vstd::set_lib;

use core::marker::PhantomData;
use core::ops::Range;

use crate::mm::{Paddr, Vaddr};
use crate::prelude::Inv;
use crate::specs::arch::mm::MAX_PADDR;
use crate::specs::mm::page_table::Mapping;

#[path = "virt_mem_example.rs"]
mod virt_mem_example;

verus! {

pub struct VirtPtr {
    pub vaddr: Vaddr,
    pub range: Ghost<Range<Vaddr>>,
}

pub ghost struct FrameContents {
    pub contents: Seq<raw_ptr::MemContents<u8>>,
    pub size: usize,
    pub range: Range<Paddr>,
}

impl Inv for FrameContents {
    open spec fn inv(self) -> bool {
        &&& self.contents.len() == self.size
        &&& self.size == self.range.end - self.range.start
        &&& self.range.start % self.size == 0
        &&& self.range.end % self.size == 0
        &&& self.range.start <= self.range.end < MAX_PADDR
    }
}

pub tracked struct MemView {
    pub mappings: Set<Mapping>,
    pub memory: Map<Paddr, FrameContents>,
}

impl MemView {
    pub open spec fn addr_transl(self, va: usize) -> Option<(usize, usize)> {
        let mappings = self.mappings.filter(|m: Mapping| m.va_range.start <= va < m.va_range.end);
        if 0 < mappings.len() {
            let m = mappings.choose();
            let off = va - m.va_range.start;
            Some((m.pa_range.start, off as usize))
        } else {
            None
        }
    }
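
    // Illustrative sketch (hypothetical values): with a single mapping
    // `{ va_range: 0x1000..0x2000, pa_range: 0x8000..0x9000 }` in `mappings`,
    // `addr_transl(0x1234)` filters down to that one mapping and returns
    // `Some((0x8000, 0x234))`: the frame's base physical address plus the
    // byte offset into it. When mappings are disjoint, the filtered set has
    // at most one element, so `choose()` picks it deterministically.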

    pub open spec fn read(self, va: usize) -> raw_ptr::MemContents<u8> {
        let (pa, off) = self.addr_transl(va)->Some_0;
        self.memory[pa].contents[off as int]
    }

    pub open spec fn write(self, va: usize, x: u8) -> Self {
        let (pa, off) = self.addr_transl(va)->Some_0;
        MemView {
            memory: self.memory.insert(pa, FrameContents {
                contents: self.memory[pa].contents.update(off as int, raw_ptr::MemContents::Init(x)),
                ..self.memory[pa]
            }),
            ..self
        }
    }
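
    // Example (hypothetical address): `write` is last-write-wins at a single
    // byte, so for any translated `va`,
    //     mem.write(va, 1).write(va, 2).read(va) == raw_ptr::MemContents::Init(2u8)
    // Only the targeted frame's `contents` sequence changes; `mappings` and
    // the domain of `memory` are untouched.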
139
140 pub open spec fn eq_at(self, va1: usize, va2: usize) -> bool {
142 let (pa1, off1) = self.addr_transl(va1)->Some_0;
143 let (pa2, off2) = self.addr_transl(va2)->Some_0;
144 self.memory[pa1].contents[off1 as int] == self.memory[pa2].contents[off2 as int]
145 }
146
147 pub open spec fn is_mapped(self, va: usize, pa: usize) -> bool {
149 self.addr_transl(va) is Some && self.addr_transl(va)->Some_0.0 == pa
150 }
151
152 pub open spec fn borrow_at_spec(&self, vaddr: usize, len: usize) -> MemView {
157 let range_end = vaddr + len;
158
159 let valid_pas = Set::new(
160 |pa: usize|
161 exists |va: usize|
162 vaddr <= va < range_end && #[trigger] self.is_mapped(va, pa),
163 );
164
165 MemView {
166 mappings: self.mappings.filter(
167 |m: Mapping| m.va_range.start < range_end && m.va_range.end > vaddr,
168 ),
169 memory: self.memory.restrict(valid_pas),
170 }
171 }
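
    // Sketch of the intended use (hypothetical window): borrowing
    // `0x1000..0x1010` keeps exactly the mappings overlapping that range and
    // restricts `memory` to the frames those bytes translate to, so a callee
    // holding the borrowed view can reason about reads of `0x1000..0x1010`
    // but about nothing outside it.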

    pub open spec fn mappings_are_disjoint(self) -> bool {
        forall|m1: Mapping, m2: Mapping|
            #![trigger self.mappings.contains(m1), self.mappings.contains(m2)]
            self.mappings.contains(m1) && self.mappings.contains(m2) && m1 != m2 ==> {
                m1.va_range.end <= m2.va_range.start || m2.va_range.end <= m1.va_range.start
            }
    }

    pub open spec fn split_spec(self, vaddr: usize, len: usize) -> (MemView, MemView) {
        let split_end = vaddr + len;

        let left_mappings = self.mappings.filter(
            |m: Mapping| m.va_range.start < split_end && m.va_range.end > vaddr,
        );
        let right_mappings = self.mappings.filter(|m: Mapping| m.va_range.end > split_end);

        let left_pas = Set::new(
            |pa: usize| exists|va: usize| vaddr <= va < split_end && self.is_mapped(va, pa),
        );
        let right_pas = Set::new(
            |pa: usize| exists|va: usize| va >= split_end && self.is_mapped(va, pa),
        );

        (
            MemView { mappings: left_mappings, memory: self.memory.restrict(left_pas) },
            MemView { mappings: right_mappings, memory: self.memory.restrict(right_pas) },
        )
    }
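
    // Note: the two halves may share a mapping. A mapping straddling
    // `vaddr + len` passes both filters and so appears (with its frame) in
    // both the left and the right view; `join_spec`'s set union absorbs the
    // duplicate. A mapping lying entirely below `vaddr` passes neither
    // filter, which is why `lemma_split_join_identity` below assumes every
    // mapping starts at or after `vaddr`.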

    #[verifier::external_body]
    pub proof fn borrow_at(tracked &self, vaddr: usize, len: usize) -> (tracked r: &MemView)
        ensures
            r == self.borrow_at_spec(vaddr, len),
    {
        unimplemented!()
    }

    #[verifier::external_body]
    pub proof fn split(tracked self, vaddr: usize, len: usize) -> (tracked r: (Self, Self))
        ensures
            r == self.split_spec(vaddr, len),
    {
        unimplemented!()
    }

    pub proof fn lemma_split_preserves_transl(
        original: MemView,
        vaddr: usize,
        len: usize,
        left: MemView,
        right: MemView,
    )
        requires
            original.split_spec(vaddr, len) == (left, right),
        ensures
            right.memory.dom().subset_of(original.memory.dom()),
            forall|va: usize|
                vaddr <= va < vaddr + len ==> {
                    #[trigger] original.addr_transl(va) == left.addr_transl(va)
                },
            forall|va: usize|
                va >= vaddr + len ==> {
                    #[trigger] original.addr_transl(va) == right.addr_transl(va)
                },
    {
        assert(right.memory.dom().subset_of(original.memory.dom()));

        assert forall|va: usize| vaddr <= va < vaddr + len implies original.addr_transl(va)
            == left.addr_transl(va) by {
            assert(left.mappings =~= original.mappings.filter(
                |m: Mapping| m.va_range.start < vaddr + len && m.va_range.end > vaddr,
            ));
            let o_mappings = original.mappings.filter(
                |m: Mapping| m.va_range.start <= va < m.va_range.end,
            );
            let l_mappings = left.mappings.filter(
                |m: Mapping| m.va_range.start <= va < m.va_range.end,
            );

            assert(l_mappings <= o_mappings);
            assert forall|m: Mapping| #[trigger]
                o_mappings.contains(m) implies l_mappings.contains(m) by {
                assert(left.mappings.contains(m));
            };
            assert(o_mappings <= l_mappings);
            assert(o_mappings == l_mappings);
        }

        assert forall|va: usize| va >= vaddr + len implies original.addr_transl(va)
            == right.addr_transl(va) by {
            let split_end = vaddr + len;

            let o_mappings = original.mappings.filter(
                |m: Mapping| m.va_range.start <= va < m.va_range.end,
            );
            let r_mappings = right.mappings.filter(
                |m: Mapping| m.va_range.start <= va < m.va_range.end,
            );

            assert forall|m: Mapping| o_mappings.contains(m) implies r_mappings.contains(m) by {
                assert(right.mappings.contains(m));
                assert(r_mappings.contains(m));
            }

            assert(o_mappings == r_mappings);
        }
    }

    pub open spec fn join_spec(self, other: MemView) -> MemView {
        MemView {
            mappings: self.mappings.union(other.mappings),
            memory: self.memory.union_prefer_right(other.memory),
        }
    }
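
    // Worked round trip (sketch): if `(l, r) == v.split_spec(va, n)` and the
    // hypotheses of `lemma_split_join_identity` below hold, then
    // `l.join_spec(r)` has the same `mappings` and `memory` as `v`.
    // `union_prefer_right` is harmless here because split only ever shares a
    // frame between the halves with identical contents, both being
    // restrictions of the same `self.memory`.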

    #[verifier::external_body]
    pub proof fn join(tracked &mut self, tracked other: Self)
        requires
            old(self).mappings.disjoint(other.mappings),
        ensures
            *final(self) == old(self).join_spec(other),
    {
        unimplemented!()
    }

    pub proof fn lemma_split_join_identity(
        this: MemView,
        lhs: MemView,
        rhs: MemView,
        vaddr: usize,
        len: usize,
    )
        requires
            this.split_spec(vaddr, len) == (lhs, rhs),
            forall|m: Mapping|
                #[trigger] this.mappings.contains(m) ==> vaddr <= m.va_range.start < m.va_range.end,
            forall|pa: Paddr|
                #[trigger] this.memory.contains_key(pa) ==> exists|va: usize|
                    vaddr <= va && #[trigger] this.is_mapped(va, pa),
        ensures
            this.mappings == lhs.join_spec(rhs).mappings,
            this.memory == lhs.join_spec(rhs).memory,
    {
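        // No manual steps are needed: the postconditions follow by unfolding
        // split_spec and join_spec (see the round-trip note above join_spec).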
    }
}

impl Inv for VirtPtr {
    open spec fn inv(self) -> bool {
        &&& self.range@.start <= self.vaddr <= self.range@.end
        &&& self.range@.end >= self.range@.start
    }
}

impl Clone for VirtPtr {
    fn clone(&self) -> (res: Self)
        ensures
            res == self,
    {
        Self { vaddr: self.vaddr, range: Ghost(self.range@) }
    }
}

impl Copy for VirtPtr {}

impl VirtPtr {
    #[vstd::contrib::auto_spec]
    pub fn new(vaddr: Vaddr, len: usize) -> Self {
        Self { vaddr, range: Ghost(Range { start: vaddr, end: (vaddr + len) as usize }) }
    }

    pub open spec fn is_defined(self) -> bool {
        &&& self.vaddr != 0
        &&& self.range@.start <= self.vaddr <= self.range@.end
    }

    pub open spec fn is_valid(self) -> bool {
        &&& self.is_defined()
        &&& self.vaddr < self.range@.end
    }

    #[verifier::external_body]
    pub fn read(self, Tracked(mem): Tracked<&MemView>) -> u8
        requires
            mem.addr_transl(self.vaddr) is Some,
            mem.memory[mem.addr_transl(self.vaddr).unwrap().0]
                .contents[mem.addr_transl(self.vaddr).unwrap().1 as int] is Init,
            self.is_valid(),
        returns
            mem.read(self.vaddr).value(),
    {
        unimplemented!()
    }

    #[verifier::external_body]
    pub fn write(self, Tracked(mem): Tracked<&mut MemView>, x: u8)
        requires
            old(mem).addr_transl(self.vaddr) is Some,
            self.is_valid(),
        ensures
            *final(mem) == old(mem).write(self.vaddr, x),
    {
        unimplemented!()
    }

    pub open spec fn add_spec(self, n: usize) -> Self {
        VirtPtr { vaddr: (self.vaddr + n) as usize, range: self.range }
    }

    pub fn add(&mut self, n: usize)
        requires
            0 <= old(self).vaddr + n < usize::MAX,
        ensures
            *final(self) == old(self).add_spec(n),
    {
        self.vaddr = self.vaddr + n;
    }

    pub open spec fn wrapping_add_spec(self, n: usize) -> Self {
        VirtPtr { vaddr: self.vaddr.wrapping_add(n), range: self.range }
    }

    pub fn wrapping_add(self, n: usize) -> (res: Self)
        returns
            self.wrapping_add_spec(n),
    {
        VirtPtr { vaddr: self.vaddr.wrapping_add(n), range: self.range }
    }

    pub open spec fn read_offset_spec(self, mem: MemView, n: usize) -> u8 {
        mem.read((self.vaddr + n) as usize).value()
    }

    pub fn read_offset(&self, Tracked(mem): Tracked<&MemView>, n: usize) -> u8
        requires
            0 < self.vaddr + n < usize::MAX,
            self.range@.start <= self.vaddr + n < self.range@.end,
            mem.addr_transl((self.vaddr + n) as usize) is Some,
            mem.memory[mem.addr_transl((self.vaddr + n) as usize).unwrap().0]
                .contents[mem.addr_transl((self.vaddr + n) as usize).unwrap().1 as int] is Init,
        returns
            self.read_offset_spec(*mem, n),
    {
        let mut tmp = self.clone();
        tmp.add(n);
        tmp.read(Tracked(mem))
    }

    pub open spec fn write_offset_spec(self, mem: MemView, n: usize, x: u8) -> MemView {
        mem.write((self.vaddr + n) as usize, x)
    }

    pub fn write_offset(&self, Tracked(mem): Tracked<&mut MemView>, n: usize, x: u8)
        requires
            self.inv(),
            0 < self.vaddr + n < usize::MAX,
            self.range@.start <= self.vaddr + n < self.range@.end,
            old(mem).addr_transl((self.vaddr + n) as usize) is Some,
        ensures
            *final(mem) == self.write_offset_spec(*old(mem), n, x),
    {
        let mut tmp = self.clone();
        tmp.add(n);
        tmp.write(Tracked(mem), x)
    }

    pub open spec fn copy_offset_spec(src: Self, dst: Self, mem_src: MemView, mem_dst: MemView, n: usize) -> MemView {
        let x = src.read_offset_spec(mem_src, n);
        dst.write_offset_spec(mem_dst, n, x)
    }

    pub fn copy_offset(src: &Self, dst: &Self, Tracked(mem_src): Tracked<&MemView>, Tracked(mem_dst): Tracked<&mut MemView>, n: usize)
        requires
            src.inv(),
            dst.inv(),
            0 < src.vaddr + n < usize::MAX,
            0 < dst.vaddr + n < usize::MAX,
            src.range@.start <= src.vaddr + n < src.range@.end,
            mem_src.addr_transl((src.vaddr + n) as usize) is Some,
            mem_src.memory.contains_key(mem_src.addr_transl((src.vaddr + n) as usize).unwrap().0),
            mem_src.memory[mem_src.addr_transl((src.vaddr + n) as usize).unwrap().0]
                .contents[mem_src.addr_transl((src.vaddr + n) as usize).unwrap().1 as int] is Init,

            dst.range@.start <= dst.vaddr + n < dst.range@.end,
            old(mem_dst).addr_transl((dst.vaddr + n) as usize) is Some,
        ensures
            *final(mem_dst) == Self::copy_offset_spec(*src, *dst, *mem_src, *old(mem_dst), n),
            final(mem_dst).mappings == old(mem_dst).mappings,
            final(mem_dst).memory.dom() == old(mem_dst).memory.dom(),
    {
        let x = src.read_offset(Tracked(mem_src), n);
        proof { admit() }
        dst.write_offset(Tracked(mem_dst), n, x)
    }

    pub open spec fn memcpy_spec(src: Self, dst: Self, mem_src: MemView, mem_dst: MemView, n: usize) -> MemView
        decreases n,
    {
        if n == 0 {
            mem_dst
        } else {
            let mem_dst_1 = Self::copy_offset_spec(src, dst, mem_src, mem_dst, (n - 1) as usize);

            Self::memcpy_spec(src, dst, mem_src, mem_dst_1, (n - 1) as usize)
        }
    }
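
    // Unrolling (illustrative): `memcpy_spec(src, dst, ms, md, 2)` equals
    //     copy_offset_spec(src, dst, ms, copy_offset_spec(src, dst, ms, md, 1), 0)
    // i.e. byte `n - 1` is written first and byte `0` last. Since every step
    // reads from the immutable `mem_src`, the order does not affect the
    // final bytes as long as the two regions do not overlap.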

    pub fn copy_nonoverlapping(
        src: &Self,
        dst: &Self,
        Tracked(mem_src): Tracked<&MemView>,
        Tracked(mem_dst): Tracked<&mut MemView>,
        n: usize,
    )
        requires
            src.inv(),
            dst.inv(),
            src.vaddr > 0,
            dst.vaddr > 0,
            src.range@.end <= dst.range@.start || dst.range@.end <= src.range@.start,
            src.range@.start <= src.vaddr,
            src.vaddr + n <= src.range@.end,
            forall|i: usize|
                #![trigger mem_src.addr_transl(i)]
                src.vaddr <= i < src.vaddr + n ==> {
                    &&& mem_src.addr_transl(i) is Some
                    &&& mem_src.memory.contains_key(mem_src.addr_transl(i).unwrap().0)
                    &&& mem_src.memory[mem_src.addr_transl(i).unwrap().0]
                        .contents[mem_src.addr_transl(i).unwrap().1 as int] is Init
                },
            dst.range@.start <= dst.vaddr,
            dst.vaddr + n <= dst.range@.end,
            forall|i: usize|
                dst.vaddr <= i < dst.vaddr + n ==> {
                    &&& old(mem_dst).addr_transl(i) is Some
                },
        ensures
            *final(mem_dst) == Self::memcpy_spec(*src, *dst, *mem_src, *old(mem_dst), n),
            final(mem_dst).mappings == old(mem_dst).mappings,
            final(mem_dst).memory.dom() == old(mem_dst).memory.dom(),
            forall|i: usize|
                #![trigger final(mem_dst).addr_transl(i)]
                dst.vaddr <= i < dst.vaddr + n ==> {
                    &&& final(mem_dst).addr_transl(i) is Some
                },
        decreases n,
    {
        if n == 0 {
            return;
        } else {
            let ghost mem0 = *mem_dst;

            Self::copy_offset(src, dst, Tracked(mem_src), Tracked(mem_dst), n - 1);

            proof {
                assert(forall|i: usize|
                    dst.vaddr <= i < dst.vaddr + n - 1 ==>
                        mem_dst.addr_transl(i) == #[trigger] mem0.addr_transl(i));
                assert forall|i: usize|
                    dst.vaddr <= i < dst.vaddr + n - 1 implies mem_dst.addr_transl(i) is Some by {
                    assert(mem_dst.addr_transl(i) == mem0.addr_transl(i));
                }
            }

            Self::copy_nonoverlapping(src, dst, Tracked(mem_src), Tracked(mem_dst), n - 1);
        }
    }
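
    // Hypothetical usage sketch (assumes the caller already holds views
    // `mem_src` and `mem_dst` of two disjoint mapped regions):
    //     let src = VirtPtr::from_vaddr(0x1000, 16);
    //     let dst = VirtPtr::from_vaddr(0x2000, 16);
    //     VirtPtr::copy_nonoverlapping(&src, &dst, Tracked(&mem_src), Tracked(&mut mem_dst), 16);
    // Afterwards `mem_dst` equals `memcpy_spec(src, dst, mem_src, old_mem_dst, 16)`.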

    pub fn from_vaddr(vaddr: usize, len: usize) -> (r: Self)
        requires
            len <= usize::MAX - vaddr,
        ensures
            r.inv(),
            r.vaddr == vaddr,
            r.range@.start == vaddr,
            r.range@.end == (vaddr + len) as usize,
            vaddr != 0 && len > 0 ==> r.is_valid(),
    {
        Self { vaddr, range: Ghost(Range { start: vaddr, end: (vaddr + len) as usize }) }
    }

    #[verus_spec(r =>
        requires
            self.is_valid(),
            0 <= n <= self.range@.end - self.range@.start,
            self.vaddr == self.range@.start,
        ensures
            r.0.range@.start == self.range@.start,
            r.0.range@.end == self.range@.start + n,
            r.0.vaddr == self.range@.start,
            r.1.range@.start == self.range@.start + n,
            r.1.range@.end == self.range@.end,
            r.1.vaddr == self.range@.start + n,
    )]
    pub fn split(self, n: usize) -> (Self, Self) {
        let left = VirtPtr {
            vaddr: self.vaddr,
            range: Ghost(Range { start: self.vaddr, end: (self.vaddr + n) as usize }),
        };

        let right = VirtPtr {
            vaddr: self.vaddr + n,
            range: Ghost(Range { start: (self.vaddr + n) as usize, end: self.range@.end }),
        };

        (left, right)
    }
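
    // Example (hypothetical span): splitting `from_vaddr(0x1000, 0x100)` at
    // `n == 0x40` yields a left pointer over `0x1000..0x1040` and a right
    // pointer positioned at `0x1040` over `0x1040..0x1100`. Only the ghost
    // ranges differ; no exec-mode bookkeeping is required.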
}

pub tracked struct GlobalMemView {
    pub pt_mappings: Set<Mapping>,
    pub tlb_mappings: Set<Mapping>,
    pub unmapped_pas: Set<Paddr>,
    pub memory: Map<Paddr, FrameContents>,
}

impl Inv for GlobalMemView {
    open spec fn inv(self) -> bool {
        &&& forall|m: Mapping| #![auto] self.tlb_mappings.contains(m) ==> {
            &&& m.inv()
            &&& forall|pa: Paddr| m.pa_range.start <= pa < m.pa_range.end ==> {
                &&& self.memory.dom().contains(pa)
            }
            &&& self.memory.contains_key(m.pa_range.start)
            &&& self.memory[m.pa_range.start].size == m.page_size
            &&& self.memory[m.pa_range.start].inv()
        }
        &&& forall|m: Mapping, n: Mapping|
            #![trigger self.tlb_mappings.contains(m), self.tlb_mappings.contains(n)]
            self.tlb_mappings.contains(m) && self.tlb_mappings.contains(n) && m != n ==>
                m.va_range.end <= n.va_range.start || n.va_range.end <= m.va_range.start
        &&& self.tlb_mappings.finite()
        &&& self.pt_mappings.finite()
        &&& self.memory.dom().finite()
        &&& self.all_pas_accounted_for()
        &&& self.pas_uniquely_mapped()
        &&& self.unmapped_correct()
    }
}

impl GlobalMemView {
    pub open spec fn addr_transl(self, va: usize) -> Option<(usize, usize)> {
        let mappings = self.tlb_mappings.filter(|m: Mapping| m.va_range.start <= va < m.va_range.end);
        if 0 < mappings.len() {
            let m = mappings.choose();
            let off = va - m.va_range.start;
            Some((m.pa_range.start, off as usize))
        } else {
            None
        }
    }
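
    // This mirrors `MemView::addr_transl` but consults only the TLB view: a
    // page that is mapped in `pt_mappings` yet absent from `tlb_mappings`
    // translates to `None` until `tlb_soft_fault` below pulls the entry in.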

    pub open spec fn is_mapped(self, pa: usize) -> bool {
        exists|m: Mapping| self.tlb_mappings.contains(m) && m.pa_range.start <= pa < m.pa_range.end
    }

    pub open spec fn all_pas_accounted_for(self) -> bool {
        forall|pa: Paddr|
            0 <= pa < MAX_PADDR ==>
                #[trigger] self.is_mapped(pa) || #[trigger] self.unmapped_pas.contains(pa)
    }

    pub open spec fn pas_uniquely_mapped(self) -> bool {
        forall|m1: Mapping, m2: Mapping|
            #![trigger self.tlb_mappings.contains(m1), self.tlb_mappings.contains(m2)]
            self.tlb_mappings.contains(m1) && self.tlb_mappings.contains(m2) && m1 != m2 ==>
                m1.pa_range.end <= m2.pa_range.start || m2.pa_range.end <= m1.pa_range.start
    }

    pub open spec fn unmapped_correct(self) -> bool {
        forall|pa: Paddr|
            #![auto]
            self.is_mapped(pa) <==> !self.unmapped_pas.contains(pa)
    }

    pub open spec fn take_view_spec(self, vaddr: usize, len: usize) -> (Self, MemView) {
        let range_end = vaddr + len;

        let leave_mappings: Set<Mapping> = self.tlb_mappings.filter(
            |m: Mapping| m.va_range.end <= vaddr || m.va_range.start >= range_end,
        );

        let take_mappings = self.tlb_mappings.filter(
            |m: Mapping| m.va_range.start < range_end && m.va_range.end > vaddr,
        );

        let leave_pas = Set::new(
            |pa: usize|
                exists|m: Mapping| leave_mappings.contains(m) && m.pa_range.start <= pa < m.pa_range.end,
        );
        let take_pas = Set::new(
            |pa: usize|
                exists|m: Mapping| take_mappings.contains(m) && m.pa_range.start <= pa < m.pa_range.end,
        );

        (
            GlobalMemView {
                tlb_mappings: leave_mappings,
                memory: self.memory.restrict(leave_pas),
                ..self
            },
            MemView { mappings: take_mappings, memory: self.memory.restrict(take_pas) },
        )
    }
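
    // Sketch (hypothetical window): `take_view_spec(0x1000, 0x1000)` moves
    // every TLB mapping overlapping `0x1000..0x2000`, together with the
    // frames those mappings cover, into the returned `MemView`; the global
    // view keeps the complementary mappings and frames. `return_view` below
    // is the inverse, unioning both parts back in.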

    pub axiom fn take_view(tracked &mut self, vaddr: usize, len: usize) -> (tracked view: MemView)
        ensures
            *final(self) == old(self).take_view_spec(vaddr, len).0,
            view == old(self).take_view_spec(vaddr, len).1;

    pub open spec fn return_view_spec(self, view: MemView) -> Self {
        GlobalMemView {
            tlb_mappings: self.tlb_mappings.union(view.mappings),
            memory: self.memory.union_prefer_right(view.memory),
            ..self
        }
    }

    pub axiom fn return_view(tracked &mut self, tracked view: MemView)
        ensures
            *final(self) == old(self).return_view_spec(view);

    pub open spec fn tlb_flush_vaddr_spec(self, vaddr: Vaddr) -> Self {
        let tlb_mappings = self.tlb_mappings.filter(
            |m: Mapping| m.va_range.end <= vaddr || vaddr < m.va_range.start,
        );
        GlobalMemView {
            tlb_mappings,
            ..self
        }
    }

    pub axiom fn tlb_flush_vaddr(tracked &mut self, vaddr: Vaddr)
        requires
            old(self).inv(),
        ensures
            *final(self) == old(self).tlb_flush_vaddr_spec(vaddr),
            final(self).inv();

    pub open spec fn tlb_soft_fault_spec(self, vaddr: Vaddr) -> Self {
        let mapping = self.pt_mappings.filter(|m: Mapping| m.va_range.start <= vaddr < m.va_range.end).choose();
        GlobalMemView {
            tlb_mappings: self.tlb_mappings.insert(mapping),
            ..self
        }
    }
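
    // Caveat: if no page-table mapping covers `vaddr`, the filter above is
    // empty and `choose()` yields an arbitrary `Mapping`, so the soft-fault
    // transition is only meaningful when the page tables actually map `vaddr`.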

    pub axiom fn tlb_soft_fault(tracked &mut self, vaddr: Vaddr)
        requires
            old(self).inv(),
            old(self).addr_transl(vaddr) is None,
        ensures
            *final(self) == old(self).tlb_soft_fault_spec(vaddr),
            final(self).inv();

    pub open spec fn pt_map_spec(self, m: Mapping) -> Self {
        let pt_mappings = self.pt_mappings.insert(m);
        let unmapped_pas = self.unmapped_pas.difference(
            Set::new(|pa: usize| m.pa_range.start <= pa < m.pa_range.end),
        );
        GlobalMemView {
            pt_mappings,
            unmapped_pas,
            ..self
        }
    }

    pub axiom fn pt_map(tracked &mut self, m: Mapping)
        requires
            forall|pa: Paddr|
                m.pa_range.start <= pa < m.pa_range.end ==>
                    old(self).unmapped_pas.contains(pa),
            old(self).inv(),
        ensures
            *final(self) == old(self).pt_map_spec(m);

    pub open spec fn pt_unmap_spec(self, m: Mapping) -> Self {
        let pt_mappings = self.pt_mappings.remove(m);
        let unmapped_pas = self.unmapped_pas.union(
            Set::new(|pa: usize| m.pa_range.start <= pa < m.pa_range.end),
        );
        GlobalMemView {
            pt_mappings,
            unmapped_pas,
            ..self
        }
    }

    pub axiom fn pt_unmap(tracked &mut self, m: Mapping)
        requires
            old(self).pt_mappings.contains(m),
            old(self).inv(),
        ensures
            *final(self) == old(self).pt_unmap_spec(m),
            final(self).inv();

    pub proof fn lemma_va_mapping_unique(self, va: usize)
        requires
            self.inv(),
            self.addr_transl(va) is Some,
        ensures
            self.tlb_mappings.filter(|m: Mapping| m.va_range.start <= va < m.va_range.end).is_singleton(),
    {
        admit()
    }
}

} // verus!