use vstd::pervasive::arbitrary;
use vstd::prelude::*;

use vstd::layout;
use vstd::raw_ptr;
use vstd::set;
use vstd::set_lib;

use core::marker::PhantomData;
use core::ops::Range;

use crate::mm::{Paddr, Vaddr};
use crate::prelude::Inv;
use crate::specs::arch::mm::MAX_PADDR;
use crate::specs::mm::page_table::Mapping;

#[path = "virt_mem_example.rs"]
mod virt_mem_example;

verus! {

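/// A virtual-address pointer that carries a ghost `range` recording the
/// window it may address. At runtime a `VirtPtr` is just its `vaddr`; the
/// range exists only for verification.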
pub struct VirtPtr {
    pub vaddr: Vaddr,
    pub ghost range: Ghost<Range<Vaddr>>,
}

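/// Ghost contents of a single physical frame: a byte sequence `contents` of
/// length `size`, backing the `size`-aligned physical range `range` (see
/// `inv` below).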
pub struct FrameContents {
    pub contents: Seq<raw_ptr::MemContents<u8>>,
    pub ghost size: Ghost<usize>,
    pub ghost range: Ghost<Range<Paddr>>,
}

impl Inv for FrameContents {
    open spec fn inv(self) -> bool {
        &&& self.contents.len() == self.size@
        &&& self.size@ == self.range@.end - self.range@.start
        &&& self.range@.start % self.size@ == 0
        &&& self.range@.end % self.size@ == 0
        &&& self.range@.start <= self.range@.end < MAX_PADDR
    }
}

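/// A tracked, transferable view of a portion of memory: a set of
/// virtual-to-physical `mappings` plus the backing frames, keyed by each
/// frame's starting physical address.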
pub tracked struct MemView {
    pub mappings: Set<Mapping>,
    pub memory: Map<Paddr, FrameContents>,
}

impl MemView {
    pub open spec fn addr_transl(self, va: usize) -> Option<(usize, usize)> {
        let mappings = self.mappings.filter(|m: Mapping| m.va_range.start <= va < m.va_range.end);
        if 0 < mappings.len() {
            let m = mappings.choose();
            let off = va - m.va_range.start;
            Some((m.pa_range.start, off as usize))
        } else {
            None
        }
    }
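
    // Note: `choose` is deterministic on a fixed set, and when
    // `mappings_are_disjoint` holds the filtered set has at most one element
    // (cf. `GlobalMemView::lemma_va_mapping_unique`), so the translation
    // above is well-defined wherever a mapping exists.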

    pub open spec fn read(self, va: usize) -> raw_ptr::MemContents<u8> {
        let (pa, off) = self.addr_transl(va).unwrap();
        self.memory[pa].contents[off as int]
    }

    pub open spec fn write(self, va: usize, x: u8) -> Self {
        let (pa, off) = self.addr_transl(va).unwrap();
        MemView {
            memory: self.memory.insert(pa, FrameContents {
                contents: self.memory[pa].contents.update(off as int, raw_ptr::MemContents::Init(x)),
                size: self.memory[pa].size,
                range: self.memory[pa].range,
            }),
            ..self
        }
    }
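
    // A hedged sanity-check sketch (not in the original source): writing `x`
    // at `va` and reading it back yields `Init(x)`, assuming `va` translates
    // and the translated offset indexes into the frame (so the `Seq::update`
    // axiom applies).
    proof fn example_read_after_write(mem: MemView, va: usize, x: u8)
        requires
            mem.addr_transl(va) is Some,
            mem.addr_transl(va).unwrap().1
                < mem.memory[mem.addr_transl(va).unwrap().0].contents.len(),
        ensures
            mem.write(va, x).read(va) == raw_ptr::MemContents::Init(x),
    {
        // `write` leaves `mappings` untouched, so translation is unchanged.
        assert(mem.write(va, x).addr_transl(va) == mem.addr_transl(va));
    }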

    pub open spec fn eq_at(self, va1: usize, va2: usize) -> bool {
        let (pa1, off1) = self.addr_transl(va1).unwrap();
        let (pa2, off2) = self.addr_transl(va2).unwrap();
        self.memory[pa1].contents[off1 as int] == self.memory[pa2].contents[off2 as int]
    }

    pub open spec fn is_mapped(self, va: usize, pa: usize) -> bool {
        self.addr_transl(va) is Some && self.addr_transl(va).unwrap().0 == pa
    }

    pub open spec fn borrow_at_spec(&self, vaddr: usize, len: usize) -> MemView {
        let range_end = vaddr + len;

        let valid_pas = Set::new(
            |pa: usize|
                exists|va: usize|
                    vaddr <= va < range_end && #[trigger] self.is_mapped(va, pa),
        );

        MemView {
            mappings: self.mappings.filter(
                |m: Mapping| m.va_range.start < range_end && m.va_range.end > vaddr,
            ),
            memory: self.memory.restrict(valid_pas),
        }
    }

    pub open spec fn mappings_are_disjoint(self) -> bool {
        forall|m1: Mapping, m2: Mapping|
            #![trigger self.mappings.contains(m1), self.mappings.contains(m2)]
            self.mappings.contains(m1) && self.mappings.contains(m2) && m1 != m2 ==> {
                m1.va_range.end <= m2.va_range.start || m2.va_range.end <= m1.va_range.start
            }
    }

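    /// Splits this view at `vaddr + len`: the left half keeps the mappings
    /// overlapping `[vaddr, vaddr + len)` together with the frames they
    /// reach; the right half keeps the mappings extending past the split
    /// point. A mapping straddling the split point lands in both halves'
    /// mapping sets.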
    pub open spec fn split_spec(self, vaddr: usize, len: usize) -> (MemView, MemView) {
        let split_end = vaddr + len;

        let left_mappings = self.mappings.filter(
            |m: Mapping| m.va_range.start < split_end && m.va_range.end > vaddr,
        );
        let right_mappings = self.mappings.filter(|m: Mapping| m.va_range.end > split_end);

        let left_pas = Set::new(
            |pa: usize|
                exists|va: usize| vaddr <= va < split_end && self.is_mapped(va, pa),
        );
        let right_pas = Set::new(
            |pa: usize| exists|va: usize| va >= split_end && self.is_mapped(va, pa),
        );

        (
            MemView { mappings: left_mappings, memory: self.memory.restrict(left_pas) },
            MemView { mappings: right_mappings, memory: self.memory.restrict(right_pas) },
        )
    }

    #[verifier::external_body]
    pub proof fn borrow_at(tracked &self, vaddr: usize, len: usize) -> (tracked r: &MemView)
        ensures
            r == self.borrow_at_spec(vaddr, len),
    {
        unimplemented!()
    }

    #[verifier::external_body]
    pub proof fn split(tracked self, vaddr: usize, len: usize) -> (tracked r: (Self, Self))
        ensures
            r == self.split_spec(vaddr, len),
    {
        unimplemented!()
    }

    pub proof fn lemma_split_preserves_transl(
        original: MemView,
        vaddr: usize,
        len: usize,
        left: MemView,
        right: MemView,
    )
        requires
            original.split_spec(vaddr, len) == (left, right),
        ensures
            right.memory.dom().subset_of(original.memory.dom()),
            forall|va: usize|
                vaddr <= va < vaddr + len ==> {
                    #[trigger] original.addr_transl(va) == left.addr_transl(va)
                },
            forall|va: usize|
                va >= vaddr + len ==> {
                    #[trigger] original.addr_transl(va) == right.addr_transl(va)
                },
    {
        assert(right.memory.dom().subset_of(original.memory.dom()));

        assert forall|va: usize| vaddr <= va < vaddr + len implies original.addr_transl(va)
            == left.addr_transl(va) by {
            assert(left.mappings =~= original.mappings.filter(
                |m: Mapping| m.va_range.start < vaddr + len && m.va_range.end > vaddr,
            ));
            let o_mappings = original.mappings.filter(
                |m: Mapping| m.va_range.start <= va < m.va_range.end,
            );
            let l_mappings = left.mappings.filter(
                |m: Mapping| m.va_range.start <= va < m.va_range.end,
            );

            assert(l_mappings.subset_of(o_mappings));
            assert(o_mappings.subset_of(l_mappings)) by {
                assert forall|m: Mapping| #[trigger]
                    o_mappings.contains(m) implies l_mappings.contains(m) by {
                    // `o_mappings.contains(m)` is already the hypothesis of
                    // this `assert forall`; the redundant `assume` that stood
                    // here has been dropped.
                    assert(m.va_range.start < vaddr + len);
                    assert(m.va_range.end > vaddr);
                    assert(m.va_range.start <= va < m.va_range.end);
                    assert(left.mappings.contains(m));
                }
            };

            assert(o_mappings =~= l_mappings);
        }

        assert forall|va: usize| va >= vaddr + len implies original.addr_transl(va)
            == right.addr_transl(va) by {
            let split_end = vaddr + len;

            let o_mappings = original.mappings.filter(
                |m: Mapping| m.va_range.start <= va < m.va_range.end,
            );
            let r_mappings = right.mappings.filter(
                |m: Mapping| m.va_range.start <= va < m.va_range.end,
            );

            assert forall|m: Mapping| o_mappings.contains(m) implies r_mappings.contains(m) by {
                assert(m.va_range.end > va);
                assert(va >= split_end);
                assert(m.va_range.end > split_end);

                assert(right.mappings.contains(m));
                assert(r_mappings.contains(m));
            }

            assert(o_mappings =~= r_mappings);
        }
    }

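    /// Joins two views by unioning their mapping sets and frame maps,
    /// preferring `other`'s frames where the domains overlap. The tracked
    /// `join` below additionally requires the mapping sets to be disjoint.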
    pub open spec fn join_spec(self, other: MemView) -> MemView {
        MemView {
            mappings: self.mappings.union(other.mappings),
            memory: self.memory.union_prefer_right(other.memory),
        }
    }

    #[verifier::external_body]
    pub proof fn join(tracked &mut self, tracked other: Self)
        requires
            old(self).mappings.disjoint(other.mappings),
        ensures
            *self == old(self).join_spec(other),
    {
        unimplemented!()
    }

    pub proof fn lemma_split_join_identity(
        this: MemView,
        lhs: MemView,
        rhs: MemView,
        vaddr: usize,
        len: usize,
    )
        requires
            this.split_spec(vaddr, len) == (lhs, rhs),
            forall|m: Mapping|
                #[trigger] this.mappings.contains(m) ==> vaddr <= m.va_range.start < m.va_range.end,
            forall|pa: Paddr|
                #[trigger] this.memory.contains_key(pa) ==> exists|va: usize|
                    vaddr <= va && #[trigger] this.is_mapped(va, pa),
        ensures
            this.mappings =~= lhs.join_spec(rhs).mappings,
            this.memory =~= lhs.join_spec(rhs).memory,
    {
    }
}

impl Inv for VirtPtr {
    open spec fn inv(self) -> bool {
        &&& self.range@.start <= self.vaddr <= self.range@.end
        &&& self.range@.start > 0
        &&& self.range@.end >= self.range@.start
    }
}

impl Clone for VirtPtr {
    fn clone(&self) -> (res: Self)
        ensures
            res == self,
    {
        Self { vaddr: self.vaddr, range: Ghost(self.range@) }
    }
}

impl Copy for VirtPtr {}

impl VirtPtr {
    pub open spec fn new_spec(vaddr: Vaddr, len: usize) -> Self {
        Self { vaddr, range: Ghost(Range { start: vaddr, end: (vaddr + len) as usize }) }
    }

    pub fn new(vaddr: Vaddr, len: usize) -> Self
        returns Self::new_spec(vaddr, len),
    {
        Self { vaddr, range: Ghost(Range { start: vaddr, end: (vaddr + len) as usize }) }
    }

    pub open spec fn is_defined(self) -> bool {
        &&& self.vaddr != 0
        &&& self.range@.start <= self.vaddr <= self.range@.end
    }

    pub open spec fn is_valid(self) -> bool {
        &&& self.is_defined()
        &&& self.vaddr < self.range@.end
    }

    #[verifier::external_body]
    pub fn read(self, Tracked(mem): Tracked<&MemView>) -> u8
        requires
            mem.addr_transl(self.vaddr) is Some,
            mem.memory[mem.addr_transl(self.vaddr).unwrap().0]
                .contents[mem.addr_transl(self.vaddr).unwrap().1 as int] is Init,
            self.is_valid(),
        returns
            mem.read(self.vaddr).value(),
    {
        unimplemented!()
    }

    #[verifier::external_body]
    pub fn write(self, Tracked(mem): Tracked<&mut MemView>, x: u8)
        requires
            old(mem).addr_transl(self.vaddr) is Some,
            self.is_valid(),
        ensures
            *mem == old(mem).write(self.vaddr, x),
    {
        unimplemented!()
    }

    pub open spec fn add_spec(self, n: usize) -> Self {
        VirtPtr { vaddr: (self.vaddr + n) as usize, range: self.range }
    }

    pub fn add(&mut self, n: usize)
        requires
            old(self).vaddr + n < usize::MAX,
        ensures
            *self == old(self).add_spec(n),
    {
        self.vaddr = self.vaddr + n;
    }
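
    // A hedged usage sketch (not in the original source): build a pointer
    // over a 16-byte window and advance it by 8. `add_spec` leaves the ghost
    // `range` unchanged, so the cursor stays strictly inside the window and
    // `is_valid` is preserved.
    fn example_advance(va: Vaddr) -> (r: VirtPtr)
        requires
            0 < va,
            va + 16 < usize::MAX,
        ensures
            r.is_valid(),
            r.vaddr == va + 8,
    {
        let mut p = VirtPtr::new(va, 16);
        p.add(8);
        p
    }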

    pub open spec fn read_offset_spec(self, mem: MemView, n: usize) -> u8 {
        mem.read((self.vaddr + n) as usize).value()
    }

    pub fn read_offset(&self, Tracked(mem): Tracked<&MemView>, n: usize) -> u8
        requires
            0 < self.vaddr + n < usize::MAX,
            self.range@.start <= self.vaddr + n < self.range@.end,
            mem.addr_transl((self.vaddr + n) as usize) is Some,
            mem.memory[mem.addr_transl((self.vaddr + n) as usize).unwrap().0]
                .contents[mem.addr_transl((self.vaddr + n) as usize).unwrap().1 as int] is Init,
        returns
            self.read_offset_spec(*mem, n),
    {
        let mut tmp = self.clone();
        tmp.add(n);
        tmp.read(Tracked(mem))
    }

    pub open spec fn write_offset_spec(self, mem: MemView, n: usize, x: u8) -> MemView {
        mem.write((self.vaddr + n) as usize, x)
    }

    // NOTE: no `ensures` is stated here, which is why `copy_offset` below
    // has to admit its postcondition.
    pub fn write_offset(&self, Tracked(mem): Tracked<&mut MemView>, n: usize, x: u8)
        requires
            self.inv(),
            self.range@.start <= self.vaddr + n < self.range@.end,
            old(mem).addr_transl((self.vaddr + n) as usize) is Some,
    {
        let mut tmp = self.clone();
        tmp.add(n);
        tmp.write(Tracked(mem), x)
    }

    pub open spec fn copy_offset_spec(src: Self, dst: Self, mem_src: MemView, mem_dst: MemView, n: usize) -> MemView {
        let x = src.read_offset_spec(mem_src, n);
        dst.write_offset_spec(mem_dst, n, x)
    }

    pub fn copy_offset(src: &Self, dst: &Self, Tracked(mem_src): Tracked<&MemView>, Tracked(mem_dst): Tracked<&mut MemView>, n: usize)
        requires
            src.inv(),
            dst.inv(),
            src.range@.start <= src.vaddr + n < src.range@.end,
            mem_src.addr_transl((src.vaddr + n) as usize) is Some,
            mem_src.memory.contains_key(mem_src.addr_transl((src.vaddr + n) as usize).unwrap().0),
            mem_src.memory[mem_src.addr_transl((src.vaddr + n) as usize).unwrap().0]
                .contents[mem_src.addr_transl((src.vaddr + n) as usize).unwrap().1 as int] is Init,

            dst.range@.start <= dst.vaddr + n < dst.range@.end,
            old(mem_dst).addr_transl((dst.vaddr + n) as usize) is Some,
        ensures
            *mem_dst == Self::copy_offset_spec(*src, *dst, *mem_src, *old(mem_dst), n),
            mem_dst.mappings == old(mem_dst).mappings,
            mem_dst.memory.dom() == old(mem_dst).memory.dom(),
    {
        let x = src.read_offset(Tracked(mem_src), n);
        // `write_offset` exposes no postcondition, so the ensures above
        // cannot be derived from the call; the remainder is admitted.
        proof { admit() }
        dst.write_offset(Tracked(mem_dst), n, x)
    }

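    /// Spec-level memcpy: folds `copy_offset_spec` over offsets `n - 1` down
    /// to `0`, mirroring the recursion order of `copy_nonoverlapping` below.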
    pub open spec fn memcpy_spec(src: Self, dst: Self, mem_src: MemView, mem_dst: MemView, n: usize) -> MemView
        decreases n,
    {
        if n == 0 {
            mem_dst
        } else {
            let mem_dst_1 = Self::copy_offset_spec(src, dst, mem_src, mem_dst, (n - 1) as usize);

            Self::memcpy_spec(src, dst, mem_src, mem_dst_1, (n - 1) as usize)
        }
    }

    pub fn copy_nonoverlapping(
        src: &Self,
        dst: &Self,
        Tracked(mem_src): Tracked<&MemView>,
        Tracked(mem_dst): Tracked<&mut MemView>,
        n: usize,
    )
        requires
            src.inv(),
            dst.inv(),
            src.range@.end <= dst.range@.start || dst.range@.end <= src.range@.start,
            src.range@.start <= src.vaddr,
            src.vaddr + n <= src.range@.end,
            forall|i: usize|
                #![trigger mem_src.addr_transl(i)]
                src.vaddr <= i < src.vaddr + n ==> {
                    &&& mem_src.addr_transl(i) is Some
                    &&& mem_src.memory.contains_key(mem_src.addr_transl(i).unwrap().0)
                    &&& mem_src.memory[mem_src.addr_transl(i).unwrap().0].contents[mem_src.addr_transl(i).unwrap().1 as int] is Init
                },
            dst.range@.start <= dst.vaddr,
            dst.vaddr + n <= dst.range@.end,
            forall|i: usize|
                dst.vaddr <= i < dst.vaddr + n ==> {
                    &&& old(mem_dst).addr_transl(i) is Some
                },
        ensures
            *mem_dst == Self::memcpy_spec(*src, *dst, *mem_src, *old(mem_dst), n),
            mem_dst.mappings == old(mem_dst).mappings,
            mem_dst.memory.dom() == old(mem_dst).memory.dom(),
            forall|i: usize|
                #![trigger mem_dst.addr_transl(i)]
                dst.vaddr <= i < dst.vaddr + n ==> {
                    &&& mem_dst.addr_transl(i) is Some
                },
        decreases n,
    {
        if n == 0 {
            return;
        } else {
            let ghost mem0 = *mem_dst;

            Self::copy_offset(src, dst, Tracked(mem_src), Tracked(mem_dst), n - 1);

            proof {
                assert(forall|i: usize|
                    dst.vaddr <= i < dst.vaddr + n - 1 ==>
                        mem_dst.addr_transl(i) == mem0.addr_transl(i)
                );
            }

            Self::copy_nonoverlapping(src, dst, Tracked(mem_src), Tracked(mem_dst), n - 1);
        }
    }

    pub fn from_vaddr(vaddr: usize, len: usize) -> (r: Self)
        requires
            vaddr != 0,
            0 < len <= usize::MAX - vaddr,
        ensures
            r.is_valid(),
            r.vaddr == vaddr,
            r.range@.start == vaddr,
            r.range@.end == (vaddr + len) as usize,
    {
        Self { vaddr, range: Ghost(Range { start: vaddr, end: (vaddr + len) as usize }) }
    }

    #[verus_spec(r =>
        requires
            self.is_valid(),
            0 <= n <= self.range@.end - self.range@.start,
            self.vaddr == self.range@.start,
        ensures
            r.0.range@.start == self.range@.start,
            r.0.range@.end == self.range@.start + n,
            r.0.vaddr == self.range@.start,
            r.1.range@.start == self.range@.start + n,
            r.1.range@.end == self.range@.end,
            r.1.vaddr == self.range@.start + n,
    )]
    pub fn split(self, n: usize) -> (Self, Self) {
        let left = VirtPtr {
            vaddr: self.vaddr,
            range: Ghost(Range { start: self.vaddr, end: (self.vaddr + n) as usize }),
        };

        let right = VirtPtr {
            vaddr: self.vaddr + n,
            range: Ghost(Range { start: (self.vaddr + n) as usize, end: self.range@.end }),
        };

        (left, right)
    }
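
    // A hedged usage sketch (not in the original source): carve a 16-byte
    // pointer into an 8/8 pair. The `#[verus_spec]` contract above pins both
    // halves' ranges and cursors.
    fn example_split(va: Vaddr) -> (r: (VirtPtr, VirtPtr))
        requires
            0 < va,
            va + 16 < usize::MAX,
        ensures
            r.0.vaddr == va && r.0.range@.end == va + 8,
            r.1.vaddr == va + 8 && r.1.range@.end == va + 16,
    {
        let p = VirtPtr::new(va, 16);
        p.split(8)
    }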
}

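/// The machine-wide memory view. Unlike `MemView`, it distinguishes the
/// mappings installed in the page table (`pt_mappings`) from those cached in
/// the TLB (`tlb_mappings`), and additionally tracks the set of physical
/// addresses currently covered by no mapping (`unmapped_pas`).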
pub tracked struct GlobalMemView {
    pub pt_mappings: Set<Mapping>,
    pub tlb_mappings: Set<Mapping>,
    pub unmapped_pas: Set<Paddr>,
    pub memory: Map<Paddr, FrameContents>,
}

impl Inv for GlobalMemView {
    open spec fn inv(self) -> bool {
        &&& forall|m: Mapping| #![auto] self.tlb_mappings.contains(m) ==> {
            &&& m.inv()
            &&& forall|pa: Paddr| m.pa_range.start <= pa < m.pa_range.end ==> {
                &&& self.memory.dom().contains(pa)
            }
            &&& self.memory.contains_key(m.pa_range.start)
            &&& self.memory[m.pa_range.start].size == m.page_size
            &&& self.memory[m.pa_range.start].inv()
        }
        &&& forall|m: Mapping, n: Mapping|
            #![trigger self.tlb_mappings.contains(m), self.tlb_mappings.contains(n)]
            self.tlb_mappings.contains(m) && self.tlb_mappings.contains(n) && m != n ==>
                m.va_range.end <= n.va_range.start || n.va_range.end <= m.va_range.start
        &&& self.tlb_mappings.finite()
        &&& self.pt_mappings.finite()
        &&& self.memory.dom().finite()
        &&& self.all_pas_accounted_for()
        &&& self.pas_uniquely_mapped()
        &&& self.unmapped_correct()
    }
}

impl GlobalMemView {

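    /// Address translation consults only the TLB view: a virtual address
    /// translates iff some cached mapping covers it.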
    pub open spec fn addr_transl(self, va: usize) -> Option<(usize, usize)> {
        let mappings = self.tlb_mappings.filter(|m: Mapping| m.va_range.start <= va < m.va_range.end);
        if 0 < mappings.len() {
            let m = mappings.choose();
            let off = va - m.va_range.start;
            Some((m.pa_range.start, off as usize))
        } else {
            None
        }
    }

    pub open spec fn is_mapped(self, pa: usize) -> bool {
        exists|m: Mapping| self.tlb_mappings.contains(m) && m.pa_range.start <= pa < m.pa_range.end
    }

    pub open spec fn all_pas_accounted_for(self) -> bool {
        forall|pa: Paddr|
            0 <= pa < MAX_PADDR ==>
                #[trigger] self.is_mapped(pa) || #[trigger] self.unmapped_pas.contains(pa)
    }

    pub open spec fn pas_uniquely_mapped(self) -> bool {
        forall|m1: Mapping, m2: Mapping|
            #[trigger] self.tlb_mappings.contains(m1) && #[trigger] self.tlb_mappings.contains(m2) && m1 != m2 ==>
                m1.pa_range.end <= m2.pa_range.start || m2.pa_range.end <= m1.pa_range.start
    }

    pub open spec fn unmapped_correct(self) -> bool {
        forall|pa: Paddr|
            #![auto]
            self.is_mapped(pa) <==> !self.unmapped_pas.contains(pa)
    }

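    /// Carves the TLB mappings overlapping `[vaddr, vaddr + len)` (together
    /// with the frames they cover) out of the global view, returning the
    /// trimmed global view and the extracted `MemView`.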
    pub open spec fn take_view_spec(self, vaddr: usize, len: usize) -> (Self, MemView) {
        let range_end = vaddr + len;

        // Exact complement of `take_mappings`: mappings entirely below or
        // entirely at/above the taken range. (The original `> range_end`
        // dropped mappings starting exactly at `range_end` from both sets.)
        let leave_mappings: Set<Mapping> = self.tlb_mappings.filter(
            |m: Mapping| m.va_range.end <= vaddr || m.va_range.start >= range_end,
        );

        let take_mappings = self.tlb_mappings.filter(
            |m: Mapping| m.va_range.start < range_end && m.va_range.end > vaddr,
        );

        let leave_pas = Set::new(
            |pa: usize|
                exists|m: Mapping| leave_mappings.contains(m) && m.pa_range.start <= pa < m.pa_range.end,
        );
        let take_pas = Set::new(
            |pa: usize|
                exists|m: Mapping| take_mappings.contains(m) && m.pa_range.start <= pa < m.pa_range.end,
        );

        (
            GlobalMemView {
                tlb_mappings: leave_mappings,
                memory: self.memory.restrict(leave_pas),
                ..self
            },
            MemView { mappings: take_mappings, memory: self.memory.restrict(take_pas) },
        )
    }

    pub axiom fn take_view(tracked &mut self, vaddr: usize, len: usize) -> (tracked view: MemView)
        ensures
            *self == old(self).take_view_spec(vaddr, len).0,
            view == old(self).take_view_spec(vaddr, len).1;

    pub open spec fn return_view_spec(self, view: MemView) -> Self {
        GlobalMemView {
            tlb_mappings: self.tlb_mappings.union(view.mappings),
            memory: self.memory.union_prefer_right(view.memory),
            ..self
        }
    }

    // `tracked` mirrors `take_view`, so returning a view consumes it rather
    // than passing a duplicable ghost copy.
    pub axiom fn return_view(tracked &mut self, tracked view: MemView)
        ensures
            *self == old(self).return_view_spec(view);

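    // A hedged borrow/return sketch (not in the original source): take the
    // sub-view covering one byte at `va`, write through it with a `VirtPtr`,
    // then merge it back. The `requires` restate the preconditions of
    // `VirtPtr::new` and `VirtPtr::write` for the taken view.
    fn example_take_write_return(
        Tracked(gmem): Tracked<&mut GlobalMemView>,
        va: Vaddr,
        x: u8,
    )
        requires
            0 < va,
            va < usize::MAX,
            old(gmem).take_view_spec(va, 1).1.addr_transl(va) is Some,
    {
        let tracked mut view: MemView;
        proof {
            view = gmem.take_view(va, 1);
        }
        let p = VirtPtr::new(va, 1);
        p.write(Tracked(&mut view), x);
        proof {
            gmem.return_view(view);
        }
    }
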
    pub open spec fn tlb_flush_vaddr_spec(self, vaddr: Vaddr) -> Self {
        let tlb_mappings = self.tlb_mappings.filter(
            |m: Mapping| m.va_range.end <= vaddr || vaddr < m.va_range.start,
        );
        GlobalMemView {
            tlb_mappings,
            ..self
        }
    }

    pub axiom fn tlb_flush_vaddr(&mut self, vaddr: Vaddr)
        requires
            old(self).inv(),
        ensures
            *self == old(self).tlb_flush_vaddr_spec(vaddr),
            self.inv();

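    /// Simulates a TLB fill after a miss: pick (via `choose`) a page-table
    /// mapping covering `vaddr` and cache it in the TLB.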
    pub open spec fn tlb_soft_fault_spec(self, vaddr: Vaddr) -> Self {
        let mapping = self.pt_mappings.filter(|m: Mapping| m.va_range.start <= vaddr < m.va_range.end).choose();
        GlobalMemView {
            tlb_mappings: self.tlb_mappings.insert(mapping),
            ..self
        }
    }

    pub axiom fn tlb_soft_fault(tracked &mut self, vaddr: Vaddr)
        requires
            old(self).inv(),
            old(self).addr_transl(vaddr) is None,
        ensures
            *self == old(self).tlb_soft_fault_spec(vaddr),
            self.inv();

    pub open spec fn pt_map_spec(self, m: Mapping) -> Self {
        let pt_mappings = self.pt_mappings.insert(m);
        let unmapped_pas = self.unmapped_pas.difference(
            Set::new(|pa: usize| m.pa_range.start <= pa < m.pa_range.end),
        );
        GlobalMemView {
            pt_mappings,
            unmapped_pas,
            ..self
        }
    }

    pub axiom fn pt_map(&mut self, m: Mapping)
        requires
            forall|pa: Paddr|
                m.pa_range.start <= pa < m.pa_range.end ==>
                    old(self).unmapped_pas.contains(pa),
            old(self).inv(),
        ensures
            *self == old(self).pt_map_spec(m);

    pub open spec fn pt_unmap_spec(self, m: Mapping) -> Self {
        let pt_mappings = self.pt_mappings.remove(m);
        let unmapped_pas = self.unmapped_pas.union(
            Set::new(|pa: usize| m.pa_range.start <= pa < m.pa_range.end),
        );
        GlobalMemView {
            pt_mappings,
            unmapped_pas,
            ..self
        }
    }

    pub axiom fn pt_unmap(&mut self, m: Mapping)
        requires
            old(self).pt_mappings.contains(m),
            old(self).inv(),
        ensures
            *self == old(self).pt_unmap_spec(m),
            self.inv();

    pub proof fn lemma_va_mapping_unique(self, va: usize)
        requires
            self.inv(),
            self.addr_transl(va) is Some,
        ensures
            self.tlb_mappings.filter(|m: Mapping| m.va_range.start <= va < m.va_range.end).is_singleton(),
    {
        // Admitted: the disjointness conjunct of `inv` bounds the filtered
        // set at one element, and `addr_transl(va) is Some` (added here,
        // since an unmapped `va` yields an empty filter) makes it non-empty.
        admit()
    }
}

} // verus!