use vstd::pervasive::arbitrary;
use vstd::prelude::*;

use vstd::layout;
use vstd::raw_ptr;
use vstd::set;
use vstd::set_lib;

use core::marker::PhantomData;
use core::ops::Range;

use crate::mm::{Paddr, Vaddr};
use crate::prelude::Inv;
use crate::specs::arch::mm::MAX_PADDR;
use crate::specs::mm::page_table::Mapping;

#[path = "virt_mem_example.rs"]
mod virt_mem_example;

verus! {

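/// A pointer into virtual memory together with a ghost range recording the
/// virtual address interval the pointer is allowed to access.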
pub struct VirtPtr {
    pub vaddr: Vaddr,
    pub ghost range: Ghost<Range<Vaddr>>,
}

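/// The contents of one physical frame: a byte sequence plus the ghost size and
/// physical address range the frame occupies.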
pub struct FrameContents {
    pub contents: Seq<raw_ptr::MemContents<u8>>,
    pub ghost size: Ghost<usize>,
    pub ghost range: Ghost<Range<Paddr>>,
}

impl Inv for FrameContents {
    open spec fn inv(self) -> bool {
        &&& self.contents.len() == self.size@
        &&& self.size@ == self.range@.end - self.range@.start
        &&& self.range@.start % self.size@ == 0
        &&& self.range@.end % self.size@ == 0
        &&& self.range@.start <= self.range@.end < MAX_PADDR()
    }
}

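/// A local view of memory: a set of virtual-to-physical mappings together with
/// the contents of the physical frames reachable through them.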
pub tracked struct MemView {
    pub mappings: Set<Mapping>,
    pub memory: Map<Paddr, FrameContents>,
}

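/// The machine-wide view of memory: page-table mappings, the mappings currently
/// cached in the TLB, the set of unmapped physical addresses, and frame contents.
/// Local `MemView`s are carved out of this view with `take_view` and merged back
/// with `return_view`.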
pub tracked struct GlobalMemView {
    pub pt_mappings: Set<Mapping>,
    pub tlb_mappings: Set<Mapping>,
    pub unmapped_pas: Set<Paddr>,
    pub memory: Map<Paddr, FrameContents>,
}

impl MemView {
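    /// Translates a virtual address into `(frame base, offset)` using a mapping
    /// of this view whose virtual range contains `va`; the offset is relative to
    /// the start of that mapping. Returns `None` if no mapping covers `va`.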
    pub open spec fn addr_transl(self, va: usize) -> Option<(usize, usize)> {
        let mappings = self.mappings.filter(|m: Mapping| m.va_range.start <= va < m.va_range.end);
        if 0 < mappings.len() {
            let m = mappings.choose();
            let off = va - m.va_range.start;
            Some((m.pa_range.start, off as usize))
        } else {
            None
        }
    }

    pub open spec fn read(self, va: usize) -> raw_ptr::MemContents<u8> {
        let (pa, off) = self.addr_transl(va).unwrap();
        self.memory[pa].contents[off as int]
    }

    pub open spec fn write(self, va: usize, x: u8) -> Self {
        let (pa, off) = self.addr_transl(va).unwrap();
        MemView {
            memory: self.memory.insert(pa, FrameContents {
                contents: self.memory[pa].contents.update(off as int, raw_ptr::MemContents::Init(x)),
                size: self.memory[pa].size,
                range: self.memory[pa].range,
            }),
            ..self
        }
    }

    pub open spec fn eq_at(self, va1: usize, va2: usize) -> bool {
        let (pa1, off1) = self.addr_transl(va1).unwrap();
        let (pa2, off2) = self.addr_transl(va2).unwrap();
        self.memory[pa1].contents[off1 as int] == self.memory[pa2].contents[off2 as int]
    }

    pub open spec fn is_mapped(self, va: usize, pa: usize) -> bool {
        self.addr_transl(va) is Some && self.addr_transl(va).unwrap().0 == pa
    }

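    /// The sub-view covering `[vaddr, vaddr + len)`: the mappings overlapping
    /// that range and the frames reachable through them.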
    pub open spec fn borrow_at_spec(&self, vaddr: usize, len: usize) -> MemView {
        let range_end = vaddr + len;

        let valid_pas = Set::new(
            |pa: usize|
                exists|va: usize|
                    vaddr <= va < range_end && #[trigger] self.is_mapped(va, pa),
        );

        MemView {
            mappings: self.mappings.filter(
                |m: Mapping| m.va_range.start < range_end && m.va_range.end > vaddr,
            ),
            memory: self.memory.restrict(valid_pas),
        }
    }

    pub open spec fn mappings_are_disjoint(self) -> bool {
        forall|m1: Mapping, m2: Mapping|
            #![trigger self.mappings.contains(m1), self.mappings.contains(m2)]
            self.mappings.contains(m1) && self.mappings.contains(m2) && m1 != m2 ==> {
                m1.va_range.end <= m2.va_range.start || m2.va_range.end <= m1.va_range.start
            }
    }

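    /// Splits this view at `vaddr + len`: the left view keeps the mappings
    /// overlapping `[vaddr, vaddr + len)`, the right view keeps those extending
    /// past the split point, each restricted to its reachable frames.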
    pub open spec fn split_spec(self, vaddr: usize, len: usize) -> (MemView, MemView) {
        let split_end = vaddr + len;

        let left_mappings = self.mappings.filter(
            |m: Mapping| m.va_range.start < split_end && m.va_range.end > vaddr,
        );
        let right_mappings = self.mappings.filter(|m: Mapping| m.va_range.end > split_end);

        let left_pas = Set::new(
            |pa: usize|
                exists|va: usize| vaddr <= va < split_end && self.is_mapped(va, pa),
        );
        let right_pas = Set::new(
            |pa: usize| exists|va: usize| va >= split_end && self.is_mapped(va, pa),
        );

        (
            MemView { mappings: left_mappings, memory: self.memory.restrict(left_pas) },
            MemView { mappings: right_mappings, memory: self.memory.restrict(right_pas) },
        )
    }

    #[verifier::external_body]
    pub proof fn borrow_at(tracked &self, vaddr: usize, len: usize) -> (tracked r: &MemView)
        ensures
            r == self.borrow_at_spec(vaddr, len),
    {
        unimplemented!()
    }

    #[verifier::external_body]
    pub proof fn split(tracked self, vaddr: usize, len: usize) -> (tracked r: (Self, Self))
        ensures
            r == self.split_spec(vaddr, len),
    {
        unimplemented!()
    }

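    /// `split_spec` preserves address translation: addresses in
    /// `[vaddr, vaddr + len)` translate identically in the left view, and
    /// addresses at or above `vaddr + len` translate identically in the right view.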
    pub proof fn lemma_split_preserves_transl(
        original: MemView,
        vaddr: usize,
        len: usize,
        left: MemView,
        right: MemView,
    )
        requires
            original.split_spec(vaddr, len) == (left, right),
        ensures
            right.memory.dom().subset_of(original.memory.dom()),
            forall|va: usize|
                vaddr <= va < vaddr + len ==> {
                    #[trigger] original.addr_transl(va) == left.addr_transl(va)
                },
            forall|va: usize|
                va >= vaddr + len ==> {
                    #[trigger] original.addr_transl(va) == right.addr_transl(va)
                },
    {
        assert(right.memory.dom().subset_of(original.memory.dom()));

        assert forall|va: usize| vaddr <= va < vaddr + len implies original.addr_transl(va)
            == left.addr_transl(va) by {
            assert(left.mappings =~= original.mappings.filter(
                |m: Mapping| m.va_range.start < vaddr + len && m.va_range.end > vaddr,
            ));
            let o_mappings = original.mappings.filter(
                |m: Mapping| m.va_range.start <= va < m.va_range.end,
            );
            let l_mappings = left.mappings.filter(
                |m: Mapping| m.va_range.start <= va < m.va_range.end,
            );

            assert(l_mappings.subset_of(o_mappings));
            assert(o_mappings.subset_of(l_mappings)) by {
                assert forall|m: Mapping| #[trigger]
                    o_mappings.contains(m) implies l_mappings.contains(m) by {
                    assert(o_mappings.contains(m));
                    assert(m.va_range.start < vaddr + len);
                    assert(m.va_range.end > vaddr);
                    assert(m.va_range.start <= va < m.va_range.end);
                    assert(left.mappings.contains(m));
                }
            };

            assert(o_mappings =~= l_mappings);
        }

        assert forall|va: usize| va >= vaddr + len implies original.addr_transl(va)
            == right.addr_transl(va) by {
            let split_end = vaddr + len;

            let o_mappings = original.mappings.filter(
                |m: Mapping| m.va_range.start <= va < m.va_range.end,
            );
            let r_mappings = right.mappings.filter(
                |m: Mapping| m.va_range.start <= va < m.va_range.end,
            );

            assert forall|m: Mapping| o_mappings.contains(m) implies r_mappings.contains(m) by {
                assert(m.va_range.end > va);
                assert(va >= split_end);
                assert(m.va_range.end > split_end);

                assert(right.mappings.contains(m));
                assert(r_mappings.contains(m));
            }

            assert(o_mappings =~= r_mappings);
        }
    }

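    /// Joins two views by unioning their mappings and their frame maps
    /// (preferring `other` on frames present in both).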
    pub open spec fn join_spec(self, other: MemView) -> MemView {
        MemView {
            mappings: self.mappings.union(other.mappings),
            memory: self.memory.union_prefer_right(other.memory),
        }
    }

    #[verifier::external_body]
    pub proof fn join(tracked &mut self, tracked other: Self)
        requires
            old(self).mappings.disjoint(other.mappings),
        ensures
            *self == old(self).join_spec(other),
    {
        unimplemented!()
    }

    #[verifier::external_body]
    pub proof fn lemma_split_join_identity(
        this: MemView,
        lhs: MemView,
        rhs: MemView,
        vaddr: usize,
        len: usize,
    )
        requires
            this.split_spec(vaddr, len) == (lhs, rhs),
        ensures
            this == lhs.join_spec(rhs),
    {
    }
}

impl Inv for VirtPtr {
    open spec fn inv(self) -> bool {
        &&& self.range@.start <= self.vaddr <= self.range@.end
        &&& self.range@.start > 0
        &&& self.range@.end >= self.range@.start
    }
}

impl Clone for VirtPtr {
    fn clone(&self) -> (res: Self)
        ensures
            res == self,
    {
        Self { vaddr: self.vaddr, range: Ghost(self.range@) }
    }
}

impl Copy for VirtPtr {}

impl VirtPtr {
    pub open spec fn new_spec(vaddr: Vaddr, len: usize) -> Self {
        Self { vaddr, range: Ghost(Range { start: vaddr, end: (vaddr + len) as usize }) }
    }

    pub fn new(vaddr: Vaddr, len: usize) -> Self
        returns Self::new_spec(vaddr, len),
    {
        Self { vaddr, range: Ghost(Range { start: vaddr, end: (vaddr + len) as usize }) }
    }

    pub open spec fn is_defined(self) -> bool {
        &&& self.vaddr != 0
        &&& self.range@.start <= self.vaddr <= self.range@.end
    }

    pub open spec fn is_valid(self) -> bool {
        &&& self.is_defined()
        &&& self.vaddr < self.range@.end
    }

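    /// Reads the byte this pointer refers to. Requires that the address
    /// translates under `mem` and that the targeted byte is initialized.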
    #[verifier::external_body]
    pub fn read(self, Tracked(mem): Tracked<&MemView>) -> u8
        requires
            mem.addr_transl(self.vaddr) is Some,
            mem.memory[mem.addr_transl(self.vaddr).unwrap().0]
                .contents[mem.addr_transl(self.vaddr).unwrap().1 as int] is Init,
            self.is_valid(),
        returns
            mem.read(self.vaddr).value(),
    {
        unimplemented!()
    }

    #[verifier::external_body]
    pub fn write(self, Tracked(mem): Tracked<&mut MemView>, x: u8)
        requires
            old(mem).addr_transl(self.vaddr) is Some,
            self.is_valid(),
        ensures
            *mem == old(mem).write(self.vaddr, x),
    {
        unimplemented!()
    }

    pub open spec fn add_spec(self, n: usize) -> Self {
        VirtPtr { vaddr: (self.vaddr + n) as usize, range: self.range }
    }

    pub fn add(&mut self, n: usize)
        requires
            0 <= old(self).vaddr + n < usize::MAX,
        ensures
            *self == old(self).add_spec(n),
    {
        self.vaddr = self.vaddr + n;
    }

    pub open spec fn read_offset_spec(self, mem: MemView, n: usize) -> u8 {
        mem.read((self.vaddr + n) as usize).value()
    }

    pub fn read_offset(&self, Tracked(mem): Tracked<&MemView>, n: usize) -> u8
        requires
            0 < self.vaddr + n < usize::MAX,
            self.range@.start <= self.vaddr + n < self.range@.end,
            mem.addr_transl((self.vaddr + n) as usize) is Some,
            mem.memory[mem.addr_transl((self.vaddr + n) as usize).unwrap().0]
                .contents[mem.addr_transl((self.vaddr + n) as usize).unwrap().1 as int] is Init,
        returns
            self.read_offset_spec(*mem, n),
    {
        let mut tmp = self.clone();
        tmp.add(n);
        tmp.read(Tracked(mem))
    }

    pub open spec fn write_offset_spec(self, mem: MemView, n: usize, x: u8) -> MemView {
        mem.write((self.vaddr + n) as usize, x)
    }

    pub fn write_offset(&self, Tracked(mem): Tracked<&mut MemView>, n: usize, x: u8)
        requires
            self.inv(),
            self.range@.start <= self.vaddr + n < self.range@.end,
            old(mem).addr_transl((self.vaddr + n) as usize) is Some,
    {
        let mut tmp = self.clone();
        tmp.add(n);
        tmp.write(Tracked(mem), x)
    }

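    /// The memory view obtained by copying the byte at offset `n` of `src`
    /// to offset `n` of `dst`.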
    pub open spec fn copy_offset_spec(src: Self, dst: Self, mem: MemView, n: usize) -> MemView {
        let x = src.read_offset_spec(mem, n);
        dst.write_offset_spec(mem, n, x)
    }

    pub fn copy_offset(src: &Self, dst: &Self, Tracked(mem): Tracked<&mut MemView>, n: usize)
        requires
            src.inv(),
            dst.inv(),
            src.range@.start <= src.vaddr + n < src.range@.end,
            dst.range@.start <= dst.vaddr + n < dst.range@.end,
            old(mem).addr_transl((src.vaddr + n) as usize) is Some,
            old(mem).addr_transl((dst.vaddr + n) as usize) is Some,
            old(mem).memory.contains_key(old(mem).addr_transl((src.vaddr + n) as usize).unwrap().0),
            old(mem).memory[old(mem).addr_transl((src.vaddr + n) as usize).unwrap().0]
                .contents[old(mem).addr_transl((src.vaddr + n) as usize).unwrap().1 as int] is Init,
        ensures
            *mem == Self::copy_offset_spec(*src, *dst, *old(mem), n),
    {
        let x = src.read_offset(Tracked(mem), n);
        proof { admit() };
        dst.write_offset(Tracked(mem), n, x)
    }

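    /// The memory view after copying `n` bytes from `src` to `dst`, one byte at
    /// a time from the highest offset down to offset zero.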
    pub open spec fn memcpy_spec(src: Self, dst: Self, mem: MemView, n: usize) -> MemView
        decreases n,
    {
        if n == 0 {
            mem
        } else {
            let mem = Self::copy_offset_spec(src, dst, mem, (n - 1) as usize);
            Self::memcpy_spec(src, dst, mem, (n - 1) as usize)
        }
    }

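    /// Copies `n` bytes from `src` to `dst`, requiring the two pointer ranges to
    /// be disjoint. The recursion mirrors `memcpy_spec` offset for offset.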
    pub fn copy_nonoverlapping(
        src: &Self,
        dst: &Self,
        Tracked(mem): Tracked<&mut MemView>,
        n: usize,
    )
        requires
            src.inv(),
            dst.inv(),
            src.range@.start <= src.vaddr,
            src.vaddr + n <= src.range@.end,
            dst.range@.start <= dst.vaddr,
            dst.vaddr + n < dst.range@.end,
            src.range@.end <= dst.range@.start || dst.range@.end <= src.range@.start,
            forall|i: usize|
                src.vaddr <= i < src.vaddr + n ==> {
                    &&& #[trigger] old(mem).addr_transl(i) is Some
                    &&& old(mem).memory.contains_key(old(mem).addr_transl(i).unwrap().0)
                    &&& old(mem).memory[old(mem).addr_transl(i).unwrap().0]
                        .contents[old(mem).addr_transl(i).unwrap().1 as int] is Init
                },
            forall|i: usize|
                dst.vaddr <= i < dst.vaddr + n ==> {
                    &&& old(mem).addr_transl(i) is Some
                },
        ensures
            *mem == Self::memcpy_spec(*src, *dst, *old(mem), n),
        decreases n,
    {
        let ghost mem0 = *mem;

        if n == 0 {
            return;
        } else {
            Self::copy_offset(src, dst, Tracked(mem), n - 1);
            assert(forall|i: usize|
                #![auto]
                src.vaddr <= i < src.vaddr + n - 1 ==> mem.addr_transl(i) == mem0.addr_transl(i));
            assert(forall|i: usize|
                #![auto]
                src.vaddr <= i < src.vaddr + n - 1 ==>
                    mem.memory[mem.addr_transl(i).unwrap().0]
                        .contents[mem.addr_transl(i).unwrap().1 as int] ==
                    mem0.memory[mem0.addr_transl(i).unwrap().0]
                        .contents[mem0.addr_transl(i).unwrap().1 as int]) by { admit() };
            Self::copy_nonoverlapping(src, dst, Tracked(mem), n - 1);
        }
    }

    pub fn from_vaddr(vaddr: usize, len: usize) -> (r: Self)
        requires
            vaddr != 0,
            0 < len <= usize::MAX - vaddr,
        ensures
            r.is_valid(),
            r.range@.start == vaddr,
            r.range@.end == (vaddr + len) as usize,
    {
        Self { vaddr, range: Ghost(Range { start: vaddr, end: (vaddr + len) as usize }) }
    }

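    /// Splits this pointer into a prefix covering the first `n` bytes and a
    /// suffix covering the rest of the range.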
    #[verus_spec(r =>
        requires
            self.is_valid(),
            0 <= n <= self.range@.end - self.range@.start,
            self.vaddr == self.range@.start,
        ensures
            r.0.range@.start == self.range@.start,
            r.0.range@.end == self.range@.start + n,
            r.0.vaddr == self.range@.start,
            r.1.range@.start == self.range@.start + n,
            r.1.range@.end == self.range@.end,
            r.1.vaddr == self.range@.start + n,
    )]
    pub fn split(self, n: usize) -> (Self, Self) {
        let left = VirtPtr {
            vaddr: self.vaddr,
            range: Ghost(Range { start: self.vaddr, end: (self.vaddr + n) as usize }),
        };

        let right = VirtPtr {
            vaddr: self.vaddr + n,
            range: Ghost(Range { start: (self.vaddr + n) as usize, end: self.range@.end }),
        };

        (left, right)
    }
}

impl GlobalMemView {
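    /// Translates a virtual address through the TLB mappings only; a page-table
    /// mapping that is not cached in the TLB does not translate here.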
    pub open spec fn addr_transl(self, va: usize) -> Option<(usize, usize)> {
        let mappings = self.tlb_mappings.filter(|m: Mapping| m.va_range.start <= va < m.va_range.end);
        if 0 < mappings.len() {
            let m = mappings.choose();
            let off = va - m.va_range.start;
            Some((m.pa_range.start, off as usize))
        } else {
            None
        }
    }

    pub open spec fn is_mapped(self, pa: usize) -> bool {
        exists|m: Mapping| self.tlb_mappings.contains(m) && m.pa_range.start <= pa < m.pa_range.end
    }

    pub open spec fn all_pas_accounted_for(self) -> bool {
        forall|pa: Paddr|
            0 <= pa < MAX_PADDR() ==>
                #[trigger] self.is_mapped(pa) || #[trigger] self.unmapped_pas.contains(pa)
    }

    pub open spec fn pas_uniquely_mapped(self) -> bool {
        forall|m1: Mapping, m2: Mapping|
            #[trigger] self.tlb_mappings.contains(m1) && #[trigger] self.tlb_mappings.contains(m2) && m1 != m2 ==>
                m1.pa_range.end <= m2.pa_range.start || m2.pa_range.end <= m1.pa_range.start
    }

    pub open spec fn unmapped_correct(self) -> bool {
        forall|pa: Paddr|
            #![auto]
            self.is_mapped(pa) <==> !self.unmapped_pas.contains(pa)
    }

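    /// Carves the sub-view for `[vaddr, vaddr + len)` out of the global view:
    /// TLB mappings overlapping the range, and the frames they cover, move into
    /// the returned `MemView`, while the remainder stays in the global view.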
    pub open spec fn take_view_spec(self, vaddr: usize, len: usize) -> (Self, MemView) {
        let range_end = vaddr + len;

        let leave_mappings: Set<Mapping> = self.tlb_mappings.filter(
            |m: Mapping| m.va_range.end <= vaddr || m.va_range.start >= range_end
        );

        let take_mappings = self.tlb_mappings.filter(
            |m: Mapping| m.va_range.start < range_end && m.va_range.end > vaddr
        );

        let leave_pas = Set::new(
            |pa: usize|
                exists|m: Mapping| leave_mappings.contains(m) && m.pa_range.start <= pa < m.pa_range.end
        );
        let take_pas = Set::new(
            |pa: usize|
                exists|m: Mapping| take_mappings.contains(m) && m.pa_range.start <= pa < m.pa_range.end
        );

        (
            GlobalMemView {
                tlb_mappings: leave_mappings,
                memory: self.memory.restrict(leave_pas),
                ..self
            },
            MemView { mappings: take_mappings, memory: self.memory.restrict(take_pas) },
        )
    }

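    /// Takes (tracked) ownership of the sub-view described by `take_view_spec`,
    /// removing it from the global view.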
    pub axiom fn take_view(tracked &mut self, vaddr: usize, len: usize) -> (tracked view: MemView)
        ensures
            *self == old(self).take_view_spec(vaddr, len).0,
            view == old(self).take_view_spec(vaddr, len).1;

    pub open spec fn return_view_spec(self, view: MemView) -> Self {
        GlobalMemView {
            tlb_mappings: self.tlb_mappings.union(view.mappings),
            memory: self.memory.union_prefer_right(view.memory),
            ..self
        }
    }

    pub axiom fn return_view(&mut self, view: MemView)
        ensures
            *self == old(self).return_view_spec(view);

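    /// Drops every TLB mapping whose virtual range contains `vaddr`.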
    pub open spec fn tlb_flush_vaddr_spec(self, vaddr: Vaddr) -> Self {
        let tlb_mappings = self.tlb_mappings.filter(
            |m: Mapping| m.va_range.end <= vaddr || vaddr < m.va_range.start
        );
        GlobalMemView {
            tlb_mappings,
            ..self
        }
    }

    pub axiom fn tlb_flush_vaddr(&mut self, vaddr: Vaddr)
        requires
            old(self).inv(),
        ensures
            *self == old(self).tlb_flush_vaddr_spec(vaddr),
            self.inv();

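    /// Models a TLB refill: the page-table mapping covering `vaddr` (selected via
    /// `choose`) is inserted into the TLB.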
    pub open spec fn tlb_soft_fault_spec(self, vaddr: Vaddr) -> Self {
        let mapping = self.pt_mappings.filter(|m: Mapping| m.va_range.start <= vaddr < m.va_range.end).choose();
        GlobalMemView {
            tlb_mappings: self.tlb_mappings.insert(mapping),
            ..self
        }
    }

    pub axiom fn tlb_soft_fault(tracked &mut self, vaddr: Vaddr)
        requires
            old(self).inv(),
            old(self).addr_transl(vaddr) is None,
        ensures
            *self == old(self).tlb_soft_fault_spec(vaddr),
            self.inv();

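    /// Installs a page-table mapping and removes the physical addresses it
    /// covers from the unmapped set.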
    pub open spec fn pt_map_spec(self, m: Mapping) -> Self {
        let pt_mappings = self.pt_mappings.insert(m);
        let unmapped_pas = self.unmapped_pas.difference(
            Set::new(|pa: usize| m.pa_range.start <= pa < m.pa_range.end)
        );
        GlobalMemView {
            pt_mappings,
            unmapped_pas,
            ..self
        }
    }

    pub axiom fn pt_map(&mut self, m: Mapping)
        requires
            forall|pa: Paddr|
                m.pa_range.start <= pa < m.pa_range.end ==>
                    old(self).unmapped_pas.contains(pa),
            old(self).inv(),
        ensures
            *self == old(self).pt_map_spec(m);

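    /// Removes a page-table mapping and returns the physical addresses it
    /// covers to the unmapped set.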
    pub open spec fn pt_unmap_spec(self, m: Mapping) -> Self {
        let pt_mappings = self.pt_mappings.remove(m);
        let unmapped_pas = self.unmapped_pas.union(
            Set::new(|pa: usize| m.pa_range.start <= pa < m.pa_range.end)
        );
        GlobalMemView {
            pt_mappings,
            unmapped_pas,
            ..self
        }
    }

    pub axiom fn pt_unmap(&mut self, m: Mapping)
        requires
            old(self).pt_mappings.contains(m),
            old(self).inv(),
        ensures
            *self == old(self).pt_unmap_spec(m),
            self.inv();

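    /// States that the TLB mappings covering `va` form a singleton set; the
    /// proof is currently admitted.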
    pub proof fn lemma_va_mapping_unique(self, va: usize)
        requires
            self.inv(),
        ensures
            self.tlb_mappings.filter(|m: Mapping| m.va_range.start <= va < m.va_range.end).is_singleton(),
    {
        admit()
    }
}

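/// Well-formedness of the global view: finite mapping sets and memory domain,
/// every physical address either TLB-mapped or recorded as unmapped, physically
/// disjoint TLB mappings, and per-mapping frame contents that are present,
/// correctly sized, and well-formed; TLB mappings are also virtually disjoint.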
impl Inv for GlobalMemView {
    open spec fn inv(self) -> bool {
        &&& self.tlb_mappings.finite()
        &&& self.pt_mappings.finite()
        &&& self.memory.dom().finite()
        &&& self.all_pas_accounted_for()
        &&& self.pas_uniquely_mapped()
        &&& self.unmapped_correct()
        &&& forall|m: Mapping| #![auto] self.tlb_mappings.contains(m) ==> {
            &&& m.inv()
            &&& forall|pa: Paddr| m.pa_range.start <= pa < m.pa_range.end ==> {
                &&& self.memory.dom().contains(pa)
            }
            &&& self.memory.contains_key(m.pa_range.start)
            &&& self.memory[m.pa_range.start].size == m.page_size
            &&& self.memory[m.pa_range.start].inv()
        }
        &&& forall|m: Mapping, n: Mapping|
            #![trigger self.tlb_mappings.contains(m), self.tlb_mappings.contains(n)]
            self.tlb_mappings.contains(m) && self.tlb_mappings.contains(n) && m != n ==>
                m.va_range.end <= n.va_range.start || n.va_range.end <= m.va_range.start
    }
}

}