1use vstd::arithmetic::power2::pow2;
2use vstd::prelude::*;
3use vstd::seq_lib::*;
4use vstd::set::{axiom_set_choose_len, axiom_set_contains_len, axiom_set_intersect_finite};
5
6use vstd_extra::arithmetic::{
7 lemma_nat_align_down_monotone, lemma_nat_align_down_sound, lemma_nat_align_down_within_block,
8 lemma_nat_align_up_sound,
9};
10use vstd_extra::drop_tracking::*;
11use vstd_extra::ghost_tree::*;
12use vstd_extra::ownership::*;
13use vstd_extra::seq_extra::{forall_seq, lemma_forall_seq_index};
14
15use core::marker::PhantomData;
16use core::ops::Range;
17
18use crate::mm::frame::meta::mapping::frame_to_index;
19use crate::mm::page_table::*;
20use crate::mm::page_prop::PageProperty;
21use crate::mm::{nr_subpage_per_huge, Paddr, PagingConstsTrait, PagingLevel, Vaddr};
22use crate::specs::arch::mm::{MAX_PADDR, MAX_USERSPACE_VADDR, NR_ENTRIES, NR_LEVELS, PAGE_SIZE};
23use crate::specs::mm::frame::meta_owners::{REF_COUNT_MAX, REF_COUNT_UNUSED};
24use crate::specs::mm::page_table::cursor::page_size_lemmas::{
25 lemma_page_size_divides, lemma_page_size_ge_page_size, lemma_page_size_spec_level1,
26};
27use crate::specs::arch::paging_consts::PagingConsts;
28use crate::specs::mm::frame::meta_region_owners::MetaRegionOwners;
29use crate::specs::mm::page_table::node::GuardPerm;
30use crate::specs::mm::page_table::owners::*;
31use crate::specs::mm::page_table::view::PageTableView;
32use crate::specs::mm::page_table::AbstractVaddr;
33use crate::specs::mm::page_table::Guards;
34use crate::specs::mm::page_table::Mapping;
35use crate::specs::mm::page_table::{nat_align_down, nat_align_up};
36use crate::specs::task::InAtomicMode;
37
38verus! {
39
/// Ghost state for one suspended level of a page-table cursor walk: the
/// ownership of the node entered at this level, the child slot the cursor
/// is positioned at, and per-slot child ownership (a slot becomes `None`
/// while its subtree's ownership has been taken out, e.g. while the cursor
/// is inside it).
pub tracked struct CursorContinuation<'rcu, C: PageTableConfig> {
    // Ownership of the entry for this level's node (must be a node; see `inv`).
    pub entry_own: EntryOwner<C>,
    // Current child slot index; bounded by NR_ENTRIES per `inv`.
    pub idx: usize,
    // Depth in the ghost ownership tree; tied to the paging level by
    // `inv` (tree_level == INC_LEVELS - level() - 1).
    pub tree_level: nat,
    // Child subtree ownership per slot; `None` marks a taken slot.
    pub children: Seq<Option<OwnerSubtree<C>>>,
    // Tree path for this continuation.
    // NOTE(review): the spec fn `path()` reads `entry_own.path`, not this
    // field — confirm whether the two are kept in sync elsewhere.
    pub path: TreePath<NR_ENTRIES>,
    // Permission witnessing the lock guard held on this node.
    pub guard_perm: GuardPerm<'rcu, C>,
}
48
49impl<'rcu, C: PageTableConfig> CursorContinuation<'rcu, C> {
    /// Tree path of this continuation's node, as recorded in the entry owner
    /// (distinct from the `path` struct field).
    pub open spec fn path(self) -> TreePath<NR_ENTRIES> {
        self.entry_own.path
    }

    /// The child subtree at the current slot; meaningful only when the slot
    /// holds `Some`.
    pub open spec fn child(self) -> OwnerSubtree<C> {
        self.children[self.idx as int].unwrap()
    }

    /// Functional spec of `take_child`: the child at `idx` paired with the
    /// continuation whose `idx` slot has been emptied to `None`.
    pub open spec fn take_child_spec(self) -> (OwnerSubtree<C>, Self) {
        let child = self.children[self.idx as int].unwrap();
        let cont = Self {
            // remove-then-insert keeps the sequence length unchanged while
            // replacing the slot's contents with None.
            children: self.children.remove(self.idx as int).insert(self.idx as int, None),
            ..self
        };
        (child, cont)
    }
66
    /// Move the child subtree out of slot `idx`, leaving `None` behind.
    /// The returned subtree satisfies its own invariant (derived from
    /// `inv_children` via `inv_children_unroll`).
    #[verifier::returns(proof)]
    pub proof fn take_child(tracked &mut self) -> (res: OwnerSubtree<C>)
        requires
            old(self).inv(),
            old(self).idx < old(self).children.len(),
            old(self).children[old(self).idx as int] is Some,
        ensures
            res == old(self).take_child_spec().0,
            *final(self) == old(self).take_child_spec().1,
            res.inv()
    {
        // Establish res.inv() from the children invariant before taking ownership.
        self.inv_children_unroll(self.idx as int);
        let tracked child = self.children.tracked_remove(old(self).idx as int).tracked_unwrap();
        self.children.tracked_insert(old(self).idx as int, None);
        child
    }

    /// Functional spec of `put_child`: the continuation with `child` stored
    /// back into slot `idx`.
    pub open spec fn put_child_spec(self, child: OwnerSubtree<C>) -> Self {
        Self {
            children: self.children.remove(self.idx as int).insert(self.idx as int, Some(child)),
            ..self
        }
    }

    /// Store `child` into the (currently empty) slot at `idx`.
    #[verifier::returns(proof)]
    pub proof fn put_child(tracked &mut self, tracked child: OwnerSubtree<C>)
        requires
            old(self).idx < old(self).children.len(),
            old(self).children[old(self).idx as int] is None,
        ensures
            *final(self) == old(self).put_child_spec(child)
    {
        // The removed value is None by precondition, so discarding it is fine.
        let _ = self.children.tracked_remove(old(self).idx as int);
        self.children.tracked_insert(old(self).idx as int, Some(child));
    }
102
    /// Round-trip lemma: putting back the child obtained from
    /// `take_child_spec` restores the original continuation.
    pub proof fn take_put_child(self)
        requires
            self.idx < self.children.len(),
            self.children[self.idx as int] is Some,
        ensures
            self.take_child_spec().1.put_child_spec(self.take_child_spec().0) == self,
    {
        let child = self.take_child_spec().0;
        let cont = self.take_child_spec().1;
        // Sequence extensionality on `children` is the only step needed;
        // all other fields are preserved by the struct-update syntax.
        assert(cont.put_child_spec(child).children == self.children);
    }
114
    /// Functional spec of `make_cont`: descend into the child at the current
    /// slot, producing (new deeper continuation, parent with the slot emptied).
    /// The new continuation starts at slot `idx` and holds `guard_perm` for
    /// the child node.
    pub open spec fn make_cont_spec(self, idx: usize, guard_perm: GuardPerm<'rcu, C>) -> (Self, Self) {
        let child = Self {
            entry_own: self.children[self.idx as int].unwrap().value,
            tree_level: (self.tree_level + 1) as nat,
            idx: idx,
            children: self.children[self.idx as int].unwrap().children,
            path: self.path.push_tail(self.idx as usize),
            guard_perm: guard_perm,
        };
        let cont = Self {
            children: self.children.update(self.idx as int, None),
            ..self
        };
        (child, cont)
    }

    /// Trusted (axiom): perform the descent described by `make_cont_spec`.
    /// NOTE(review): axioms bypass verification — their specs must be audited.
    #[verifier::returns(proof)]
    pub axiom fn make_cont(tracked &mut self, idx: usize, tracked guard_perm: Tracked<GuardPerm<'rcu, C>>) -> (res: Self)
        requires
            old(self).all_some(),
            old(self).idx < NR_ENTRIES,
            idx < NR_ENTRIES,
        ensures
            res == old(self).make_cont_spec(idx, guard_perm@).0,
            *final(self) == old(self).make_cont_spec(idx, guard_perm@).1;
140
    /// Functional spec of `restore`: re-assemble `child` (a deeper
    /// continuation) into an `OwnerSubtree`, store it back into this
    /// continuation's current slot, and release the child's guard permission.
    pub open spec fn restore_spec(self, child: Self) -> (Self, GuardPerm<'rcu, C>) {
        let child_node = OwnerSubtree {
            value: child.entry_own,
            level: child.tree_level,
            children: child.children,
        };
        (Self { children: self.children.update(self.idx as int, Some(child_node)), ..self }, child.guard_perm)
    }

    /// Trusted (axiom): pop one level of the cursor, per `restore_spec`.
    /// Returns the guard permission of the popped child node.
    #[verifier::returns(proof)]
    pub axiom fn restore(tracked &mut self, tracked child: Self) -> (tracked guard_perm: GuardPerm<'rcu, C>)
        ensures
            *final(self) == old(self).restore_spec(child).0,
            guard_perm == old(self).restore_spec(child).1;

    /// Functional spec of `new`: build a root-level continuation from an
    /// owner subtree, starting at slot `idx` with an empty path.
    pub open spec fn new_spec(owner_subtree: OwnerSubtree<C>, idx: usize, guard_perm: GuardPerm<'rcu, C>) -> Self {
        Self {
            entry_own: owner_subtree.value,
            idx: idx,
            tree_level: owner_subtree.level,
            children: owner_subtree.children,
            path: TreePath::new(Seq::empty()),
            guard_perm: guard_perm,
        }
    }

    /// Trusted (axiom): construct the tracked continuation per `new_spec`.
    pub axiom fn new(tracked owner_subtree: OwnerSubtree<C>, idx: usize, tracked guard_perm: GuardPerm<'rcu, C>)
        -> (tracked res: Self)
        ensures
            res == Self::new_spec(owner_subtree, idx, guard_perm);
171
    /// Predicate `f` holds (via `tree_predicate_map`) over every present
    /// child subtree, each rooted at this node's path extended by its slot.
    pub open spec fn map_children(self, f: spec_fn(EntryOwner<C>, TreePath<NR_ENTRIES>) -> bool) -> bool {
        forall |i: int|
            #![auto]
            0 <= i < self.children.len() ==>
            self.children[i] is Some ==>
            self.children[i].unwrap().tree_predicate_map(self.path().push_tail(i as usize), f)
    }

    /// Paging level of this continuation's node (requires `entry_own` to be
    /// a node, which `inv` guarantees).
    pub open spec fn level(self) -> PagingLevel {
        self.entry_own.node.unwrap().level
    }

    /// Every present child subtree satisfies its own invariant.
    pub open spec fn inv_children(self) -> bool {
        self.children.all(
            |child: Option<OwnerSubtree<C>>|
                child is Some ==> child.unwrap().inv()
        )
    }

    /// Instantiate `inv_children` at a single index `i`.
    pub proof fn inv_children_unroll(self, i:int)
        requires
            self.inv_children(),
            0 <= i < self.children.len(),
            self.children[i] is Some
        ensures
            self.children[i].unwrap().inv()
    {
        // Name the closure so the `Seq::all` quantifier can be triggered on it.
        let pred = |child: Option<OwnerSubtree<C>>| child is Some ==> child.unwrap().inv();
        assert(pred(self.children[i]));
    }
205
    /// Instantiate `inv_children` at every index at once, exposing the
    /// per-child invariants as a plain quantified fact.
    pub proof fn inv_children_unroll_all(self)
        requires
            self.inv_children()
        ensures
            forall |i:int|
                #![auto]
                0 <= i < self.children.len() ==>
                self.children[i] is Some ==>
                self.children[i].unwrap().inv()
    {
        // NOTE(review): `pred` appears unused — the assert-forall below goes
        // through `inv_children_unroll` directly. Left in place (removing a
        // binding can perturb verification, though it looks inert here).
        let pred = |child: Option<OwnerSubtree<C>>| child is Some ==> child.unwrap().inv();
        assert forall |i:int| 0 <= i < self.children.len() && #[trigger] self.children[i] is Some implies
            self.children[i].unwrap().inv()
        by {
            self.inv_children_unroll(i)
        }
    }

    /// Per-slot relational invariant tying each present child to this node:
    /// level bookkeeping, path shape, and agreement with the parent's PTE
    /// at the same index (`match_pte`).
    pub open spec fn inv_children_rel_pred(self) -> spec_fn(int, Option<OwnerSubtree<C>>) -> bool {
        |i: int, child: Option<OwnerSubtree<C>>| {
            child is Some ==> {
                &&& child.unwrap().value.parent_level == self.level()
                &&& child.unwrap().level == self.tree_level + 1
                &&& !child.unwrap().value.in_scope
                &&& child.unwrap().value.path.len() == self.entry_own.node.unwrap().tree_level + 1
                &&& child.unwrap().value.match_pte(
                    self.entry_own.node.unwrap().children_perm.value()[i],
                    self.entry_own.node.unwrap().level)
                &&& child.unwrap().value.path == self.path().push_tail(i as usize)
            }
        }
    }

    /// `inv_children_rel_pred` holds for every slot.
    pub open spec fn inv_children_rel(self) -> bool {
        forall_seq(self.children, self.inv_children_rel_pred())
    }

    /// Per-slot predicate: each present child, viewed as a page-table owner,
    /// satisfies the page-table invariant.
    pub open spec fn pt_inv_children_pred() -> spec_fn(int, Option<OwnerSubtree<C>>) -> bool {
        |i: int, child: Option<OwnerSubtree<C>>|
            child is Some ==> PageTableOwner(child.unwrap()).pt_inv()
    }

    /// `pt_inv_children_pred` holds for every slot.
    pub open spec fn pt_inv_children(self) -> bool {
        forall_seq(self.children, Self::pt_inv_children_pred())
    }
251
    /// Instantiate `pt_inv_children` at index `i`.
    pub proof fn pt_inv_children_unroll(self, i: int)
        requires
            self.pt_inv_children(),
            0 <= i < self.children.len(),
            self.children[i] is Some,
        ensures
            PageTableOwner(self.children[i].unwrap()).pt_inv(),
    {
        lemma_forall_seq_index(self.children, Self::pt_inv_children_pred(), i);
    }

    /// Instantiate `inv_children_rel` at index `i`, exposing every conjunct
    /// of `inv_children_rel_pred` for that slot.
    pub proof fn inv_children_rel_unroll(self, i: int)
        requires
            self.inv_children_rel(),
            0 <= i < self.children.len(),
            self.children[i] is Some,
        ensures
            self.children[i].unwrap().value.parent_level == self.level(),
            self.children[i].unwrap().level == self.tree_level + 1,
            !self.children[i].unwrap().value.in_scope,
            self.children[i].unwrap().value.path.len() == self.entry_own.node.unwrap().tree_level + 1,
            self.children[i].unwrap().value.match_pte(
                self.entry_own.node.unwrap().children_perm.value()[i],
                self.entry_own.node.unwrap().level),
            self.children[i].unwrap().value.path == self.path().push_tail(i as usize),
    {
        lemma_forall_seq_index(
            self.children, self.inv_children_rel_pred(), i);
    }
281
    /// Main invariant of a continuation: well-formed children (structural,
    /// relational, and page-table), a well-formed node entry that owns the
    /// guard permission, and consistent level/path bookkeeping.
    pub open spec fn inv(self) -> bool {
        &&& self.children.len() == NR_ENTRIES
        &&& 0 <= self.idx < NR_ENTRIES
        &&& self.inv_children()
        &&& self.inv_children_rel()
        &&& self.pt_inv_children()
        &&& self.entry_own.is_node()
        &&& self.entry_own.inv()
        &&& !self.entry_own.in_scope
        &&& self.entry_own.node.unwrap().relate_guard_perm(self.guard_perm)
        // tree depth and paging level are mirror images of each other
        &&& self.tree_level == INC_LEVELS - self.level() - 1
        &&& self.tree_level < INC_LEVELS - 1
        &&& self.path().len() == self.tree_level
    }

    /// Every slot holds a child (no ownership has been taken out).
    pub open spec fn all_some(self) -> bool {
        forall|i: int| 0 <= i < NR_ENTRIES ==> self.children[i] is Some
    }

    /// Exactly the current slot is empty; all others hold children
    /// (the state while the cursor is inside the child at `idx`).
    pub open spec fn all_but_index_some(self) -> bool {
        &&& forall|i: int| 0 <= i < self.idx ==> self.children[i] is Some
        &&& forall|i: int| self.idx < i < NR_ENTRIES ==> self.children[i] is Some
        &&& self.children[self.idx as int] is None
    }

    /// Functional spec of `do_inc_index`: advance to the next slot.
    pub open spec fn inc_index(self) -> Self {
        Self {
            idx: (self.idx + 1) as usize,
            ..self
        }
    }
313
    /// Advance the current slot by one (tracked counterpart of `inc_index`).
    pub proof fn do_inc_index(tracked &mut self)
        requires
            old(self).idx + 1 < NR_ENTRIES,
        ensures
            *final(self) == old(self).inc_index(),
    {
        self.idx = (self.idx + 1) as usize;
    }

    /// The lock guard recorded in `guard_perm` is held in `guards`,
    /// identified by the guarded node's pointer address.
    pub open spec fn node_locked(self, guards: Guards<'rcu, C>) -> bool {
        guards.lock_held(self.guard_perm.value().inner.inner@.ptr.addr())
    }

    /// All mappings viewable under any present child subtree, each viewed
    /// relative to this node's path extended by its slot index.
    pub open spec fn view_mappings(self) -> Set<Mapping> {
        Set::new(
            |m: Mapping| exists|i:int|
                #![auto]
                0 <= i < self.children.len() &&
                self.children[i] is Some &&
                PageTableOwner(self.children[i].unwrap()).view_rec(self.path().push_tail(i as usize)).contains(m)
        )
    }

    /// Re-assemble this continuation into an `OwnerSubtree` (inverse of the
    /// decomposition performed when constructing a continuation).
    pub open spec fn as_subtree(self) -> OwnerSubtree<C> {
        OwnerSubtree {
            value: self.entry_own,
            level: self.tree_level,
            children: self.children,
        }
    }

    /// `as_subtree` wrapped as a `PageTableOwner`.
    pub open spec fn as_page_table_owner(self) -> PageTableOwner<C> {
        PageTableOwner(self.as_subtree())
    }

    /// Mappings viewable under the child at the current slot only.
    pub open spec fn view_mappings_take_child_spec(self) -> Set<Mapping> {
        PageTableOwner(self.children[self.idx as int].unwrap()).view_rec(self.path().push_tail(self.idx as usize))
    }
352
    /// Derive the relational conjuncts of `inv_children_rel_pred` for a
    /// child value from an entry's `node_matching` fact. The empty body
    /// indicates the conclusions follow directly from unfolding
    /// `node_matching` — presumably its definition contains these facts;
    /// confirm against the `Entry` spec.
    pub proof fn rel_children_from_node_matching(
        entry: &Entry<'rcu, C>,
        child_value: EntryOwner<C>,
        parent_owner: NodeOwner<C>,
        guard_perm: GuardPerm<'rcu, C>,
        entry_own: EntryOwner<C>,
        idx: usize,
    )
        requires
            entry.node_matching(child_value, parent_owner, guard_perm),
            entry.idx == idx,
            entry_own.node == Some(parent_owner),
            child_value.path == entry_own.path.push_tail(idx),
            child_value.path.len() == parent_owner.tree_level + 1,
        ensures
            // restates a precondition for convenient chaining at call sites
            child_value.path.len() == parent_owner.tree_level + 1,
            child_value.match_pte(parent_owner.children_perm.value()[idx as int], parent_owner.level),
            child_value.path == entry_own.path.push_tail(idx),
            child_value.parent_level == parent_owner.level,
    { }

    /// Trusted (axiom): a continuation whose current slot has been refilled
    /// with a child satisfying the per-slot invariants satisfies the full
    /// invariant `inv()`.
    /// NOTE(review): this is assumed, not proved — it must imply the
    /// universally-quantified `inv_children`/`inv_children_rel` from facts
    /// about a single slot, which only holds if the other slots were already
    /// compliant; audit the callers' contexts.
    pub axiom fn continuation_inv_holds_after_child_restore(self)
        requires
            self.entry_own.is_node(),
            self.entry_own.inv(),
            self.entry_own.node.unwrap().relate_guard_perm(self.guard_perm),
            self.children.len() == NR_ENTRIES,
            0 <= self.idx < NR_ENTRIES,
            self.tree_level == INC_LEVELS - self.level() - 1,
            self.tree_level < INC_LEVELS - 1,
            self.path().len() == self.tree_level,
            self.children[self.idx as int] is Some,
            self.children[self.idx as int].unwrap().inv(),
            self.children[self.idx as int].unwrap().value.parent_level == self.level(),
            self.children[self.idx as int].unwrap().value.path == self.path().push_tail(self.idx as usize),
            self.children[self.idx as int].unwrap().level == self.tree_level + 1,
            self.children[self.idx as int].unwrap().value.path.len()
                == self.entry_own.node.unwrap().tree_level + 1,
            self.children[self.idx as int].unwrap().value.match_pte(
                self.entry_own.node.unwrap().children_perm.value()[self.idx as int],
                self.entry_own.node.unwrap().level),
            !self.children[self.idx as int].unwrap().value.in_scope,
        ensures
            self.inv();
410
    /// Construct a fresh frame-mapping child subtree for the current slot,
    /// mapping physical address `paddr` with property `prop` at this node's
    /// paging level. The metadata region owners are threaded through
    /// unchanged (per the ensures on `regions`).
    pub proof fn new_child(tracked &self, paddr: Paddr, prop: PageProperty, tracked regions: &mut MetaRegionOwners) -> (tracked res: OwnerSubtree<C>)
        requires
            self.inv(),
            self.level() < NR_LEVELS,
            old(regions).slots.contains_key(frame_to_index(paddr)),
            paddr % PAGE_SIZE == 0,
            paddr < MAX_PADDR,
            paddr % page_size(self.level()) == 0,
            paddr + page_size(self.level()) <= MAX_PADDR,
            self.path().push_tail(self.idx as usize).inv(),
        ensures
            final(regions).slot_owners == old(regions).slot_owners,
            final(regions).slots == old(regions).slots,
            res.value == EntryOwner::<C>::new_frame_spec(paddr, self.path().push_tail(self.idx as usize), self.level(), prop).set_in_scope(false),
            res.inv(),
            res.level == self.tree_level + 1,
            res == OwnerSubtree::new_val(res.value, res.level as nat),
        {
        let tracked mut owner = EntryOwner::<C>::new_frame(paddr, self.path().push_tail(self.idx as usize), self.level(), prop);
        // The new owner starts out of scope; it enters scope when installed.
        owner.in_scope = false;
        OwnerSubtree::new_val_tracked(owner, self.tree_level + 1)
    }
433
434}
435
/// Ghost ownership for a whole page-table cursor: one suspended
/// `CursorContinuation` per level above the current position, plus the
/// virtual-address state of the walk.
pub tracked struct CursorOwner<'rcu, C: PageTableConfig> {
    // Current paging level of the cursor (1 = leaf level).
    pub level: PagingLevel,
    // Continuations keyed by `level - 1`; populated for levels at or above
    // the current one (see `Inv::inv`).
    pub continuations: Map<int, CursorContinuation<'rcu, C>>,
    // Current virtual address, decomposed into per-level indices.
    pub va: AbstractVaddr,
    // Level at which the subtree lock is held.
    pub guard_level: PagingLevel,
    // Address prefix identifying the locked subtree.
    pub prefix: AbstractVaddr,
    // Set when the cursor has popped above the locked range without yet
    // re-normalizing `va` (see the `inv` conjuncts mentioning it).
    pub popped_too_high: bool,
}
444
impl<'rcu, C: PageTableConfig> Inv for CursorOwner<'rcu, C> {
    /// Cursor invariant: va/level/prefix bookkeeping, plus one hard-coded
    /// clause per paging level (4 down to 1) relating each continuation to
    /// the one above it. NOTE(review): the per-level clauses assume exactly
    /// NR_LEVELS == 4; they would need extending for other configurations.
    open spec fn inv(self) -> bool {
        &&& self.va.inv()
        &&& self.va.offset == 0
        &&& 1 <= self.level <= NR_LEVELS
        &&& 1 <= self.guard_level <= NR_LEVELS
        // top-level index stays within the configured range (end inclusive,
        // to allow the one-past-the-last position)
        &&& C::TOP_LEVEL_INDEX_RANGE_spec().start <= self.va.index[NR_LEVELS - 1]
        &&& self.va.index[NR_LEVELS - 1] <= C::TOP_LEVEL_INDEX_RANGE_spec().end
        &&& self.in_locked_range() || self.above_locked_range()
        &&& self.popped_too_high ==> self.level >= self.guard_level && self.in_locked_range()
        &&& !self.popped_too_high ==> self.level < self.guard_level || self.above_locked_range()
        // the continuation at the current level still owns all its children;
        // each strictly-higher one has exactly its current slot taken
        &&& self.continuations[self.level - 1].all_some()
        &&& forall|i: int| self.level <= i < NR_LEVELS ==> {
            (#[trigger] self.continuations[i]).all_but_index_some()
        }
        &&& self.prefix.inv()
        &&& forall|i: int| i < self.guard_level ==> self.prefix.index[i] == 0
        &&& self.prefix.index[NR_LEVELS - 1] < C::TOP_LEVEL_INDEX_RANGE_spec().end
        &&& self.va.leading_bits == self.prefix.leading_bits
        &&& self.prefix.leading_bits == C::LEADING_BITS_spec() as int
        // while inside the locked range, va agrees with the prefix above the
        // guard level
        &&& !self.popped_too_high ==> forall|i: int| self.guard_level <= i < NR_LEVELS ==>
            self.va.index[i] == self.prefix.index[i]
        &&& !self.popped_too_high && self.guard_level >= 1 && self.level < self.guard_level ==>
            self.va.index[self.guard_level - 1] == 0
        // level-4 continuation (root): present whenever the cursor is at or
        // below level 4
        &&& self.level <= 4 ==> {
            &&& self.continuations.contains_key(3)
            &&& self.continuations[3].inv()
            &&& self.continuations[3].level() == 4
            &&& self.continuations[3].entry_own.parent_level == 5
            &&& self.va.index[3] == self.continuations[3].idx
        }
        // level-3 continuation: linked to level 4 (distinct guard, path and
        // PTE agreement with the parent slot)
        &&& self.level <= 3 ==> {
            &&& self.continuations.contains_key(2)
            &&& self.continuations[2].inv()
            &&& self.continuations[2].level() == 3
            &&& self.continuations[2].entry_own.parent_level == 4
            &&& self.va.index[2] == self.continuations[2].idx
            &&& self.continuations[2].guard_perm.value().inner.inner@.ptr.addr() !=
                self.continuations[3].guard_perm.value().inner.inner@.ptr.addr()
            &&& self.continuations[2].path() == self.continuations[3].path().push_tail(self.continuations[3].idx as usize)
            &&& self.continuations[2].entry_own.path.len()
                == self.continuations[3].entry_own.node.unwrap().tree_level + 1
            &&& self.continuations[2].entry_own.match_pte(
                self.continuations[3].entry_own.node.unwrap().children_perm.value()[self.continuations[3].idx as int],
                self.continuations[3].entry_own.node.unwrap().level)
            &&& self.continuations[2].entry_own.parent_level
                == self.continuations[3].entry_own.node.unwrap().level
        }
        // level-2 continuation: same shape, plus guard distinctness against
        // both ancestors
        &&& self.level <= 2 ==> {
            &&& self.continuations.contains_key(1)
            &&& self.continuations[1].inv()
            &&& self.continuations[1].level() == 2
            &&& self.continuations[1].entry_own.parent_level == 3
            &&& self.va.index[1] == self.continuations[1].idx
            &&& self.continuations[1].guard_perm.value().inner.inner@.ptr.addr() !=
                self.continuations[2].guard_perm.value().inner.inner@.ptr.addr()
            &&& self.continuations[1].guard_perm.value().inner.inner@.ptr.addr() !=
                self.continuations[3].guard_perm.value().inner.inner@.ptr.addr()
            &&& self.continuations[1].path() == self.continuations[2].path().push_tail(self.continuations[2].idx as usize)
            &&& self.continuations[1].entry_own.path.len()
                == self.continuations[2].entry_own.node.unwrap().tree_level + 1
            &&& self.continuations[1].entry_own.match_pte(
                self.continuations[2].entry_own.node.unwrap().children_perm.value()[self.continuations[2].idx as int],
                self.continuations[2].entry_own.node.unwrap().level)
            &&& self.continuations[1].entry_own.parent_level
                == self.continuations[2].entry_own.node.unwrap().level
        }
        // level-1 (leaf) continuation: guard distinct from all three ancestors
        &&& self.level == 1 ==> {
            &&& self.continuations.contains_key(0)
            &&& self.continuations[0].inv()
            &&& self.continuations[0].level() == 1
            &&& self.continuations[0].entry_own.parent_level == 2
            &&& self.va.index[0] == self.continuations[0].idx
            &&& self.continuations[0].guard_perm.value().inner.inner@.ptr.addr() !=
                self.continuations[1].guard_perm.value().inner.inner@.ptr.addr()
            &&& self.continuations[0].guard_perm.value().inner.inner@.ptr.addr() !=
                self.continuations[2].guard_perm.value().inner.inner@.ptr.addr()
            &&& self.continuations[0].guard_perm.value().inner.inner@.ptr.addr() !=
                self.continuations[3].guard_perm.value().inner.inner@.ptr.addr()
            &&& self.continuations[0].path() == self.continuations[1].path().push_tail(self.continuations[1].idx as usize)
            &&& self.continuations[0].entry_own.path.len()
                == self.continuations[1].entry_own.node.unwrap().tree_level + 1
            &&& self.continuations[0].entry_own.match_pte(
                self.continuations[1].entry_own.node.unwrap().children_perm.value()[self.continuations[1].idx as int],
                self.continuations[1].entry_own.node.unwrap().level)
            &&& self.continuations[0].entry_own.parent_level
                == self.continuations[1].entry_own.node.unwrap().level
        }
    }
}
560
561impl<'rcu, C: PageTableConfig> CursorOwner<'rcu, C> {
562
    /// Tree predicate: every node entry's lock is not held in `guards`.
    pub open spec fn node_unlocked(guards: Guards<'rcu, C>) ->
        (spec_fn(EntryOwner<C>, TreePath<NR_ENTRIES>) -> bool) {
        |owner: EntryOwner<C>, path: TreePath<NR_ENTRIES>|
            owner.is_node() ==>
            guards.unlocked(owner.node.unwrap().meta_perm.addr())
    }

    /// Like `node_unlocked`, but exempting the node at `addr`.
    pub open spec fn node_unlocked_except(guards: Guards<'rcu, C>, addr: usize) ->
        (spec_fn(EntryOwner<C>, TreePath<NR_ENTRIES>) -> bool) {
        |owner: EntryOwner<C>, path: TreePath<NR_ENTRIES>|
            owner.is_node() ==>
            owner.node.unwrap().meta_perm.addr() != addr ==>
            guards.unlocked(owner.node.unwrap().meta_perm.addr())
    }

    /// `f` holds over the children of every active continuation.
    pub open spec fn map_full_tree(self, f: spec_fn(EntryOwner<C>, TreePath<NR_ENTRIES>) -> bool) -> bool {
        forall|i: int|
            #![trigger self.continuations[i]]
            self.level - 1 <= i < NR_LEVELS ==> {
                self.continuations[i].map_children(f)
            }
    }

    /// NOTE(review): this definition is currently identical to
    /// `map_full_tree`; the two names suggest the intent differs — confirm
    /// whether one was meant to also cover the continuation nodes themselves.
    pub open spec fn map_only_children(self, f: spec_fn(EntryOwner<C>, TreePath<NR_ENTRIES>) -> bool) -> bool {
        forall|i: int|
            #![trigger self.continuations[i]]
            self.level - 1 <= i < NR_LEVELS ==>
                self.continuations[i].map_children(f)
    }

    /// No child node of any active continuation is locked.
    pub open spec fn children_not_locked(self, guards: Guards<'rcu, C>) -> bool {
        self.map_only_children(Self::node_unlocked(guards))
    }

    /// Every child node is unlocked except possibly the cursor's current one.
    pub open spec fn only_current_locked(self, guards: Guards<'rcu, C>) -> bool {
        self.map_only_children(Self::node_unlocked_except(guards, self.cur_entry_owner().node.unwrap().meta_perm.addr()))
    }
600
    /// Dropping (unlocking) the current node's guard restores the
    /// "no children locked" state: `guards1` is the guard map after the
    /// tracked drop of `guard`, whose address is the current node's.
    pub proof fn never_drop_restores_children_not_locked(
        self,
        guard: PageTableGuard<'rcu, C>,
        guards0: Guards<'rcu, C>,
        guards1: Guards<'rcu, C>)
        requires
            self.inv(),
            self.only_current_locked(guards0),
            <PageTableGuard<'rcu, C> as TrackDrop>::constructor_requires(guard, guards0),
            <PageTableGuard<'rcu, C> as TrackDrop>::constructor_ensures(guard, guards0, guards1),
            self.cur_entry_owner().is_node(),
            guard.inner.inner@.ptr.addr()
                == self.cur_entry_owner().node.unwrap().meta_perm.addr(),
        ensures
            self.children_not_locked(guards1),
    {
        let current_addr = self.cur_entry_owner().node.unwrap().meta_perm.addr();
        // "unlocked except current" under guards0 implies "unlocked" under
        // guards1, since the drop only releases current_addr's lock.
        let f = Self::node_unlocked_except(guards0, current_addr);
        let g = Self::node_unlocked(guards1);
        assert(OwnerSubtree::implies(f, g));
        self.map_children_implies(f, g);
    }

    /// Trusted (axiom): dropping a guard whose address differs from every
    /// continuation's guard preserves `nodes_locked`.
    pub axiom fn never_drop_restores_nodes_locked(
        self,
        guard: PageTableGuard<'rcu, C>,
        guards0: Guards<'rcu, C>,
        guards1: Guards<'rcu, C>,
    )
        requires
            self.inv(),
            self.nodes_locked(guards0),
            <PageTableGuard<'rcu, C> as TrackDrop>::constructor_requires(guard, guards0),
            <PageTableGuard<'rcu, C> as TrackDrop>::constructor_ensures(guard, guards0, guards1),
            forall|i: int| #![trigger self.continuations[i]]
                self.level - 1 <= i < NR_LEVELS ==>
                self.continuations[i].guard_perm.value().inner.inner@.ptr.addr()
                    != guard.inner.inner@.ptr.addr(),
        ensures
            self.nodes_locked(guards1);
645
    /// Every active continuation's own node lock is held in `guards`.
    pub open spec fn nodes_locked(self, guards: Guards<'rcu, C>) -> bool {
        forall|i: int|
            #![trigger self.continuations[i]]
            self.level - 1 <= i < NR_LEVELS ==> {
                self.continuations[i].node_locked(guards)
            }
    }

    /// Slot index of the current-level continuation.
    pub open spec fn index(self) -> usize {
        self.continuations[self.level - 1].idx
    }

    /// Functional spec of `do_inc_index`: bump the current-level slot, keep
    /// `va` in sync with it, and clear `popped_too_high`.
    pub open spec fn inc_index(self) -> Self {
        Self {
            continuations: self.continuations.insert(self.level - 1, self.continuations[self.level - 1].inc_index()),
            va: AbstractVaddr {
                index: self.va.index.insert(self.level - 1, self.continuations[self.level - 1].inc_index().idx as int),
                ..self.va
            },
            popped_too_high: false,
        ..self
        }
    }
683
    /// Advance the cursor one slot at the current level, re-establishing
    /// `inv()`. The bulk of the body is lemma invocations showing the new
    /// `va` stays inside (or above) the locked range.
    pub proof fn do_inc_index(tracked &mut self)
        requires
            old(self).inv(),
            old(self).level <= old(self).guard_level,
            old(self).continuations[old(self).level - 1].idx + 1 < NR_ENTRIES,
            old(self).level == NR_LEVELS ==>
                (old(self).continuations[old(self).level - 1].idx + 1) <= C::TOP_LEVEL_INDEX_RANGE_spec().end,
        ensures
            final(self).inv(),
            *final(self) == old(self).inc_index(),
    {
        reveal(CursorContinuation::inv_children);
        self.popped_too_high = false;
        // Update the continuation and mirror the new idx into va.index.
        let tracked mut cont = self.continuations.tracked_remove(self.level - 1);
        cont.do_inc_index();
        self.va.index.tracked_insert(self.level - 1, cont.idx as int);
        self.continuations.tracked_insert(self.level - 1, cont);
        assert(self.continuations == old(self).continuations.insert(self.level - 1, cont));
        assert(self.va.index.dom() =~= Set::new(|i: int| 0 <= i < NR_LEVELS));

        // Incrementing the index at `level` adds one page of that level to va.
        old(self).va.index_increment_adds_page_size(old(self).level as int);
        lemma_page_size_ge_page_size(old(self).level as PagingLevel);

        if self.level >= self.guard_level {
            if !old(self).above_locked_range() {
                // At/above guard level and previously in range: show the
                // incremented va is still classified correctly.
                Self::inc_at_guard_level_above_locked_range(
                    old(self).va, old(self).prefix,
                    old(self).guard_level, old(self).level,
                    self.va.to_vaddr());
            }
        }
        if old(self).popped_too_high { old(self).in_locked_range_prefix_match(); }
        assert(self.va.index[self.level as int - 1] == self.continuations[self.level as int - 1].idx);
        assert(self.prefix == old(self).prefix);
        assert(self.prefix.index[NR_LEVELS - 1] < C::TOP_LEVEL_INDEX_RANGE_spec().end);
        assert(cont.children == old(self).continuations[self.level - 1].children);
        assert(cont.pt_inv_children());
    }
723
    /// Expose the per-continuation facts implied by `inv()` at level `i`.
    pub proof fn inv_continuation(self, i: int)
        requires
            self.inv(),
            self.level - 1 <= i <= NR_LEVELS - 1,
        ensures
            self.continuations.contains_key(i),
            self.continuations[i].inv(),
            self.continuations[i].children.len() == NR_ENTRIES,
    {
        // Follows from the hard-coded per-level clauses of inv().
        assert(self.continuations.contains_key(i));
    }

    /// Union of mappings viewable under every active continuation.
    pub open spec fn view_mappings(self) -> Set<Mapping>
    {
        Set::new(
            |m: Mapping|
                exists|i:int|
                    #![trigger self.continuations[i]]
                    self.level - 1 <= i < NR_LEVELS && self.continuations[i].view_mappings().contains(m)
        )
    }

    /// Re-assemble the full ghost tree by folding each continuation back
    /// into its parent (via `restore_spec`), bottom-up from the current
    /// level, and wrap the root as a `PageTableOwner`.
    pub open spec fn as_page_table_owner(self) -> PageTableOwner<C> {
        if self.level == 1 {
            let l1 = self.continuations[0];
            let l2 = self.continuations[1].restore_spec(l1).0;
            let l3 = self.continuations[2].restore_spec(l2).0;
            let l4 = self.continuations[3].restore_spec(l3).0;
            l4.as_page_table_owner()
        } else if self.level == 2 {
            let l2 = self.continuations[1];
            let l3 = self.continuations[2].restore_spec(l2).0;
            let l4 = self.continuations[3].restore_spec(l3).0;
            l4.as_page_table_owner()
        } else if self.level == 3 {
            let l3 = self.continuations[2];
            let l4 = self.continuations[3].restore_spec(l3).0;
            l4.as_page_table_owner()
        } else {
            let l4 = self.continuations[3];
            l4.as_page_table_owner()
        }
    }

    /// Entry owner of the subtree the cursor currently points at.
    pub open spec fn cur_entry_owner(self) -> EntryOwner<C> {
        self.cur_subtree().value
    }

    /// The child subtree at the current level's current slot.
    pub open spec fn cur_subtree(self) -> OwnerSubtree<C> {
        self.continuations[self.level - 1].children[self.index() as int].unwrap()
    }
775
    /// Discharge the precondition of cloning the item mapped at the current
    /// frame: its metadata slot is sound and its refcount has headroom.
    pub proof fn cur_frame_clone_requires(
        self,
        item: C::Item,
        pa: Paddr,
        level: PagingLevel,
        prop: PageProperty,
        regions: MetaRegionOwners,
    )
        requires
            self.inv(),
            regions.inv(),
            self.metaregion_sound(regions),
            self.cur_entry_owner().is_frame(),
            pa == self.cur_entry_owner().frame.unwrap().mapped_pa,
            C::item_from_raw_spec(pa, level, prop) == item,
            crate::mm::frame::meta::has_safe_slot(pa),
            regions.slot_owners[frame_to_index(pa)].inner_perms.ref_count.value() + 1
                < crate::specs::mm::frame::meta_owners::REF_COUNT_MAX,
        ensures
            item.clone_requires(regions)
    {
        // Soundness of the cursor's whole path gives soundness of the
        // current entry, which the config-level lemma then consumes.
        assert(self.path_metaregion_sound(regions));
        assert(self.cur_entry_owner().metaregion_sound(regions));
        C::clone_requires_concrete(item, pa, level, prop, regions);
    }
817
    /// Bumping the refcount of the current frame's metadata slot by one —
    /// with every other slot and all non-refcount fields unchanged —
    /// preserves both the region invariant and the cursor's soundness
    /// w.r.t. the regions.
    pub proof fn clone_item_preserves_invariants(
        self,
        old_regions: MetaRegionOwners,
        new_regions: MetaRegionOwners,
        idx: usize,
    )
        requires
            self.inv(),
            self.metaregion_sound(old_regions),
            old_regions.inv(),
            self.cur_entry_owner().is_frame(),
            idx == frame_to_index(self.cur_entry_owner().frame.unwrap().mapped_pa),
            old_regions.slot_owners.contains_key(idx),
            new_regions.slot_owners.contains_key(idx),
            // refcount incremented by one on the same ghost cell...
            new_regions.slot_owners[idx].inner_perms.ref_count.value() ==
                old_regions.slot_owners[idx].inner_perms.ref_count.value() + 1,
            new_regions.slot_owners[idx].inner_perms.ref_count.id() ==
                old_regions.slot_owners[idx].inner_perms.ref_count.id(),
            // ...while every other component of the slot is unchanged
            new_regions.slot_owners[idx].inner_perms.storage ==
                old_regions.slot_owners[idx].inner_perms.storage,
            new_regions.slot_owners[idx].inner_perms.vtable_ptr ==
                old_regions.slot_owners[idx].inner_perms.vtable_ptr,
            new_regions.slot_owners[idx].inner_perms.in_list ==
                old_regions.slot_owners[idx].inner_perms.in_list,
            new_regions.slot_owners[idx].paths_in_pt ==
                old_regions.slot_owners[idx].paths_in_pt,
            new_regions.slot_owners[idx].self_addr == old_regions.slot_owners[idx].self_addr,
            new_regions.slot_owners[idx].raw_count == old_regions.slot_owners[idx].raw_count,
            new_regions.slot_owners[idx].usage == old_regions.slot_owners[idx].usage,
            new_regions.slot_owners.dom() == old_regions.slot_owners.dom(),
            forall |i: usize| #![trigger new_regions.slot_owners[i]]
                i != idx && old_regions.slot_owners.contains_key(i) ==>
                new_regions.slot_owners[i] == old_regions.slot_owners[i],
            new_regions.slots == old_regions.slots,
            // refcount stays strictly inside (0, REF_COUNT_MAX)
            0 < old_regions.slot_owners[idx].inner_perms.ref_count.value(),
            old_regions.slot_owners[idx].inner_perms.ref_count.value() + 1 < REF_COUNT_MAX,
        ensures
            new_regions.inv(),
            self.metaregion_sound(new_regions),
    {
        self.cont_entries_metaregion(old_regions);
        assert(new_regions.slot_owners[idx].inv());
        // Region invariant: each slot is unchanged except idx, whose owner
        // invariant was just established.
        assert(new_regions.inv()) by {
            assert forall |i: usize| #[trigger] new_regions.slots.contains_key(i) implies {
                &&& new_regions.slot_owners.contains_key(i)
                &&& new_regions.slot_owners[i].inv()
                &&& new_regions.slots[i].is_init()
                &&& new_regions.slots[i].value().wf(new_regions.slot_owners[i])
                &&& new_regions.slot_owners[i].self_addr == new_regions.slots[i].addr()
            } by { assert(old_regions.slots.contains_key(i)); };
            assert forall |i: usize| #[trigger] new_regions.slot_owners.contains_key(i) implies
                new_regions.slot_owners[i].inv() by {};
        };
        // Soundness is insensitive to a pure refcount increment.
        self.metaregion_slot_owners_rc_increment(old_regions, new_regions, idx);
    }
881
    /// A freshly built frame subtree at the cursor's current slot views as
    /// exactly the one intended mapping (current slot's VA range -> `pa`,
    /// one page of the current level, with property `prop`).
    pub proof fn new_child_mappings_eq_target(
        self,
        new_subtree: OwnerSubtree<C>,
        pa: Paddr,
        level: PagingLevel,
        prop: PageProperty,
    )
        requires
            self.inv(),
            level == self.level,
            new_subtree.inv(),
            new_subtree.value.is_frame(),
            new_subtree.value.path ==
                self.continuations[self.level as int - 1].path()
                    .push_tail(self.continuations[self.level as int - 1].idx as usize),
            new_subtree.value.frame.unwrap().mapped_pa == pa,
            new_subtree.value.frame.unwrap().prop == prop,
        ensures
            PageTableOwner(new_subtree)@.mappings == set![Mapping {
                va_range: self@.cur_slot_range(page_size(level)),
                pa_range: pa..(pa + page_size(level)) as usize,
                page_size: page_size(level),
                property: prop,
            }]
    {
        // NOTE(review): admit() makes this lemma trusted — the proof steps
        // below establish facts (path/level agreement, vaddr alignment) but
        // the final set equality is assumed, not proved. The remaining body
        // documents the intended proof outline.
        admit();
        let path = new_subtree.value.path;
        let ps = page_size(level);
        let pt_level = INC_LEVELS - path.len();
        let cont = self.continuations[self.level as int - 1];

        // The new subtree's path has length matching the cursor's depth.
        cont.path().push_tail_property_len(cont.idx as usize);
        assert(cont.level() == self.level) by {
            if self.level == 1 {} else if self.level == 2 {} else if self.level == 3 {} else {}
        };
        assert(pt_level == self.level);

        self.cur_va_in_subtree_range();
        // The path's base vaddr is the current va aligned down to this
        // level's page size; proved index-by-index against va.to_path.
        assert(vaddr(path) == nat_align_down(self@.cur_va as nat, ps as nat) as Vaddr) by {
            self.va.to_path_vaddr_concrete(self.level as int - 1);
            let va_path = self.va.to_path(self.level as int - 1);
            self.va.to_path_len(self.level as int - 1);
            self.va.to_path_inv(self.level as int - 1);
            self.cur_subtree_inv();
            assert forall|i: int| 0 <= i < path.len() implies path.index(i) == va_path.index(i) by {
                self.va.to_path_index(self.level as int - 1, i);
                // Unfold push_tail at every ancestor level the path crosses.
                if self.level == 4 {
                    cont.path().push_tail_property_index(cont.idx as usize);
                } else if self.level == 3 {
                    cont.path().push_tail_property_index(cont.idx as usize);
                    self.continuations[3].path().push_tail_property_index(self.continuations[3].idx as usize);
                } else if self.level == 2 {
                    cont.path().push_tail_property_index(cont.idx as usize);
                    self.continuations[2].path().push_tail_property_index(self.continuations[2].idx as usize);
                    self.continuations[3].path().push_tail_property_index(self.continuations[3].idx as usize);
                } else {
                    cont.path().push_tail_property_index(cont.idx as usize);
                    self.continuations[1].path().push_tail_property_index(self.continuations[1].idx as usize);
                    self.continuations[2].path().push_tail_property_index(self.continuations[2].idx as usize);
                    self.continuations[3].path().push_tail_property_index(self.continuations[3].idx as usize);
                }
            };
            AbstractVaddr::rec_vaddr_eq_if_indices_eq(path, va_path, 0);
        };
    }
953
    /// The virtual-address range this cursor holds locked: the cursor's `prefix`
    /// aligned down (start) and up (end) to the guard-level block boundary.
    pub open spec fn locked_range(self) -> Range<Vaddr> {
        let start = self.prefix.align_down(self.guard_level as int).to_vaddr();
        let end = self.prefix.align_up(self.guard_level as int).to_vaddr();
        Range { start, end }
    }
959
    /// True iff the cursor's current virtual address lies inside `locked_range()`
    /// (inclusive of start, exclusive of end).
    pub open spec fn in_locked_range(self) -> bool {
        self.locked_range().start <= self.va.to_vaddr() < self.locked_range().end
    }
963
    /// True iff the cursor's current virtual address is at or past the end of the
    /// locked range (i.e. the cursor has walked out of the locked region).
    pub open spec fn above_locked_range(self) -> bool {
        self.va.to_vaddr() >= self.locked_range().end
    }
967
    /// If `old_va` is inside the locked range and we advance it by one guard-level
    /// page (`level == guard_level`), the resulting address is at or beyond the
    /// locked range's end — i.e. a guard-level step always exits the range.
    pub proof fn inc_at_guard_level_above_locked_range(
        old_va: AbstractVaddr, prefix: AbstractVaddr,
        guard_level: u8, level: u8, new_va_val: Vaddr,
    )
        requires
            old_va.inv(), prefix.inv(),
            1 <= guard_level <= NR_LEVELS, level == guard_level,
            new_va_val == old_va.to_vaddr() + page_size(level as PagingLevel),
            // old_va lies inside [align_down(prefix), align_up(prefix)) at guard level.
            prefix.align_down(guard_level as int).to_vaddr() <= old_va.to_vaddr(),
            old_va.to_vaddr() < prefix.align_up(guard_level as int).to_vaddr(),
        ensures
            new_va_val >= prefix.align_up(guard_level as int).to_vaddr(),
    {
        let ps_gl = page_size(guard_level as PagingLevel);
        lemma_page_size_ge_page_size(guard_level as PagingLevel);
        // Relate abstract align_down/align_up to their concrete nat counterparts,
        // so arithmetic on `nat_align_*` carries over to the abstract range bounds.
        prefix.align_diff(guard_level as int);
        prefix.align_down_concrete(guard_level as int);
        prefix.align_up_concrete(guard_level as int);
        prefix.align_down_shape(guard_level as int);
        prefix.align_down(guard_level as int).reflect_prop(
            nat_align_down(prefix.to_vaddr() as nat, ps_gl as nat) as Vaddr);
        prefix.align_up(guard_level as int).reflect_prop(
            nat_align_up(prefix.to_vaddr() as nat, ps_gl as nat) as Vaddr);
    }
993
    /// When the cursor is strictly below its guard level (and has not popped above
    /// the locked subtree), its current address is inside the locked range.
    pub proof fn prefix_in_locked_range(self)
        requires
            self.inv(),
            !self.popped_too_high,
            self.level < self.guard_level,
        ensures
            self.in_locked_range(),
    {
        let gl = self.guard_level;
        if gl >= 1 && gl <= NR_LEVELS {
            // va and prefix share all indices above the guard level, so their
            // guard-level align_down values coincide.
            self.va.align_down_to_vaddr_eq_if_upper_indices_eq(self.prefix, gl as int);
            self.va.align_down_concrete(gl as int);
            self.prefix.align_down_concrete(gl as int);
            self.prefix.align_diff(gl as int);
            // Round-trip lemmas connect the concrete nat_align_* values back to
            // AbstractVaddr form so `locked_range()` bounds can be compared.
            AbstractVaddr::from_vaddr_to_vaddr_roundtrip(
                nat_align_down(self.va.to_vaddr() as nat, page_size(gl as PagingLevel) as nat) as Vaddr);
            AbstractVaddr::from_vaddr_to_vaddr_roundtrip(
                nat_align_down(self.prefix.to_vaddr() as nat, page_size(gl as PagingLevel) as nat) as Vaddr);
            AbstractVaddr::from_vaddr_to_vaddr_roundtrip(
                nat_align_up(self.prefix.to_vaddr() as nat, page_size(gl as PagingLevel) as nat) as Vaddr);
            lemma_page_size_ge_page_size(gl as PagingLevel);
            lemma_nat_align_down_sound(self.va.to_vaddr() as nat, page_size(gl as PagingLevel) as nat);

            let ps = page_size(gl as PagingLevel) as nat;
            let prefix_val = self.prefix.to_vaddr() as nat;

            self.prefix.align_down_shape(gl as int);
            self.prefix.align_down(gl as int).reflect_prop(
                nat_align_down(prefix_val, ps) as Vaddr);
            self.prefix.align_up_concrete(gl as int);
            self.prefix.align_up(gl as int).reflect_prop(
                nat_align_up(prefix_val, ps) as Vaddr);
        }
    }
1028
    /// If the cursor is inside the locked range, then every path index at or above
    /// the guard level agrees between the cursor's `va` and its `prefix` — being in
    /// the locked block pins all the higher-level indices.
    pub proof fn in_locked_range_prefix_match(self)
        requires
            self.inv(),
            self.prefix.inv(),
            1 <= self.guard_level <= NR_LEVELS,
            self.in_locked_range(),
        ensures
            forall|i:int| self.guard_level <= i < NR_LEVELS ==> self.va.index[i] == self.prefix.index[i],
    {
        let gl = self.guard_level;
        let start = self.prefix.align_down(gl as int).to_vaddr();

        self.prefix.align_down_shape(gl as int);
        let prefix_ad = self.prefix.align_down(gl as int);

        self.prefix.align_down_concrete(gl as int);
        AbstractVaddr::from_vaddr_to_vaddr_roundtrip(nat_align_down(
            self.prefix.to_vaddr() as nat, page_size(gl as PagingLevel) as nat) as Vaddr);
        lemma_page_size_ge_page_size(gl as PagingLevel);
        lemma_nat_align_down_sound(self.prefix.to_vaddr() as nat, page_size(gl as PagingLevel) as nat);

        self.prefix.align_diff(gl as int);

        // Case split: mid guard levels use the guard-level block directly; guard level 1
        // needs the level-2 enclosing block because `same_node_indices_match` is stated
        // for the node one level up. (gl == NR_LEVELS needs no index reasoning here.)
        if gl as int >= 2 && (gl as int) < NR_LEVELS as int {
            self.prefix.align_up_concrete(gl as int);
            AbstractVaddr::from_vaddr_to_vaddr_roundtrip(
                nat_align_up(self.prefix.to_vaddr() as nat, page_size(gl as PagingLevel) as nat) as Vaddr);

            // Both va and prefix lie in the same guard-level node starting at `start`,
            // hence all indices above (gl - 1) match.
            AbstractVaddr::same_node_indices_match(
                self.va.to_vaddr(),
                self.prefix.to_vaddr(),
                start,
                (gl - 1) as PagingLevel,
            );
            AbstractVaddr::to_vaddr_from_vaddr_roundtrip(self.va);
            AbstractVaddr::to_vaddr_from_vaddr_roundtrip(self.prefix);
        } else if gl as int == 1 {
            let ps1 = page_size(1 as PagingLevel) as nat;
            let ps2 = page_size(2 as PagingLevel) as nat;
            let pv = self.prefix.to_vaddr() as nat;
            let cv = self.va.to_vaddr() as nat;
            // Start of the level-2 node containing the prefix.
            let node_start = nat_align_down(pv, ps2) as usize;

            lemma_page_size_ge_page_size(1 as PagingLevel);
            lemma_page_size_ge_page_size(2 as PagingLevel);
            page_size_monotonic(1 as PagingLevel, 2 as PagingLevel);
            lemma_page_size_divides(1 as PagingLevel, 2 as PagingLevel);
            lemma_nat_align_down_sound(pv, ps2);
            lemma_nat_align_down_sound(pv, ps1);

            self.prefix.align_up_concrete(1);
            self.prefix.align_diff(1);
            AbstractVaddr::from_vaddr_to_vaddr_roundtrip(nat_align_up(pv, ps1) as Vaddr);

            // The level-1 aligned range stays within the level-2 block, so both
            // addresses share the level-2 node.
            lemma_nat_align_down_monotone(pv, ps1, ps2);
            lemma_nat_align_down_within_block(pv, ps1, ps2);

            AbstractVaddr::same_node_indices_match(
                self.va.to_vaddr(),
                self.prefix.to_vaddr(),
                node_start,
                1 as PagingLevel,
            );
            AbstractVaddr::to_vaddr_from_vaddr_roundtrip(self.va);
            AbstractVaddr::to_vaddr_from_vaddr_roundtrip(self.prefix);
        }
    }
1108
    /// Corollary: inside the locked range the cursor's level is strictly below
    /// `NR_LEVELS` (follows from `level < guard_level <= NR_LEVELS`).
    pub proof fn in_locked_range_level_lt_nr_levels(self)
        requires self.inv(), self.in_locked_range(), !self.popped_too_high,
        ensures self.level < NR_LEVELS,
    {
        self.in_locked_range_level_lt_guard_level();
    }
1115
    /// Inside the locked range, the top-level path index of the cursor's va stays
    /// below the configuration's top-level index range end.
    pub proof fn in_locked_range_top_index_lt_top_end(self)
        requires
            self.inv(),
            self.in_locked_range(),
            !self.popped_too_high,
        ensures
            self.va.index[NR_LEVELS - 1] < C::TOP_LEVEL_INDEX_RANGE_spec().end,
    {
        self.in_locked_range_level_lt_guard_level();
        if self.guard_level as int == NR_LEVELS as int {
            // Guard at the top: the top index is forced to 0.
            assert(self.va.index[self.guard_level - 1] == 0);
            assert(self.va.index[NR_LEVELS - 1] == 0);
        } else {
            // Otherwise the top index is inherited from the prefix, which the
            // invariant bounds by the configured range.
            assert(self.va.index[NR_LEVELS - 1]
                == self.prefix.index[NR_LEVELS - 1]);
        }
    }
1136
    /// Inside the locked range (and not popped too high) the cursor's level is
    /// strictly below the guard level.
    pub proof fn in_locked_range_level_lt_guard_level(self)
        requires self.inv(), self.in_locked_range(), !self.popped_too_high,
        ensures self.level < self.guard_level,
    {
        // in_locked_range excludes above_locked_range; the invariant then forces
        // level < guard_level.
        assert(self.in_locked_range() ==> !self.above_locked_range());
    }
1143
1144 #[verifier::rlimit(10000)]
1146 pub proof fn node_within_locked_range(self, level: PagingLevel)
1147 requires
1148 self.inv(),
1149 self.in_locked_range(),
1150 1 <= level < self.guard_level,
1151 ensures
1152 self.locked_range().start <= nat_align_down(self.va.to_vaddr() as nat, page_size((level + 1) as PagingLevel) as nat) as usize,
1153 nat_align_down(self.va.to_vaddr() as nat, page_size((level + 1) as PagingLevel) as nat) as usize + page_size((level + 1) as PagingLevel) <= self.locked_range().end,
1154 {
1155 let gl = self.guard_level;
1156 let ps_gl = page_size(gl as PagingLevel) as nat;
1157 let ps = page_size((level + 1) as PagingLevel) as nat;
1158 let pv = self.prefix.to_vaddr() as nat;
1159 let va = self.va.to_vaddr() as nat;
1160
1161 lemma_page_size_ge_page_size(gl as PagingLevel);
1162 lemma_page_size_ge_page_size((level + 1) as PagingLevel);
1163 lemma_page_size_divides((level + 1) as PagingLevel, gl as PagingLevel);
1164
1165 self.prefix.align_down_concrete(gl as int);
1166 self.prefix.align_up_concrete(gl as int);
1167 self.prefix.align_diff(gl as int);
1168 AbstractVaddr::from_vaddr_to_vaddr_roundtrip(nat_align_down(pv, ps_gl) as Vaddr);
1169 AbstractVaddr::from_vaddr_to_vaddr_roundtrip(nat_align_up(pv, ps_gl) as Vaddr);
1170
1171 let start = nat_align_down(pv, ps_gl);
1172 let end = nat_align_up(pv, ps_gl);
1173
1174 lemma_nat_align_down_sound(pv, ps_gl);
1175 lemma_nat_align_up_sound(pv, ps_gl);
1176 lemma_nat_align_down_sound(va, ps);
1177 let ad = nat_align_down(va, ps);
1178
1179 let q = (ps_gl / ps) as int;
1181 vstd::arithmetic::div_mod::lemma_fundamental_div_mod(ps_gl as int, ps as int);
1182 vstd::arithmetic::div_mod::lemma_fundamental_div_mod(start as int, ps_gl as int);
1183 vstd::arithmetic::div_mod::lemma_fundamental_div_mod(end as int, ps_gl as int);
1184 let ks = start as int / ps_gl as int;
1185 let ke = end as int / ps_gl as int;
1186 vstd::arithmetic::mul::lemma_mul_is_associative(ps as int, q, ks);
1187 vstd::arithmetic::mul::lemma_mul_is_associative(ps as int, q, ke);
1188 vstd::arithmetic::div_mod::lemma_mod_multiples_vanish(q * ks, 0int, ps as int);
1189 vstd::arithmetic::div_mod::lemma_mod_multiples_vanish(q * ke, 0int, ps as int);
1190 vstd::arithmetic::div_mod::lemma_small_mod(0nat, ps);
1191 assert(start as int % ps as int == 0int);
1192 assert(end as int % ps as int == 0int);
1193
1194 assert(ad >= start) by {
1195 vstd::arithmetic::div_mod::lemma_div_is_ordered(start as int, va as int, ps as int);
1196 vstd::arithmetic::div_mod::lemma_fundamental_div_mod(start as int, ps as int);
1197 vstd::arithmetic::mul::lemma_mul_inequality(
1198 start as int / ps as int, va as int / ps as int, ps as int);
1199 };
1200
1201 vstd::arithmetic::div_mod::lemma_fundamental_div_mod(ad as int, ps as int);
1202 vstd::arithmetic::div_mod::lemma_fundamental_div_mod(end as int, ps as int);
1203 vstd::arithmetic::div_mod::lemma_div_is_ordered(ad as int, end as int, ps as int);
1204 if ad as int / ps as int == end as int / ps as int {
1205 assert(false);
1206 }
1207 assert(ad as int / ps as int + 1 <= end as int / ps as int);
1208 let ad_q = ad as int / ps as int;
1209 let end_q = end as int / ps as int;
1210 let ps_i = ps as int;
1211 vstd_extra::arithmetic::lemma_nat_align_down_sound(va, ps);
1212 vstd_extra::arithmetic::lemma_nat_align_up_sound(pv, ps_gl);
1213 assert(ad as int % ps as int == 0);
1214 assert(end as int % ps as int == 0);
1215 vstd::arithmetic::div_mod::lemma_fundamental_div_mod(ad as int, ps as int);
1216 vstd::arithmetic::div_mod::lemma_fundamental_div_mod(end as int, ps as int);
1217 assert(ad as int == (ps as int) * (ad as int / ps as int) + ad as int % ps as int);
1219 assert(end as int == (ps as int) * (end as int / ps as int) + end as int % ps as int);
1220 vstd::arithmetic::mul::lemma_mul_is_commutative(ps as int, ad as int / ps as int);
1221 vstd::arithmetic::mul::lemma_mul_is_commutative(ps as int, end as int / ps as int);
1222 assert(ad as int == ad_q * ps_i + (ad as int % ps as int));
1223 assert(ad as int % ps as int == 0);
1224 assert(ad as int == ad_q * ps_i);
1225 assert(end as int % ps as int == 0);
1226 assert(end as int == end_q * ps_i) by (nonlinear_arith)
1227 requires
1228 end as int == (ps as int) * (end as int / ps as int) + end as int % ps as int,
1229 end as int % ps as int == 0,
1230 end_q == end as int / ps as int,
1231 ps_i == ps as int;
1232 assert((ad_q + 1) * ps_i <= end_q * ps_i) by (nonlinear_arith)
1233 requires ad_q + 1 <= end_q, ps_i >= 0;
1234 assert((ad_q + 1) * ps_i == ad_q * ps_i + ps_i) by (nonlinear_arith);
1235 }
1236
    /// Both endpoints of the locked range are PAGE_SIZE-aligned, since they are
    /// aligned to the guard-level block size and PAGE_SIZE divides every level's
    /// page size.
    pub proof fn locked_range_page_aligned(self)
        requires
            self.inv(),
        ensures
            self.locked_range().end % PAGE_SIZE == 0,
            self.locked_range().start % PAGE_SIZE == 0,
    {
        let gl = self.guard_level;
        let pv = self.prefix.to_vaddr() as nat;
        let ps = page_size(gl as PagingLevel) as nat;
        lemma_page_size_ge_page_size(gl as PagingLevel);
        // PAGE_SIZE (== level-1 page size) divides the guard-level page size.
        lemma_page_size_divides(1u8, gl as PagingLevel);
        lemma_page_size_spec_level1();
        lemma_nat_align_down_sound(pv, ps);
        lemma_nat_align_up_sound(pv, ps);
        let start_va = nat_align_down(pv, ps);
        let end_va = nat_align_up(pv, ps);
        // x % ps == 0 and PAGE_SIZE | ps imply x % PAGE_SIZE == 0.
        vstd::arithmetic::div_mod::lemma_mod_mod(start_va as int, PAGE_SIZE as int, ps as int / PAGE_SIZE as int);
        vstd::arithmetic::div_mod::lemma_mod_mod(end_va as int, PAGE_SIZE as int, ps as int / PAGE_SIZE as int);
        self.prefix.align_down_concrete(gl as int);
        self.prefix.align_up_concrete(gl as int);

        // Bound the prefix and supply the power-of-two facts the solver needs to
        // keep align_up within the addressable range.
        self.prefix.to_vaddr_bounded();
        self.prefix.to_vaddr_indices_gap_bound(0);
        vstd::arithmetic::power2::lemma2_to64();
        vstd::arithmetic::power2::lemma2_to64_rest();
        crate::specs::arch::paging_consts::lemma_nr_subpage_per_huge_eq_nr_entries();
        vstd_extra::external::ilog2::lemma_usize_ilog2_to32();
        page_size_monotonic(gl as PagingLevel, NR_LEVELS as PagingLevel);
        vstd::arithmetic::power2::lemma_pow2_adds(12nat, 27nat);
        // 2^12 * 2^27 = 2^39: the top-level page size on this architecture.
        assert(page_size(NR_LEVELS as PagingLevel) == pow2(39nat));
        vstd::arithmetic::power2::lemma_pow2_adds(1nat, 48nat);
        vstd_extra::external::ilog2::lemma_pow2_increases(49nat, 64nat);

        self.prefix.align_down_shape(gl as int);
        AbstractVaddr::from_vaddr_to_vaddr_roundtrip(start_va as Vaddr);
        AbstractVaddr::from_vaddr_to_vaddr_roundtrip(end_va as Vaddr);
    }
1275
    /// The subtree currently pointed at by the cursor satisfies its invariant,
    /// obtained by unrolling the current continuation's children invariant at the
    /// current child index.
    pub proof fn cur_subtree_inv(self)
        requires
            self.inv()
        ensures
            self.cur_subtree().inv()
    {
        let cont = self.continuations[self.level - 1];
        cont.inv_children_unroll(cont.idx as int)
    }
1285
    /// If the entry at the cursor is absent, the abstract view reports the current
    /// va as not present: no mapping in the view covers it.
    pub proof fn cur_entry_absent_not_present(self)
        requires self.inv(), self.cur_entry_owner().is_absent(),
        ensures !self@.present(),
    {
        self.cur_subtree_inv();
        let cur_va = self.cur_va();
        let cur_subtree = self.cur_subtree();
        let cur_path = cur_subtree.value.path;
        // An absent subtree views as the empty mapping set.
        PageTableOwner(cur_subtree).view_rec_absent_empty(cur_path);

        // Any mapping covering cur_va would have to come from the (empty) current
        // subtree — contradiction.
        assert forall |m: Mapping| self.view_mappings().contains(m) implies
            !(m.va_range.start <= cur_va < m.va_range.end) by {
            if m.va_range.start <= cur_va < m.va_range.end {
                self.mapping_covering_cur_va_from_cur_subtree(m);
            }
        };

        // Hence the filter used by `present()` is empty.
        let filtered = self@.mappings.filter(
            |m: Mapping| m.va_range.start <= self@.cur_va < m.va_range.end);
        assert(filtered =~= set![]) by {
            assert forall |m: Mapping| !filtered.contains(m) by {
                if self@.mappings.contains(m) && m.va_range.start <= self@.cur_va < m.va_range.end {
                    assert(self.view_mappings().contains(m));
                }
            };
        };
    }
1314
    /// If the current subtree's recursive view is empty, the current va is not
    /// present in the cursor's abstract view. Same argument as
    /// `cur_entry_absent_not_present`, but with emptiness given as a hypothesis.
    pub proof fn cur_subtree_empty_not_present(self)
        requires
            self.inv(),
            PageTableOwner(self.cur_subtree()).view_rec(self.cur_subtree().value.path) =~= set![],
        ensures !self@.present(),
    {
        let cur_va = self.cur_va();

        // Any mapping covering cur_va would originate in the (empty) current subtree.
        assert forall |m: Mapping| self.view_mappings().contains(m) implies
            !(m.va_range.start <= cur_va < m.va_range.end) by {
            if m.va_range.start <= cur_va < m.va_range.end {
                self.mapping_covering_cur_va_from_cur_subtree(m);
            }
        };

        let filtered = self@.mappings.filter(
            |m: Mapping| m.va_range.start <= self@.cur_va < m.va_range.end);
        assert(filtered =~= set![]) by {
            assert forall |m: Mapping| !filtered.contains(m) by {
                if self@.mappings.contains(m) && m.va_range.start <= self@.cur_va < m.va_range.end {
                    assert(self.view_mappings().contains(m));
                }
            };
        };
        assert(filtered.len() == 0);
    }
1342
    /// If the entry at the cursor is a mapped frame, the abstract view reports the
    /// current va as present, and querying it yields the frame's physical address,
    /// size, and property.
    ///
    /// NOTE(review): the step showing `cur_va` falls within the constructed
    /// mapping's va range is currently `admit()`ed — that sub-goal still needs a
    /// real proof.
    pub proof fn cur_entry_frame_present(self)
        requires
            self.inv(),
            self.cur_entry_owner().is_frame(),
        ensures
            self@.present(),
            self@.query(self.cur_entry_owner().frame.unwrap().mapped_pa,
                self.cur_entry_owner().frame.unwrap().size,
                self.cur_entry_owner().frame.unwrap().prop),
    {
        self.cur_subtree_inv();
        self.cur_va_in_subtree_range();
        self.view_preserves_inv();
        let subtree = self.cur_subtree();
        let path = subtree.value.path;
        let frame = self.cur_entry_owner().frame.unwrap();
        let pt_level = INC_LEVELS - path.len();
        let cont = self.continuations[self.level - 1];

        cont.path().push_tail_property_len(cont.idx as usize);
        // Case split per level lets the solver evaluate `cont.level()` concretely.
        assert(cont.level() == self.level) by {
            if self.level == 1 {} else if self.level == 2 {} else if self.level == 3 {} else {}
        };
        assert(pt_level == self.level);

        // The single mapping a frame subtree contributes to the view.
        let m = Mapping {
            va_range: Range {
                start: vaddr_of::<C>(path) as int,
                end: vaddr_of::<C>(path) as int + page_size(pt_level as PagingLevel) as int,
            },
            pa_range: Range { start: frame.mapped_pa, end: (frame.mapped_pa + page_size(pt_level as PagingLevel)) as Paddr },
            page_size: page_size(pt_level as PagingLevel),
            property: frame.prop,
        };
        assert(PageTableOwner(subtree).view_rec(path) =~= set![m]);
        assert(self.view_mappings().contains(m));
        assert(m.inv());
        assert(m.va_range.start <= self@.cur_va < m.va_range.end) by {
            self.cur_va_in_subtree_range();
            admit();  // TODO(review): proof admitted for this containment sub-goal.
        };

        // The `present()` filter contains m, and finiteness gives it nonzero length.
        let filtered = self@.mappings.filter(|m2: Mapping| m2.va_range.start <= self@.cur_va < m2.va_range.end);
        assert(filtered.contains(m));
        axiom_set_intersect_finite::<Mapping>(
            self@.mappings, Set::new(|m2: Mapping| m2.va_range.start <= self@.cur_va < m2.va_range.end));
        axiom_set_contains_len(filtered, m);
    }
1392
    /// Every continuation's own entry owner along the active path (levels
    /// `level - 1 .. NR_LEVELS`) is sound with respect to the meta-region owners.
    pub open spec fn path_metaregion_sound(self, regions: MetaRegionOwners) -> bool {
        forall|i: int| #![trigger self.continuations[i]]
            self.level - 1 <= i < NR_LEVELS ==>
                self.continuations[i].entry_own.metaregion_sound(regions)
    }
1399
    /// Full meta-region soundness for the cursor: every entry owner in the whole
    /// retained tree is sound w.r.t. `regions`, and so is every entry on the
    /// active path.
    pub open spec fn metaregion_sound(self, regions: MetaRegionOwners) -> bool
    {
        &&& self.map_full_tree(|entry_owner: EntryOwner<C>, path: TreePath<NR_ENTRIES>|
                entry_owner.metaregion_sound(regions))
        &&& self.path_metaregion_sound(regions)
    }
1406
    /// Transfers meta-region soundness from (`self`, `regions0`) to
    /// (`other`, `regions1`), given that the two cursors share level and
    /// continuations and that soundness under `regions0` implies soundness under
    /// `regions1` pointwise over the tree.
    pub proof fn metaregion_preserved(self, other: Self, regions0: MetaRegionOwners, regions1: MetaRegionOwners)
        requires
            self.inv(),
            self.metaregion_sound(regions0),
            self.level == other.level,
            self.continuations =~= other.continuations,
            OwnerSubtree::implies(
                PageTableOwner::<C>::metaregion_sound_pred(regions0),
                PageTableOwner::<C>::metaregion_sound_pred(regions1)),
        ensures
            other.metaregion_sound(regions1),
    {
        let f = PageTableOwner::metaregion_sound_pred(regions0);
        let g = PageTableOwner::metaregion_sound_pred(regions1);

        // Lift the pointwise implication f ==> g through each continuation's children.
        assert forall|i: int| #![auto] self.level - 1 <= i < NR_LEVELS implies {
            other.continuations[i].map_children(g)
        } by {
            let cont = self.continuations[i];
            assert(cont.inv());
            assert(cont.map_children(f));
            reveal(CursorContinuation::inv_children);
            assert forall |j: int| 0 <= j < NR_ENTRIES && #[trigger] cont.children[j] is Some implies
                cont.children[j].unwrap().tree_predicate_map(cont.path().push_tail(j as usize), g) by {
                cont.inv_children_unroll(j);
                cont.children[j].unwrap().map_implies(cont.path().push_tail(j as usize), f, g);
            };
        };
        // The active-path entry owners also carry over via g applied at their paths.
        assert(other.path_metaregion_sound(regions1)) by {
            assert forall|i: int| #![trigger other.continuations[i]]
                self.level - 1 <= i < NR_LEVELS implies
                other.continuations[i].entry_own.metaregion_sound(regions1) by {
                self.inv_continuation(i);
                let eo = self.continuations[i].entry_own;
                assert(eo.metaregion_sound(regions0));
                assert(g(eo, self.continuations[i].path()));
            };
        };
    }
1449
    /// Soundness is preserved when the slot owners are unchanged and the slot map
    /// only grows (existing slots keep their values).
    pub proof fn metaregion_slot_owners_preserved(self, regions0: MetaRegionOwners, regions1: MetaRegionOwners)
        requires
            self.inv(),
            self.metaregion_sound(regions0),
            regions0.slot_owners =~= regions1.slot_owners,
            forall |k: usize| regions0.slots.contains_key(k) ==> #[trigger] regions1.slots.contains_key(k),
            forall |k: usize| regions0.slots.contains_key(k) ==> regions0.slots[k] == #[trigger] regions1.slots[k],
        ensures
            self.metaregion_sound(regions1),
    {
        let f = PageTableOwner::<C>::metaregion_sound_pred(regions0);
        let g = PageTableOwner::<C>::metaregion_sound_pred(regions1);
        // Pointwise: an entry sound under regions0 stays sound when only slot
        // contents relevant to it are unchanged.
        assert forall |entry: EntryOwner<C>, path: TreePath<NR_ENTRIES>|
            entry.inv() && f(entry, path) implies #[trigger] g(entry, path) by {
            entry.metaregion_sound_slot_owners_only(regions0, regions1);
        };
        self.metaregion_preserved(self, regions0, regions1);
    }
1469
    /// Soundness is preserved when exactly one slot owner (`idx`) has its reference
    /// count incremented by one while every other field of that owner — and every
    /// other slot owner — stays the same.
    pub proof fn metaregion_slot_owners_rc_increment(
        self, regions0: MetaRegionOwners, regions1: MetaRegionOwners, idx: usize)
        requires
            self.inv(),
            self.metaregion_sound(regions0),
            regions0.inv(),
            regions1.slots == regions0.slots,
            regions1.slot_owners.dom() == regions0.slot_owners.dom(),
            // Slot `idx`: ref count bumped by one, same counter location…
            regions1.slot_owners[idx].inner_perms.ref_count.value() ==
                regions0.slot_owners[idx].inner_perms.ref_count.value() + 1,
            regions1.slot_owners[idx].inner_perms.ref_count.id() ==
                regions0.slot_owners[idx].inner_perms.ref_count.id(),
            // …and every other field of the owner unchanged.
            regions1.slot_owners[idx].inner_perms.storage ==
                regions0.slot_owners[idx].inner_perms.storage,
            regions1.slot_owners[idx].inner_perms.vtable_ptr ==
                regions0.slot_owners[idx].inner_perms.vtable_ptr,
            regions1.slot_owners[idx].inner_perms.in_list ==
                regions0.slot_owners[idx].inner_perms.in_list,
            regions1.slot_owners[idx].paths_in_pt == regions0.slot_owners[idx].paths_in_pt,
            regions1.slot_owners[idx].self_addr == regions0.slot_owners[idx].self_addr,
            regions1.slot_owners[idx].raw_count == regions0.slot_owners[idx].raw_count,
            regions1.slot_owners[idx].usage == regions0.slot_owners[idx].usage,
            // The bumped count must not collide with the UNUSED sentinel.
            regions1.slot_owners[idx].inner_perms.ref_count.value()
                != REF_COUNT_UNUSED,
            // All other slot owners are untouched.
            forall |i: usize| #![trigger regions1.slot_owners[i]]
                i != idx && regions0.slot_owners.contains_key(i) ==>
                    regions1.slot_owners[i] == regions0.slot_owners[i],
        ensures
            self.metaregion_sound(regions1),
    {
        let f = PageTableOwner::<C>::metaregion_sound_pred(regions0);
        let g = PageTableOwner::<C>::metaregion_sound_pred(regions1);
        assert(OwnerSubtree::implies(f, g)) by {
            assert forall |entry: EntryOwner<C>, path: TreePath<NR_ENTRIES>|
                entry.inv() && f(entry, path) implies #[trigger] g(entry, path) by {
                if entry.meta_slot_paddr() is Some {
                    let eidx = frame_to_index(entry.meta_slot_paddr().unwrap());
                    // Only entries referring to slot `idx` need the rc-change lemma.
                    if eidx != idx {} else { entry.metaregion_sound_rc_value_changed(regions0, regions1); }
                }
            };
        };
        self.metaregion_preserved(self, regions0, regions1);
    }
1513
    /// Soundness is preserved when one slot (`changed_idx`) transitions from
    /// `raw_count == 0` to `raw_count == 1` (borrowed out), with all its other
    /// fields and all other slots unchanged. Entries in scope of the borrowed slot
    /// are excluded by the not-in-scope predicate over the retained tree.
    pub proof fn metaregion_borrow_slot(
        self, regions0: MetaRegionOwners, regions1: MetaRegionOwners, changed_idx: usize
    )
        requires
            self.inv(),
            self.metaregion_sound(regions0),
            regions1.inv(),
            forall |k: usize| regions0.slots.contains_key(k) ==> #[trigger] regions1.slots.contains_key(k),
            forall |k: usize| regions0.slots.contains_key(k) && k != changed_idx
                ==> regions0.slots[k] == #[trigger] regions1.slots[k],
            // The changed slot: raw count 0 -> 1, everything else identical.
            regions0.slot_owners[changed_idx].raw_count == 0,
            regions1.slot_owners[changed_idx].raw_count == 1,
            regions1.slot_owners[changed_idx].inner_perms
                == regions0.slot_owners[changed_idx].inner_perms,
            regions1.slot_owners[changed_idx].self_addr
                == regions0.slot_owners[changed_idx].self_addr,
            regions1.slot_owners[changed_idx].usage
                == regions0.slot_owners[changed_idx].usage,
            regions1.slot_owners[changed_idx].paths_in_pt
                == regions0.slot_owners[changed_idx].paths_in_pt,
            forall |i: usize| #![trigger regions1.slot_owners[i]]
                i != changed_idx ==> regions0.slot_owners[i] == regions1.slot_owners[i],
            regions0.slot_owners.dom() =~= regions1.slot_owners.dom(),
        ensures
            self.metaregion_sound(regions1),
    {
        let f = PageTableOwner::<C>::metaregion_sound_pred(regions0);
        let g = PageTableOwner::<C>::metaregion_sound_pred(regions1);
        let nsp = PageTableOwner::<C>::not_in_scope_pred();

        // Pointwise: soundness under regions0 plus "not in scope" implies soundness
        // under regions1; only entries referring to the changed slot need care.
        assert(OwnerSubtree::implies(
            |v: EntryOwner<C>, p: TreePath<NR_ENTRIES>| f(v, p) && nsp(v, p), g)) by {
            assert forall |entry: EntryOwner<C>, path: TreePath<NR_ENTRIES>|
                entry.inv() && f(entry, path) && nsp(entry, path)
                implies #[trigger] g(entry, path) by {
                if entry.meta_slot_paddr() is Some {
                    let eidx = frame_to_index(entry.meta_slot_paddr().unwrap());
                    if eidx != changed_idx {
                    } else if entry.is_frame() {
                        assert(regions1.slots.contains_key(eidx));
                    }
                }
            };
        };

        // Push g through each continuation's children: first establish that every
        // retained child subtree satisfies nsp, then combine f && nsp ==> g.
        assert forall |i: int|
            #![trigger self.continuations[i]]
            self.level - 1 <= i < NR_LEVELS implies {
            self.continuations[i].map_children(g)
        }
        by {
            let cont = self.continuations[i];
            reveal(CursorContinuation::inv_children);
            assert forall |j: int| 0 <= j < NR_ENTRIES
                && #[trigger] cont.children[j] is Some implies
                cont.children[j].unwrap().tree_predicate_map(cont.path().push_tail(j as usize), nsp)
            by {
                cont.inv_children_unroll(j);
                PageTableOwner::tree_not_in_scope(cont.children[j].unwrap(), cont.path().push_tail(j as usize));
            };
            assert forall |j: int| 0 <= j < NR_ENTRIES
                && #[trigger] cont.children[j] is Some implies
                cont.children[j].unwrap().tree_predicate_map(cont.path().push_tail(j as usize), g)
            by {
                cont.inv_children_unroll(j);
                cont.children[j].unwrap().map_implies_and(cont.path().push_tail(j as usize), f, nsp, g);
            };
        };

        // The active-path entry owners stay sound as well.
        assert(self.path_metaregion_sound(regions1)) by {
            assert forall|i: int| #![trigger self.continuations[i]]
                self.level - 1 <= i < NR_LEVELS implies
                self.continuations[i].entry_own.metaregion_sound(regions1) by {
                self.inv_continuation(i);
                let eo = self.continuations[i].entry_own;
                if eo.meta_slot_paddr() is Some {
                    let eidx = frame_to_index(eo.meta_slot_paddr().unwrap());
                    if eidx != changed_idx {}
                }
            };
        };
    }
1600
    /// Unfolds `metaregion_sound` to its path component: each continuation's entry
    /// owner on the active path is individually sound. Follows directly from
    /// `path_metaregion_sound`, so no proof steps are needed.
    pub proof fn cont_entries_metaregion(
        self, regions: MetaRegionOwners,
    )
        requires
            self.inv(),
            self.metaregion_sound(regions),
        ensures
            forall|i: int| #![trigger self.continuations[i]]
                self.level - 1 <= i < NR_LEVELS ==>
                    self.continuations[i].entry_own.metaregion_sound(regions),
    {
    }
1624
    /// Specification of a freshly created cursor owner: positioned at the top
    /// level, with only the top-level index set to `idx` (all lower indices 0),
    /// a single top-level continuation, and guard level at the top. `prefix` is
    /// initialized to the starting va; `popped_too_high` is false.
    pub open spec fn new_spec(owner_subtree: OwnerSubtree<C>, idx: usize, guard_perm: GuardPerm<'rcu, C>) -> Self {
        let va = AbstractVaddr {
            offset: 0,
            // All indices zero except the top-level one, which is the chosen slot.
            index: Map::new(|i: int| 0 <= i < NR_LEVELS, |i: int| 0).insert(NR_LEVELS - 1, idx as int),
            leading_bits: C::LEADING_BITS_spec() as int,
        };
        Self {
            level: NR_LEVELS as PagingLevel,
            continuations: Map::empty().insert(NR_LEVELS - 1 as int, CursorContinuation::new_spec(owner_subtree, idx, guard_perm)),
            va,
            guard_level: NR_LEVELS as PagingLevel,
            prefix: va,
            popped_too_high: false,
        }
    }
1645
    /// Axiomatized constructor: produces a tracked cursor owner equal to
    /// `new_spec` for the given subtree, top-level index, and guard permission.
    pub axiom fn new(tracked owner_subtree: OwnerSubtree<C>, idx: usize, tracked guard_perm: GuardPerm<'rcu, C>)
        -> (tracked res: Self)
        ensures
            res == Self::new_spec(owner_subtree, idx, guard_perm);
1650}
1651
/// Abstract (ghost) view of a cursor: its current virtual address and the set of
/// mappings visible through it.
pub ghost struct CursorView<C: PageTableConfig> {
    // Virtual address the cursor currently points at.
    pub cur_va: Vaddr,
    // All mappings reachable from the cursor's retained tree.
    pub mappings: Set<Mapping>,
    // Carries the page-table configuration type without storing a value.
    pub phantom: PhantomData<C>,
}
1657
/// Views a `CursorOwner` as a `CursorView` built from its current va and the
/// mappings of its retained tree.
impl<'rcu, C: PageTableConfig> View for CursorOwner<'rcu, C> {
    type V = CursorView<C>;

    open spec fn view(&self) -> Self::V {
        CursorView {
            cur_va: self.cur_va(),
            mappings: self.view_mappings(),
            phantom: PhantomData,
        }
    }
}
1669
/// Invariant on the abstract cursor view: the mapping set is finite, every
/// mapping satisfies its own invariant and lies within the configuration's
/// virtual address bounds, and no two mappings overlap.
impl<C: PageTableConfig> Inv for CursorView<C> {
    open spec fn inv(self) -> bool {
        &&& self.mappings.finite()
        &&& forall|m: Mapping| #![auto] self.mappings.contains(m) ==> m.inv()
        &&& forall|m: Mapping| #![auto] self.mappings.contains(m) ==> {
            // va range is within [bounds.0, bounds.1 + 1); end is exclusive.
            &&& vaddr_range_bounds_spec::<C>().0 <= m.va_range.start
            &&& m.va_range.end <= vaddr_range_bounds_spec::<C>().1 + 1
        }
        &&& self.non_overlapping()
    }
}
1686
impl<C: PageTableConfig> CursorView<C> {
    /// Any two distinct mappings in the view have disjoint virtual address ranges:
    /// one ends at or before the other begins.
    pub open spec fn non_overlapping(self) -> bool {
        forall|m: Mapping, n: Mapping| #![auto]
            self.mappings.contains(m) ==>
            self.mappings.contains(n) ==>
            m != n ==>
            m.va_range.end <= n.va_range.start || n.va_range.end <= m.va_range.start
    }
}
1698
/// Assumed: every mapping in a well-formed cursor owner's view lies within the
/// configuration's virtual address bounds.
/// NOTE(review): this is an axiom, not a proven lemma — it is trusted.
pub axiom fn axiom_view_in_vaddr_range<'rcu, C: PageTableConfig>(
    owner: &CursorOwner<'rcu, C>,
)
    requires
        owner.inv(),
    ensures
        forall |m: Mapping| #![auto] owner.view_mappings().contains(m) ==> {
            &&& vaddr_range_bounds_spec::<C>().0 <= m.va_range.start
            &&& m.va_range.end <= vaddr_range_bounds_spec::<C>().1 + 1
        };
1729
/// A well-formed cursor owner's view satisfies the `CursorView` invariant:
/// non-overlap, finiteness, and per-mapping invariants are proven lemmas; the
/// vaddr-range bound comes from `axiom_view_in_vaddr_range`.
impl<'rcu, C: PageTableConfig> InvView for CursorOwner<'rcu, C> {
    proof fn view_preserves_inv(self) {
        self.view_non_overlapping();
        self.view_mappings_finite();
        self.view_mapping_inv();
        axiom_view_in_vaddr_range::<C>(&self);
    }
}
1753
impl<'rcu, C: PageTableConfig> CursorOwner<'rcu, C> {
    /// The cursor's view has pairwise non-overlapping mappings; delegates to the
    /// corresponding lemma on the underlying page-table owner.
    pub proof fn view_non_overlapping(self)
        requires
            self.inv(),
        ensures
            self@.non_overlapping(),
    {
        self.as_page_table_owner_view_non_overlapping();
    }
}
1769
/// Invariant on the concrete cursor: level within [1, NR_LEVELS], guard level at
/// or above the current level, va at or past the barrier start and page-aligned.
impl<'rcu, C: PageTableConfig, A: InAtomicMode> Inv for Cursor<'rcu, C, A> {
    open spec fn inv(self) -> bool {
        &&& 1 <= self.level <= NR_LEVELS
        &&& self.level <= self.guard_level <= NR_LEVELS
        &&& self.va >= self.barrier_va.start
        &&& self.va % PAGE_SIZE == 0
    }
}
1779
/// Well-formedness relation between the concrete `Cursor` and its ghost
/// `CursorOwner`: the abstract va reflects the concrete one, levels agree, each
/// populated path slot (one per level at or above the current level) matches the
/// corresponding continuation's guard pointer, and the barrier range equals the
/// owner's locked range.
impl<'rcu, C: PageTableConfig, A: InAtomicMode> OwnerOf for Cursor<'rcu, C, A> {
    type Owner = CursorOwner<'rcu, C>;

    open spec fn wf(self, owner: Self::Owner) -> bool {
        &&& owner.va.reflect(self.va)
        &&& self.level == owner.level
        &&& owner.guard_level == self.guard_level
        // path[k] is populated for every level the cursor has descended through;
        // the four cases below unroll levels 4 down to 1 (path index = level - 1).
        &&& self.level <= 4 ==> {
            &&& self.path[3] is Some
            &&& owner.continuations.contains_key(3)
            &&& owner.continuations[3].guard_perm.pptr() == self.path[3].unwrap()
        }
        &&& self.level <= 3 ==> {
            &&& self.path[2] is Some
            &&& owner.continuations.contains_key(2)
            &&& owner.continuations[2].guard_perm.pptr() == self.path[2].unwrap()
        }
        &&& self.level <= 2 ==> {
            &&& self.path[1] is Some
            &&& owner.continuations.contains_key(1)
            &&& owner.continuations[1].guard_perm.pptr() == self.path[1].unwrap()
        }
        &&& self.level == 1 ==> {
            &&& self.path[0] is Some
            &&& owner.continuations.contains_key(0)
            &&& owner.continuations[0].guard_perm.pptr() == self.path[0].unwrap()
        }
        &&& self.barrier_va.start == owner.locked_range().start
        &&& self.barrier_va.end == owner.locked_range().end
    }
}
1812
// Marker impl: `Cursor` participates in the ownership model machinery; all
// members use the trait's defaults.
impl<'rcu, C: PageTableConfig, A: InAtomicMode> ModelOf for Cursor<'rcu, C, A> {

}
1816
1817}