ostd/specs/mm/page_table/cursor/owners.rs

use vstd::prelude::*;

use vstd_extra::ownership::*;
use vstd_extra::ghost_tree::TreePath;
use vstd_extra::undroppable::*;

use crate::mm::page_table::*;

use core::marker::PhantomData;
use core::ops::Range;

use crate::mm::{Paddr, PagingConstsTrait, PagingLevel, Vaddr};
use crate::mm::page_prop::PageProperty;
use crate::specs::arch::mm::{CONST_NR_ENTRIES, NR_ENTRIES, NR_LEVELS, PAGE_SIZE};
use crate::specs::arch::paging_consts::PagingConsts;
use crate::specs::arch::mm::MAX_USERSPACE_VADDR;
use crate::specs::mm::page_table::node::GuardPerm;
use crate::specs::mm::page_table::owners::*;
use crate::specs::mm::page_table::AbstractVaddr;
use crate::specs::mm::page_table::Guards;
use crate::specs::mm::page_table::Mapping;
use crate::specs::mm::page_table::view::PageTableView;
use crate::specs::task::InAtomicMode;
use crate::specs::mm::frame::meta_region_owners::MetaRegionOwners;

verus! {

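/// Tracked ghost state for one level on the cursor's path: the owner of the
/// node entry the cursor descended through, the index taken at that level,
/// the node's depth in the owner tree (`tree_level`), the child subtrees that
/// remain in place, the tree path to the node, and the guard permission
/// witnessing that the node is locked.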
pub tracked struct CursorContinuation<'rcu, C: PageTableConfig> {
    pub entry_own: EntryOwner<C>,
    pub idx: usize,
    pub tree_level: nat,
    pub children: Seq<Option<OwnerSubtree<C>>>,
    pub path: TreePath<CONST_NR_ENTRIES>,
    pub guard_perm: GuardPerm<'rcu, C>,
}

impl<'rcu, C: PageTableConfig> CursorContinuation<'rcu, C> {

    pub open spec fn child(self) -> OwnerSubtree<C> {
        self.children[self.idx as int].unwrap()
    }

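    /// `take_child`/`put_child` move the subtree at the current index out of
    /// and back into `children`, leaving `None` in the vacated slot;
    /// `take_put_child` proves that the two operations compose to the identity.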
    pub open spec fn take_child_spec(self) -> (OwnerSubtree<C>, Self) {
        let child = self.children[self.idx as int].unwrap();
        let cont = Self {
            children: self.children.remove(self.idx as int).insert(self.idx as int, None),
            ..self
        };
        (child, cont)
    }

    #[verifier::returns(proof)]
    pub proof fn take_child(tracked &mut self) -> (res: OwnerSubtree<C>)
        requires
            old(self).idx < old(self).children.len(),
            old(self).children[old(self).idx as int] is Some,
        ensures
            res == old(self).take_child_spec().0,
            *self == old(self).take_child_spec().1,
    {
        let tracked child = self.children.tracked_remove(old(self).idx as int).tracked_unwrap();
        self.children.tracked_insert(old(self).idx as int, None);
        child
    }

    pub open spec fn put_child_spec(self, child: OwnerSubtree<C>) -> Self {
        Self {
            children: self.children.remove(self.idx as int).insert(self.idx as int, Some(child)),
            ..self
        }
    }

    #[verifier::returns(proof)]
    pub proof fn put_child(tracked &mut self, tracked child: OwnerSubtree<C>)
        requires
            old(self).idx < old(self).children.len(),
            old(self).children[old(self).idx as int] is None,
        ensures
            *self == old(self).put_child_spec(child),
    {
        let _ = self.children.tracked_remove(old(self).idx as int);
        self.children.tracked_insert(old(self).idx as int, Some(child));
    }

    pub proof fn take_put_child(self)
        requires
            self.idx < self.children.len(),
            self.children[self.idx as int] is Some,
        ensures
            self.take_child_spec().1.put_child_spec(self.take_child_spec().0) == self,
    {
        let child = self.take_child_spec().0;
        let cont = self.take_child_spec().1;
        assert(cont.put_child_spec(child).children == self.children);
    }

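    /// Descending one level: split off the subtree at the current index as a
    /// new continuation (one tree level deeper, starting at `idx`, holding the
    /// supplied guard permission) and leave `None` behind in this
    /// continuation's `children`.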
    pub open spec fn make_cont_spec(self, idx: usize, guard_perm: GuardPerm<'rcu, C>) -> (Self, Self) {
        let child = Self {
            entry_own: self.children[self.idx as int].unwrap().value,
            tree_level: (self.tree_level + 1) as nat,
            idx,
            children: self.children[self.idx as int].unwrap().children,
            path: self.path.push_tail(self.idx as usize),
            guard_perm,
        };
        let cont = Self {
            children: self.children.update(self.idx as int, None),
            ..self
        };
        (child, cont)
    }

    #[verifier::returns(proof)]
    pub axiom fn make_cont(tracked &mut self, idx: usize, tracked guard_perm: Tracked<GuardPerm<'rcu, C>>) -> (res: Self)
        requires
            old(self).all_some(),
            old(self).idx < NR_ENTRIES(),
            idx < NR_ENTRIES(),
        ensures
            res == old(self).make_cont_spec(idx, guard_perm@).0,
            *self == old(self).make_cont_spec(idx, guard_perm@).1,
    ;

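    /// Ascending one level: repack a child continuation as an `OwnerSubtree`,
    /// store it back into the slot at the current index, and hand the child's
    /// guard permission back to the caller.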
    pub open spec fn restore_spec(self, child: Self) -> (Self, GuardPerm<'rcu, C>) {
        let child_node = OwnerSubtree {
            value: child.entry_own,
            level: child.tree_level,
            children: child.children,
        };
        (Self { children: self.children.update(self.idx as int, Some(child_node)), ..self }, child.guard_perm)
    }

    #[verifier::returns(proof)]
    pub axiom fn restore(tracked &mut self, tracked child: Self) -> (tracked guard_perm: GuardPerm<'rcu, C>)
        ensures
            *self == old(self).restore_spec(child).0,
            guard_perm == old(self).restore_spec(child).1,
    ;

    pub open spec fn map_children(self, f: spec_fn(EntryOwner<C>, TreePath<CONST_NR_ENTRIES>) -> bool) -> bool {
        forall |i: int| #![auto] 0 <= i < self.children.len() ==>
            self.children[i] is Some ==>
            self.children[i].unwrap().tree_predicate_map(self.path.push_tail(i as usize), f)
    }

    pub open spec fn level(self) -> PagingLevel {
        self.entry_own.node.unwrap().level
    }

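    /// Well-formedness: `children` has exactly `NR_ENTRIES()` slots, the index
    /// is in range, every present child satisfies its own invariant and sits
    /// one tree level below this node at the matching paging level, the entry
    /// owns a node whose lock is witnessed by `guard_perm`, and `tree_level`
    /// is consistent with the node's paging level.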
    pub open spec fn inv(self) -> bool {
        &&& self.children.len() == NR_ENTRIES()
        &&& 0 <= self.idx < NR_ENTRIES()
        &&& forall|i: int|
            #![trigger self.children[i]]
            0 <= i < NR_ENTRIES() ==>
            self.children[i] is Some ==> {
                &&& self.children[i].unwrap().value.parent_level == self.level()
                &&& self.children[i].unwrap().inv()
                &&& self.children[i].unwrap().level == self.tree_level + 1
            }
        &&& self.entry_own.is_node()
        &&& self.entry_own.inv()
        &&& self.entry_own.node.unwrap().relate_guard_perm(self.guard_perm)
        &&& self.tree_level == INC_LEVELS() - self.level()
        &&& self.tree_level < INC_LEVELS() - 1
    }

    pub open spec fn all_some(self) -> bool {
        forall|i: int| 0 <= i < NR_ENTRIES() ==> self.children[i] is Some
    }

    pub open spec fn all_but_index_some(self) -> bool {
        &&& forall|i: int| 0 <= i < self.idx ==> self.children[i] is Some
        &&& forall|i: int| self.idx < i < NR_ENTRIES() ==> self.children[i] is Some
        &&& self.children[self.idx as int] is None
    }

    pub open spec fn inc_index(self) -> Self {
        Self {
            idx: (self.idx + 1) as usize,
            ..self
        }
    }

    pub proof fn do_inc_index(tracked &mut self)
        requires
            old(self).idx + 1 < NR_ENTRIES(),
        ensures
            *self == old(self).inc_index(),
    {
        self.idx = (self.idx + 1) as usize;
    }

    /*
    pub proof fn never_drop_preserves_children_not_locked(
        self,
        guard: PageTableGuard<'rcu, C>,
        guards0: Guards<'rcu, C>,
        guards1: Guards<'rcu, C>)
        requires
            self.inv(),
            self.children_not_locked(guards0),
            <PageTableGuard<'rcu, C> as Undroppable>::constructor_requires(guard, guards0),
            <PageTableGuard<'rcu, C> as Undroppable>::constructor_ensures(guard, guards0, guards1),
        ensures
            self.children_not_locked(guards1),
    {
        admit();
        /*
        assert forall|i: int| 0 <= i < self.children.len() && self.children[i] is Some ==>
            PageTableOwner::unlocked(self.children[i].unwrap(), self.path, guards1) by {
            PageTableOwner::map_implies(self.children[i].unwrap(), self.path,
                |owner: EntryOwner<C>, path: TreePath<CONST_NR_ENTRIES>|
                    guards0.unlocked(owner.node.unwrap().meta_perm.addr()),
                |owner: EntryOwner<C>, path: TreePath<CONST_NR_ENTRIES>|
                    guards1.unlocked(owner.node.unwrap().meta_perm.addr()));
        }
        */
    }
    */

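    /// Whether `guards` records the lock on this continuation's node,
    /// identified by the address behind the guard permission.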
    pub open spec fn node_locked(self, guards: Guards<'rcu, C>) -> bool {
        guards.lock_held(self.guard_perm.value().inner.inner@.ptr.addr())
    }

    pub open spec fn relate_region(self, regions: MetaRegionOwners) -> bool {
        &&& self.entry_own.node.unwrap().relate_region(regions)
        &&& forall|i: int| #![auto] 0 <= i < self.children.len() && self.children[i] is Some ==>
            PageTableOwner(self.children[i].unwrap()).relate_region_rec(self.entry_own.node.unwrap().path.push_tail(i as usize), regions)
    }
}

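/// Ghost owner of a page-table cursor: one `CursorContinuation` per level on
/// the cursor's path (keyed by paging level minus one), the abstract virtual
/// address, the level at which the traversed subtree was locked
/// (`guard_level`), the locked-range prefix, and a flag recording that the
/// cursor popped up to or above the guard level while still inside the locked
/// range.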
#[rustc_has_incoherent_inherent_impls]
pub tracked struct CursorOwner<'rcu, C: PageTableConfig> {
    pub level: PagingLevel,
    pub continuations: Map<int, CursorContinuation<'rcu, C>>,
    pub va: AbstractVaddr,
    pub guard_level: PagingLevel,
    pub prefix: AbstractVaddr,
    pub popped_too_high: bool,
}

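// The per-level clauses below unroll the invariant for the four paging
// levels: every continuation still on the path satisfies its own invariant,
// agrees with the corresponding index of `va`, and holds a guard on a node
// distinct from the nodes guarded at the other levels.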
impl<'rcu, C: PageTableConfig> Inv for CursorOwner<'rcu, C> {
    open spec fn inv(self) -> bool {
        &&& self.va.inv()
        &&& 1 <= self.level <= NR_LEVELS()
        &&& self.guard_level <= NR_LEVELS()
        // The cursor is allowed to pop out of the guard range only when it reaches the end of the locked range.
        // This allows the user to reason solely about the current vaddr and not keep track of the cursor's level.
        &&& self.popped_too_high ==> self.level >= self.guard_level && self.in_locked_range()
        &&& !self.popped_too_high ==> self.level < self.guard_level || self.above_locked_range()
        &&& self.continuations[self.level - 1].all_some()
        &&& forall|i: int| self.level <= i < NR_LEVELS() ==> {
            (#[trigger] self.continuations[i]).all_but_index_some()
        }
        &&& self.prefix.inv()
        &&& forall|i: int| i < self.guard_level ==> self.prefix.index[i] == 0
        &&& self.level <= 4 ==> {
            &&& self.continuations.contains_key(3)
            &&& self.continuations[3].inv()
            &&& self.continuations[3].level() == 4
            // There is no level-5 page table, but 5 would be the level of the parent of the root node.
            &&& self.continuations[3].entry_own.parent_level == 5
            &&& self.va.index[3] == self.continuations[3].idx
        }
        &&& self.level <= 3 ==> {
            &&& self.continuations.contains_key(2)
            &&& self.continuations[2].inv()
            &&& self.continuations[2].level() == 3
            &&& self.continuations[2].entry_own.parent_level == 4
            &&& self.va.index[2] == self.continuations[2].idx
            &&& self.continuations[2].guard_perm.value().inner.inner@.ptr.addr() !=
                self.continuations[3].guard_perm.value().inner.inner@.ptr.addr()
        }
        &&& self.level <= 2 ==> {
            &&& self.continuations.contains_key(1)
            &&& self.continuations[1].inv()
            &&& self.continuations[1].level() == 2
            &&& self.continuations[1].entry_own.parent_level == 3
            &&& self.va.index[1] == self.continuations[1].idx
            &&& self.continuations[1].guard_perm.value().inner.inner@.ptr.addr() !=
                self.continuations[2].guard_perm.value().inner.inner@.ptr.addr()
            &&& self.continuations[1].guard_perm.value().inner.inner@.ptr.addr() !=
                self.continuations[3].guard_perm.value().inner.inner@.ptr.addr()
        }
        &&& self.level == 1 ==> {
            &&& self.continuations.contains_key(0)
            &&& self.continuations[0].inv()
            &&& self.continuations[0].level() == 1
            &&& self.continuations[0].entry_own.parent_level == 2
            &&& self.va.index[0] == self.continuations[0].idx
            &&& self.continuations[0].guard_perm.value().inner.inner@.ptr.addr() !=
                self.continuations[1].guard_perm.value().inner.inner@.ptr.addr()
            &&& self.continuations[0].guard_perm.value().inner.inner@.ptr.addr() !=
                self.continuations[2].guard_perm.value().inner.inner@.ptr.addr()
            &&& self.continuations[0].guard_perm.value().inner.inner@.ptr.addr() !=
                self.continuations[3].guard_perm.value().inner.inner@.ptr.addr()
        }
    }
}

impl<'rcu, C: PageTableConfig> CursorOwner<'rcu, C> {

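    /// Predicates intended for `map_children`: every node owner in a subtree
    /// is unlocked in `guards`, respectively unlocked except for the node at
    /// the given address.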
    pub open spec fn node_unlocked(guards: Guards<'rcu, C>) ->
        (spec_fn(EntryOwner<C>, TreePath<CONST_NR_ENTRIES>) -> bool) {
        |owner: EntryOwner<C>, path: TreePath<CONST_NR_ENTRIES>|
            owner.is_node() ==>
            guards.unlocked(owner.node.unwrap().meta_perm.addr())
    }

    pub open spec fn node_unlocked_except(guards: Guards<'rcu, C>, addr: usize) ->
        (spec_fn(EntryOwner<C>, TreePath<CONST_NR_ENTRIES>) -> bool) {
        |owner: EntryOwner<C>, path: TreePath<CONST_NR_ENTRIES>|
            owner.is_node() ==>
            owner.node.unwrap().meta_perm.addr() != addr ==>
            guards.unlocked(owner.node.unwrap().meta_perm.addr())
    }

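    /// Lock-state summaries over the cursor's remaining subtrees:
    /// `children_not_locked` says no node hanging off the path is locked,
    /// `only_current_locked` exempts only the node the cursor currently points
    /// at, and `nodes_locked` (below) says the lock is held on every node the
    /// cursor itself has descended through.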
    pub open spec fn children_not_locked(self, guards: Guards<'rcu, C>) -> bool {
        forall|i: int|
            #![trigger self.continuations[i]]
            self.level - 1 <= i < NR_LEVELS() ==> {
                self.continuations[i].map_children(Self::node_unlocked(guards))
            }
    }

    pub open spec fn only_current_locked(self, guards: Guards<'rcu, C>) -> bool {
        forall|i: int|
            #![trigger self.continuations[i]]
            self.level - 1 <= i < NR_LEVELS() ==>
            self.continuations[i].map_children(Self::node_unlocked_except(guards, self.cur_entry_owner().node.unwrap().meta_perm.addr()))
    }

    pub proof fn never_drop_preserves_nodes_locked(
        self,
        guard: PageTableGuard<'rcu, C>,
        guards0: Guards<'rcu, C>,
        guards1: Guards<'rcu, C>)
        requires
            self.inv(),
            self.nodes_locked(guards0),
            <PageTableGuard<'rcu, C> as Undroppable>::constructor_requires(guard, guards0),
            <PageTableGuard<'rcu, C> as Undroppable>::constructor_ensures(guard, guards0, guards1),
            forall|i: int| #![auto] self.level - 1 <= i < NR_LEVELS() ==>
                self.continuations[i].guard_perm.value().inner.inner@.ptr.addr() !=
                    guard.inner.inner@.ptr.addr(),
        ensures
            self.nodes_locked(guards1),
    { admit() }

    pub proof fn never_drop_preserves_children_not_locked(
        self,
        guard: PageTableGuard<'rcu, C>,
        guards0: Guards<'rcu, C>,
        guards1: Guards<'rcu, C>)
        requires
            self.inv(),
            self.children_not_locked(guards0),
            <PageTableGuard<'rcu, C> as Undroppable>::constructor_requires(guard, guards0),
            <PageTableGuard<'rcu, C> as Undroppable>::constructor_ensures(guard, guards0, guards1),
        ensures
            self.children_not_locked(guards1),
    {
        admit()
    }

    pub proof fn map_children_implies(
        self,
        f: spec_fn(EntryOwner<C>, TreePath<CONST_NR_ENTRIES>) -> bool,
        g: spec_fn(EntryOwner<C>, TreePath<CONST_NR_ENTRIES>) -> bool,
    )
        requires
            self.inv(),
            OwnerSubtree::implies(f, g),
            forall|i: int| #![auto] self.level - 1 <= i < NR_LEVELS() ==>
                self.continuations[i].map_children(f),
        ensures
            forall|i: int| #![auto] self.level - 1 <= i < NR_LEVELS() ==>
                self.continuations[i].map_children(g),
    {
        assert forall|i: int| #![auto] self.level - 1 <= i < NR_LEVELS() implies self.continuations[i].map_children(g) by {
            let cont = self.continuations[i];
            assert forall|j: int| #![auto] 0 <= j < cont.children.len() && cont.children[j] is Some
                implies cont.children[j].unwrap().tree_predicate_map(cont.path.push_tail(j as usize), g) by {
                OwnerSubtree::map_implies(cont.children[j].unwrap(), cont.path.push_tail(j as usize), f, g);
            }
        }
    }

    pub open spec fn nodes_locked(self, guards: Guards<'rcu, C>) -> bool {
        forall|i: int|
            #![trigger self.continuations[i]]
            self.level - 1 <= i < NR_LEVELS() ==> {
                self.continuations[i].node_locked(guards)
            }
    }

    pub open spec fn index(self) -> usize {
        self.continuations[self.level - 1].idx
    }

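    /// Advance the cursor by one entry at its current level: bump the index of
    /// the active continuation, mirror the change in `va.index`, and clear
    /// `popped_too_high`.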
    pub open spec fn inc_index(self) -> Self {
        Self {
            continuations: self.continuations.insert(self.level - 1, self.continuations[self.level - 1].inc_index()),
            va: AbstractVaddr {
                offset: self.va.offset,
                index: self.va.index.insert(self.level - 1, self.continuations[self.level - 1].inc_index().idx as int)
            },
            popped_too_high: false,
            ..self
        }
    }

    pub proof fn do_inc_index(tracked &mut self)
        requires
            old(self).inv(),
            old(self).continuations[old(self).level - 1].idx + 1 < NR_ENTRIES(),
            old(self).in_locked_range(),
        ensures
            self.inv(),
            *self == old(self).inc_index(),
    {
        self.popped_too_high = false;
        let tracked mut cont = self.continuations.tracked_remove(self.level - 1);
        if self.level >= self.guard_level {
            let ghost size = self.locked_range().end - self.locked_range().start;
            let ghost new_va = AbstractVaddr {
                offset: self.va.offset,
                index: self.va.index.insert(self.level - 1, cont.idx + 1)
            };
            assert(new_va.to_vaddr() == self.va.to_vaddr() + size) by { admit() };
        }

        cont.do_inc_index();
        self.va.index.tracked_insert(self.level - 1, cont.idx as int);
        self.continuations.tracked_insert(self.level - 1, cont);
        assert(self.continuations == old(self).continuations.insert(self.level - 1, cont));
    }

    pub proof fn inc_index_increases_va(self)
        requires
            self.inv(),
        ensures
            self.inc_index().va.to_vaddr() > self.va.to_vaddr(),
    { admit() }

    pub open spec fn cur_va(self) -> Vaddr {
        self.va.to_vaddr()
    }

    pub proof fn inv_continuation(self, i: int)
        requires
            self.inv(),
            self.level - 1 <= i <= NR_LEVELS() - 1,
        ensures
            self.continuations.contains_key(i),
            self.continuations[i].inv(),
            self.continuations[i].children.len() == NR_ENTRIES(),
    {
        assert(self.continuations.contains_key(i));
    }

    pub open spec fn cur_entry_owner(self) -> EntryOwner<C> {
        self.continuations[self.level - 1].children[self.index() as int].unwrap().value
    }

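    /// The virtual-address range covered by the subtree locked at
    /// `guard_level`, obtained by aligning `prefix` down and up at that level;
    /// `in_locked_range` and `above_locked_range` position the current `va`
    /// relative to it.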
    pub open spec fn locked_range(self) -> Range<Vaddr> {
        let start = self.prefix.align_down(self.guard_level as int).to_vaddr();
        let end = self.prefix.align_up(self.guard_level as int).to_vaddr();
        Range { start, end }
    }

    pub open spec fn in_locked_range(self) -> bool {
        self.locked_range().start <= self.va.to_vaddr() < self.locked_range().end
    }

    pub open spec fn above_locked_range(self) -> bool {
        self.va.to_vaddr() >= self.locked_range().end
    }

    pub proof fn prefix_in_locked_range(self)
        requires
            forall|i: int| i >= self.guard_level ==> self.va.index[i] == self.prefix.index[i],
        ensures
            self.in_locked_range(),
    { admit() }

    pub open spec fn relate_region(self, regions: MetaRegionOwners) -> bool {
        let f = |owner: EntryOwner<C>, path: TreePath<CONST_NR_ENTRIES>|
            owner.is_node() ==> owner.node.unwrap().relate_region(regions);
        forall|i: int| #![auto] self.level - 1 <= i < NR_LEVELS() ==> self.continuations[i].map_children(f)
    }
}

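/// Abstract view of a cursor: the current virtual address together with the
/// set of mappings of the page table reconstructed from the owner's ghost
/// state.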
pub tracked struct CursorView<C: PageTableConfig> {
    pub cur_va: Vaddr,
    pub mappings: Set<Mapping>,
    pub phantom: PhantomData<C>,
}

impl<'rcu, C: PageTableConfig> View for CursorOwner<'rcu, C> {
    type V = CursorView<C>;

    open spec fn view(&self) -> Self::V {
        CursorView {
            cur_va: self.cur_va(),
            mappings: self.into_pt_owner_rec().view().mappings,
            phantom: PhantomData,
        }
    }
}

impl<C: PageTableConfig> Inv for CursorView<C> {
    open spec fn inv(self) -> bool {
        &&& self.cur_va < MAX_USERSPACE_VADDR()
        &&& forall|m: Mapping| #![auto] self.mappings.contains(m) ==> m.inv()
        &&& forall|m: Mapping, n: Mapping| #![auto]
            self.mappings.contains(m) ==>
            self.mappings.contains(n) ==>
            m != n ==>
            m.va_range.end <= n.va_range.start || n.va_range.end <= m.va_range.start
    }
}

impl<'rcu, C: PageTableConfig> InvView for CursorOwner<'rcu, C> {
    proof fn view_preserves_inv(self) {
        assert(self@.inv()) by { admit() };
    }
}

impl<'rcu, C: PageTableConfig, A: InAtomicMode> Inv for Cursor<'rcu, C, A> {
    open spec fn inv(self) -> bool {
        &&& 1 <= self.level <= NR_LEVELS()
        &&& self.level <= self.guard_level <= NR_LEVELS()
//        &&& forall|i: int| 0 <= i < self.guard_level - self.level ==> self.path[i] is Some
    }
}

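// `wf` ties the concrete cursor to its ghost owner: the abstract vaddr
// reflects `va`, the levels and guard levels agree, each populated `path`
// slot is the pointer behind the guard permission held at that level, and
// `barrier_va` coincides with the owner's locked range.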
impl<'rcu, C: PageTableConfig, A: InAtomicMode> OwnerOf for Cursor<'rcu, C, A> {
    type Owner = CursorOwner<'rcu, C>;

    open spec fn wf(self, owner: Self::Owner) -> bool {
        &&& owner.va.reflect(self.va)
        &&& self.level == owner.level
        &&& owner.guard_level == self.guard_level
//        &&& owner.index() == self.va % page_size(self.level)
        &&& self.level <= 4 ==> {
            &&& self.path[3] is Some
            &&& owner.continuations.contains_key(3)
            &&& owner.continuations[3].guard_perm.pptr() == self.path[3].unwrap()
        }
        &&& self.level <= 3 ==> {
            &&& self.path[2] is Some
            &&& owner.continuations.contains_key(2)
            &&& owner.continuations[2].guard_perm.pptr() == self.path[2].unwrap()
        }
        &&& self.level <= 2 ==> {
            &&& self.path[1] is Some
            &&& owner.continuations.contains_key(1)
            &&& owner.continuations[1].guard_perm.pptr() == self.path[1].unwrap()
        }
        &&& self.level == 1 ==> {
            &&& self.path[0] is Some
            &&& owner.continuations.contains_key(0)
            &&& owner.continuations[0].guard_perm.pptr() == self.path[0].unwrap()
        }
        &&& self.barrier_va.start == owner.locked_range().start
        &&& self.barrier_va.end == owner.locked_range().end
    }
}

impl<'rcu, C: PageTableConfig, A: InAtomicMode> ModelOf for Cursor<'rcu, C, A> {}

} // verus!