// ostd/mm/frame/linked_list.rs

// SPDX-License-Identifier: MPL-2.0
//! Enabling linked lists of frames without heap allocation.
//!
//! This module leverages the customizability of the metadata system (see
//! [super::meta]) to allow any type of frame to be used in a linked list.
6use vstd::prelude::*;
7
8use vstd::atomic::PermissionU64;
9use vstd::seq_lib::*;
10use vstd::simple_pptr::*;
11
12use vstd_extra::cast_ptr::*;
13use vstd_extra::drop_tracking::{Drop, TrackDrop};
14use vstd_extra::ownership::*;
15use vstd_extra::trans_macros::*;
16
17use crate::mm::frame::meta::mapping::frame_to_meta;
18use crate::mm::frame::meta::REF_COUNT_UNUSED;
19use crate::mm::frame::UniqueFrame;
20use crate::mm::{Paddr, PagingLevel, Vaddr};
21use crate::specs::arch::mm::{MAX_NR_PAGES, MAX_PADDR, PAGE_SIZE};
22use crate::specs::mm::frame::linked_list::linked_list_owners::*;
23use crate::specs::mm::frame::meta_owners::{MetaSlotOwner, Metadata};
24use crate::specs::mm::frame::meta_region_owners::MetaRegionOwners;
25use crate::specs::mm::frame::unique::UniqueFrameOwner;
26
27use core::borrow::BorrowMut;
28use core::{
29    ops::{Deref, DerefMut},
30    ptr::NonNull,
31    sync::atomic::{AtomicU64, Ordering},
32};
33
34use crate::specs::*;
35
36use crate::mm::frame::meta::mapping::{frame_to_index, max_meta_slots, meta_addr, meta_to_frame, META_SLOT_SIZE};
37use crate::specs::arch::kspace::FRAME_METADATA_RANGE;
38use crate::mm::frame::meta::{get_slot, has_safe_slot, AnyFrameMeta, MetaSlot};
39
40verus! {
41
/// A link in the linked list.
///
/// Stored inside the frame's metadata slot, so a frame can be chained into an
/// intrusive [`LinkedList`] without any heap allocation.
pub struct Link<M: AnyFrameMeta + Repr<MetaSlotSmall>> {
    /// Pointer to the next link's metadata slot; `None` iff this link is the
    /// back of the list (see `LinkedListOwner::inv_at`).
    pub next: Option<ReprPtr<MetaSlot, MetadataAsLink<M>>>,
    /// Pointer to the previous link's metadata slot; `None` iff this link is
    /// the front of the list (see `LinkedListOwner::inv_at`).
    pub prev: Option<ReprPtr<MetaSlot, MetadataAsLink<M>>>,
    /// The user-provided frame metadata wrapped by this link.
    pub meta: M,
}
48
/// A linked list of frames.
///
/// Two key features that [`LinkedList`] is different from
/// [`alloc::collections::LinkedList`] is that:
///  1. It is intrusive, meaning that the links are part of the frame metadata.
///     This allows the linked list to be used without heap allocation. But it
///     disallows a frame to be in multiple linked lists at the same time.
///  2. The linked list exclusively own the frames, meaning that it takes
///     unique pointers [`UniqueFrame`]. And other bodies cannot
///     [`from_in_use`] a frame that is inside a linked list.
///  3. We also allow creating cursors at a specific frame, allowing $O(1)$
///     removal without iterating through the list at a cost of some checks.
/// # Verified Properties
/// ## Verification Design
/// The linked list is abstractly represented by a [`LinkedListOwner`]:
/// ```rust
/// tracked struct LinkedListOwner<M: AnyFrameMeta + Repr<MetaSlotStorage>> {
///     pub list: Seq<LinkOwner>,
///     pub perms: Map<int, vstd_extra::cast_ptr::PointsTo<MetaSlot, Metadata<Link<M>>>>,
///     pub list_id: u64,
/// }
/// ```
/// ## Invariant
/// The linked list uniquely owns the raw frames that it contains, so they cannot be used by other
/// data structures. The frame metadata field `in_list` is equal to `list_id` for all links in the list.
/// ```rust
///    pub open spec fn inv_at(self, i: int) -> bool {
///        &&& self.perms.contains_key(i)
///        &&& self.perms[i].addr() == self.list[i].paddr
///        &&& self.perms[i].is_init()
///        &&& self.perms[i].value().wf(self.list[i])
///        &&& i == 0 <==> self.perms[i].mem_contents().value().prev is None
///        &&& i == self.list.len() - 1 <==> self.perms[i].value().next is None
///        &&& 0 < i ==> self.perms[i].value().prev is Some && self.perms[i].value().prev.unwrap()
///            == self.perms[i - 1].pptr()
///        &&& i < self.list.len() - 1 ==> self.perms[i].value().next is Some
///            && self.perms[i].value().next.unwrap() == self.perms[i + 1].pptr()
///        &&& self.list[i].inv()
///        &&& self.list[i].in_list == self.list_id
///    }
///
///    pub open spec fn inv(self) -> bool {
///        forall|i: int| 0 <= i < self.list.len() ==> self.inv_at(i)
///    }
/// ```
/// ## Safety
/// A given linked list can only have one cursor at a time, so there are no data races.
/// The `prev` and `next` fields of the metadata for each link always points to valid
/// links in the list, so the structure is memory safe (will not read or write invalid memory).
pub struct LinkedList<M: AnyFrameMeta + Repr<MetaSlotSmall>> {
    /// Pointer to the first link's metadata slot; `None` when the list is empty.
    pub front: Option<ReprPtr<MetaSlot, MetadataAsLink<M>>>,
    /// Pointer to the last link's metadata slot; `None` when the list is empty.
    pub back: Option<ReprPtr<MetaSlot, MetadataAsLink<M>>>,
    /// The number of frames in the list.
    pub size: usize,
    /// A lazily initialized ID, used to check whether a frame is in the list.
    /// 0 means uninitialized.
    pub list_id: u64,
}
107
/// A cursor that can mutate the linked list links.
///
/// The cursor points to either a frame or the "ghost" non-element. It points
/// to the "ghost" non-element when the cursor surpasses the back of the list.
pub struct CursorMut<'a, M: AnyFrameMeta + Repr<MetaSlotSmall>> {
    /// Exclusive borrow of the list being traversed; the `&mut` guarantees at
    /// most one cursor exists for the list at a time.
    pub list: &'a mut LinkedList<M>,
    /// The link the cursor is on, or `None` for the "ghost" non-element.
    pub current: Option<ReprPtr<MetaSlot, MetadataAsLink<M>>>,
}
116
117impl<M: AnyFrameMeta + Repr<MetaSlotSmall>> LinkedList<M> {
118    /// Creates a new linked list.
119    pub const fn new() -> Self {
120        Self { front: None, back: None, size: 0, list_id: 0 }
121    }
122}
123
124impl<M: AnyFrameMeta + Repr<MetaSlotSmall>> Default for LinkedList<M> {
125    fn default() -> Self {
126        Self::new()
127    }
128}
129
// SAFETY: Only the pointers are not `Send` and `Sync`. But our interfaces
// enforce that the pointers can only be accessed through `&mut` references.
//unsafe impl<M> Send for LinkedList<M> where Link<M>: AnyFrameMeta {}
//unsafe impl<M> Sync for LinkedList<M> where Link<M>: AnyFrameMeta {}
134#[verus_verify]
135impl<M: AnyFrameMeta + Repr<MetaSlotSmall>> LinkedList<M> {
    /// Gets the number of frames in the linked list.
    #[verus_spec(s =>
        with
            Tracked(owner): Tracked<LinkedListOwner<M>>,
        requires
            self.wf(owner),
            owner.inv(),
        ensures
            s == self.model(owner).list.len(),
    )]
    pub fn size(&self) -> usize
    {
        proof {
            // Relates the viewed model list's length to `owner.list.len()`,
            // which `wf` presumably ties to the concrete `size` field —
            // needed to discharge the `ensures` clause.
            LinkedListOwner::<M>::view_preserves_len(owner.list);
        }
        self.size
    }
153
    /// Tells if the linked list is empty.
    ///
    /// The postcondition ties emptiness of `size` to the `front`/`back`
    /// pointers both being `None` (a consequence of `wf`).
    #[verus_spec(b =>
        with
            Tracked(owner): Tracked<LinkedListOwner<M>>,
        requires
            self.wf(owner),
            owner.inv(),
        ensures
            b ==> self.size == 0 && self.front is None && self.back is None,
            !b ==> self.size > 0 && self.front is Some && self.back is Some,
    )]
    pub fn is_empty(&self) -> bool
    {
        let is_empty = self.size == 0;
        is_empty
    }
170
    /// Pushes a frame to the front of the linked list.
    /// # Verified Properties
    /// ## Preconditions
    /// The list must be well-formed, with the pointers to its links' metadata slots
    /// matching the tracked permission objects. The new frame must be active, so that it is
    /// valid to call `into_raw` on it inside of `insert_before`.
    /// ## Postconditions
    /// The new frame is inserted at the front of the list, and the cursor is moved to the new frame.
    /// The list invariants are preserved.
    /// ## Safety
    /// See [`insert_before`] for the safety guarantees.
    #[verus_spec(
        with
            Tracked(regions): Tracked<&mut MetaRegionOwners>,
            Tracked(owner): Tracked<&mut LinkedListOwner<M>>,
            Tracked(frame_own): Tracked<&mut UniqueFrameOwner<Link<M>>>,
        requires
            old(self).wf(*old(owner)),
            old(owner).inv(),
            old(owner).list_id != 0,
            old(frame_own).inv(),
            old(frame_own).global_inv(*old(regions)),
            frame.wf(*old(frame_own)),
            old(frame_own).frame_link_inv(),
            old(owner).list.len() < usize::MAX,
            old(regions).inv(),
            old(regions).slots.contains_key(old(frame_own).slot_index),
            old(regions).slot_owners[old(frame_own).slot_index].inner_perms.in_list.is_for(
                old(regions).slots[old(frame_own).slot_index].value().in_list,
            ),
        ensures
            final(owner).inv(),
            final(owner).list == old(owner).list.insert(0, final(frame_own).meta_own),
            final(owner).list_id == old(owner).list_id,
            final(frame_own).meta_own.paddr == old(frame_own).meta_own.paddr,
            final(frame_own).meta_own.in_list == old(owner).list_id,
    )]
    pub fn push_front(&mut self, frame: UniqueFrame<Link<M>>) {
        // Cursor at the current front (or the ghost non-element when empty);
        // inserting before it places the frame at index 0.
        let current = self.front;
        let tracked mut cursor_own = CursorOwner::front_owner(*owner);
        let mut cursor = CursorMut { list: self, current };

        #[verus_spec(with Tracked(regions), Tracked(&mut cursor_own), Tracked(frame_own))]
        cursor.insert_before(frame);

        proof {
            // Publish the cursor's updated ghost list ownership back to the
            // caller-visible list owner.
            *owner = cursor_own.list_own;
        }
    }
220
    /// Pops a frame from the front of the linked list.
    /// # Verified Properties
    /// ## Preconditions
    /// The list must be well-formed, with the pointers to its links' metadata slots
    /// matching the tracked permission objects. The list must be non-empty, so that the
    /// current frame is valid.
    /// ## Postconditions
    /// The front frame is removed from the list, and the cursor is moved to the next frame.
    /// The list invariants are preserved.
    /// ## Safety
    /// See [`take_current`] for the safety guarantees.
    #[verus_spec(r =>
        with
            Tracked(regions): Tracked<&mut MetaRegionOwners>,
            Tracked(owner): Tracked<LinkedListOwner<M>>,
            Tracked(frame_own): Tracked<UniqueFrameOwner<Link<M>>>,
        requires
            old(regions).inv(),
            old(self).wf(owner),
            owner.inv(),
            old(regions).slots.contains_key(frame_to_index(owner.list[0].paddr)),
        ensures
            owner.list.len() == 0 ==> r.is_none(),
            r.is_some() ==> r.unwrap().0.model(r.unwrap().1@).meta == owner.list[0]@,
            r.is_some() ==> r.unwrap().1@.frame_link_inv(),
    )]
    pub fn pop_front(&mut self) -> Option<(UniqueFrame<Link<M>>, Tracked<UniqueFrameOwner<Link<M>>>)> {
        // Instantiate the per-index invariant at index 0 so `take_current`'s
        // preconditions about the front link can be discharged.
        assert(owner.list.len() > 0 ==> owner.inv_at(0));

        let tracked mut cursor_own = CursorOwner::front_owner(owner);
        let current = self.front;
        let mut cursor = CursorMut { list: self, current };

        #[verus_spec(with Tracked(regions), Tracked(&mut cursor_own))]
        cursor.take_current()
    }
257
    /// Pushes a frame to the back of the linked list.
    /// # Verified Properties
    /// ## Preconditions
    /// The list must be well-formed, with the pointers to its links' metadata slots
    /// matching the tracked permission objects. The new frame must be active, so that it is
    /// valid to call `into_raw` on it inside of `insert_before`.
    /// ## Postconditions
    /// - The new frame is inserted at the back of the list, and the cursor is moved to the new frame.
    /// - The list invariants are preserved.
    ///
    /// NOTE(review): for a non-empty list the postcondition below places the
    /// new frame at index `len - 1` (i.e. *before* the old back element), not
    /// at the end. This matches the implementation (cursor starts at
    /// `self.back`, then `insert_before`), but it contradicts the usual
    /// "push to the back" semantics — confirm whether the cursor should start
    /// at the ghost non-element instead.
    /// ## Safety
    /// See [`insert_before`] for the safety guarantees.
    #[verus_spec(
        with
            Tracked(regions): Tracked<&mut MetaRegionOwners>,
            Tracked(owner): Tracked<&mut LinkedListOwner<M>>,
            Tracked(frame_own): Tracked<&mut UniqueFrameOwner<Link<M>>>,
        requires
            old(self).wf(*old(owner)),
            old(owner).inv(),
            old(owner).list_id != 0,
            old(frame_own).inv(),
            old(frame_own).global_inv(*old(regions)),
            frame.wf(*old(frame_own)),
            old(frame_own).frame_link_inv(),
            old(owner).list.len() < usize::MAX,
            old(regions).inv(),
            old(regions).slots.contains_key(old(frame_own).slot_index),
            old(regions).slot_owners[old(frame_own).slot_index].inner_perms.in_list.is_for(
                old(regions).slots[old(frame_own).slot_index].value().in_list,
            ),
        ensures
            final(owner).inv(),
            old(owner).list.len() > 0 ==> final(owner).list == old(owner).list.insert(
                old(owner).list.len() as int - 1, final(frame_own).meta_own),
            old(owner).list.len() == 0 ==> final(owner).list == old(owner).list.insert(
                0, final(frame_own).meta_own),
            final(owner).list_id == old(owner).list_id,
            final(frame_own).meta_own.paddr == old(frame_own).meta_own.paddr,
            final(frame_own).meta_own.in_list == old(owner).list_id,
    )]
    pub fn push_back(&mut self, frame: UniqueFrame<Link<M>>) {
        // Cursor at the current back (or the ghost non-element when empty);
        // `insert_before` then inserts before that position.
        let current = self.back;
        let tracked mut cursor_own = CursorOwner::back_owner(*owner);
        let mut cursor = CursorMut { list: self, current };

        #[verus_spec(with Tracked(regions), Tracked(&mut cursor_own), Tracked(frame_own))]
        cursor.insert_before(frame);

        proof {
            // Publish the cursor's updated ghost list ownership back to the
            // caller-visible list owner.
            *owner = cursor_own.list_own;
        }
    }
310
    /// Pops a frame from the back of the linked list.
    /// # Verified Properties
    /// ## Preconditions
    /// - The list must be well-formed, with the pointers to its links' metadata slots
    /// matching the tracked permission objects.
    /// - The list must be non-empty, so that the
    /// current frame is valid.
    /// ## Postconditions
    /// - The back frame is removed from the list, and the cursor is moved to the "ghost" non-element.
    /// - The list invariants are preserved.
    /// ## Safety
    /// See [`take_current`] for the safety guarantees.
    #[verus_spec(r =>
        with
            Tracked(regions): Tracked<&mut MetaRegionOwners>,
            Tracked(owner): Tracked<LinkedListOwner<M>>,
            Tracked(frame_own): Tracked<UniqueFrameOwner<Link<M>>>,
        requires
            old(regions).inv(),
            old(self).wf(owner),
            owner.inv(),
            old(regions).slots.contains_key(frame_to_index(owner.list[owner.list.len() - 1].paddr)),
        ensures
            owner.list.len() == 0 ==> r.is_none(),
            r.is_some() ==> r.unwrap().0.model(r.unwrap().1@).meta == owner.list[owner.list.len() - 1]@,
            r.is_some() ==> r.unwrap().1@.frame_link_inv(),
    )]
    pub fn pop_back(&mut self) -> Option<(UniqueFrame<Link<M>>, Tracked<UniqueFrameOwner<Link<M>>>)> {
        // Instantiate the per-index invariant at the last index so
        // `take_current`'s preconditions about the back link can be discharged.
        assert(owner.list.len() > 0 ==> owner.inv_at(owner.list.len() - 1));

        let current = self.back;
        let tracked mut cursor_own = CursorOwner::back_owner(owner);
        let mut cursor = CursorMut { list: self, current };

        #[verus_spec(with Tracked(regions), Tracked(&mut cursor_own))]
        cursor.take_current()
    }
348
    /// Tells if a frame is in the list.
    /// # Verified Properties
    /// ## Preconditions
    /// - The list must be well-formed, with the pointers to its links' metadata slots
    /// matching the tracked permission objects.
    /// - The frame must be a valid, active frame.
    /// ## Postconditions
    /// The function returns `true` if the frame is in the list, `false` otherwise.
    /// ## Safety
    /// - `lazy_get_id` uses atomic memory accesses, so there are no data races.
    /// - We assume that the ID allocator has an available ID if the list previously didn't have one,
    /// but the consequence if that is not the case is a failsafe panic.
    /// - Everything else conforms to the safe interface.
    #[verus_spec(r =>
        with
            Tracked(regions): Tracked<&mut MetaRegionOwners>,
            Tracked(slot_own): Tracked<&MetaSlotOwner>,
            Tracked(owner): Tracked<&mut LinkedListOwner<M>>,
        requires
            slot_own.inv(),
            old(regions).inv(),
            old(regions).slots.contains_key(frame_to_index(frame)),
            old(regions).slots[frame_to_index(frame)].is_init(),
            old(regions).slot_owners.contains_key(frame_to_index(frame)),
            old(regions).slot_owners[frame_to_index(frame)].inner_perms.in_list.is_for(
                old(regions).slots[frame_to_index(frame)].mem_contents().value().in_list,
            ),
        ensures
            old(owner).list_id != 0 ==> *final(owner) == *old(owner),
    )]
    pub fn contains(&mut self, frame: Paddr) -> bool {
        // A frame with no metadata slot cannot be in any list.
        let Ok(slot_ptr) = get_slot(frame) else {
            return false;
        };

        // Temporarily take the slot's permissions out of the global region
        // owners so the slot can be read; they are reinserted below.
        let tracked mut slot_perm = regions.slots.tracked_remove(frame_to_index(frame));
        let tracked mut slot_own = regions.slot_owners.tracked_remove(frame_to_index(frame));

        let slot = slot_ptr.take(Tracked(&mut slot_perm));

        let tracked mut inner_perms = slot_own.take_inner_perms();

        // Atomic load of the slot's `in_list` tag through its tracked
        // permission; then restore the slot contents.
        let in_list = slot.in_list.load(Tracked(&mut inner_perms.in_list));
        slot_ptr.put(Tracked(&mut slot_perm), slot);

        proof {
            // Reconcile the inner permissions with the slot owner and put
            // both permissions back into the global region owners.
            slot_own.sync_inner(&inner_perms);
            regions.slot_owners.tracked_insert(frame_to_index(frame), slot_own);
            regions.slots.tracked_insert(frame_to_index(frame), slot_perm);
        }

        // The frame is in this list iff its `in_list` tag equals our list ID.
        in_list == #[verus_spec(with Tracked(owner))]
        self.lazy_get_id()
    }
403
    /// Gets a cursor at the specified frame if the frame is in the list.
    ///
    /// This method fails if the frame is not in the list.
    /// # Verified Properties
    /// ## Preconditions
    /// - The list must be well-formed, with the pointers to its links' metadata slots
    /// matching the tracked permission objects.
    /// - The frame should be raw (because it is owned by the list)
    /// ## Postconditions
    /// - This functions post-conditions are incomplete due to refactoring of the permission model.
    /// When complete, it will guarantee that the cursor is well-formed and points to the matching
    /// element in the list.
    /// ## Safety
    /// - `lazy_get_id` uses atomic memory accesses, so there are no data races.
    /// - We assume that the ID allocator has an available ID if the list previously didn't have one,
    /// but the consequence if that is not the case is a failsafe panic.
    /// - Everything else conforms to the safe interface.
    #[verus_spec(r =>
        with
            Tracked(regions): Tracked<&mut MetaRegionOwners>,
            Tracked(owner): Tracked<LinkedListOwner<M>>,
            -> cursor_owner: Tracked<Option<CursorOwner<M>>>,
        requires
            old(regions).inv(),
        ensures
//            has_safe_slot(frame) && owner.list_id != 0 ==> r is Some,
            !has_safe_slot(frame) ==> r is None,
    )]
    #[verifier::external_body]
    pub fn cursor_mut_at(&mut self, frame: Paddr) -> Option<CursorMut<'_, M>>
    {
        // NOTE(review): the slot owner is removed before the `get_slot`
        // check; on the `Err` path it is never reinserted into
        // `regions.slot_owners`. Body is `external_body`, so this is not
        // checked — confirm the permission is not lost on that path.
        let tracked mut slot_own = regions.slot_owners.tracked_remove(frame_to_index(frame));
        let tracked mut inner_perms = slot_own.take_inner_perms();

        if let Ok(slot_ptr) = get_slot(frame) {
            let slot = slot_ptr.borrow(Tracked(&regions.slots[frame_to_index(frame)]));

            // Atomically read the slot's list tag and compare it against this
            // list's (lazily allocated) ID.
            let in_list = slot.in_list.load(Tracked(&mut inner_perms.in_list));

            let contains = in_list == #[verus_spec(with Tracked(&owner))]
            self.lazy_get_id();

            #[verus_spec(with Tracked(&regions.slots[frame_to_index(frame)]))]
            let meta_ptr = slot.as_meta_ptr::<Link<M>>();

            if contains {
                proof {
                    slot_own.sync_inner(&inner_perms);
                    regions.slot_owners.tracked_insert(frame_to_index(frame), slot_own);
                }

                // Ghost-locate the link with this paddr to build a cursor
                // owner at the matching index.
                let ghost link = owner.list.filter(|link: LinkOwner| link.paddr == frame).first();
                let ghost index = owner.list.index_of(link);
                let tracked cursor_owner = CursorOwner::cursor_mut_at_owner(owner, index);

                proof_with!(|= Tracked(Some(cursor_owner)));
                Some(CursorMut { list: self, current: Some(MetadataAsLink::cast_from_metadata(meta_ptr)) })
            } else {
                proof {
                    slot_own.sync_inner(&inner_perms);
                    regions.slot_owners.tracked_insert(frame_to_index(frame), slot_own);
                }

                proof_with!(|= Tracked(None));
                None
            }
        } else {
            assert(!has_safe_slot(frame));
            proof_with!(|= Tracked(None));
            None
        }
    }
476
    /// Gets a cursor at the front that can mutate the linked list links.
    ///
    /// If the list is empty, the cursor points to the "ghost" non-element.
    /// # Verified Properties
    /// ## Preconditions
    /// - The list must be well-formed, with the pointers to its links' metadata slots
    /// matching the tracked permission objects.
    /// ## Postconditions
    /// - The cursor is well-formed, with the pointers to its links' metadata slots
    /// matching the tracked permission objects. The list invariants are preserved.
    /// - See [`CursorOwner::front_owner_spec`] for the precise specification.
    /// ## Safety
    /// - This function only uses the list permission, so there are no illegal memory accesses.
    /// - No data races are possible.
    #[verus_spec(r =>
        with
            Tracked(owner): Tracked<LinkedListOwner<M>>,
        requires
            old(self).wf(owner),
            owner.inv(),
        ensures
            r.0.wf(r.1@),
            r.1@.inv(),
            r.1@ == CursorOwner::front_owner_spec(owner),
    )]
    pub fn cursor_front_mut(&mut self) -> (CursorMut<'_, M>, Tracked<CursorOwner<M>>) {
        // `None` here means the list is empty and the cursor starts at the
        // ghost non-element.
        let current = self.front;

        (CursorMut { list: self, current }, Tracked(CursorOwner::front_owner(owner)))
    }
507
    /// Gets a cursor at the back that can mutate the linked list links.
    ///
    /// If the list is empty, the cursor points to the "ghost" non-element.
    /// # Verified Properties
    /// ## Preconditions
    /// - The list must be well-formed, with the pointers to its links' metadata slots
    /// matching the tracked permission objects.
    /// ## Postconditions
    /// - The cursor is well-formed, with the pointers to its links' metadata slots
    /// matching the tracked permission objects. The list invariants are preserved.
    /// See [`CursorOwner::back_owner_spec`] for the precise specification.
    /// ## Safety
    /// - This function only uses the list permission, so there are no illegal memory accesses.
    /// - No data races are possible.
    #[verus_spec(
        with
            Tracked(owner): Tracked<LinkedListOwner<M>>,
    )]
    pub fn cursor_back_mut(&mut self) -> (res: (CursorMut<'_, M>, Tracked<CursorOwner<M>>))
        requires
            old(self).wf(owner),
            owner.inv(),
        ensures
            res.0.wf(res.1@),
            res.1@.inv(),
            res.1@ == CursorOwner::back_owner_spec(owner),
    {
        // `None` here means the list is empty and the cursor starts at the
        // ghost non-element.
        let current = self.back;

        (CursorMut { list: self, current }, Tracked(CursorOwner::back_owner(owner)))
    }
539
    /// Gets a cursor at the "ghost" non-element that can mutate the linked list links.
    ///
    /// Inserting before the ghost non-element appends at the back of the
    /// list.
    // NOTE(review): the tracked `owner` parameter is not used in the body and
    // no spec clauses are given — presumably a placeholder kept for signature
    // symmetry with the other cursor constructors; confirm.
    #[verus_spec(
        with Tracked(owner): Tracked<&mut LinkedListOwner<M>>
    )]
    fn cursor_at_ghost_mut(&mut self) -> CursorMut<'_, M> {
        CursorMut { list: self, current: None }
    }
547
    /// Lazily allocates and returns this list's ID.
    ///
    /// Returns the existing `list_id` if it is nonzero; otherwise it is meant
    /// to draw a fresh ID from a global allocator and cache it in
    /// `self.list_id`. The body is currently stubbed with `unimplemented!()`;
    /// the commented-out code below is the intended implementation.
    /// # Verification Assumption
    /// We assume that there is an available ID for `lazy_get_id` to return.
    /// This is safe because it will panic if the ID allocator is exhausted.
    #[verifier::external_body]
    #[verus_spec(
        with Tracked(owner): Tracked<& LinkedListOwner<M>>
    )]
    fn lazy_get_id(&mut self) -> (id: u64)
        ensures
            owner.list_id != 0 ==> id == owner.list_id,
    {
        unimplemented!()/*        // FIXME: Self-incrementing IDs may overflow, while `core::pin::Pin`
        // is not compatible with locks. Think about a better solution.
        static LIST_ID_ALLOCATOR: AtomicU64 = AtomicU64::new(1);
        const MAX_LIST_ID: u64 = i64::MAX as u64;

        if self.list_id == 0 {
            let id = LIST_ID_ALLOCATOR.fetch_add(1, Ordering::Relaxed);
            if id >= MAX_LIST_ID {
//                log::error!("The frame list ID allocator has exhausted.");
//                abort();
                unimplemented!()
            }
            self.list_id = id;
            id
        } else {
            self.list_id
        }*/

    }
578}
579
580impl<'a, M: AnyFrameMeta + Repr<MetaSlotSmall>> CursorMut<'a, M> {
    /// Moves the cursor to the next frame towards the back.
    ///
    /// If the cursor is pointing to the "ghost" non-element then this will
    /// move it to the first element of the [`LinkedList`]. If it is pointing
    /// to the last element of the LinkedList then this will move it to the
    /// "ghost" non-element.
    #[verus_spec(
        with Tracked(owner): Tracked<CursorOwner<M>>
    )]
    pub fn move_next(&mut self)
        requires
            owner.inv(),
            old(self).wf(owner),
        ensures
            final(self).model(owner.move_next_owner_spec()) == old(self).model(owner).move_next_spec(),
            owner.move_next_owner_spec().inv(),
            final(self).wf(owner.move_next_owner_spec()),
    {
        let ghost old_self = *self;

        proof {
            // Instantiate the per-index invariants for the current and next
            // links so the `next` pointer dereference below is known valid.
            if self.current is Some {
                assert(owner.list_own.inv_at(owner.index));
            }
            if owner.index < owner.length() - 1 {
                assert(owner.list_own.inv_at(owner.index + 1));
                owner.list_own.perms[owner.index + 1].pptr_implies_addr();
            }
        }

        self.current = match self.current {
            // SAFETY: The cursor is pointing to a valid element.
            Some(current) => {
                // Read this link's `next` field through the tracked
                // permission for its metadata slot.
                let current_md = MetadataAsLink::cast_to_metadata(current);
                borrow_field!(current_md => metadata.next, owner.list_own.perms.tracked_borrow(owner.index))
            },
            // On the ghost non-element, wrap around to the front.
            None => self.list.front,
        };

        proof {
            // Show that the resulting model equals the specified one-step
            // move of the old model.
            LinkedListOwner::<M>::view_preserves_len(owner.list_own.list);
            assert(self.model(owner.move_next_owner_spec()).fore == old_self.model(owner).move_next_spec().fore);
            assert(self.model(owner.move_next_owner_spec()).rear == old_self.model(owner).move_next_spec().rear);
        }
    }
626
    /// Moves the cursor to the previous frame towards the front.
    ///
    /// If the cursor is pointing to the "ghost" non-element then this will
    /// move it to the last element of the [`LinkedList`]. If it is pointing
    /// to the first element of the LinkedList then this will move it to the
    /// "ghost" non-element.
    #[verus_spec(
        with Tracked(owner): Tracked<CursorOwner<M>>
    )]
    pub fn move_prev(&mut self)
        requires
            owner.inv(),
            old(self).wf(owner),
        ensures
            final(self).model(owner.move_prev_owner_spec()) == old(self).model(owner).move_prev_spec(),
            owner.move_prev_owner_spec().inv(),
            final(self).wf(owner.move_prev_owner_spec()),
    {
        let ghost old_self = *self;

        proof {
            // Instantiate the per-index invariants for the current and
            // previous links so the `prev` pointer dereference below is known
            // valid.
            if self.current is Some {
                assert(owner.list_own.inv_at(owner.index));
            }
            if 0 < owner.index {
                assert(owner.list_own.inv_at(owner.index - 1));
                owner.list_own.perms[owner.index - 1].pptr_implies_addr();
            }
        }

        self.current = match self.current {
            // SAFETY: The cursor is pointing to a valid element.
            Some(current) => {
                // Read this link's `prev` field through the tracked
                // permission for its metadata slot.
                let current_md = MetadataAsLink::cast_to_metadata(current);
                borrow_field!(current_md => metadata.prev, owner.list_own.perms.tracked_borrow(owner.index))
            },
            // On the ghost non-element, wrap around to the back.
            None => self.list.back,
        };

        proof {
            // Case split mirroring `move_prev_spec`: whether the cursor was
            // at the front, in the middle, or on the ghost non-element.
            LinkedListOwner::<M>::view_preserves_len(owner.list_own.list);

            if owner@.list_model.list.len() > 0 {
                if owner@.fore.len() > 0 {
                    assert(self.model(owner.move_prev_owner_spec()).fore == old_self.model(
                        owner,
                    ).move_prev_spec().fore);
                    assert(self.model(owner.move_prev_owner_spec()).rear == old_self.model(
                        owner,
                    ).move_prev_spec().rear);
                    if owner@.rear.len() > 0 {
                        assert(owner.list_own.inv_at(owner.index));
                    }
                } else {
                    assert(owner.list_own.inv_at(owner.index));
                    assert(self.model(owner.move_prev_owner_spec()).rear == old_self.model(
                        owner,
                    ).move_prev_spec().rear);
                    assert(owner@.rear == owner@.list_model.list);
                }
            }
        }
    }
690
    /// Gets the mutable reference to the current frame's metadata.
    ///
    /// # Verified Properties
    /// ## Preconditions
    /// The cursor must be well-formed with respect to the tracked `CursorOwner`.
    /// ## Postconditions
    /// If the cursor is on an element, returns `Some(&mut meta)` borrowing the
    /// current link's metadata. The cursor state and owner are otherwise
    /// unchanged.
    /// ## Safety
    /// The `&mut self` guarantees exclusive access to the cursor; the tracked
    /// `CursorOwner` guarantees the perm for the current link is live. Together
    /// they justify the mutable borrow of `link.meta`. The body is
    /// `external_body` because the map-indexed perm extraction needed to call
    /// `ReprPtr::borrow_mut` runs into a Verus modelling gap — see
    /// `vstd_extra::map_extra::tracked_borrow_mut_points_to`. The ownership
    /// invariants in the requires/ensures are what we rely on.
    #[verus_spec(
        with Tracked(owner): Tracked<&mut CursorOwner<M>>
    )]
    #[verifier::external_body]
    pub fn current_meta(&mut self) -> (res: Option<&mut M>)
        requires
            old(self).wf(*old(owner)),
            old(owner).inv(),
        ensures
            final(self).wf(*final(owner)),
            final(owner).inv(),
            *final(owner) == *old(owner),
            *final(self) == *old(self),
            res.is_some() == (0 <= final(owner).index < final(owner).length()),
    {
        self.current.map(|current| {
            // SAFETY: `&mut self` + the tracked owner ensure exclusive access
            // to the current link's metadata.
            // NOTE(review): `ptr.addr() as *mut _` discards pointer
            // provenance; under strict provenance this is UB-adjacent —
            // consider `with_addr`/`map_addr` or casting the pointer itself.
            // Confirm against the project's provenance policy.
            let link_mut = unsafe { &mut *(current.ptr.addr() as *mut Link<M>) };
            &mut link_mut.meta
        })
    }
730
    /// Takes the current pointing frame out of the linked list.
    ///
    /// If successful, the frame is returned and the cursor is moved to the
    /// next frame. If the cursor is pointing to the back of the list then it
    /// is moved to the "ghost" non-element.
    /// # Verified Properties
    /// ## Preconditions
    /// The cursor must be well-formed, with the pointers to its links' metadata slots
    /// matching the tracked permission objects. The list must be non-empty, so that the
    /// current frame is valid.
    /// ## Postconditions
    /// The current frame is removed from the list, and the cursor is moved to the next frame.
    /// The list invariants are preserved.
    /// ## Safety
    /// This function calls `from_raw` on the frame, but we guarantee that the frame is forgotten
    /// if it is in the list. So, double-free will not occur. All loads and stores are through
    /// tracked permissions, so there are no illegal memory accesses. No data races are possible.
    #[verus_spec(
        with Tracked(regions) : Tracked<&mut MetaRegionOwners>,
            Tracked(owner) : Tracked<&mut CursorOwner<M>>
    )]
    pub fn take_current(&mut self) -> (res: Option<(UniqueFrame<Link<M>>, Tracked<UniqueFrameOwner<Link<M>>>)>)
        requires
            old(self).wf(*old(owner)),
            old(owner).inv(),
            old(regions).inv(),
            // NOTE(review): this key is `frame_to_index(addr)` while the body
            // and postconditions use `frame_to_index(meta_to_frame(addr))` —
            // presumably the two coincide for a well-formed cursor; confirm.
            old(owner).length() > 0 ==> old(regions).slot_owners.contains_key(
                frame_to_index(old(self).current.unwrap().addr()),
            ),
        ensures
            old(owner).length() == 0 ==> res.is_none(),
            old(self).current.is_some() ==> res.is_some(),
            res.is_some() ==> res.unwrap().0.model(res.unwrap().1@).meta == old(
                owner,
            ).list_own.list[old(owner).index]@,
            res.is_some() ==> final(self).model(*final(owner)) == old(self).model(*old(owner)).remove(),
            res.is_some() ==> res.unwrap().1@.frame_link_inv(),
            // Invariant preservation
            res.is_some() ==> final(owner).inv(),
            res.is_some() ==> final(self).wf(*final(owner)),
            res.is_none() ==> *final(owner) =~= *old(owner),
            final(regions).inv(),
            // Structural: remove_owner_spec
            res.is_some() ==> final(owner).index == old(owner).index,
            res.is_some() ==> final(owner).list_own.list
                == old(owner).list_own.list.remove(old(owner).index),
            // Slot effects: from_raw decrements raw_count by 1
            res.is_some() ==> {
                let paddr = old(self).current.unwrap().addr();
                let idx = frame_to_index(meta_to_frame(paddr));
                &&& final(regions).slot_owners[idx].raw_count
                    == old(regions).slot_owners[idx].raw_count - 1
                &&& final(regions).slots =~= old(regions).slots
            },
            // Frame for other slots
            res.is_some() ==> forall|idx: usize| #![trigger final(regions).slot_owners[idx]]
                idx != frame_to_index(meta_to_frame(old(self).current.unwrap().addr()))
                ==> final(regions).slot_owners[idx] == old(regions).slot_owners[idx],
            res.is_none() ==> *final(regions) =~= *old(regions),
            // Properties of the returned frame needed for UniqueFrame::drop
            res.is_some() ==> res.unwrap().0.wf(res.unwrap().1@),
            res.is_some() ==> res.unwrap().1@.inv(),
    {
        let ghost owner0 = *owner;

        // Returns `None` (changing nothing) when the cursor is at the ghost
        // non-element.
        let current = self.current?;

        // Surface the per-node invariants for the current link and its
        // neighbors so the solver can use them in the updates below.
        assert(owner.list_own.inv_at(owner.index));
        assert(owner.index > 0 ==> owner.list_own.inv_at(owner.index - 1));
        assert(owner.index < owner.length() - 1 ==> owner.list_own.inv_at(owner.index + 1));

        let meta_ptr = current.addr();
        let paddr = meta_to_frame(meta_ptr);

        // Move the current link's permission and ownership out of the list owner.
        let tracked cur_perm = owner.list_own.perms.tracked_remove(owner.index);
        let tracked mut cur_own = owner.list_own.list.tracked_remove(owner.index);

        // Reconstitute the unique frame from its raw physical address; this
        // consumes one raw reference (the slot's raw_count drops by one).
        #[verus_spec(with Tracked(regions), Tracked(cur_perm), Tracked(cur_own))]
        let (frame, frame_own) = UniqueFrame::<Link<M>>::from_raw(paddr);
        let mut frame = frame;
        let tracked mut frame_own = frame_own.get();

        let next_ptr = frame.meta().next;

        #[verus_spec(with Tracked(&frame_own))]
        let frame_meta = frame.meta_mut();

        let opt_prev_link = borrow_field!(frame_meta => metadata.prev, &frame_own.meta_perm);

        // Unlink on the `prev` side: the predecessor (or the list head) now
        // points past the removed node.
        if let Some(prev_link) = opt_prev_link {
            let prev = MetadataAsLink::cast_to_metadata(prev_link);
            // SAFETY: We own the previous node by `&mut self` and the node is
            // initialized.
            update_field!(prev => metadata.next <- next_ptr; owner.list_own.perms, owner.index-1);

            assert(owner.index > 0);
        } else {
            self.list.front = next_ptr;
        }

        let prev_ptr = frame.meta().prev;

        #[verus_spec(with Tracked(&frame_own))]
        let frame_meta = frame.meta_mut();
        let opt_next_link = frame_meta.borrow(Tracked(&frame_own.meta_perm)).metadata.next;

        // Unlink on the `next` side: the successor (or the list tail) now
        // points past the removed node, and the cursor advances to the
        // successor (or the ghost non-element at the back).
        if let Some(next_link) = opt_next_link {
            let next = MetadataAsLink::cast_to_metadata(next_link);
            // SAFETY: We own the next node by `&mut self` and the node is
            // initialized.
            update_field!(next => metadata.prev <- prev_ptr; owner.list_own.perms, owner.index+1);

            self.current = Some(next_link);
        } else {
            self.list.back = prev_ptr;

            self.current = None;
        }

        #[verus_spec(with Tracked(&frame_own))]
        let frame_meta = frame.meta_mut();

        // Clear the removed link's own pointers so it no longer references the list.
        update_field!(frame_meta => metadata.next <- None; frame_own.meta_perm);
        update_field!(frame_meta => metadata.prev <- None; frame_own.meta_perm);

        let tracked mut slot_own = regions.slot_owners.tracked_remove(frame_to_index(paddr));

        // Record that the slot no longer belongs to any list (in_list = 0).
        #[verus_spec(with Tracked(&frame_own.meta_perm.points_to))]
        let slot = frame.slot();
        let tracked mut inner_perms = frame_own.meta_perm.inner_perms;
        slot.in_list.store(Tracked(&mut inner_perms.in_list), 0);
        proof { frame_own.meta_perm.inner_perms = inner_perms; }

        proof {
            // Put the (otherwise unchanged) slot owner back into the region map.
            regions.slot_owners.tracked_insert(frame_to_index(paddr), slot_own);
        }

        self.list.size = self.list.size - 1;

        proof {
            // Relate the structural removal at `owner.index` to `remove()` on
            // the abstract cursor model.
            owner0.remove_owner_spec_implies_model_spec(*owner);
        }
        Some((frame, Tracked(frame_own)))
    }
875
    /// Inserts a frame before the current frame.
    ///
    /// If the cursor is pointing at the "ghost" non-element then the new
    /// element is inserted at the back of the [`LinkedList`].
    /// # Verified Properties
    /// ## Preconditions
    /// The cursor must be well-formed, with the pointers to its links' metadata slots matching the tracked permission objects.
    /// - The new frame must be active, so that it is valid to call `into_raw` on it.
    /// ## Postconditions
    /// - The new frame is inserted into the list, immediately before the current index.
    /// - The list invariants are preserved.
    /// ## Safety
    /// - This function calls `into_raw` on the frame, so the caller must ensure that the frame is active and
    /// has not been forgotten already to avoid a memory leak. If the caller attempts to insert a forgotten frame,
    /// the invariant around `into_raw` and `from_raw` will be violated. But, it is the safe failure case in that
    /// it will not cause a double-free. (Note: we should be able to move this requirement into the `UniqueFrame` invariants.)
    #[verus_spec(
        with Tracked(regions): Tracked<&mut MetaRegionOwners>,
            Tracked(owner): Tracked<&mut CursorOwner<M>>,
            Tracked(frame_own): Tracked<&mut UniqueFrameOwner<Link<M>>>
    )]
    pub fn insert_before(&mut self, mut frame: UniqueFrame<Link<M>>)
        requires
            old(self).wf(*old(owner)),
            old(owner).inv(),
            old(owner).list_own.list_id != 0,
            old(frame_own).inv(),
            old(frame_own).global_inv(*old(regions)),
            frame.wf(*old(frame_own)),
            old(owner).length() < usize::MAX,
            old(regions).inv(),
            old(regions).slots.contains_key(old(frame_own).slot_index),
            old(regions).slot_owners[old(frame_own).slot_index].inner_perms.in_list.is_for(
                old(regions).slots[old(frame_own).slot_index].value().in_list,
            ),
            old(frame_own).meta_perm.addr() == frame.ptr.addr(),
            old(frame_own).frame_link_inv(),
        ensures
            final(self).model(*final(owner)) == old(self).model(*old(owner)).insert(final(frame_own).meta_own@),
            final(self).wf(*final(owner)),
            final(owner).inv(),
            final(owner).list_own.list == old(owner).list_own.list.insert(old(owner).index, final(frame_own).meta_own),
            final(owner).list_own.list_id == old(owner).list_own.list_id,
            final(frame_own).meta_own.paddr == old(frame_own).meta_own.paddr,
            final(frame_own).meta_own.in_list == old(owner).list_own.list_id,
    {
        let ghost owner0 = *owner;

        assert(meta_addr(frame_own.slot_index) == frame.ptr.addr());
        assert(regions.slot_owners.contains_key(frame_own.slot_index));
        let tracked slot_own = regions.slot_owners.tracked_borrow(frame_own.slot_index);

        #[verus_spec(with Tracked(frame_own))]
        let frame_ptr = frame.meta_mut();
        assert(frame_ptr.addr() == frame.ptr.addr());

        // View the new frame's metadata pointer as a link pointer.
        let frame_ptr_as_link = MetadataAsLink::cast_from_metadata(frame_ptr);

        if let Some(current) = self.current {
            // Case 1: the cursor is on an element; splice the new node in
            // immediately before it.
            let current_md = MetadataAsLink::cast_to_metadata(current);

            // Surface the per-node invariants for the current link and its neighbors.
            assert(owner.list_own.inv_at(owner.index));
            assert(owner.index > 0 ==> owner.list_own.inv_at(owner.index - 1));
            assert(owner.index < owner.length() - 1 ==> owner.list_own.inv_at(owner.index + 1));

            // SAFETY: We own the current node by `&mut self` and the node is initialized.
            let tracked mut cur_perm = owner.list_own.perms.tracked_remove(owner.index);
            let opt_prev_link : Option<ReprPtr<MetaSlot, MetadataAsLink<M>>> =
                borrow_field!(current_md => metadata.prev, &cur_perm);
            proof {
                owner.list_own.perms.tracked_insert(owner.index, cur_perm);
            }

            if let Some(prev_link) = opt_prev_link {
                // Case 1a: the current node has a predecessor — the new node
                // goes between the two.
                let prev = MetadataAsLink::cast_to_metadata(prev_link);

                // SAFETY: We own the previous node by `&mut self` and the node is initialized.
                update_field!(prev => metadata.next <- Some(frame_ptr_as_link); owner.list_own.perms, owner.index-1);

                update_field!(frame_ptr => metadata.prev <- Some(prev_link); frame_own.meta_perm);
                update_field!(frame_ptr => metadata.next <- Some(current); frame_own.meta_perm);

                update_field!(current_md => metadata.prev <- Some(frame_ptr_as_link); owner.list_own.perms, owner.index);
            } else {
                // Case 1b: the current node is the front — the new node
                // becomes the new front.
                update_field!(frame_ptr => metadata.next <- Some(current); frame_own.meta_perm);

                update_field!(current_md => metadata.prev <- Some(frame_ptr_as_link); owner.list_own.perms, owner.index);

                self.list.front = Some(frame_ptr_as_link);
            }
        } else {
            // Case 2: the cursor is at the ghost non-element — append at the back.
            assert(0 < owner.length() ==> owner.list_own.inv_at(owner.index - 1));

            if let Some(back) = self.list.back {
                // Case 2a: non-empty list — link after the current back node.
                let back_md = MetadataAsLink::cast_to_metadata(back);

                assert(owner.index == owner.length());

                // SAFETY: We have ownership of the links via `&mut self`.
                //                    debug_assert!(back.as_mut().next.is_none());
                update_field!(back_md => metadata.next <- Some(frame_ptr_as_link); owner.list_own.perms, owner.length()-1);

                update_field!(frame_ptr => metadata.prev <- Some(back); frame_own.meta_perm);

                self.list.back = Some(frame_ptr_as_link);
            } else {
                // Case 2b: empty list — the new node is both front and back.
                //                debug_assert_eq!(self.list.front, None);
                self.list.front = Some(frame_ptr_as_link);
                self.list.back = Some(frame_ptr_as_link);
            }
        }

        assert(regions.slot_owners.contains_key(frame_to_index(meta_to_frame(frame.ptr.addr()))));
        let tracked mut slot_own = regions.slot_owners.tracked_remove(
            frame_to_index(meta_to_frame(frame.ptr.addr())),
        );

        // Tag the frame's slot with this list's id (allocated lazily).
        #[verus_spec(with Tracked(&mut owner.list_own))]
        let list_id = self.list.lazy_get_id();

        #[verus_spec(with Tracked(&frame_own.meta_perm.points_to))]
        let slot = frame.slot();
        slot.in_list.store(Tracked(&mut frame_own.meta_perm.inner_perms.in_list), list_id);

        proof {
            // The store only changed in_list.value(); ref_count is unchanged.
            // From global_inv: ref_count != REF_COUNT_UNUSED && ref_count != 0.
            // So slot_own.inv() holds after the store.
            assert(frame_own.slot_index == frame_to_index(meta_to_frame(frame.ptr.addr())));
            assert(slot_own.inner_perms.ref_count.value() != REF_COUNT_UNUSED);
            assert(slot_own.inner_perms.ref_count.value() != 0);
            assert(slot_own.inv());

            regions.slot_owners.tracked_insert(frame_to_index(meta_to_frame(frame.ptr.addr())), slot_own)
        }

        assert(regions.inv()) by {
            // slot_owners only changed at frame_own.slot_index.
            // For all other keys, the invariant is preserved from old(regions).inv().
            // For the modified key, we proved slot_own.inv() above.
            assert(forall|i: usize| #[trigger]
                regions.slots.contains_key(i) ==>
                    regions.slots[i].value().wf(regions.slot_owners[i]));
        };

        // Forget the frame to transfer the ownership to the list.
        #[verus_spec(with Tracked(frame_own), Tracked(regions))]
        let _ = frame.into_raw();

        self.list.size = self.list.size + 1;

        proof {
            // Register the new link with the cursor owner, re-establish the
            // shifted per-node invariants, and relate the structural insert to
            // the abstract model's `insert`.
            CursorOwner::<M>::list_insert(owner, &mut frame_own.meta_own, &frame_own.meta_perm);

            assert(forall|i: int| 0 <= i < owner.index - 1 ==> #[trigger] owner0.list_own.inv_at(i) ==> owner.list_own.inv_at(i)) by {};
            assert(forall|i: int| owner.index <= i < owner.length() ==> owner0.list_own.inv_at(i - 1) == owner.list_own.inv_at(i)) by {};
            assert(owner.list_own.inv_at(owner0.index as int));

            assert(owner.list_own.list == owner0.list_own.list.insert(owner0.index, frame_own.meta_own));
            owner0.insert_owner_spec_implies_model_spec(frame_own.meta_own, *owner);
        }
    }
1038
1039    /// Provides a reference to the linked list.
1040    pub fn as_list(&self) -> &LinkedList<M> {
1041        self.list
1042    }
1043}
1044
impl<M: AnyFrameMeta + Repr<MetaSlotSmall>> TrackDrop for LinkedList<M> {
    // Ghost state threaded through construction and drop: the list's own
    // ownership structure plus the global metadata-region ownership.
    type State = (LinkedListOwner<M>, MetaRegionOwners);

    /// No constraint is placed on the ghost state at construction time.
    open spec fn constructor_requires(self, s: Self::State) -> bool {
        true
    }

    /// Construction leaves the ghost state unchanged.
    open spec fn constructor_ensures(self, s0: Self::State, s1: Self::State) -> bool {
        s0 =~= s1
    }

    proof fn constructor_spec(self, tracked s: &mut Self::State) {
        // Nothing to prove: the state is not modified.
    }

    /// What the caller must establish before dropping the list.
    open spec fn drop_requires(self, s: Self::State) -> bool {
        &&& self.wf(s.0)
        &&& s.0.inv()
        &&& s.1.inv()
        // Each element's slot is in slot_owners
        // NOTE(review): this clause keys by `frame_to_index(paddr)` while the
        // clauses below key by `frame_to_index(meta_to_frame(paddr))` —
        // presumably equivalent for well-formed lists; confirm intended key.
        &&& forall|i: int| #![trigger s.0.list[i]]
            0 <= i < s.0.list.len() ==>
                s.1.slot_owners.contains_key(frame_to_index(s.0.list[i].paddr))
        // List element slots are not unused (needed to preserve MetaSlotOwner::inv after raw_count change)
        &&& forall|i: int| #![trigger s.0.list[i]]
            0 <= i < s.0.list.len() ==> {
                let idx = frame_to_index(meta_to_frame(s.0.list[i].paddr));
                s.1.slot_owners[idx].inner_perms.ref_count.value() != REF_COUNT_UNUSED
            }
        // List element slots are not in regions.slots (perms owned by the list)
        &&& forall|i: int| #![trigger s.0.list[i]]
            0 <= i < s.0.list.len() ==> {
                let idx = frame_to_index(meta_to_frame(s.0.list[i].paddr));
                !s.1.slots.contains_key(idx)
            }
        // List element slots have raw_count > 0 (from into_raw when inserted)
        &&& forall|i: int| #![trigger s.0.list[i]]
            0 <= i < s.0.list.len() ==> {
                let idx = frame_to_index(meta_to_frame(s.0.list[i].paddr));
                s.1.slot_owners[idx].raw_count > 0
            }
        // Elements have distinct slot indices
        &&& forall|i: int, j: int|
            #![trigger s.0.list[i], s.0.list[j]]
            0 <= i < j < s.0.list.len() ==>
                frame_to_index(meta_to_frame(s.0.list[i].paddr))
                    != frame_to_index(meta_to_frame(s.0.list[j].paddr))
    }

    /// What drop guarantees: the list owner is emptied, every element's slot
    /// has its raw_count decremented by exactly one, and all other ghost
    /// state (other slots, the slots map, the region invariant) is preserved.
    open spec fn drop_ensures(self, s0: Self::State, s1: Self::State) -> bool {
        &&& s1.0.list.len() == 0
        // Each element's slot has raw_count decremented by 1
        &&& forall|i: int| #![trigger s0.0.list[i]]
            0 <= i < s0.0.list.len() ==> {
                let idx = frame_to_index(meta_to_frame(s0.0.list[i].paddr));
                s1.1.slot_owners[idx].raw_count
                    == s0.1.slot_owners[idx].raw_count - 1
            }
        // Slots not in the list are unchanged
        &&& forall|idx: usize| #![trigger s1.1.slot_owners[idx]]
            (forall|i: int| #![trigger s0.0.list[i]]
                0 <= i < s0.0.list.len() ==>
                    idx != frame_to_index(meta_to_frame(s0.0.list[i].paddr)))
            ==> s1.1.slot_owners[idx] == s0.1.slot_owners[idx]
        &&& s1.1.slots =~= s0.1.slots
        &&& s1.1.inv()
    }

    /// Proof body for dropping the list: releases one raw reference per
    /// element and drains the tracked ownership structure.
    proof fn drop_tracked(self, tracked s: &mut Self::State) {
        let ghost list = s.0.list;
        let ghost n = list.len();
        let ghost old_regions = s.1;

        // Each element's frame index is in range (< max_meta_slots), which
        // justifies the slot_owners containment the helper below relies on.
        assert forall|j: int| #![trigger list[j]]
            0 <= j < n implies
                s.1.slot_owners.contains_key(
                    frame_to_index(meta_to_frame(list[j].paddr)))
        by {
            assert(s.0.inv_at(j));
            let paddr = list[j].paddr;
            assert(frame_to_index(meta_to_frame(paddr))
                < max_meta_slots()) by {
                assert(meta_to_frame(paddr) == ((paddr - FRAME_METADATA_RANGE.start)
                    / META_SLOT_SIZE as int * PAGE_SIZE) as usize);
                assert(frame_to_index(meta_to_frame(paddr))
                    == meta_to_frame(paddr) / PAGE_SIZE);
            };
        };

        // Decrement raw_count for every element's slot, one element at a time.
        Self::drop_tracked_helper(list, 0, &old_regions, &mut s.1);

        // The list owner holds a tracked permission for each element; this is
        // the precondition `drain_list` needs to discard them all.
        assert forall|i: int| 0 <= i < n implies s.0.perms.contains_key(i)
        by {
            assert(s.0.list.len() == n);
            assert(s.0.inv_at(i));
        };
        Self::drain_list(&mut s.0, n as int);

        // Re-establish the per-slot invariant: untouched slots are literally
        // unchanged, and decremented slots keep inv() because only raw_count moved.
        assert forall|i: usize|
            #[trigger] s.1.slot_owners.contains_key(i)
            implies s.1.slot_owners[i].inv()
        by {
            if forall|j: int| #![trigger list[j]]
                0 <= j < n ==> i != frame_to_index(meta_to_frame(list[j].paddr))
            {
                assert(s.1.slot_owners[i] == old_regions.slot_owners[i]);
            }
        };
        assert(s.1.inv());
    }

}
1156
impl<M: AnyFrameMeta + Repr<MetaSlotSmall>> LinkedList<M> {
    /// Empties the list owner by discarding the tracked permission and link
    /// owner for each index, from `n - 1` down to `0`.
    proof fn drain_list(
        tracked owner: &mut LinkedListOwner<M>,
        n: int,
    )
        requires
            old(owner).list.len() == n,
            n >= 0,
            forall|i: int| 0 <= i < n ==> old(owner).perms.contains_key(i),
        ensures
            final(owner).list.len() == 0,
        decreases n,
    {
        if n <= 0 {
            return;
        }
        // Remove perm first (perms is a Map, uses contains_key from precondition)
        let tracked _perm = owner.perms.tracked_remove(n - 1);
        let tracked _link = owner.list.tracked_remove(n - 1);
        Self::drain_list(owner, n - 1);
    }

    /// Inductive helper for `drop_tracked`: for every element in `list[k..]`,
    /// decrements the `raw_count` of its slot owner by one, while leaving all
    /// other slot-owner fields — and every slot not referenced by the list —
    /// unchanged. `old_regions` is the pristine snapshot the pre/postconditions
    /// are stated against.
    proof fn drop_tracked_helper(
        list: Seq<LinkOwner>,
        k: int,
        old_regions: &MetaRegionOwners,
        tracked regions: &mut MetaRegionOwners,
    )
        requires
            0 <= k <= list.len() as int,
            old_regions.slots =~= old(regions).slots,
            old_regions.slot_owners.dom() =~= old(regions).slot_owners.dom(),
            // Distinct indices
            forall|i: int, j: int|
                #![trigger list[i], list[j]]
                0 <= i < j < list.len() ==>
                    frame_to_index(meta_to_frame(list[i].paddr))
                        != frame_to_index(meta_to_frame(list[j].paddr)),
            // Containment (for the actual index used in tracked operations)
            forall|j: int| #![trigger list[j]]
                0 <= j < list.len() ==>
                    old(regions).slot_owners.contains_key(
                        frame_to_index(meta_to_frame(list[j].paddr))),
            // Already processed [0, k): only raw_count changed
            forall|j: int| #![trigger list[j]]
                0 <= j < k ==> {
                    let idx = frame_to_index(meta_to_frame(list[j].paddr));
                    &&& old(regions).slot_owners[idx].raw_count
                        == old_regions.slot_owners[idx].raw_count - 1
                    &&& old(regions).slot_owners[idx].inner_perms
                        == old_regions.slot_owners[idx].inner_perms
                    &&& old(regions).slot_owners[idx].self_addr
                        == old_regions.slot_owners[idx].self_addr
                    &&& old(regions).slot_owners[idx].usage
                        == old_regions.slot_owners[idx].usage
                    &&& old(regions).slot_owners[idx].paths_in_pt
                        == old_regions.slot_owners[idx].paths_in_pt
                },
            // Not yet processed [k, n)
            forall|j: int| #![trigger list[j]]
                k <= j < list.len() ==> {
                    let idx = frame_to_index(meta_to_frame(list[j].paddr));
                    old(regions).slot_owners[idx]
                        == old_regions.slot_owners[idx]
                },
            // Non-list slots unchanged
            forall|idx: usize| #![trigger old(regions).slot_owners[idx]]
                (forall|j: int| #![trigger list[j]]
                    0 <= j < list.len() ==>
                        idx != frame_to_index(meta_to_frame(list[j].paddr)))
                ==> old(regions).slot_owners[idx] == old_regions.slot_owners[idx],
            old(regions).slots =~= old_regions.slots,
            // raw_count > 0 for unprocessed elements
            forall|j: int| #![trigger list[j]]
                k <= j < list.len() ==>
                    old_regions.slot_owners[frame_to_index(meta_to_frame(list[j].paddr))].raw_count > 0,
        ensures
            // All elements decremented (only raw_count changed)
            forall|j: int| #![trigger list[j]]
                0 <= j < list.len() ==> {
                    let idx = frame_to_index(meta_to_frame(list[j].paddr));
                    &&& final(regions).slot_owners[idx].raw_count
                        == old_regions.slot_owners[idx].raw_count - 1
                    &&& final(regions).slot_owners[idx].inner_perms
                        == old_regions.slot_owners[idx].inner_perms
                    &&& final(regions).slot_owners[idx].self_addr
                        == old_regions.slot_owners[idx].self_addr
                    &&& final(regions).slot_owners[idx].usage
                        == old_regions.slot_owners[idx].usage
                    &&& final(regions).slot_owners[idx].paths_in_pt
                        == old_regions.slot_owners[idx].paths_in_pt
                },
            // Non-list slots unchanged
            forall|idx: usize| #![trigger final(regions).slot_owners[idx]]
                (forall|j: int| #![trigger list[j]]
                    0 <= j < list.len() ==>
                        idx != frame_to_index(meta_to_frame(list[j].paddr)))
                ==> final(regions).slot_owners[idx] == old_regions.slot_owners[idx],
            final(regions).slots =~= old_regions.slots,
            final(regions).slot_owners.dom() =~= old_regions.slot_owners.dom(),
        decreases list.len() - k,
    {
        // Base case: all elements processed.
        if k >= list.len() {
            return;
        }

        // Decrement raw_count for element k's slot (remove, mutate, re-insert).
        let ghost idx = frame_to_index(meta_to_frame(list[k].paddr));
        let ghost _trigger_k = list[k as int]; // trigger containment precondition
        let tracked mut slot_own = regions.slot_owners.tracked_remove(idx);
        slot_own.raw_count = (slot_own.raw_count - 1) as usize;
        regions.slot_owners.tracked_insert(idx, slot_own);

        let ghost _trigger_k = list[k as int];
        assert(regions.slot_owners[idx].raw_count == slot_own.raw_count);
        assert(old_regions.slot_owners[idx].raw_count > 0);

        // Non-raw_count fields preserved for all elements [0, k+1)
        assert forall|j: int| #![trigger list[j]]
            0 <= j < k + 1 implies ({
                let jdx = frame_to_index(meta_to_frame(list[j].paddr));
                &&& regions.slot_owners[jdx].inner_perms == old_regions.slot_owners[jdx].inner_perms
                &&& regions.slot_owners[jdx].self_addr == old_regions.slot_owners[jdx].self_addr
                &&& regions.slot_owners[jdx].usage == old_regions.slot_owners[jdx].usage
                &&& regions.slot_owners[jdx].paths_in_pt == old_regions.slot_owners[jdx].paths_in_pt
            })
        by {
            let jdx = frame_to_index(meta_to_frame(list[j].paddr));
            if j < k {
                // Distinctness: element j's slot differs from the one just touched.
                let ghost _a = list[j as int];
                let ghost _b = list[k as int];
                assert(jdx != idx);
            }
            // j == k: only raw_count changed on slot_own
        };

        // All processed elements [0, k+1) have raw_count decremented
        assert forall|j: int| #![trigger list[j]]
            0 <= j < k + 1 implies ({
                let jdx = frame_to_index(meta_to_frame(list[j].paddr));
                regions.slot_owners[jdx].raw_count
                    == old_regions.slot_owners[jdx].raw_count - 1
            })
        by {
            let jdx = frame_to_index(meta_to_frame(list[j].paddr));
            if j < k {
                let ghost _a = list[j as int];
                let ghost _b = list[k as int];
                assert(jdx != idx);
            }
        };

        // Recurse on the remaining elements.
        Self::drop_tracked_helper(list, k + 1, old_regions, regions);
    }
}
1311
1312impl<M: AnyFrameMeta + Repr<MetaSlotSmall>> Drop for LinkedList<M> {
1313    fn drop(self, Tracked(s): Tracked<Self::State>) -> (res: Tracked<Self::State>)
1314    {
1315        let tracked (list_own, mut regions) = s;
1316        let ghost original_list = list_own.list;
1317        let ghost original_regions = regions;
1318        let ghost n = original_list.len();
1319        let mut this = self;
1320
1321        #[verus_spec(with Tracked(list_own))]
1322        let cursor_pair = this.cursor_front_mut();
1323        let (mut cursor, Tracked(mut cursor_own)) = cursor_pair;
1324
1325        let ghost mut k: int = 0;
1326
1327        loop
1328            invariant_except_break
1329                cursor.wf(cursor_own),
1330                cursor.current.is_some() <==> k < n,
1331            invariant
1332                cursor_own.inv(),
1333                cursor_own.index == 0,
1334                regions.inv(),
1335                cursor_own.list_own.list.len() == n - k,
1336                0 <= k <= n,
1337                // The remaining list is a suffix of the original
1338                forall|j: int| #![trigger cursor_own.list_own.list[j]]
1339                    0 <= j < n - k ==>
1340                        cursor_own.list_own.list[j] == original_list[j + k],
1341                // Elements already taken have raw_count decremented
1342                forall|j: int| #![trigger original_list[j]]
1343                    0 <= j < k ==> {
1344                        let idx = frame_to_index(meta_to_frame(original_list[j].paddr));
1345                        regions.slot_owners[idx].raw_count
1346                            == original_regions.slot_owners[idx].raw_count - 1
1347                    },
1348                // Slots not in the original list are unchanged
1349                forall|idx: usize| #![trigger regions.slot_owners[idx]]
1350                    (forall|j: int| #![trigger original_list[j]]
1351                        0 <= j < n ==>
1352                            idx != frame_to_index(meta_to_frame(original_list[j].paddr)))
1353                    ==> regions.slot_owners[idx] == original_regions.slot_owners[idx],
1354                regions.slots =~= original_regions.slots,
1355                // Elements not yet processed have original raw_count
1356                forall|j: int| #![trigger original_list[j]]
1357                    k <= j < n ==> {
1358                        let idx = frame_to_index(meta_to_frame(original_list[j].paddr));
1359                        regions.slot_owners[idx].raw_count
1360                            == original_regions.slot_owners[idx].raw_count
1361                    },
1362                // Each remaining element's slot is in slot_owners
1363                forall|j: int| #![trigger original_list[j]]
1364                    k <= j < n ==>
1365                        regions.slot_owners.contains_key(frame_to_index(original_list[j].paddr)),
1366                // Distinct slot indices in original list (from drop_requires)
1367                forall|i: int, j: int|
1368                    #![trigger original_list[i], original_list[j]]
1369                    0 <= i < j < n ==>
1370                        frame_to_index(meta_to_frame(original_list[i].paddr))
1371                            != frame_to_index(meta_to_frame(original_list[j].paddr)),
1372            ensures
1373                k == n,
1374                cursor_own.list_own.list.len() == 0,
1375            decreases n - k,
1376        {
1377            proof {
1378                if cursor.current.is_some() {
1379                    assert(cursor_own.length() > 0);
1380                    // cursor.current.addr() == list[0].paddr == original_list[k].paddr
1381                    // from the slot containment invariant, this key is in slot_owners
1382                    let ghost _trigger = original_list[k as int];
1383                    assert(cursor.current.unwrap().addr() == original_list[k].paddr);
1384                }
1385            }
1386
1387            #[verus_spec(with Tracked(&mut regions), Tracked(&mut cursor_own))]
1388            let entry = cursor.take_current();
1389
1390            if let Some(current) = entry {
1391                let ghost cur_addr = current.0.ptr.addr();
1392                let ghost cur_idx = frame_to_index(meta_to_frame(cur_addr));
1393
1394                let (mut frame, frame_own_tracked) = current;
1395                let tracked frame_own = frame_own_tracked.get();
1396
1397                proof {
1398                    assert(cur_addr == original_list[k].paddr);
1399                    assert(cur_idx == frame_to_index(meta_to_frame(original_list[k].paddr)));
1400                }
1401
1402                // Drop the frame, returning its slot to regions
1403                #[verus_spec(with Tracked(frame_own), Tracked(&mut regions))]
1404                frame.drop();
1405
1406                proof {
1407                    assert forall|j: int| #![trigger cursor_own.list_own.list[j]]
1408                        0 <= j < n - k - 1 implies
1409                            cursor_own.list_own.list[j] == original_list[j + k + 1]
1410                    by {};
1411
1412                    assert forall|j: int| #![trigger original_list[j]]
1413                        0 <= j < k implies ({
1414                            let idx = frame_to_index(meta_to_frame(original_list[j].paddr));
1415                            regions.slot_owners[idx].raw_count
1416                                == original_regions.slot_owners[idx].raw_count - 1
1417                        })
1418                    by {
1419                        let idx = frame_to_index(meta_to_frame(original_list[j].paddr));
1420                        let ghost _a = original_list[j as int];
1421                        let ghost _b = original_list[k as int];
1422                        assert(j < k);
1423                        assert(idx != cur_idx);
1424                    };
1425
1426                    k = k + 1;
1427                }
1428            } else {
1429                break;
1430            }
1431        }
1432
1433        Tracked((cursor_own.list_own, regions))
1434    }
1435}
1436
1437
// Transparent read access to the wrapped metadata: `&Link<M>` can be used
// wherever `&M` is expected, hiding the intrusive `next`/`prev` pointers.
impl<M: AnyFrameMeta + Repr<MetaSlotSmall>> Deref for Link<M> {
    type Target = M;

    fn deref(&self) -> &Self::Target {
        // Expose only the user metadata; the link pointers stay encapsulated
        // so list structure cannot be observed (or corrupted) through `Deref`.
        &self.meta
    }
}
1445
// Transparent mutable access to the wrapped metadata, mirroring the `Deref`
// impl above. Mutation is confined to `meta`; the `next`/`prev` link fields
// are never reachable through this path, preserving list integrity.
impl<M: AnyFrameMeta + Repr<MetaSlotSmall>> DerefMut for Link<M> {

    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.meta
    }
}
1452
impl<M: AnyFrameMeta + Repr<MetaSlotSmall>> Link<M> {
    /// Creates a new linked list metadata wrapping `meta`.
    ///
    /// The returned link is detached: both `next` and `prev` are `None`,
    /// so the frame is not yet part of any [`LinkedList`]. `const` so it
    /// can be used in static initializers.
    pub const fn new(meta: M) -> Self {
        Self { next: None, prev: None, meta }
    }
}
1459
// SAFETY: If `M::on_drop` reads the page using the provided `VmReader`,
// the safety is upheld by the one who implements `AnyFrameMeta` for `M`.
impl<M: AnyFrameMeta + Repr<MetaSlotSmall>> AnyFrameMeta for Link<M>
{
    // Forward the drop hook to the inner metadata; the `next`/`prev`
    // pointers themselves carry no resources that need cleanup here.
    fn on_drop(&mut self, reader: &mut crate::mm::VmReader<crate::mm::Infallible>) {
        self.meta.on_drop(reader);
    }

    // A linked frame is untyped exactly when its inner metadata says so.
    fn is_untyped(&self) -> bool {
        self.meta.is_untyped()
    }

    // Ghost-only specification function, left uninterpreted: proofs may
    // refer to `vtable_ptr` but cannot assume anything about its value.
    uninterp spec fn vtable_ptr(&self) -> usize;
}
1474
1475} // verus!