1use vstd::prelude::*;
4use vstd_extra::seq_extra::{seq_tracked_map_values, seq_tracked_new, seq_tracked_subrange};
5
6use core::{fmt::Debug, mem::ManuallyDrop, ops::Range};
7
8use crate::mm::frame::{inc_frame_ref_count, untyped::AnyUFrameMeta, Frame};
9
10use vstd_extra::ownership::*;
11
12use super::meta::mapping::{frame_to_index, frame_to_index_spec, meta_addr, meta_to_frame_spec};
13use super::{AnyFrameMeta, GetFrameError, MetaPerm, MetaSlot};
14use crate::mm::{Paddr, PagingLevel, Vaddr};
15use crate::specs::arch::mm::{MAX_NR_PAGES, MAX_PADDR, PAGE_SIZE};
16use crate::specs::mm::frame::meta_region_owners::MetaRegionOwners;
17use vstd_extra::undroppable::*;
18
19verus! {
20
/// A contiguous range of physical frames whose metadata has type `M`.
///
/// Only the physical address range is stored; per-frame metadata
/// permissions live in the separate ghost [`SegmentOwner`].
#[repr(transparent)]
#[rustc_has_incoherent_inherent_impls]
pub struct Segment<M: AnyFrameMeta + ?Sized> {
    /// Physical address range `[start, end)` covered by this segment.
    pub range: Range<Paddr>,
    /// Zero-sized marker tying the segment to its metadata type `M`.
    pub _marker: core::marker::PhantomData<M>,
}
41
/// Marks `Segment` as a type that must not be implicitly dropped; disposal
/// goes through `NeverDrop::new(segment, Tracked(regions))`, with the
/// meta-region owners as the tracked state.
impl<M: AnyFrameMeta + ?Sized> Undroppable for Segment<M> {
    type State = MetaRegionOwners;

    /// No extra precondition on constructing the undroppable wrapper.
    open spec fn constructor_requires(self, s: Self::State) -> bool {
        true
    }

    /// Wrapping a segment leaves the meta-region state unchanged.
    open spec fn constructor_ensures(self, s0: Self::State, s1: Self::State) -> bool {
        s0 =~= s1
    }

    // NOTE(review): this proof obligation is currently discharged with
    // `admit()` — TODO: replace with a real proof.
    proof fn constructor_spec(self, tracked s: &mut Self::State) {
        admit()
    }
}
58
/// Tracked (ghost) ownership of a [`Segment`]: one metadata-slot permission
/// per page of the segment (see `Segment::inv_with` for the per-index
/// address correspondence).
pub tracked struct SegmentOwner<M: AnyFrameMeta + ?Sized> {
    /// Permission to the metadata slot of each frame in the segment.
    pub perms: Seq<MetaPerm<M>>,
}
64
impl<M: AnyFrameMeta + ?Sized> Inv for Segment<M> {
    /// Well-formedness: both endpoints are page-aligned and the (possibly
    /// empty) range lies below `MAX_PADDR()`.
    open spec fn inv(self) -> bool {
        &&& self.range.start % PAGE_SIZE() == 0
        &&& self.range.end % PAGE_SIZE() == 0
        &&& self.range.start <= self.range.end < MAX_PADDR()
    }
}
73
impl<M: AnyFrameMeta + ?Sized> Inv for SegmentOwner<M> {
    /// Every held permission is page-aligned, below `MAX_PADDR()`,
    /// well-formed, and initialized.
    open spec fn inv(self) -> bool {
        &&& forall|i: int|
            #![trigger self.perms[i]]
            0 <= i < self.perms.len() as int ==> {
                &&& self.perms[i].addr() % PAGE_SIZE() == 0
                &&& self.perms[i].addr() < MAX_PADDR()
                &&& self.perms[i].wf()
                &&& self.perms[i].is_init()
            }
    }
}
87
impl<M: AnyFrameMeta + ?Sized> Segment<M> {
    /// Invariant tying a segment to its ghost owner: the segment is
    /// well-formed, the owner holds exactly one permission per page, and
    /// the `i`-th permission addresses the metadata slot of the `i`-th
    /// frame in the range.
    pub open spec fn inv_with(&self, owner: &SegmentOwner<M>) -> bool {
        &&& self.inv()
        &&& owner.perms.len() * PAGE_SIZE() == self.range.end - self.range.start
        &&& forall|i: int|
            #![trigger owner.perms[i]]
            0 <= i < owner.perms.len() as int ==> owner.perms[i].addr() == meta_addr(
                frame_to_index((self.range.start + i * PAGE_SIZE()) as usize),
            )
    }
}
99
impl<M: AnyFrameMeta + ?Sized> SegmentOwner<M> {
    /// True when none of this owner's slots appear in the region's
    /// dropped-slot map.
    // NOTE(review): `frame_to_index_spec` is applied to `perms[i].addr()`,
    // which per `inv_with` is a *meta* address, while `into_raw` converts
    // meta addresses via `meta_to_frame_spec` first — confirm the intended
    // key space is consistent.
    pub open spec fn is_disjoint_with_meta_region(&self, region: &MetaRegionOwners) -> bool {
        forall|i: int|
            #![trigger self.perms[i]]
            0 <= i < self.perms.len() as int ==> {
                &&& !region.dropped_slots.contains_key(frame_to_index_spec(self.perms[i].addr()))
            }
    }
}
109
/// A [`Segment`] whose frames carry untyped (`dyn AnyUFrameMeta`) metadata.
pub type USegment = Segment<dyn AnyUFrameMeta>;
123
124#[verus_verify]
155impl<M: AnyFrameMeta> Segment<M> {
    /// Spec view of the segment's starting physical address.
    #[rustc_allow_incoherent_impl]
    #[verifier::inline]
    pub open spec fn start_paddr_spec(&self) -> Paddr
        recommends
            self.inv(),
    {
        self.range.start
    }
164
    /// Spec view of the segment's (exclusive) ending physical address.
    #[rustc_allow_incoherent_impl]
    #[verifier::inline]
    pub open spec fn end_paddr_spec(&self) -> Paddr
        recommends
            self.inv(),
    {
        self.range.end
    }
173
    /// Spec view of the segment's size in bytes (`end - start`).
    #[rustc_allow_incoherent_impl]
    #[verifier::inline]
    pub open spec fn size_spec(&self) -> usize
        recommends
            self.inv(),
    {
        (self.range.end - self.range.start) as usize
    }
182
    /// Spec view of the number of pages in the segment
    /// (`size / PAGE_SIZE()`; exact when `inv()` holds, since both
    /// endpoints are page-aligned).
    #[rustc_allow_incoherent_impl]
    #[verifier::inline]
    pub open spec fn nrpage_spec(&self) -> usize
        recommends
            self.inv(),
    {
        self.size_spec() / PAGE_SIZE()
    }
191
192 #[rustc_allow_incoherent_impl]
193 pub open spec fn split_spec(self, offset: usize) -> (Self, Self)
194 recommends
195 self.inv(),
196 offset % PAGE_SIZE() == 0,
197 0 < offset < self.size_spec(),
198 {
199 let at = (self.range.start + offset) as usize;
200 let idx = at / PAGE_SIZE();
201 (
202 Self { range: self.range.start..at, _marker: core::marker::PhantomData },
203 Self { range: at..self.range.end, _marker: core::marker::PhantomData },
204 )
205 }
206
    /// Executable getter for the segment's starting physical address.
    #[rustc_allow_incoherent_impl]
    #[inline(always)]
    #[verifier::when_used_as_spec(start_paddr_spec)]
    pub fn start_paddr(&self) -> (res: Paddr)
        requires
            self.inv(),
        returns
            self.start_paddr_spec(),
    {
        self.range.start
    }
219
    /// Executable getter for the segment's (exclusive) ending physical
    /// address.
    #[rustc_allow_incoherent_impl]
    #[inline(always)]
    #[verifier::when_used_as_spec(end_paddr_spec)]
    pub fn end_paddr(&self) -> (res: Paddr)
        requires
            self.inv(),
        returns
            self.end_paddr_spec(),
    {
        self.range.end
    }
232
    /// Executable getter for the segment's size in bytes.
    #[rustc_allow_incoherent_impl]
    #[inline(always)]
    #[verifier::when_used_as_spec(size_spec)]
    pub fn size(&self) -> (res: usize)
        requires
            self.inv(),
        returns
            self.size_spec(),
    {
        self.range.end - self.range.start
    }
245
    /// Precondition for [`Self::from_unused`]: well-formed region state, a
    /// page-aligned `range` below `MAX_PADDR()`, `metadata_fn` callable on
    /// every page-aligned address in `range`, and every `metadata_fn`
    /// result returning its input address unchanged and naming a slot that
    /// is live, not dropped, unused, and not in any list.
    #[rustc_allow_incoherent_impl]
    pub open spec fn from_unused_requires(
        regions: MetaRegionOwners,
        range: Range<Paddr>,
        metadata_fn: impl Fn(Paddr) -> (Paddr, M),
    ) -> bool {
        &&& regions.inv()
        &&& range.start % PAGE_SIZE() == 0
        &&& range.end % PAGE_SIZE() == 0
        &&& range.start <= range.end < MAX_PADDR()
        &&& forall|paddr_in: Paddr|
            (range.start <= paddr_in < range.end && paddr_in % PAGE_SIZE() == 0) ==> {
                &&& metadata_fn.requires((paddr_in,))
            }
        &&& forall|paddr_in: Paddr, paddr_out: Paddr, m: M|
            metadata_fn.ensures((paddr_in,), (paddr_out, m)) ==> {
                &&& paddr_out < MAX_PADDR()
                &&& paddr_out % PAGE_SIZE() == 0
                &&& paddr_in == paddr_out
                &&& regions.slots.contains_key(frame_to_index(paddr_out))
                &&& !regions.dropped_slots.contains_key(frame_to_index(paddr_out))
                &&& regions.slot_owners[frame_to_index(paddr_out)].usage is Unused
                &&& regions.slot_owners[frame_to_index(paddr_out)].in_list.points_to(0)
            }
    }
271
    /// Postcondition for [`Self::from_unused`]: on `Ok`, the returned
    /// segment covers exactly `range`, a ghost owner exists holding one
    /// meta permission per page (in address order), and the whole range is
    /// no longer tracked in the live region maps. Nothing is guaranteed on
    /// `Err`.
    #[rustc_allow_incoherent_impl]
    pub open spec fn from_unused_ensures(
        old_regions: MetaRegionOwners,
        new_regions: MetaRegionOwners,
        owner: Option<SegmentOwner<M>>,
        range: Range<Paddr>,
        metadata_fn: impl Fn(Paddr) -> (Paddr, M),
        r: Result<Self, GetFrameError>,
    ) -> bool {
        &&& r matches Ok(r) ==> {
            &&& new_regions.inv()
            &&& r.range.start == range.start
            &&& r.range.end == range.end
            &&& owner matches Some(owner) && {
                &&& r.inv_with(&owner)
                &&& forall|i: int|
                    #![trigger owner.perms[i]]
                    0 <= i < owner.perms.len() as int ==> {
                        &&& owner.perms[i].addr() == meta_addr(
                            frame_to_index_spec((range.start + i * PAGE_SIZE()) as usize),
                        )
                    }
                &&& new_regions.paddr_range_not_in_region(range)
            }
        }
    }
298
    /// Precondition for [`Self::split`]: segment/owner invariant holds and
    /// `offset` is a page-aligned interior point of the segment.
    #[rustc_allow_incoherent_impl]
    pub open spec fn split_requires(self, owner: SegmentOwner<M>, offset: usize) -> bool {
        &&& self.inv_with(&owner)
        &&& offset % PAGE_SIZE() == 0
        &&& 0 < offset < self.size()
    }
305
    /// Postcondition for [`Self::split`]: both halves satisfy their
    /// invariants with their owners, the pair equals `split_spec(offset)`,
    /// and the original permission sequence is exactly the concatenation of
    /// the two halves' sequences.
    #[rustc_allow_incoherent_impl]
    pub open spec fn split_ensures(
        self,
        offset: usize,
        lhs: Self,
        rhs: Self,
        ori_owner: SegmentOwner<M>,
        lhs_owner: SegmentOwner<M>,
        rhs_owner: SegmentOwner<M>,
    ) -> bool {
        &&& lhs.inv_with(&lhs_owner)
        &&& rhs.inv_with(&rhs_owner)
        &&& (lhs, rhs) == self.split_spec(offset)
        &&& ori_owner.perms =~= (lhs_owner.perms + rhs_owner.perms)
    }
321
322 #[rustc_allow_incoherent_impl]
323 pub open spec fn into_raw_requires(
324 self,
325 regions: MetaRegionOwners,
326 owner: SegmentOwner<M>,
327 ) -> bool {
328 &&& self.inv_with(&owner)
329 &&& regions.inv()
330 &&& owner.inv()
331 &&& owner.is_disjoint_with_meta_region(®ions)
332 }
333
    /// Postcondition for [`Self::into_raw`]: the returned range is the
    /// segment's own range, the region state stays well-formed, and the
    /// whole range is now recorded in the dropped-slot region.
    #[rustc_allow_incoherent_impl]
    pub open spec fn into_raw_ensures(
        self,
        regions: MetaRegionOwners,
        owner: SegmentOwner<M>,
        r: Range<Paddr>,
    ) -> bool {
        &&& r == self.range
        &&& regions.inv()
        &&& regions.paddr_range_in_dropped_region(self.range)
    }
345
    /// Precondition for [`Self::from_raw`]: a page-aligned, non-empty
    /// range below `MAX_PADDR()` whose pages are all in the dropped-slot
    /// region (i.e. the range came from a prior `into_raw`).
    #[rustc_allow_incoherent_impl]
    pub open spec fn from_raw_requires(regions: MetaRegionOwners, range: Range<Paddr>) -> bool {
        &&& regions.inv()
        &&& regions.paddr_range_in_dropped_region(range)
        &&& range.start % PAGE_SIZE() == 0
        &&& range.end % PAGE_SIZE() == 0
        &&& range.start < range.end < MAX_PADDR()
    }
354
    /// Postcondition for [`Self::from_raw`]: the reconstructed segment
    /// covers `range`, satisfies its invariant with the returned owner,
    /// and the range is no longer tracked in the region maps.
    #[rustc_allow_incoherent_impl]
    pub open spec fn from_raw_ensures(
        self,
        old_regions: MetaRegionOwners,
        new_regions: MetaRegionOwners,
        owner: SegmentOwner<M>,
        range: Range<Paddr>,
    ) -> bool {
        &&& self.inv_with(&owner)
        &&& self.range == range
        &&& new_regions.inv()
        &&& new_regions.paddr_range_not_in_region(range)
    }
368
    /// Precondition for [`Self::slice`]: segment/owner invariant holds and
    /// `range` is a page-aligned sub-range expressed as offsets relative to
    /// `self.range.start`.
    #[rustc_allow_incoherent_impl]
    pub open spec fn slice_requires(self, owner: SegmentOwner<M>, range: Range<Paddr>) -> bool {
        &&& self.inv_with(&owner)
        &&& range.start % PAGE_SIZE() == 0
        &&& range.end % PAGE_SIZE() == 0
        &&& self.range.start + range.start <= self.range.start + range.end <= self.range.end
    }
376
    /// Postcondition for [`Self::slice`]: the sub-segment satisfies its
    /// invariant with the corresponding subrange of the owner's
    /// permissions (indexed by page within the parent segment).
    #[rustc_allow_incoherent_impl]
    pub open spec fn slice_ensures(
        self,
        owner: SegmentOwner<M>,
        range: Range<Paddr>,
        res: Self,
    ) -> bool {
        &&& res.inv_with(
            &SegmentOwner {
                perms: owner.perms.subrange(
                    (range.start / PAGE_SIZE()) as int,
                    (range.end / PAGE_SIZE()) as int,
                ),
            },
        )
    }
393
    /// Precondition for [`Self::next`]: the first page's slot is recorded
    /// as dropped (owned by this raw segment) and absent from the live
    /// slot map.
    #[rustc_allow_incoherent_impl]
    pub open spec fn next_requires(self, regions: MetaRegionOwners) -> bool {
        &&& self.inv()
        &&& regions.inv()
        &&& regions.dropped_slots.contains_key(frame_to_index(self.range.start))
        &&& !regions.slots.contains_key(frame_to_index(self.range.start))
    }
401
    /// Postcondition for [`Self::next`]: `None` means the cursor reached
    /// the end of the segment; `Some(f)` means `f` is the frame at the old
    /// start, the cursor advanced by exactly one page, and only that
    /// frame's entry left the dropped-slot map.
    #[rustc_allow_incoherent_impl]
    pub open spec fn next_ensures(
        old_self: Self,
        new_self: Self,
        old_regions: MetaRegionOwners,
        new_regions: MetaRegionOwners,
        res: Option<Frame<M>>,
    ) -> bool {
        &&& new_regions.inv()
        &&& new_self.inv()
        &&& match res {
            None => { &&& new_self.range.start == old_self.range.end },
            Some(f) => {
                &&& new_self.range.start == old_self.range.start + PAGE_SIZE()
                &&& f.paddr() == old_self.range.start
                &&& forall|i: usize|
                    #![trigger new_regions.dropped_slots[i], old_regions.dropped_slots[i]]
                    {
                        &&& i != frame_to_index(f.paddr()) ==> old_regions.dropped_slots[i]
                            == new_regions.dropped_slots[i]
                        &&& i == frame_to_index(f.paddr())
                            ==> !new_regions.dropped_slots.contains_key(i)
                    }
            },
        }
    }
428
    /// Allocates a segment over `range` by claiming each page in address
    /// order via [`Frame::from_unused`], using `metadata_fn` to produce
    /// each frame's metadata. On a per-frame failure the error is returned
    /// immediately (pages already claimed are not rolled back here, and the
    /// ghost owner stays `None`).
    #[rustc_allow_incoherent_impl]
    #[verus_spec(r =>
        with
        Tracked(regions): Tracked<&mut MetaRegionOwners>,
        -> owner: Tracked<Option<SegmentOwner<M>>>,
        requires
        Self::from_unused_requires(*old(regions), range, metadata_fn),
        ensures
        Self::from_unused_ensures(*old(regions), *regions, owner@, range, metadata_fn, r),
    )]
    pub fn from_unused(range: Range<Paddr>, metadata_fn: impl Fn(Paddr) -> (Paddr, M)) -> (res:
        Result<Self, GetFrameError>) {
        proof_decl! {
            // Ghost state: the eventual owner, and the page addresses
            // claimed so far (in order).
            let tracked mut owner: Option<SegmentOwner<M>> = None;
            let tracked mut addrs = Seq::<usize>::tracked_empty();
        }
        // Start with an empty segment anchored at `range.start` and grow
        // `range.end` one page per claimed frame.
        let mut segment = Self {
            range: range.start..range.start,
            _marker: core::marker::PhantomData,
        };

        let mut i = 0;
        let addr_len = (range.end - range.start) / PAGE_SIZE();

        #[verus_spec(
            invariant
                i <= addr_len,
                i as int == addrs.len(),
                range.start % PAGE_SIZE() == 0,
                range.end % PAGE_SIZE() == 0,
                range.start <= range.start + i * PAGE_SIZE() <= range.end,
                range.end == range.start + addr_len * PAGE_SIZE(),
                addr_len == (range.end - range.start) / PAGE_SIZE() as int,
                i <= addr_len,
                forall|paddr_in: Paddr|
                    (range.start <= paddr_in < range.end && paddr_in % PAGE_SIZE() == 0) ==> {
                        &&& metadata_fn.requires((paddr_in,))
                    },
                forall|paddr_in: Paddr, paddr_out: Paddr, m: M|
                    metadata_fn.ensures((paddr_in,), (paddr_out, m)) ==> {
                        &&& paddr_out < MAX_PADDR()
                        &&& paddr_out % PAGE_SIZE() == 0
                        &&& paddr_in == paddr_out
                        &&& regions.slots.contains_key(frame_to_index(paddr_out))
                        &&& !regions.dropped_slots.contains_key(frame_to_index(paddr_out))
                        &&& regions.slot_owners[frame_to_index(paddr_out)].usage is Unused
                        &&& regions.slot_owners[frame_to_index(paddr_out)].in_list.points_to(0)
                    },
                forall|j: int|
                    0 <= j < addrs.len() as int ==> {
                        &&& regions.slots.contains_key(frame_to_index_spec(addrs[j]))
                        &&& !regions.dropped_slots.contains_key(frame_to_index_spec(addrs[j]))
                        &&& addrs[j] % PAGE_SIZE() == 0
                        &&& addrs[j] == range.start + (j as u64) * PAGE_SIZE()
                    },
                forall|paddr: Paddr| #[trigger]
                    old(regions).slots.contains_key(frame_to_index(paddr))
                        ==> regions.slots.contains_key(frame_to_index(paddr)),
                regions.inv(),
                segment.range.start == range.start,
                segment.range.end == range.start + i * PAGE_SIZE(),
            decreases addr_len - i,
        )]
        while i < addr_len {
            let paddr = range.start + i * PAGE_SIZE();
            let (paddr, meta) = metadata_fn(paddr);

            let frame = match #[verus_spec(with Tracked(regions))]
            Frame::<M>::from_unused(paddr, meta) {
                Ok(f) => f,
                Err(e) => {
                    // Propagate the failure; the ghost owner is still `None`.
                    return {
                        proof_with!(|= Tracked(owner));
                        Err(e)
                    };
                },
            };

            proof {
                // NOTE(review): both facts below are assumed via `admit()`
                // — TODO: discharge these proof obligations properly.
                assert(forall|paddr_in: Paddr, paddr_out: Paddr, m: M|
                    metadata_fn.ensures((paddr_in,), (paddr_out, m)) ==> {
                        &&& regions.slot_owners[frame_to_index(paddr_out)].usage is Unused
                        &&& regions.slot_owners[frame_to_index(paddr_out)].in_list.points_to(0)
                    }) by {
                    admit();
                }
                assert(frame.constructor_requires(*old(regions))) by { admit() };
            }

            // The frame must not be dropped here: the segment takes over
            // ownership of the page, so dispose via NeverDrop.
            let _ = NeverDrop::new(frame, Tracked(regions));
            segment.range.end = paddr + PAGE_SIZE();
            proof {
                addrs.tracked_push(paddr);
                // NOTE(review): loop-invariant re-establishment is admitted.
                admit();
            }

            i += 1;
        }

        proof {
            broadcast use vstd_extra::map_extra::lemma_map_remove_keys_finite;
            broadcast use vstd_extra::seq_extra::lemma_seq_to_set_map_contains;
            broadcast use vstd::map::group_map_axioms;
            broadcast use vstd::map_lib::group_map_extra;

            // Build one MetaPerm per claimed address, preserving order.
            let tracked owner_seq = seq_tracked_map_values(
                addrs,
                |addr: usize|
                    {
                        let perm = regions.slots[frame_to_index(addr)];
                        MetaPerm {
                            addr: meta_addr(frame_to_index(addr)),
                            points_to: perm,
                            _T: core::marker::PhantomData,
                        }
                    },
            );
            owner = Some(SegmentOwner { perms: owner_seq });

            // Remove the claimed slots from the live map.
            let index = addrs.map_values(|addr: Paddr| frame_to_index(addr)).to_set();
            regions.slots.tracked_remove_keys(index);

            // Every page-aligned address in the range is now absent from
            // both region maps.
            assert forall|addr: usize|
                #![trigger frame_to_index_spec(addr)]
                range.start <= addr < range.end && addr % PAGE_SIZE() == 0 implies {
                &&& !regions.slots.contains_key(frame_to_index_spec(addr))
                &&& !regions.dropped_slots.contains_key(frame_to_index_spec(addr))
            } by {
                assert(addrs.contains(addr)) by {
                    if !addrs.contains(addr) {
                        let j = (addr - range.start) / PAGE_SIZE() as int;
                        assert(0 <= j < addrs.len() as usize) by {
                            assert(addr >= range.start);
                            assert(addr < range.end);
                            assert(addr % PAGE_SIZE() == 0);
                        };
                        assert(addrs[j as int] == addr);
                    }
                }
            }
        }

        proof_with!(|= Tracked(owner));
        Ok(segment)
    }
586
    /// Splits the segment at byte `offset` into two segments, consuming
    /// `self` (disposed via `NeverDrop`) and splitting the ghost owner's
    /// permission sequence at page index `offset / PAGE_SIZE()`.
    #[rustc_allow_incoherent_impl]
    #[verus_spec(r =>
        with
        Tracked(owner): Tracked<SegmentOwner<M>>,
        Tracked(regions): Tracked<&mut MetaRegionOwners>,
        -> frame_perms: (Tracked<SegmentOwner<M>>, Tracked<SegmentOwner<M>>),
        requires
        Self::split_requires(self, owner, offset),
        ensures
        Self::split_ensures(self, offset, r.0, r.1, owner, frame_perms.0@, frame_perms.1@),
    )]
    pub fn split(self, offset: usize) -> (Self, Self) {
        let at = self.range.start + offset;
        // Page index of the split point, relative to the segment start.
        let idx = offset / PAGE_SIZE();
        let res = (
            Self { range: self.range.start..at, _marker: core::marker::PhantomData },
            Self { range: at..self.range.end, _marker: core::marker::PhantomData },
        );

        // `self` is Undroppable; dispose it explicitly.
        let _ = NeverDrop::new(self, Tracked(regions));

        // Split the permission sequence: [0, idx) and [idx, len).
        let tracked frame_perms1 = SegmentOwner {
            perms: seq_tracked_subrange(owner.perms, 0, idx as int),
        };
        let tracked frame_perms2 = SegmentOwner {
            perms: seq_tracked_subrange(owner.perms, idx as int, owner.perms.len() as int),
        };

        proof {
            // Concatenating the two subranges yields the original sequence.
            owner.perms.lemma_split_at(idx as int);
        }

        proof_with!(|= (Tracked(frame_perms1), Tracked(frame_perms2)));
        res
    }
631
    /// Consumes the segment, transferring every page's meta permission
    /// into `regions.dropped_slots`, and returns the covered physical
    /// range. The segment itself is disposed through `NeverDrop`.
    #[rustc_allow_incoherent_impl]
    #[verus_spec(r =>
        with
        Tracked(regions): Tracked<&mut MetaRegionOwners>,
        Tracked(owner): Tracked<SegmentOwner<M>>,
        requires
        Self::into_raw_requires(self, *old(regions), owner),
        ensures
        Self::into_raw_ensures(self, *regions, owner, r),
    )]
    pub(crate) fn into_raw(self) -> Range<Paddr> {
        proof {
            broadcast use vstd_extra::seq_extra::lemma_seq_to_set_map_contains;
            broadcast use crate::mm::frame::meta::mapping::lemma_paddr_to_meta_biinjective;
            // Frame keys of every page owned by this segment.
            let new_keys = owner.perms.map_values(|v: MetaPerm<M>| meta_to_frame_spec(v.addr()));
            // NOTE(review): the value function indexes `perms` by the map
            // key `k` (a frame key), not by a sequence position — confirm
            // this matches the intended key/index correspondence.
            let new_dropped_slots = Map::new(
                |i: usize| new_keys.contains(i),
                |k: usize| { owner.perms[k as int].points_to },
            );

            regions.dropped_slots.tracked_union_prefer_right(new_dropped_slots);

            // Every page-aligned address in the range now has its slot
            // recorded as dropped.
            assert forall|paddr: Paddr|
                self.range.start <= paddr < self.range.end && paddr % PAGE_SIZE() == 0 implies {
                &&& regions.dropped_slots.contains_key(#[trigger] frame_to_index_spec(paddr))
            } by {
                assert(new_keys.to_set().contains(frame_to_index_spec(paddr))) by {
                    let j = (paddr - self.range.start) / PAGE_SIZE() as int;
                    assert(0 <= j < owner.perms.len() as int);
                    assert(owner.perms[j].addr() == meta_addr(frame_to_index_spec(paddr)));
                }
            }
        }

        let range = self.range.clone();
        // `self` is Undroppable; dispose it explicitly.
        let _ = NeverDrop::new(self, Tracked(regions));
        range
    }
676
    /// Reconstructs a segment from a raw `range` previously produced by
    /// [`Self::into_raw`], moving each page's permission out of
    /// `regions.dropped_slots` into the returned ghost owner.
    ///
    /// # Safety
    /// The caller must guarantee the range came from `into_raw` and is not
    /// reconstructed twice; this is captured by `from_raw_requires`.
    #[rustc_allow_incoherent_impl]
    #[verus_spec(r =>
        with
        Tracked(regions): Tracked<&mut MetaRegionOwners>,
        -> frame_perms: Tracked<SegmentOwner<M>>,
        requires
        Self::from_raw_requires(*old(regions), range),
        ensures
        Self::from_raw_ensures(r, *old(regions), *regions, frame_perms@, range),
    )]
    pub(crate) unsafe fn from_raw(range: Range<Paddr>) -> Self {
        proof_decl! {
            let tracked mut frame_perms: SegmentOwner<M>;
        }
        proof {
            broadcast use vstd::arithmetic::div_mod::group_div_basics;

            // Rebuild one permission per page, in address order.
            let len = (range.end - range.start) / PAGE_SIZE() as int;
            let tracked owner_seq = seq_tracked_new(
                len as nat,
                |i: int|
                    {
                        let paddr = (range.start + (i as u64) * PAGE_SIZE() as int) as Paddr;
                        MetaPerm {
                            addr: meta_addr(frame_to_index(paddr)),
                            points_to: regions.dropped_slots[frame_to_index(paddr)],
                            _T: core::marker::PhantomData,
                        }
                    },
            );
            frame_perms = SegmentOwner { perms: owner_seq };

            // Remove the reclaimed pages from the dropped-slot map.
            let keys = Set::new(
                |paddr: Paddr| { range.start <= paddr < range.end && paddr % PAGE_SIZE() == 0 },
            );
            let keys_to_remove = keys.map(|paddr: Paddr| frame_to_index(paddr));

            regions.dropped_slots.tracked_remove_keys(keys_to_remove);

            // After removal no page of the range is still marked dropped.
            assert forall|paddr: Paddr|
                #![trigger frame_to_index(paddr)]
                range.start <= paddr < range.end && paddr % PAGE_SIZE() == 0 implies {
                &&& !regions.dropped_slots.contains_key(#[trigger] frame_to_index(paddr))
            } by {
                assert(keys.contains(paddr));

                if regions.dropped_slots.contains_key(frame_to_index(paddr)) {
                    assert(!keys_to_remove.contains(frame_to_index(paddr)));
                }
            }
        }

        proof_with!(|= Tracked(frame_perms));
        Self { range, _marker: core::marker::PhantomData }
    }
743
    /// Returns a sub-segment of `self` covering `range`, where `range`
    /// holds byte offsets relative to `self.range.start`. `self` is left
    /// unchanged; the ghost owner of the result is the corresponding
    /// subrange of this segment's owner (see `slice_ensures`).
    #[rustc_allow_incoherent_impl]
    #[verus_spec(r =>
        with
        Tracked(owner): Tracked<&SegmentOwner<M>>,
        requires
        Self::slice_requires(*self, *owner, *range),
        ensures
        Self::slice_ensures(*self, *owner, *range, r),
    )]
    pub fn slice(&self, range: &Range<Paddr>) -> Self {
        let start = self.range.start + range.start;
        let end = self.range.start + range.end;

        // NOTE(review): this loop has no observable effect — its body only
        // computes `paddr` and increments `i`. It looks vestigial (or its
        // ghost bookkeeping was elided); confirm whether per-page work was
        // intended here before removing it.
        let mut i = 0;
        let addr_len = (end - start) / PAGE_SIZE();
        while i < addr_len
            invariant
                start % PAGE_SIZE() == 0,
                end % PAGE_SIZE() == 0,
                start + i * PAGE_SIZE() <= end,
                i <= addr_len,
                addr_len == (end - start) / PAGE_SIZE() as int,
            decreases addr_len - i,
        {
            let paddr = start + i * PAGE_SIZE();
            i += 1;
        }

        Self { range: start..end, _marker: core::marker::PhantomData }
    }
787
    /// Pops the first frame of the segment, advancing `range.start` by one
    /// page; returns `None` once the segment is exhausted. Intended for
    /// iterating a raw segment whose pages sit in the dropped-slot region
    /// (see `next_requires`/`next_ensures`).
    #[rustc_allow_incoherent_impl]
    #[verus_spec(res =>
        with
        Tracked(regions): Tracked<&mut MetaRegionOwners>,
        Tracked(owner): Tracked<&mut SegmentOwner<M>>
        requires
        Self::next_requires(*old(self), *old(regions)),
        old(owner).perms.len() > 0
        ensures
        Self::next_ensures(*old(self), *self, *old(regions), *regions, res),
    )]
    pub fn next(&mut self) -> Option<Frame<M>> {
        if self.range.start < self.range.end {
            // Take the permission for the first page and reconstruct its
            // frame from the raw address.
            let tracked perm = owner.perms.tracked_remove(0);
            let frame = unsafe {
                #[verus_spec(with Tracked(regions), Tracked(&perm))]
                Frame::<M>::from_raw(self.range.start)
            };

            self.range.start = self.range.start + PAGE_SIZE();
            Some(frame)
        } else {
            None
        }
    }
822}
823
824}