use crate::specs::arch::PAGE_SIZE;
use core::marker::PhantomData;
use core::ops::Range;
use vstd::pervasive::arbitrary;
use vstd::prelude::*;
use vstd::simple_pptr::*;
use vstd::std_specs::convert::TryFromSpecImpl;
use vstd_extra::array_ptr::ArrayPtr;
use vstd_extra::array_ptr::PointsToArray;
use vstd_extra::ownership::Inv;

use crate::error::*;
use crate::mm::pod::{Pod, PodOnce};
use crate::specs::arch::kspace::{KERNEL_BASE_VADDR, KERNEL_END_VADDR};
use crate::specs::arch::MAX_USERSPACE_VADDR_SPEC;
use crate::specs::mm::page_table::Mapping;
use crate::specs::mm::virt_mem_newer::{MemView, VirtPtr};

verus! {

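// Fallible reader-to-writer copy stub: the `verus_spec` contract re-establishes the
// reader/writer invariants and owner parameters, and the body currently returns `Ok(0)`.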
#[verus_spec(r =>
    with
        Tracked(owner_r): Tracked<&mut VmIoOwner<'_>>,
        Tracked(owner_w): Tracked<&mut VmIoOwner<'_>>,
    requires
        old(reader).inv(),
        old(writer).inv(),
        old(owner_r).inv(),
        old(owner_w).inv(),
        old(owner_r).inv_with_reader(*old(reader)),
        old(owner_w).inv_with_writer(*old(writer)),
        old(owner_r).is_fallible && old(owner_w).is_fallible,
    ensures
        reader.inv(),
        writer.inv(),
        owner_r.inv(),
        owner_w.inv(),
        owner_r.inv_with_reader(*reader),
        owner_w.inv_with_writer(*writer),
        owner_r.params_eq(*old(owner_r)),
        owner_w.params_eq(*old(owner_w)),
)]
pub fn rw_fallible(reader: &mut VmReader<'_>, writer: &mut VmWriter<'_>) -> core::result::Result<
    usize,
    (Error, usize),
> {
    Ok(0)
}

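/// Volatile, byte-wise copy of `len` bytes from `src` to `dst`; the precondition
/// keeps both ranges inside the kernel address space.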
#[inline]
#[verifier::external_body]
#[verus_spec(
    requires
        KERNEL_BASE_VADDR() <= dst && dst + len <= KERNEL_END_VADDR() &&
        KERNEL_BASE_VADDR() <= src && src + len <= KERNEL_END_VADDR()
)]
unsafe fn memcpy(dst: usize, src: usize, len: usize) {
    core::intrinsics::volatile_copy_memory(dst as *mut u8, src as *const u8, len);
}

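/// A reader over a range of virtual memory: `cursor` is the next address to read
/// from and `end` is the exclusive upper bound of the readable range.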
pub struct VmReader<'a> {
    pub id: Ghost<nat>,
    pub cursor: VirtPtr,
    pub end: VirtPtr,
    pub phantom: PhantomData<&'a [u8]>,
}

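/// The tracked memory view carried by a `VmIoOwner`: exclusive access for writing
/// (`WriteView`) or shared, borrowed access for reading (`ReadView`).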
pub tracked enum VmIoMemView<'a> {
    WriteView(MemView),
    ReadView(&'a MemView),
}

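/// Tracked ownership of the virtual address range a `VmReader`/`VmWriter` works on,
/// together with an optional memory view covering that range.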
pub tracked struct VmIoOwner<'a> {
    pub id: Ghost<nat>,
    pub range: Ghost<Range<usize>>,
    pub is_fallible: bool,
    pub phantom: PhantomData<&'a [u8]>,
    pub is_kernel: bool,
    pub mem_view: Option<VmIoMemView<'a>>,
}

impl Inv for VmIoOwner<'_> {
    open spec fn inv(self) -> bool {
        &&& self.range@.start <= self.range@.end
        &&& match self.mem_view {
            Some(VmIoMemView::WriteView(mv)) => {
                &&& mv.mappings.finite()
                &&& mv.mappings_are_disjoint()
                &&& forall|va: usize|
                    self.range@.start <= va < self.range@.end ==> {
                        &&& #[trigger] mv.addr_transl(va) is Some
                    }
            },
            Some(VmIoMemView::ReadView(mv)) => {
                &&& mv.mappings.finite()
                &&& mv.mappings_are_disjoint()
                &&& forall|va: usize|
                    self.range@.start <= va < self.range@.end ==> {
                        &&& #[trigger] mv.addr_transl(va) is Some
                    }
            },
            None => true,
        }
    }
}

impl VmIoOwner<'_> {
    #[verifier::inline]
    pub open spec fn overlaps(self, other: VmIoOwner<'_>) -> bool {
        !self.disjoint(other)
    }

    #[verifier::inline]
    pub open spec fn overlaps_with_range(self, range: Range<usize>) -> bool {
        &&& self.range@.start <= range.end
        &&& range.start <= self.range@.end
    }

    #[verifier::inline]
    pub open spec fn disjoint(self, other: VmIoOwner<'_>) -> bool {
        &&& !self.overlaps_with_range(other.range@)
        &&& match (self.mem_view, other.mem_view) {
            (Some(lhs), Some(rhs)) => match (lhs, rhs) {
                (VmIoMemView::WriteView(lmv), VmIoMemView::WriteView(rmv)) => {
                    lmv.mappings.disjoint(rmv.mappings)
                },
                (VmIoMemView::WriteView(lmv), VmIoMemView::ReadView(rmv)) => {
                    lmv.mappings.disjoint(rmv.mappings)
                },
                (VmIoMemView::ReadView(lmv), VmIoMemView::WriteView(rmv)) => {
                    lmv.mappings.disjoint(rmv.mappings)
                },
                (VmIoMemView::ReadView(lmv), VmIoMemView::ReadView(rmv)) => {
                    lmv.mappings.disjoint(rmv.mappings)
                },
            },
            _ => true,
        }
    }

    #[verifier::inline]
    pub open spec fn params_eq(self, other: VmIoOwner<'_>) -> bool {
        &&& self.range@ == other.range@
        &&& self.is_fallible == other.is_fallible
    }

    pub proof fn change_fallible(tracked &mut self, tracked fallible: bool)
        requires
            old(self).inv(),
            old(self).is_fallible != fallible,
        ensures
            self.inv(),
            self.is_fallible == fallible,
    {
        self.is_fallible = fallible;
    }

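    /// Consumes `nbytes` from the front of the owned range, returning a tracked view
    /// of the consumed prefix (a split-off write view or a borrowed read view).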
    pub proof fn advance(tracked &mut self, nbytes: usize) -> (tracked res: VmIoMemView<'_>)
        requires
            old(self).inv(),
            old(self).mem_view is Some,
            nbytes <= old(self).range@.end - old(self).range@.start,
        ensures
            self.inv(),
            self.range@.start == old(self).range@.start + nbytes,
            self.range@.end == old(self).range@.end,
            self.is_fallible == old(self).is_fallible,
            self.id == old(self).id,
            self.is_kernel == old(self).is_kernel,
    {
        let start = self.range@.start;
        let old_end = self.range@.end;
        self.range = Ghost((start + nbytes) as usize..self.range@.end);
        let tracked inner = self.mem_view.tracked_take();
        let tracked ret_perm = match inner {
            VmIoMemView::WriteView(mv) => {
                let tracked (lhs, rhs) = mv.split(start, nbytes);

                assert(forall|va: usize|
                    start <= va < start + nbytes ==> mv.addr_transl(va) is Some);

                self.mem_view = Some(VmIoMemView::WriteView(rhs));

                assert(self.inv()) by {
                    assert forall|va: usize| start + nbytes <= va < old_end implies {
                        #[trigger] rhs.addr_transl(va) is Some
                    } by {
                        assert(mv.addr_transl(va) is Some) by {
                            assert(old(self).inv());
                        }

                        let old_mappings = mv.mappings.filter(
                            |m: Mapping| m.va_range.start <= va < m.va_range.end,
                        );
                        let new_mappings = rhs.mappings.filter(
                            |m: Mapping| m.va_range.start <= va < m.va_range.end,
                        );
                        assert(old_mappings.len() != 0);
                        assert(rhs.mappings =~= mv.mappings.filter(
                            |m: Mapping| m.va_range.end > start + nbytes,
                        ));
                        assert(new_mappings =~= mv.mappings.filter(
                            |m: Mapping|
                                m.va_range.start <= va < m.va_range.end && m.va_range.end > start
                                    + nbytes,
                        ));

                        assert(new_mappings.len() != 0) by {
                            broadcast use vstd::set::group_set_axioms;

                            let m = old_mappings.choose();
                            assert(start + nbytes <= va);
                            assert(m.va_range.end > va) by {
                                if m.va_range.end <= va {
                                    assert(!old_mappings.contains(m));
                                }
                            }
                            assert(m.va_range.end > start + nbytes);
                            assert(old_mappings.contains(m));
                            assert(old_mappings.subset_of(mv.mappings));
                            assert(rhs.mappings.contains(m));
                            assert(new_mappings.contains(m));
                            assert(new_mappings.len() >= 1);
                        }
                    }
                }

                VmIoMemView::WriteView(lhs)
            },
            VmIoMemView::ReadView(mv) => {
                let tracked sub_view = mv.borrow_at(start, nbytes);
                self.mem_view = Some(VmIoMemView::ReadView(mv));
                VmIoMemView::ReadView(sub_view)
            },
        };

        ret_perm
    }

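    /// Borrows the inner `MemView` when this owner holds a `ReadView`.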
    pub proof fn tracked_read_view_unwrap(tracked &self) -> (tracked r: &MemView)
        requires
            self.inv(),
            self.mem_view matches Some(VmIoMemView::ReadView(_)),
        ensures
            VmIoMemView::ReadView(r) == self.mem_view.unwrap(),
    {
        match self.mem_view {
            Some(VmIoMemView::ReadView(r)) => r,
            _ => { proof_from_false() },
        }
    }
}

impl Inv for VmWriter<'_> {
    open spec fn inv(self) -> bool {
        &&& self.cursor.inv()
        &&& self.end.inv()
        &&& self.cursor.vaddr <= self.end.vaddr
        &&& self.cursor.range@ == self.end.range@
    }
}

impl Inv for VmReader<'_> {
    open spec fn inv(self) -> bool {
        &&& self.cursor.inv()
        &&& self.end.inv()
        &&& self.cursor.vaddr <= self.end.vaddr
        &&& self.cursor.range@ == self.end.range@
    }
}

impl VmIoOwner<'_> {
    pub open spec fn inv_with_reader(self, reader: VmReader<'_>) -> bool {
        &&& self.inv()
        &&& self.range@.start == reader.cursor.vaddr
        &&& self.range@.end == reader.end.vaddr
        &&& self.id == reader.id
        &&& self.mem_view matches Some(VmIoMemView::ReadView(mv)) ==> {
            forall|va: usize|
                self.range@.start <= va < self.range@.end ==> {
                    &&& #[trigger] mv.addr_transl(va) is Some
                }
        }
    }

    pub open spec fn inv_with_writer(self, writer: VmWriter<'_>) -> bool {
        &&& self.inv()
        &&& self.range@.start == writer.cursor.vaddr
        &&& self.range@.end == writer.end.vaddr
        &&& self.id == writer.id
        &&& self.mem_view matches Some(VmIoMemView::WriteView(mv)) ==> {
            forall|va: usize|
                self.range@.start <= va < self.range@.end ==> {
                    &&& #[trigger] mv.addr_transl(va) is Some
                }
        }
    }
}

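/// A writer over a range of virtual memory: `cursor` is the next address to write
/// to and `end` is the exclusive upper bound of the writable range.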
pub struct VmWriter<'a> {
    pub id: Ghost<nat>,
    pub cursor: VirtPtr,
    pub end: VirtPtr,
    pub phantom: PhantomData<&'a [u8]>,
}

#[verus_verify]
impl<'a> VmWriter<'a> {
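    /// Builds an infallible writer over the `len`-byte kernel-space range starting at
    /// `ptr`, returning the matching tracked `VmIoOwner` through the spec.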
    #[verus_spec(r =>
        with
            Ghost(id): Ghost<nat>,
            Tracked(fallible): Tracked<bool>,
            -> owner: Tracked<VmIoOwner<'a>>,
        requires
            !fallible,
            ptr.inv(),
            ptr.range@.start == ptr.vaddr,
            len == ptr.range@.end - ptr.range@.start,
            len == 0 || KERNEL_BASE_VADDR() <= ptr.vaddr,
            len == 0 || ptr.vaddr + len <= KERNEL_END_VADDR(),
        ensures
            owner@.inv(),
            owner@.inv_with_writer(r),
            owner@.is_fallible == fallible,
            owner@.id == id,
            owner@.is_kernel,
            owner@.mem_view is None,
            r.cursor == ptr,
            r.end.vaddr == ptr.vaddr + len,
            r.cursor.range@ == ptr.range@,
            r.end.range@ == ptr.range@,
    )]
    pub unsafe fn from_kernel_space(ptr: VirtPtr, len: usize) -> Self {
        let ghost range = ptr.range@;
        let tracked owner = VmIoOwner {
            id: Ghost(id),
            range: Ghost(range),
            is_fallible: fallible,
            phantom: PhantomData,
            is_kernel: true,
            mem_view: None,
        };

        let ghost range = ptr.vaddr..(ptr.vaddr + len) as usize;
        let end = VirtPtr { vaddr: ptr.vaddr + len, range: Ghost(range) };

        proof_with!(|= Tracked(owner));
        Self { id: Ghost(id), cursor: ptr, end, phantom: PhantomData }
    }

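    /// Wraps the byte representation of a `Pod` value in a `VmWriter`, returning
    /// `Error::IoError` when the backing address range is not in kernel space.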
    #[verus_spec(r =>
        with
            Ghost(id): Ghost<nat>,
            -> owner: Tracked<Result<VmIoOwner<'a>>>,
        ensures
            r is Ok <==> owner@ is Ok,
            r matches Ok(r) ==> {
                &&& owner@ matches Ok(owner) ==> {
                    &&& r.inv()
                    &&& r.avail_spec() == core::mem::size_of::<T>()
                    &&& owner.inv()
                    &&& owner.inv_with_writer(r)
                }
            }
    )]
    pub fn from_pod<T: Pod>(mut val: T) -> Result<VmWriter<'a>> {
        proof_decl! {
            let tracked mut perm;
        }

        let (pnt, len) = val.as_bytes_mut();

        if len != 0 && (pnt < KERNEL_BASE_VADDR() || len >= KERNEL_END_VADDR() || pnt
            > KERNEL_END_VADDR() - len) || pnt == 0 {
            proof_with!(|= Tracked(Err(Error::IoError)));
            Err(Error::IoError)
        } else {
            let ghost range = pnt..(pnt + len) as usize;
            let vptr = VirtPtr { vaddr: pnt as usize, range: Ghost(range) };
            let r = unsafe {
                #[verus_spec(with Ghost(id), Tracked(false) => Tracked(perm))]
                VmWriter::from_kernel_space(vptr, len)
            };

            proof_with!(|= Tracked(Ok(perm)));
            Ok(r)
        }
    }
}

impl Clone for VmReader<'_> {
    fn clone(&self) -> Self {
        Self { id: self.id, cursor: self.cursor, end: self.end, phantom: PhantomData }
    }
}

#[verus_verify]
impl<'a> VmReader<'a> {
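    /// Builds a reader over a user-space range described by `ptr`; only the
    /// tracked-owner contract is given here and the body is unimplemented.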
    #[verifier::external_body]
    #[verus_spec(r =>
        with
            Ghost(id): Ghost<nat>,
            -> owner: Tracked<VmIoOwner<'a>>,
        requires
            ptr.inv(),
        ensures
            r.inv(),
            owner@.id == id,
            owner@.inv(),
            owner@.inv_with_reader(r),
            owner@.range == ptr.range@,
            owner@.mem_view is None,
            !owner@.is_kernel,
            !owner@.is_fallible,
    )]
    pub unsafe fn from_user_space(ptr: VirtPtr) -> Self {
        unimplemented!()
    }

    #[verus_spec(r =>
        with
            Ghost(id): Ghost<nat>,
            -> owner: Tracked<VmIoOwner<'a>>,
        requires
            ptr.inv(),
            ptr.range@.start == ptr.vaddr,
            len == ptr.range@.end - ptr.range@.start,
            len == 0 || KERNEL_BASE_VADDR() <= ptr.vaddr,
            len == 0 || ptr.vaddr + len <= KERNEL_END_VADDR(),
        ensures
            owner@.inv(),
            owner@.inv_with_reader(r),
            r.cursor.vaddr == ptr.vaddr,
            r.end.vaddr == ptr.vaddr + len,
            r.cursor == ptr,
            r.end.range@ == ptr.range@,
            owner@.is_kernel,
            owner@.id == id,
    )]
    pub unsafe fn from_kernel_space(ptr: VirtPtr, len: usize) -> Self {
        let tracked owner = VmIoOwner {
            id: Ghost(id),
            range: Ghost(ptr.vaddr..(ptr.vaddr + len) as usize),
            is_fallible: false,
            phantom: PhantomData,
            is_kernel: true,
            mem_view: None,
        };

        let ghost range = ptr.vaddr..(ptr.vaddr + len) as usize;
        let end = VirtPtr { vaddr: ptr.vaddr + len, range: Ghost(range) };

        proof_with!(|= Tracked(owner));
        Self { id: Ghost(id), cursor: ptr, end, phantom: PhantomData }
    }

    #[verus_spec(r =>
        with
            Ghost(id): Ghost<nat>,
            -> owner: Tracked<Result<VmIoOwner<'a>>>,
        ensures
            r is Ok <==> owner@ is Ok,
            r matches Ok(r) ==> {
                &&& owner@ matches Ok(owner) ==> {
                    &&& r.inv()
                    &&& r.remain_spec() == core::mem::size_of::<T>()
                    &&& owner.inv()
                    &&& owner.inv_with_reader(r)
                }
            }
    )]
    pub fn from_pod<T: Pod>(val: &mut T) -> Result<VmReader<'a>> {
        proof_decl! {
            let tracked mut perm;
        }

        let (pnt, len) = val.as_bytes_mut();

        if len != 0 && (pnt < KERNEL_BASE_VADDR() || len >= KERNEL_END_VADDR() || pnt
            > KERNEL_END_VADDR() - len) || pnt == 0 {
            proof_with!(|= Tracked(Err(Error::IoError)));
            Err(Error::IoError)
        } else {
            let ghost range = pnt..(pnt + len) as usize;
            let vptr = VirtPtr { vaddr: pnt, range: Ghost(range) };
            let r = unsafe {
                #[verus_spec(with Ghost(id) => Tracked(perm))]
                VmReader::from_kernel_space(vptr, len)
            };

            proof {
                assert(r.inv());
                assert(perm.inv());
            }

            proof_with!(|= Tracked(Ok(perm)));
            Ok(r)
        }
    }
}

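// Kernel-space byte slices convert into readers and writers via `TryFrom`.
// Illustrative sketch (assumes `buf` is a kernel-space buffer):
//
//     let reader = VmReader::try_from(&buf[..])?;
//     let writer = VmWriter::try_from(&buf[..])?;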
#[verus_verify]
impl<'a> TryFrom<&'a [u8]> for VmReader<'a> {
    type Error = Error;

    #[verus_spec()]
    fn try_from(slice: &'a [u8]) -> Result<Self> {
        proof_decl! {
            let tracked mut perm;
        }

        let addr = slice.as_ptr() as usize;

        if slice.len() != 0 && (addr < KERNEL_BASE_VADDR() || slice.len() >= KERNEL_END_VADDR()
            || addr > KERNEL_END_VADDR() - slice.len()) || addr == 0 {
            return Err(Error::IoError);
        }
        let ghost range = addr..(addr + slice.len()) as usize;
        let vptr = VirtPtr { vaddr: addr, range: Ghost(range) };

        Ok(
            unsafe {
                #[verus_spec(with Ghost(arbitrary()) => Tracked(perm))]
                Self::from_kernel_space(vptr, slice.len())
            },
        )
    }
}

impl<'a> TryFromSpecImpl<&'a [u8]> for VmReader<'a> {
    open spec fn obeys_try_from_spec() -> bool {
        true
    }

    open spec fn try_from_spec(slice: &'a [u8]) -> Result<Self> {
        let addr = slice.as_ptr() as usize;
        let len = slice.len();

        if len != 0 && (addr < KERNEL_BASE_VADDR() || len >= KERNEL_END_VADDR() || addr
            > KERNEL_END_VADDR() - slice.len()) || addr == 0 {
            Err(Error::IoError)
        } else {
            Ok(
                Self {
                    id: Ghost(arbitrary()),
                    cursor: VirtPtr { vaddr: addr, range: Ghost(addr..(addr + len) as usize) },
                    end: VirtPtr {
                        vaddr: (addr + len) as usize,
                        range: Ghost(addr..(addr + len) as usize),
                    },
                    phantom: PhantomData,
                },
            )
        }
    }
}

#[verus_verify]
impl<'a> TryFrom<&'a [u8]> for VmWriter<'a> {
    type Error = Error;

    #[verus_spec()]
    fn try_from(slice: &'a [u8]) -> Result<Self> {
        proof_decl! {
            let tracked mut perm;
        }

        let addr = slice.as_ptr() as usize;

        if slice.len() != 0 && (addr < KERNEL_BASE_VADDR() || slice.len() >= KERNEL_END_VADDR()
            || addr > KERNEL_END_VADDR() - slice.len()) || addr == 0 {
            return Err(Error::IoError);
        }
        let ghost range = addr..(addr + slice.len()) as usize;
        let vptr = VirtPtr { vaddr: addr, range: Ghost(range) };

        proof {
            assert(vptr.inv());
        }

        Ok(
            unsafe {
                #[verus_spec(with Ghost(arbitrary()), Tracked(false) => Tracked(perm))]
                Self::from_kernel_space(vptr, slice.len())
            },
        )
    }
}

impl<'a> TryFromSpecImpl<&'a [u8]> for VmWriter<'a> {
    open spec fn obeys_try_from_spec() -> bool {
        true
    }

    open spec fn try_from_spec(slice: &'a [u8]) -> Result<Self> {
        let addr = slice.as_ptr() as usize;
        let len = slice.len();

        if len != 0 && (addr < KERNEL_BASE_VADDR() || len >= KERNEL_END_VADDR() || addr
            > KERNEL_END_VADDR() - slice.len()) || addr == 0 {
            Err(Error::IoError)
        } else {
            Ok(
                Self {
                    id: Ghost(arbitrary()),
                    cursor: VirtPtr { vaddr: addr, range: Ghost(addr..(addr + len) as usize) },
                    end: VirtPtr {
                        vaddr: (addr + len) as usize,
                        range: Ghost(addr..(addr + len) as usize),
                    },
                    phantom: PhantomData,
                },
            )
        }
    }
}

type Result<T> = core::result::Result<T, Error>;

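/// Reads and writes of `Pod` values and slices at a byte offset into an abstract
/// memory object, with an ownership parameter `P` threaded through the contracts.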
pub trait VmIo<P: Sized>: Send + Sync + Sized {
    spec fn obeys_vmio_read_requires() -> bool;

    spec fn obeys_vmio_read_ensures() -> bool;

    spec fn obeys_vmio_write_requires() -> bool;

    spec fn obeys_vmio_write_ensures() -> bool;

    spec fn vmio_read_requires(self, owner: P, offset: usize) -> bool;

    spec fn vmio_read_ensures(self, owner: P, offset: usize, new_owner: P, r: Result<()>) -> bool;

    spec fn vmio_write_requires(self, owner: P, offset: usize) -> bool;

    spec fn vmio_write_ensures(self, owner: P, offset: usize, new_owner: P, r: Result<()>) -> bool;

    fn read_slice<T: Pod, const N: usize>(
        &self,
        offset: usize,
        slice: ArrayPtr<T, N>,
        Tracked(slice_owner): Tracked<&mut PointsToArray<u8, N>>,
        Tracked(owner): Tracked<&mut P>,
    ) -> (r: Result<()>);

    fn write_bytes<const N: usize>(
        &self,
        offset: usize,
        bytes: ArrayPtr<u8, N>,
        Tracked(bytes_owner): Tracked<&mut PointsToArray<u8, N>>,
        Tracked(owner): Tracked<&mut P>,
    ) -> (r: Result<()>)
    ;

    fn write_val<T: Pod>(&self, offset: usize, val: T, Tracked(owner): Tracked<&mut P>) -> (r:
        Result<()>)
    ;

    fn write_slice<T: Pod, const N: usize>(
        &self,
        offset: usize,
        slice: ArrayPtr<T, N>,
        Tracked(slice_owner): Tracked<&mut PointsToArray<T, N>>,
        Tracked(owner): Tracked<&mut P>,
    ) -> (r: Result<()>)
        requires
            Self::obeys_vmio_write_requires() ==> Self::vmio_write_requires(
                *self,
                *old(owner),
                offset,
            ),
        ensures
            Self::obeys_vmio_write_ensures() ==> Self::vmio_write_ensures(
                *self,
                *old(owner),
                offset,
                *owner,
                r,
            ),
    ;
}

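/// Single-shot (`PodOnce`) reads and writes at a byte offset into an abstract
/// memory object.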
pub trait VmIoOnce: Sized {
    spec fn obeys_vmio_once_read_requires() -> bool;

    spec fn obeys_vmio_once_write_requires() -> bool;

    spec fn obeys_vmio_once_read_ensures() -> bool;

    spec fn obeys_vmio_once_write_ensures() -> bool;

    fn read_once<T: PodOnce>(&self, offset: usize) -> Result<T>;

    fn write_once<T: PodOnce>(&self, offset: usize, new_val: &T) -> Result<()>;
}

#[verus_verify]
impl VmReader<'_> {
    pub open spec fn remain_spec(&self) -> usize {
        (self.end.vaddr - self.cursor.vaddr) as usize
    }

    #[inline]
    #[verus_spec(r =>
        requires
            self.inv(),
        ensures
            r == self.remain_spec(),
    )]
    #[verifier::when_used_as_spec(remain_spec)]
    pub fn remain(&self) -> usize {
        self.end.vaddr - self.cursor.vaddr
    }

    #[inline]
    #[verus_spec(
        requires
            old(self).inv(),
            len <= old(self).remain_spec(),
        ensures
            self.inv(),
            self.cursor.vaddr == old(self).cursor.vaddr + len,
            self.remain_spec() == old(self).remain_spec() - len,
            self.id == old(self).id,
            self.end == old(self).end,
    )]
    pub fn advance(&mut self, len: usize) {
        self.cursor.vaddr = self.cursor.vaddr + len;
    }

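    /// Advances this reader and `writer` in lockstep by `min(self.remain(), writer.avail())`
    /// bytes, updating both tracked owners, and returns the advanced length.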
    #[verus_spec(r =>
        with
            Tracked(owner_r): Tracked<&mut VmIoOwner<'_>>,
            Tracked(owner_w): Tracked<&mut VmIoOwner<'_>>,
        requires
            old(self).inv(),
            old(writer).inv(),
            old(owner_r).inv(),
            old(owner_w).inv(),
            old(owner_r).inv_with_reader(*old(self)),
            old(owner_w).inv_with_writer(*old(writer)),
            old(owner_r).mem_view is Some,
            old(owner_w).mem_view matches Some(VmIoMemView::WriteView(_)),
            old(writer).cursor.range@.start >= old(self).cursor.range@.end
                || old(self).cursor.range@.start >= old(writer).cursor.range@.end,
        ensures
            self.inv(),
            writer.inv(),
            owner_r.inv(),
            owner_w.inv(),
            owner_r.inv_with_reader(*self),
            owner_w.inv_with_writer(*writer),
            r == vstd::math::min(old(self).remain_spec() as int, old(writer).avail_spec() as int),
            self.remain_spec() == old(self).remain_spec() - r as usize,
            self.cursor.vaddr == old(self).cursor.vaddr + r as usize,
            writer.avail_spec() == old(writer).avail_spec() - r as usize,
            writer.cursor.vaddr == old(writer).cursor.vaddr + r as usize,
    )]
    pub fn read(&mut self, writer: &mut VmWriter) -> usize {
        let mut copy_len = if self.remain() < writer.avail() {
            self.remain()
        } else {
            writer.avail()
        };

        if copy_len == 0 {
            return 0;
        }
        let tracked mut mv = match owner_w.mem_view.tracked_take() {
            VmIoMemView::WriteView(mv) => mv,
            _ => { proof_from_false() },
        };

        self.advance(copy_len);
        writer.advance(copy_len);

        proof {
            owner_w.mem_view = Some(VmIoMemView::WriteView(mv));
            owner_w.advance(copy_len);
            owner_r.advance(copy_len);
        }

        copy_len
    }

    #[verifier::external_body]
    #[verus_spec(r =>
        with
            Ghost(id): Ghost<nat>,
            Tracked(owner): Tracked<&mut VmIoOwner<'_>>,
        requires
            old(self).inv(),
            old(owner).inv(),
            old(owner).inv_with_reader(*old(self)),
            old(owner).mem_view is Some,
        ensures
            self.inv(),
            owner.inv(),
            owner.inv_with_reader(*self),
            owner.params_eq(*old(owner)),
            match r {
                Ok(_) => {
                    &&& self.remain_spec() == old(self).remain_spec() - core::mem::size_of::<T>()
                    &&& self.cursor.vaddr == old(self).cursor.vaddr + core::mem::size_of::<T>()
                },
                Err(_) => {
                    *old(self) == *self
                },
            }
    )]
    pub fn read_val<T: Pod>(&mut self) -> Result<T> {
        if self.remain() < core::mem::size_of::<T>() {
            return Err(Error::InvalidArgs);
        }
        let mut v = T::new_uninit();
        proof_with!(Ghost(id) => Tracked(owner_w));
        let mut writer = VmWriter::from_pod(v)?;

        let tracked mut owner_w = owner_w.tracked_unwrap();

        proof_with!(Tracked(owner), Tracked(&mut owner_w));
        self.read(&mut writer);

        Ok(v)
    }

    #[verifier::external_body]
    #[verus_spec(
        requires
            self.inv(),
            core::mem::size_of::<T>() <= self.remain_spec(),
    )]
    fn read_once_inner<T: PodOnce>(&self) -> T {
        let pnt = self.cursor.vaddr as *const T;
        unsafe { pnt.read_volatile() }
    }

    #[verus_spec(r =>
        with
            Tracked(owner): Tracked<&mut VmIoOwner<'_>>,
        requires
            old(self).inv(),
            old(owner).mem_view is Some,
            old(owner).inv(),
            old(owner).inv_with_reader(*old(self)),
        ensures
            self.inv(),
            owner.inv(),
            owner.inv_with_reader(*self),
            match r {
                Ok(_) => {
                    &&& self.remain_spec() == old(self).remain_spec() - core::mem::size_of::<T>()
                    &&& self.cursor.vaddr == old(self).cursor.vaddr + core::mem::size_of::<T>()
                },
                Err(_) => {
                    *old(self) == *self
                },
            }
    )]
    pub fn read_val_once<T: PodOnce>(&mut self) -> Result<T> {
        if core::mem::size_of::<T>() > self.remain() {
            return Err(Error::InvalidArgs);
        }
        let v = self.read_once_inner::<T>();
        self.advance(core::mem::size_of::<T>());

        proof {
            owner.advance(core::mem::size_of::<T>());
        }

        Ok(v)
    }
}

#[verus_verify]
impl<'a> VmWriter<'a> {
    pub open spec fn avail_spec(&self) -> usize {
        (self.end.vaddr - self.cursor.vaddr) as usize
    }

    #[verifier::external_body]
    #[verus_spec(r =>
        with
            Ghost(id): Ghost<nat>,
            -> owner: Tracked<VmIoOwner<'a>>,
        requires
            ptr.inv(),
        ensures
            r.inv(),
            owner@.id == id,
            owner@.inv(),
            owner@.inv_with_writer(r),
            owner@.range == ptr.range@,
            owner@.mem_view is None,
            !owner@.is_kernel,
            !owner@.is_fallible,
    )]
    pub unsafe fn from_user_space(ptr: VirtPtr) -> Self {
        unimplemented!()
    }

    #[inline]
    #[verus_spec(r =>
        requires
            self.inv(),
        ensures
            r == self.avail_spec(),
    )]
    #[verifier::when_used_as_spec(avail_spec)]
    pub fn avail(&self) -> usize {
        self.end.vaddr - self.cursor.vaddr
    }

    #[inline]
    #[verus_spec(
        requires
            old(self).inv(),
            len <= old(self).avail_spec(),
        ensures
            self.inv(),
            self.avail_spec() == old(self).avail_spec() - len,
            self.cursor.vaddr == old(self).cursor.vaddr + len,
            self.cursor.range@ == old(self).cursor.range@,
            self.id == old(self).id,
            self.end == old(self).end,
    )]
    pub fn advance(&mut self, len: usize) {
        self.cursor.vaddr = self.cursor.vaddr + len;
    }

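    /// Mirror of `VmReader::read`: delegates to `reader.read(self)`, advancing this
    /// writer and `reader` by `min(self.avail(), reader.remain())` bytes.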
    #[inline]
    #[verus_spec(r =>
        with
            Tracked(owner_w): Tracked<&mut VmIoOwner<'_>>,
            Tracked(owner_r): Tracked<&mut VmIoOwner<'_>>,
        requires
            old(self).inv(),
            old(reader).inv(),
            old(owner_w).inv(),
            old(owner_r).inv(),
            old(owner_w).inv_with_writer(*old(self)),
            old(owner_r).inv_with_reader(*old(reader)),
            old(owner_r).mem_view is Some,
            old(owner_w).mem_view matches Some(VmIoMemView::WriteView(_)),
            old(self).cursor.range@.start >= old(reader).cursor.range@.end
                || old(reader).cursor.range@.start >= old(self).cursor.range@.end,
        ensures
            self.inv(),
            reader.inv(),
            owner_w.inv(),
            owner_r.inv(),
            owner_w.inv_with_writer(*self),
            owner_r.inv_with_reader(*reader),
            r == vstd::math::min(old(self).avail_spec() as int, old(reader).remain_spec() as int),
            self.avail_spec() == old(self).avail_spec() - r as usize,
            self.cursor.vaddr == old(self).cursor.vaddr + r as usize,
            reader.remain_spec() == old(reader).remain_spec() - r as usize,
            reader.cursor.vaddr == old(reader).cursor.vaddr + r as usize,
    )]
    pub fn write(&mut self, reader: &mut VmReader) -> usize {
        proof_with!(Tracked(owner_r), Tracked(owner_w));
        reader.read(self)
    }

    #[verifier::external_body]
    #[verus_spec(r =>
        with
            Ghost(id): Ghost<nat>,
            Tracked(owner_w): Tracked<&mut VmIoOwner<'_>>,
            Tracked(memview_r): Tracked<&MemView>,
        requires
            old(self).inv(),
            old(owner_w).inv(),
            old(owner_w).inv_with_writer(*old(self)),
            old(owner_w).mem_view is Some,
        ensures
            self.inv(),
            owner_w.inv(),
            owner_w.inv_with_writer(*self),
            owner_w.params_eq(*old(owner_w)),
            match r {
                Ok(_) => {
                    &&& self.avail_spec() == old(self).avail_spec() - core::mem::size_of::<T>()
                    &&& self.cursor.vaddr == old(self).cursor.vaddr + core::mem::size_of::<T>()
                },
                Err(_) => {
                    *old(self) == *self
                },
            }
    )]
    pub fn write_val<T: Pod>(&mut self, val: &mut T) -> Result<()> {
        if self.avail() < core::mem::size_of::<T>() {
            return Err(Error::InvalidArgs);
        }
        proof_with!(Ghost(id) => Tracked(owner_r));
        let mut reader = VmReader::from_pod(val)?;

        let tracked mut owner_r = owner_r.tracked_unwrap();

        proof_with!(Tracked(owner_w), Tracked(&mut owner_r));
        self.write(&mut reader);

        Ok(())
    }

    #[verifier::external_body]
    #[verus_spec(
        requires
            self.inv(),
            core::mem::size_of::<T>() <= self.avail_spec(),
    )]
    fn write_once_inner<T: PodOnce>(&self, new_val: &mut T) {
        let cursor = self.cursor.vaddr as *mut T;
        unsafe { cursor.write_volatile(*new_val) };
    }

    #[verus_spec(r =>
        with
            Tracked(owner_w): Tracked<&mut VmIoOwner<'_>>,
        requires
            old(self).inv(),
            old(owner_w).mem_view is Some,
            old(owner_w).inv(),
            old(owner_w).inv_with_writer(*old(self)),
        ensures
            self.inv(),
            owner_w.inv(),
            owner_w.inv_with_writer(*self),
            match r {
                Ok(_) => {
                    &&& self.avail_spec() == old(self).avail_spec() - core::mem::size_of::<T>()
                    &&& self.cursor.vaddr == old(self).cursor.vaddr + core::mem::size_of::<T>()
                },
                Err(_) => {
                    *old(self) == *self
                },
            }
    )]
    pub fn write_once<T: PodOnce>(&mut self, new_val: &mut T) -> Result<()> {
        if core::mem::size_of::<T>() > self.avail() {
            return Err(Error::InvalidArgs);
        }
        self.write_once_inner::<T>(new_val);
        self.advance(core::mem::size_of::<T>());

        proof {
            owner_w.advance(core::mem::size_of::<T>());
        }

        Ok(())
    }
}

}