// ostd/specs/mm/page_table/mod.rs
#![allow(hidden_glob_reexports)]
2
3pub mod cursor;
4pub mod node;
5mod owners;
6mod view;
7
8pub use cursor::*;
9pub use node::*;
10pub use owners::*;
11pub use view::*;
12
13use vstd::arithmetic::power2::pow2;
14use vstd::prelude::*;
15use vstd_extra::arithmetic::*;
16use vstd_extra::ghost_tree::TreePath;
17use vstd_extra::ownership::*;
18
19use crate::mm::page_table::page_size;
20use crate::mm::page_table::PageTableConfig;
21use crate::mm::{PagingLevel, Vaddr};
22use crate::specs::arch::mm::{NR_ENTRIES, NR_LEVELS, PAGE_SIZE};
23
24use align_ext::AlignExt;
25
26verus! {
27
28pub struct AbstractVaddr {
34 pub offset: int,
35 pub index: Map<int, int>,
36}
37
38impl Inv for AbstractVaddr {
39 open spec fn inv(self) -> bool {
40 &&& 0 <= self.offset < PAGE_SIZE
41 &&& forall |i: int|
42 #![trigger self.index.contains_key(i)]
43 0 <= i < NR_LEVELS ==> {
44 &&& self.index.contains_key(i)
45 &&& 0 <= self.index[i]
46 &&& self.index[i] < NR_ENTRIES
47 }
48 }
49}
50
51impl AbstractVaddr {
52 pub open spec fn from_vaddr(va: Vaddr) -> Self {
56 AbstractVaddr {
57 offset: (va % PAGE_SIZE) as int,
58 index: Map::new(
59 |i: int| 0 <= i < NR_LEVELS,
60 |i: int| ((va / pow2((12 + 9 * i) as nat) as usize) % NR_ENTRIES) as int,
61 ),
62 }
63 }
64
65 pub proof fn from_vaddr_wf(va: Vaddr)
66 ensures
67 AbstractVaddr::from_vaddr(va).inv(),
68 {
69 let abs = AbstractVaddr::from_vaddr(va);
70 assert(0 <= abs.offset < PAGE_SIZE);
71 assert forall |i: int|
72 #![trigger abs.index.contains_key(i)]
73 0 <= i < NR_LEVELS implies {
74 &&& abs.index.contains_key(i)
75 &&& 0 <= abs.index[i]
76 &&& abs.index[i] < NR_ENTRIES
77 } by {
78 };
80 }
81
82 pub open spec fn to_vaddr(self) -> Vaddr {
85 (self.offset + self.to_vaddr_indices(0)) as Vaddr
86 }
87
88 pub open spec fn to_vaddr_indices(self, start: int) -> int
90 decreases NR_LEVELS - start when start <= NR_LEVELS
91 {
92 if start >= NR_LEVELS {
93 0
94 } else {
95 self.index[start] * pow2((12 + 9 * start) as nat) as int
96 + self.to_vaddr_indices(start + 1)
97 }
98 }
99
100 pub open spec fn reflect(self, va: Vaddr) -> bool {
102 self == Self::from_vaddr(va)
103 }
104
105 pub broadcast proof fn reflect_prop(self, va: Vaddr)
108 requires
109 self.inv(),
110 self.reflect(va),
111 ensures
112 #[trigger] self.to_vaddr() == va,
113 #[trigger] Self::from_vaddr(va) == self,
114 {
115 Self::from_vaddr_to_vaddr_roundtrip(va);
119 }
120
121 pub proof fn from_vaddr_to_vaddr_roundtrip(va: Vaddr)
123 ensures
124 Self::from_vaddr(va).to_vaddr() == va,
125 {
126 admit()
131 }
132
133 pub broadcast proof fn reflect_from_vaddr(va: Vaddr)
135 ensures
136 #[trigger] Self::from_vaddr(va).reflect(va),
137 #[trigger] Self::from_vaddr(va).inv(),
138 {
139 Self::from_vaddr_wf(va);
140 }
141
142 pub broadcast proof fn reflect_to_vaddr(self)
144 requires
145 self.inv(),
146 ensures
147 #[trigger] self.reflect(self.to_vaddr()),
148 {
149 Self::to_vaddr_from_vaddr_roundtrip(self);
150 }
151
152 pub proof fn to_vaddr_from_vaddr_roundtrip(abs: Self)
154 requires
155 abs.inv(),
156 ensures
157 Self::from_vaddr(abs.to_vaddr()) == abs,
158 {
159 admit()
161 }
162
163 pub broadcast proof fn reflect_eq(self, other: Self, va: Vaddr)
165 requires
166 self.reflect(va),
167 other.reflect(va),
168 ensures
169 #![auto] (self == other),
170 { }
171
172 pub open spec fn align_down(self, level: int) -> Self
173 decreases level when level >= 1
174 {
175 if level == 1 {
176 AbstractVaddr {
177 offset: 0,
178 index: self.index,
179 }
180 } else {
181 let tmp = self.align_down(level - 1);
182 AbstractVaddr {
183 index: tmp.index.insert(level - 2, 0),
184 ..tmp
185 }
186 }
187 }
188
189 pub proof fn align_down_inv(self, level: int)
190 requires
191 1 <= level < NR_LEVELS,
192 self.inv(),
193 ensures
194 self.align_down(level).inv(),
195 forall |i: int| level <= i < NR_LEVELS ==> #[trigger] self.index[i - 1] == self.align_down(level).index[i - 1],
196 decreases level
197 {
198 if level == 1 {
199 assert(self.inv());
200 } else {
201 let tmp = self.align_down(level - 1);
202 self.align_down_inv(level - 1);
203 }
204 }
205
206 pub proof fn align_down_to_vaddr_eq_if_upper_indices_eq(self, other: Self, level: int)
211 requires
212 1 <= level <= NR_LEVELS,
213 self.inv(),
214 other.inv(),
215 forall |i: int| level - 1 <= i < NR_LEVELS ==> self.index[i] == other.index[i],
217 ensures
218 self.align_down(level).to_vaddr() == other.align_down(level).to_vaddr(),
219 decreases level
220 {
221 self.align_down_concrete(level);
232 other.align_down_concrete(level);
233
234 admit()
258 }
259
260 pub axiom fn align_down_concrete(self, level: int)
261 requires
262 1 <= level <= NR_LEVELS,
263 ensures
264 self.align_down(level).reflect(nat_align_down(self.to_vaddr() as nat, page_size(level as PagingLevel) as nat) as Vaddr);
265
266 pub open spec fn align_up(self, level: int) -> Self {
267 let lower_aligned = self.align_down(level - 1);
268 lower_aligned.next_index(level)
269 }
270
271 pub axiom fn align_up_concrete(self, level: int)
272 requires
273 1 <= level <= NR_LEVELS,
274 ensures
275 self.align_up(level).reflect(nat_align_up(self.to_vaddr() as nat, page_size(level as PagingLevel) as nat) as Vaddr);
276
277 pub axiom fn align_diff(self, level: int)
278 requires
279 1 <= level <= NR_LEVELS,
280 ensures
281 nat_align_up(self.to_vaddr() as nat, page_size(level as PagingLevel) as nat) ==
282 nat_align_down(self.to_vaddr() as nat, page_size(level as PagingLevel) as nat) + page_size(level as PagingLevel),
283 nat_align_up(self.to_vaddr() as nat, page_size(level as PagingLevel) as nat) < usize::MAX;
284
285 pub open spec fn next_index(self, level: int) -> Self
286 decreases NR_LEVELS - level when 1 <= level <= NR_LEVELS
287 {
288 let index = self.index[level - 1];
289 let next_index = index + 1;
290 if next_index == NR_ENTRIES && level < NR_LEVELS {
291 let next_va = Self {
292 offset: self.offset,
293 index: self.index.insert(level - 1, 0),
294 };
295 next_va.next_index(level + 1)
296 } else if next_index == NR_ENTRIES && level == NR_LEVELS {
297 Self {
298 offset: self.offset,
299 index: self.index.insert(level - 1, 0),
300 }
301 } else {
302 Self {
303 offset: self.offset,
304 index: self.index.insert(level - 1, next_index),
305 }
306 }
307 }
308
309 pub open spec fn wrapped(self, start_level: int, level: int) -> bool
310 decreases NR_LEVELS - level when 1 <= start_level <= level <= NR_LEVELS
311 {
312 &&& self.next_index(start_level).index[level - 1] == 0 ==> {
313 &&& self.index[level - 1] + 1 == NR_ENTRIES
314 &&& if level < NR_LEVELS {
315 self.wrapped(start_level, level + 1)
316 } else {
317 true
318 }
319 }
320 &&& self.next_index(start_level).index[level - 1] != 0 ==>
321 self.index[level - 1] + 1 < NR_ENTRIES
322 }
323
324 pub proof fn use_wrapped(self, start_level: int, level: int)
325 requires
326 1 <= start_level <= level < NR_LEVELS,
327 self.wrapped(start_level, level),
328 self.next_index(start_level).index[level - 1] == 0
329 ensures
330 self.index[level - 1] + 1 == NR_ENTRIES
331 { }
332
333 pub proof fn wrapped_unwrap(self, start_level: int, level: int)
334 requires
335 1 <= start_level <= level < NR_LEVELS,
336 self.wrapped(start_level, level),
337 self.next_index(start_level).index[level - 1] == 0,
338 ensures
339 self.wrapped(start_level, level + 1)
340 { }
341
342 pub proof fn next_index_wrap_condition(self, level: int)
343 requires
344 self.inv(),
345 1 <= level <= NR_LEVELS,
346 ensures
347 self.wrapped(level, level)
348 { admit() }
349
350 pub open spec fn compute_vaddr(self) -> Vaddr {
358 self.rec_compute_vaddr(0)
359 }
360
361 pub open spec fn rec_compute_vaddr(self, i: int) -> Vaddr
363 decreases NR_LEVELS - i when 0 <= i <= NR_LEVELS
364 {
365 if i >= NR_LEVELS {
366 self.offset as Vaddr
367 } else {
368 let shift = page_size((i + 1) as PagingLevel);
369 (self.index[i] * shift + self.rec_compute_vaddr(i + 1)) as Vaddr
370 }
371 }
372
373 pub open spec fn to_path(self, level: int) -> TreePath<NR_ENTRIES>
383 recommends 0 <= level < NR_LEVELS
384 {
385 TreePath(self.rec_to_path(NR_LEVELS - 1, level))
386 }
387
388 pub open spec fn rec_to_path(self, abstract_level: int, bottom_level: int) -> Seq<usize>
392 decreases abstract_level - bottom_level when bottom_level <= abstract_level
393 {
394 if abstract_level < bottom_level {
395 seq![]
396 } else if abstract_level == bottom_level {
397 seq![self.index[abstract_level] as usize]
399 } else {
400 self.rec_to_path(abstract_level - 1, bottom_level).push(self.index[abstract_level] as usize)
402 }
403 }
404
405 pub proof fn to_path_vaddr(self, level: int)
407 requires
408 self.inv(),
409 0 <= level < NR_LEVELS,
410 ensures
411 vaddr(self.to_path(level)) == self.align_down(level + 1).compute_vaddr(),
412 {
413 admit() }
415
416 pub axiom fn to_vaddr_is_compute_vaddr(self)
418 requires
419 self.inv(),
420 ensures
421 self.to_vaddr() == self.compute_vaddr();
422
423 pub proof fn index_increment_adds_page_size(self, level: int)
425 requires
426 self.inv(),
427 1 <= level <= NR_LEVELS,
428 self.index[level - 1] + 1 < NR_ENTRIES,
429 ensures
430 (Self {
431 index: self.index.insert(level - 1, self.index[level - 1] + 1),
432 ..self
433 }).to_vaddr() == self.to_vaddr() + page_size(level as PagingLevel),
434 {
435 let new_va = Self {
436 index: self.index.insert(level - 1, self.index[level - 1] + 1),
437 ..self
438 };
439 assert forall |i: int|
441 #![trigger new_va.index.contains_key(i)]
442 0 <= i < NR_LEVELS implies {
443 &&& new_va.index.contains_key(i)
444 &&& 0 <= new_va.index[i]
445 &&& new_va.index[i] < NR_ENTRIES
446 } by {
447 assert(self.index.contains_key(i));
449 assert(0 <= self.index[i] < NR_ENTRIES);
450 if i == level - 1 {
451 assert(new_va.index[i] == self.index[i] + 1);
452 assert(0 <= self.index[i]);
453 assert(0 <= new_va.index[i]);
454 } else {
455 assert(new_va.index[i] == self.index[i]);
456 }
457 };
458 assert(new_va.inv());
459 self.to_vaddr_is_compute_vaddr();
460 new_va.to_vaddr_is_compute_vaddr();
461 admit()
464 }
465
466 pub proof fn to_path_len(self, level: int)
468 requires
469 0 <= level < NR_LEVELS,
470 ensures
471 self.to_path(level).len() == NR_LEVELS - level,
472 {
473 self.rec_to_path_len(NR_LEVELS - 1, level);
474 }
475
476 proof fn rec_to_path_len(self, abstract_level: int, bottom_level: int)
477 requires
478 bottom_level <= abstract_level,
479 ensures
480 self.rec_to_path(abstract_level, bottom_level).len() == abstract_level - bottom_level + 1,
481 decreases abstract_level - bottom_level,
482 {
483 if abstract_level > bottom_level {
488 self.rec_to_path_len(abstract_level - 1, bottom_level);
489 }
490 admit()
492 }
493
494 pub proof fn to_path_inv(self, level: int)
496 requires
497 self.inv(),
498 0 <= level < NR_LEVELS,
499 ensures
500 self.to_path(level).inv(),
501 {
502 self.to_path_len(level);
503 assert forall|i: int| 0 <= i < self.to_path(level).len()
504 implies TreePath::<NR_ENTRIES>::elem_inv(#[trigger] self.to_path(level).index(i)) by {
505 admit()
507 };
508 }
509}
510
511impl AbstractVaddr {
513 pub proof fn path_matches_vaddr(self, path: TreePath<NR_ENTRIES>)
516 requires
517 self.inv(),
518 path.inv(),
519 path.len() <= NR_LEVELS,
520 forall|i: int| 0 <= i < path.len() ==>
521 path.index(i) == self.index[NR_LEVELS - 1 - i],
522 ensures
523 vaddr(path) == self.align_down((NR_LEVELS - path.len() + 1) as int).compute_vaddr()
524 - self.align_down((NR_LEVELS - path.len() + 1) as int).offset,
525 {
526 admit() }
528
529 pub proof fn to_path_index(self, level: int, i: int)
532 requires
533 self.inv(),
534 0 <= level < NR_LEVELS,
535 0 <= i < NR_LEVELS - level,
536 ensures
537 self.to_path(level).index(i) == self.index[NR_LEVELS - 1 - i],
538 {
539 self.to_path_len(level);
540 admit()
545 }
546
547 pub proof fn to_path_vaddr_concrete(self, level: int)
550 requires
551 self.inv(),
552 0 <= level < NR_LEVELS,
553 ensures
554 vaddr(self.to_path(level)) == nat_align_down(self.to_vaddr() as nat, page_size((level + 1) as PagingLevel) as nat) as usize,
555 {
556 self.to_path_vaddr(level);
557 self.to_vaddr_is_compute_vaddr();
558 self.align_down_concrete(level + 1);
559 admit()
564 }
565
566 pub proof fn vaddr_range_from_path(self, level: int)
569 requires
570 self.inv(),
571 0 <= level < NR_LEVELS,
572 ensures
573 vaddr(self.to_path(level)) <= self.to_vaddr()
574 < vaddr(self.to_path(level)) + page_size((level + 1) as PagingLevel),
575 {
576 self.to_path_vaddr_concrete(level);
577 admit()
579 }
580}
581
582}