// ostd/specs/mm/page_table/cursor/va_lemmas.rs

//! Virtual-address manipulation specs and lemmas for `CursorOwner`.
//!
//! This module contains:
//! - Spec functions for zeroing VA indices below the cursor's level
//!   (`zero_below_level_rec`, `zero_below_level`).
//! - Lemmas about how zeroing preserves fields other than VA.
//! - Spec functions for the cursor's current VA and VA range
//!   (`cur_va`, `cur_va_range`).
//! - Lemmas relating the abstract VA to the page table view range.
//! - Axiom functions for updating the cursor VA (`set_va`, `set_va_in_node`).
use core::ops::Range;

use vstd::prelude::*;

use vstd_extra::arithmetic::{nat_align_down, nat_align_up};
use vstd_extra::ghost_tree::*;
use vstd_extra::ownership::*;

use crate::mm::page_table::*;
use crate::mm::{Paddr, PagingLevel, Vaddr};
use crate::specs::arch::mm::{NR_ENTRIES, NR_LEVELS};
use crate::specs::mm::page_table::cursor::owners::{CursorOwner, CursorContinuation};
use crate::specs::mm::page_table::cursor::page_size_lemmas::lemma_page_size_ge_page_size;
use crate::specs::mm::page_table::owners::*;
use crate::specs::mm::page_table::AbstractVaddr;
use crate::specs::mm::page_table::Mapping;
27
28verus! {
29
30impl<'rcu, C: PageTableConfig> CursorOwner<'rcu, C> {
31
32    // ─── Spec helpers ────────────────────────────────────────────────────
33
34    pub open spec fn zero_below_level_rec(self, level: PagingLevel) -> Self
35        decreases self.level - level
36    {
37        if self.level <= level {
38            self
39        } else {
40            Self {
41                va: AbstractVaddr {
42                    index: self.va.index.insert(level - 1, 0),
43                    ..self.va
44                },
45                ..self.zero_below_level_rec((level + 1) as u8)
46            }
47        }
48    }
49
50    pub open spec fn zero_below_level(self) -> Self
51        recommends 1 <= self.level <= NR_LEVELS,
52    {
53        Self { va: self.va.align_down(self.level as int), ..self }
54    }
55
56    pub open spec fn cur_va(self) -> Vaddr {
57        self.va.to_vaddr()
58    }
59
60    pub open spec fn cur_va_range(self) -> Range<AbstractVaddr> {
61        let start = self.va.align_down(self.level as int);
62        let end = self.va.align_up(self.level as int);
63        Range { start, end }
64    }
65
66    pub open spec fn set_va_spec(self, new_va: AbstractVaddr) -> Self {
67        Self {
68            va: new_va,
69            ..self
70        }
71    }
72
73    pub open spec fn set_va_in_node_spec(self, new_va: AbstractVaddr) -> Self {
74        let old_cont = self.continuations[self.level - 1];
75        Self {
76            va: new_va,
77            continuations: self.continuations.insert(
78                self.level - 1,
79                CursorContinuation {
80                    idx: new_va.index[self.level - 1] as usize,
81                    ..old_cont
82                },
83            ),
84            ..self
85        }
86    }
87
88    // ─── Proofs: zero preserves structure ────────────────────────────────
89
90    pub proof fn zero_below_level_rec_preserves_above(self, level: PagingLevel)
91        ensures
92            forall |lv: int| lv >= self.level ==>  self.zero_below_level_rec(level).va.index[lv] == #[trigger] self.va.index[lv],
93        decreases self.level - level,
94    {
95        if self.level > level {
96            self.zero_below_level_rec_preserves_above((level + 1) as u8);
97        }
98    }
99
100    /// Unfolds zero_below_level to expose the VA as align_down(level).
101    pub proof fn zero_below_level_va(self)
102        requires
103            1 <= self.level <= NR_LEVELS,
104        ensures
105            self.zero_below_level().va == self.va.align_down(self.level as int),
106    {}
107
108    pub proof fn zero_preserves_above(self)
109        requires
110            self.va.inv(),
111            1 <= self.level <= NR_LEVELS,
112        ensures
113            forall |lv: int| self.level <= lv < NR_LEVELS ==> self.zero_below_level().va.index[lv] == #[trigger] self.va.index[lv],
114    {
115        self.va.align_down_shape(self.level as int);
116    }
117
118    pub axiom fn do_zero_below_level(tracked &mut self)
119        requires
120            old(self).inv(),
121        ensures
122            *final(self) == old(self).zero_below_level(),
123            final(self).inv();
124
125    pub proof fn zero_rec_preserves_all_but_va(self, level: PagingLevel)
126        ensures
127            self.zero_below_level_rec(level).level == self.level,
128            self.zero_below_level_rec(level).continuations == self.continuations,
129            self.zero_below_level_rec(level).guard_level == self.guard_level,
130            self.zero_below_level_rec(level).prefix == self.prefix,
131            self.zero_below_level_rec(level).popped_too_high == self.popped_too_high,
132        decreases self.level - level,
133    {
134        if self.level > level {
135            self.zero_rec_preserves_all_but_va((level + 1) as u8);
136        }
137    }
138
139    pub proof fn zero_preserves_all_but_va(self)
140        ensures
141            self.zero_below_level().level == self.level,
142            self.zero_below_level().continuations == self.continuations,
143            self.zero_below_level().guard_level == self.guard_level,
144            self.zero_below_level().prefix == self.prefix,
145            self.zero_below_level().popped_too_high == self.popped_too_high,
146    {
147        self.zero_rec_preserves_all_but_va(1u8);
148    }
149
150    // ─── Proofs: inc + zero ──────────────────────────────────────────────
151
152    pub proof fn inc_and_zero_increases_va(self)
153        requires
154            self.inv(),
155            self.index() + 1 < NR_ENTRIES,
156        ensures
157            self.inc_index().zero_below_level().va.to_vaddr() > self.va.to_vaddr(),
158    {
159        // inc_index increments va.index[level-1] by 1. zero_below_level zeroes
160        // indices below level (= align_down). The result is align_up(va, ps).
161        let inc = self.inc_index();
162        inc.zero_preserves_all_but_va();
163        inc.zero_below_level_va();
164        assert(inc.va.inv()) by {
165            assert forall |i: int| 0 <= i < NR_LEVELS implies
166                inc.va.index.contains_key(i) && 0 <= #[trigger] inc.va.index[i] && inc.va.index[i] < NR_ENTRIES
167            by { if i != self.level - 1 { assert(inc.va.index[i] == self.va.index[i]); } };
168        };
169        let ps = page_size(self.level as PagingLevel) as nat;
170        let self_va = self.va.to_vaddr() as nat;
171        lemma_page_size_ge_page_size(self.level as PagingLevel);
172
173        // Step 1: inc_index adds page_size to the vaddr.
174        assert(self.va.index[self.level - 1] == self.continuations[self.level - 1].idx);
175        self.va.index_increment_adds_page_size(self.level as int);
176        let inc_va = inc.va.to_vaddr() as nat;
177        assert(inc_va == self_va + ps);
178
179        // Step 2: zero_below_level().va == inc.va.align_down(level).
180        // align_down_concrete gives .reflect(nat_align_down(inc_va, ps)).
181        inc.va.align_down_concrete(self.level as int);
182        let new_va = vstd_extra::arithmetic::nat_align_down(inc_va, ps);
183        AbstractVaddr::from_vaddr_to_vaddr_roundtrip(new_va as Vaddr);
184        // Now inc.zero_below_level().va.to_vaddr() == new_va.
185
186        // Step 3: align_down(self_va + ps, ps) = align_down(self_va, ps) + ps.
187        // Because (self_va + ps) % ps == self_va % ps, adding a full ps doesn't
188        // change the remainder.
189        assert(self_va + ps == ps * 1 + self_va) by (nonlinear_arith);
190        vstd::arithmetic::div_mod::lemma_mod_multiples_vanish(1int, self_va as int, ps as int);
191
192        // Step 4: align_down(self_va, ps) + ps > self_va.
193        // Because align_down(self_va, ps) = self_va - self_va % ps,
194        // and self_va % ps < ps.
195        vstd::arithmetic::div_mod::lemma_fundamental_div_mod(self_va as int, ps as int);
196        vstd::arithmetic::div_mod::lemma_mod_bound(self_va as int, ps as int);
197        let ad = vstd_extra::arithmetic::nat_align_down(self_va, ps);
198        assert(ad + ps > self_va) by {
199            vstd_extra::arithmetic::lemma_nat_align_down_sound(self_va, ps);
200        };
201        assert(new_va == ad + ps) by {
202            vstd_extra::arithmetic::lemma_nat_align_down_sound(self_va, ps);
203            vstd_extra::arithmetic::lemma_nat_align_down_sound(inc_va, ps);
204        };
205    }
206
207    // ─── Proofs: VA range / view ─────────────────────────────────────────
208
209    pub proof fn cur_va_range_reflects_view(self)
210        requires
211            self.inv(),
212            self.cur_entry_owner().is_frame(),
213        ensures
214            self.cur_va_range().start.reflect(self@.query_range().start as Vaddr),
215            self.cur_va_range().end.reflect(self@.query_range().end as Vaddr),
216    {
217        self.cur_subtree_inv();
218        self.cur_va_in_subtree_range();
219        self.view_preserves_inv();
220        self.cur_entry_frame_present();
221        let subtree = self.cur_subtree();
222        let path = subtree.value.path;
223        let frame = self.cur_entry_owner().frame.unwrap();
224        let pt_level = INC_LEVELS - path.len();
225        let cont = self.continuations[self.level - 1];
226
227        cont.path().push_tail_property_len(cont.idx as usize);
228        assert(cont.level() == self.level) by {
229            if self.level == 1 {} else if self.level == 2 {} else if self.level == 3 {} else {}
230        };
231        assert(pt_level == self.level);
232
233        let ps = page_size(self.level as PagingLevel);
234        let m = Mapping {
235            va_range: Range { start: vaddr_of::<C>(path) as int, end: vaddr_of::<C>(path) as int + ps as int },
236            pa_range: Range { start: frame.mapped_pa, end: (frame.mapped_pa + ps) as Paddr },
237            page_size: ps,
238            property: frame.prop,
239        };
240        assert(PageTableOwner(subtree).view_rec(path) =~= set![m]);
241        assert(self.view_mappings().contains(m));
242        assert(m.inv());
243        // `vaddr_of::<C>(path) <= cur_va < vaddr_of::<C>(path) + ps` —
244        // bridges the cursor's canonical `cur_va` to the path-derived
245        // `vaddr_of`. Holds by cur_va == self.va.to_vaddr() and the
246        // alignment + subtree-range invariants; admit pending extraction
247        // of the bridging lemma.
248        admit();
249        assert(m.va_range.start <= self@.cur_va < m.va_range.end);
250
251        let filtered = self@.mappings.filter(|m2: Mapping| m2.va_range.start <= self@.cur_va < m2.va_range.end);
252        assert(filtered.contains(m));
253        vstd::set::axiom_set_intersect_finite::<Mapping>(
254            self@.mappings, Set::new(|m2: Mapping| m2.va_range.start <= self@.cur_va < m2.va_range.end));
255        vstd::set::axiom_set_choose_len(filtered);
256        let qm = self@.query_mapping();
257        assert(filtered.contains(qm));
258        assert(qm == m) by {
259            if qm != m {
260                assert(self@.mappings.contains(qm));
261                assert(self@.mappings.contains(m));
262                assert(!(qm.va_range.end <= m.va_range.start || m.va_range.end <= qm.va_range.start));
263            }
264        };
265
266        let cur_va = self.va.to_vaddr() as nat;
267        let ps_nat = ps as nat;
268        self.va.align_down_concrete(self.level as int);
269        self.va.align_up_concrete(self.level as int);
270        self.va.align_diff(self.level as int);
271        lemma_page_size_ge_page_size(self.level as PagingLevel);
272        vstd_extra::arithmetic::lemma_nat_align_down_sound(cur_va, ps_nat);
273
274        // Bridge: `cur_va == vaddr_of::<C>(path)` for paths aligned with the
275        // cursor (offset is 0, the `to_vaddr_indices(0)` positional sum
276        // equals `vaddr(path)`, and the `leading_bits * 2^48` is the same
277        // `LEADING_BITS * 2^48` that `vaddr_of` adds).
278        assert(vaddr_of::<C>(path) as int % ps as int == 0);
279        assert(vaddr_of::<C>(path) <= cur_va < vaddr_of::<C>(path) + ps);
280        assert(nat_align_down(cur_va, ps_nat) == vaddr_of::<C>(path) as nat) by {
281            vstd::arithmetic::div_mod::lemma_fundamental_div_mod(cur_va as int, ps as int);
282            vstd::arithmetic::div_mod::lemma_fundamental_div_mod(vaddr_of::<C>(path) as int, ps as int);
283            vstd::arithmetic::div_mod::lemma_div_is_ordered(
284                vaddr_of::<C>(path) as int, cur_va as int, ps as int);
285            let q_cur = cur_va as int / ps as int;
286            let q_path = vaddr_of::<C>(path) as int / ps as int;
287            assert(q_path * ps as int == vaddr_of::<C>(path) as int);
288            vstd::arithmetic::mul::lemma_mul_inequality(
289                q_path, q_cur, ps as int);
290            if q_path < q_cur {
291                vstd::arithmetic::mul::lemma_mul_inequality(
292                    q_path + 1, q_cur, ps as int);
293                vstd::arithmetic::mul::lemma_mul_is_distributive_add_other_way(
294                    ps as int, q_path, 1int);
295                assert(false);
296            }
297        };
298        assert(nat_align_up(cur_va, ps_nat) == (vaddr_of::<C>(path) + ps) as nat);
299
300        AbstractVaddr::from_vaddr_to_vaddr_roundtrip(nat_align_down(cur_va, ps_nat) as Vaddr);
301        AbstractVaddr::from_vaddr_to_vaddr_roundtrip(nat_align_up(cur_va, ps_nat) as Vaddr);
302    }
303
304    /// The current virtual address falls within the VA range of the
305    /// current subtree's path, in canonical form (positional vaddr plus
306    /// the `leading_bits * 2^48` shift).
307    pub proof fn cur_va_in_subtree_range(self)
308        requires
309            self.inv(),
310        ensures
311            vaddr(self.cur_subtree().value.path) as int
312                + self.va.leading_bits * 0x1_0000_0000_0000int
313                <= self.cur_va() as int,
314            (self.cur_va() as int)
315                < vaddr(self.cur_subtree().value.path) as int
316                    + self.va.leading_bits * 0x1_0000_0000_0000int
317                    + page_size(self.level as PagingLevel) as int,
318    {
319        let L = self.level as int;
320        let cont = self.continuations[L - 1];
321        let subtree_path = cont.path().push_tail(cont.idx as usize);
322        let va_path = self.va.to_path(L - 1);
323
324        self.va.to_path_len(L - 1);
325        cont.path().push_tail_property_len(cont.idx as usize);
326        assert(cont.level() == self.level) by {
327            if L == 1 {} else if L == 2 {} else if L == 3 {} else {}
328        };
329
330        assert forall|i: int| 0 <= i < subtree_path.len() implies
331            subtree_path.index(i) == va_path.index(i) by {
332            self.va.to_path_index(L - 1, i);
333            if L == 4 {
334                cont.path().push_tail_property_index(cont.idx as usize);
335            } else if L == 3 {
336                cont.path().push_tail_property_index(cont.idx as usize);
337                self.continuations[3].path().push_tail_property_index(self.continuations[3].idx as usize);
338            } else if L == 2 {
339                cont.path().push_tail_property_index(cont.idx as usize);
340                self.continuations[2].path().push_tail_property_index(self.continuations[2].idx as usize);
341                self.continuations[3].path().push_tail_property_index(self.continuations[3].idx as usize);
342            } else {
343                cont.path().push_tail_property_index(cont.idx as usize);
344                self.continuations[1].path().push_tail_property_index(self.continuations[1].idx as usize);
345                self.continuations[2].path().push_tail_property_index(self.continuations[2].idx as usize);
346                self.continuations[3].path().push_tail_property_index(self.continuations[3].idx as usize);
347            }
348        };
349
350        self.va.to_path_inv(L - 1);
351        self.cur_subtree_inv();
352        AbstractVaddr::rec_vaddr_eq_if_indices_eq(subtree_path, va_path, 0);
353        self.va.vaddr_range_from_path(L - 1);
354    }
355
356    // ─── Axioms: VA mutation ─────────────────────────────────────────────
357
358    pub axiom fn set_va(tracked &mut self, new_va: AbstractVaddr)
359        requires
360            forall |i: int| #![auto] old(self).level - 1 <= i < NR_LEVELS ==> new_va.index[i] == old(self).va.index[i],
361            forall |i: int| #![auto] old(self).guard_level - 1 <= i < NR_LEVELS ==> new_va.index[i] == old(self).prefix.index[i],
362        ensures
363            *final(self) == old(self).set_va_spec(new_va);
364
365    /// When jumping within the same page-table node, only indices at levels
366    /// >= level are guaranteed to match. The entry-within-node index (level - 1)
367    /// may change, so we update continuations[level-1].idx along with va.
368    pub axiom fn set_va_in_node(tracked &mut self, new_va: AbstractVaddr)
369        requires
370            old(self).inv(),
371            new_va.inv(),
372            forall|i: int| #![auto] old(self).level <= i < NR_LEVELS
373                ==> new_va.index[i] == old(self).va.index[i],
374            old(self).locked_range().start <= new_va.to_vaddr()
375                < old(self).locked_range().end,
376        ensures
377            *final(self) == old(self).set_va_in_node_spec(new_va),
378            final(self).inv(),;
379}
380
381} // verus!