// ostd/specs/mm/page_table/mod.rs
#![allow(hidden_glob_reexports)]

pub mod cursor;
pub mod node;
mod owners;
mod view;

pub use cursor::*;
pub use node::*;
pub use owners::*;
pub use view::*;
13use vstd::prelude::*;
14use vstd_extra::arithmetic::*;
15use vstd_extra::ownership::*;
16
17use crate::mm::page_table::page_size;
18use crate::mm::page_table::PageTableConfig;
19use crate::mm::{PagingLevel, Vaddr};
20use crate::specs::arch::mm::{NR_ENTRIES, NR_LEVELS, PAGE_SIZE};
21
22use align_ext::AlignExt;
23
24verus! {
25
/// Abstract (structured) view of a virtual address: a page-internal byte
/// offset plus one page-table index per paging level.
pub struct AbstractVaddr {
    // Byte offset within the final page; bounded by `PAGE_SIZE()` in `inv`.
    pub offset: int,
    // Per-level page-table indices, keyed 0-based: hardware level `l` is map
    // key `l - 1` (see `next_index`, which reads `index[level - 1]`).
    // Each present key is bounded by `NR_ENTRIES()` in `inv`.
    pub index: Map<int, int>,
}
30
/// Well-formedness invariant: the offset fits in one page and every paging
/// level has an in-range index entry in the map.
impl Inv for AbstractVaddr {
    open spec fn inv(self) -> bool {
        // Offset stays strictly within a single page.
        &&& 0 <= self.offset < PAGE_SIZE()
        // Every level key 0..NR_LEVELS() is present and in [0, NR_ENTRIES()).
        // The trigger is on key membership so the quantifier fires whenever a
        // proof looks up `contains_key` on a level key.
        &&& forall |i: int|
            #![trigger self.index.contains_key(i)]
            0 <= i < NR_LEVELS() ==> {
                &&& self.index.contains_key(i)
                &&& 0 <= self.index[i]
                &&& self.index[i] < NR_ENTRIES()
            }
    }
}
43
44impl AbstractVaddr {
45 pub uninterp spec fn from_vaddr(va: Vaddr) -> Self;
46
47 pub axiom fn from_vaddr_wf(va: Vaddr)
48 ensures
49 AbstractVaddr::from_vaddr(va).inv();
50
51 pub uninterp spec fn to_vaddr(self) -> Vaddr;
52
53 pub uninterp spec fn reflect(self, va: Vaddr) -> bool;
54
55 pub broadcast axiom fn reflect_prop(self, va: Vaddr)
56 requires
57 self.inv(),
58 self.reflect(va),
59 ensures
60 #[trigger] self.to_vaddr() == va,
61 #[trigger] Self::from_vaddr(va) == self;
62
63 pub broadcast axiom fn reflect_from_vaddr(va: Vaddr)
64 ensures
65 #[trigger] Self::from_vaddr(va).reflect(va),
66 #[trigger] Self::from_vaddr(va).inv();
67
68 pub broadcast axiom fn reflect_to_vaddr(self)
69 requires
70 self.inv(),
71 ensures
72 #[trigger] self.reflect(self.to_vaddr());
73
74 pub broadcast axiom fn reflect_eq(self, other: Self, va: Vaddr)
75 requires
76 self.reflect(va),
77 other.reflect(va),
78 ensures
79 #![auto] (self == other);
80
81 pub open spec fn align_down(self, level: int) -> Self
82 decreases level when level >= 1
83 {
84 if level == 1 {
85 AbstractVaddr {
86 offset: 0,
87 index: self.index,
88 }
89 } else {
90 let tmp = self.align_down(level - 1);
91 AbstractVaddr {
92 index: tmp.index.insert(level - 2, 0),
93 ..tmp
94 }
95 }
96 }
97
98 pub proof fn align_down_inv(self, level: int)
99 requires
100 1 <= level < NR_LEVELS(),
101 self.inv(),
102 ensures
103 self.align_down(level).inv(),
104 forall |i: int| level <= i < NR_LEVELS() ==> #[trigger] self.index[i - 1] == self.align_down(level).index[i - 1],
105 decreases level
106 {
107 if level == 1 {
108 assert(self.inv());
109 } else {
110 let tmp = self.align_down(level - 1);
111 self.align_down_inv(level - 1);
112 }
113 }
114
115 pub axiom fn align_down_concrete(self, level: int)
116 requires
117 1 <= level <= NR_LEVELS(),
118 ensures
119 self.align_down(level).reflect(nat_align_down(self.to_vaddr() as nat, page_size(level as PagingLevel) as nat) as Vaddr);
120
121 pub open spec fn align_up(self, level: int) -> Self {
122 let lower_aligned = self.align_down(level - 1);
123 lower_aligned.next_index(level)
124 }
125
126 pub axiom fn align_up_concrete(self, level: int)
127 requires
128 1 <= level <= NR_LEVELS(),
129 ensures
130 self.align_up(level).reflect(nat_align_up(self.to_vaddr() as nat, page_size(level as PagingLevel) as nat) as Vaddr);
131
132 pub axiom fn align_diff(self, level: int)
133 requires
134 1 <= level <= NR_LEVELS(),
135 ensures
136 nat_align_up(self.to_vaddr() as nat, page_size(level as PagingLevel) as nat) ==
137 nat_align_down(self.to_vaddr() as nat, page_size(level as PagingLevel) as nat) + page_size(level as PagingLevel),
138 nat_align_up(self.to_vaddr() as nat, page_size(level as PagingLevel) as nat) < usize::MAX;
139
140 pub open spec fn next_index(self, level: int) -> Self
141 decreases NR_LEVELS() - level when 1 <= level <= NR_LEVELS()
142 {
143 let index = self.index[level - 1];
144 let next_index = index + 1;
145 if next_index == NR_ENTRIES() && level < NR_LEVELS() {
146 let next_va = Self {
147 offset: self.offset,
148 index: self.index.insert(level - 1, 0),
149 };
150 next_va.next_index(level + 1)
151 } else if next_index == NR_ENTRIES() && level == NR_LEVELS() {
152 Self {
153 offset: self.offset,
154 index: self.index.insert(level - 1, 0),
155 }
156 } else {
157 Self {
158 offset: self.offset,
159 index: self.index.insert(level - 1, next_index),
160 }
161 }
162 }
163
164 pub open spec fn wrapped(self, start_level: int, level: int) -> bool
165 decreases NR_LEVELS() - level when 1 <= start_level <= level <= NR_LEVELS()
166 {
167 &&& self.next_index(start_level).index[level - 1] == 0 ==> {
168 &&& self.index[level - 1] + 1 == NR_ENTRIES()
169 &&& if level < NR_LEVELS() {
170 self.wrapped(start_level, level + 1)
171 } else {
172 true
173 }
174 }
175 &&& self.next_index(start_level).index[level - 1] != 0 ==>
176 self.index[level - 1] + 1 < NR_ENTRIES()
177 }
178
179 pub proof fn use_wrapped(self, start_level: int, level: int)
180 requires
181 1 <= start_level <= level < NR_LEVELS(),
182 self.wrapped(start_level, level),
183 self.next_index(start_level).index[level - 1] == 0
184 ensures
185 self.index[level - 1] + 1 == NR_ENTRIES()
186 { }
187
188 pub proof fn wrapped_unwrap(self, start_level: int, level: int)
189 requires
190 1 <= start_level <= level < NR_LEVELS(),
191 self.wrapped(start_level, level),
192 self.next_index(start_level).index[level - 1] == 0,
193 ensures
194 self.wrapped(start_level, level + 1)
195 { }
196
197 pub proof fn next_index_wrap_condition(self, level: int)
198 requires
199 self.inv(),
200 1 <= level <= NR_LEVELS(),
201 ensures
202 self.wrapped(level, level)
203 { admit() }
204}
205
206}