use core::{fmt::Debug, ops::Deref};

use id_alloc::IdAlloc;
use spin::Once;

use crate::{
    Error,
    arch::{
        irq::{HwIrqLine, IRQ_NUM_MAX, IRQ_NUM_MIN, IrqRemapping},
        trap::TrapFrame,
    },
    prelude::*,
    sync::{RwLock, SpinLock, WriteIrqDisabled},
};

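/// The type of callback functions that can be registered on an [`IrqLine`].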
pub type IrqCallbackFunction = dyn Fn(&TrapFrame) + Sync + Send + 'static;

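/// An interrupt request (IRQ) line.
///
/// An IRQ line can be obtained with [`IrqLine::alloc`] or
/// [`IrqLine::alloc_specific`]. Callbacks registered on the line via
/// [`IrqLine::on_active`] are invoked whenever the corresponding interrupt
/// occurs.
///
/// A minimal usage sketch (illustrative only; the interrupt source and error
/// handling are omitted):
///
/// ```ignore
/// let mut irq_line = IrqLine::alloc()?;
/// irq_line.on_active(|_trap_frame| {
///     // Handle the interrupt here.
/// });
/// // Use `irq_line.num()` to configure the interrupt source.
/// ```
///
/// Cloning an IRQ line shares the underlying IRQ number. The number is freed
/// for reuse only after all clones are dropped, and dropping a clone
/// unregisters only the callbacks that were registered through that clone.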
#[derive(Debug)]
#[must_use]
pub struct IrqLine {
    inner: Arc<InnerHandle>,
    callbacks: Vec<CallbackHandle>,
}

impl IrqLine {
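    /// Allocates an available IRQ line.
    ///
    /// This returns an error if all IRQ lines are already allocated.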
    pub fn alloc() -> Result<Self> {
        get_or_init_allocator()
            .lock()
            .alloc()
            .map(|id| Self::new(id as u8))
            .ok_or(Error::NotEnoughResources)
    }

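    /// Allocates the IRQ line with the given IRQ number.
    ///
    /// This returns an error if the IRQ line is already allocated. The IRQ
    /// number is expected to fall within `IRQ_NUM_MIN..=IRQ_NUM_MAX`.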
    pub fn alloc_specific(irq_num: u8) -> Result<Self> {
        get_or_init_allocator()
            .lock()
            .alloc_specific((irq_num - IRQ_NUM_MIN) as usize)
            .map(|id| Self::new(id as u8))
            .ok_or(Error::NotEnoughResources)
    }

    fn new(index: u8) -> Self {
        let inner = InnerHandle { index };
        inner.remapping.init(index + IRQ_NUM_MIN);

        Self {
            inner: Arc::new(inner),
            callbacks: Vec::new(),
        }
    }

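    /// Returns the IRQ number of this IRQ line.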
    pub fn num(&self) -> u8 {
        self.inner.index + IRQ_NUM_MIN
    }

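    /// Registers a callback that will be invoked when the IRQ is active.
    ///
    /// Multiple callbacks may be registered on the same IRQ line; all of them
    /// are invoked when the interrupt occurs. A callback registered through
    /// this method stays registered until this particular `IrqLine` clone is
    /// dropped.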
    pub fn on_active<F>(&mut self, callback: F)
    where
        F: Fn(&TrapFrame) + Sync + Send + 'static,
    {
        let callback_handle = {
            let callback_box = Box::new(callback);
            // Record the address of the boxed callback. It uniquely identifies
            // the callback so that `CallbackHandle::drop` can find and
            // unregister it later.
            let callback_addr = core::ptr::from_ref(&*callback_box).addr();

            let mut callbacks = self.inner.callbacks.write();
            callbacks.push(callback_box);

            CallbackHandle {
                irq_index: self.inner.index,
                callback_addr,
            }
        };

        self.callbacks.push(callback_handle);
    }

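    /// Returns whether no callbacks are registered through this `IrqLine`.
    ///
    /// Note that this only accounts for this particular clone; other clones
    /// of the same IRQ line may still have callbacks registered.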
    pub fn is_empty(&self) -> bool {
        self.callbacks.is_empty()
    }

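    /// Returns the remapping index of the IRQ line, if any.
    ///
    /// This returns `None` if interrupt remapping is not enabled for this
    /// IRQ line.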
    pub fn remapping_index(&self) -> Option<u16> {
        self.inner.remapping.remapping_index()
    }
}

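// Cloning an `IrqLine` shares the underlying IRQ number (the line stays
// allocated as long as any clone is alive), but each clone tracks the
// callbacks registered through it separately.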
impl Clone for IrqLine {
    fn clone(&self) -> Self {
        Self {
            inner: self.inner.clone(),
            callbacks: Vec::new(),
        }
    }
}

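/// The shared, per-IRQ-line state: the registered callbacks and the interrupt
/// remapping state. One `Inner` exists per IRQ number in the global `INNERS`
/// table.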
struct Inner {
    callbacks: RwLock<Vec<Box<IrqCallbackFunction>>, WriteIrqDisabled>,
    remapping: IrqRemapping,
}

impl Inner {
    const fn new() -> Self {
        Self {
            callbacks: RwLock::new(Vec::new()),
            remapping: IrqRemapping::new(),
        }
    }
}

const NUMBER_OF_IRQS: usize = (IRQ_NUM_MAX - IRQ_NUM_MIN) as usize + 1;

static INNERS: [Inner; NUMBER_OF_IRQS] = [const { Inner::new() }; NUMBER_OF_IRQS];
static ALLOCATOR: Once<SpinLock<IdAlloc>> = Once::new();

fn get_or_init_allocator() -> &'static SpinLock<IdAlloc> {
    ALLOCATOR.call_once(|| SpinLock::new(IdAlloc::with_capacity(NUMBER_OF_IRQS)))
}

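/// A handle to an entry of the global `INNERS` table.
///
/// Dropping the handle frees the IRQ number in the allocator so that it can
/// be allocated again.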
#[must_use]
#[derive(Debug)]
struct InnerHandle {
    index: u8,
}

impl Deref for InnerHandle {
    type Target = Inner;

    fn deref(&self) -> &Self::Target {
        &INNERS[self.index as usize]
    }
}

impl Drop for InnerHandle {
    fn drop(&mut self) {
        ALLOCATOR.get().unwrap().lock().free(self.index as usize);
    }
}

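/// A handle to a callback registered on an IRQ line.
///
/// Dropping the handle unregisters the callback.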
#[must_use]
#[derive(Debug)]
struct CallbackHandle {
    irq_index: u8,
    callback_addr: usize,
}

impl Drop for CallbackHandle {
    fn drop(&mut self) {
        let mut callbacks = INNERS[self.irq_index as usize].callbacks.write();

        // Locate the callback by the address recorded at registration time and
        // remove it. A handle exists only for a callback that was pushed, so
        // the lookup cannot fail.
        let pos = callbacks
            .iter()
            .position(|element| core::ptr::from_ref(&**element).addr() == self.callback_addr);
        let _ = callbacks.swap_remove(pos.unwrap());
    }
}

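/// Invokes all callbacks registered on the given hardware IRQ line, then
/// acknowledges the interrupt.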
pub(super) fn process(trap_frame: &TrapFrame, hw_irq_line: &HwIrqLine) {
    let inner = &INNERS[(hw_irq_line.irq_num() - IRQ_NUM_MIN) as usize];
    for callback in &*inner.callbacks.read() {
        callback(trap_frame);
    }
    hw_irq_line.ack();
}

#[cfg(ktest)]
mod test {
    use super::*;

    const IRQ_NUM: u8 = 64;
    const IRQ_INDEX: usize = (IRQ_NUM - IRQ_NUM_MIN) as usize;

    #[ktest]
    fn alloc_and_free_irq() {
        let irq_line = IrqLine::alloc_specific(IRQ_NUM).unwrap();
        assert!(IrqLine::alloc_specific(IRQ_NUM).is_err());

        let irq_line_cloned = irq_line.clone();
        assert!(IrqLine::alloc_specific(IRQ_NUM).is_err());

        drop(irq_line);
        assert!(IrqLine::alloc_specific(IRQ_NUM).is_err());

        drop(irq_line_cloned);
        assert!(IrqLine::alloc_specific(IRQ_NUM).is_ok());
    }

    #[ktest]
    fn register_and_unregister_callback() {
        let mut irq_line = IrqLine::alloc_specific(IRQ_NUM).unwrap();
        let mut irq_line_cloned = irq_line.clone();

        assert_eq!(INNERS[IRQ_INDEX].callbacks.read().len(), 0);

        irq_line.on_active(|_| {});
        assert_eq!(INNERS[IRQ_INDEX].callbacks.read().len(), 1);

        irq_line_cloned.on_active(|_| {});
        assert_eq!(INNERS[IRQ_INDEX].callbacks.read().len(), 2);

        irq_line_cloned.on_active(|_| {});
        assert_eq!(INNERS[IRQ_INDEX].callbacks.read().len(), 3);

        drop(irq_line);
        assert_eq!(INNERS[IRQ_INDEX].callbacks.read().len(), 2);

        drop(irq_line_cloned);
        assert_eq!(INNERS[IRQ_INDEX].callbacks.read().len(), 0);
    }
}