1use core::{fmt::Debug, mem::ManuallyDrop};
4
5use super::util::{
6 alloc_kva, cvm_need_private_protection, prepare_dma, split_daddr, unprepare_dma,
7};
8use crate::{
9 arch::irq,
10 error::Error,
11 mm::{
12 Daddr, FrameAllocOptions, HasDaddr, HasPaddr, HasPaddrRange, HasSize, Infallible,
13 PAGE_SIZE, Paddr, Segment, Split, VmReader, VmWriter,
14 io::util::{HasVmReaderWriter, VmReaderWriterIdentity},
15 kspace::kvirt_area::KVirtArea,
16 },
17};
18
/// A DMA-mapped memory region for coherent (in-place, synchronized) access
/// by both the CPU and a device.
///
/// The region is mapped for DMA on construction (see `alloc_uninit`) and
/// unmapped via `unprepare_dma` when dropped. It can be split into two
/// independent regions at a page boundary (see the [`Split`] impl).
#[derive(Debug)]
pub struct DmaCoherent {
    // The backing memory: plain frames or a remapped kernel virtual area.
    inner: Inner,
    // Device address returned by `prepare_dma` for the physical range;
    // `None` means the physical address itself serves as the device
    // address (see the `HasDaddr` impl below).
    map_daddr: Option<Daddr>,
    // Whether the caller requested a cache-coherent mapping.
    is_cache_coherent: bool,
}
34
/// The backing storage of a [`DmaCoherent`] region.
#[derive(Debug)]
enum Inner {
    // A contiguous segment of physical frames, accessed through the
    // segment's own reader/writer. Used when the mapping is cache-coherent
    // and no CVM private protection is required (see `alloc_uninit`).
    Segment(Segment<()>),
    // A dedicated kernel virtual area together with the start physical
    // address of the frames it maps. Used otherwise (see `alloc_kva`).
    Kva(KVirtArea, Paddr),
}
40
41impl DmaCoherent {
42 pub fn alloc(nframes: usize, is_cache_coherent: bool) -> Result<Self, Error> {
53 Self::alloc_uninit(nframes, is_cache_coherent).inspect(|dma| {
54 dma.writer().fill_zeros(dma.size());
55 })
56 }
57
58 pub fn alloc_uninit(nframes: usize, is_cache_coherent: bool) -> Result<Self, Error> {
67 debug_assert!(irq::is_local_enabled());
68
69 let cvm = cvm_need_private_protection();
70
71 let (inner, paddr_range) = if is_cache_coherent && !cvm {
72 let segment = FrameAllocOptions::new()
73 .zeroed(false)
74 .alloc_segment(nframes)?;
75 let paddr_range = segment.paddr_range();
76
77 (Inner::Segment(segment), paddr_range)
78 } else {
79 let (kva, paddr) = alloc_kva(nframes, is_cache_coherent)?;
80
81 (Inner::Kva(kva, paddr), paddr..paddr + nframes * PAGE_SIZE)
82 };
83
84 let map_daddr = unsafe { prepare_dma(&paddr_range) };
86
87 Ok(Self {
88 inner,
89 map_daddr,
90 is_cache_coherent,
91 })
92 }
93}
94
impl Split for DmaCoherent {
    /// Splits the region at `offset` bytes into two independent regions:
    /// the first covers `[0, offset)`, the second the remainder.
    ///
    /// # Panics
    ///
    /// Panics if `offset` is not a multiple of `PAGE_SIZE` or does not fall
    /// strictly inside the region.
    fn split(self, offset: usize) -> (Self, Self) {
        assert!(offset.is_multiple_of(PAGE_SIZE));
        assert!(0 < offset && offset < self.size());

        // Disassemble `self` without running its `Drop` impl: the two
        // halves take over the DMA mapping, so `unprepare_dma` must not be
        // called on the original, full-range region here.
        let (inner, map_daddr, is_cache_coherent) = {
            let this = ManuallyDrop::new(self);
            (
                // SAFETY: `this` is wrapped in `ManuallyDrop`, so `inner`
                // will never be dropped (or otherwise used) through `this`
                // again; the value read here becomes the sole owner.
                unsafe { core::ptr::read(&this.inner as *const Inner) },
                this.map_daddr,
                this.is_cache_coherent,
            )
        };

        // Split the backing storage; for the KVA case the second half's
        // start physical address is shifted by `offset`.
        let (inner1, inner2) = match inner {
            Inner::Segment(segment) => {
                let (s1, s2) = segment.split(offset);
                (Inner::Segment(s1), Inner::Segment(s2))
            }
            Inner::Kva(kva, paddr) => {
                let (kva1, kva2) = kva.split(offset);
                let (paddr1, paddr2) = (paddr, paddr + offset);
                (Inner::Kva(kva1, paddr1), Inner::Kva(kva2, paddr2))
            }
        };

        // Derive each half's device address from the original mapping.
        let (daddr1, daddr2) = split_daddr(map_daddr, offset);

        (
            Self {
                inner: inner1,
                map_daddr: daddr1,
                is_cache_coherent,
            },
            Self {
                inner: inner2,
                map_daddr: daddr2,
                is_cache_coherent,
            },
        )
    }
}
138
impl Drop for DmaCoherent {
    fn drop(&mut self) {
        // SAFETY: this physical range was mapped for DMA by `prepare_dma`
        // in `alloc_uninit` (possibly narrowed by `split`), and
        // `map_daddr` is the device address recorded for it. The backing
        // memory is released only afterwards, when `inner` is dropped.
        unsafe { unprepare_dma(&self.paddr_range(), self.map_daddr) };
    }
}
145
146impl HasPaddr for DmaCoherent {
147 fn paddr(&self) -> Paddr {
148 match &self.inner {
149 Inner::Segment(segment) => segment.paddr(),
150 Inner::Kva(_, paddr) => *paddr,
151 }
152 }
153}
154
155impl HasDaddr for DmaCoherent {
156 fn daddr(&self) -> Daddr {
157 self.map_daddr.unwrap_or_else(|| self.paddr() as Daddr)
158 }
159}
160
161impl HasSize for DmaCoherent {
162 fn size(&self) -> usize {
163 match &self.inner {
164 Inner::Segment(segment) => segment.size(),
165 Inner::Kva(kva, _) => kva.size(),
166 }
167 }
168}
169
impl HasVmReaderWriter for DmaCoherent {
    // Readers and writers are handed out as-is, without a fallibility
    // wrapper.
    type Types = VmReaderWriterIdentity;

    /// Returns a reader over the whole region.
    fn reader(&self) -> VmReader<'_, Infallible> {
        match &self.inner {
            Inner::Segment(seg) => seg.reader(),
            Inner::Kva(kva, _) => {
                // SAFETY: `kva.start()` points to `kva.size()` bytes of
                // kernel memory that stay mapped for as long as `self` —
                // and hence the returned reader, which borrows it — lives.
                unsafe { VmReader::from_kernel_space(kva.start() as *const u8, kva.size()) }
            }
        }
    }

    /// Returns a writer over the whole region.
    fn writer(&self) -> VmWriter<'_, Infallible> {
        match &self.inner {
            Inner::Segment(seg) => seg.writer(),
            Inner::Kva(kva, _) => {
                // SAFETY: same mapping and lifetime argument as in
                // `reader` above; writes land in the DMA region itself.
                unsafe { VmWriter::from_kernel_space(kva.start() as *mut u8, kva.size()) }
            }
        }
    }
}