rendy_memory/mapping/mod.rs

mod range;
pub(crate) mod write;

use {
    crate::{memory::Memory, util::fits_usize},
    gfx_hal::{device::Device as _, Backend},
    std::{ops::Range, ptr::NonNull},
};

pub(crate) use self::range::{
    mapped_fitting_range, mapped_slice, mapped_slice_mut, mapped_sub_range,
};
use self::write::{Write, WriteCoherent, WriteFlush};

/// Non-coherent marker.
#[derive(Clone, Copy, Debug)]
pub struct NonCoherent;

/// Coherent marker.
#[derive(Clone, Copy, Debug)]
pub struct Coherent;

/// Value that contains either a coherent or a non-coherent marker.
#[derive(Clone, Copy, Debug)]
pub struct MaybeCoherent(bool);

/// Represents a range of memory mapped to the host.
/// Provides methods for safer host access to the memory.
#[derive(Debug)]
pub struct MappedRange<'a, B: Backend, C = MaybeCoherent> {
    /// Memory object that is mapped.
    memory: &'a Memory<B>,

    /// Pointer to the mapped memory range.
    ptr: NonNull<u8>,

    /// Range of mapped memory.
    range: Range<u64>,

    /// Coherency marker.
    coherent: C,
}

impl<'a, B> MappedRange<'a, B>
where
    B: Backend,
{
    // /// Map range of memory.
    // /// `range` is in memory object space.
    // ///
    // /// # Safety
    // ///
    // /// * Only one range for the given memory object can be mapped.
    // /// * Memory object must not be already mapped.
    // /// * Memory object must be created with device specified.
    // pub unsafe fn new(
    //     memory: &'a Memory<B>,
    //     device: &B::Device,
    //     range: Range<u64>,
    // ) -> Result<Self, gfx_hal::device::MapError> {
    //     assert!(
    //         range.start < range.end,
    //         "Memory mapping region must have valid size"
    //     );
    //     assert!(
    //         fits_usize(range.end - range.start),
    //         "Range length must fit in usize"
    //     );
    //     assert!(memory.host_visible());

    //     let ptr = device.map_memory(memory.raw(), range.clone())?;
    //     assert!(
    //         (ptr as usize).wrapping_neg() >= (range.end - range.start) as usize,
    //         "Resulting pointer value + range length must fit in usize. Pointer: {:p}, range {:?}",
    //         ptr,
    //         range,
    //     );

    //     Ok(Self::from_raw(memory, NonNull::new_unchecked(ptr), range))
    // }

    /// Construct mapped range from raw mapping.
    ///
    /// # Safety
    ///
    /// The `range` of `memory` must be mapped to the host memory region pointed to by `ptr`.
    /// `range` is in memory object space.
    /// `ptr` points to the `range.start` offset from the memory origin.
    pub unsafe fn from_raw(memory: &'a Memory<B>, ptr: NonNull<u8>, range: Range<u64>) -> Self {
        assert!(
            range.start < range.end,
            "Memory mapping region must have valid size"
        );
        MappedRange {
            ptr,
            range,
            memory,
            coherent: MaybeCoherent(memory.host_coherent()),
        }
    }
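
    // A hedged usage sketch for `from_raw`, mirroring the commented-out `new`
    // above. `memory: &Memory<B>`, `device: &B::Device` and the `0..64` range
    // are placeholder inputs, and the memory is assumed to be host-visible:
    //
    //     let range = 0..64u64;
    //     let ptr = unsafe { device.map_memory(memory.raw(), range.clone())? };
    //     let mapped = unsafe {
    //         MappedRange::from_raw(memory, NonNull::new_unchecked(ptr), range)
    //     };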

    /// Get a pointer to the beginning of the mapped memory region,
    /// i.e. at the `range().start` offset from the memory origin.
    pub fn ptr(&self) -> NonNull<u8> {
        self.ptr
    }

    /// Get mapped range.
    pub fn range(&self) -> Range<u64> {
        self.range.clone()
    }

    /// Fetch a readable slice of the sub-range to be read.
    /// Invalidates the range if the memory is not coherent.
    /// `range.end - range.start` must be a multiple of `size_of::<T>()`.
    /// `mapping offset + range.start` must be a multiple of `align_of::<T>()`.
    ///
    /// # Safety
    ///
    /// * Caller must ensure that device won't write to the memory region until the borrowing ends.
    /// * `T` must be a plain-old-data type compatible with the data in the mapped region.
    pub unsafe fn read<'b, T>(
        &'b mut self,
        device: &B::Device,
        range: Range<u64>,
    ) -> Result<&'b [T], gfx_hal::device::MapError>
    where
        'a: 'b,
        T: Copy,
    {
        assert!(
            range.start < range.end,
            "Memory mapping region must have valid size"
        );
        assert!(
            fits_usize(range.end - range.start),
            "Range length must fit in usize"
        );

        let (ptr, range) = mapped_sub_range(self.ptr, self.range.clone(), range)
            .ok_or_else(|| gfx_hal::device::MapError::OutOfBounds)?;

        let size = (range.end - range.start) as usize;

        // Non-coherent memory must be invalidated before the host reads it.
        if !self.coherent.0 {
            device
                .invalidate_mapped_memory_ranges(Some((self.memory.raw(), self.range.clone())))?;
        }

        let slice = mapped_slice::<T>(ptr, size);
        Ok(slice)
    }
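
    // A hedged usage sketch for `read` (placeholder mutable `mapped` and
    // `device`; assumes bytes 0..64 of the mapped range hold `f32` data written
    // by the device, and that the caller upholds the safety contract above):
    //
    //     let data: &[f32] = unsafe { mapped.read::<f32>(device, 0..64)? };
    //     assert_eq!(data.len(), 16); // 64 bytes / 4 bytes per f32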

    /// Fetch a writer to the sub-region.
    /// This writer will flush data on drop if written at least once.
    ///
    /// # Safety
    ///
    /// * Caller must ensure that device won't write to or read from the memory region.
    pub unsafe fn write<'b, T: 'b>(
        &'b mut self,
        device: &'b B::Device,
        range: Range<u64>,
    ) -> Result<impl Write<T> + 'b, gfx_hal::device::MapError>
    where
        'a: 'b,
        T: Copy,
    {
        assert!(
            range.start < range.end,
            "Memory mapping region must have valid size"
        );
        assert!(
            fits_usize(range.end - range.start),
            "Range length must fit in usize"
        );

        let (ptr, range) = mapped_sub_range(self.ptr, self.range.clone(), range)
            .ok_or_else(|| gfx_hal::device::MapError::OutOfBounds)?;

        let size = (range.end - range.start) as usize;

        // Refresh the host's view of non-coherent memory before exposing a writable slice.
        if !self.coherent.0 {
            device
                .invalidate_mapped_memory_ranges(Some((self.memory.raw(), self.range.clone())))?;
        }

        let slice = mapped_slice_mut::<T>(ptr, size);

        let memory = self.memory;

        Ok(WriteFlush {
            slice,
            // Non-coherent memory needs an explicit flush once the writer is dropped.
            flush: if !self.coherent.0 {
                Some(move || {
                    device
                        .flush_mapped_memory_ranges(Some((memory.raw(), range)))
                        .expect("Should flush successfully");
                })
            } else {
                None
            },
        })
    }
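
    // A hedged usage sketch for `write` (placeholder mutable `mapped`, `device`
    // and data; assumes `Write::write` from the `write` module copies `data`
    // into the mapped slice, and the flush for non-coherent memory happens when
    // `writer` is dropped):
    //
    //     let data = [0u32; 16];
    //     unsafe {
    //         let mut writer = mapped.write::<u32>(device, 0..64)?;
    //         writer.write(&data);
    //     }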

    /// Convert into mapped range with statically known coherency.
    pub fn coherent(self) -> Result<MappedRange<'a, B, Coherent>, MappedRange<'a, B, NonCoherent>> {
        if self.coherent.0 {
            Ok(MappedRange {
                memory: self.memory,
                ptr: self.ptr,
                range: self.range,
                coherent: Coherent,
            })
        } else {
            Err(MappedRange {
                memory: self.memory,
                ptr: self.ptr,
                range: self.range,
                coherent: NonCoherent,
            })
        }
    }
}
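
// A hedged sketch of resolving coherency statically with `coherent()`
// (placeholder `mapped`; the `Ok` branch can use the device-free `write`
// defined on `MappedRange<'_, B, Coherent>` below, while the `Err` branch can
// convert back through the `From` impl and keep using the flushing `write`
// above):
//
//     match mapped.coherent() {
//         Ok(mut coherent) => { /* unsafe { coherent.write::<u32>(0..64) } needs no device */ }
//         Err(non_coherent) => { /* convert back with `MappedRange::from(non_coherent)` */ }
//     }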

impl<'a, B> From<MappedRange<'a, B, Coherent>> for MappedRange<'a, B>
where
    B: Backend,
{
    fn from(range: MappedRange<'a, B, Coherent>) -> Self {
        MappedRange {
            memory: range.memory,
            ptr: range.ptr,
            range: range.range,
            coherent: MaybeCoherent(true),
        }
    }
}

impl<'a, B> From<MappedRange<'a, B, NonCoherent>> for MappedRange<'a, B>
where
    B: Backend,
{
    fn from(range: MappedRange<'a, B, NonCoherent>) -> Self {
        MappedRange {
            memory: range.memory,
            ptr: range.ptr,
            range: range.range,
            coherent: MaybeCoherent(false),
        }
    }
}

impl<'a, B> MappedRange<'a, B, Coherent>
where
    B: Backend,
{
    /// Fetch a writer to the sub-region.
    ///
    /// # Safety
    ///
    /// * Caller must ensure that device won't write to or read from the memory region.
    pub unsafe fn write<'b, U: 'b>(
        &'b mut self,
        range: Range<u64>,
    ) -> Result<impl Write<U> + 'b, gfx_hal::device::MapError>
    where
        U: Copy,
    {
        assert!(
            range.start < range.end,
            "Memory mapping region must have valid size"
        );
        assert!(
            fits_usize(range.end - range.start),
            "Range length must fit in usize"
        );

        let (ptr, range) = mapped_sub_range(self.ptr, self.range.clone(), range)
            .ok_or_else(|| gfx_hal::device::MapError::OutOfBounds)?;

        let size = (range.end - range.start) as usize;

        let slice = mapped_slice_mut::<U>(ptr, size);

        Ok(WriteCoherent { slice })
    }
}
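
// A hedged usage sketch for the coherent `write` (placeholder mutable
// `coherent` value obtained from `MappedRange::coherent()` above; no device
// handle or explicit flush is needed because the memory is host-coherent):
//
//     let data = [0.0f32; 16];
//     unsafe {
//         let mut writer = coherent.write::<f32>(0..64)?;
//         writer.write(&data);
//     }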