// rendy_memory/allocator/linear.rs

1use std::{collections::VecDeque, ops::Range, ptr::NonNull};
2
3use {
4    crate::{
5        allocator::{Allocator, Kind},
6        block::Block,
7        mapping::*,
8        memory::*,
9        util::*,
10    },
11    gfx_hal::{device::Device as _, Backend},
12    std::sync::Arc,
13};
14
/// Memory block allocated from `LinearAllocator`.
///
/// Holds a shared handle to its line's memory; the line can only be
/// reclaimed once every block allocated from it has been freed.
pub struct LinearBlock<B: Backend> {
    // Shared with the owning `Line`; `Arc::try_unwrap` in `cleanup` succeeds
    // only when no block still holds a reference.
    memory: Arc<Memory<B>>,
    // Global index of the originating line: allocator `offset` plus the
    // line's position in the queue at allocation time.
    linear_index: u64,
    // Pointer into the line's host mapping, covering exactly `range`.
    ptr: NonNull<u8>,
    // Byte range of this block within the line's memory.
    range: Range<u64>,
    // Leak guard: must be consumed via `dispose` (done in `Allocator::free`).
    relevant: relevant::Relevant,
}
23
24impl<B> std::fmt::Debug for LinearBlock<B>
25where
26    B: Backend,
27{
28    fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
29        fmt.debug_struct("LinearBlock")
30            .field("memory", &*self.memory)
31            .field("linear_index", &self.linear_index)
32            .field("ptr", &self.ptr)
33            .field("range", &self.range)
34            .finish()
35    }
36}
37
// SAFETY: `LinearBlock` is not auto-Send/Sync only because of the raw
// `NonNull<u8>` into persistently mapped memory; the other fields are
// Send + Sync. Presumably synchronization of access through the mapped
// pointer is the caller's responsibility — NOTE(review): confirm against
// the crate's documented threading contract.
unsafe impl<B> Send for LinearBlock<B> where B: Backend {}
unsafe impl<B> Sync for LinearBlock<B> where B: Backend {}
40
41impl<B> LinearBlock<B>
42where
43    B: Backend,
44{
45    fn size(&self) -> u64 {
46        self.range.end - self.range.start
47    }
48
49    fn dispose(self) {
50        self.relevant.dispose();
51    }
52}
53
54impl<B> Block<B> for LinearBlock<B>
55where
56    B: Backend,
57{
58    #[inline]
59    fn properties(&self) -> gfx_hal::memory::Properties {
60        self.memory.properties()
61    }
62
63    #[inline]
64    fn memory(&self) -> &B::Memory {
65        self.memory.raw()
66    }
67
68    #[inline]
69    fn range(&self) -> Range<u64> {
70        self.range.clone()
71    }
72
73    #[inline]
74    fn map<'a>(
75        &'a mut self,
76        _device: &B::Device,
77        range: Range<u64>,
78    ) -> Result<MappedRange<'a, B>, gfx_hal::device::MapError> {
79        assert!(
80            range.start < range.end,
81            "Memory mapping region must have valid size"
82        );
83        if !self.memory.host_visible() {
84            //TODO: invalid access error
85            return Err(gfx_hal::device::MapError::MappingFailed);
86        }
87
88        if let Some((ptr, range)) = mapped_sub_range(self.ptr, self.range.clone(), range) {
89            let mapping = unsafe { MappedRange::from_raw(&*self.memory, ptr, range) };
90            Ok(mapping)
91        } else {
92            Err(gfx_hal::device::MapError::OutOfBounds)
93        }
94    }
95
96    #[inline]
97    fn unmap(&mut self, _device: &B::Device) {
98        debug_assert!(self.memory.host_visible());
99    }
100}
101
/// Config for `LinearAllocator`.
#[derive(Clone, Copy, Debug)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct LinearConfig {
    /// Size in bytes of each linear chunk (line).
    /// Keep it big: many allocations are carved out of one line, and the
    /// largest single allocation allowed is half of this value
    /// (see `LinearAllocator::max_allocation`).
    pub linear_size: u64,
}
110
/// Linear allocator that returns memory from a chunk sequentially.
/// It keeps only the number of bytes allocated from each chunk.
/// Once a chunk is exhausted it is placed into a list.
/// When all blocks allocated from the head of that list are freed,
/// the head is freed as well.
///
/// This allocator suits short-lived allocations best.
/// The allocation strategy requires minimal overhead and the implementation is fast.
/// But holding a single block will completely stop memory recycling.
#[derive(Debug)]
pub struct LinearAllocator<B: Backend> {
    // Device memory type this allocator draws from.
    memory_type: gfx_hal::MemoryTypeId,
    // Properties of that memory type; must include CPU_VISIBLE.
    memory_properties: gfx_hal::memory::Properties,
    // Size in bytes of every line (from `LinearConfig::linear_size`).
    linear_size: u64,
    // Number of lines popped from the front so far; added to a line's queue
    // position to form a stable global `linear_index` for blocks.
    offset: u64,
    // Live lines, oldest at the front; only the back line receives new
    // allocations, only the front line is ever reclaimed.
    lines: VecDeque<Line<B>>,
}
128
/// One chunk of device memory carved out sequentially (bump allocation).
#[derive(Debug)]
struct Line<B: Backend> {
    // High-water mark: bytes consumed from the start of the line,
    // including alignment padding.
    used: u64,
    // Bytes returned so far (freed blocks plus alignment padding counted as
    // free immediately); the line is reclaimable once `free` reaches `used`.
    free: u64,
    // Shared with every live `LinearBlock` allocated from this line.
    memory: Arc<Memory<B>>,
    // Start of the line's persistent host mapping.
    ptr: NonNull<u8>,
}
136
// SAFETY: `Line` is not auto-Send/Sync only because of the raw
// `NonNull<u8>` to its persistent host mapping; remaining fields are
// Send + Sync. Presumably mapped-memory access is externally synchronized —
// NOTE(review): confirm against the crate's documented threading contract.
unsafe impl<B> Send for Line<B> where B: Backend {}
unsafe impl<B> Sync for Line<B> where B: Backend {}
139
140impl<B> LinearAllocator<B>
141where
142    B: Backend,
143{
144    /// Get properties required by the `LinearAllocator`.
145    pub fn properties_required() -> gfx_hal::memory::Properties {
146        gfx_hal::memory::Properties::CPU_VISIBLE
147    }
148
149    /// Maximum allocation size.
150    pub fn max_allocation(&self) -> u64 {
151        self.linear_size / 2
152    }
153
154    /// Create new `LinearAllocator`
155    /// for `memory_type` with `memory_properties` specified,
156    /// with `LinearConfig` provided.
157    pub fn new(
158        memory_type: gfx_hal::MemoryTypeId,
159        memory_properties: gfx_hal::memory::Properties,
160        config: LinearConfig,
161    ) -> Self {
162        log::trace!(
163            "Create new 'linear' allocator: type: '{:?}', properties: '{:#?}' config: '{:#?}'",
164            memory_type,
165            memory_properties,
166            config
167        );
168        assert!(memory_properties.contains(Self::properties_required()));
169        assert!(
170            fits_usize(config.linear_size),
171            "Linear size must fit in both usize and u64"
172        );
173        LinearAllocator {
174            memory_type,
175            memory_properties,
176            linear_size: config.linear_size,
177            offset: 0,
178            lines: VecDeque::new(),
179        }
180    }
181
182    /// Perform full cleanup of the memory allocated.
183    pub fn dispose(mut self, device: &B::Device) {
184        let _ = self.cleanup(device, 0);
185        if !self.lines.is_empty() {
186            log::error!(
187                "Lines are not empty during allocator disposal. Lines: {:#?}",
188                self.lines
189            );
190        }
191    }
192
193    fn cleanup(&mut self, device: &B::Device, off: usize) -> u64 {
194        let mut freed = 0;
195        while self.lines.len() > off {
196            if self.lines[0].used > self.lines[0].free {
197                break;
198            }
199
200            let line = self.lines.pop_front().unwrap();
201            self.offset += 1;
202
203            unsafe {
204                match Arc::try_unwrap(line.memory) {
205                    Ok(memory) => {
206                        // trace!("Unmap memory: {:#?}", line.memory);
207                        device.unmap_memory(memory.raw());
208
209                        freed += memory.size();
210                        device.free_memory(memory.into_raw());
211                    }
212                    Err(_) => log::error!("Allocated `Line` was freed, but memory is still shared and never will be destroyed"),
213                }
214            }
215        }
216        freed
217    }
218}
219
impl<B> Allocator<B> for LinearAllocator<B>
where
    B: Backend,
{
    type Block = LinearBlock<B>;

    fn kind() -> Kind {
        Kind::Linear
    }

    /// Sub-allocate `size` bytes aligned to `align`, bump-allocating from
    /// the tail line or creating a new line when it does not fit.
    ///
    /// Returns the block plus the amount of memory freshly allocated from
    /// the device (0 when served from an existing line, `linear_size` when
    /// a new line was created). Panics if `size` or `align` exceeds the
    /// configured line size.
    fn alloc(
        &mut self,
        device: &B::Device,
        size: u64,
        align: u64,
    ) -> Result<(LinearBlock<B>, u64), gfx_hal::device::AllocationError> {
        debug_assert!(self
            .memory_properties
            .contains(gfx_hal::memory::Properties::CPU_VISIBLE));

        assert!(size <= self.linear_size);
        assert!(align <= self.linear_size);

        let count = self.lines.len() as u64;
        if let Some(line) = self.lines.back_mut() {
            // Bump pointer rounded up to the requested alignment.
            let aligned = aligned(line.used, align);
            // Padding bytes are never handed to any block, so they are
            // counted as already-free to keep used/free bookkeeping balanced.
            let overhead = aligned - line.used;
            // NOTE(review): `>` rejects a request that would exactly fill the
            // line (aligned + size == linear_size) and forces a fresh line;
            // `>=` looks sound but would change allocation behavior — confirm
            // before changing.
            if self.linear_size - size > aligned {
                line.used = aligned + size;
                line.free += overhead;
                let (ptr, range) =
                    mapped_sub_range(line.ptr, 0..self.linear_size, aligned..aligned + size)
                        .expect("This sub-range must fit in line mapping");

                return Ok((
                    LinearBlock {
                        // Tail line's global index: lines already popped
                        // (`offset`) plus its position (`count - 1`).
                        linear_index: self.offset + count - 1,
                        memory: line.memory.clone(),
                        ptr,
                        range,
                        relevant: relevant::Relevant,
                    },
                    0,
                ));
            }
        }

        // No usable tail line: allocate a fresh one and map it persistently.
        let (memory, ptr) = unsafe {
            let raw = device.allocate_memory(self.memory_type, self.linear_size)?;

            let ptr = match device.map_memory(&raw, 0..self.linear_size) {
                Ok(ptr) => NonNull::new_unchecked(ptr),
                Err(gfx_hal::device::MapError::OutOfMemory(error)) => {
                    // Roll back the allocation so mapping failure doesn't leak.
                    device.free_memory(raw);
                    return Err(error.into());
                }
                Err(_) => panic!("Unexpected mapping failure"),
            };

            let memory = Memory::from_raw(raw, self.linear_size, self.memory_properties);

            (memory, ptr)
        };

        let line = Line {
            used: size,
            free: 0,
            ptr,
            memory: Arc::new(memory),
        };

        // First block starts at offset 0 of the new line.
        let (ptr, range) = mapped_sub_range(ptr, 0..self.linear_size, 0..size)
            .expect("This sub-range must fit in line mapping");

        let block = LinearBlock {
            linear_index: self.offset + count,
            memory: line.memory.clone(),
            ptr,
            range,
            relevant: relevant::Relevant,
        };

        self.lines.push_back(line);
        // The entire new line counts as freshly allocated device memory.
        Ok((block, self.linear_size))
    }

    /// Return `block`'s bytes to its line, then reclaim any fully-freed
    /// lines from the front (always keeping the newest line alive).
    /// Returns the number of bytes released back to the device.
    fn free(&mut self, device: &B::Device, block: Self::Block) -> u64 {
        // Translate the block's stable global index into a queue position.
        let index = block.linear_index - self.offset;
        assert!(
            fits_usize(index),
            "This can't exceed lines list length which fits into usize by definition"
        );
        let index = index as usize;
        assert!(
            index < self.lines.len(),
            "Can't be allocated from not yet created line"
        );
        {
            let ref mut line = self.lines[index];
            line.free += block.size();
        }
        block.dispose();

        // `off = 1` keeps the tail line for future allocations even if empty.
        self.cleanup(device, 1)
    }
}