// slice_deque/mirrored/buffer.rs
use super::*;

5fn no_required_allocation_units(bytes: usize) -> usize {
7 let ag = allocation_granularity();
8 let r = ((bytes + ag - 1) / ag).max(1);
9 let r = if r % 2 == 0 { r } else { r + 1 };
10 debug_assert!(r * ag >= bytes);
11 debug_assert!(r % 2 == 0);
12 r
13}
14
/// Mirrored memory buffer of `len` elements.
///
/// The elements in the range `[0, len/2)` are mirrored into the range
/// `[len/2, len)`: both halves are backed by the same physical memory
/// (see `uninitialized`, which obtains the region via
/// `allocate_mirrored`).
pub struct Buffer<T> {
    // Pointer to the first element of the lower half. Dangling for
    // empty buffers and for zero-sized `T` (see `new`/`empty_len`).
    ptr: NonNull<T>,
    // Total element count across both halves; always even (enforced by
    // the assertions in `from_raw_parts` and `uninitialized`).
    len: usize,
}
29
impl<T> Buffer<T> {
    /// Number of elements in the buffer (counting both mirrored halves).
    pub fn len(&self) -> usize {
        self.len
    }

    /// Is the buffer empty?
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Pointer to the first element of the lower half.
    ///
    /// # Safety
    ///
    /// The pointer is dangling when the buffer is empty or `T` is
    /// zero-sized; it must not be dereferenced in those cases.
    pub unsafe fn ptr(&self) -> *mut T {
        self.ptr.as_ptr()
    }

    /// Interprets the whole buffer (both halves) as a slice.
    ///
    /// # Safety
    ///
    /// All `len()` elements must be properly initialized.
    pub unsafe fn as_slice(&self) -> &[T] {
        slice::from_raw_parts(self.ptr.as_ptr(), self.len())
    }

    /// Interprets the whole buffer (both halves) as a mutable slice.
    ///
    /// # Safety
    ///
    /// All `len()` elements must be properly initialized.
    pub unsafe fn as_mut_slice(&mut self) -> &mut [T] {
        slice::from_raw_parts_mut(self.ptr.as_ptr(), self.len())
    }

    /// Reference to the `i`-th element.
    ///
    /// # Safety
    ///
    /// The element must be initialized; panics if `i >= len()`.
    pub unsafe fn get(&self, i: usize) -> &T {
        &self.as_slice()[i]
    }

    /// Mutable reference to the `i`-th element.
    ///
    /// # Safety
    ///
    /// The element must be initialized; panics if `i >= len()`.
    pub unsafe fn get_mut(&mut self, i: usize) -> &mut T {
        &mut self.as_mut_slice()[i]
    }

    // `len` sentinel of an empty buffer: zero for sized types, and the
    // largest possible even value for zero-sized types (no allocation
    // is ever performed for ZSTs, so the capacity is effectively
    // unbounded; `2 * isize::MAX` wraps to `usize::MAX - 1`, which is
    // even as the rest of the code requires).
    fn empty_len() -> usize {
        if mem::size_of::<T>() == 0 {
            isize::max_value() as usize * 2
        } else {
            0
        }
    }

    /// Creates a new empty buffer. Does not allocate.
    pub fn new() -> Self {
        Self {
            ptr: NonNull::dangling(),
            len: Self::empty_len(),
        }
    }

    /// Builds a buffer from its raw parts.
    ///
    /// # Safety
    ///
    /// `ptr` must point to the start of a live mirrored allocation of
    /// `len` elements (as produced by `uninitialized`); ownership of
    /// the allocation is transferred to the returned buffer.
    pub unsafe fn from_raw_parts(ptr: *mut T, len: usize) -> Self {
        assert!(len % 2 == 0);
        assert!(!ptr.is_null());
        if mem::size_of::<T>() == 0 {
            debug_assert_eq!(len, Self::empty_len());
        }
        Self {
            ptr: NonNull::new_unchecked(ptr),
            len,
        }
    }

    /// Total allocation size in bytes for a buffer of `len` elements,
    /// rounded up to an even number of allocation-granularity units.
    pub fn size_in_bytes(len: usize) -> usize {
        let v = no_required_allocation_units(len * mem::size_of::<T>())
            * allocation_granularity();
        debug_assert!(
            v >= len * mem::size_of::<T>(),
            "len: {}, so<T>: {}, v: {}",
            len,
            mem::size_of::<T>(),
            v
        );
        v
    }

    /// Allocates an uninitialized mirrored buffer able to hold at
    /// least `len` elements (`len` must be even). The resulting
    /// `len()` may be larger than requested due to granularity
    /// rounding.
    ///
    /// # Errors
    ///
    /// Returns `AllocError` if the mirrored allocation fails.
    pub fn uninitialized(len: usize) -> Result<Self, AllocError> {
        // Zero-sized types never allocate; use the ZST sentinel length.
        if mem::size_of::<T>() == 0 {
            return Ok(Self {
                ptr: NonNull::dangling(),
                len: Self::empty_len(),
            });
        }
        // Mirrored pages are aligned to the allocation granularity, so
        // that is the strongest alignment this buffer can provide.
        assert!(mem::align_of::<T>() <= allocation_granularity());
        if len == 0 {
            return Ok(Self::new());
        }
        assert!(len % 2 == 0);

        let alloc_size = Self::size_in_bytes(len);
        debug_assert!(alloc_size > 0);
        debug_assert!(alloc_size % 2 == 0);
        debug_assert!(alloc_size % allocation_granularity() == 0);
        debug_assert!(alloc_size >= len * mem::size_of::<T>());

        let ptr = allocate_mirrored(alloc_size)?;
        Ok(Self {
            // SAFETY: `allocate_mirrored` succeeded, so `ptr` is
            // non-null.
            ptr: unsafe { NonNull::new_unchecked(ptr as *mut T) },
            // Report the full (rounded-up) capacity, not the request.
            len: alloc_size / mem::size_of::<T>(),
        })
    }
}
159
160impl<T> Drop for Buffer<T> {
161 fn drop(&mut self) {
162 if mem::size_of::<T>() == 0 {
163 debug_assert_eq!(self.len, Self::empty_len());
164 return;
165 }
166 if self.is_empty() {
167 return;
168 }
169
170 let buffer_size_in_bytes = Self::size_in_bytes(self.len());
171 let first_half_ptr = self.ptr.as_ptr() as *mut u8;
172 unsafe { deallocate_mirrored(first_half_ptr, buffer_size_in_bytes) };
173 }
174}
175
176impl<T> Clone for Buffer<T>
177where
178 T: Clone,
179{
180 fn clone(&self) -> Self {
181 unsafe {
182 let mid = self.len() / 2;
183 let mut c = Self::uninitialized(self.len())
184 .expect("allocating a new mirrored buffer failed");
185 let (from, _) = self.as_slice().split_at(mid);
186 {
187 let (to, _) = c.as_mut_slice().split_at_mut(mid);
188 to[..mid].clone_from_slice(&from[..mid]);
189 }
190 c
191 }
192 }
193}
194
195impl<T> Default for Buffer<T> {
196 fn default() -> Self {
197 Self::new()
198 }
199}
200
// SAFETY: `Buffer` uniquely owns its allocation, so moving it to
// another thread is sound whenever the element type itself is `Send`.
unsafe impl<T> Send for Buffer<T> where T: Send {}
// SAFETY: a shared `&Buffer` only hands out `&T` (via `as_slice`/`get`),
// so sharing it across threads is sound when `T` is `Sync`.
unsafe impl<T> Sync for Buffer<T> where T: Sync {}
205
#[cfg(test)]
mod tests {
    use super::*;

    /// Compile-time witness that `T: Send + Sync`.
    fn is_send_sync<T>() -> bool
    where
        T: Send + Sync,
    {
        true
    }

    #[test]
    fn buffer_send_sync() {
        assert!(is_send_sync::<Buffer<usize>>());
    }

    #[test]
    fn test_new() {
        let buf = Buffer::<u64>::new();
        assert!(buf.is_empty());
    }

    /// Allocates a buffer for `size` elements, writes the first half,
    /// and checks that the second half mirrors it element-for-element.
    fn test_alloc(size: usize) {
        unsafe {
            let mut buf = Buffer::<u64>::uninitialized(size).unwrap();
            let cap = buf.len();
            assert!(cap >= size);
            assert_eq!(
                cap,
                Buffer::<u64>::size_in_bytes(size) / mem::size_of::<u64>()
            );

            let half = cap / 2;
            for idx in 0..half {
                *buf.get_mut(idx) = idx as u64;
            }

            let (lower, upper) = buf.as_mut_slice().split_at_mut(half);

            let mut compared = 0;
            for (a, b) in lower.iter().zip(upper) {
                assert_eq!(a, b);
                compared += 1;
            }
            assert_eq!(compared, half);
        }
    }

    #[test]
    fn allocations() {
        let per_unit = allocation_granularity() / mem::size_of::<u64>();
        // Exercise sub-unit, exactly-one-unit, and multi-unit sizes.
        for &size in &[8, per_unit / 2, per_unit, per_unit * 4] {
            test_alloc(size);
        }
    }

    #[test]
    fn no_alloc_units_required() {
        let ag = allocation_granularity();
        // Anything up to two granularity units rounds to the minimum
        // even count of two; afterwards the count grows in steps of two.
        let cases = [
            (ag / 4, 2),
            (ag / 2, 2),
            (ag, 2),
            (ag + 1, 2),
            (2 * ag, 2),
            (3 * ag, 4),
            (4 * ag, 4),
            (5 * ag, 6),
        ];
        for &(bytes, units) in &cases {
            assert_eq!(no_required_allocation_units(bytes), units);
        }
    }
}