1use core::fmt::{Debug, Error, Formatter};
10use core::iter::FromIterator;
11use core::mem::{self, MaybeUninit};
12use core::ops::Index;
13use core::ops::IndexMut;
14use core::ptr;
15use core::slice::{from_raw_parts, from_raw_parts_mut};
16
17#[cfg(feature = "std")]
18use std::collections::{BTreeMap, HashMap};
19
20use bitmaps::{Bitmap, Bits, BitsImpl, Iter as BitmapIter};
21
22mod iter;
23
24pub use self::iter::{Drain, Iter, IterMut, OptionDrain, OptionIter, OptionIterMut};
25
26#[cfg(feature = "refpool")]
27mod refpool;
28
/// A fixed-capacity sparse array of up to `N` values of type `A`.
///
/// Occupancy is tracked out of band by a bitmap: a slot in `data` is only
/// initialised when the corresponding bit in `map` is set. All unsafe code
/// in this module relies on that invariant.
pub struct SparseChunk<A, const N: usize>
where
    BitsImpl<N>: Bits,
{
    // One bit per slot: a set bit means the matching `data` slot holds a
    // live, initialised value.
    map: Bitmap<N>,
    // Backing storage. Slots whose bit in `map` is unset are uninitialised
    // memory and must never be read or dropped.
    data: MaybeUninit<[A; N]>,
}
58
59impl<A, const N: usize> Drop for SparseChunk<A, N>
60where
61 BitsImpl<N>: Bits,
62{
63 fn drop(&mut self) {
64 if mem::needs_drop::<A>() {
65 let bits = self.map;
66 for index in &bits {
67 unsafe { ptr::drop_in_place(&mut self.values_mut()[index]) }
68 }
69 }
70 }
71}
72
73impl<A: Clone, const N: usize> Clone for SparseChunk<A, N>
74where
75 BitsImpl<N>: Bits,
76{
77 fn clone(&self) -> Self {
78 let mut out = Self::new();
79 for index in &self.map {
80 out.insert(index, self[index].clone());
81 }
82 out
83 }
84}
85
86impl<A, const N: usize> SparseChunk<A, N>
87where
88 BitsImpl<N>: Bits,
89{
90 pub const CAPACITY: usize = N;
92
93 #[inline]
94 fn values(&self) -> &[A] {
95 unsafe { from_raw_parts(&self.data as *const _ as *const A, N) }
96 }
97
98 #[inline]
99 fn values_mut(&mut self) -> &mut [A] {
100 unsafe { from_raw_parts_mut(&mut self.data as *mut _ as *mut A, N) }
101 }
102
103 #[inline]
105 unsafe fn force_read(index: usize, chunk: &Self) -> A {
106 ptr::read(&chunk.values()[index as usize])
107 }
108
109 #[inline]
111 unsafe fn force_write(index: usize, value: A, chunk: &mut Self) {
112 ptr::write(&mut chunk.values_mut()[index as usize], value)
113 }
114
115 pub fn new() -> Self {
117 Self {
118 map: Bitmap::default(),
119 data: MaybeUninit::uninit(),
120 }
121 }
122
123 pub fn unit(index: usize, value: A) -> Self {
125 let mut chunk = Self::new();
126 chunk.insert(index, value);
127 chunk
128 }
129
130 pub fn pair(index1: usize, value1: A, index2: usize, value2: A) -> Self {
132 let mut chunk = Self::new();
133 chunk.insert(index1, value1);
134 chunk.insert(index2, value2);
135 chunk
136 }
137
138 #[inline]
140 pub fn len(&self) -> usize {
141 self.map.len()
142 }
143
144 #[inline]
146 pub fn is_empty(&self) -> bool {
147 self.map.len() == 0
148 }
149
150 #[inline]
152 pub fn is_full(&self) -> bool {
153 self.len() == N
154 }
155
156 pub fn insert(&mut self, index: usize, value: A) -> Option<A> {
160 if index >= N {
161 panic!("SparseChunk::insert: index out of bounds");
162 }
163 if self.map.set(index, true) {
164 Some(mem::replace(&mut self.values_mut()[index], value))
165 } else {
166 unsafe { SparseChunk::force_write(index, value, self) };
167 None
168 }
169 }
170
171 pub fn remove(&mut self, index: usize) -> Option<A> {
175 if index >= N {
176 panic!("SparseChunk::remove: index out of bounds");
177 }
178 if self.map.set(index, false) {
179 Some(unsafe { SparseChunk::force_read(index, self) })
180 } else {
181 None
182 }
183 }
184
185 pub fn pop(&mut self) -> Option<A> {
189 self.first_index().and_then(|index| self.remove(index))
190 }
191
192 pub fn get(&self, index: usize) -> Option<&A> {
194 if index >= N {
195 return None;
196 }
197 if self.map.get(index) {
198 Some(unsafe { self.get_unchecked(index) })
199 } else {
200 None
201 }
202 }
203
204 pub fn get_mut(&mut self, index: usize) -> Option<&mut A> {
206 if index >= N {
207 return None;
208 }
209 if self.map.get(index) {
210 Some(unsafe { self.get_unchecked_mut(index) })
211 } else {
212 None
213 }
214 }
215
216 pub unsafe fn get_unchecked(&self, index: usize) -> &A {
223 self.values().get_unchecked(index)
224 }
225
226 pub unsafe fn get_unchecked_mut(&mut self, index: usize) -> &mut A {
233 self.values_mut().get_unchecked_mut(index)
234 }
235
236 pub fn indices(&self) -> BitmapIter<'_, N> {
238 self.map.into_iter()
239 }
240
241 pub fn first_index(&self) -> Option<usize> {
243 self.map.first_index()
244 }
245
246 pub fn iter(&self) -> Iter<'_, A, N> {
248 Iter {
249 indices: self.indices(),
250 chunk: self,
251 }
252 }
253
254 pub fn iter_mut(&mut self) -> IterMut<'_, A, N> {
257 IterMut {
258 bitmap: self.map,
259 chunk: self,
260 }
261 }
262
263 pub fn drain(self) -> Drain<A, N> {
265 Drain { chunk: self }
266 }
267
268 pub fn entries(&self) -> impl Iterator<Item = (usize, &A)> {
271 self.indices().zip(self.iter())
272 }
273
274 pub fn option_iter(&self) -> OptionIter<'_, A, N> {
279 OptionIter {
280 chunk: self,
281 index: 0,
282 }
283 }
284
285 pub fn option_iter_mut(&mut self) -> OptionIterMut<'_, A, N> {
290 OptionIterMut {
291 chunk: self,
292 index: 0,
293 }
294 }
295
296 pub fn option_drain(self) -> OptionDrain<A, N> {
301 OptionDrain {
302 chunk: self,
303 index: 0,
304 }
305 }
306}
307
308impl<A, const N: usize> Default for SparseChunk<A, N>
309where
310 BitsImpl<N>: Bits,
311{
312 fn default() -> Self {
313 Self::new()
314 }
315}
316
317impl<A, const N: usize> Index<usize> for SparseChunk<A, N>
318where
319 BitsImpl<N>: Bits,
320{
321 type Output = A;
322
323 #[inline]
324 fn index(&self, index: usize) -> &Self::Output {
325 self.get(index).unwrap()
326 }
327}
328
329impl<A, const N: usize> IndexMut<usize> for SparseChunk<A, N>
330where
331 BitsImpl<N>: Bits,
332{
333 #[inline]
334 fn index_mut(&mut self, index: usize) -> &mut Self::Output {
335 self.get_mut(index).unwrap()
336 }
337}
338
339impl<A, const N: usize> IntoIterator for SparseChunk<A, N>
340where
341 BitsImpl<N>: Bits,
342{
343 type Item = A;
344 type IntoIter = Drain<A, N>;
345
346 #[inline]
347 fn into_iter(self) -> Self::IntoIter {
348 self.drain()
349 }
350}
351
352impl<A, const N: usize> FromIterator<Option<A>> for SparseChunk<A, N>
353where
354 BitsImpl<N>: Bits,
355{
356 fn from_iter<I>(iter: I) -> Self
357 where
358 I: IntoIterator<Item = Option<A>>,
359 {
360 let mut out = Self::new();
361 for (index, value) in iter.into_iter().enumerate() {
362 if let Some(value) = value {
363 out.insert(index, value);
364 }
365 }
366 out
367 }
368}
369
370impl<A, const N: usize> PartialEq for SparseChunk<A, N>
371where
372 A: PartialEq,
373 BitsImpl<N>: Bits,
374{
375 fn eq(&self, other: &Self) -> bool {
376 if self.map != other.map {
377 return false;
378 }
379 for index in self.indices() {
380 if self.get(index) != other.get(index) {
381 return false;
382 }
383 }
384 true
385 }
386}
387
#[cfg(feature = "std")]
impl<A, const N: usize> PartialEq<BTreeMap<usize, A>> for SparseChunk<A, N>
where
    A: PartialEq,
    BitsImpl<N>: Bits,
{
    /// A chunk equals a `BTreeMap` when both hold the same index/value
    /// pairs and nothing else.
    fn eq(&self, other: &BTreeMap<usize, A>) -> bool {
        // Equal lengths plus every chunk entry matching implies the map
        // has no extra keys either.
        self.len() == other.len()
            && self
                .indices()
                .all(|index| self.get(index) == other.get(&index))
    }
}
406
#[cfg(feature = "std")]
impl<A, const N: usize> PartialEq<HashMap<usize, A>> for SparseChunk<A, N>
where
    A: PartialEq,
    BitsImpl<N>: Bits,
{
    /// A chunk equals a `HashMap` when both hold the same index/value
    /// pairs and nothing else.
    fn eq(&self, other: &HashMap<usize, A>) -> bool {
        // Equal lengths plus every chunk entry matching implies the map
        // has no extra keys either.
        self.len() == other.len()
            && self
                .indices()
                .all(|index| self.get(index) == other.get(&index))
    }
}
425
// Marker impl: equality is a full equivalence relation whenever `A: Eq`.
// All behaviour comes from the `PartialEq` impl above.
impl<A, const N: usize> Eq for SparseChunk<A, N>
where
    A: Eq,
    BitsImpl<N>: Bits,
{
}
432
433impl<A, const N: usize> Debug for SparseChunk<A, N>
434where
435 A: Debug,
436 BitsImpl<N>: Bits,
437{
438 fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
439 f.write_str("SparseChunk")?;
440 f.debug_map().entries(self.entries()).finish()
441 }
442}
443
#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn insert_remove_iterate() {
        let mut chunk: SparseChunk<_, 32> = SparseChunk::new();
        // Inserting into empty slots yields no previous value.
        for &(index, value) in &[(5, 5), (1, 1), (24, 42), (22, 22)] {
            assert_eq!(None, chunk.insert(index, value));
        }
        // Overwriting an occupied slot hands back the old value.
        assert_eq!(Some(42), chunk.insert(24, 24));
        assert_eq!(None, chunk.insert(31, 31));
        assert_eq!(Some(24), chunk.remove(24));
        assert_eq!(4, chunk.len());
        // Both index and value iteration run in ascending index order.
        let indices: Vec<_> = chunk.indices().collect();
        assert_eq!(vec![1, 5, 22, 31], indices);
        let values: Vec<_> = chunk.into_iter().collect();
        assert_eq!(vec![1, 5, 22, 31], values);
    }

    #[test]
    fn clone_chunk() {
        let mut chunk: SparseChunk<_, 32> = SparseChunk::new();
        for &(index, value) in &[(5, 5), (1, 1), (24, 42), (22, 22)] {
            assert_eq!(None, chunk.insert(index, value));
        }
        let cloned = chunk.clone();
        // A clone must agree with the original on indices and values.
        let right_indices: Vec<_> = chunk.indices().collect();
        let left_indices: Vec<_> = cloned.indices().collect();
        let right: Vec<_> = chunk.into_iter().collect();
        let left: Vec<_> = cloned.into_iter().collect();
        assert_eq!(left, right);
        assert_eq!(left_indices, right_indices);
        assert_eq!(vec![1, 5, 22, 24], left_indices);
        assert_eq!(vec![1, 5, 22, 24], right_indices);
    }

    use crate::tests::DropTest;
    use std::sync::atomic::{AtomicUsize, Ordering};

    #[test]
    fn dropping() {
        let counter = AtomicUsize::new(0);
        {
            let mut chunk: SparseChunk<DropTest<'_>, 64> = SparseChunk::new();
            for index in 0..40 {
                chunk.insert(index, DropTest::new(&counter));
            }
            // 40 live values after insertion.
            assert_eq!(40, counter.load(Ordering::Relaxed));
            for index in 0..20 {
                chunk.remove(index);
            }
            // Removal drops each removed value exactly once.
            assert_eq!(20, counter.load(Ordering::Relaxed));
        }
        // Dropping the chunk drops the remaining 20 — no leaks, no
        // double drops.
        assert_eq!(0, counter.load(Ordering::Relaxed));
    }

    #[test]
    fn equality() {
        let mut left = SparseChunk::<usize, 64>::new();
        for index in 0..32 {
            left.insert(index, index);
        }
        let mut right = left.clone();
        assert_eq!(left, right);
        // Same occupancy, different values at shared indices.
        for index in 4..8 {
            right.insert(index, 0);
        }
        assert_ne!(left, right);
        // Different occupancy.
        right = left.clone();
        for index in 0..16 {
            right.remove(index);
        }
        assert_ne!(left, right);
    }
}