#[cfg(not(feature = "std"))]
use alloc::vec::Vec;

#[cfg(feature = "std")]
mod rstd {
    pub use std::*;
}
#[cfg(not(feature = "std"))]
mod rstd {
    pub use core::*;
    pub mod collections {
        pub use alloc::collections::*;
        pub use vec_deque::VecDeque;
    }
}

#[cfg(feature = "std")]
use std::sync::Arc;

#[cfg(not(feature = "std"))]
pub use alloc::boxed::Box;
#[cfg(not(feature = "std"))]
use core::ffi::c_void;
#[cfg(feature = "std")]
use rstd::hash::Hash;
use rstd::{
    marker::PhantomData,
    mem::size_of,
    ops::{Deref, DerefMut, Range},
};
#[cfg(feature = "std")]
use std::hash::BuildHasher;
#[cfg(feature = "std")]
use std::os::raw::c_void;

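/// A C-style function that, given a pointer to a heap allocation, returns the
/// usable size of that allocation (e.g. a thin wrapper around
/// `malloc_usable_size`).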
pub type VoidPtrToSizeFn = unsafe extern "C" fn(ptr: *const c_void) -> usize;

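/// A closure that takes a pointer and reports whether it has already been seen
/// during the current measurement, used to avoid double-counting shared data.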
pub type VoidPtrToBoolFnMut = dyn FnMut(*const c_void) -> bool;

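/// The operations used when measuring how much heap memory a value uses.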
pub struct MallocSizeOfOps {
    /// A function that returns the size of a heap allocation.
    size_of_op: VoidPtrToSizeFn,

    /// Like `size_of_op`, but can take an interior pointer and returns the
    /// size of the enclosing allocation; used for containers such as
    /// `VecDeque` where only interior pointers are obtainable.
    enclosing_size_of_op: Option<VoidPtrToSizeFn>,

    /// Tracks pointers that have already been measured, so shared values
    /// (e.g. behind `Arc`) are only counted once.
    have_seen_ptr_op: Option<Box<VoidPtrToBoolFnMut>>,
}

impl MallocSizeOfOps {
    pub fn new(
        size_of: VoidPtrToSizeFn,
        malloc_enclosing_size_of: Option<VoidPtrToSizeFn>,
        have_seen_ptr: Option<Box<VoidPtrToBoolFnMut>>,
    ) -> Self {
        MallocSizeOfOps {
            size_of_op: size_of,
            enclosing_size_of_op: malloc_enclosing_size_of,
            have_seen_ptr_op: have_seen_ptr,
        }
    }

    /// Check if an allocation is empty. Zero-sized allocations hand out small
    /// sentinel (dangling) pointers rather than real heap blocks, so any
    /// pointer at or below 256 is conservatively treated as owning no heap
    /// memory.
    fn is_empty<T: ?Sized>(ptr: *const T) -> bool {
        ptr as *const usize as usize <= 256
    }

    /// Call `size_of_op` on `ptr`, first checking that the allocation isn't empty.
    pub unsafe fn malloc_size_of<T: ?Sized>(&self, ptr: *const T) -> usize {
        if MallocSizeOfOps::is_empty(ptr) {
            0
        } else {
            (self.size_of_op)(ptr as *const c_void)
        }
    }

    /// Is an `enclosing_size_of_op` available?
    pub fn has_malloc_enclosing_size_of(&self) -> bool {
        self.enclosing_size_of_op.is_some()
    }

    /// Call `enclosing_size_of_op`, which must be available, on `ptr`, which must not be empty.
    pub unsafe fn malloc_enclosing_size_of<T>(&self, ptr: *const T) -> usize {
        assert!(!MallocSizeOfOps::is_empty(ptr));
        (self.enclosing_size_of_op.unwrap())(ptr as *const c_void)
    }

    /// Call `have_seen_ptr_op` on `ptr`; panics if no `have_seen_ptr_op` was provided.
    pub fn have_seen_ptr<T>(&mut self, ptr: *const T) -> bool {
        let have_seen_ptr_op = self.have_seen_ptr_op.as_mut().expect("missing have_seen_ptr_op");
        have_seen_ptr_op(ptr as *const c_void)
    }
}

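/// Trait for measuring the "deep" heap usage of a data structure: everything
/// owned by the value that lives on the heap.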
pub trait MallocSizeOf {
    /// Measure the heap usage of all descendant heap-allocated structures, but
    /// not the space taken up by the value itself.
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize;

    /// Returns `Some(size)` if `size_of` yields the same value for every
    /// instance of `Self`, letting container impls compute `len() * size`
    /// instead of iterating over their elements. Defaults to `None`.
    fn constant_size() -> Option<usize>
    where
        Self: Sized,
    {
        None
    }
}

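/// Trait for measuring the "shallow" heap usage of a container: the space used
/// by the container's own allocation(s), but not the contents of its elements.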
pub trait MallocShallowSizeOf {
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}

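/// Like `MallocSizeOf`, but measures unconditionally, without consulting
/// `have_seen_ptr`. Intended for reference-counted values such as `Arc` when
/// measuring what is known to be the primary reference.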
pub trait MallocUnconditionalSizeOf {
    fn unconditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}

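/// `MallocUnconditionalSizeOf` combined with `MallocShallowSizeOf`.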
pub trait MallocUnconditionalShallowSizeOf {
    fn unconditional_shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}

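/// Like `MallocSizeOf`, but only measures if the value hasn't already been
/// measured during this traversal (tracked via `have_seen_ptr`), so shared
/// values such as `Arc` contents are not double-counted.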
pub trait MallocConditionalSizeOf {
    fn conditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}

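/// `MallocConditionalSizeOf` combined with `MallocShallowSizeOf`.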
pub trait MallocConditionalShallowSizeOf {
    fn conditional_shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}

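// Implementations that query the allocator for real allocation sizes. They are
// compiled out when the `estimate-heapsize` fallback is enabled, or on
// macOS/iOS when the `jemalloc-global` feature is not used.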
#[cfg(not(any(
    all(any(target_os = "macos", target_os = "ios"), not(feature = "jemalloc-global"),),
    feature = "estimate-heapsize"
)))]
pub mod inner_allocator_use {

    use super::*;

    #[cfg(not(feature = "std"))]
    use alloc::string::String;

    impl<T: ?Sized> MallocShallowSizeOf for Box<T> {
        fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
            unsafe { ops.malloc_size_of(&**self) }
        }
    }

    impl<T> MallocShallowSizeOf for Vec<T> {
        fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
            unsafe { ops.malloc_size_of(self.as_ptr()) }
        }
    }

    #[cfg(feature = "std")]
    #[cfg(all(feature = "jemalloc-global", not(target_os = "windows")))]
    impl<T> MallocUnconditionalShallowSizeOf for Arc<T> {
        fn unconditional_shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
            unsafe { ops.malloc_size_of(arc_ptr(self)) }
        }
    }

    #[cfg(feature = "std")]
    #[cfg(not(all(feature = "jemalloc-global", not(target_os = "windows"))))]
    impl<T> MallocUnconditionalShallowSizeOf for Arc<T> {
        fn unconditional_shallow_size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
            size_of::<T>()
        }
    }

    impl MallocSizeOf for String {
        fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
            unsafe { ops.malloc_size_of(self.as_ptr()) }
        }
    }
}

impl<'a, T: ?Sized> MallocSizeOf for &'a T {
    fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
        0
    }
    fn constant_size() -> Option<usize> {
        Some(0)
    }
}

impl<T: MallocSizeOf + ?Sized> MallocSizeOf for Box<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.shallow_size_of(ops) + (**self).size_of(ops)
    }
}

#[impl_trait_for_tuples::impl_for_tuples(12)]
impl MallocSizeOf for Tuple {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let mut result = 0;
        for_tuples!( #( result += Tuple.size_of(ops); )* );
        result
    }
    fn constant_size() -> Option<usize> {
        let mut result = Some(0);
        for_tuples!( #( result = result.and_then(|s| Tuple::constant_size().map(|t| s + t)); )* );
        result
    }
}

impl<T: MallocSizeOf> MallocSizeOf for Option<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        if let Some(val) = self.as_ref() {
            val.size_of(ops)
        } else {
            0
        }
    }
    fn constant_size() -> Option<usize> {
        T::constant_size().filter(|s| *s == 0)
    }
}

impl<T: MallocSizeOf, E: MallocSizeOf> MallocSizeOf for Result<T, E> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        match *self {
            Ok(ref x) => x.size_of(ops),
            Err(ref e) => e.size_of(ops),
        }
    }
    fn constant_size() -> Option<usize> {
        T::constant_size().and_then(|t| E::constant_size().filter(|e| *e == t))
    }
}

impl<T: MallocSizeOf + Copy> MallocSizeOf for rstd::cell::Cell<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.get().size_of(ops)
    }
    fn constant_size() -> Option<usize> {
        T::constant_size()
    }
}

impl<T: MallocSizeOf> MallocSizeOf for rstd::cell::RefCell<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.borrow().size_of(ops)
    }
    fn constant_size() -> Option<usize> {
        T::constant_size()
    }
}

#[cfg(feature = "std")]
impl<'a, B: ?Sized + ToOwned> MallocSizeOf for std::borrow::Cow<'a, B>
where
    B::Owned: MallocSizeOf,
{
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        match *self {
            std::borrow::Cow::Borrowed(_) => 0,
            std::borrow::Cow::Owned(ref b) => b.size_of(ops),
        }
    }
}

impl<T: MallocSizeOf> MallocSizeOf for [T] {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let mut n = 0;
        if let Some(t) = T::constant_size() {
            n += self.len() * t;
        } else {
            n = self.iter().fold(n, |acc, elem| acc + elem.size_of(ops))
        }
        n
    }
}

impl<T: MallocSizeOf> MallocSizeOf for Vec<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let mut n = self.shallow_size_of(ops);
        if let Some(t) = T::constant_size() {
            n += self.len() * t;
        } else {
            n = self.iter().fold(n, |acc, elem| acc + elem.size_of(ops))
        }
        n
    }
}

impl<T> MallocShallowSizeOf for rstd::collections::VecDeque<T> {
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        if ops.has_malloc_enclosing_size_of() {
            if let Some(front) = self.front() {
                // The front element is an interior pointer into the buffer;
                // measure the enclosing allocation.
                unsafe { ops.malloc_enclosing_size_of(&*front) }
            } else {
                // This assumes that no memory is allocated when the VecDeque is empty.
                0
            }
        } else {
            // An estimate.
            self.capacity() * size_of::<T>()
        }
    }
}

impl<T: MallocSizeOf> MallocSizeOf for rstd::collections::VecDeque<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let mut n = self.shallow_size_of(ops);
        if let Some(t) = T::constant_size() {
            n += self.len() * t;
        } else {
            n = self.iter().fold(n, |acc, elem| acc + elem.size_of(ops))
        }
        n
    }
}

#[cfg(feature = "std")]
impl<T, S> MallocShallowSizeOf for std::collections::HashSet<T, S>
where
    T: Eq + Hash,
    S: BuildHasher,
{
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        if ops.has_malloc_enclosing_size_of() {
            // The first element gives us an interior pointer into the table's
            // allocation; `malloc_enclosing_size_of` then reports the size of
            // that allocation.
            self.iter().next().map_or(0, |t| unsafe { ops.malloc_enclosing_size_of(t) })
        } else {
            // An estimate.
            self.capacity() * (size_of::<T>() + size_of::<usize>())
        }
    }
}

#[cfg(feature = "std")]
impl<T, S> MallocSizeOf for std::collections::HashSet<T, S>
where
    T: Eq + Hash + MallocSizeOf,
    S: BuildHasher,
{
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let mut n = self.shallow_size_of(ops);
        if let Some(t) = T::constant_size() {
            n += self.len() * t;
        } else {
            n = self.iter().fold(n, |acc, elem| acc + elem.size_of(ops))
        }
        n
    }
}

impl<I: MallocSizeOf> MallocSizeOf for rstd::cmp::Reverse<I> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.0.size_of(ops)
    }
    fn constant_size() -> Option<usize> {
        I::constant_size()
    }
}

#[cfg(feature = "std")]
impl<K, V, S> MallocShallowSizeOf for std::collections::HashMap<K, V, S> {
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        if ops.has_malloc_enclosing_size_of() {
            self.values().next().map_or(0, |v| unsafe { ops.malloc_enclosing_size_of(v) })
        } else {
            self.capacity() * (size_of::<V>() + size_of::<K>() + size_of::<usize>())
        }
    }
}

#[cfg(feature = "std")]
impl<K, V, S> MallocSizeOf for std::collections::HashMap<K, V, S>
where
    K: MallocSizeOf,
    V: MallocSizeOf,
{
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let mut n = self.shallow_size_of(ops);
        if let (Some(k), Some(v)) = (K::constant_size(), V::constant_size()) {
            n += self.len() * (k + v)
        } else {
            n = self.iter().fold(n, |acc, (k, v)| acc + k.size_of(ops) + v.size_of(ops))
        }
        n
    }
}

impl<K, V> MallocShallowSizeOf for rstd::collections::BTreeMap<K, V> {
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        if ops.has_malloc_enclosing_size_of() {
            self.values().next().map_or(0, |v| unsafe { ops.malloc_enclosing_size_of(v) })
        } else {
            self.len() * (size_of::<V>() + size_of::<K>() + size_of::<usize>())
        }
    }
}

impl<K, V> MallocSizeOf for rstd::collections::BTreeMap<K, V>
where
    K: MallocSizeOf,
    V: MallocSizeOf,
{
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let mut n = self.shallow_size_of(ops);
        if let (Some(k), Some(v)) = (K::constant_size(), V::constant_size()) {
            n += self.len() * (k + v)
        } else {
            n = self.iter().fold(n, |acc, (k, v)| acc + k.size_of(ops) + v.size_of(ops))
        }
        n
    }
}

impl<T> MallocShallowSizeOf for rstd::collections::BTreeSet<T> {
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        if ops.has_malloc_enclosing_size_of() {
            self.iter().next().map_or(0, |t| unsafe { ops.malloc_enclosing_size_of(t) })
        } else {
            self.len() * (size_of::<T>() + size_of::<usize>())
        }
    }
}

impl<T> MallocSizeOf for rstd::collections::BTreeSet<T>
where
    T: MallocSizeOf,
{
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let mut n = self.shallow_size_of(ops);
        if let Some(t) = T::constant_size() {
            n += self.len() * t;
        } else {
            n = self.iter().fold(n, |acc, elem| acc + elem.size_of(ops))
        }
        n
    }
}

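/// Get a raw pointer to the heap allocation behind an `Arc`, for use with the
/// measurement and deduplication callbacks.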
#[cfg(feature = "std")]
fn arc_ptr<T>(s: &Arc<T>) -> *const T {
    &(**s) as *const T
}

#[cfg(feature = "std")]
impl<T: MallocSizeOf> MallocUnconditionalSizeOf for Arc<T> {
    fn unconditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.unconditional_shallow_size_of(ops) + (**self).size_of(ops)
    }
}

#[cfg(feature = "std")]
impl<T> MallocConditionalShallowSizeOf for Arc<T> {
    fn conditional_shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        if ops.have_seen_ptr(arc_ptr(self)) {
            0
        } else {
            self.unconditional_shallow_size_of(ops)
        }
    }
}

#[cfg(feature = "std")]
impl<T: MallocSizeOf> MallocConditionalSizeOf for Arc<T> {
    fn conditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        if ops.have_seen_ptr(arc_ptr(self)) {
            0
        } else {
            self.unconditional_size_of(ops)
        }
    }
}

#[cfg(feature = "std")]
impl<T: MallocSizeOf> MallocSizeOf for std::sync::Mutex<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.lock().unwrap().size_of(ops)
    }
}

#[cfg(feature = "std")]
impl<T: MallocSizeOf> MallocSizeOf for parking_lot::Mutex<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.lock().size_of(ops)
    }
}

#[cfg(feature = "std")]
impl<T: MallocSizeOf> MallocSizeOf for std::sync::RwLock<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.read().unwrap().size_of(ops)
    }
}

#[cfg(feature = "std")]
impl<T: MallocSizeOf> MallocSizeOf for parking_lot::RwLock<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.read().size_of(ops)
    }
}

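/// Implement `MallocSizeOf` as always returning 0 (and `constant_size()` as
/// `Some(0)`) for the listed types. The `$ty<$gen>` arm additionally bounds the
/// generic parameters by `MallocSizeOf`, while the `any:` arm accepts generic
/// parameters without any bounds.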
#[macro_export]
macro_rules! malloc_size_of_is_0(
    ($($ty:ty),+) => (
        $(
            impl $crate::MallocSizeOf for $ty {
                #[inline(always)]
                fn size_of(&self, _: &mut $crate::MallocSizeOfOps) -> usize {
                    0
                }
                #[inline(always)]
                fn constant_size() -> Option<usize> { Some(0) }
            }
        )+
    );
    (any: $($ty:ident<$($gen:ident),+>),+) => (
        $(
            impl<$($gen),+> $crate::MallocSizeOf for $ty<$($gen),+> {
                #[inline(always)]
                fn size_of(&self, _: &mut $crate::MallocSizeOfOps) -> usize {
                    0
                }
                #[inline(always)]
                fn constant_size() -> Option<usize> { Some(0) }
            }
        )+
    );
    ($($ty:ident<$($gen:ident),+>),+) => (
        $(
            impl<$($gen: $crate::MallocSizeOf),+> $crate::MallocSizeOf for $ty<$($gen),+> {
                #[inline(always)]
                fn size_of(&self, _: &mut $crate::MallocSizeOfOps) -> usize {
                    0
                }
                #[inline(always)]
                fn constant_size() -> Option<usize> { Some(0) }
            }
        )+
    );
);

malloc_size_of_is_0!(bool, char, str);
malloc_size_of_is_0!(u8, u16, u32, u64, u128, usize);
malloc_size_of_is_0!(i8, i16, i32, i64, i128, isize);
malloc_size_of_is_0!(f32, f64);

malloc_size_of_is_0!(rstd::sync::atomic::AtomicBool);
malloc_size_of_is_0!(rstd::sync::atomic::AtomicIsize);
malloc_size_of_is_0!(rstd::sync::atomic::AtomicUsize);

malloc_size_of_is_0!(Range<u8>, Range<u16>, Range<u32>, Range<u64>, Range<usize>);
malloc_size_of_is_0!(Range<i8>, Range<i16>, Range<i32>, Range<i64>, Range<isize>);
malloc_size_of_is_0!(Range<f32>, Range<f64>);
malloc_size_of_is_0!(any: PhantomData<T>);

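/// Newtype wrapper that derefs to the wrapped value; the wrapped type must
/// implement `MallocSizeOf`.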
#[derive(Clone)]
pub struct Measurable<T: MallocSizeOf>(pub T);

impl<T: MallocSizeOf> Deref for Measurable<T> {
    type Target = T;

    fn deref(&self) -> &T {
        &self.0
    }
}

impl<T: MallocSizeOf> DerefMut for Measurable<T> {
    fn deref_mut(&mut self) -> &mut T {
        &mut self.0
    }
}

#[cfg(feature = "hashbrown")]
impl<K, V, S> MallocShallowSizeOf for hashbrown::HashMap<K, V, S> {
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        if ops.has_malloc_enclosing_size_of() {
            self.values().next().map_or(0, |v| unsafe { ops.malloc_enclosing_size_of(v) })
        } else {
            self.capacity() * (size_of::<V>() + size_of::<K>() + size_of::<usize>())
        }
    }
}

#[cfg(feature = "hashbrown")]
impl<K, V, S> MallocSizeOf for hashbrown::HashMap<K, V, S>
where
    K: MallocSizeOf,
    V: MallocSizeOf,
{
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let mut n = self.shallow_size_of(ops);
        if let (Some(k), Some(v)) = (K::constant_size(), V::constant_size()) {
            n += self.len() * (k + v)
        } else {
            n = self.iter().fold(n, |acc, (k, v)| acc + k.size_of(ops) + v.size_of(ops))
        }
        n
    }
}

#[cfg(feature = "lru")]
impl<K, V, S> MallocSizeOf for lru::LruCache<K, V, S>
where
    K: MallocSizeOf + rstd::cmp::Eq + rstd::hash::Hash,
    V: MallocSizeOf,
    S: rstd::hash::BuildHasher,
{
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let mut n = 0;
        if let (Some(k), Some(v)) = (K::constant_size(), V::constant_size()) {
            n += self.len() * (k + v)
        } else {
            n = self.iter().fold(n, |acc, (k, v)| acc + k.size_of(ops) + v.size_of(ops))
        }
        n
    }
}

malloc_size_of_is_0!(
    [u8; 1], [u8; 2], [u8; 3], [u8; 4], [u8; 5], [u8; 6], [u8; 7], [u8; 8], [u8; 9], [u8; 10], [u8; 11], [u8; 12],
    [u8; 13], [u8; 14], [u8; 15], [u8; 16], [u8; 17], [u8; 18], [u8; 19], [u8; 20], [u8; 21], [u8; 22], [u8; 23],
    [u8; 24], [u8; 25], [u8; 26], [u8; 27], [u8; 28], [u8; 29], [u8; 30], [u8; 31], [u8; 32]
);

macro_rules! impl_smallvec {
    ($size: expr) => {
        #[cfg(feature = "smallvec")]
        impl<T> MallocSizeOf for smallvec::SmallVec<[T; $size]>
        where
            T: MallocSizeOf,
        {
            fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
                let mut n = if self.spilled() { self.capacity() * core::mem::size_of::<T>() } else { 0 };
                if let Some(t) = T::constant_size() {
                    n += self.len() * t;
                } else {
                    n = self.iter().fold(n, |acc, elem| acc + elem.size_of(ops))
                }
                n
            }
        }
    };
}

impl_smallvec!(32);
impl_smallvec!(36);

#[cfg(feature = "std")]
malloc_size_of_is_0!(std::time::Instant);
#[cfg(feature = "std")]
malloc_size_of_is_0!(std::time::Duration);

#[cfg(all(test, feature = "std"))]
mod tests {
    use crate::{allocators::new_malloc_size_ops, MallocSizeOf, MallocSizeOfOps};
    use smallvec::SmallVec;
    use std::{collections::BTreeSet, mem};
    impl_smallvec!(3);

    #[test]
    fn test_smallvec_stack_allocated_type() {
        let mut v: SmallVec<[u8; 3]> = SmallVec::new();
        let mut ops = new_malloc_size_ops();
        assert_eq!(v.size_of(&mut ops), 0);
        v.push(1);
        v.push(2);
        v.push(3);
        assert_eq!(v.size_of(&mut ops), 0);
        assert!(!v.spilled());
        v.push(4);
        assert!(v.spilled(), "SmallVec spills when going beyond the capacity of the inner backing array");
        assert_eq!(v.size_of(&mut ops), 4);
    }

    #[test]
    fn test_smallvec_boxed_stack_allocated_type() {
        let mut v: SmallVec<[Box<u8>; 3]> = SmallVec::new();
        let mut ops = new_malloc_size_ops();
        assert_eq!(v.size_of(&mut ops), 0);
        v.push(Box::new(1u8));
        v.push(Box::new(2u8));
        v.push(Box::new(3u8));
        assert!(v.size_of(&mut ops) >= 3);
        assert!(!v.spilled());
        v.push(Box::new(4u8));
        assert!(v.spilled(), "SmallVec spills when going beyond the capacity of the inner backing array");
        let mut ops = new_malloc_size_ops();
        let expected_min_allocs = mem::size_of::<Box<u8>>() * 4 + 4;
        assert!(v.size_of(&mut ops) >= expected_min_allocs);
    }

    #[test]
    fn test_smallvec_heap_allocated_type() {
        let mut v: SmallVec<[String; 3]> = SmallVec::new();
        let mut ops = new_malloc_size_ops();
        assert_eq!(v.size_of(&mut ops), 0);
        v.push("COW".into());
        v.push("PIG".into());
        v.push("DUCK".into());
        assert!(!v.spilled());
        assert!(v.size_of(&mut ops) >= "COW".len() + "PIG".len() + "DUCK".len());
        v.push("ÖWL".into());
        assert!(v.spilled());
        let mut ops = new_malloc_size_ops();
        let expected_min_allocs = mem::size_of::<String>() * 4 + "ÖWL".len() + "COW".len() + "PIG".len() + "DUCK".len();
        assert!(v.size_of(&mut ops) >= expected_min_allocs);
    }

    #[test]
    fn test_large_vec() {
        const N: usize = 128 * 1024 * 1024;
        let val = vec![1u8; N];
        let mut ops = new_malloc_size_ops();
        assert!(val.size_of(&mut ops) >= N);
        assert!(val.size_of(&mut ops) < 2 * N);
    }

    #[test]
    fn btree_set() {
        let mut set = BTreeSet::new();
        for t in 0..100 {
            set.insert(vec![t]);
        }
        assert!(crate::malloc_size(&set) > 3000);
    }

    #[test]
    fn special_malloc_size_of_0() {
        struct Data<P> {
            phantom: std::marker::PhantomData<P>,
        }

        malloc_size_of_is_0!(any: Data<P>);

        assert_eq!(crate::malloc_size(&Data::<[u8; 333]> { phantom: std::marker::PhantomData }), 0);
    }

    #[test]
    fn constant_size() {
        struct AlwaysTwo(Vec<u8>);

        impl MallocSizeOf for AlwaysTwo {
            fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
                self.0.size_of(ops)
            }
            fn constant_size() -> Option<usize> {
                Some(2)
            }
        }

        assert_eq!(AlwaysTwo::constant_size(), Some(2));
        assert_eq!(std::cmp::Reverse::<u8>::constant_size(), Some(0));
        assert_eq!(std::cell::RefCell::<u8>::constant_size(), Some(0));
        assert_eq!(std::cell::Cell::<u8>::constant_size(), Some(0));
        assert_eq!(Result::<(), ()>::constant_size(), Some(0));
        assert_eq!(<(AlwaysTwo, (), [u8; 32], AlwaysTwo)>::constant_size(), Some(2 + 2));
        assert_eq!(Option::<u8>::constant_size(), Some(0));
        assert_eq!(<&String>::constant_size(), Some(0));

        assert_eq!(<String>::constant_size(), None);
        assert_eq!(std::borrow::Cow::<String>::constant_size(), None);
        assert_eq!(Result::<(), String>::constant_size(), None);
        assert_eq!(Option::<AlwaysTwo>::constant_size(), None);
    }
}