parity_util_mem/
malloc_size.rs

// Copyright 2016-2017 The Servo Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! A crate for measuring the heap usage of data structures in a way that
//! integrates with Firefox's memory reporting, particularly the use of
//! mozjemalloc and DMD. In particular, it has the following features.
//! - It isn't bound to a particular heap allocator.
//! - It provides traits for both "shallow" and "deep" measurement, which gives
//!   flexibility in the cases where the traits can't be used.
//! - It allows for measuring blocks even when only an interior pointer can be
//!   obtained for heap allocations, e.g. `HashSet` and `HashMap`. (This relies
//!   on the heap allocator having suitable support, which mozjemalloc has.)
//! - It allows handling of types like `Rc` and `Arc` by providing traits that
//!   are different to the ones for non-graph structures.
//!
//! Suggested uses are as follows.
//! - When possible, use the `MallocSizeOf` trait. (Deriving support is
//!   provided by the `malloc_size_of_derive` crate.)
//! - If you need an additional synchronization argument, provide a function
//!   that is like the standard trait method, but with the extra argument.
//! - If you need multiple measurements for a type, provide a function named
//!   `add_size_of` that takes a mutable reference to a struct that contains
//!   the multiple measurement fields.
//! - When deep measurement (via `MallocSizeOf`) cannot be implemented for a
//!   type, shallow measurement (via `MallocShallowSizeOf`) in combination with
//!   iteration can be a useful substitute.
//! - `Rc` and `Arc` are always tricky, which is why `MallocSizeOf` is not (and
//!   should not be) implemented for them.
//! - If an `Rc` or `Arc` is known to be a "primary" reference and can always
//!   be measured, it should be measured via the `MallocUnconditionalSizeOf`
//!   trait.
//! - If an `Rc` or `Arc` should be measured only if it hasn't been seen
//!   before, it should be measured via the `MallocConditionalSizeOf` trait.
//! - Using universal function call syntax is a good idea when measuring boxed
//!   fields in structs, because it makes it clear that the Box is being
//!   measured as well as the thing it points to. E.g.
//!   `<Box<_> as MallocSizeOf>::size_of(field, ops)`.

//! This is an extended version of the Servo internal malloc_size crate.
//! We should occasionally track the upstream changes/fixes and reintroduce them here, whenever applicable.
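//!
//! A minimal usage sketch (illustrative only; the type and field names below are
//! made up, and it assumes this crate's `malloc_size` helper together with an
//! allocator that supports the measurement callbacks):
//!
//! ```ignore
//! use parity_util_mem::{malloc_size, MallocSizeOf, MallocSizeOfOps};
//!
//! struct Cache {
//! 	entries: Vec<u8>,
//! 	meta: Box<u64>,
//! }
//!
//! impl MallocSizeOf for Cache {
//! 	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
//! 		// Universal function call syntax makes it clear that the `Box`
//! 		// itself is measured, not just the value it points to.
//! 		self.entries.size_of(ops) + <Box<_> as MallocSizeOf>::size_of(&self.meta, ops)
//! 	}
//! }
//!
//! let cache = Cache { entries: vec![0u8; 1024], meta: Box::new(0) };
//! assert!(malloc_size(&cache) >= 1024);
//! ```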

#[cfg(not(feature = "std"))]
use alloc::vec::Vec;
#[cfg(feature = "std")]
mod rstd {
	pub use std::*;
}
#[cfg(not(feature = "std"))]
mod rstd {
	pub use core::*;
	pub mod collections {
		pub use alloc::collections::*;
		pub use vec_deque::VecDeque;
	}
}

#[cfg(feature = "std")]
use std::sync::Arc;

#[cfg(not(feature = "std"))]
pub use alloc::boxed::Box;
#[cfg(not(feature = "std"))]
use core::ffi::c_void;
#[cfg(feature = "std")]
use rstd::hash::Hash;
use rstd::{
	marker::PhantomData,
	mem::size_of,
	ops::{Deref, DerefMut, Range},
};
#[cfg(feature = "std")]
use std::hash::BuildHasher;
#[cfg(feature = "std")]
use std::os::raw::c_void;

/// A C function that takes a pointer to a heap allocation and returns its size.
pub type VoidPtrToSizeFn = unsafe extern "C" fn(ptr: *const c_void) -> usize;

/// A closure implementing a stateful predicate on pointers.
pub type VoidPtrToBoolFnMut = dyn FnMut(*const c_void) -> bool;

/// Operations used when measuring heap usage of data structures.
pub struct MallocSizeOfOps {
	/// A function that returns the size of a heap allocation.
	size_of_op: VoidPtrToSizeFn,

	/// Like `size_of_op`, but can take an interior pointer. Optional because
	/// not all allocators support this operation. If it's not provided, some
	/// memory measurements will actually be computed estimates rather than
	/// real and accurate measurements.
	enclosing_size_of_op: Option<VoidPtrToSizeFn>,

	/// Check if a pointer has been seen before, and remember it for next time.
	/// Useful when measuring `Rc`s and `Arc`s. Optional, because many places
	/// don't need it.
	have_seen_ptr_op: Option<Box<VoidPtrToBoolFnMut>>,
}

impl MallocSizeOfOps {
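	/// Construct a new set of measurement operations.
	///
	/// A hedged sketch of wiring this up by hand (normally an allocator-specific
	/// helper supplies these callbacks; `usable_size` below is an illustrative
	/// stub, not a real allocator hook):
	///
	/// ```ignore
	/// unsafe extern "C" fn usable_size(_ptr: *const c_void) -> usize {
	/// 	// Delegate to the allocator here, e.g. jemalloc's `malloc_usable_size`.
	/// 	0
	/// }
	///
	/// let mut seen = std::collections::HashSet::new();
	/// let ops = MallocSizeOfOps::new(
	/// 	usable_size,
	/// 	None, // no interior-pointer support
	/// 	// Remember pointers so `Rc`/`Arc` contents are only counted once.
	/// 	Some(Box::new(move |ptr| !seen.insert(ptr as usize))),
	/// );
	/// ```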
	pub fn new(
		size_of: VoidPtrToSizeFn,
		malloc_enclosing_size_of: Option<VoidPtrToSizeFn>,
		have_seen_ptr: Option<Box<VoidPtrToBoolFnMut>>,
	) -> Self {
		MallocSizeOfOps {
			size_of_op: size_of,
			enclosing_size_of_op: malloc_enclosing_size_of,
			have_seen_ptr_op: have_seen_ptr,
		}
	}

	/// Check if an allocation is empty. This relies on knowledge of how Rust
	/// handles empty allocations, which may change in the future.
	fn is_empty<T: ?Sized>(ptr: *const T) -> bool {
		// The correct condition is this:
		//   `ptr as usize <= ::std::mem::align_of::<T>()`
		// But we can't call align_of() on a ?Sized T. So we approximate it
		// with the following. 256 is large enough that it should always be
		// larger than the required alignment, but small enough that it is
		// always in the first page of memory and therefore not a legitimate
		// address.
		return ptr as *const usize as usize <= 256
	}

	/// Call `size_of_op` on `ptr`, first checking that the allocation isn't
	/// empty, because some types (such as `Vec`) utilize empty allocations.
	pub unsafe fn malloc_size_of<T: ?Sized>(&self, ptr: *const T) -> usize {
		if MallocSizeOfOps::is_empty(ptr) {
			0
		} else {
			(self.size_of_op)(ptr as *const c_void)
		}
	}

	/// Is an `enclosing_size_of_op` available?
	pub fn has_malloc_enclosing_size_of(&self) -> bool {
		self.enclosing_size_of_op.is_some()
	}

	/// Call `enclosing_size_of_op`, which must be available, on `ptr`, which
	/// must not be empty.
	pub unsafe fn malloc_enclosing_size_of<T>(&self, ptr: *const T) -> usize {
		assert!(!MallocSizeOfOps::is_empty(ptr));
		(self.enclosing_size_of_op.unwrap())(ptr as *const c_void)
	}

	/// Call `have_seen_ptr_op` on `ptr`.
	pub fn have_seen_ptr<T>(&mut self, ptr: *const T) -> bool {
		let have_seen_ptr_op = self.have_seen_ptr_op.as_mut().expect("missing have_seen_ptr_op");
		have_seen_ptr_op(ptr as *const c_void)
	}
}

/// Trait for measuring the "deep" heap usage of a data structure. This is the
/// most commonly-used of the traits.
pub trait MallocSizeOf {
	/// Measure the heap usage of all descendant heap-allocated structures, but
	/// not the space taken up by the value itself.
	/// If `T::size_of` is a constant, consider implementing `constant_size` as well.
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize;

	/// Used to optimize `MallocSizeOf` implementation for collections
	/// like `Vec` and `HashMap` to avoid iterating over them unnecessarily.
	/// The `Self: Sized` bound is for object safety.
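	///
	/// A hedged sketch (an illustrative newtype with a fixed heap footprint of 0):
	///
	/// ```ignore
	/// struct Meta(u64);
	///
	/// impl MallocSizeOf for Meta {
	/// 	fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
	/// 		0
	/// 	}
	/// 	// Collections of `Meta` can then skip per-element iteration.
	/// 	fn constant_size() -> Option<usize> {
	/// 		Some(0)
	/// 	}
	/// }
	/// ```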
	fn constant_size() -> Option<usize>
	where
		Self: Sized,
	{
		None
	}
}

/// Trait for measuring the "shallow" heap usage of a container.
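///
/// A hedged sketch of the shallow-plus-iteration pattern mentioned in the module
/// documentation (the function and element type here are illustrative):
///
/// ```ignore
/// fn deep_ish_size(buffers: &Vec<Box<[u8]>>, ops: &mut MallocSizeOfOps) -> usize {
/// 	// The Vec's own heap buffer, then each element's immediate allocation.
/// 	let mut n = buffers.shallow_size_of(ops);
/// 	for buf in buffers {
/// 		n += buf.shallow_size_of(ops);
/// 	}
/// 	n
/// }
/// ```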
pub trait MallocShallowSizeOf {
	/// Measure the heap usage of immediate heap-allocated descendant
	/// structures, but not the space taken up by the value itself. Anything
	/// beyond the immediate descendants must be measured separately, using
	/// iteration.
	fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}

/// Like `MallocSizeOf`, but with a different name so it cannot be used
/// accidentally with derive(MallocSizeOf). For use with types like `Rc` and
/// `Arc` when appropriate (e.g. when measuring a "primary" reference).
pub trait MallocUnconditionalSizeOf {
	/// Measure the heap usage of all heap-allocated descendant structures, but
	/// not the space taken up by the value itself.
	fn unconditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}

/// `MallocUnconditionalSizeOf` combined with `MallocShallowSizeOf`.
pub trait MallocUnconditionalShallowSizeOf {
	/// `unconditional_size_of` combined with `shallow_size_of`.
	fn unconditional_shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}

/// Like `MallocSizeOf`, but only measures if the value hasn't already been
/// measured. For use with types like `Rc` and `Arc` when appropriate (e.g.
/// when there is no "primary" reference).
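///
/// A hedged sketch of how conditional measurement deduplicates a shared `Arc`
/// (it assumes `ops` was built with a `have_seen_ptr` callback):
///
/// ```ignore
/// let shared = Arc::new(vec![0u8; 128]);
/// // The first measurement counts the shared allocation; the second sees the
/// // same pointer again and reports 0 for it.
/// let first = shared.conditional_size_of(&mut ops);
/// let second = shared.conditional_size_of(&mut ops);
/// assert!(first >= 128 && second == 0);
/// ```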
pub trait MallocConditionalSizeOf {
	/// Measure the heap usage of all heap-allocated descendant structures, but
	/// not the space taken up by the value itself, and only if that heap usage
	/// hasn't already been measured.
	fn conditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}

/// `MallocConditionalSizeOf` combined with `MallocShallowSizeOf`.
pub trait MallocConditionalShallowSizeOf {
	/// `conditional_size_of` combined with `shallow_size_of`.
	fn conditional_shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}

#[cfg(not(any(
	all(any(target_os = "macos", target_os = "ios"), not(feature = "jemalloc-global"),),
	feature = "estimate-heapsize"
)))]
pub mod inner_allocator_use {

	use super::*;

	#[cfg(not(feature = "std"))]
	use alloc::string::String;

	impl<T: ?Sized> MallocShallowSizeOf for Box<T> {
		fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
			unsafe { ops.malloc_size_of(&**self) }
		}
	}

	impl<T> MallocShallowSizeOf for Vec<T> {
		fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
			unsafe { ops.malloc_size_of(self.as_ptr()) }
		}
	}

	// Currently this only appears to work correctly with jemalloc.
	#[cfg(feature = "std")]
	#[cfg(all(feature = "jemalloc-global", not(target_os = "windows")))]
	impl<T> MallocUnconditionalShallowSizeOf for Arc<T> {
		fn unconditional_shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
			unsafe { ops.malloc_size_of(arc_ptr(self)) }
		}
	}

	#[cfg(feature = "std")]
	#[cfg(not(all(feature = "jemalloc-global", not(target_os = "windows"))))]
	impl<T> MallocUnconditionalShallowSizeOf for Arc<T> {
		fn unconditional_shallow_size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
			size_of::<T>()
		}
	}

	impl MallocSizeOf for String {
		fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
			unsafe { ops.malloc_size_of(self.as_ptr()) }
		}
	}
}

impl<'a, T: ?Sized> MallocSizeOf for &'a T {
	fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
		// Zero makes sense for a non-owning reference.
		0
	}
	fn constant_size() -> Option<usize> {
		Some(0)
	}
}

impl<T: MallocSizeOf + ?Sized> MallocSizeOf for Box<T> {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		self.shallow_size_of(ops) + (**self).size_of(ops)
	}
}

#[impl_trait_for_tuples::impl_for_tuples(12)]
impl MallocSizeOf for Tuple {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		let mut result = 0;
		for_tuples!( #( result += Tuple.size_of(ops); )* );
		result
	}
	fn constant_size() -> Option<usize> {
		let mut result = Some(0);
		for_tuples!( #( result = result.and_then(|s| Tuple::constant_size().map(|t| s + t)); )* );
		result
	}
}

impl<T: MallocSizeOf> MallocSizeOf for Option<T> {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		if let Some(val) = self.as_ref() {
			val.size_of(ops)
		} else {
			0
		}
	}
	fn constant_size() -> Option<usize> {
		T::constant_size().filter(|s| *s == 0)
	}
}

impl<T: MallocSizeOf, E: MallocSizeOf> MallocSizeOf for Result<T, E> {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		match *self {
			Ok(ref x) => x.size_of(ops),
			Err(ref e) => e.size_of(ops),
		}
	}
	fn constant_size() -> Option<usize> {
		// Result<T, E> has constant size iff T::constant_size == E::constant_size
		T::constant_size().and_then(|t| E::constant_size().filter(|e| *e == t))
	}
}

impl<T: MallocSizeOf + Copy> MallocSizeOf for rstd::cell::Cell<T> {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		self.get().size_of(ops)
	}
	fn constant_size() -> Option<usize> {
		T::constant_size()
	}
}

impl<T: MallocSizeOf> MallocSizeOf for rstd::cell::RefCell<T> {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		self.borrow().size_of(ops)
	}
	fn constant_size() -> Option<usize> {
		T::constant_size()
	}
}

#[cfg(feature = "std")]
impl<'a, B: ?Sized + ToOwned> MallocSizeOf for std::borrow::Cow<'a, B>
where
	B::Owned: MallocSizeOf,
{
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		match *self {
			std::borrow::Cow::Borrowed(_) => 0,
			std::borrow::Cow::Owned(ref b) => b.size_of(ops),
		}
	}
}

impl<T: MallocSizeOf> MallocSizeOf for [T] {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		let mut n = 0;
		if let Some(t) = T::constant_size() {
			n += self.len() * t;
		} else {
			n = self.iter().fold(n, |acc, elem| acc + elem.size_of(ops))
		}
		n
	}
}

impl<T: MallocSizeOf> MallocSizeOf for Vec<T> {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		let mut n = self.shallow_size_of(ops);
		if let Some(t) = T::constant_size() {
			n += self.len() * t;
		} else {
			n = self.iter().fold(n, |acc, elem| acc + elem.size_of(ops))
		}
		n
	}
}

impl<T> MallocShallowSizeOf for rstd::collections::VecDeque<T> {
	fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		if ops.has_malloc_enclosing_size_of() {
			if let Some(front) = self.front() {
				// The front element is an interior pointer.
				unsafe { ops.malloc_enclosing_size_of(&*front) }
			} else {
				// This assumes that no memory is allocated when the VecDeque is empty.
				0
			}
		} else {
			// An estimate.
			self.capacity() * size_of::<T>()
		}
	}
}

impl<T: MallocSizeOf> MallocSizeOf for rstd::collections::VecDeque<T> {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		let mut n = self.shallow_size_of(ops);
		if let Some(t) = T::constant_size() {
			n += self.len() * t;
		} else {
			n = self.iter().fold(n, |acc, elem| acc + elem.size_of(ops))
		}
		n
	}
}

#[cfg(feature = "std")]
impl<T, S> MallocShallowSizeOf for std::collections::HashSet<T, S>
where
	T: Eq + Hash,
	S: BuildHasher,
{
	fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		if ops.has_malloc_enclosing_size_of() {
			// The first value from the iterator gives us an interior pointer.
			// `ops.malloc_enclosing_size_of()` then gives us the storage size.
			// This assumes that the `HashSet`'s contents (values and hashes)
			// are all stored in a single contiguous heap allocation.
			self.iter().next().map_or(0, |t| unsafe { ops.malloc_enclosing_size_of(t) })
		} else {
			// An estimate.
			self.capacity() * (size_of::<T>() + size_of::<usize>())
		}
	}
}

#[cfg(feature = "std")]
impl<T, S> MallocSizeOf for std::collections::HashSet<T, S>
where
	T: Eq + Hash + MallocSizeOf,
	S: BuildHasher,
{
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		let mut n = self.shallow_size_of(ops);
		if let Some(t) = T::constant_size() {
			n += self.len() * t;
		} else {
			n = self.iter().fold(n, |acc, elem| acc + elem.size_of(ops))
		}
		n
	}
}

impl<I: MallocSizeOf> MallocSizeOf for rstd::cmp::Reverse<I> {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		self.0.size_of(ops)
	}
	fn constant_size() -> Option<usize> {
		I::constant_size()
	}
}

#[cfg(feature = "std")]
impl<K, V, S> MallocShallowSizeOf for std::collections::HashMap<K, V, S> {
	fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		// See the implementation for std::collections::HashSet for details.
		if ops.has_malloc_enclosing_size_of() {
			self.values().next().map_or(0, |v| unsafe { ops.malloc_enclosing_size_of(v) })
		} else {
			self.capacity() * (size_of::<V>() + size_of::<K>() + size_of::<usize>())
		}
	}
}

#[cfg(feature = "std")]
impl<K, V, S> MallocSizeOf for std::collections::HashMap<K, V, S>
where
	K: MallocSizeOf,
	V: MallocSizeOf,
{
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		let mut n = self.shallow_size_of(ops);
		if let (Some(k), Some(v)) = (K::constant_size(), V::constant_size()) {
			n += self.len() * (k + v)
		} else {
			n = self.iter().fold(n, |acc, (k, v)| acc + k.size_of(ops) + v.size_of(ops))
		}
		n
	}
}

impl<K, V> MallocShallowSizeOf for rstd::collections::BTreeMap<K, V> {
	fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		if ops.has_malloc_enclosing_size_of() {
			self.values().next().map_or(0, |v| unsafe { ops.malloc_enclosing_size_of(v) })
		} else {
			self.len() * (size_of::<V>() + size_of::<K>() + size_of::<usize>())
		}
	}
}

impl<K, V> MallocSizeOf for rstd::collections::BTreeMap<K, V>
where
	K: MallocSizeOf,
	V: MallocSizeOf,
{
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		let mut n = self.shallow_size_of(ops);
		if let (Some(k), Some(v)) = (K::constant_size(), V::constant_size()) {
			n += self.len() * (k + v)
		} else {
			n = self.iter().fold(n, |acc, (k, v)| acc + k.size_of(ops) + v.size_of(ops))
		}
		n
	}
}

impl<T> MallocShallowSizeOf for rstd::collections::BTreeSet<T> {
	fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		if ops.has_malloc_enclosing_size_of() {
			// See the HashSet implementation for how this works.
			self.iter().next().map_or(0, |t| unsafe { ops.malloc_enclosing_size_of(t) })
		} else {
			// An estimate.
			self.len() * (size_of::<T>() + size_of::<usize>())
		}
	}
}

impl<T> MallocSizeOf for rstd::collections::BTreeSet<T>
where
	T: MallocSizeOf,
{
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		let mut n = self.shallow_size_of(ops);
		if let Some(t) = T::constant_size() {
			n += self.len() * t;
		} else {
			n = self.iter().fold(n, |acc, elem| acc + elem.size_of(ops))
		}
		n
	}
}

// XXX: we don't want MallocSizeOf to be defined for Rc and Arc. If negative
// trait bounds are ever allowed, this code should be uncommented.
// (We do have a compile-fail test for this:
// rc_arc_must_not_derive_malloc_size_of.rs)
// impl<T> !MallocSizeOf for Arc<T> { }
// impl<T> !MallocShallowSizeOf for Arc<T> { }

#[cfg(feature = "std")]
fn arc_ptr<T>(s: &Arc<T>) -> *const T {
	&(**s) as *const T
}

#[cfg(feature = "std")]
impl<T: MallocSizeOf> MallocUnconditionalSizeOf for Arc<T> {
	fn unconditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		self.unconditional_shallow_size_of(ops) + (**self).size_of(ops)
	}
}

#[cfg(feature = "std")]
impl<T> MallocConditionalShallowSizeOf for Arc<T> {
	fn conditional_shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		if ops.have_seen_ptr(arc_ptr(self)) {
			0
		} else {
			self.unconditional_shallow_size_of(ops)
		}
	}
}

#[cfg(feature = "std")]
impl<T: MallocSizeOf> MallocConditionalSizeOf for Arc<T> {
	fn conditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		if ops.have_seen_ptr(arc_ptr(self)) {
			0
		} else {
			self.unconditional_size_of(ops)
		}
	}
}

/// If a mutex is stored directly as a member of a data type that is being measured,
/// it is the unique owner of its contents and deserves to be measured.
///
/// If a mutex is stored inside of an Arc value as a member of a data type that is being measured,
/// the Arc will not be automatically measured so there is no risk of overcounting the mutex's
/// contents.
///
/// The same reasoning applies to RwLock.
#[cfg(feature = "std")]
impl<T: MallocSizeOf> MallocSizeOf for std::sync::Mutex<T> {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		self.lock().unwrap().size_of(ops)
	}
}

#[cfg(feature = "std")]
impl<T: MallocSizeOf> MallocSizeOf for parking_lot::Mutex<T> {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		self.lock().size_of(ops)
	}
}

#[cfg(feature = "std")]
impl<T: MallocSizeOf> MallocSizeOf for std::sync::RwLock<T> {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		self.read().unwrap().size_of(ops)
	}
}

#[cfg(feature = "std")]
impl<T: MallocSizeOf> MallocSizeOf for parking_lot::RwLock<T> {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		self.read().size_of(ops)
	}
}

/// Implement a notion of zero allocation size for some type(s).
///
/// If used for generics, by default it will require that the generic arguments
/// implement `MallocSizeOf`. This can be avoided by passing "any: " in front of
/// the type list.
///
/// ```rust
/// use parity_util_mem::{malloc_size, malloc_size_of_is_0};
///
/// struct Data<P> {
/// 	phantom: std::marker::PhantomData<P>,
/// }
///
/// malloc_size_of_is_0!(any: Data<P>);
///
/// // MallocSizeOf is NOT implemented for [u8; 333]
/// assert_eq!(malloc_size(&Data::<[u8; 333]> { phantom: std::marker::PhantomData }), 0);
/// ```
///
/// And without the "any: " prefix:
///
/// ```rust
/// use parity_util_mem::{malloc_size, malloc_size_of_is_0};
///
/// struct Data<T>(pub T);
///
/// // generic argument (`T`) must be `impl MallocSizeOf`
/// malloc_size_of_is_0!(Data<u8>);
///
/// assert_eq!(malloc_size(&Data(0u8)), 0);
/// ```
#[macro_export]
macro_rules! malloc_size_of_is_0(
	($($ty:ty),+) => (
		$(
			impl $crate::MallocSizeOf for $ty {
				#[inline(always)]
				fn size_of(&self, _: &mut $crate::MallocSizeOfOps) -> usize {
					0
				}
				#[inline(always)]
				fn constant_size() -> Option<usize> { Some(0) }
			}
		)+
	);
	(any: $($ty:ident<$($gen:ident),+>),+) => (
		$(
			impl<$($gen),+> $crate::MallocSizeOf for $ty<$($gen),+> {
				#[inline(always)]
				fn size_of(&self, _: &mut $crate::MallocSizeOfOps) -> usize {
					0
				}
				#[inline(always)]
				fn constant_size() -> Option<usize> { Some(0) }
			}
		)+
	);
	($($ty:ident<$($gen:ident),+>),+) => (
		$(
			impl<$($gen: $crate::MallocSizeOf),+> $crate::MallocSizeOf for $ty<$($gen),+> {
				#[inline(always)]
				fn size_of(&self, _: &mut $crate::MallocSizeOfOps) -> usize {
					0
				}
				#[inline(always)]
				fn constant_size() -> Option<usize> { Some(0) }
			}
		)+
	);
);

malloc_size_of_is_0!(bool, char, str);
malloc_size_of_is_0!(u8, u16, u32, u64, u128, usize);
malloc_size_of_is_0!(i8, i16, i32, i64, i128, isize);
malloc_size_of_is_0!(f32, f64);

malloc_size_of_is_0!(rstd::sync::atomic::AtomicBool);
malloc_size_of_is_0!(rstd::sync::atomic::AtomicIsize);
malloc_size_of_is_0!(rstd::sync::atomic::AtomicUsize);

malloc_size_of_is_0!(Range<u8>, Range<u16>, Range<u32>, Range<u64>, Range<usize>);
malloc_size_of_is_0!(Range<i8>, Range<i16>, Range<i32>, Range<i64>, Range<isize>);
malloc_size_of_is_0!(Range<f32>, Range<f64>);
malloc_size_of_is_0!(any: PhantomData<T>);

/// A measurable wrapper that defers to its inner value; used to verify the
/// `MallocSizeOf` implementation of a struct.
#[derive(Clone)]
pub struct Measurable<T: MallocSizeOf>(pub T);

impl<T: MallocSizeOf> Deref for Measurable<T> {
	type Target = T;

	fn deref(&self) -> &T {
		&self.0
	}
}

impl<T: MallocSizeOf> DerefMut for Measurable<T> {
	fn deref_mut(&mut self) -> &mut T {
		&mut self.0
	}
}

#[cfg(feature = "hashbrown")]
impl<K, V, S> MallocShallowSizeOf for hashbrown::HashMap<K, V, S> {
	fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		// See the implementation for std::collections::HashSet for details.
		if ops.has_malloc_enclosing_size_of() {
			self.values().next().map_or(0, |v| unsafe { ops.malloc_enclosing_size_of(v) })
		} else {
			self.capacity() * (size_of::<V>() + size_of::<K>() + size_of::<usize>())
		}
	}
}

#[cfg(feature = "hashbrown")]
impl<K, V, S> MallocSizeOf for hashbrown::HashMap<K, V, S>
where
	K: MallocSizeOf,
	V: MallocSizeOf,
{
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		let mut n = self.shallow_size_of(ops);
		if let (Some(k), Some(v)) = (K::constant_size(), V::constant_size()) {
			n += self.len() * (k + v)
		} else {
			n = self.iter().fold(n, |acc, (k, v)| acc + k.size_of(ops) + v.size_of(ops))
		}
		n
	}
}

#[cfg(feature = "lru")]
impl<K, V, S> MallocSizeOf for lru::LruCache<K, V, S>
where
	K: MallocSizeOf + rstd::cmp::Eq + rstd::hash::Hash,
	V: MallocSizeOf,
	S: rstd::hash::BuildHasher,
{
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		let mut n = 0;
		if let (Some(k), Some(v)) = (K::constant_size(), V::constant_size()) {
			n += self.len() * (k + v)
		} else {
			n = self.iter().fold(n, |acc, (k, v)| acc + k.size_of(ops) + v.size_of(ops))
		}
		n
	}
}

malloc_size_of_is_0!(
	[u8; 1], [u8; 2], [u8; 3], [u8; 4], [u8; 5], [u8; 6], [u8; 7], [u8; 8], [u8; 9], [u8; 10], [u8; 11], [u8; 12],
	[u8; 13], [u8; 14], [u8; 15], [u8; 16], [u8; 17], [u8; 18], [u8; 19], [u8; 20], [u8; 21], [u8; 22], [u8; 23],
	[u8; 24], [u8; 25], [u8; 26], [u8; 27], [u8; 28], [u8; 29], [u8; 30], [u8; 31], [u8; 32]
);

macro_rules! impl_smallvec {
	($size: expr) => {
		#[cfg(feature = "smallvec")]
		impl<T> MallocSizeOf for smallvec::SmallVec<[T; $size]>
		where
			T: MallocSizeOf,
		{
			fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
				let mut n = if self.spilled() { self.capacity() * core::mem::size_of::<T>() } else { 0 };
				if let Some(t) = T::constant_size() {
					n += self.len() * t;
				} else {
					n = self.iter().fold(n, |acc, elem| acc + elem.size_of(ops))
				}
				n
			}
		}
	};
}

impl_smallvec!(32); // kvdb uses this
impl_smallvec!(36); // trie-db uses this

#[cfg(feature = "std")]
malloc_size_of_is_0!(std::time::Instant);
#[cfg(feature = "std")]
malloc_size_of_is_0!(std::time::Duration);

#[cfg(all(test, feature = "std"))] // tests are using std implementations
mod tests {
	use crate::{allocators::new_malloc_size_ops, MallocSizeOf, MallocSizeOfOps};
	use smallvec::SmallVec;
	use std::{collections::BTreeSet, mem};
	impl_smallvec!(3);

	#[test]
	fn test_smallvec_stack_allocated_type() {
		let mut v: SmallVec<[u8; 3]> = SmallVec::new();
		let mut ops = new_malloc_size_ops();
		assert_eq!(v.size_of(&mut ops), 0);
		v.push(1);
		v.push(2);
		v.push(3);
		assert_eq!(v.size_of(&mut ops), 0);
		assert!(!v.spilled());
		v.push(4);
		assert!(v.spilled(), "SmallVec spills when going beyond the capacity of the inner backing array");
		assert_eq!(v.size_of(&mut ops), 4); // 4 u8s on the heap
	}

	#[test]
	fn test_smallvec_boxed_stack_allocated_type() {
		let mut v: SmallVec<[Box<u8>; 3]> = SmallVec::new();
		let mut ops = new_malloc_size_ops();
		assert_eq!(v.size_of(&mut ops), 0);
		v.push(Box::new(1u8));
		v.push(Box::new(2u8));
		v.push(Box::new(3u8));
		assert!(v.size_of(&mut ops) >= 3);
		assert!(!v.spilled());
		v.push(Box::new(4u8));
		assert!(v.spilled(), "SmallVec spills when going beyond the capacity of the inner backing array");
		let mut ops = new_malloc_size_ops();
		let expected_min_allocs = mem::size_of::<Box<u8>>() * 4 + 4;
		assert!(v.size_of(&mut ops) >= expected_min_allocs);
	}

	#[test]
	fn test_smallvec_heap_allocated_type() {
		let mut v: SmallVec<[String; 3]> = SmallVec::new();
		let mut ops = new_malloc_size_ops();
		assert_eq!(v.size_of(&mut ops), 0);
		v.push("COW".into());
		v.push("PIG".into());
		v.push("DUCK".into());
		assert!(!v.spilled());
		assert!(v.size_of(&mut ops) >= "COW".len() + "PIG".len() + "DUCK".len());
		v.push("ÖWL".into());
		assert!(v.spilled());
		let mut ops = new_malloc_size_ops();
		let expected_min_allocs = mem::size_of::<String>() * 4 + "ÖWL".len() + "COW".len() + "PIG".len() + "DUCK".len();
		assert!(v.size_of(&mut ops) >= expected_min_allocs);
	}

	#[test]
	fn test_large_vec() {
		const N: usize = 128 * 1024 * 1024;
		let val = vec![1u8; N];
		let mut ops = new_malloc_size_ops();
		assert!(val.size_of(&mut ops) >= N);
		assert!(val.size_of(&mut ops) < 2 * N);
	}

	#[test]
	fn btree_set() {
		let mut set = BTreeSet::new();
		for t in 0..100 {
			set.insert(vec![t]);
		}
		// ~36 per value
		assert!(crate::malloc_size(&set) > 3000);
	}

	#[test]
	fn special_malloc_size_of_0() {
		struct Data<P> {
			phantom: std::marker::PhantomData<P>,
		}

		malloc_size_of_is_0!(any: Data<P>);

		// MallocSizeOf is not implemented for [u8; 333]
		assert_eq!(crate::malloc_size(&Data::<[u8; 333]> { phantom: std::marker::PhantomData }), 0);
	}

	#[test]
	fn constant_size() {
		struct AlwaysTwo(Vec<u8>);

		impl MallocSizeOf for AlwaysTwo {
			fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
				self.0.size_of(ops)
			}
			fn constant_size() -> Option<usize> {
				Some(2)
			}
		}

		assert_eq!(AlwaysTwo::constant_size(), Some(2));
		assert_eq!(std::cmp::Reverse::<u8>::constant_size(), Some(0));
		assert_eq!(std::cell::RefCell::<u8>::constant_size(), Some(0));
		assert_eq!(std::cell::Cell::<u8>::constant_size(), Some(0));
		assert_eq!(Result::<(), ()>::constant_size(), Some(0));
		assert_eq!(<(AlwaysTwo, (), [u8; 32], AlwaysTwo)>::constant_size(), Some(2 + 2));
		assert_eq!(Option::<u8>::constant_size(), Some(0));
		assert_eq!(<&String>::constant_size(), Some(0));

		assert_eq!(<String>::constant_size(), None);
		assert_eq!(std::borrow::Cow::<String>::constant_size(), None);
		assert_eq!(Result::<(), String>::constant_size(), None);
		assert_eq!(Option::<AlwaysTwo>::constant_size(), None);
	}
911}