author    Emilio Cobos Álvarez <emilio@crisal.io>  2018-12-11 00:11:18 +0000
committer Emilio Cobos Álvarez <emilio@crisal.io>  2018-12-16 13:35:04 +0100
commit    3f58e0b069859515844da0565a06a7950af4862f
tree      f7e3fd508b38c478f4892131dd50f50f6395ffe3
parent    626172d64c0e67d36c82cfe250d080a5c45b4f9c
style: Deduplicate the malloc_size_of code for hash maps / hash sets a bit.
This makes it easy to experiment with other hash maps, rather than depending on what hashglobe::fake::HashMap dereferences to. In particular, I wrote it while trying to get a build working with hashbrown.

Differential Revision: https://phabricator.services.mozilla.com/D14098
-rw-r--r--  components/malloc_size_of/lib.rs  235
1 file changed, 71 insertions(+), 164 deletions(-)
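
As a quick sketch of the deduplication pattern the patch below applies: a single macro_rules! macro can stamp out the same trait impls for several map types, because type identifiers passed to a macro are not hygienic and so line up with the generic parameters declared inside it. The trait and names here (ApproxHeapSize, approx_heap_size_for_map) are illustrative stand-ins, not the crate's actual MallocSizeOf API:

    use std::collections::{BTreeMap, HashMap};
    use std::mem::size_of;

    // Illustrative stand-in for the crate's MallocSizeOf trait.
    trait ApproxHeapSize {
        fn approx_heap_size(&self) -> usize;
    }

    // One macro generates the impl for each map type, the way
    // malloc_size_of_hash_map! does in the patch below.
    macro_rules! approx_heap_size_for_map {
        ($ty:ty) => {
            impl<K, V> ApproxHeapSize for $ty {
                fn approx_heap_size(&self) -> usize {
                    // Simplified per-entry estimate in the spirit of the
                    // fallback path of shallow_size_of (the real code uses
                    // capacity() on hash maps, plus one word per entry).
                    self.len() * (size_of::<K>() + size_of::<V>() + size_of::<usize>())
                }
            }
        };
    }

    approx_heap_size_for_map!(HashMap<K, V>);
    approx_heap_size_for_map!(BTreeMap<K, V>);

    fn main() {
        let mut m = HashMap::new();
        m.insert(1u64, 2u64);
        println!("~{} bytes", m.approx_heap_size());
    }

Note that the real shallow_size_of implementations below prefer ops.malloc_enclosing_size_of() on an interior pointer when available, and only fall back to a capacity-based estimate like the one sketched here.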
diff --git a/components/malloc_size_of/lib.rs b/components/malloc_size_of/lib.rs
index 778082b5f06..ddd7b5f2dad 100644
--- a/components/malloc_size_of/lib.rs
+++ b/components/malloc_size_of/lib.rs
@@ -436,126 +436,89 @@ where
}
}
-impl<T, S> MallocShallowSizeOf for std::collections::HashSet<T, S>
-where
- T: Eq + Hash,
- S: BuildHasher,
-{
- fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
- if ops.has_malloc_enclosing_size_of() {
- // The first value from the iterator gives us an interior pointer.
- // `ops.malloc_enclosing_size_of()` then gives us the storage size.
- // This assumes that the `HashSet`'s contents (values and hashes)
- // are all stored in a single contiguous heap allocation.
- self.iter()
- .next()
- .map_or(0, |t| unsafe { ops.malloc_enclosing_size_of(t) })
- } else {
- // An estimate.
- self.capacity() * (size_of::<T>() + size_of::<usize>())
- }
- }
-}
-
-impl<T, S> MallocSizeOf for std::collections::HashSet<T, S>
-where
- T: Eq + Hash + MallocSizeOf,
- S: BuildHasher,
-{
- fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
- let mut n = self.shallow_size_of(ops);
- for t in self.iter() {
- n += t.size_of(ops);
+macro_rules! malloc_size_of_hash_set {
+ ($ty:ty) => {
+ impl<T, S> MallocShallowSizeOf for $ty
+ where
+ T: Eq + Hash,
+ S: BuildHasher,
+ {
+ fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
+ if ops.has_malloc_enclosing_size_of() {
+ // The first value from the iterator gives us an interior pointer.
+ // `ops.malloc_enclosing_size_of()` then gives us the storage size.
+ // This assumes that the `HashSet`'s contents (values and hashes)
+ // are all stored in a single contiguous heap allocation.
+ self.iter()
+ .next()
+ .map_or(0, |t| unsafe { ops.malloc_enclosing_size_of(t) })
+ } else {
+ // An estimate.
+ self.capacity() * (size_of::<T>() + size_of::<usize>())
+ }
+ }
}
- n
- }
-}
-impl<T, S> MallocShallowSizeOf for hashglobe::hash_set::HashSet<T, S>
-where
- T: Eq + Hash,
- S: BuildHasher,
-{
- fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
- // See the implementation for std::collections::HashSet for details.
- if ops.has_malloc_enclosing_size_of() {
- self.iter()
- .next()
- .map_or(0, |t| unsafe { ops.malloc_enclosing_size_of(t) })
- } else {
- self.capacity() * (size_of::<T>() + size_of::<usize>())
+ impl<T, S> MallocSizeOf for $ty
+ where
+ T: Eq + Hash + MallocSizeOf,
+ S: BuildHasher,
+ {
+ fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
+ let mut n = self.shallow_size_of(ops);
+ for t in self.iter() {
+ n += t.size_of(ops);
+ }
+ n
+ }
}
}
}
-impl<T, S> MallocSizeOf for hashglobe::hash_set::HashSet<T, S>
-where
- T: Eq + Hash + MallocSizeOf,
- S: BuildHasher,
-{
- fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
- let mut n = self.shallow_size_of(ops);
- for t in self.iter() {
- n += t.size_of(ops);
+malloc_size_of_hash_set!(std::collections::HashSet<T, S>);
+malloc_size_of_hash_set!(hashglobe::hash_set::HashSet<T, S>);
+malloc_size_of_hash_set!(hashglobe::fake::HashSet<T, S>);
+
+macro_rules! malloc_size_of_hash_map {
+ ($ty:ty) => {
+ impl<K, V, S> MallocShallowSizeOf for $ty
+ where
+ K: Eq + Hash,
+ S: BuildHasher,
+ {
+ fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
+ // See the implementation for std::collections::HashSet for details.
+ if ops.has_malloc_enclosing_size_of() {
+ self.values()
+ .next()
+ .map_or(0, |v| unsafe { ops.malloc_enclosing_size_of(v) })
+ } else {
+ self.capacity() * (size_of::<V>() + size_of::<K>() + size_of::<usize>())
+ }
+ }
}
- n
- }
-}
-impl<T, S> MallocShallowSizeOf for hashglobe::fake::HashSet<T, S>
-where
- T: Eq + Hash,
- S: BuildHasher,
-{
- fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
- use std::ops::Deref;
- self.deref().shallow_size_of(ops)
- }
-}
-
-impl<T, S> MallocSizeOf for hashglobe::fake::HashSet<T, S>
-where
- T: Eq + Hash + MallocSizeOf,
- S: BuildHasher,
-{
- fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
- use std::ops::Deref;
- self.deref().size_of(ops)
- }
-}
-
-impl<K, V, S> MallocShallowSizeOf for std::collections::HashMap<K, V, S>
-where
- K: Eq + Hash,
- S: BuildHasher,
-{
- fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
- // See the implementation for std::collections::HashSet for details.
- if ops.has_malloc_enclosing_size_of() {
- self.values()
- .next()
- .map_or(0, |v| unsafe { ops.malloc_enclosing_size_of(v) })
- } else {
- self.capacity() * (size_of::<V>() + size_of::<K>() + size_of::<usize>())
+ impl<K, V, S> MallocSizeOf for $ty
+ where
+ K: Eq + Hash + MallocSizeOf,
+ V: MallocSizeOf,
+ S: BuildHasher,
+ {
+ fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
+ let mut n = self.shallow_size_of(ops);
+ for (k, v) in self.iter() {
+ n += k.size_of(ops);
+ n += v.size_of(ops);
+ }
+ n
+ }
}
}
}
-impl<K, V, S> MallocSizeOf for std::collections::HashMap<K, V, S>
-where
- K: Eq + Hash + MallocSizeOf,
- V: MallocSizeOf,
- S: BuildHasher,
-{
- fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
- let mut n = self.shallow_size_of(ops);
- for (k, v) in self.iter() {
- n += k.size_of(ops);
- n += v.size_of(ops);
- }
- n
- }
-}
+malloc_size_of_hash_map!(std::collections::HashMap<K, V, S>);
+malloc_size_of_hash_map!(hashglobe::hash_map::HashMap<K, V, S>);
+malloc_size_of_hash_map!(hashglobe::fake::HashMap<K, V, S>);
impl<K, V> MallocShallowSizeOf for std::collections::BTreeMap<K, V>
where
@@ -587,62 +550,6 @@ where
}
}
-impl<K, V, S> MallocShallowSizeOf for hashglobe::hash_map::HashMap<K, V, S>
-where
- K: Eq + Hash,
- S: BuildHasher,
-{
- fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
- // See the implementation for std::collections::HashSet for details.
- if ops.has_malloc_enclosing_size_of() {
- self.values()
- .next()
- .map_or(0, |v| unsafe { ops.malloc_enclosing_size_of(v) })
- } else {
- self.capacity() * (size_of::<V>() + size_of::<K>() + size_of::<usize>())
- }
- }
-}
-
-impl<K, V, S> MallocSizeOf for hashglobe::hash_map::HashMap<K, V, S>
-where
- K: Eq + Hash + MallocSizeOf,
- V: MallocSizeOf,
- S: BuildHasher,
-{
- fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
- let mut n = self.shallow_size_of(ops);
- for (k, v) in self.iter() {
- n += k.size_of(ops);
- n += v.size_of(ops);
- }
- n
- }
-}
-
-impl<K, V, S> MallocShallowSizeOf for hashglobe::fake::HashMap<K, V, S>
-where
- K: Eq + Hash,
- S: BuildHasher,
-{
- fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
- use std::ops::Deref;
- self.deref().shallow_size_of(ops)
- }
-}
-
-impl<K, V, S> MallocSizeOf for hashglobe::fake::HashMap<K, V, S>
-where
- K: Eq + Hash + MallocSizeOf,
- V: MallocSizeOf,
- S: BuildHasher,
-{
- fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
- use std::ops::Deref;
- self.deref().size_of(ops)
- }
-}
-
// PhantomData is always 0.
impl<T> MallocSizeOf for std::marker::PhantomData<T> {
fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {