Diffstat (limited to 'components/malloc_size_of/lib.rs')
-rw-r--r--  components/malloc_size_of/lib.rs  72
1 file changed, 57 insertions(+), 15 deletions(-)
diff --git a/components/malloc_size_of/lib.rs b/components/malloc_size_of/lib.rs
index 5ecd29d794a..671e4b9e139 100644
--- a/components/malloc_size_of/lib.rs
+++ b/components/malloc_size_of/lib.rs
@@ -56,17 +56,19 @@
//! measured as well as the thing it points to. E.g.
//! `<Box<_> as MallocSizeOf>::size_of(field, ops)`.
+extern crate app_units;
+extern crate cssparser;
+extern crate euclid;
extern crate hashglobe;
extern crate servo_arc;
extern crate smallbitvec;
extern crate smallvec;
-use hashglobe::hash_map::HashMap;
+use euclid::TypedSize2D;
use servo_arc::Arc;
-use smallbitvec::SmallBitVec;
use smallvec::{Array, SmallVec};
-use std::collections::HashSet;
use std::hash::{BuildHasher, Hash};
+use std::ops::Range;
use std::os::raw::c_void;
/// A C function that takes a pointer to a heap allocation and returns its size.
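As the surrounding doc comment notes, a boxed field should be measured both as a pointer and as the thing it points to, via a fully qualified call. A minimal sketch of what a containing type's impl might look like (the `Cache` struct and its field are hypothetical, purely for illustration):

    struct Cache {
        // Heap-allocated payload owned by this struct (hypothetical field).
        name: Box<String>,
    }

    impl MallocSizeOf for Cache {
        fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
            // Measure the Box's own allocation as well as the String it holds,
            // as recommended by the doc comment above.
            <Box<_> as MallocSizeOf>::size_of(&self.name, ops)
        }
    }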
@@ -186,6 +188,12 @@ pub trait MallocConditionalShallowSizeOf {
fn conditional_shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}
+impl MallocSizeOf for String {
+ fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
+ unsafe { ops.malloc_size_of(self.as_ptr()) }
+ }
+}
+
impl<T: ?Sized> MallocShallowSizeOf for Box<T> {
fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
unsafe { ops.malloc_size_of(&**self) }
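The new `String` impl above hands `as_ptr()`, a pointer into the string's heap buffer, to `ops.malloc_size_of`, which asks the allocator for the size of that block; the result therefore tracks the buffer's capacity rather than the current length. A hedged usage sketch (assuming an `ops: &mut MallocSizeOfOps` is already in scope):

    let s = String::with_capacity(1024);
    // Reports the heap block backing the buffer (at least the requested
    // capacity), not the number of bytes currently in use, which is zero here.
    let heap_bytes = s.size_of(ops);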
@@ -263,7 +271,7 @@ impl<A> MallocSizeOf for SmallVec<A>
}
}
-impl<T, S> MallocShallowSizeOf for HashSet<T, S>
+impl<T, S> MallocShallowSizeOf for std::collections::HashSet<T, S>
where T: Eq + Hash,
S: BuildHasher
{
@@ -276,7 +284,7 @@ impl<T, S> MallocShallowSizeOf for HashSet<T, S>
}
}
-impl<T, S> MallocSizeOf for HashSet<T, S>
+impl<T, S> MallocSizeOf for std::collections::HashSet<T, S>
where T: Eq + Hash + MallocSizeOf,
S: BuildHasher,
{
@@ -289,20 +297,40 @@ impl<T, S> MallocSizeOf for HashSet<T, S>
}
}
-impl<K, V, S> MallocShallowSizeOf for HashMap<K, V, S>
+impl<T, S> MallocShallowSizeOf for hashglobe::hash_set::HashSet<T, S>
+ where T: Eq + Hash,
+ S: BuildHasher
+{
+ fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
+ // See the implementation for std::collections::HashSet for details.
+ self.iter().next().map_or(0, |t| unsafe { ops.malloc_enclosing_size_of(t) })
+ }
+}
+
+impl<T, S> MallocSizeOf for hashglobe::hash_set::HashSet<T, S>
+ where T: Eq + Hash + MallocSizeOf,
+ S: BuildHasher,
+{
+ fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
+ let mut n = self.shallow_size_of(ops);
+ for t in self.iter() {
+ n += t.size_of(ops);
+ }
+ n
+ }
+}
+
+impl<K, V, S> MallocShallowSizeOf for hashglobe::hash_map::HashMap<K, V, S>
where K: Eq + Hash,
S: BuildHasher
{
fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
- // The first value from the iterator gives us an interior pointer.
- // `ops.malloc_enclosing_size_of()` then gives us the storage size.
- // This assumes that the `HashMap`'s contents (keys, values, and
- // hashes) are all stored in a single contiguous heap allocation.
+ // See the implementation for std::collections::HashSet for details.
self.values().next().map_or(0, |v| unsafe { ops.malloc_enclosing_size_of(v) })
}
}
-impl<K, V, S> MallocSizeOf for HashMap<K, V, S>
+impl<K, V, S> MallocSizeOf for hashglobe::hash_map::HashMap<K, V, S>
where K: Eq + Hash + MallocSizeOf,
V: MallocSizeOf,
S: BuildHasher,
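The deep `size_of` for the hashglobe map falls outside this hunk, but it presumably mirrors the set impl added above: the shallow table size plus the size of every key and value. Roughly, under that assumption:

    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        // Shallow size of the table's single allocation, then each entry's
        // own heap contents.
        let mut n = self.shallow_size_of(ops);
        for (k, v) in self.iter() {
            n += k.size_of(ops);
            n += v.size_of(ops);
        }
        n
    }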
@@ -356,6 +384,14 @@ impl<T: MallocSizeOf> MallocConditionalSizeOf for Arc<T> {
}
}
+impl<T: MallocSizeOf, U> MallocSizeOf for TypedSize2D<T, U> {
+ fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
+ let n = self.width.size_of(ops) + self.height.size_of(ops);
+ assert!(n == 0); // It would be very strange to have a non-zero value here...
+ n
+ }
+}
+
/// For use on types where size_of() returns 0.
#[macro_export]
macro_rules! size_of_is_0(
@@ -381,12 +417,18 @@ macro_rules! size_of_is_0(
);
);
-size_of_is_0!(char, str);
+size_of_is_0!(bool, char, str);
size_of_is_0!(u8, u16, u32, u64, usize);
size_of_is_0!(i8, i16, i32, i64, isize);
-size_of_is_0!(bool, f32, f64);
+size_of_is_0!(f32, f64);
+
+size_of_is_0!(Range<u8>, Range<u16>, Range<u32>, Range<u64>, Range<usize>);
+size_of_is_0!(Range<i8>, Range<i16>, Range<i32>, Range<i64>, Range<isize>);
+size_of_is_0!(Range<f32>, Range<f64>);
+
+size_of_is_0!(app_units::Au);
+size_of_is_0!(cssparser::RGBA, cssparser::TokenSerializationType);
// XXX: once we upgrade smallbitvec to 1.0.4, use the new heap_ptr() method to
// implement this properly
-size_of_is_0!(SmallBitVec);
-
+size_of_is_0!(smallbitvec::SmallBitVec);
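For reference, `size_of_is_0!` just stamps out trivial impls for types that own no heap allocations, so `size_of_is_0!(app_units::Au)` expands to roughly the following (a sketch of the expansion, not the literal macro output):

    impl MallocSizeOf for app_units::Au {
        fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
            // Au is a plain integer wrapper, so it owns nothing on the heap.
            0
        }
    }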