-rw-r--r-- | components/hashglobe/src/diagnostic.rs                    | 243
-rw-r--r-- | components/hashglobe/src/fake.rs                          |  14
-rw-r--r-- | components/hashglobe/src/hash_map.rs                      |  27
-rw-r--r-- | components/hashglobe/src/lib.rs                           |   1
-rw-r--r-- | components/hashglobe/src/table.rs                         | 148
-rw-r--r-- | components/malloc_size_of/lib.rs                          |  19
-rw-r--r-- | components/style/context.rs                               |   6
-rw-r--r-- | components/style/custom_properties.rs                     |  25
-rw-r--r-- | components/style/hash.rs                                  |   7
-rw-r--r-- | components/style/invalidation/element/invalidation_map.rs |  34
-rw-r--r-- | components/style/selector_map.rs                          |  74
-rw-r--r-- | components/style/selector_parser.rs                       |   9
-rw-r--r-- | components/style/stylist.rs                               |  40
13 files changed, 99 insertions(+), 548 deletions(-)
diff --git a/components/hashglobe/src/diagnostic.rs b/components/hashglobe/src/diagnostic.rs
deleted file mode 100644
index cee5e11d4b1..00000000000
--- a/components/hashglobe/src/diagnostic.rs
+++ /dev/null
@@ -1,243 +0,0 @@
-use hash_map::HashMap;
-use std::borrow::Borrow;
-use std::hash::{BuildHasher, Hash};
-use std::ptr;
-
-use FailedAllocationError;
-
-#[cfg(target_pointer_width = "32")]
-const CANARY: usize = 0x42cafe99;
-#[cfg(target_pointer_width = "64")]
-const CANARY: usize = 0x42cafe9942cafe99;
-
-#[cfg(target_pointer_width = "32")]
-const POISON: usize = 0xdeadbeef;
-#[cfg(target_pointer_width = "64")]
-const POISON: usize = 0xdeadbeefdeadbeef;
-
-#[derive(Clone, Debug)]
-enum JournalEntry {
-    Insert(usize),
-    GOIW(usize),
-    Remove(usize),
-    DidClear(usize),
-}
-
-#[derive(Clone, Debug)]
-pub struct DiagnosticHashMap<K, V, S>
-    where K: Eq + Hash,
-          S: BuildHasher
-{
-    map: HashMap<K, (usize, V), S>,
-    journal: Vec<JournalEntry>,
-    readonly: bool,
-}
-
-impl<K: Hash + Eq, V, S: BuildHasher> DiagnosticHashMap<K, V, S>
-    where K: Eq + Hash,
-          S: BuildHasher
-{
-    #[inline(always)]
-    pub fn inner(&self) -> &HashMap<K, (usize, V), S> {
-        &self.map
-    }
-
-    #[inline(never)]
-    pub fn begin_mutation(&mut self) {
-        self.map.verify();
-        assert!(self.readonly);
-        self.readonly = false;
-        self.verify();
-    }
-
-    #[inline(never)]
-    pub fn end_mutation(&mut self) {
-        self.map.verify();
-        assert!(!self.readonly);
-        self.readonly = true;
-        self.verify();
-    }
-
-    fn verify(&self) {
-        let mut position = 0;
-        let mut count = 0;
-        let mut bad_canary = None;
-
-        let mut iter = self.map.iter();
-        while let Some((h, _, v)) = iter.next_with_hash() {
-            let canary_ref = &v.0;
-            position += 1;
-
-            if *canary_ref == CANARY {
-                continue;
-            }
-
-            count += 1;
-            bad_canary = Some((h, *canary_ref, canary_ref, position));
-        }
-        if let Some(c) = bad_canary {
-            self.report_corruption(c.0, c.1, c.2, c.3, count);
-        }
-    }
-
-    #[inline(always)]
-    pub fn with_hasher(hash_builder: S) -> Self {
-        Self {
-            map: HashMap::<K, (usize, V), S>::with_hasher(hash_builder),
-            journal: Vec::new(),
-            readonly: true,
-        }
-    }
-
-    #[inline(always)]
-    pub fn len(&self) -> usize {
-        self.map.len()
-    }
-
-    #[inline(always)]
-    pub fn is_empty(&self) -> bool {
-        self.map.is_empty()
-    }
-
-    #[inline(always)]
-    pub fn contains_key<Q: ?Sized>(&self, k: &Q) -> bool
-        where K: Borrow<Q>,
-              Q: Hash + Eq
-    {
-        self.map.contains_key(k)
-    }
-
-    #[inline(always)]
-    pub fn get<Q: ?Sized>(&self, k: &Q) -> Option<&V>
-        where K: Borrow<Q>,
-              Q: Hash + Eq
-    {
-        self.map.get(k).map(|v| &v.1)
-    }
-
-    #[inline(always)]
-    pub fn try_get_or_insert_with<F: FnOnce() -> V>(
-        &mut self,
-        key: K,
-        default: F
-    ) -> Result<&mut V, FailedAllocationError> {
-        assert!(!self.readonly);
-        self.journal.push(JournalEntry::GOIW(self.map.make_hash(&key).inspect()));
-        let entry = self.map.try_entry(key)?;
-        Ok(&mut entry.or_insert_with(|| (CANARY, default())).1)
-    }
-
-    #[inline(always)]
-    pub fn try_insert(&mut self, k: K, v: V) -> Result<Option<V>, FailedAllocationError> {
-        assert!(!self.readonly);
-        self.journal.push(JournalEntry::Insert(self.map.make_hash(&k).inspect()));
-        let old = self.map.try_insert(k, (CANARY, v))?;
-        Ok(old.map(|x| x.1))
-    }
-
-    #[inline(always)]
-    pub fn remove<Q: ?Sized>(&mut self, k: &Q) -> Option<V>
-        where K: Borrow<Q>,
-              Q: Hash + Eq
-    {
-        assert!(!self.readonly);
-        self.journal.push(JournalEntry::Remove(self.map.make_hash(k).inspect()));
-        if let Some(v) = self.map.get_mut(k) {
-            unsafe { ptr::write_volatile(&mut v.0, POISON); }
-        }
-
-        self.map.remove(k).map(|x| x.1)
-    }
-
-    #[inline(always)]
-    pub fn clear(&mut self) where K: 'static, V: 'static {
-        // We handle scoped mutations for the caller here, since callsites that
-        // invoke clear() don't benefit from the coalescing we do around insertion.
-        self.begin_mutation();
-        self.journal.clear();
-        self.journal.push(JournalEntry::DidClear(self.map.raw_capacity()));
-        self.map.clear();
-        self.end_mutation();
-    }
-
-    #[inline(never)]
-    fn report_corruption(
-        &self,
-        hash: usize,
-        canary: usize,
-        canary_addr: *const usize,
-        position: usize,
-        count: usize
-    ) {
-        use ::std::ffi::CString;
-        let key = b"HashMapJournal\0";
-        let value = CString::new(format!("{:?}", self.journal)).unwrap();
-        unsafe {
-            Gecko_AnnotateCrashReport(
-                key.as_ptr() as *const ::std::os::raw::c_char,
-                value.as_ptr(),
-            );
-        }
-
-        panic!(
-            concat!("HashMap Corruption (sz={}, buffer_hash_sz={}, cap={}, pairsz={}, hash={:#x}, cnry={:#x}, ",
-                    "count={}, last_pos={}, base_addr={:?}, cnry_addr={:?}, jrnl_len={})"),
-            self.map.len(),
-            self.map.diagnostic_count_hashes(),
-            self.map.raw_capacity(),
-            ::std::mem::size_of::<(K, (usize, V))>(),
-            hash,
-            canary,
-            count,
-            position,
-            self.map.raw_buffer(),
-            canary_addr,
-            self.journal.len(),
-        );
-    }
-}
-
-impl<K, V, S> PartialEq for DiagnosticHashMap<K, V, S>
-    where K: Eq + Hash,
-          V: PartialEq,
-          S: BuildHasher
-{
-    fn eq(&self, other: &Self) -> bool {
-        self.map.eq(&other.map)
-    }
-}
-
-impl<K, V, S> Eq for DiagnosticHashMap<K, V, S>
-    where K: Eq + Hash,
-          V: Eq,
-          S: BuildHasher
-{
-}
-
-impl<K, V, S> Default for DiagnosticHashMap<K, V, S>
-    where K: Eq + Hash,
-          S: BuildHasher + Default
-{
-    fn default() -> Self {
-        Self {
-            map: HashMap::default(),
-            journal: Vec::new(),
-            readonly: true,
-        }
-    }
-}
-
-impl<K: Hash + Eq, V, S: BuildHasher> Drop for DiagnosticHashMap<K, V, S>
-    where K: Eq + Hash,
-          S: BuildHasher
-{
-    fn drop(&mut self) {
-        self.map.verify();
-        debug_assert!(self.readonly, "Dropped while mutating");
-        self.verify();
-    }
-}
-
-extern "C" {
-    pub fn Gecko_AnnotateCrashReport(key_str: *const ::std::os::raw::c_char,
-                                     value_str: *const ::std::os::raw::c_char);
-}
diff --git a/components/hashglobe/src/fake.rs b/components/hashglobe/src/fake.rs
index 1d29296f000..eba21e04148 100644
--- a/components/hashglobe/src/fake.rs
+++ b/components/hashglobe/src/fake.rs
@@ -76,24 +76,10 @@ impl<K, V, S> HashMap<K, V, S>
         Ok(self.entry(key))
     }
 
-    #[inline(always)]
-    pub fn try_get_or_insert_with<F: FnOnce() -> V>(
-        &mut self,
-        key: K,
-        default: F
-    ) -> Result<&mut V, FailedAllocationError> {
-        Ok(self.entry(key).or_insert_with(default))
-    }
-
     #[inline]
     pub fn try_insert(&mut self, k: K, v: V) -> Result<Option<V>, FailedAllocationError> {
         Ok(self.insert(k, v))
     }
-
-    #[inline(always)]
-    pub fn begin_mutation(&mut self) {}
-
-    #[inline(always)]
-    pub fn end_mutation(&mut self) {}
 }
 
 #[derive(Clone)]
diff --git a/components/hashglobe/src/hash_map.rs b/components/hashglobe/src/hash_map.rs
index 4c1d73fc0de..5b3f7b7c02b 100644
--- a/components/hashglobe/src/hash_map.rs
+++ b/components/hashglobe/src/hash_map.rs
@@ -534,7 +534,7 @@ impl<K, V, S> HashMap<K, V, S>
     where K: Eq + Hash,
           S: BuildHasher
 {
-    pub fn make_hash<X: ?Sized>(&self, x: &X) -> SafeHash
+    fn make_hash<X: ?Sized>(&self, x: &X) -> SafeHash
         where X: Hash
     {
         table::make_hash(&self.hash_builder, x)
@@ -683,23 +683,10 @@ impl<K, V, S> HashMap<K, V, S>
     /// Returns the hash map's raw capacity.
     #[inline]
-    pub fn raw_capacity(&self) -> usize {
+    fn raw_capacity(&self) -> usize {
         self.table.capacity()
     }
 
-    /// Returns a raw pointer to the table's buffer.
-    #[inline]
-    pub fn raw_buffer(&self) -> *const u8 {
-        assert!(self.len() != 0);
-        self.table.raw_buffer()
-    }
-
-    /// Verify that the table metadata is internally consistent.
-    #[inline]
-    pub fn verify(&self) {
-        self.table.verify();
-    }
-
     /// Reserves capacity for at least `additional` more elements to be inserted
     /// in the `HashMap`. The collection may reserve more space to avoid
     /// frequent reallocations.
@@ -1257,10 +1244,6 @@ impl<K, V, S> HashMap<K, V, S>
             debug_assert!(elems_left == 0 || bucket.index() != start_index);
         }
     }
-
-    pub fn diagnostic_count_hashes(&self) -> usize {
-        self.table.diagnostic_count_hashes()
-    }
 }
 
 impl<K, V, S> PartialEq for HashMap<K, V, S>
@@ -1343,12 +1326,6 @@ impl<'a, K: Debug, V: Debug> fmt::Debug for Iter<'a, K, V> {
     }
 }
 
-impl<'a, K: 'a, V: 'a> Iter<'a, K, V> {
-    pub fn next_with_hash(&mut self) -> Option<(usize, &'a K, &'a V)> {
-        self.inner.next_with_hash()
-    }
-}
-
 /// A mutable iterator over the entries of a `HashMap`.
 ///
 /// This `struct` is created by the [`iter_mut`] method on [`HashMap`]. See its
diff --git a/components/hashglobe/src/lib.rs b/components/hashglobe/src/lib.rs
index 254cb2b943d..83012473b9e 100644
--- a/components/hashglobe/src/lib.rs
+++ b/components/hashglobe/src/lib.rs
@@ -9,7 +9,6 @@
 // except according to those terms.
 
 pub mod alloc;
-pub mod diagnostic;
 pub mod hash_map;
 pub mod hash_set;
 mod shim;
diff --git a/components/hashglobe/src/table.rs b/components/hashglobe/src/table.rs
index 7f3f251f010..45dcb2673fa 100644
--- a/components/hashglobe/src/table.rs
+++ b/components/hashglobe/src/table.rs
@@ -43,7 +43,7 @@ struct TaggedHashUintPtr(Unique<HashUint>);
 impl TaggedHashUintPtr {
     #[inline]
     unsafe fn new(ptr: *mut HashUint) -> Self {
-        assert!(ptr as usize & 1 == 0 || ptr as usize == EMPTY as usize);
+        debug_assert!(ptr as usize & 1 == 0 || ptr as usize == EMPTY as usize);
         TaggedHashUintPtr(Unique::new_unchecked(ptr))
     }
 
@@ -117,7 +117,6 @@ pub struct RawTable<K, V> {
     capacity_mask: usize,
     size: usize,
     hashes: TaggedHashUintPtr,
-    bytes_allocated: usize,
 
     // Because K/V do not appear directly in any of the types in the struct,
     // inform rustc that in fact instances of K and V are reachable from here.
@@ -183,7 +182,7 @@ pub struct GapThenFull<K, V, M> {
 
 /// A hash that is not zero, since we use a hash of zero to represent empty
 /// buckets.
-#[derive(PartialEq, Copy, Clone, Debug)]
+#[derive(PartialEq, Copy, Clone)]
 pub struct SafeHash {
     hash: HashUint,
 }
@@ -246,23 +245,6 @@ impl<K, V> RawBucket<K, V> {
     unsafe fn hash_pair(&self) -> (*mut HashUint, *mut (K, V)) {
         (self.hash(), self.pair())
     }
-
-    fn assert_bounds(&self, bytes_allocated: usize, size: Option<usize>) {
-        let base = self.hash_start as *mut u8;
-        let (h, p) = unsafe { self.hash_pair() };
-        assert!((h as *mut u8) < (p as *mut u8), "HashMap Corruption - hash offset not below pair offset");
-        let end = unsafe { p.offset(1) } as *mut u8;
-        assert!(end > base, "HashMap Corruption - end={:?}, base={:?}, idx={}, alloc={}, size={:?}", end, base, self.idx, bytes_allocated, size);
-        assert!(
-            end <= unsafe { base.offset(bytes_allocated as isize) },
-            "HashMap Corruption - end={:?}, base={:?}, idx={}, alloc={}, size={:?}",
-            end,
-            base,
-            self.idx,
-            bytes_allocated,
-            size,
-        );
-    }
 }
 
 // Buckets hold references to the table.
@@ -366,7 +348,8 @@ impl<K, V, M: Deref<Target = RawTable<K, V>>> Bucket<K, V, M> {
     pub fn at_index(table: M, ib_index: usize) -> Bucket<K, V, M> {
         // if capacity is 0, then the RawBucket will be populated with bogus pointers.
        // This is an uncommon case though, so avoid it in release builds.
-        assert!(table.capacity() > 0, "HashMap Corruption - Table should have capacity at this point");
+        debug_assert!(table.capacity() > 0,
+                      "Table should have capacity at this point");
         let ib_index = ib_index & table.capacity_mask;
         Bucket {
             raw: table.raw_bucket_at(ib_index),
@@ -439,13 +422,11 @@ impl<K, V, M: Deref<Target = RawTable<K, V>>> Bucket<K, V, M> {
     /// Modifies the bucket in place to make it point to the next slot.
     pub fn next(&mut self) {
         self.raw.idx = self.raw.idx.wrapping_add(1) & self.table.capacity_mask;
-        self.raw.assert_bounds(self.table.bytes_allocated, None);
     }
 
     /// Modifies the bucket in place to make it point to the previous slot.
     pub fn prev(&mut self) {
         self.raw.idx = self.raw.idx.wrapping_sub(1) & self.table.capacity_mask;
-        self.raw.assert_bounds(self.table.bytes_allocated, None);
     }
 }
 
@@ -568,7 +549,7 @@ impl<'t, K, V> FullBucket<K, V, &'t mut RawTable<K, V>> {
     /// This works similarly to `put`, building an `EmptyBucket` out of the
     /// taken bucket.
     pub fn take(self) -> (EmptyBucket<K, V, &'t mut RawTable<K, V>>, K, V) {
-        self.table.size = self.table.size.checked_sub(1).unwrap();
+        self.table.size -= 1;
 
         unsafe {
             *self.raw.hash() = EMPTY_BUCKET;
@@ -683,7 +664,7 @@ impl<K, V, M> GapThenFull<K, V, M>
 /// Panics if `target_alignment` is not a power of two.
 #[inline]
 fn round_up_to_next(unrounded: usize, target_alignment: usize) -> usize {
-    assert!(target_alignment.is_power_of_two(), "HashMap Corruption - alignment not power of two");
+    assert!(target_alignment.is_power_of_two());
     (unrounded + target_alignment - 1) & !(target_alignment - 1)
 }
 
@@ -753,11 +734,9 @@ impl<K, V> RawTable<K, V> {
                 size: 0,
                 capacity_mask: capacity.wrapping_sub(1),
                 hashes: TaggedHashUintPtr::new(EMPTY as *mut HashUint),
-                bytes_allocated: 0,
                 marker: marker::PhantomData,
             });
        }
 
-        assert!(capacity.is_power_of_two(), "HashMap Corruption - capacity not power of two");
-
         // No need for `checked_mul` before a more restrictive check performed
         // later in this method.
@@ -798,72 +777,40 @@ impl<K, V> RawTable<K, V> {
 
         // FORK NOTE: Uses alloc shim instead of Heap.alloc
-        let buffer: *mut u8 = alloc(size, alignment);
+        let buffer = alloc(size, alignment);
 
         if buffer.is_null() {
             return Err(FailedAllocationError { reason: "out of memory when allocating RawTable" });
         }
 
-        // FORK NOTE: poison the entire buffer rather than leaving it uninitialized.
-        ptr::write_bytes(buffer, 0xe7, size);
-
         let hashes = buffer.offset(hash_offset as isize) as *mut HashUint;
-        assert!(hashes as *mut u8 == buffer, "HashMap Corruption - Nonzero hash_offset");
 
         Ok(RawTable {
             capacity_mask: capacity.wrapping_sub(1),
             size: 0,
             hashes: TaggedHashUintPtr::new(hashes),
-            bytes_allocated: size,
             marker: marker::PhantomData,
         })
     }
 
     fn raw_bucket_at(&self, index: usize) -> RawBucket<K, V> {
-        self.verify();
-
         let hashes_size = self.capacity() * size_of::<HashUint>();
         let pairs_size = self.capacity() * size_of::<(K, V)>();
 
         let (pairs_offset, _, oflo) =
             calculate_offsets(hashes_size, pairs_size, align_of::<(K, V)>());
-        assert!(!oflo, "HashMap Corruption - capacity overflow");
-        assert!(pairs_offset as isize > 0, "HashMap Corruption - pairs offset={}", pairs_offset);
-        assert!(index as isize >= 0, "HashMap Corruption - index={}", index);
-        assert!(index < self.capacity(), "HashMap Corruption - index={}", index);
+        debug_assert!(!oflo, "capacity overflow");
 
         let buffer = self.hashes.ptr() as *mut u8;
-        let bucket = unsafe {
+        unsafe {
             RawBucket {
                 hash_start: buffer as *mut HashUint,
                 pair_start: buffer.offset(pairs_offset as isize) as *const (K, V),
                 idx: index,
                 _marker: marker::PhantomData,
             }
-        };
-
-        bucket.assert_bounds(self.bytes_allocated, Some(self.size));
-        bucket
-    }
-
-    /// Returns a raw pointer to the table's buffer.
-    #[inline]
-    pub fn raw_buffer(&self) -> *const u8 {
-        self.hashes.ptr() as *const u8
-    }
-
-    /// Verify that the table metadata is internally consistent.
-    #[inline]
-    pub fn verify(&self) {
-        assert!(
-            self.capacity() == 0 || self.capacity().is_power_of_two(),
-            "HashMap Corruption: mask={}, sz={}, alloc={}", self.capacity_mask, self.size, self.bytes_allocated,
-        );
-        assert_eq!(
-            self.capacity() * (size_of::<usize>() + size_of::<(K, V)>()),
-            self.bytes_allocated,
-            "HashMap Corruption: mask={}, sz={}, alloc={}", self.capacity_mask, self.size, self.bytes_allocated,
-        );
+        }
     }
 
     /// Creates a new raw table from a given capacity. All buckets are
@@ -889,17 +836,12 @@ impl<K, V> RawTable<K, V> {
 
     fn raw_buckets(&self) -> RawBuckets<K, V> {
         RawBuckets {
-            raw: if self.capacity() == 0 { None } else { Some(self.raw_bucket_at(0)) },
+            raw: self.raw_bucket_at(0),
             elems_left: self.size,
-            bytes_allocated: self.bytes_allocated,
             marker: marker::PhantomData,
         }
     }
 
-    pub fn diagnostic_count_hashes(&self) -> usize {
-        (0..self.capacity()).filter(|&i| unsafe { *self.raw_bucket_at(i).hash() != EMPTY_BUCKET }).count()
-    }
-
     pub fn iter(&self) -> Iter<K, V> {
         Iter {
             iter: self.raw_buckets(),
@@ -914,13 +856,12 @@ impl<K, V> RawTable<K, V> {
     }
 
     pub fn into_iter(self) -> IntoIter<K, V> {
-        let RawBuckets { raw, elems_left, bytes_allocated, .. } = self.raw_buckets();
+        let RawBuckets { raw, elems_left, .. } = self.raw_buckets();
         // Replace the marker regardless of lifetime bounds on parameters.
         IntoIter {
             iter: RawBuckets {
                 raw,
                 elems_left,
-                bytes_allocated,
                 marker: marker::PhantomData,
             },
             table: self,
@@ -928,13 +869,12 @@ impl<K, V> RawTable<K, V> {
     }
 
     pub fn drain(&mut self) -> Drain<K, V> {
-        let RawBuckets { raw, elems_left, bytes_allocated, .. } = self.raw_buckets();
+        let RawBuckets { raw, elems_left, .. } = self.raw_buckets();
         // Replace the marker regardless of lifetime bounds on parameters.
         Drain {
             iter: RawBuckets {
                 raw,
                 elems_left,
-                bytes_allocated,
                 marker: marker::PhantomData,
             },
             table: Shared::from(self),
@@ -946,21 +886,17 @@ impl<K, V> RawTable<K, V> {
     /// state and should only be used for dropping the table's remaining
     /// entries. It's used in the implementation of Drop.
     unsafe fn rev_drop_buckets(&mut self) {
+        // initialize the raw bucket past the end of the table
+        let mut raw = self.raw_bucket_at(self.capacity());
         let mut elems_left = self.size;
-        if elems_left == 0 {
-            return;
-        }
-        let mut raw = self.raw_bucket_at(self.capacity() - 1);
-        loop {
+
+        while elems_left != 0 {
+            raw.idx -= 1;
+
             if *raw.hash() != EMPTY_BUCKET {
+                elems_left -= 1;
                 ptr::drop_in_place(raw.pair());
-                elems_left = elems_left.checked_sub(1).unwrap();
-                if elems_left == 0 {
-                    return;
-                }
             }
-            raw.idx = raw.idx.checked_sub(1).unwrap();
-            raw.assert_bounds(self.bytes_allocated, Some(self.size));
         }
     }
 
@@ -978,11 +914,8 @@ impl<K, V> RawTable<K, V> {
 /// A raw iterator. The basis for some other iterators in this module. Although
 /// this interface is safe, it's not used outside this module.
 struct RawBuckets<'a, K, V> {
-    // We use an Option here to avoid ever constructing a RawBucket for
-    // invalid memory.
-    raw: Option<RawBucket<K, V>>,
+    raw: RawBucket<K, V>,
     elems_left: usize,
-    bytes_allocated: usize,
 
     // Strictly speaking, this should be &'a (K,V), but that would
     // require that K:'a, and we often use RawBuckets<'static...> for
@@ -998,7 +931,6 @@ impl<'a, K, V> Clone for RawBuckets<'a, K, V> {
         RawBuckets {
             raw: self.raw,
             elems_left: self.elems_left,
-            bytes_allocated: self.bytes_allocated,
             marker: marker::PhantomData,
         }
     }
@@ -1015,17 +947,12 @@ impl<'a, K, V> Iterator for RawBuckets<'a, K, V> {
 
         loop {
             unsafe {
-                let item = self.raw.unwrap();
+                let item = self.raw;
+                self.raw.idx += 1;
                 if *item.hash() != EMPTY_BUCKET {
-                    self.elems_left = self.elems_left.checked_sub(1).unwrap();
-                    if self.elems_left != 0 {
-                        self.raw.as_mut().unwrap().idx += 1;
-                        self.raw.as_ref().unwrap().assert_bounds(self.bytes_allocated, None);
-                    }
+                    self.elems_left -= 1;
                     return Some(item);
                 }
-                self.raw.as_mut().unwrap().idx += 1;
-                self.raw.as_ref().unwrap().assert_bounds(self.bytes_allocated, None);
             }
         }
     }
@@ -1134,15 +1061,6 @@ impl<'a, K, V> ExactSizeIterator for Iter<'a, K, V> {
     }
 }
 
-impl<'a, K, V> Iter<'a, K, V> {
-    pub fn next_with_hash(&mut self) -> Option<(usize, &'a K, &'a V)> {
-        self.iter.next().map(|raw| unsafe {
-            let (hash_ptr, pair_ptr) = raw.hash_pair();
-            (*hash_ptr, &(*pair_ptr).0, &(*pair_ptr).1)
-        })
-    }
-}
-
 impl<'a, K, V> Iterator for IterMut<'a, K, V> {
     type Item = (&'a K, &'a mut V);
 
@@ -1169,7 +1087,7 @@ impl<K, V> Iterator for IntoIter<K, V> {
 
     fn next(&mut self) -> Option<(SafeHash, K, V)> {
         self.iter.next().map(|raw| {
-            self.table.size = self.table.size.checked_sub(1).unwrap();
+            self.table.size -= 1;
             unsafe {
                 let (k, v) = ptr::read(raw.pair());
                 (SafeHash { hash: *raw.hash() }, k, v)
@@ -1195,7 +1113,7 @@ impl<'a, K, V> Iterator for Drain<'a, K, V> {
     fn next(&mut self) -> Option<(SafeHash, K, V)> {
         self.iter.next().map(|raw| {
             unsafe {
-                self.table.as_mut().size = self.table.as_mut().size.checked_sub(1).unwrap();
+                self.table.as_mut().size -= 1;
                 let (k, v) = ptr::read(raw.pair());
                 (SafeHash { hash: ptr::replace(&mut *raw.hash(), EMPTY_BUCKET) }, k, v)
             }
@@ -1224,28 +1142,18 @@ impl<K: Clone, V: Clone> Clone for RawTable<K, V> {
         unsafe {
             let cap = self.capacity();
             let mut new_ht = RawTable::new_uninitialized(cap);
-            if cap == 0 {
-                return new_ht;
-            }
 
             let mut new_buckets = new_ht.raw_bucket_at(0);
             let mut buckets = self.raw_bucket_at(0);
-            loop {
+            while buckets.idx < cap {
                 *new_buckets.hash() = *buckets.hash();
                 if *new_buckets.hash() != EMPTY_BUCKET {
                     let pair_ptr = buckets.pair();
                     let kv = ((*pair_ptr).0.clone(), (*pair_ptr).1.clone());
                     ptr::write(new_buckets.pair(), kv);
                 }
-
-                if buckets.idx == cap - 1 {
-                    break;
-                }
-
                 buckets.idx += 1;
-                buckets.assert_bounds(self.bytes_allocated, None);
                 new_buckets.idx += 1;
-                new_buckets.assert_bounds(new_ht.bytes_allocated, None);
             }
 
             new_ht.size = self.size();
@@ -1284,7 +1192,7 @@ impl<K, V> Drop for RawTable<K, V> {
                                                       pairs_size,
                                                       align_of::<(K, V)>());
 
-        assert!(!oflo, "HashMap Corruption - should be impossible");
+        debug_assert!(!oflo, "should be impossible");
 
         unsafe {
             dealloc(self.hashes.ptr() as *mut u8, align);
diff --git a/components/malloc_size_of/lib.rs b/components/malloc_size_of/lib.rs
index 399f1180637..babb393cc9a 100644
--- a/components/malloc_size_of/lib.rs
+++ b/components/malloc_size_of/lib.rs
@@ -499,25 +499,6 @@ impl<K, V, S> MallocSizeOf for hashglobe::hash_map::HashMap<K, V, S>
     }
 }
 
-impl<K, V, S> MallocShallowSizeOf for hashglobe::diagnostic::DiagnosticHashMap<K, V, S>
-    where K: Eq + Hash,
-          S: BuildHasher
-{
-    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
-        self.inner().shallow_size_of(ops)
-    }
-}
-
-impl<K, V, S> MallocSizeOf for hashglobe::diagnostic::DiagnosticHashMap<K, V, S>
-    where K: Eq + Hash + MallocSizeOf,
-          V: MallocSizeOf,
-          S: BuildHasher,
-{
-    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
-        self.inner().size_of(ops)
-    }
-}
-
 impl<K, V, S> MallocShallowSizeOf for hashglobe::fake::HashMap<K, V, S>
     where K: Eq + Hash,
           S: BuildHasher,
diff --git a/components/style/context.rs b/components/style/context.rs
index a7fe03ad70c..5ed3f06c6bc 100644
--- a/components/style/context.rs
+++ b/components/style/context.rs
@@ -322,6 +322,8 @@ pub struct TraversalStatistics {
     pub selectors: u32,
     /// The number of revalidation selectors.
     pub revalidation_selectors: u32,
+    /// The number of state/attr dependencies in the dependency set.
+    pub dependency_selectors: u32,
     /// The number of declarations in the stylist.
     pub declarations: u32,
     /// The number of times the stylist was rebuilt.
@@ -342,6 +344,7 @@ impl<'a> ops::Add for &'a TraversalStatistics {
                       "traversal_time_ms should be set at the end by the caller");
         debug_assert!(self.selectors == 0, "set at the end");
         debug_assert!(self.revalidation_selectors == 0, "set at the end");
+        debug_assert!(self.dependency_selectors == 0, "set at the end");
         debug_assert!(self.declarations == 0, "set at the end");
         debug_assert!(self.stylist_rebuilds == 0, "set at the end");
         TraversalStatistics {
@@ -352,6 +355,7 @@ impl<'a> ops::Add for &'a TraversalStatistics {
             styles_reused: self.styles_reused + other.styles_reused,
             selectors: 0,
             revalidation_selectors: 0,
+            dependency_selectors: 0,
             declarations: 0,
             stylist_rebuilds: 0,
             traversal_time_ms: 0.0,
@@ -379,6 +383,7 @@ impl fmt::Display for TraversalStatistics {
         writeln!(f, "[PERF],styles_reused,{}", self.styles_reused)?;
         writeln!(f, "[PERF],selectors,{}", self.selectors)?;
         writeln!(f, "[PERF],revalidation_selectors,{}", self.revalidation_selectors)?;
+        writeln!(f, "[PERF],dependency_selectors,{}", self.dependency_selectors)?;
         writeln!(f, "[PERF],declarations,{}", self.declarations)?;
         writeln!(f, "[PERF],stylist_rebuilds,{}", self.stylist_rebuilds)?;
         writeln!(f, "[PERF],traversal_time_ms,{}", self.traversal_time_ms)?;
@@ -400,6 +405,7 @@ impl TraversalStatistics {
         self.traversal_time_ms = (time::precise_time_s() - start) * 1000.0;
         self.selectors = stylist.num_selectors() as u32;
         self.revalidation_selectors = stylist.num_revalidation_selectors() as u32;
+        self.dependency_selectors = stylist.num_invalidations() as u32;
         self.declarations = stylist.num_declarations() as u32;
         self.stylist_rebuilds = stylist.num_rebuilds() as u32;
     }
diff --git a/components/style/custom_properties.rs b/components/style/custom_properties.rs
index 90446ec02e7..05967a8f922 100644
--- a/components/style/custom_properties.rs
+++ b/components/style/custom_properties.rs
@@ -10,7 +10,7 @@ use Atom;
 use cssparser::{Delimiter, Parser, ParserInput, SourcePosition, Token, TokenSerializationType};
 use precomputed_hash::PrecomputedHash;
 use properties::{CSSWideKeyword, DeclaredValue};
-use selector_map::{PrecomputedHashSet, PrecomputedHashMap, PrecomputedDiagnosticHashMap};
+use selector_map::{PrecomputedHashSet, PrecomputedHashMap};
 use selectors::parser::SelectorParseErrorKind;
 use servo_arc::Arc;
 use smallvec::SmallVec;
@@ -88,7 +88,7 @@ where
     /// Key index.
     index: Vec<K>,
     /// Key-value map.
-    values: PrecomputedDiagnosticHashMap<K, V>,
+    values: PrecomputedHashMap<K, V>,
 }
 
 impl<K, V> OrderedMap<K, V>
@@ -99,7 +99,7 @@ where
     pub fn new() -> Self {
         OrderedMap {
             index: Vec::new(),
-            values: PrecomputedDiagnosticHashMap::default(),
+            values: PrecomputedHashMap::default(),
         }
     }
 
@@ -108,9 +108,7 @@ where
         if !self.values.contains_key(&key) {
             self.index.push(key.clone());
         }
-        self.values.begin_mutation();
-        self.values.try_insert(key, value).unwrap();
-        self.values.end_mutation();
+        self.values.insert(key, value);
     }
 
     /// Get a value given its key.
@@ -160,10 +158,7 @@ where
             None => return None,
         };
         self.index.remove(index);
-        self.values.begin_mutation();
-        let result = self.values.remove(key);
-        self.values.end_mutation();
-        result
+        self.values.remove(key)
     }
 
     fn remove_set<S>(&mut self, set: &::hash::HashSet<K, S>)
@@ -173,13 +168,7 @@ where
             return;
         }
         self.index.retain(|key| !set.contains(key));
-        self.values.begin_mutation();
-        // XXX It may be better to use retain when we back to use a
-        // normal hashmap rather than DiagnosticHashMap.
-        for key in set.iter() {
-            self.values.remove(key);
-        }
-        self.values.end_mutation();
+        self.values.retain(|key, _| !set.contains(key));
         debug_assert_eq!(self.values.len(), self.index.len());
     }
 }
@@ -211,7 +200,7 @@ where
         };
         self.pos += 1;
 
-        let value = &self.inner.values.get(key).unwrap();
+        let value = &self.inner.values[key];
 
         Some((key, value))
     }
diff --git a/components/style/hash.rs b/components/style/hash.rs
index c99ca3e9e65..305357a85b2 100644
--- a/components/style/hash.rs
+++ b/components/style/hash.rs
@@ -13,15 +13,10 @@ use fnv;
 pub use hashglobe::hash_map::HashMap;
 #[cfg(feature = "gecko")]
 pub use hashglobe::hash_set::HashSet;
-#[cfg(feature = "gecko")]
-pub use hashglobe::diagnostic::DiagnosticHashMap;
-
-#[cfg(feature = "servo")]
-pub use hashglobe::fake::{HashMap, HashSet};
 
-/// Alias to use regular HashMaps everywhere in Servo.
 #[cfg(feature = "servo")]
-pub type DiagnosticHashMap<K, V, S> = HashMap<K, V, S>;
+pub use hashglobe::fake::{HashMap, HashSet};
 
 /// Appropriate reexports of hash_map types
 pub mod map {
diff --git a/components/style/invalidation/element/invalidation_map.rs b/components/style/invalidation/element/invalidation_map.rs
index 1ee1358a91a..3f1e3480c15 100644
--- a/components/style/invalidation/element/invalidation_map.rs
+++ b/components/style/invalidation/element/invalidation_map.rs
@@ -172,6 +172,18 @@ impl InvalidationMap {
         }
     }
 
+    /// Returns the number of dependencies stored in the invalidation map.
+    pub fn len(&self) -> usize {
+        self.state_affecting_selectors.len() +
+        self.other_attribute_affecting_selectors.len() +
+        self.id_to_selector.iter().fold(0, |accum, (_, ref v)| {
+            accum + v.len()
+        }) +
+        self.class_to_selector.iter().fold(0, |accum, (_, ref v)| {
+            accum + v.len()
+        })
+    }
+
     /// Adds a selector to this `InvalidationMap`. Returns Err(..) to
     /// signify OOM.
     pub fn note_selector(
@@ -234,7 +246,8 @@ impl InvalidationMap {
 
         for class in compound_visitor.classes {
             self.class_to_selector
-                .try_get_or_insert_with(class, quirks_mode, SmallVec::new)?
+                .entry(class, quirks_mode)
+                .or_insert_with(SmallVec::new)
                 .try_push(Dependency {
                     selector: selector.clone(),
                     selector_offset: sequence_start,
@@ -243,7 +256,8 @@ impl InvalidationMap {
 
         for id in compound_visitor.ids {
             self.id_to_selector
-                .try_get_or_insert_with(id, quirks_mode, SmallVec::new)?
+                .entry(id, quirks_mode)
+                .or_insert_with(SmallVec::new)
                 .try_push(Dependency {
                     selector: selector.clone(),
                     selector_offset: sequence_start,
@@ -279,22 +293,6 @@ impl InvalidationMap {
 
         Ok(())
     }
-
-    /// Allows mutation of this InvalidationMap.
-    pub fn begin_mutation(&mut self) {
-        self.class_to_selector.begin_mutation();
-        self.id_to_selector.begin_mutation();
-        self.state_affecting_selectors.begin_mutation();
-        self.other_attribute_affecting_selectors.begin_mutation();
-    }
-
-    /// Disallows mutation of this InvalidationMap.
-    pub fn end_mutation(&mut self) {
-        self.class_to_selector.end_mutation();
-        self.id_to_selector.end_mutation();
-        self.state_affecting_selectors.end_mutation();
-        self.other_attribute_affecting_selectors.end_mutation();
-    }
 }
 
 /// A struct that collects invalidations for a given compound selector.
diff --git a/components/style/selector_map.rs b/components/style/selector_map.rs
index 13b16fd17d7..c86053e8186 100644
--- a/components/style/selector_map.rs
+++ b/components/style/selector_map.rs
@@ -10,7 +10,8 @@ use applicable_declarations::ApplicableDeclarationBlock;
 use context::QuirksMode;
 use dom::TElement;
 use fallible::FallibleVec;
-use hash::{HashMap, HashSet, DiagnosticHashMap};
+use hash::{HashMap, HashSet};
+use hash::map as hash_map;
 use hashglobe::FailedAllocationError;
 use precomputed_hash::PrecomputedHash;
 use rule_tree::CascadeLevel;
@@ -36,9 +37,6 @@ impl Default for PrecomputedHasher {
 /// A simple alias for a hashmap using PrecomputedHasher.
 pub type PrecomputedHashMap<K, V> = HashMap<K, V, BuildHasherDefault<PrecomputedHasher>>;
 
-/// A simple alias for a hashmap using PrecomputedHasher.
-pub type PrecomputedDiagnosticHashMap<K, V> = DiagnosticHashMap<K, V, BuildHasherDefault<PrecomputedHasher>>;
-
 /// A simple alias for a hashset using PrecomputedHasher.
 pub type PrecomputedHashSet<K> = HashSet<K, BuildHasherDefault<PrecomputedHasher>>;
 
@@ -101,7 +99,7 @@ pub struct SelectorMap<T: 'static> {
     /// A hash from a class name to rules which contain that class selector.
     pub class_hash: MaybeCaseInsensitiveHashMap<Atom, SmallVec<[T; 1]>>,
     /// A hash from local name to rules which contain that local name selector.
-    pub local_name_hash: PrecomputedDiagnosticHashMap<LocalName, SmallVec<[T; 1]>>,
+    pub local_name_hash: PrecomputedHashMap<LocalName, SmallVec<[T; 1]>>,
     /// Rules that don't have ID, class, or element selectors.
     pub other: SmallVec<[T; 1]>,
     /// The number of entries in this map.
@@ -117,7 +115,7 @@ impl<T: 'static> SelectorMap<T> {
         SelectorMap {
             id_hash: MaybeCaseInsensitiveHashMap::new(),
             class_hash: MaybeCaseInsensitiveHashMap::new(),
-            local_name_hash: DiagnosticHashMap::default(),
+            local_name_hash: HashMap::default(),
             other: SmallVec::new(),
             count: 0,
         }
@@ -141,20 +139,6 @@ impl<T: 'static> SelectorMap<T> {
     pub fn len(&self) -> usize {
         self.count
     }
-
-    /// Allows mutation of this SelectorMap.
-    pub fn begin_mutation(&mut self) {
-        self.id_hash.begin_mutation();
-        self.class_hash.begin_mutation();
-        self.local_name_hash.begin_mutation();
-    }
-
-    /// Disallows mutation of this SelectorMap.
-    pub fn end_mutation(&mut self) {
-        self.id_hash.end_mutation();
-        self.class_hash.end_mutation();
-        self.local_name_hash.end_mutation();
-    }
 }
 
 impl SelectorMap<Rule> {
@@ -264,10 +248,12 @@ impl<T: SelectorMapEntry> SelectorMap<T> {
 
         let vector = match find_bucket(entry.selector()) {
             Bucket::ID(id) => {
-                self.id_hash.try_get_or_insert_with(id.clone(), quirks_mode, SmallVec::new)?
+                self.id_hash.try_entry(id.clone(), quirks_mode)?
+                    .or_insert_with(SmallVec::new)
             }
             Bucket::Class(class) => {
-                self.class_hash.try_get_or_insert_with(class.clone(), quirks_mode, SmallVec::new)?
+                self.class_hash.try_entry(class.clone(), quirks_mode)?
+                    .or_insert_with(SmallVec::new)
             }
             Bucket::LocalName { name, lower_name } => {
                 // If the local name in the selector isn't lowercase, insert it
@@ -283,10 +269,12 @@ impl<T: SelectorMapEntry> SelectorMap<T> {
                 // subsequent selector matching work will filter them out.
                 if name != lower_name {
                     self.local_name_hash
-                        .try_get_or_insert_with(lower_name.clone(), SmallVec::new)?
+                        .try_entry(lower_name.clone())?
+                        .or_insert_with(SmallVec::new)
                         .try_push(entry.clone())?;
                 }
-                self.local_name_hash.try_get_or_insert_with(name.clone(), SmallVec::new)?
+                self.local_name_hash.try_entry(name.clone())?
+                    .or_insert_with(SmallVec::new)
             }
             Bucket::Universal => {
                 &mut self.other
@@ -473,7 +461,7 @@ fn find_bucket<'a>(mut iter: SelectorIter<'a, SelectorImpl>) -> Bucket<'a> {
 
 /// Wrapper for PrecomputedHashMap that does ASCII-case-insensitive lookup in quirks mode.
 #[derive(Debug, MallocSizeOf)]
-pub struct MaybeCaseInsensitiveHashMap<K: PrecomputedHash + Hash + Eq, V: 'static>(PrecomputedDiagnosticHashMap<K, V>);
+pub struct MaybeCaseInsensitiveHashMap<K: PrecomputedHash + Hash + Eq, V: 'static>(PrecomputedHashMap<K, V>);
 
 // FIXME(Manishearth) the 'static bound can be removed when
 // our HashMap fork (hashglobe) is able to use NonZero,
@@ -481,20 +469,32 @@ pub struct MaybeCaseInsensitiveHashMap<K: PrecomputedHash + Hash + Eq, V: 'stati
 impl<V: 'static> MaybeCaseInsensitiveHashMap<Atom, V> {
     /// Empty map
     pub fn new() -> Self {
-        MaybeCaseInsensitiveHashMap(PrecomputedDiagnosticHashMap::default())
+        MaybeCaseInsensitiveHashMap(PrecomputedHashMap::default())
     }
 
-    /// DiagnosticHashMap::try_get_or_insert_with
-    pub fn try_get_or_insert_with<F: FnOnce() -> V>(
+    /// HashMap::entry
+    pub fn entry(&mut self, mut key: Atom, quirks_mode: QuirksMode) -> hash_map::Entry<Atom, V> {
+        if quirks_mode == QuirksMode::Quirks {
+            key = key.to_ascii_lowercase()
+        }
+        self.0.entry(key)
+    }
+
+    /// HashMap::try_entry
+    pub fn try_entry(
         &mut self,
         mut key: Atom,
-        quirks_mode: QuirksMode,
-        default: F,
-    ) -> Result<&mut V, FailedAllocationError> {
+        quirks_mode: QuirksMode
+    ) -> Result<hash_map::Entry<Atom, V>, FailedAllocationError> {
         if quirks_mode == QuirksMode::Quirks {
             key = key.to_ascii_lowercase()
         }
-        self.0.try_get_or_insert_with(key, default)
+        self.0.try_entry(key)
+    }
+
+    /// HashMap::iter
+    pub fn iter(&self) -> hash_map::Iter<Atom, V> {
+        self.0.iter()
     }
 
     /// HashMap::clear
@@ -510,14 +510,4 @@ impl<V: 'static> MaybeCaseInsensitiveHashMap<Atom, V> {
             self.0.get(key)
         }
     }
-
-    /// DiagnosticHashMap::begin_mutation
-    pub fn begin_mutation(&mut self) {
-        self.0.begin_mutation();
-    }
-
-    /// DiagnosticHashMap::end_mutation
-    pub fn end_mutation(&mut self) {
-        self.0.end_mutation();
-    }
 }
diff --git a/components/style/selector_parser.rs b/components/style/selector_parser.rs
index 18b854894f6..62d88f2bf3b 100644
--- a/components/style/selector_parser.rs
+++ b/components/style/selector_parser.rs
@@ -145,15 +145,6 @@ impl<T> PerPseudoElementMap<T> {
         *self = Self::default();
     }
 
-    /// Invokes a callback on each non-None entry.
-    pub fn for_each<F: FnMut(&mut T)>(&mut self, mut f: F) {
-        for entry in self.entries.iter_mut() {
-            if entry.is_some() {
-                f(entry.as_mut().unwrap());
-            }
-        }
-    }
-
     /// Set an entry value.
     ///
     /// Returns an error if the element is not a simple pseudo.
diff --git a/components/style/stylist.rs b/components/style/stylist.rs
index 2bf74a6f765..f819d7f7da1 100644
--- a/components/style/stylist.rs
+++ b/components/style/stylist.rs
@@ -479,6 +479,12 @@ impl Stylist {
             .map(|(d, _)| d.selectors_for_cache_revalidation.len()).sum()
     }
 
+    /// Returns the number of entries in invalidation maps.
+    pub fn num_invalidations(&self) -> usize {
+        self.cascade_data.iter_origins()
+            .map(|(d, _)| d.invalidation_map.len()).sum()
+    }
+
     /// Invokes `f` with the `InvalidationMap` for each origin.
     ///
     /// NOTE(heycam) This might be better as an `iter_invalidation_maps`, once
@@ -1906,32 +1912,6 @@ impl CascadeData {
         }
     }
 
-    #[cfg(feature = "gecko")]
-    fn begin_mutation(&mut self, rebuild_kind: &SheetRebuildKind) {
-        self.element_map.begin_mutation();
-        self.pseudos_map.for_each(|m| m.begin_mutation());
-        if rebuild_kind.should_rebuild_invalidation() {
-            self.invalidation_map.begin_mutation();
-            self.selectors_for_cache_revalidation.begin_mutation();
-        }
-    }
-
-    #[cfg(feature = "servo")]
-    fn begin_mutation(&mut self, _: &SheetRebuildKind) {}
-
-    #[cfg(feature = "gecko")]
-    fn end_mutation(&mut self, rebuild_kind: &SheetRebuildKind) {
-        self.element_map.end_mutation();
-        self.pseudos_map.for_each(|m| m.end_mutation());
-        if rebuild_kind.should_rebuild_invalidation() {
-            self.invalidation_map.end_mutation();
-            self.selectors_for_cache_revalidation.end_mutation();
-        }
-    }
-
-    #[cfg(feature = "servo")]
-    fn end_mutation(&mut self, _: &SheetRebuildKind) {}
-
     /// Collects all the applicable media query results into `results`.
     ///
     /// This duplicates part of the logic in `add_stylesheet`, which is
@@ -1995,7 +1975,6 @@ impl CascadeData {
             self.effective_media_query_results.saw_effective(stylesheet);
         }
 
-        self.begin_mutation(&rebuild_kind);
         for rule in stylesheet.effective_rules(device, guard) {
             match *rule {
                 CssRule::Style(ref locked) => {
@@ -2026,11 +2005,7 @@ impl CascadeData {
                         None => &mut self.element_map,
                         Some(pseudo) => {
                             self.pseudos_map
-                                .get_or_insert_with(&pseudo.canonical(), || {
-                                    let mut map = Box::new(SelectorMap::new());
-                                    map.begin_mutation();
-                                    map
-                                })
+                                .get_or_insert_with(&pseudo.canonical(), || Box::new(SelectorMap::new()))
                         }
                     };
 
@@ -2125,7 +2100,6 @@ impl CascadeData {
                 _ => {}
             }
         }
-        self.end_mutation(&rebuild_kind);
 
         Ok(())
     }
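
Note on the replacement pattern (illustrative only, not part of the patch): the call sites above move from DiagnosticHashMap::try_get_or_insert_with, bracketed by begin_mutation()/end_mutation(), to the map's entry API (try_entry on the hashglobe fork, entry/try_entry on MaybeCaseInsensitiveHashMap). A minimal sketch of the resulting shape, using std::collections::HashMap and Vec to stand in for the hashglobe and SmallVec types:

    // Sketch only: std HashMap stands in for hashglobe's fallible try_entry,
    // and Vec stands in for SmallVec; names here are illustrative.
    use std::collections::HashMap;

    fn main() {
        let mut local_name_hash: HashMap<String, Vec<u32>> = HashMap::new();

        // Old call-site shape (diagnostic wrapper, scoped mutation):
        //     map.begin_mutation();
        //     map.try_get_or_insert_with(key, SmallVec::new)?.try_push(rule)?;
        //     map.end_mutation();
        // New call-site shape (entry API, fallible in hashglobe):
        //     map.try_entry(key)?.or_insert_with(SmallVec::new).try_push(rule)?;
        // Infallible std equivalent of the new shape:
        local_name_hash
            .entry("div".to_string())
            .or_insert_with(Vec::new)
            .push(1);

        assert_eq!(local_name_hash["div"], vec![1]);
    }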