author     Jack Moffitt <jack@metajack.im>   2015-03-12 16:32:49 -0600
committer  Jack Moffitt <jack@metajack.im>   2015-03-16 12:53:16 -0600
commit     237150fa49b050cdc0b8d46bb80170127503cfcb
tree       bc85f4f97602320fdf9727fcbc233e3eb3978ed2 /components
parent     660ea05ddb3b17b0bb914cb09968cbcf7c6b1aec
Fix memory leak in flow tree by adding weak refs.
Cycles were being created in the flow tree since absolutely positioned descendants had pointers to their containing blocks. This adds WeakFlowRef (based on Weak<T>) and makes these backpointers weak references.

This also harmonizes our custom Arc<T>, FlowRef, to be consistent with the upstream implementation.

Fixes #4915.
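For orientation, here is a minimal sketch of why a weak backpointer breaks the cycle. It uses std::sync::Arc and Weak rather than the hand-rolled FlowRef/WeakFlowRef, and the FlowNode type and its fields are invented for the example:

    use std::sync::{Arc, Mutex, Weak};

    // Illustrative stand-in for the flow tree; not a type touched by this patch.
    struct FlowNode {
        children: Mutex<Vec<Arc<FlowNode>>>,
        // Weak backpointer to the containing block: it can still be followed,
        // but it does not contribute to the strong count, so no cycle forms.
        containing_block: Option<Weak<FlowNode>>,
    }

    fn main() {
        let containing_block = Arc::new(FlowNode {
            children: Mutex::new(Vec::new()),
            containing_block: None,
        });
        let abs_descendant = Arc::new(FlowNode {
            children: Mutex::new(Vec::new()),
            containing_block: Some(Arc::downgrade(&containing_block)),
        });
        containing_block.children.lock().unwrap().push(abs_descendant.clone());

        let backpointer = abs_descendant.containing_block.as_ref().unwrap();
        // The backpointer can be followed while the containing block is alive...
        assert!(backpointer.upgrade().is_some());

        // ...but dropping the last strong handle still frees it. With strong
        // pointers in both directions (the pre-patch situation) neither node's
        // count would ever reach zero and the subtree would leak.
        drop(containing_block);
        assert!(backpointer.upgrade().is_none());
    }

FlowRef and WeakFlowRef below reimplement this strong/weak split by hand because a Box<Flow> trait object cannot yet be stored in a std Arc; as the module comment notes, that duplication should disappear once dynamically sized types land.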
Diffstat (limited to 'components')
-rw-r--r--  components/layout/flow.rs          26
-rw-r--r--  components/layout/flow_ref.rs     130
-rw-r--r--  components/layout/layout_task.rs    8
-rw-r--r--  components/layout/lib.rs            1
4 files changed, 147 insertions(+), 18 deletions(-)
diff --git a/components/layout/flow.rs b/components/layout/flow.rs
index 1d737348c54..a49b8a18db7 100644
--- a/components/layout/flow.rs
+++ b/components/layout/flow.rs
@@ -31,7 +31,7 @@ use context::LayoutContext;
use display_list_builder::DisplayListBuildingResult;
use floats::Floats;
use flow_list::{FlowList, FlowListIterator, MutFlowListIterator};
-use flow_ref::FlowRef;
+use flow_ref::{FlowRef, WeakFlowRef};
use fragment::{Fragment, FragmentBorderBoxIterator, SpecificFragmentInfo};
use incremental::{self, RECONSTRUCT_FLOW, REFLOW, REFLOW_OUT_OF_FLOW, RestyleDamage};
use inline::InlineFlow;
@@ -740,7 +740,9 @@ pub struct BaseFlow {
/// NB: Must be the first element.
///
/// The necessity of this will disappear once we have dynamically-sized types.
- ref_count: AtomicUint,
+ strong_ref_count: AtomicUint,
+
+ weak_ref_count: AtomicUint,
pub restyle_damage: RestyleDamage,
@@ -887,7 +889,8 @@ impl Encodable for BaseFlow {
#[unsafe_destructor]
impl Drop for BaseFlow {
fn drop(&mut self) {
- if self.ref_count.load(Ordering::SeqCst) != 0 {
+ if self.strong_ref_count.load(Ordering::SeqCst) != 0 &&
+ self.weak_ref_count.load(Ordering::SeqCst) != 0 {
panic!("Flow destroyed before its ref count hit zero—this is unsafe!")
}
}
@@ -948,7 +951,8 @@ impl BaseFlow {
damage.remove(RECONSTRUCT_FLOW);
BaseFlow {
- ref_count: AtomicUint::new(1),
+ strong_ref_count: AtomicUint::new(1),
+ weak_ref_count: AtomicUint::new(1),
restyle_damage: damage,
children: FlowList::new(),
intrinsic_inline_sizes: IntrinsicISizes::new(),
@@ -978,8 +982,12 @@ impl BaseFlow {
self.children.iter_mut()
}
- pub unsafe fn ref_count<'a>(&'a self) -> &'a AtomicUint {
- &self.ref_count
+ pub unsafe fn strong_ref_count<'a>(&'a self) -> &'a AtomicUint {
+ &self.strong_ref_count
+ }
+
+ pub unsafe fn weak_ref_count<'a>(&'a self) -> &'a AtomicUint {
+ &self.weak_ref_count
}
pub fn debug_id(&self) -> uint {
@@ -1333,7 +1341,7 @@ impl MutableOwnedFlowUtils for FlowRef {
/// FIXME(pcwalton): I think this would be better with a borrow flag instead of `unsafe`.
pub struct ContainingBlockLink {
/// The pointer up to the containing block.
- link: Option<FlowRef>,
+ link: Option<WeakFlowRef>,
}
impl ContainingBlockLink {
@@ -1344,10 +1352,10 @@ impl ContainingBlockLink {
}
fn set(&mut self, link: FlowRef) {
- self.link = Some(link)
+ self.link = Some(link.downgrade())
}
- pub unsafe fn get<'a>(&'a mut self) -> &'a mut Option<FlowRef> {
+ pub unsafe fn get<'a>(&'a mut self) -> &'a mut Option<WeakFlowRef> {
&mut self.link
}
diff --git a/components/layout/flow_ref.rs b/components/layout/flow_ref.rs
index 83243df2c8d..e7b23aac698 100644
--- a/components/layout/flow_ref.rs
+++ b/components/layout/flow_ref.rs
@@ -4,18 +4,21 @@
//! Reference-counted pointers to flows.
//!
-//! Eventually, with dynamically sized types in Rust, much of this code will be superfluous.
+//! Eventually, with dynamically sized types in Rust, much of this code will
+//! be superfluous. This design largely duplicates the logic of Arc<T> and
+//! Weak<T>; please see comments there for details.
#![allow(unsafe_blocks)]
use flow::Flow;
use flow;
+use alloc::heap;
use std::mem;
use std::ops::{Deref, DerefMut};
use std::ptr;
use std::raw;
-use std::sync::atomic::Ordering;
+use std::sync::atomic::{self, Ordering};
#[unsafe_no_drop_flag]
pub struct FlowRef {
@@ -25,6 +28,14 @@ pub struct FlowRef {
unsafe impl Send for FlowRef {}
unsafe impl Sync for FlowRef {}
+#[unsafe_no_drop_flag]
+pub struct WeakFlowRef {
+ object: raw::TraitObject,
+}
+
+unsafe impl Send for WeakFlowRef {}
+unsafe impl Sync for WeakFlowRef {}
+
impl FlowRef {
pub fn new(mut flow: Box<Flow>) -> FlowRef {
unsafe {
@@ -37,6 +48,14 @@ impl FlowRef {
result
}
}
+
+ /// Downgrades the FlowRef to a WeakFlowRef.
+ pub fn downgrade(&self) -> WeakFlowRef {
+ unsafe {
+ flow::base(&**self).weak_ref_count().fetch_add(1, Ordering::Relaxed);
+ }
+ WeakFlowRef { object: self.object }
+ }
}
impl<'a> Deref for FlowRef {
@@ -62,19 +81,41 @@ impl Drop for FlowRef {
if self.object.vtable.is_null() {
return
}
- if flow::base(&**self).ref_count().fetch_sub(1, Ordering::SeqCst) > 1 {
+ if flow::base(&**self).strong_ref_count().fetch_sub(1, Ordering::Release) != 1 {
return
}
+ atomic::fence(Ordering::Acquire);
+
+ // Ideally we would run the underlying Drop logic here without freeing
+ // the allocation, but that is almost impossible without DST in Rust.
+ // Instead we make a fake trait object to run the drop logic on.
let flow_ref: FlowRef = mem::replace(self, FlowRef {
object: raw::TraitObject {
vtable: ptr::null_mut(),
data: ptr::null_mut(),
}
});
- drop(mem::transmute::<raw::TraitObject, Box<Flow>>(flow_ref.object));
+
+ let vtable: &[usize; 3] = mem::transmute::<*mut (), &[usize; 3]>(flow_ref.object.vtable);
+ let object_size = vtable[1];
+ let object_align = vtable[2];
+
+ let fake_data = heap::allocate(object_size, object_align);
+ ptr::copy_memory(fake_data,
+ flow_ref.object.data as *const u8,
+ object_size);
+
+ let fake_box = raw::TraitObject { vtable: flow_ref.object.vtable, data: fake_data as *mut () };
+ let fake_flow = mem::transmute::<raw::TraitObject, Box<Flow>>(fake_box);
+ drop(fake_flow);
+
+ if flow::base(&*flow_ref).weak_ref_count().fetch_sub(1, Ordering::Release) == 1 {
+ atomic::fence(Ordering::Acquire);
+ heap::deallocate(flow_ref.object.data as *mut u8, object_size, object_align);
+ }
+
mem::forget(flow_ref);
- self.object.vtable = ptr::null_mut();
- self.object.data = ptr::null_mut();
}
}
}
@@ -82,7 +123,7 @@ impl Drop for FlowRef {
impl Clone for FlowRef {
fn clone(&self) -> FlowRef {
unsafe {
- drop(flow::base(&**self).ref_count().fetch_add(1, Ordering::SeqCst));
+ let _ = flow::base(&**self).strong_ref_count().fetch_add(1, Ordering::Relaxed);
FlowRef {
object: raw::TraitObject {
vtable: self.object.vtable,
@@ -92,3 +133,78 @@ impl Clone for FlowRef {
}
}
}
+
+impl WeakFlowRef {
+ /// Upgrades a WeakFlowRef to a FlowRef.
+ pub fn upgrade(&self) -> Option<FlowRef> {
+ unsafe {
+ let object = flow::base(&**self);
+ // We use a CAS loop to increment the strong count instead of a
+ // fetch_add because once the count hits 0 it must never be above
+ // 0.
+ loop {
+ let n = object.strong_ref_count().load(Ordering::SeqCst);
+ if n == 0 { return None }
+ let old = object.strong_ref_count().compare_and_swap(n, n + 1, Ordering::SeqCst);
+ if old == n {
+ return Some(FlowRef { object: self.object })
+ }
+ }
+ }
+ }
+}
+
+impl<'a> Deref for WeakFlowRef {
+ type Target = Flow + 'a;
+ fn deref(&self) -> &(Flow + 'a) {
+ unsafe {
+ mem::transmute_copy::<raw::TraitObject, &(Flow + 'a)>(&self.object)
+ }
+ }
+}
+
+impl DerefMut for WeakFlowRef {
+ fn deref_mut<'a>(&mut self) -> &mut (Flow + 'a) {
+ unsafe {
+ mem::transmute_copy::<raw::TraitObject, &mut (Flow + 'a)>(&self.object)
+ }
+ }
+}
+
+impl Clone for WeakFlowRef {
+ fn clone(&self) -> WeakFlowRef {
+ unsafe {
+ flow::base(&**self).weak_ref_count().fetch_add(1, Ordering::Relaxed);
+ }
+ WeakFlowRef { object: self.object }
+ }
+}
+
+impl Drop for WeakFlowRef {
+ fn drop(&mut self) {
+ unsafe {
+ if self.object.vtable.is_null() {
+ return
+ }
+
+ if flow::base(&**self).weak_ref_count().fetch_sub(1, Ordering::Release) == 1 {
+ atomic::fence(Ordering::Acquire);
+
+ // This dance deallocates the Box<Flow> without running its
+ // drop glue. The drop glue is run when the last strong
+ // reference is released.
+ let weak_ref: WeakFlowRef = mem::replace(self, WeakFlowRef {
+ object: raw::TraitObject {
+ vtable: ptr::null_mut(),
+ data: ptr::null_mut(),
+ }
+ });
+ let vtable: &[usize; 3] = mem::transmute::<*mut (), &[usize; 3]>(weak_ref.object.vtable);
+ let object_size = vtable[1];
+ let object_align = vtable[2];
+ heap::deallocate(weak_ref.object.data as *mut u8, object_size, object_align);
+ mem::forget(weak_ref);
+ }
+ }
+ }
+}
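Aside on WeakFlowRef::upgrade above: the CAS loop exists because once the strong count has reached zero the flow's drop glue has already run, so the count must never be bumped back up. A self-contained sketch of that invariant on a bare AtomicUsize, using today's compare_exchange in place of the compare_and_swap call in the patch:

    use std::sync::atomic::{AtomicUsize, Ordering};

    // Returns true if a new strong reference was created, false if the object
    // was already dead. A plain fetch_add could resurrect a count of zero.
    fn try_upgrade(strong_count: &AtomicUsize) -> bool {
        loop {
            let n = strong_count.load(Ordering::SeqCst);
            if n == 0 {
                return false; // drop glue already ran; upgrade must fail
            }
            // Increment only if the count is still exactly `n`; otherwise retry.
            if strong_count
                .compare_exchange(n, n + 1, Ordering::SeqCst, Ordering::SeqCst)
                .is_ok()
            {
                return true;
            }
        }
    }

    fn main() {
        let live = AtomicUsize::new(1);
        assert!(try_upgrade(&live));
        assert_eq!(live.load(Ordering::SeqCst), 2);

        let dead = AtomicUsize::new(0);
        assert!(!try_upgrade(&dead)); // a dead flow is never revived
    }

Clone, by contrast, can use a plain fetch_add on the strong count because it starts from an existing strong reference, so the count is already known to be nonzero.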
diff --git a/components/layout/layout_task.rs b/components/layout/layout_task.rs
index ac58f56b2cf..9cf20fedc11 100644
--- a/components/layout/layout_task.rs
+++ b/components/layout/layout_task.rs
@@ -804,8 +804,12 @@ impl LayoutTask {
}
if needs_reflow {
- self.try_get_layout_root(*node).map(
- |mut flow| LayoutTask::reflow_all_nodes(&mut *flow));
+ match self.try_get_layout_root(*node) {
+ None => {}
+ Some(mut flow) => {
+ LayoutTask::reflow_all_nodes(&mut *flow);
+ }
+ }
}
// Create a layout context for use throughout the following passes.
diff --git a/components/layout/lib.rs b/components/layout/lib.rs
index 00bd3896af5..58650b5d3c8 100644
--- a/components/layout/lib.rs
+++ b/components/layout/lib.rs
@@ -49,6 +49,7 @@ extern crate util;
extern crate string_cache_macros;
extern crate string_cache;
+extern crate alloc;
extern crate collections;
extern crate encoding;
extern crate libc;