path: root/components/layout/flow_ref.rs
author     Jack Moffitt <jack@metajack.im>   2015-03-12 16:32:49 -0600
committer  Jack Moffitt <jack@metajack.im>   2015-03-16 12:53:16 -0600
commit     237150fa49b050cdc0b8d46bb80170127503cfcb (patch)
tree       bc85f4f97602320fdf9727fcbc233e3eb3978ed2 /components/layout/flow_ref.rs
parent     660ea05ddb3b17b0bb914cb09968cbcf7c6b1aec (diff)
Fix memory leak in flow tree by adding weak refs.
Cycles were being created in the flow tree since absolutely positioned descendants had pointers to their containing blocks. This adds WeakFlowRef (based on Weak<T>) and makes these backpointers weak references. This also harmonizes our custom Arc<T>, FlowRef, to be consistent with the upstream implementation. Fixes #4915.
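The cycle described here is the usual reference-counting one: the flow tree owns its children through strong references, and an absolutely positioned flow also held a strong pointer back to its containing block, so neither count could ever reach zero. A minimal sketch of the ownership pattern this commit establishes, written against today's std::sync::{Arc, Weak} rather than Servo's hand-rolled FlowRef/WeakFlowRef (the Block type and its fields are illustrative only, not Servo code):

use std::sync::{Arc, Mutex, Weak};

struct Block {
    children: Mutex<Vec<Arc<Block>>>, // strong: the tree owns its descendants
    containing_block: Weak<Block>,    // weak: back-pointer, so no cycle
}

fn main() {
    let root = Arc::new(Block {
        children: Mutex::new(Vec::new()),
        containing_block: Weak::new(),
    });
    let abs_descendant = Arc::new(Block {
        children: Mutex::new(Vec::new()),
        containing_block: Arc::downgrade(&root),
    });
    root.children.lock().unwrap().push(abs_descendant.clone());

    // The back-pointer still works while the containing block is alive...
    assert!(abs_descendant.containing_block.upgrade().is_some());

    // ...but because it is weak, dropping the strong handles frees the tree
    // instead of leaking it.
    drop(root);
    assert!(abs_descendant.containing_block.upgrade().is_none());
}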
Diffstat (limited to 'components/layout/flow_ref.rs')
-rw-r--r--  components/layout/flow_ref.rs  130
1 file changed, 123 insertions(+), 7 deletions(-)
diff --git a/components/layout/flow_ref.rs b/components/layout/flow_ref.rs
index 83243df2c8d..e7b23aac698 100644
--- a/components/layout/flow_ref.rs
+++ b/components/layout/flow_ref.rs
@@ -4,18 +4,21 @@
//! Reference-counted pointers to flows.
//!
-//! Eventually, with dynamically sized types in Rust, much of this code will be superfluous.
+//! Eventually, with dynamically sized types in Rust, much of this code will
+//! be superfluous. This design is largely duplicating logic of Arc<T> and
+//! Weak<T>; please see comments there for details.
#![allow(unsafe_blocks)]
use flow::Flow;
use flow;
+use alloc::heap;
use std::mem;
use std::ops::{Deref, DerefMut};
use std::ptr;
use std::raw;
-use std::sync::atomic::Ordering;
+use std::sync::atomic::{self, Ordering};
#[unsafe_no_drop_flag]
pub struct FlowRef {
@@ -25,6 +28,14 @@ pub struct FlowRef {
unsafe impl Send for FlowRef {}
unsafe impl Sync for FlowRef {}
+#[unsafe_no_drop_flag]
+pub struct WeakFlowRef {
+    object: raw::TraitObject,
+}
+
+unsafe impl Send for WeakFlowRef {}
+unsafe impl Sync for WeakFlowRef {}
+
impl FlowRef {
    pub fn new(mut flow: Box<Flow>) -> FlowRef {
        unsafe {
@@ -37,6 +48,14 @@ impl FlowRef {
            result
        }
    }
+
+    /// Downgrades the FlowRef to a WeakFlowRef.
+    pub fn downgrade(&self) -> WeakFlowRef {
+        unsafe {
+            flow::base(&**self).weak_ref_count().fetch_add(1, Ordering::Relaxed);
+        }
+        WeakFlowRef { object: self.object }
+    }
}
impl<'a> Deref for FlowRef {
@@ -62,19 +81,41 @@ impl Drop for FlowRef {
            if self.object.vtable.is_null() {
                return
            }
-            if flow::base(&**self).ref_count().fetch_sub(1, Ordering::SeqCst) > 1 {
+            if flow::base(&**self).strong_ref_count().fetch_sub(1, Ordering::Release) != 1 {
                return
            }
+            atomic::fence(Ordering::Acquire);
+
+            // Normally we'd call the underlying Drop logic but not free the
+            // allocation, but this is almost impossible without DST in
+            // Rust. Instead we make a fake trait object to run the drop logic
+            // on.
            let flow_ref: FlowRef = mem::replace(self, FlowRef {
                object: raw::TraitObject {
                    vtable: ptr::null_mut(),
                    data: ptr::null_mut(),
                }
            });
-            drop(mem::transmute::<raw::TraitObject, Box<Flow>>(flow_ref.object));
+
+            let vtable: &[usize; 3] = mem::transmute::<*mut (), &[usize; 3]>(flow_ref.object.vtable);
+            let object_size = vtable[1];
+            let object_align = vtable[2];
+
+            let fake_data = heap::allocate(object_size, object_align);
+            ptr::copy_memory(fake_data,
+                             flow_ref.object.data as *const u8,
+                             object_size);
+
+            let fake_box = raw::TraitObject { vtable: flow_ref.object.vtable, data: fake_data as *mut () };
+            let fake_flow = mem::transmute::<raw::TraitObject, Box<Flow>>(fake_box);
+            drop(fake_flow);
+
+            if flow::base(&*flow_ref).weak_ref_count().fetch_sub(1, Ordering::Release) == 1 {
+                atomic::fence(Ordering::Acquire);
+                heap::deallocate(flow_ref.object.data as *mut u8, object_size, object_align);
+            }
+
            mem::forget(flow_ref);
-            self.object.vtable = ptr::null_mut();
-            self.object.data = ptr::null_mut();
        }
    }
}
@@ -82,7 +123,7 @@ impl Drop for FlowRef {
impl Clone for FlowRef {
    fn clone(&self) -> FlowRef {
        unsafe {
-            drop(flow::base(&**self).ref_count().fetch_add(1, Ordering::SeqCst));
+            let _ = flow::base(&**self).strong_ref_count().fetch_add(1, Ordering::Relaxed);
            FlowRef {
                object: raw::TraitObject {
                    vtable: self.object.vtable,
@@ -92,3 +133,78 @@ impl Clone for FlowRef {
        }
    }
}
+
+impl WeakFlowRef {
+    /// Upgrades a WeakFlowRef to a FlowRef.
+    pub fn upgrade(&self) -> Option<FlowRef> {
+        unsafe {
+            let object = flow::base(&**self);
+            // We use a CAS loop to increment the strong count instead of a
+            // fetch_add because once the count hits 0 it must never be above
+            // 0.
+            loop {
+                let n = object.strong_ref_count().load(Ordering::SeqCst);
+                if n == 0 { return None }
+                let old = object.strong_ref_count().compare_and_swap(n, n + 1, Ordering::SeqCst);
+                if old == n {
+                    return Some(FlowRef { object: self.object })
+                }
+            }
+        }
+    }
+}
+
+impl<'a> Deref for WeakFlowRef {
+    type Target = Flow + 'a;
+    fn deref(&self) -> &(Flow + 'a) {
+        unsafe {
+            mem::transmute_copy::<raw::TraitObject, &(Flow + 'a)>(&self.object)
+        }
+    }
+}
+
+impl DerefMut for WeakFlowRef {
+    fn deref_mut<'a>(&mut self) -> &mut (Flow + 'a) {
+        unsafe {
+            mem::transmute_copy::<raw::TraitObject, &mut (Flow + 'a)>(&self.object)
+        }
+    }
+}
+
+impl Clone for WeakFlowRef {
+    fn clone(&self) -> WeakFlowRef {
+        unsafe {
+            flow::base(&**self).weak_ref_count().fetch_add(1, Ordering::Relaxed);
+        }
+        WeakFlowRef { object: self.object }
+    }
+}
+
+impl Drop for WeakFlowRef {
+    fn drop(&mut self) {
+        unsafe {
+            if self.object.vtable.is_null() {
+                return
+            }
+
+            if flow::base(&**self).weak_ref_count().fetch_sub(1, Ordering::Release) == 1 {
+                atomic::fence(Ordering::Acquire);
+
+                // This dance deallocates the Box<Flow> without running its
+                // drop glue. The drop glue is run when the last strong
+                // reference is released.
+                let weak_ref: WeakFlowRef = mem::replace(self, WeakFlowRef {
+                    object: raw::TraitObject {
+                        vtable: ptr::null_mut(),
+                        data: ptr::null_mut(),
+                    }
+                });
+                let vtable: &[usize; 3] = mem::transmute::<*mut (), &[usize; 3]>(weak_ref.object.vtable);
+                let object_size = vtable[1];
+                let object_align = vtable[2];
+                heap::deallocate(weak_ref.object.data as *mut u8, object_size, object_align);
+                mem::forget(weak_ref);
+            }
+        }
+    }
+}
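
The FlowRef and WeakFlowRef Drop impls above split destruction into two steps: the flow's destructor runs as soon as the last strong reference disappears (via the copied "fake" Box<Flow>, since Rust offered no stable way to run drop glue in place at the time), while the backing allocation is only freed once the weak count also reaches zero. A standalone sketch of that same split using current std APIs, ptr::drop_in_place plus a manual alloc/dealloc (the Noisy type is illustrative, not Servo code):

use std::alloc::{alloc, dealloc, handle_alloc_error, Layout};
use std::ptr;

struct Noisy(u32);

impl Drop for Noisy {
    fn drop(&mut self) {
        println!("running drop glue for Noisy({})", self.0);
    }
}

fn main() {
    let layout = Layout::new::<Noisy>();
    unsafe {
        // Stand-in for the heap allocation behind a FlowRef.
        let p = alloc(layout) as *mut Noisy;
        if p.is_null() {
            handle_alloc_error(layout);
        }
        ptr::write(p, Noisy(7));

        // Step 1: the last strong reference is gone -> run the destructor
        // in place, but keep the allocation for outstanding weak refs.
        ptr::drop_in_place(p);

        // Step 2: the last weak reference is gone -> free the memory.
        dealloc(p as *mut u8, layout);
    }
}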
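
The CAS loop in WeakFlowRef::upgrade is the other load-bearing piece: a weak reference may only mint a new strong reference while the strong count is still non-zero, otherwise it would resurrect a flow whose destructor has already run. A bare-AtomicUsize sketch of just that counting protocol (using compare_exchange, the current replacement for the compare_and_swap call above; try_upgrade is an illustrative name, not Servo's API):

use std::sync::atomic::{AtomicUsize, Ordering};

/// Try to take one more strong reference; fails once the value is dead.
fn try_upgrade(strong: &AtomicUsize) -> bool {
    loop {
        let n = strong.load(Ordering::SeqCst);
        if n == 0 {
            return false; // destructor already ran; the count must stay at 0
        }
        // Publish n + 1 only if the count is still exactly n; otherwise
        // another thread raced us, so reload and retry.
        if strong
            .compare_exchange(n, n + 1, Ordering::SeqCst, Ordering::SeqCst)
            .is_ok()
        {
            return true;
        }
    }
}

fn main() {
    let strong = AtomicUsize::new(1);
    assert!(try_upgrade(&strong));     // 1 -> 2: upgrade succeeds
    strong.store(0, Ordering::SeqCst); // simulate the last strong drop
    assert!(!try_upgrade(&strong));    // a dead value cannot be upgraded
}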