Fix memory leak in flow tree by adding weak refs. #5228

Merged: 1 commit, Mar 16, 2015

flow.rs

@@ -31,7 +31,7 @@ use context::LayoutContext;
use display_list_builder::DisplayListBuildingResult;
use floats::Floats;
use flow_list::{FlowList, FlowListIterator, MutFlowListIterator};
use flow_ref::FlowRef;
use flow_ref::{FlowRef, WeakFlowRef};
use fragment::{Fragment, FragmentBorderBoxIterator, SpecificFragmentInfo};
use incremental::{self, RECONSTRUCT_FLOW, REFLOW, REFLOW_OUT_OF_FLOW, RestyleDamage};
use inline::InlineFlow;
@@ -740,7 +740,9 @@ pub struct BaseFlow {
    /// NB: Must be the first element.
    ///
    /// The necessity of this will disappear once we have dynamically-sized types.
    ref_count: AtomicUint,
    strong_ref_count: AtomicUint,

    weak_ref_count: AtomicUint,

    pub restyle_damage: RestyleDamage,

@@ -887,7 +889,8 @@ impl Encodable for BaseFlow {
#[unsafe_destructor]
impl Drop for BaseFlow {
    fn drop(&mut self) {
        if self.ref_count.load(Ordering::SeqCst) != 0 {
        if self.strong_ref_count.load(Ordering::SeqCst) != 0 &&
           self.weak_ref_count.load(Ordering::SeqCst) != 0 {
            panic!("Flow destroyed before its ref count hit zero—this is unsafe!")
        }
    }
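
Note that the new check uses `&&` rather than `||`: when the last FlowRef goes away, the drop glue runs while the weak count is still at least 1 (the implicit weak reference is only released afterwards, in the flow_ref.rs changes below), so requiring both counters to be nonzero keeps normal teardown panic-free while still catching a flow destroyed outright with live references. A minimal sketch of that sequencing, with hypothetical names rather than Servo's types:

    use std::sync::atomic::{AtomicUsize, Ordering};

    struct Base {
        strong: AtomicUsize,
        weak: AtomicUsize,
    }

    impl Drop for Base {
        fn drop(&mut self) {
            // Mirrors BaseFlow: panic only if BOTH counts are still nonzero.
            if self.strong.load(Ordering::SeqCst) != 0 &&
               self.weak.load(Ordering::SeqCst) != 0 {
                panic!("destroyed with live references");
            }
        }
    }

    fn main() {
        let b = Base { strong: AtomicUsize::new(1), weak: AtomicUsize::new(1) };
        // The last strong reference is released; drop glue runs next while
        // weak is still 1, exactly as in normal flow teardown. With `||`
        // this would panic; with `&&` it does not.
        b.strong.fetch_sub(1, Ordering::Release);
        drop(b);
    }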
@@ -948,7 +951,8 @@ impl BaseFlow {
        damage.remove(RECONSTRUCT_FLOW);

        BaseFlow {
            ref_count: AtomicUint::new(1),
            strong_ref_count: AtomicUint::new(1),
            weak_ref_count: AtomicUint::new(1),
            restyle_damage: damage,
            children: FlowList::new(),
            intrinsic_inline_sizes: IntrinsicISizes::new(),
@@ -978,8 +982,12 @@ impl BaseFlow {
        self.children.iter_mut()
    }

    pub unsafe fn ref_count<'a>(&'a self) -> &'a AtomicUint {
        &self.ref_count
    pub unsafe fn strong_ref_count<'a>(&'a self) -> &'a AtomicUint {
        &self.strong_ref_count
    }

    pub unsafe fn weak_ref_count<'a>(&'a self) -> &'a AtomicUint {
        &self.weak_ref_count
    }

    pub fn debug_id(&self) -> uint {
@@ -1333,7 +1341,7 @@ impl MutableOwnedFlowUtils for FlowRef {
/// FIXME(pcwalton): I think this would be better with a borrow flag instead of `unsafe`.
pub struct ContainingBlockLink {
    /// The pointer up to the containing block.
    link: Option<FlowRef>,
    link: Option<WeakFlowRef>,
}

impl ContainingBlockLink {
@@ -1344,10 +1352,10 @@ impl ContainingBlockLink {
    }

    fn set(&mut self, link: FlowRef) {
        self.link = Some(link)
        self.link = Some(link.downgrade())
    }

    pub unsafe fn get<'a>(&'a mut self) -> &'a mut Option<FlowRef> {
    pub unsafe fn get<'a>(&'a mut self) -> &'a mut Option<WeakFlowRef> {
        &mut self.link
    }

flow_ref.rs

@@ -4,18 +4,21 @@

//! Reference-counted pointers to flows.
//!
//! Eventually, with dynamically sized types in Rust, much of this code will be superfluous.
//! Eventually, with dynamically sized types in Rust, much of this code will
//! be superfluous. This design is largely duplicating logic of Arc<T> and
//! Weak<T>; please see comments there for details.

#![allow(unsafe_blocks)]

use flow::Flow;
use flow;

use alloc::heap;
use std::mem;
use std::ops::{Deref, DerefMut};
use std::ptr;
use std::raw;
use std::sync::atomic::Ordering;
use std::sync::atomic::{self, Ordering};

#[unsafe_no_drop_flag]
pub struct FlowRef {
@@ -25,6 +28,14 @@ pub struct FlowRef {
unsafe impl Send for FlowRef {}
unsafe impl Sync for FlowRef {}

#[unsafe_no_drop_flag]
pub struct WeakFlowRef {
    object: raw::TraitObject,
}

unsafe impl Send for WeakFlowRef {}
unsafe impl Sync for WeakFlowRef {}
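
As the module comment notes, FlowRef and WeakFlowRef hand-roll the logic of Arc<T> and Weak<T> for trait objects, which the Arc of the time could not hold without DST. The behavior being duplicated, expressed with std's own types:

    use std::sync::{Arc, Weak};

    fn main() {
        let strong: Arc<String> = Arc::new("flow".to_string());
        let weak: Weak<String> = Arc::downgrade(&strong); // cf. FlowRef::downgrade
        assert!(weak.upgrade().is_some()); // a strong ref is alive: upgrade succeeds
        drop(strong);                      // last strong ref gone: drop glue runs
        assert!(weak.upgrade().is_none()); // upgrade now fails; the allocation is
                                           // freed once the last weak ref is gone
    }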

impl FlowRef {
    pub fn new(mut flow: Box<Flow>) -> FlowRef {
        unsafe {
@@ -37,6 +48,14 @@ impl FlowRef {
            result
        }
    }

    /// Downgrades the FlowRef to a WeakFlowRef.
    pub fn downgrade(&self) -> WeakFlowRef {
        unsafe {
            flow::base(&**self).weak_ref_count().fetch_add(1, Ordering::Relaxed);
        }
        WeakFlowRef { object: self.object }
    }
}
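
downgrade only bumps the weak count. Note also that BaseFlow::new above starts both counters at 1: the extra weak reference is held implicitly by the strong references as a group, and is released in FlowRef's Drop after the drop glue runs. std's Arc uses the same bookkeeping, though Arc::weak_count reports the count without the implicit reference:

    use std::sync::Arc;

    fn main() {
        let s = Arc::new(42);
        let w = Arc::downgrade(&s);
        assert_eq!(Arc::strong_count(&s), 1);
        assert_eq!(Arc::weak_count(&s), 1); // implicit weak ref not reported
        drop(s); // drop glue runs; the implicit weak reference is released
        assert!(w.upgrade().is_none());
    } // `w` drops here: weak count reaches zero and the allocation is freed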

impl<'a> Deref for FlowRef {
@@ -62,27 +81,49 @@ impl Drop for FlowRef {
            if self.object.vtable.is_null() {
                return
            }
            if flow::base(&**self).ref_count().fetch_sub(1, Ordering::SeqCst) > 1 {
            if flow::base(&**self).strong_ref_count().fetch_sub(1, Ordering::Release) != 1 {
                return
            }
            atomic::fence(Ordering::Acquire);

            // Normally we'd call the underlying Drop logic but not free the
            // allocation, but this is almost impossible without DST in
            // Rust. Instead we make a fake trait object to run the drop logic
            // on.
            let flow_ref: FlowRef = mem::replace(self, FlowRef {
                object: raw::TraitObject {
                    vtable: ptr::null_mut(),
                    data: ptr::null_mut(),
                }
            });
            drop(mem::transmute::<raw::TraitObject, Box<Flow>>(flow_ref.object));

            let vtable: &[usize; 3] = mem::transmute::<*mut (), &[usize; 3]>(flow_ref.object.vtable);
            let object_size = vtable[1];
            let object_align = vtable[2];

            let fake_data = heap::allocate(object_size, object_align);
            ptr::copy_memory(fake_data,
                             flow_ref.object.data as *const u8,
                             object_size);

            let fake_box = raw::TraitObject { vtable: flow_ref.object.vtable, data: fake_data as *mut () };
            let fake_flow = mem::transmute::<raw::TraitObject, Box<Flow>>(fake_box);
            drop(fake_flow);

            if flow::base(&*flow_ref).weak_ref_count().fetch_sub(1, Ordering::Release) == 1 {
                atomic::fence(Ordering::Acquire);
                heap::deallocate(flow_ref.object.data as *mut u8, object_size, object_align);
            }

            mem::forget(flow_ref);
            self.object.vtable = ptr::null_mut();
            self.object.data = ptr::null_mut();
        }
    }
}
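
The decrement also changed from a single SeqCst operation to the Release-decrement-plus-Acquire-fence protocol that Arc uses: each Release publishes the writes a thread made through its reference, and the Acquire fence ensures the one thread that saw the count reach zero observes all of them before running the destructor. A standalone sketch of the protocol (the names are mine):

    use std::sync::atomic::{fence, AtomicUsize, Ordering};
    use std::sync::Arc;
    use std::thread;

    fn release_ref(count: &AtomicUsize) -> bool {
        // Release publishes this thread's prior writes with the decrement.
        if count.fetch_sub(1, Ordering::Release) != 1 {
            return false;
        }
        // Acquire synchronizes with every earlier Release decrement, so the
        // destroying thread sees all writes made through other references.
        fence(Ordering::Acquire);
        true
    }

    fn main() {
        let count = Arc::new(AtomicUsize::new(2));
        let other = Arc::clone(&count);
        let t = thread::spawn(move || release_ref(&other));
        let here = release_ref(&count);
        let there = t.join().unwrap();
        assert!(here ^ there); // exactly one caller gets to destroy the object
    }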

impl Clone for FlowRef {
    fn clone(&self) -> FlowRef {
        unsafe {
            drop(flow::base(&**self).ref_count().fetch_add(1, Ordering::SeqCst));
            let _ = flow::base(&**self).strong_ref_count().fetch_add(1, Ordering::Relaxed);
            FlowRef {
                object: raw::TraitObject {
                    vtable: self.object.vtable,
@@ -92,3 +133,78 @@ impl Clone for FlowRef {
        }
    }
}
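
The fake-box dance above exists because pre-1.0 Rust had no stable way to run a trait object's destructor without also freeing its allocation (Box's drop does both at once). Later Rust can express the two steps of this scheme directly; a sketch with a stand-in type:

    use std::alloc::{dealloc, Layout};
    use std::ptr;

    struct Noisy(&'static str);

    impl Drop for Noisy {
        fn drop(&mut self) {
            println!("drop glue ran for {}", self.0);
        }
    }

    fn main() {
        // Take ownership of the allocation without running Drop.
        let raw: *mut Noisy = Box::into_raw(Box::new(Noisy("flow")));
        unsafe {
            // Step 1, FlowRef::drop: run the destructor only.
            ptr::drop_in_place(raw);
            // Step 2, WeakFlowRef::drop once the weak count hits zero:
            // free the memory without running the destructor again.
            dealloc(raw as *mut u8, Layout::new::<Noisy>());
        }
    }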

impl WeakFlowRef {
    /// Upgrades a WeakFlowRef to a FlowRef.
    pub fn upgrade(&self) -> Option<FlowRef> {
        unsafe {
            let object = flow::base(&**self);

pcwalton (Contributor) commented on Mar 16, 2015:

Can you copy the comment from arc.rs:

    // We use a CAS loop to increment the strong count instead of a fetch_add because once the
    // count hits 0 it must never be above 0.

            // We use a CAS loop to increment the strong count instead of a
            // fetch_add because once the count hits 0 it must never be above
            // 0.
            loop {
                let n = object.strong_ref_count().load(Ordering::SeqCst);
                if n == 0 { return None }
                let old = object.strong_ref_count().compare_and_swap(n, n + 1, Ordering::SeqCst);
                if old == n {
                    return Some(FlowRef { object: self.object })
                }
            }
        }
    }
}
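
The same protocol in later Rust would use compare_exchange_weak, which returns the observed value on failure and saves the extra load; the invariant is identical: once the strong count reaches 0 it must never rise again. A sketch, not the PR's code:

    use std::sync::atomic::{AtomicUsize, Ordering};

    fn try_upgrade(strong: &AtomicUsize) -> bool {
        let mut n = strong.load(Ordering::Relaxed);
        loop {
            if n == 0 {
                return false; // object already destroyed; never resurrect it
            }
            match strong.compare_exchange_weak(n, n + 1, Ordering::Acquire,
                                               Ordering::Relaxed) {
                Ok(_) => return true,
                Err(actual) => n = actual, // raced with another thread; retry
            }
        }
    }

    fn main() {
        let strong = AtomicUsize::new(1);
        assert!(try_upgrade(&strong));      // 1 -> 2: upgrade succeeds
        strong.store(0, Ordering::Relaxed); // simulate the last strong drop
        assert!(!try_upgrade(&strong));     // count stays 0: upgrade fails
    }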

impl<'a> Deref for WeakFlowRef {
    type Target = Flow + 'a;
    fn deref(&self) -> &(Flow + 'a) {
        unsafe {
            mem::transmute_copy::<raw::TraitObject, &(Flow + 'a)>(&self.object)
        }
    }
}

impl DerefMut for WeakFlowRef {
    fn deref_mut<'a>(&mut self) -> &mut (Flow + 'a) {
        unsafe {
            mem::transmute_copy::<raw::TraitObject, &mut (Flow + 'a)>(&self.object)
        }
    }
}

impl Clone for WeakFlowRef {
    fn clone(&self) -> WeakFlowRef {
        unsafe {
            flow::base(&**self).weak_ref_count().fetch_add(1, Ordering::Relaxed);
        }
        WeakFlowRef { object: self.object }
    }
}

impl Drop for WeakFlowRef {
    fn drop(&mut self) {
        unsafe {
            if self.object.vtable.is_null() {
                return
            }

            if flow::base(&**self).weak_ref_count().fetch_sub(1, Ordering::Release) == 1 {
                atomic::fence(Ordering::Acquire);

                // This dance deallocates the Box<Flow> without running its
                // drop glue. The drop glue is run when the last strong
                // reference is released.
                let weak_ref: WeakFlowRef = mem::replace(self, WeakFlowRef {
                    object: raw::TraitObject {
                        vtable: ptr::null_mut(),
                        data: ptr::null_mut(),
                    }
                });
                let vtable: &[usize; 3] = mem::transmute::<*mut (), &[usize; 3]>(weak_ref.object.vtable);

pcwalton (Contributor) commented on Mar 16, 2015:

Can you add a comment here explaining that this dance is designed to deallocate the flow without running its drop glue?

                let object_size = vtable[1];
                let object_align = vtable[2];
                heap::deallocate(weak_ref.object.data as *mut u8, object_size, object_align);
                mem::forget(weak_ref);
            }
        }
    }
}
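
Both Drop impls recover the allocation's size and alignment from vtable slots 1 and 2, leaning on rustc's vtable layout of [drop glue, size, align, methods...]. Later Rust exposes the same information without touching the vtable; a sketch with a hypothetical trait:

    use std::alloc::Layout;

    trait Flowish {
        fn name(&self) -> &'static str;
    }

    struct Block;

    impl Flowish for Block {
        fn name(&self) -> &'static str { "block" }
    }

    fn main() {
        let flow: Box<dyn Flowish> = Box::new(Block);
        // Reads the size and alignment the vtable records for the concrete
        // type, as vtable[1] and vtable[2] did above.
        let layout = Layout::for_value(&*flow);
        println!("{}: {} bytes, align {}", flow.name(), layout.size(), layout.align());
    }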

layout_task.rs

@@ -804,8 +804,12 @@ impl LayoutTask {
        }

        if needs_reflow {
            self.try_get_layout_root(*node).map(
                |mut flow| LayoutTask::reflow_all_nodes(&mut *flow));
            match self.try_get_layout_root(*node) {
                None => {}
                Some(mut flow) => {
                    LayoutTask::reflow_all_nodes(&mut *flow);
                }
            }
        }

        // Create a layout context for use throughout the following passes.
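
The map call was being used purely for its side effect; the match spells the control flow out. In later Rust, `if let` says the same thing more directly (stand-in functions below, not the LayoutTask API):

    fn try_get_layout_root() -> Option<String> {
        Some("root flow".to_string())
    }

    fn reflow_all_nodes(flow: &mut String) {
        flow.push_str(" (reflowed)");
    }

    fn main() {
        // Do nothing on None, reflow on Some: same as the match above.
        if let Some(mut flow) = try_get_layout_root() {
            reflow_all_nodes(&mut flow);
            println!("{}", flow);
        }
    }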

lib.rs

@@ -49,6 +49,7 @@ extern crate util;
extern crate string_cache_macros;
extern crate string_cache;

extern crate alloc;
extern crate collections;
extern crate encoding;
extern crate libc;