
Be more careful about the order in which we read the next field #6227

Merged: 1 commit, May 4, 2013

31 changes: 25 additions & 6 deletions src/libcore/cleanup.rs
@@ -126,22 +126,29 @@ struct AnnihilateStats {
     n_bytes_freed: uint
 }
 
-unsafe fn each_live_alloc(f: &fn(box: *mut BoxRepr, uniq: bool) -> bool) {
+unsafe fn each_live_alloc(read_next_before: bool,
+                          f: &fn(box: *mut BoxRepr, uniq: bool) -> bool) {
     //! Walks the internal list of allocations
 
     use managed;
 
     let task: *Task = transmute(rustrt::rust_get_task());
     let box = (*task).boxed_region.live_allocs;
     let mut box: *mut BoxRepr = transmute(copy box);
     while box != mut_null() {
-        let next = transmute(copy (*box).header.next);
+        let next_before = transmute(copy (*box).header.next);
         let uniq =
             (*box).header.ref_count == managed::raw::RC_MANAGED_UNIQUE;
 
         if ! f(box, uniq) {
             break
         }
 
-        box = next
+        if read_next_before {
+            box = next_before;
+        } else {
+            box = transmute(copy (*box).header.next);
+        }
     }
 }
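
Not part of the patch, but a minimal, self-contained sketch (in present-day Rust rather than the 2013 dialect above) of the traversal hazard that `read_next_before` distinguishes. `Node`, `walk` and the visitor below are illustrative names, not the libcore code; only the before/after choice mirrors the patch. Here the visitor frees the node it is handed, so the link has to be captured before the callback runs:

// Illustrative intrusive list whose nodes the visitor is allowed to free.
struct Node {
    next: *mut Node,
    id: u32,
}

// If `read_next_before` is true, the link is captured before the callback,
// which is what you need when the callback may free the *current* node.
// If false, the link is re-read afterwards, which is what you need when the
// callback may free and unlink *other* nodes (rewriting `cur`'s link).
unsafe fn walk(mut cur: *mut Node,
               read_next_before: bool,
               f: &mut dyn FnMut(*mut Node) -> bool) {
    while !cur.is_null() {
        let next_before = unsafe { (*cur).next };
        if !f(cur) {
            break;
        }
        cur = if read_next_before {
            next_before
        } else {
            // Only valid if the callback did not free `cur` itself.
            unsafe { (*cur).next }
        };
    }
}

fn main() {
    // Build a three-node list 1 -> 2 -> 3, handed out as raw pointers.
    let c = Box::into_raw(Box::new(Node { next: std::ptr::null_mut(), id: 3 }));
    let b = Box::into_raw(Box::new(Node { next: c, id: 2 }));
    let a = Box::into_raw(Box::new(Node { next: b, id: 1 }));

    // The visitor frees the node it is handed, so the traversal must have
    // read the link beforehand (read_next_before == true).
    let mut visit = |n: *mut Node| -> bool {
        unsafe {
            println!("visiting node {}", (*n).id);
            drop(Box::from_raw(n));
        }
        true
    };

    unsafe { walk(a, true, &mut visit) };
}

With `read_next_before == false`, the same program would dereference freed memory when it reached for the next link.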

@@ -173,7 +180,10 @@ pub unsafe fn annihilate() {
     };
 
     // Pass 1: Make all boxes immortal.
-    for each_live_alloc |box, uniq| {
+    //
+    // In this pass, nothing gets freed, so it does not matter whether
+    // we read the next field before or after the callback.
+    for each_live_alloc(true) |box, uniq| {
         stats.n_total_boxes += 1;
         if uniq {
             stats.n_unique_boxes += 1;
@@ -183,7 +193,11 @@
     }
 
     // Pass 2: Drop all boxes.
-    for each_live_alloc |box, uniq| {
+    //
+    // In this pass, unique-managed boxes may get freed, but not
+    // managed boxes, so we must read the `next` field *after* the
+    // callback, as the original value may have been freed.
+    for each_live_alloc(false) |box, uniq| {
         if !uniq {
             let tydesc: *TypeDesc = transmute(copy (*box).header.type_desc);
             let drop_glue: DropGlue = transmute(((*tydesc).drop_glue, 0));
@@ -192,7 +206,12 @@
     }
 
     // Pass 3: Free all boxes.
-    for each_live_alloc |box, uniq| {
+    //
+    // In this pass, managed boxes may get freed (but not
+    // unique-managed boxes, though I think that none of those are
+    // left), so we must read the `next` field before, since it will
+    // not be valid after.
+    for each_live_alloc(true) |box, uniq| {
         if !uniq {
             stats.n_bytes_freed +=
                 (*((*box).header.type_desc)).size
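
A companion sketch for the opposite case, the one pass 2 depends on (again illustrative present-day Rust, not the libcore code; `Node` and `walk_reading_next_after` are made-up names): the visitor frees the *successor* node and splices it out of the list, so a link captured before the callback would dangle, and the traversal has to re-read it afterwards:

struct Node {
    next: *mut Node,
    id: u32,
}

// Fixed "read the link after the callback" variant: correct as long as the
// callback never frees the node it is currently visiting.
unsafe fn walk_reading_next_after(mut cur: *mut Node,
                                  f: &mut dyn FnMut(*mut Node) -> bool) {
    while !cur.is_null() {
        if !f(cur) {
            break;
        }
        // Read the link only now: the callback may have freed the node the
        // link used to point at and redirected it to one that is still live.
        cur = unsafe { (*cur).next };
    }
}

fn main() {
    // Build a three-node list 1 -> 2 -> 3, handed out as raw pointers.
    let c = Box::into_raw(Box::new(Node { next: std::ptr::null_mut(), id: 3 }));
    let b = Box::into_raw(Box::new(Node { next: c, id: 2 }));
    let a = Box::into_raw(Box::new(Node { next: b, id: 1 }));

    // While visiting node 1, the visitor frees node 2 and splices it out,
    // so a `next` value captured before the callback would now dangle.
    let mut visit = |n: *mut Node| -> bool {
        unsafe {
            println!("visiting node {}", (*n).id);
            if (*n).id == 1 {
                let doomed = (*n).next;      // node 2
                (*n).next = (*doomed).next;  // splice it out: 1 -> 3
                drop(Box::from_raw(doomed)); // free node 2
            }
        }
        true
    };

    unsafe { walk_reading_next_after(a, &mut visit) };
    // Nodes 1 and 3 are intentionally leaked to keep the demo short.
}

Pass 3 is the mirror image: freeing the current box invalidates re-reading its header, so the patch passes `true` there, as it does in pass 1 (where either choice works).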