Skip to content
Merged
Show file tree
Hide file tree
Changes from 6 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 8 additions & 3 deletions oscars/src/collectors/mark_sweep/gc_collections.rs
Original file line number Diff line number Diff line change
Expand Up @@ -221,12 +221,12 @@ mod tests {

#[test]
fn gc_alloc_vec_survives_collection() {
let collector = &mut MarkSweepGarbageCollector::default()
let collector = MarkSweepGarbageCollector::default()
.with_page_size(256)
.with_heap_threshold(512);

let vec = GcAllocVec::with_capacity(100, collector);
let gc_vec = Gc::new_in(GcRefCell::new(vec), collector);
let vec = GcAllocVec::with_capacity(100, &collector);
let gc_vec = Gc::new_in(GcRefCell::new(vec), &collector);

for i in 0..100u64 {
gc_vec.borrow_mut().push(i);
Expand All @@ -236,6 +236,11 @@ mod tests {

assert_eq!(gc_vec.borrow().len(), 100);
assert_eq!(gc_vec.borrow()[50], 50);

// Drop the handle and run a normal collection cycle so cleanup happens
// through regular sweep logic instead of collector-drop teardown.
drop(gc_vec);
collector.collect();
}

#[test]
Expand Down
2 changes: 1 addition & 1 deletion oscars/src/collectors/mark_sweep/internals/ephemeron.rs
Original file line number Diff line number Diff line change
Expand Up @@ -142,7 +142,7 @@ pub(crate) const fn vtable_of<K: Trace + 'static, V: Trace + 'static>() -> &'sta
},
finalize_fn: |this| unsafe {
let ephemeron = this.cast::<PoolItem<Ephemeron<K, V>>>().as_ref().value();
Finalize::finalize(ephemeron);
Trace::run_finalizer(ephemeron);
},
_key_type_id: TypeId::of::<K>(),
_key_size: size_of::<WeakGcBox<K>>(),
Expand Down
6 changes: 5 additions & 1 deletion oscars/src/collectors/mark_sweep/internals/gc_box.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ use crate::collectors::mark_sweep::Finalize;
use crate::collectors::mark_sweep::internals::gc_header::{GcHeader, HeaderColor};
use crate::collectors::mark_sweep::{Trace, TraceColor};

use super::{DropFn, TraceFn, VTable, vtable_of};
use super::{DropFn, FinalizeFn, TraceFn, VTable, vtable_of};

pub struct NonTraceable(());

Expand Down Expand Up @@ -166,6 +166,10 @@ impl<T: Trace + ?Sized> GcBox<T> {
self.vtable.drop_fn()
}

pub(crate) fn finalize_fn(&self) -> FinalizeFn {
self.vtable.finalize_fn()
}

pub(crate) fn size(&self) -> usize {
self.vtable.size()
}
Expand Down
2 changes: 1 addition & 1 deletion oscars/src/collectors/mark_sweep/internals/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,6 @@ mod vtable;

pub(crate) use ephemeron::Ephemeron;
pub(crate) use gc_header::{GcHeader, HeaderColor};
pub(crate) use vtable::{DropFn, TraceFn, VTable, vtable_of};
pub(crate) use vtable::{DropFn, FinalizeFn, TraceFn, VTable, vtable_of};

pub use self::gc_box::{GcBox, NonTraceable, WeakGcBox};
14 changes: 14 additions & 0 deletions oscars/src/collectors/mark_sweep/internals/vtable.rs
Original file line number Diff line number Diff line change
Expand Up @@ -27,12 +27,20 @@ pub(crate) const fn vtable_of<T: Trace + 'static>() -> &'static VTable {
// SAFETY: The caller must ensure the erased pointer is not dropped or deallocated.
unsafe { core::ptr::drop_in_place(this.as_mut()) };
}

// SAFETY: The caller must ensure that the passed erased pointer is `GcBox<Self>`.
unsafe fn finalize_fn(this: GcErasedPointer) {
// SAFETY: The caller must ensure that the passed erased pointer is `GcBox<Self>`.
let value = unsafe { this.cast::<PoolItem<GcBox<Self>>>().as_ref().value() };
Trace::run_finalizer(value);
}
}

impl<T: Trace + 'static> HasVTable for T {
const VTABLE: &'static VTable = &VTable {
trace_fn: T::trace_fn,
drop_fn: T::drop_fn,
finalize_fn: T::finalize_fn,
type_id: TypeId::of::<T>(),
size: size_of::<GcBox<T>>(),
};
Expand All @@ -43,11 +51,13 @@ pub(crate) const fn vtable_of<T: Trace + 'static>() -> &'static VTable {

pub(crate) type TraceFn = unsafe fn(this: GcErasedPointer, color: TraceColor);
pub(crate) type DropFn = unsafe fn(this: GcErasedPointer);
pub(crate) type FinalizeFn = unsafe fn(this: GcErasedPointer);

#[derive(Debug)]
pub(crate) struct VTable {
trace_fn: TraceFn,
drop_fn: DropFn,
finalize_fn: FinalizeFn,
type_id: TypeId,
size: usize,
}
Expand All @@ -61,6 +71,10 @@ impl VTable {
self.drop_fn
}

pub(crate) fn finalize_fn(&self) -> FinalizeFn {
self.finalize_fn
}

pub(crate) const fn type_id(&self) -> TypeId {
self.type_id
}
Expand Down
104 changes: 73 additions & 31 deletions oscars/src/collectors/mark_sweep/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -114,21 +114,21 @@ impl Drop for MarkSweepGarbageCollector {
}
}

// Reclaim all collector-owned weak maps.
// Single-threaded, so this is safe.
for &map_ptr in self.weak_maps.borrow().iter() {
unsafe {
let _ = rust_alloc::boxed::Box::from_raw(map_ptr.as_ptr());
}
}

// SAFETY:
// `Gc<T>` pointers act as if they live forever (`'static`).
// if the GC drops while they exist, we leak the memory to prevent a UAF
if self.pools_len() > 0
&& (!self.root_queue.borrow().is_empty()
|| !self.pending_root_queue.borrow().is_empty())
{
// if the GC drops while rooted values still exist, we leak memory to prevent UAF.
let has_rooted_values = self
.root_queue
.borrow()
.iter()
.any(|node| unsafe { node.as_ref().value().is_rooted() })
|| self
.pending_root_queue
.borrow()
.iter()
.any(|node| unsafe { node.as_ref().value().is_rooted() });

if self.pools_len() > 0 && has_rooted_values {
// Unrooted items are NOT swept here so they intentionally leak
// instead of triggering a Use-After-Free.
// The underlying arena pools WILL be dropped (and OS memory reclaimed)
Expand All @@ -137,6 +137,7 @@ impl Drop for MarkSweepGarbageCollector {
// No rooted items are alive. Sweep and clean up the remaining
// cycles and loose allocations before the allocator natively drops.
self.sweep_all_queues();
self.reclaim_dead_weak_maps();
}
}
}
Expand Down Expand Up @@ -169,46 +170,87 @@ impl MarkSweepGarbageCollector {
self.allocator.borrow_mut().drop_empty_pools();
}

// Force drops all elements in the internal tracking queues and clears
// them without regard for reachability.
// Force-collect all tracked items in collector teardown.
//
// Phases:
// 1. finalize everything
// 2. drop + free everything
//
// Since this runs only during collector drop (not a normal collection
// cycle), we don't need reachability marking here.
//
// NOTE: This intentionally differs from arena2's sweep_all_queues.
// arena3 uses `free_slot` calls to reclaim memory.
// arena2 uses a bitmap (`mark_dropped`) and reclaims automatically
fn sweep_all_queues(&self) {
let ephemerons = core::mem::take(&mut *self.ephemeron_queue.borrow_mut());
for ephemeron in ephemerons {
let roots = core::mem::take(&mut *self.root_queue.borrow_mut());
let pending_e = core::mem::take(&mut *self.pending_ephemeron_queue.borrow_mut());
let pending_r = core::mem::take(&mut *self.pending_root_queue.borrow_mut());

// Phase 1: finalize everything while all allocations are still alive.
for node in roots.iter().chain(pending_r.iter()).copied() {
let node_ref = unsafe { node.as_ref() };
let gc_box = node_ref.value();
unsafe { gc_box.finalize_fn()(node) };
}

for ephemeron in ephemerons.iter().chain(pending_e.iter()).copied() {
let ephemeron_ref = unsafe { ephemeron.as_ref() };
unsafe { ephemeron_ref.value().drop_fn()(ephemeron) };
self.allocator
.borrow_mut()
.free_slot(ephemeron.cast::<u8>());
let vtable = ephemeron_ref.value();
unsafe { vtable.finalize_fn()(ephemeron) };
}

let roots = core::mem::take(&mut *self.root_queue.borrow_mut());
// Phase 2: drop and free all tracked values.
for node in roots {
let node_ref = unsafe { node.as_ref() };
unsafe { node_ref.value().drop_fn()(node) };
let gc_box = node_ref.value();
unsafe { gc_box.drop_fn()(node) };
self.allocator.borrow_mut().free_slot(node.cast::<u8>());
}

let pending_e = core::mem::take(&mut *self.pending_ephemeron_queue.borrow_mut());
for ephemeron in pending_e {
for node in pending_r {
let node_ref = unsafe { node.as_ref() };
let gc_box = node_ref.value();
unsafe { gc_box.drop_fn()(node) };
self.allocator.borrow_mut().free_slot(node.cast::<u8>());
}

for ephemeron in ephemerons {
let ephemeron_ref = unsafe { ephemeron.as_ref() };
unsafe { ephemeron_ref.value().drop_fn()(ephemeron) };
let vtable = ephemeron_ref.value();
unsafe { vtable.drop_fn()(ephemeron) };
self.allocator
.borrow_mut()
.free_slot(ephemeron.cast::<u8>());
}

let pending_r = core::mem::take(&mut *self.pending_root_queue.borrow_mut());
for node in pending_r {
let node_ref = unsafe { node.as_ref() };
unsafe { node_ref.value().drop_fn()(node) };
self.allocator.borrow_mut().free_slot(node.cast::<u8>());
for ephemeron in pending_e {
let ephemeron_ref = unsafe { ephemeron.as_ref() };
let vtable = ephemeron_ref.value();
unsafe { vtable.drop_fn()(ephemeron) };
self.allocator
.borrow_mut()
.free_slot(ephemeron.cast::<u8>());
}
}

fn reclaim_dead_weak_maps(&self) {
// During collector teardown, reclaim only maps that have already been
// marked dead by `WeakMap::drop`.
self.weak_maps.borrow_mut().retain(|&map_ptr| {
let map = unsafe { map_ptr.as_ref() };
if map.is_alive() {
true
} else {
unsafe {
Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

nit: safety comment

let _ = rust_alloc::boxed::Box::from_raw(map_ptr.as_ptr());
Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

nit: can you please add a note that we reclaim the weak_map with rust_alloc, because it's allocated with rust_alloc and not alloc.

Ideally we eventually have that value better tied to alloc, and not rust_alloc, but in the meantime, we should really note it better, because it's going to be easy to lose track and get confused if we don't

}
false
}
Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

suggestion: maybe clean up this section a bit

Suggested change
}
let is_map_alive = map.is_alive();
if !is_map_alive {
unsafe {
let _ = rust_alloc::boxed::Box::from_raw(map_ptr.as_ptr());
}
}
is_map_alive

});
}

// Extracts and sweeps items that are considered dead (different trace color).
fn sweep_trace_color(&self, sweep_color: TraceColor) {
// We use retain and manually drop deleted maps to satisfy Miri's
Expand Down Expand Up @@ -294,7 +336,7 @@ impl MarkSweepGarbageCollector {
// Check if the value is not reachable, i.e. dead.
if !gc_box.is_reachable(color) {
// Finalize the dead item
gc_box.finalize();
unsafe { gc_box.finalize_fn()(*node) };
// Recheck if the value is now rooted again after finalization.
if gc_box.is_rooted() {
unsafe { gc_box.trace_fn()(*node, color) };
Expand Down
Loading