1 change: 1 addition & 0 deletions oscars/Cargo.toml
@@ -33,4 +33,5 @@ default = ["mark_sweep"]
 std = []
 mark_sweep = []
 mark_sweep2 = ["mark_sweep"]
+mark_sweep_branded = ["mark_sweep"]
 thin-vec = ["dep:thin-vec", "mark_sweep"]
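For context, a downstream crate would opt in to the new collector with a feature entry along these lines (hypothetical dependency declaration, not part of this PR):

[dependencies]
oscars = { path = "../oscars", features = ["mark_sweep_branded"] }

Because mark_sweep_branded pulls in mark_sweep, enabling it alone is enough.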
19 changes: 17 additions & 2 deletions oscars/src/alloc/mempool3/alloc.rs
@@ -61,14 +61,17 @@ impl<'pool> ErasedPoolPointer<'pool> {
 /// typed pointer into a pool slot
 #[derive(Debug, Clone, Copy)]
 #[repr(transparent)]
-pub struct PoolPointer<'pool, T>(NonNull<PoolItem<T>>, PhantomData<&'pool T>);
+pub struct PoolPointer<'pool, T>(NonNull<PoolItem<T>>, PhantomData<(&'pool (), *mut T)>);

 impl<'pool, T> PoolPointer<'pool, T> {
     pub(crate) unsafe fn from_raw(raw: NonNull<PoolItem<T>>) -> Self {
         Self(raw, PhantomData)
     }

-    pub fn as_inner_ref(&self) -> &'pool T {
+    pub fn as_inner_ref(&self) -> &'pool T
+    where
+        T: 'pool,
+    {
         // SAFETY: pointer is valid and properly aligned
         unsafe { &(*self.0.as_ptr()).0 }
     }
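The marker change swaps PhantomData<&'pool T> (covariant in both 'pool and T) for PhantomData<(&'pool (), *mut T)>, which keeps 'pool covariant but makes PoolPointer invariant in T. A minimal sketch of the same idiom in isolation (hypothetical Ptr type, not from this PR):

use core::marker::PhantomData;
use core::ptr::NonNull;

// `&'a ()` keeps covariance in 'a; `*mut T` forces invariance in T, so
// lifetimes inside T cannot be shrunk or stretched through a `Ptr`.
struct Ptr<'a, T>(NonNull<T>, PhantomData<(&'a (), *mut T)>);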
@@ -310,6 +313,18 @@ impl SlotPool
         self.live.set(self.live.get().saturating_sub(1));
     }

+    /// Iterates over all live (allocated) slot pointers in this pool.
+    pub(crate) fn iter_live(&self) -> impl Iterator<Item = NonNull<u8>> + '_ {
+        (0..self.slot_count).filter_map(move |i| {
+            let chunk = self.bitmap_chunk(i);
+            if chunk.get() & (1u64 << (i % 64)) != 0 {
+                Some(self.slot_ptr(i))
+            } else {
+                None
+            }
+        })
+    }
+
     /// returns true when the pool is empty and safe to drop
     /// `live` tracks the count, so no bitmap scan is needed
     pub fn run_drop_check(&self) -> bool {
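iter_live relies on the pool's internal bitmap_chunk and slot_ptr, but the bit test itself is easy to show in isolation. A standalone sketch of the same filter over a flat Vec<u64> bitmap (toy helper, not the pool's actual layout):

/// Yields the indices of set bits, mirroring iter_live's test:
/// word `i / 64`, bit `i % 64`.
fn live_indices(bitmap: &[u64], slot_count: usize) -> impl Iterator<Item = usize> + '_ {
    (0..slot_count).filter(move |&i| bitmap[i / 64] & (1u64 << (i % 64)) != 0)
}

fn main() {
    let bitmap = vec![0b1011u64]; // slots 0, 1, and 3 are live
    assert_eq!(live_indices(&bitmap, 64).collect::<Vec<_>>(), vec![0, 1, 3]);
}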
7 changes: 7 additions & 0 deletions oscars/src/alloc/mempool3/mod.rs
@@ -121,6 +121,13 @@ impl<'alloc> PoolAllocator<'alloc> {
         self.current_heap_size
     }

+    /// Iterates over every live slot pointer across all slot pools.
+    ///
+    /// Yields one `NonNull<u8>` per allocated (not yet freed) slot.
+    pub fn iter_live_slots(&self) -> impl Iterator<Item = core::ptr::NonNull<u8>> + '_ {
+        self.slot_pools.iter().flat_map(|pool| pool.iter_live())
+    }
+
     pub fn is_below_threshold(&self) -> bool {
         // keep 25% headroom so collection fires before the last page fills
         let margin = self.heap_threshold / 4;
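Since iter_live_slots just flat-maps the per-pool iterators, a caller-side sketch shows the shape of the API (the live_slot_count helper is illustrative, not from this PR):

// Count live allocations without touching pool internals; each yielded
// NonNull<u8> points at a live slot in some pool.
fn live_slot_count(alloc: &PoolAllocator<'_>) -> usize {
    alloc.iter_live_slots().count()
}

flat_map keeps the whole chain lazy, so a consumer that stops early never scans the remaining pools.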
71 changes: 71 additions & 0 deletions oscars/src/collectors/mark_sweep_branded/cell.rs
@@ -0,0 +1,71 @@
//! Interior mutability for GC-managed values.

use crate::collectors::mark_sweep_branded::trace::{Finalize, Trace, Tracer};
use core::cell::{Ref, RefCell, RefMut};
use core::ops::{Deref, DerefMut};

/// A GC-aware wrapper around [`RefCell<T>`].
pub struct GcRefCell<T: Trace> {
    inner: RefCell<T>,
}

impl<T: Trace> GcRefCell<T> {
    /// Wraps `value` in a new `GcRefCell`.
    pub fn new(value: T) -> Self {
        Self {
            inner: RefCell::new(value),
        }
    }

    /// Acquires a shared borrow of the inner value.
    ///
    /// # Panics
    ///
    /// Panics if the value is currently mutably borrowed.
    pub fn borrow(&self) -> GcRef<'_, T> {
        GcRef(self.inner.borrow())
    }

    /// Acquires a mutable borrow of the inner value.
    ///
    /// # Panics
    ///
    /// Panics if the value is currently borrowed.
    pub fn borrow_mut(&self) -> GcRefMut<'_, T> {
        GcRefMut(self.inner.borrow_mut())
    }
}

/// A shared borrow guard returned by [`GcRefCell::borrow`].
pub struct GcRef<'a, T: Trace>(Ref<'a, T>);

impl<T: Trace> Deref for GcRef<'_, T> {
    type Target = T;
    fn deref(&self) -> &T {
        &self.0
    }
}

/// A mutable borrow guard returned by [`GcRefCell::borrow_mut`].
pub struct GcRefMut<'a, T: Trace>(RefMut<'a, T>);

impl<T: Trace> Deref for GcRefMut<'_, T> {
    type Target = T;
    fn deref(&self) -> &T {
        &self.0
    }
}

impl<T: Trace> DerefMut for GcRefMut<'_, T> {
    fn deref_mut(&mut self) -> &mut T {
        &mut self.0
    }
}

impl<T: Trace> Finalize for GcRefCell<T> {}

impl<T: Trace> Trace for GcRefCell<T> {
    fn trace(&mut self, tracer: &mut Tracer) {
        self.inner.get_mut().trace(tracer);
    }
}
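A minimal usage sketch of the borrow guards; it assumes the crate implements Trace for i32, which is not shown in this diff:

// RefCell semantics carry over: shared borrows may overlap, but borrow()
// while a GcRefMut guard is still alive panics at runtime.
fn refcell_demo() {
    let cell = GcRefCell::new(1_i32);
    *cell.borrow_mut() += 1;
    assert_eq!(*cell.borrow(), 2);
}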
48 changes: 48 additions & 0 deletions oscars/src/collectors/mark_sweep_branded/ephemeron.rs
@@ -0,0 +1,48 @@
use crate::{
    alloc::mempool3::PoolItem,
    collectors::mark_sweep_branded::{
        gc::Gc,
        gc_box::GcBox,
        mutation_ctx::MutationContext,
        trace::{Finalize, Trace, Tracer},
    },
};
use core::marker::PhantomData;
use core::ptr::NonNull;

pub struct Ephemeron<'id, K: Trace, V: Trace> {
    pub(crate) key_ptr: NonNull<PoolItem<GcBox<K>>>,

[Review comment (Member)] issue: key_ptr here should be an Option<pointer>.
Ephemeron keys are removed and set to None when the underlying ptr is invalid (i.e. null).

    pub(crate) key_alloc_id: usize,
    pub(crate) value_ptr: NonNull<PoolItem<GcBox<V>>>,
    pub(crate) _marker: PhantomData<*mut &'id ()>,
}

impl<'id, K: Trace, V: Trace> Ephemeron<'id, K, V> {
    /// Returns the value if the key is alive.
    pub fn get_value<'gc>(&self, _cx: &MutationContext<'id, 'gc>) -> Option<Gc<'gc, V>> {
        // SAFETY: `_cx` proves the collector is alive; alloc_id guards ABA.
        let key_alive = unsafe { (*self.key_ptr.as_ptr()).0.alloc_id == self.key_alloc_id };
        if key_alive {
            Some(Gc {
                ptr: self.value_ptr,
                _marker: PhantomData,
            })
        } else {
            None
        }
    }
}

impl<'id, K: Trace, V: Trace> Clone for Ephemeron<'id, K, V> {
    fn clone(&self) -> Self {
        *self
    }
}

impl<'id, K: Trace, V: Trace> Copy for Ephemeron<'id, K, V> {}

impl<'id, K: Trace, V: Trace> Finalize for Ephemeron<'id, K, V> {}

impl<'id, K: Trace, V: Trace> Trace for Ephemeron<'id, K, V> {
    fn trace(&mut self, _tracer: &mut Tracer) {}
}
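To make the alloc_id guard concrete, a hedged sketch of the intended behavior; mutate, cx.alloc, and Ephemeron::new are assumed API surface from the rest of the collector and do not appear in this diff:

// Hypothetical usage; every name except get_value is an assumption.
mutate(|cx| {
    let key = cx.alloc("key");
    let value = cx.alloc(42_i32);
    let eph = Ephemeron::new(cx, key, value);

    // While the key's GcBox still carries the alloc_id recorded at
    // construction, the value counts as reachable.
    assert!(eph.get_value(cx).is_some());

    // Once the key's slot is freed (alloc_id set to FREED_ALLOC_ID) or
    // reused (alloc_id bumped), the comparison fails and get_value
    // returns None instead of a dangling Gc.
});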
108 changes: 108 additions & 0 deletions oscars/src/collectors/mark_sweep_branded/gc.rs
@@ -0,0 +1,108 @@
//! Core pointer types.

use crate::{
    alloc::mempool3::{PoolAllocator, PoolItem},
    collectors::mark_sweep_branded::{
        gc_box::GcBox,
        mutation_ctx::MutationContext,
        root_link::RootLink,
        trace::{Finalize, Trace},
    },
};
use core::fmt;
use core::marker::PhantomData;
use core::ops::Deref;
use core::ptr::NonNull;

pub(crate) type RootDropFn = unsafe fn(&mut PoolAllocator<'static>, NonNull<u8>);

/// A transient pointer to a GC-managed value.
#[derive(Debug)]
pub struct Gc<'gc, T: Trace + ?Sized + 'gc> {
    pub(crate) ptr: NonNull<PoolItem<GcBox<T>>>,

[Review comment (Member)] Here and elsewhere: is there a reason for using
NonNull<PoolItem<GcBox<T>>>? If there's not a particular reason, then we
should be using PoolPointer.

    pub(crate) _marker: PhantomData<(&'gc T, *const ())>,
}

impl<'gc, T: Trace + ?Sized + 'gc> Copy for Gc<'gc, T> {}
impl<'gc, T: Trace + ?Sized + 'gc> Clone for Gc<'gc, T> {
    fn clone(&self) -> Self {
        *self
    }
}

impl<'gc, T: Trace + 'gc> Gc<'gc, T> {
    /// Returns a shared reference to the value.
    #[inline]
    pub fn get(&self) -> &T {
        // SAFETY: `ptr` is non-null and valid for `'gc` by construction.
        // The `'gc` lifetime is scoped to a `mutate()` closure; collection
        // only occurs via `cx.collect()` within that same closure, and
        // `Gc<'gc, T>` can't escape the closure.
        unsafe { &(*self.ptr.as_ptr()).0.value }
    }
}

impl<'gc, T: Trace + fmt::Display + 'gc> fmt::Display for Gc<'gc, T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(self.get(), f)
    }
}

impl<'gc, T: Trace + 'gc> Deref for Gc<'gc, T> {
    type Target = T;
    fn deref(&self) -> &T {
        self.get()
    }
}

/// Heap node backing a `Root`.
#[repr(C)]
pub(crate) struct RootNode<'id, T: Trace> {

[Review comment (Member)] nit: it may be better to move RootNode, Root,
and RootLink all into their own root module. I think that may be easier
to reason about.

    /// Intrusive list link
    pub(crate) link: RootLink,
    /// Pointer to the allocation
    pub(crate) gc_ptr: NonNull<PoolItem<GcBox<T>>>,
    /// Type-erased drop function for freeing this RootNode
    pub(crate) drop_fn: RootDropFn,
    /// Raw pointer to the Collector for freeing this node
    pub(crate) collector_ptr: *const crate::collectors::mark_sweep_branded::Collector,
    pub(crate) _marker: PhantomData<*mut &'id ()>,
}

/// A handle that keeps a GC allocation live.
#[must_use = "dropping a root unregisters it from the GC"]
pub struct Root<'id, T: Trace> {
    pub(crate) raw: NonNull<RootNode<'id, T>>,
}

impl<'id, T: Trace> Root<'id, T> {
    /// Converts this root into a `Gc` pointer
    pub fn get<'gc>(&self, _cx: &MutationContext<'id, 'gc>) -> Gc<'gc, T> {
        Gc {
            // SAFETY: `raw` is non-null and valid.
            ptr: unsafe { self.raw.as_ref().gc_ptr },
            _marker: PhantomData,
        }
    }
}

impl<'id, T: Trace> Drop for Root<'id, T> {
    fn drop(&mut self) {
        unsafe {
            let node_ref = self.raw.as_ref();
            if node_ref.link.is_linked() {
                RootLink::unlink(NonNull::from(&node_ref.link));
            }
            // SAFETY: collector_ptr is valid for the lifetime of the GcContext
            let collector = &*node_ref.collector_ptr;
            collector.free_root_node(self.raw.cast::<u8>(), node_ref.drop_fn);
        }
    }
}

impl<T: Trace> Finalize for Gc<'_, T> {}
impl<T: Trace> Trace for Gc<'_, T> {
    fn trace(&mut self, tracer: &mut crate::collectors::mark_sweep_branded::trace::Tracer) {
        tracer.mark(self);
    }
}
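A hedged sketch of how Gc, Root, and MutationContext compose; mutate, cx.alloc, cx.root, and cx.collect are assumed from the surrounding collector API and are not defined in this file:

// Hypothetical mutator session; all cx methods are assumed names.
mutate(|cx| {
    let gc: Gc<'_, i32> = cx.alloc(7); // transient, lives only for 'gc
    let root = cx.root(gc);            // pins the allocation via a RootNode
    cx.collect();                      // unrooted allocations may be freed
    let again = root.get(cx);          // re-derive a fresh Gc from the root
    assert_eq!(*again, 7);
});                                    // root drops here and unlinks itself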
67 changes: 67 additions & 0 deletions oscars/src/collectors/mark_sweep_branded/gc_box.rs
@@ -0,0 +1,67 @@
//! The heap header wrapping every GC-managed value.

use core::cell::Cell;
use core::ptr::NonNull;

use crate::alloc::mempool3::{PoolAllocator, PoolItem};
use crate::collectors::mark_sweep_branded::trace::{Trace, TraceFn, Tracer};

pub(crate) type DropFn = unsafe fn(&mut PoolAllocator<'static>, NonNull<u8>);

/// The tri-color marking state of a [`GcBox`]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(u8)]
pub(crate) enum GcColor {
    /// Not yet reached by mark phase
    White = 0,
    /// Reached and queued in the worklist, children not yet traced.
    Gray = 1,
    /// Reached and dequeued from the worklist, all children traced
    Black = 2,
}

/// Heap wrapper for a garbage-collected value.
///
/// Allocated via [`PoolAllocator`].
pub(crate) struct GcBox<T: ?Sized> {
    /// tri-color marking state, updated by the mark phase
    pub(crate) color: Cell<GcColor>,
    /// Type-erased trace function.
    pub(crate) trace_fn: TraceFn,
    /// Type-erased finalize and free fn
    pub(crate) drop_fn: DropFn,
    /// Allocation ID used to validate weak pointers.
    pub(crate) alloc_id: usize,
    /// The user value.
    pub(crate) value: T,
}

impl<T: ?Sized> GcBox<T> {
    pub(crate) const FREED_ALLOC_ID: usize = usize::MAX;
}

impl<T> GcBox<T> {
    /// Create a [`GcBox`] for `value`, `color` starts as [`GcColor::White`]
    pub(crate) fn new(value: T, trace_fn: TraceFn, drop_fn: DropFn, alloc_id: usize) -> Self {
        Self {
            color: Cell::new(GcColor::White),
            trace_fn,
            drop_fn,
            alloc_id,
            value,
        }
    }
}

/// type-erased trace function for a `GcBox<T>` slot.
///
/// # Safety
///
/// `ptr` must point to a live `PoolItem<GcBox<T>>` in the pool allocator
pub(crate) unsafe fn trace_value<T: Trace>(ptr: NonNull<u8>, tracer: &mut Tracer<'_>) {
    let pool_item_ptr = ptr.cast::<PoolItem<GcBox<T>>>();
    unsafe {
        (*pool_item_ptr.as_ptr()).0.color.set(GcColor::Black);
        (*pool_item_ptr.as_ptr()).0.value.trace(tracer);
    }
}
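As a standalone illustration of the discipline GcColor encodes, the classic tri-color worklist over a toy adjacency list (this is not the collector's actual mark loop):

#[derive(Clone, Copy, PartialEq)]
enum Color { White, Gray, Black }

fn mark(roots: &[usize], edges: &[Vec<usize>]) -> Vec<Color> {
    let mut color = vec![Color::White; edges.len()];
    let mut worklist: Vec<usize> = Vec::new();
    for &r in roots {
        color[r] = Color::Gray;  // reached and queued, children untraced
        worklist.push(r);
    }
    while let Some(n) = worklist.pop() {
        color[n] = Color::Black; // dequeued; now trace its children
        for &child in &edges[n] {
            if color[child] == Color::White {
                color[child] = Color::Gray;
                worklist.push(child);
            }
        }
    }
    color // anything still White is unreachable and can be swept
}

fn main() {
    let edges = vec![vec![1], vec![2], vec![], vec![]]; // node 3 unreachable
    let colors = mark(&[0], &edges);
    assert!(colors[2] == Color::Black && colors[3] == Color::White);
}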