
Make ObjectReference non-nullable
wks committed Jan 8, 2024
1 parent e7143b9 commit 0d1c7d3
Showing 19 changed files with 97 additions and 130 deletions.
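For orientation, a minimal stand-alone sketch of the idea behind this commit (the `MiniRef` type below is hypothetical, not mmtk's actual code): backing the reference type with `NonZeroUsize` lets `Option<ObjectReference>` express nullability with no space overhead, because the zero value becomes the `None` niche.

```rust
use std::num::NonZeroUsize;

// Stand-in for the new `ObjectReference`: the inner value can never be zero.
#[repr(transparent)]
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
struct MiniRef(NonZeroUsize);

impl MiniRef {
    // Mirrors the new `from_raw_address`: a zero address maps to `None`.
    fn from_raw_address(addr: usize) -> Option<MiniRef> {
        NonZeroUsize::new(addr).map(MiniRef)
    }
}

fn main() {
    // The niche optimization keeps `Option<MiniRef>` pointer-sized,
    // so replacing the null sentinel with `Option` costs nothing.
    assert_eq!(
        std::mem::size_of::<Option<MiniRef>>(),
        std::mem::size_of::<usize>()
    );
    assert!(MiniRef::from_raw_address(0).is_none());
    assert!(MiniRef::from_raw_address(0x1000).is_some());
}
```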
3 changes: 0 additions & 3 deletions src/memory_manager.rs
@@ -708,9 +708,6 @@ pub fn is_mmtk_object(addr: Address) -> bool {
/// * `object`: The object reference to query.
pub fn is_in_mmtk_spaces<VM: VMBinding>(object: ObjectReference) -> bool {
use crate::mmtk::SFT_MAP;
if object.is_null() {
return false;
}
SFT_MAP
.get_checked(object.to_address::<VM>())
.is_in_space(object)
8 changes: 3 additions & 5 deletions src/plan/generational/gc_work.rs
@@ -36,18 +36,16 @@ impl<VM: VMBinding, P: GenerationalPlanExt<VM> + PlanTraceObject<VM>> ProcessEdg
Self { plan, base }
}
fn trace_object(&mut self, object: ObjectReference) -> ObjectReference {
debug_assert!(!object.is_null());

// We cannot borrow `self` twice in a call, so we extract `worker` as a local variable.
let worker = self.worker();
self.plan
.trace_object_nursery(&mut self.base.nodes, object, worker)
}
fn process_edge(&mut self, slot: EdgeOf<Self>) {
let object = slot.load();
if object.is_null() {
let Some(object) = slot.load() else {
// Skip slots that are not holding an object reference.
return;
}
};
let new_object = self.trace_object(object);
debug_assert!(!self.plan.is_object_in_nursery(new_object));
// Note: If `object` is a mature object, `trace_object` will not call `space.trace_object`,
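The `let ... else` pattern above recurs throughout this commit wherever a slot may be empty. A runnable sketch with simplified types (`Slot` here is a hypothetical stand-in for an `Edge` implementation, not mmtk's API):

```rust
// Simplified stand-in for a slot/edge whose load may find no reference.
struct Slot(Option<u64>);

impl Slot {
    fn load(&self) -> Option<u64> {
        self.0
    }
}

fn process_edge(slot: &Slot) {
    // Replaces the old `if object.is_null() { return; }` sentinel check.
    let Some(object) = slot.load() else {
        // Skip slots that are not holding an object reference.
        return;
    };
    println!("tracing object {object:#x}");
}

fn main() {
    process_edge(&Slot(None)); // skipped: empty slot
    process_edge(&Slot(Some(0x1000))); // traced
}
```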
2 changes: 1 addition & 1 deletion src/plan/tracing.rs
@@ -117,7 +117,7 @@ impl<'a, E: ProcessEdgesWork> EdgeVisitor<EdgeOf<E>> for ObjectsClosure<'a, E> {
{
use crate::vm::edge_shape::Edge;
trace!(
"(ObjectsClosure) Visit edge {:?} (pointing to {})",
"(ObjectsClosure) Visit edge {:?} (pointing to {:?})",
slot,
slot.load()
);
1 change: 0 additions & 1 deletion src/policy/copyspace.rs
@@ -217,7 +217,6 @@ impl<VM: VMBinding> CopySpace<VM> {
worker: &mut GCWorker<VM>,
) -> ObjectReference {
trace!("copyspace.trace_object(, {:?}, {:?})", object, semantics,);
debug_assert!(!object.is_null());

// If this is not from space, we do not need to trace it (the object has been copied to the tospace)
if !self.is_from_space() {
1 change: 0 additions & 1 deletion src/policy/immix/immixspace.rs
@@ -184,7 +184,6 @@ impl<VM: VMBinding> crate::policy::gc_work::PolicyTraceObject<VM> for ImmixSpace
copy: Option<CopySemantics>,
worker: &mut GCWorker<VM>,
) -> ObjectReference {
debug_assert!(!object.is_null());
if KIND == TRACE_KIND_TRANSITIVE_PIN {
self.trace_object_without_moving(queue, object)
} else if KIND == TRACE_KIND_DEFRAG {
1 change: 0 additions & 1 deletion src/policy/immortalspace.rs
@@ -187,7 +187,6 @@ impl<VM: VMBinding> ImmortalSpace<VM> {
queue: &mut Q,
object: ObjectReference,
) -> ObjectReference {
debug_assert!(!object.is_null());
#[cfg(feature = "vo_bit")]
debug_assert!(
crate::util::metadata::vo_bit::is_vo_bit_set::<VM>(object),
1 change: 0 additions & 1 deletion src/policy/largeobjectspace.rs
@@ -189,7 +189,6 @@ impl<VM: VMBinding> LargeObjectSpace<VM> {
queue: &mut Q,
object: ObjectReference,
) -> ObjectReference {
debug_assert!(!object.is_null());
#[cfg(feature = "vo_bit")]
debug_assert!(
crate::util::metadata::vo_bit::is_vo_bit_set::<VM>(object),
26 changes: 12 additions & 14 deletions src/policy/markcompactspace.rs
@@ -37,12 +37,7 @@ impl<VM: VMBinding> SFT for MarkCompactSpace<VM> {
}

fn get_forwarded_object(&self, object: ObjectReference) -> Option<ObjectReference> {
let forwarding_pointer = Self::get_header_forwarding_pointer(object);
if forwarding_pointer.is_null() {
None
} else {
Some(forwarding_pointer)
}
Self::get_header_forwarding_pointer(object)
}

fn is_live(&self, object: ObjectReference) -> bool {
@@ -130,7 +125,6 @@ impl<VM: VMBinding> crate::policy::gc_work::PolicyTraceObject<VM> for MarkCompac
_copy: Option<CopySemantics>,
_worker: &mut GCWorker<VM>,
) -> ObjectReference {
debug_assert!(!object.is_null());
debug_assert!(
KIND != TRACE_KIND_TRANSITIVE_PIN,
"MarkCompact does not support transitive pin trace."
@@ -177,8 +171,9 @@ impl<VM: VMBinding> MarkCompactSpace<VM> {
}

/// Get header forwarding pointer for an object
fn get_header_forwarding_pointer(object: ObjectReference) -> ObjectReference {
unsafe { Self::header_forwarding_pointer_address(object).load::<ObjectReference>() }
fn get_header_forwarding_pointer(object: ObjectReference) -> Option<ObjectReference> {
let addr = unsafe { Self::header_forwarding_pointer_address(object).load::<Address>() };
ObjectReference::from_raw_address(addr)
}

/// Store header forwarding pointer for an object
@@ -251,7 +246,9 @@ impl<VM: VMBinding> MarkCompactSpace<VM> {
queue.enqueue(object);
}

Self::get_header_forwarding_pointer(object)
Self::get_header_forwarding_pointer(object).unwrap_or_else(|| {
panic!("trace_forward_object called when an object is not forwarded, yet. object: {object}")
})
}

pub fn test_and_mark(object: ObjectReference) -> bool {
@@ -388,10 +385,9 @@ impl<VM: VMBinding> MarkCompactSpace<VM> {
// clear the VO bit
vo_bit::unset_vo_bit::<VM>(obj);

let forwarding_pointer = Self::get_header_forwarding_pointer(obj);

trace!("Compact {} to {}", obj, forwarding_pointer);
if !forwarding_pointer.is_null() {
let maybe_forwarding_pointer = Self::get_header_forwarding_pointer(obj);
if let Some(forwarding_pointer) = maybe_forwarding_pointer {
trace!("Compact {} to {}", obj, forwarding_pointer);
let new_object = forwarding_pointer;
Self::clear_header_forwarding_pointer(new_object);

@@ -403,6 +399,8 @@ impl<VM: VMBinding> MarkCompactSpace<VM> {
vo_bit::set_vo_bit::<VM>(new_object);
to = new_object.to_object_start::<VM>() + copied_size;
debug_assert_eq!(end_of_new_object, to);
} else {
trace!("Skipping dead object {}", obj);
}
}
}
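The MarkCompact changes follow from the same encoding: the header forwarding word is zero until an object is forwarded, so decoding it through `NonZeroUsize` yields an `Option` with no explicit null check. A hedged sketch with mock types, not mmtk's internals:

```rust
use std::num::NonZeroUsize;

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
struct MiniRef(NonZeroUsize);

// A zero header word means "not forwarded"; any other value is the
// forwarding pointer. `NonZeroUsize::new` turns that convention into
// an `Option` automatically.
fn decode_forwarding_word(word: usize) -> Option<MiniRef> {
    NonZeroUsize::new(word).map(MiniRef)
}

fn main() {
    assert_eq!(decode_forwarding_word(0), None); // not yet forwarded
    assert!(decode_forwarding_word(0x2000).is_some()); // forwarded
}
```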
2 changes: 0 additions & 2 deletions src/policy/marksweepspace/malloc_ms/global.rs
@@ -400,8 +400,6 @@ impl<VM: VMBinding> MallocSpace<VM> {
queue: &mut Q,
object: ObjectReference,
) -> ObjectReference {
debug_assert!(!object.is_null());

assert!(
self.in_space(object),
"Cannot mark an object {} that was not alloced by malloc.",
12 changes: 8 additions & 4 deletions src/policy/marksweepspace/native_ms/block.rs
@@ -287,7 +287,8 @@ impl Block {
while cell + cell_size <= self.start() + Block::BYTES {
// The invariants we checked earlier ensure that we can use cell and object reference interchangeably
// We may not really have an object in this cell, but if we do, this object reference is correct.
let potential_object = ObjectReference::from_raw_address(cell);
// About unsafe: We know `cell` is non-zero here.
let potential_object = unsafe { ObjectReference::from_raw_address_unchecked(cell) };

if !VM::VMObjectModel::LOCAL_MARK_BIT_SPEC
.is_marked::<VM>(potential_object, Ordering::SeqCst)
@@ -327,9 +328,12 @@ impl Block {

while cell + cell_size <= self.end() {
// possible object ref
let potential_object_ref = ObjectReference::from_raw_address(
cursor + VM::VMObjectModel::OBJECT_REF_OFFSET_LOWER_BOUND,
);
let potential_object_ref = unsafe {
// We know cursor plus an offset cannot be 0.
ObjectReference::from_raw_address_unchecked(
cursor + VM::VMObjectModel::OBJECT_REF_OFFSET_LOWER_BOUND,
)
};
trace!(
"{:?}: cell = {}, last cell in free list = {}, cursor = {}, potential object = {}",
self,
1 change: 0 additions & 1 deletion src/policy/marksweepspace/native_ms/global.rs
@@ -241,7 +241,6 @@ impl<VM: VMBinding> MarkSweepSpace<VM> {
queue: &mut Q,
object: ObjectReference,
) -> ObjectReference {
debug_assert!(!object.is_null());
debug_assert!(
self.in_space(object),
"Cannot mark an object {} that was not alloced by free list allocator.",
18 changes: 6 additions & 12 deletions src/scheduler/gc_work.rs
@@ -274,7 +274,6 @@ impl<E: ProcessEdgesWork> ObjectTracer for ProcessEdgesWorkTracer<E> {
/// Forward the `trace_object` call to the underlying `ProcessEdgesWork`,
/// and flush as soon as the underlying buffer of `process_edges_work` is full.
fn trace_object(&mut self, object: ObjectReference) -> ObjectReference {
debug_assert!(!object.is_null());
let result = self.process_edges_work.trace_object(object);
self.flush_if_full();
result
@@ -659,12 +658,11 @@ pub trait ProcessEdgesWork:
/// Process an edge, including loading the object reference from the memory slot,
/// tracing the object, and storing back the new object reference if necessary.
fn process_edge(&mut self, slot: EdgeOf<Self>) {
let object = slot.load();
if object.is_null() {
let Some(object) = slot.load() else {
// Skip slots that are not holding an object reference.
return;
}
};
let new_object = self.trace_object(object);
debug_assert!(!new_object.is_null());
if Self::OVERWRITE_REFERENCE && new_object != object {
slot.store(new_object);
}
@@ -722,8 +720,6 @@ impl<VM: VMBinding> ProcessEdgesWork for SFTProcessEdges<VM> {
fn trace_object(&mut self, object: ObjectReference) -> ObjectReference {
use crate::policy::sft::GCWorkerMutRef;

debug_assert!(!object.is_null());

// Erase <VM> type parameter
let worker = GCWorkerMutRef::new(self.worker());

@@ -996,20 +992,18 @@ impl<VM: VMBinding, P: PlanTraceObject<VM> + Plan<VM = VM>, const KIND: TraceKin
}

fn trace_object(&mut self, object: ObjectReference) -> ObjectReference {
debug_assert!(!object.is_null());
// We cannot borrow `self` twice in a call, so we extract `worker` as a local variable.
let worker = self.worker();
self.plan
.trace_object::<VectorObjectQueue, KIND>(&mut self.base.nodes, object, worker)
}

fn process_edge(&mut self, slot: EdgeOf<Self>) {
let object = slot.load();
if object.is_null() {
let Some(object) = slot.load() else {
// Skip slots that are not holding an object reference.
return;
}
};
let new_object = self.trace_object(object);
debug_assert!(!new_object.is_null());
if P::may_move_objects::<KIND>() && new_object != object {
slot.store(new_object);
}
47 changes: 20 additions & 27 deletions src/util/address.rs
@@ -3,6 +3,7 @@ use bytemuck::NoUninit;

use std::fmt;
use std::mem;
use std::num::NonZeroUsize;
use std::ops::*;
use std::sync::atomic::Ordering;

@@ -479,28 +480,33 @@ use crate::vm::VMBinding;
/// the opaque `ObjectReference` type, and we haven't seen a use case for now.
#[repr(transparent)]
#[derive(Copy, Clone, Eq, Hash, PartialOrd, Ord, PartialEq, NoUninit)]
pub struct ObjectReference(usize);
pub struct ObjectReference(NonZeroUsize);

impl ObjectReference {
/// The null object reference, represented as zero.
pub const NULL: ObjectReference = ObjectReference(0);

/// Cast the object reference to its raw address. This method is mostly for the convenience of a binding.
///
/// MMTk should not make any assumption on the actual location of the address with the object reference.
/// MMTk should not assume the address returned by this method is in our allocation. For the purposes of
/// setting object metadata, MMTk should use [`crate::vm::ObjectModel::ref_to_address()`] or [`crate::vm::ObjectModel::ref_to_header()`].
pub fn to_raw_address(self) -> Address {
Address(self.0)
Address(self.0.get())
}

/// Cast a raw address to an object reference. This method is mostly for the convenience of a binding.
/// This is how a binding creates `ObjectReference` instances.
///
/// If `addr` is 0, the result is `None`.
///
/// MMTk should not assume an arbitrary address can be turned into an object reference. MMTk can use [`crate::vm::ObjectModel::address_to_ref()`]
/// to turn addresses that are from [`crate::vm::ObjectModel::ref_to_address()`] back to objects.
pub fn from_raw_address(addr: Address) -> ObjectReference {
ObjectReference(addr.0)
pub fn from_raw_address(addr: Address) -> Option<ObjectReference> {
NonZeroUsize::new(addr.0).map(ObjectReference)
}

/// Like `from_raw_address`, but assumes `addr` is not zero.
pub unsafe fn from_raw_address_unchecked(addr: Address) -> ObjectReference {
debug_assert!(!addr.is_zero());
ObjectReference(NonZeroUsize::new_unchecked(addr.0))
}

/// Get the in-heap address from an object reference. This method is used by MMTk to get an in-heap address
@@ -541,54 +547,41 @@ impl ObjectReference {
obj
}

/// is this object reference null reference?
pub fn is_null(self) -> bool {
self.0 == 0
}

/// returns the ObjectReference
pub fn value(self) -> usize {
self.0
self.0.get()
}

/// Is the object reachable, determined by the policy?
/// Note: Objects in ImmortalSpace may have `is_live = true` but are actually unreachable.
pub fn is_reachable(self) -> bool {
if self.is_null() {
false
} else {
unsafe { SFT_MAP.get_unchecked(Address(self.0)) }.is_reachable(self)
}
unsafe { SFT_MAP.get_unchecked(self.to_raw_address()) }.is_reachable(self)
}

/// Is the object live, determined by the policy?
pub fn is_live(self) -> bool {
if self.0 == 0 {
false
} else {
unsafe { SFT_MAP.get_unchecked(Address(self.0)) }.is_live(self)
}
unsafe { SFT_MAP.get_unchecked(self.to_raw_address()) }.is_live(self)
}

/// Can the object be moved?
pub fn is_movable(self) -> bool {
unsafe { SFT_MAP.get_unchecked(Address(self.0)) }.is_movable()
unsafe { SFT_MAP.get_unchecked(self.to_raw_address()) }.is_movable()
}

/// Get forwarding pointer if the object is forwarded.
pub fn get_forwarded_object(self) -> Option<Self> {
unsafe { SFT_MAP.get_unchecked(Address(self.0)) }.get_forwarded_object(self)
unsafe { SFT_MAP.get_unchecked(self.to_raw_address()) }.get_forwarded_object(self)
}

/// Is the object in any MMTk spaces?
pub fn is_in_any_space(self) -> bool {
unsafe { SFT_MAP.get_unchecked(Address(self.0)) }.is_in_space(self)
unsafe { SFT_MAP.get_unchecked(self.to_raw_address()) }.is_in_space(self)
}

/// Is the object sane?
#[cfg(feature = "sanity")]
pub fn is_sane(self) -> bool {
unsafe { SFT_MAP.get_unchecked(Address(self.0)) }.is_sane()
unsafe { SFT_MAP.get_unchecked(self.to_raw_address()) }.is_sane()
}
}

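To illustrate the checked/unchecked pair introduced above, a small sketch of the contract split (mock type; mmtk's real constructors take an `Address`, not a bare `usize`):

```rust
use std::num::NonZeroUsize;

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
struct MiniRef(NonZeroUsize);

// Checked: a total function; zero maps to `None`.
fn from_raw(addr: usize) -> Option<MiniRef> {
    NonZeroUsize::new(addr).map(MiniRef)
}

// Unchecked: the caller promises `addr != 0`; debug builds still assert it.
//
// # Safety
// `addr` must be non-zero.
unsafe fn from_raw_unchecked(addr: usize) -> MiniRef {
    debug_assert!(addr != 0);
    MiniRef(NonZeroUsize::new_unchecked(addr))
}

fn main() {
    let a = from_raw(0x4000).expect("non-zero address");
    // Sound: 0x4000 is statically known to be non-zero.
    let b = unsafe { from_raw_unchecked(0x4000) };
    assert_eq!(a, b);
}
```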
10 changes: 8 additions & 2 deletions src/util/metadata/vo_bit/mod.rs
@@ -100,7 +100,10 @@ pub fn is_vo_bit_set<VM: VMBinding>(object: ObjectReference) -> bool {
/// Check if an address can be turned directly into an object reference using the VO bit.
/// If so, return `Some(object)`. Otherwise return `None`.
pub fn is_vo_bit_set_for_addr<VM: VMBinding>(address: Address) -> Option<ObjectReference> {
let potential_object = ObjectReference::from_raw_address(address);
let Some(potential_object) = ObjectReference::from_raw_address(address) else {
return None;
};

let addr = potential_object.to_address::<VM>();

// If we haven't mapped VO bit for the address, it cannot be an object
@@ -123,7 +126,10 @@ pub fn is_vo_bit_set_for_addr<VM: VMBinding>(address: Address) -> Option<ObjectR
///
/// This is unsafe: check the comment on `side_metadata::load`
pub unsafe fn is_vo_bit_set_unsafe<VM: VMBinding>(address: Address) -> Option<ObjectReference> {
let potential_object = ObjectReference::from_raw_address(address);
let Some(potential_object) = ObjectReference::from_raw_address(address) else {
return None;
};

let addr = potential_object.to_address::<VM>();

// If we haven't mapped VO bit for the address, it cannot be an object
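Since both functions here already return `Option<ObjectReference>`, the `let ... else { return None }` form above behaves exactly like the `?` operator on `Option`. A minimal sketch of the equivalent early return (names hypothetical):

```rust
use std::num::NonZeroUsize;

// `?` on an `Option` early-returns `None`, matching the
// `let Some(x) = ... else { return None; };` form used above.
fn potential_object(addr: usize) -> Option<NonZeroUsize> {
    let obj = NonZeroUsize::new(addr)?; // `None` if addr == 0
    Some(obj)
}

fn main() {
    assert!(potential_object(0).is_none());
    assert!(potential_object(0x8000).is_some());
}
```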
4 changes: 3 additions & 1 deletion src/util/object_forwarding.rs
@@ -150,7 +150,9 @@ pub fn read_forwarding_pointer<VM: VMBinding>(object: ObjectReference) -> Object

// We write the forwarding pointer. We know it is an object reference.
unsafe {
ObjectReference::from_raw_address(crate::util::Address::from_usize(
// We use "unchecked" convertion becasue we guarantee the forwarding pointer we stored
// previously is from a valid `ObjectReference` which is never zero.
ObjectReference::from_raw_address_unchecked(crate::util::Address::from_usize(
VM::VMObjectModel::LOCAL_FORWARDING_POINTER_SPEC.load_atomic::<VM, usize>(
object,
Some(FORWARDING_POINTER_MASK),