diff --git a/src/memory_manager.rs b/src/memory_manager.rs
index 67135e9678..8898e57445 100644
--- a/src/memory_manager.rs
+++ b/src/memory_manager.rs
@@ -708,9 +708,6 @@ pub fn is_mmtk_object(addr: Address) -> bool {
 /// * `object`: The object reference to query.
 pub fn is_in_mmtk_spaces<VM: VMBinding>(object: ObjectReference) -> bool {
     use crate::mmtk::SFT_MAP;
-    if object.is_null() {
-        return false;
-    }
     SFT_MAP
         .get_checked(object.to_address::<VM>())
         .is_in_space(object)
diff --git a/src/plan/generational/gc_work.rs b/src/plan/generational/gc_work.rs
index 3a01b3bcd4..421a2590df 100644
--- a/src/plan/generational/gc_work.rs
+++ b/src/plan/generational/gc_work.rs
@@ -36,18 +36,16 @@ impl<VM: VMBinding, P: GenerationalPlanExt<VM> + PlanTraceObject<VM>> ProcessEdgesWork
         Self { plan, base }
     }

     fn trace_object(&mut self, object: ObjectReference) -> ObjectReference {
-        debug_assert!(!object.is_null());
-
         // We cannot borrow `self` twice in a call, so we extract `worker` as a local variable.
         let worker = self.worker();
         self.plan
             .trace_object_nursery(&mut self.base.nodes, object, worker)
     }

     fn process_edge(&mut self, slot: EdgeOf<Self>) {
-        let object = slot.load();
-        if object.is_null() {
+        let Some(object) = slot.load() else {
+            // Skip slots that are not holding an object reference.
             return;
-        }
+        };
         let new_object = self.trace_object(object);
         debug_assert!(!self.plan.is_object_in_nursery(new_object));
         // Note: If `object` is a mature object, `trace_object` will not call `space.trace_object`,
diff --git a/src/plan/tracing.rs b/src/plan/tracing.rs
index adc2ea9462..27e021051f 100644
--- a/src/plan/tracing.rs
+++ b/src/plan/tracing.rs
@@ -117,7 +117,7 @@ impl<'a, E: ProcessEdgesWork> EdgeVisitor<EdgeOf<E>> for ObjectsClosure<'a, E> {
     {
         use crate::vm::edge_shape::Edge;
         trace!(
-            "(ObjectsClosure) Visit edge {:?} (pointing to {})",
+            "(ObjectsClosure) Visit edge {:?} (pointing to {:?})",
             slot,
             slot.load()
         );
diff --git a/src/policy/copyspace.rs b/src/policy/copyspace.rs
index 8193c7c63c..8d08ec1507 100644
--- a/src/policy/copyspace.rs
+++ b/src/policy/copyspace.rs
@@ -217,7 +217,6 @@ impl<VM: VMBinding> CopySpace<VM> {
         worker: &mut GCWorker<VM>,
     ) -> ObjectReference {
         trace!("copyspace.trace_object(, {:?}, {:?})", object, semantics,);
-        debug_assert!(!object.is_null());

         // If this is not from space, we do not need to trace it (the object has been copied to the tosapce)
         if !self.is_from_space() {
diff --git a/src/policy/immix/immixspace.rs b/src/policy/immix/immixspace.rs
index bf72394c01..3809f7bd24 100644
--- a/src/policy/immix/immixspace.rs
+++ b/src/policy/immix/immixspace.rs
@@ -184,7 +184,6 @@ impl<VM: VMBinding> crate::policy::gc_work::PolicyTraceObject<VM> for ImmixSpace<VM> {
         copy: Option<CopySemantics>,
         worker: &mut GCWorker<VM>,
     ) -> ObjectReference {
-        debug_assert!(!object.is_null());
         if KIND == TRACE_KIND_TRANSITIVE_PIN {
             self.trace_object_without_moving(queue, object)
         } else if KIND == TRACE_KIND_DEFRAG {
diff --git a/src/policy/immortalspace.rs b/src/policy/immortalspace.rs
index 1e4d19eefa..5eeebd58c9 100644
--- a/src/policy/immortalspace.rs
+++ b/src/policy/immortalspace.rs
@@ -187,7 +187,6 @@ impl<VM: VMBinding> ImmortalSpace<VM> {
         queue: &mut Q,
         object: ObjectReference,
     ) -> ObjectReference {
-        debug_assert!(!object.is_null());
         #[cfg(feature = "vo_bit")]
         debug_assert!(
             crate::util::metadata::vo_bit::is_vo_bit_set::<VM>(object),
diff --git a/src/policy/largeobjectspace.rs b/src/policy/largeobjectspace.rs
index 64a13c8f37..ec6b2f7506 100644
--- a/src/policy/largeobjectspace.rs
+++ b/src/policy/largeobjectspace.rs
@@ -189,7 +189,6 @@ impl<VM: VMBinding> LargeObjectSpace<VM> {
         queue: &mut Q,
         object: ObjectReference,
     ) -> ObjectReference {
-        debug_assert!(!object.is_null());
         #[cfg(feature = "vo_bit")]
         debug_assert!(
             crate::util::metadata::vo_bit::is_vo_bit_set::<VM>(object),
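Note (editorial): every hunk above deletes the same idiom, a null check or `debug_assert!` at the top of `trace_object` / `process_edge`. A minimal sketch of the new contract using only `std` types (`Ref`, `load`, and `process_edge` here are illustrative stand-ins, not MMTk APIs):

```rust
use std::num::NonZeroUsize;

/// Toy stand-in for `ObjectReference`: a word that is never zero.
#[derive(Clone, Copy, Debug)]
struct Ref(NonZeroUsize);

/// Toy stand-in for `Edge::load`: a slot holding 0 yields no reference.
fn load(slot: usize) -> Option<Ref> {
    NonZeroUsize::new(slot).map(Ref)
}

fn process_edge(slot: usize) {
    // The null case is consumed by the type; trace_object never sees it.
    let Some(obj) = load(slot) else {
        return; // skip slots that are not holding an object reference
    };
    println!("tracing {obj:?}");
}

fn main() {
    process_edge(0); // skipped: no reference in the slot
    process_edge(0x10_0000); // traced
}
```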
diff --git a/src/policy/markcompactspace.rs b/src/policy/markcompactspace.rs
index e6d332919e..e9468caa3c 100644
--- a/src/policy/markcompactspace.rs
+++ b/src/policy/markcompactspace.rs
@@ -37,12 +37,7 @@ impl<VM: VMBinding> SFT for MarkCompactSpace<VM> {
     }

     fn get_forwarded_object(&self, object: ObjectReference) -> Option<ObjectReference> {
-        let forwarding_pointer = Self::get_header_forwarding_pointer(object);
-        if forwarding_pointer.is_null() {
-            None
-        } else {
-            Some(forwarding_pointer)
-        }
+        Self::get_header_forwarding_pointer(object)
     }

     fn is_live(&self, object: ObjectReference) -> bool {
@@ -130,7 +125,6 @@ impl<VM: VMBinding> crate::policy::gc_work::PolicyTraceObject<VM> for MarkCompactSpace<VM> {
         _copy: Option<CopySemantics>,
         _worker: &mut GCWorker<VM>,
     ) -> ObjectReference {
-        debug_assert!(!object.is_null());
         debug_assert!(
             KIND != TRACE_KIND_TRANSITIVE_PIN,
             "MarkCompact does not support transitive pin trace."
@@ -177,8 +171,9 @@ impl<VM: VMBinding> MarkCompactSpace<VM> {
     }

     /// Get header forwarding pointer for an object
-    fn get_header_forwarding_pointer(object: ObjectReference) -> ObjectReference {
-        unsafe { Self::header_forwarding_pointer_address(object).load::<ObjectReference>() }
+    fn get_header_forwarding_pointer(object: ObjectReference) -> Option<ObjectReference> {
+        let addr = unsafe { Self::header_forwarding_pointer_address(object).load::<Address>() };
+        ObjectReference::from_raw_address(addr)
     }

     /// Store header forwarding pointer for an object
@@ -251,7 +246,9 @@ impl<VM: VMBinding> MarkCompactSpace<VM> {
             queue.enqueue(object);
         }

-        Self::get_header_forwarding_pointer(object)
+        Self::get_header_forwarding_pointer(object).unwrap_or_else(|| {
+            panic!("trace_forward_object called when an object is not forwarded yet. object: {object}")
+        })
     }

     pub fn test_and_mark(object: ObjectReference) -> bool {
@@ -388,10 +385,9 @@ impl<VM: VMBinding> MarkCompactSpace<VM> {
                 // clear the VO bit
                 vo_bit::unset_vo_bit::<VM>(obj);

-                let forwarding_pointer = Self::get_header_forwarding_pointer(obj);
-
-                trace!("Compact {} to {}", obj, forwarding_pointer);
-                if !forwarding_pointer.is_null() {
+                let maybe_forwarding_pointer = Self::get_header_forwarding_pointer(obj);
+                if let Some(forwarding_pointer) = maybe_forwarding_pointer {
+                    trace!("Compact {} to {}", obj, forwarding_pointer);
                     let new_object = forwarding_pointer;
                     Self::clear_header_forwarding_pointer(new_object);

@@ -403,6 +399,8 @@ impl<VM: VMBinding> MarkCompactSpace<VM> {
                     vo_bit::set_vo_bit::<VM>(new_object);
                     to = new_object.to_object_start::<VM>() + copied_size;
                     debug_assert_eq!(end_of_new_object, to);
+                } else {
+                    trace!("Skipping dead object {}", obj);
                 }
             }
         }
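Note (editorial): the `markcompactspace.rs` change works because the header forwarding word is zero until the object is forwarded, and `from_raw_address` maps exactly that bit pattern to `None`, so the `Option` doubles as the "is forwarded?" flag. A self-contained sketch of the same mapping (toy code, not MMTk's):

```rust
use std::num::NonZeroUsize;

/// Toy model of `get_header_forwarding_pointer`: 0 decodes to None.
fn get_forwarded(header_word: usize) -> Option<NonZeroUsize> {
    NonZeroUsize::new(header_word)
}

fn main() {
    assert_eq!(get_forwarded(0), None); // not forwarded; compaction skips it
    assert_eq!(get_forwarded(0x7f00).map(NonZeroUsize::get), Some(0x7f00));
}
```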
diff --git a/src/policy/marksweepspace/malloc_ms/global.rs b/src/policy/marksweepspace/malloc_ms/global.rs
index 8d42b74f0d..03ab9a025f 100644
--- a/src/policy/marksweepspace/malloc_ms/global.rs
+++ b/src/policy/marksweepspace/malloc_ms/global.rs
@@ -400,8 +400,6 @@ impl<VM: VMBinding> MallocSpace<VM> {
         queue: &mut Q,
         object: ObjectReference,
     ) -> ObjectReference {
-        debug_assert!(!object.is_null());
-
         assert!(
             self.in_space(object),
             "Cannot mark an object {} that was not alloced by malloc.",
diff --git a/src/policy/marksweepspace/native_ms/block.rs b/src/policy/marksweepspace/native_ms/block.rs
index 625a82d851..2cf106f66a 100644
--- a/src/policy/marksweepspace/native_ms/block.rs
+++ b/src/policy/marksweepspace/native_ms/block.rs
@@ -287,7 +287,8 @@ impl Block {
         while cell + cell_size <= self.start() + Block::BYTES {
             // The invariants we checked earlier ensures that we can use cell and object reference interchangably
             // We may not really have an object in this cell, but if we do, this object reference is correct.
-            let potential_object = ObjectReference::from_raw_address(cell);
+            // About unsafe: We know `cell` is non-zero here.
+            let potential_object = unsafe { ObjectReference::from_raw_address_unchecked(cell) };

             if !VM::VMObjectModel::LOCAL_MARK_BIT_SPEC
                 .is_marked::<VM>(potential_object, Ordering::SeqCst)
@@ -327,9 +328,12 @@ impl Block {

         while cell + cell_size <= self.end() {
             // possible object ref
-            let potential_object_ref = ObjectReference::from_raw_address(
-                cursor + VM::VMObjectModel::OBJECT_REF_OFFSET_LOWER_BOUND,
-            );
+            let potential_object_ref = unsafe {
+                // We know cursor plus an offset cannot be 0.
+                ObjectReference::from_raw_address_unchecked(
+                    cursor + VM::VMObjectModel::OBJECT_REF_OFFSET_LOWER_BOUND,
+                )
+            };
             trace!(
                 "{:?}: cell = {}, last cell in free list = {}, cursor = {}, potential object = {}",
                 self,
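Note (editorial): `from_raw_address_unchecked` trades the zero check for a proof obligation on the caller; in `block.rs` the obligation holds because a cell address inside a block is never zero. The split mirrors the standard library's own checked/unchecked pair, sketched here with `NonZeroUsize` directly (the values are illustrative):

```rust
use std::num::NonZeroUsize;

fn checked(raw: usize) -> Option<NonZeroUsize> {
    NonZeroUsize::new(raw) // total: pays a branch, never wrong
}

/// # Safety
/// `raw` must not be zero.
unsafe fn unchecked(raw: usize) -> NonZeroUsize {
    debug_assert!(raw != 0); // backstop in debug builds only
    unsafe { NonZeroUsize::new_unchecked(raw) }
}

fn main() {
    assert!(checked(0).is_none());
    let cell = 0x2000; // e.g. a cell address inside a block
    assert_eq!(unsafe { unchecked(cell) }.get(), 0x2000);
}
```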
diff --git a/src/policy/marksweepspace/native_ms/global.rs b/src/policy/marksweepspace/native_ms/global.rs
index a533fcde33..6257d8a9ca 100644
--- a/src/policy/marksweepspace/native_ms/global.rs
+++ b/src/policy/marksweepspace/native_ms/global.rs
@@ -241,7 +241,6 @@ impl<VM: VMBinding> MarkSweepSpace<VM> {
         queue: &mut Q,
         object: ObjectReference,
     ) -> ObjectReference {
-        debug_assert!(!object.is_null());
         debug_assert!(
             self.in_space(object),
             "Cannot mark an object {} that was not alloced by free list allocator.",
diff --git a/src/scheduler/gc_work.rs b/src/scheduler/gc_work.rs
index 692873eb84..59c07c4b02 100644
--- a/src/scheduler/gc_work.rs
+++ b/src/scheduler/gc_work.rs
@@ -274,7 +274,6 @@ impl<E: ProcessEdgesWork> ObjectTracer for ProcessEdgesWorkTracer<E> {
     /// Forward the `trace_object` call to the underlying `ProcessEdgesWork`,
     /// and flush as soon as the underlying buffer of `process_edges_work` is full.
     fn trace_object(&mut self, object: ObjectReference) -> ObjectReference {
-        debug_assert!(!object.is_null());
         let result = self.process_edges_work.trace_object(object);
         self.flush_if_full();
         result
@@ -659,12 +658,11 @@ pub trait ProcessEdgesWork:
     /// Process an edge, including loading the object reference from the memory slot,
     /// trace the object and store back the new object reference if necessary.
     fn process_edge(&mut self, slot: EdgeOf<Self>) {
-        let object = slot.load();
-        if object.is_null() {
+        let Some(object) = slot.load() else {
+            // Skip slots that are not holding an object reference.
             return;
-        }
+        };
         let new_object = self.trace_object(object);
-        debug_assert!(!new_object.is_null());
         if Self::OVERWRITE_REFERENCE && new_object != object {
             slot.store(new_object);
         }
@@ -722,8 +720,6 @@ impl<VM: VMBinding> ProcessEdgesWork for SFTProcessEdges<VM> {
     fn trace_object(&mut self, object: ObjectReference) -> ObjectReference {
         use crate::policy::sft::GCWorkerMutRef;

-        debug_assert!(!object.is_null());
-
         // Erase type parameter
         let worker = GCWorkerMutRef::new(self.worker());
@@ -996,7 +992,6 @@ impl<VM: VMBinding, P: PlanTraceObject<VM> + Plan<VM = VM>, const KIND: TraceKind>
     }

     fn trace_object(&mut self, object: ObjectReference) -> ObjectReference {
-        debug_assert!(!object.is_null());
         // We cannot borrow `self` twice in a call, so we extract `worker` as a local variable.
         let worker = self.worker();
         self.plan
@@ -1004,12 +999,11 @@ impl<VM: VMBinding, P: PlanTraceObject<VM> + Plan<VM = VM>, const KIND: TraceKind>
     }

     fn process_edge(&mut self, slot: EdgeOf<Self>) {
-        let object = slot.load();
-        if object.is_null() {
+        let Some(object) = slot.load() else {
+            // Skip slots that are not holding an object reference.
             return;
-        }
+        };
         let new_object = self.trace_object(object);
-        debug_assert!(!new_object.is_null());
         if P::may_move_objects::<KIND>() && new_object != object {
             slot.store(new_object);
         }
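Note (editorial): the repeated `process_edge` rewrite relies on `let ... else`, stable since Rust 1.65: bind the success pattern, diverge otherwise, and keep the happy path unindented. A standalone illustration:

```rust
fn first_word(s: &str) -> &str {
    let Some(word) = s.split_whitespace().next() else {
        return ""; // the else block must diverge (return, continue, panic, ...)
    };
    word
}

fn main() {
    assert_eq!(first_word("load trace store"), "load");
    assert_eq!(first_word("   "), "");
}
```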
diff --git a/src/util/address.rs b/src/util/address.rs
index 499d8c00fa..f5ad843ee8 100644
--- a/src/util/address.rs
+++ b/src/util/address.rs
@@ -3,6 +3,7 @@ use bytemuck::NoUninit;

 use std::fmt;
 use std::mem;
+use std::num::NonZeroUsize;
 use std::ops::*;
 use std::sync::atomic::Ordering;

@@ -479,28 +480,33 @@ use crate::vm::VMBinding;
 /// the opaque `ObjectReference` type, and we haven't seen a use case for now.
 #[repr(transparent)]
 #[derive(Copy, Clone, Eq, Hash, PartialOrd, Ord, PartialEq, NoUninit)]
-pub struct ObjectReference(usize);
+pub struct ObjectReference(NonZeroUsize);

 impl ObjectReference {
-    /// The null object reference, represented as zero.
-    pub const NULL: ObjectReference = ObjectReference(0);
-
     /// Cast the object reference to its raw address. This method is mostly for the convinience of a binding.
     ///
     /// MMTk should not make any assumption on the actual location of the address with the object reference.
     /// MMTk should not assume the address returned by this method is in our allocation. For the purposes of
     /// setting object metadata, MMTk should use [`crate::vm::ObjectModel::ref_to_address()`] or [`crate::vm::ObjectModel::ref_to_header()`].
     pub fn to_raw_address(self) -> Address {
-        Address(self.0)
+        Address(self.0.get())
     }

     /// Cast a raw address to an object reference. This method is mostly for the convinience of a binding.
     /// This is how a binding creates `ObjectReference` instances.
     ///
+    /// If `addr` is 0, the result is `None`.
+    ///
     /// MMTk should not assume an arbitrary address can be turned into an object reference. MMTk can use [`crate::vm::ObjectModel::address_to_ref()`]
     /// to turn addresses that are from [`crate::vm::ObjectModel::ref_to_address()`] back to object.
-    pub fn from_raw_address(addr: Address) -> ObjectReference {
-        ObjectReference(addr.0)
+    pub fn from_raw_address(addr: Address) -> Option<ObjectReference> {
+        NonZeroUsize::new(addr.0).map(ObjectReference)
+    }
+
+    /// Like `from_raw_address`, but assume `addr` is not zero.
+    pub unsafe fn from_raw_address_unchecked(addr: Address) -> ObjectReference {
+        debug_assert!(!addr.is_zero());
+        ObjectReference(NonZeroUsize::new_unchecked(addr.0))
     }

     /// Get the in-heap address from an object reference. This method is used by MMTk to get an in-heap address
@@ -541,54 +547,41 @@ impl ObjectReference {
         obj
     }

-    /// is this object reference null reference?
-    pub fn is_null(self) -> bool {
-        self.0 == 0
-    }
-
     /// returns the ObjectReference
     pub fn value(self) -> usize {
-        self.0
+        self.0.get()
     }

     /// Is the object reachable, determined by the policy?
     /// Note: Objects in ImmortalSpace may have `is_live = true` but are actually unreachable.
     pub fn is_reachable(self) -> bool {
-        if self.is_null() {
-            false
-        } else {
-            unsafe { SFT_MAP.get_unchecked(Address(self.0)) }.is_reachable(self)
-        }
+        unsafe { SFT_MAP.get_unchecked(self.to_raw_address()) }.is_reachable(self)
     }

     /// Is the object live, determined by the policy?
     pub fn is_live(self) -> bool {
-        if self.0 == 0 {
-            false
-        } else {
-            unsafe { SFT_MAP.get_unchecked(Address(self.0)) }.is_live(self)
-        }
+        unsafe { SFT_MAP.get_unchecked(self.to_raw_address()) }.is_live(self)
     }

     /// Can the object be moved?
     pub fn is_movable(self) -> bool {
-        unsafe { SFT_MAP.get_unchecked(Address(self.0)) }.is_movable()
+        unsafe { SFT_MAP.get_unchecked(self.to_raw_address()) }.is_movable()
     }

     /// Get forwarding pointer if the object is forwarded.
     pub fn get_forwarded_object(self) -> Option<ObjectReference> {
-        unsafe { SFT_MAP.get_unchecked(Address(self.0)) }.get_forwarded_object(self)
+        unsafe { SFT_MAP.get_unchecked(self.to_raw_address()) }.get_forwarded_object(self)
     }

     /// Is the object in any MMTk spaces?
     pub fn is_in_any_space(self) -> bool {
-        unsafe { SFT_MAP.get_unchecked(Address(self.0)) }.is_in_space(self)
+        unsafe { SFT_MAP.get_unchecked(self.to_raw_address()) }.is_in_space(self)
     }

     /// Is the object sane?
     #[cfg(feature = "sanity")]
     pub fn is_sane(self) -> bool {
-        unsafe { SFT_MAP.get_unchecked(Address(self.0)) }.is_sane()
+        unsafe { SFT_MAP.get_unchecked(self.to_raw_address()) }.is_sane()
     }
 }
diff --git a/src/util/metadata/vo_bit/mod.rs b/src/util/metadata/vo_bit/mod.rs
index 0382d933d0..efa247b9b2 100644
--- a/src/util/metadata/vo_bit/mod.rs
+++ b/src/util/metadata/vo_bit/mod.rs
@@ -100,7 +100,10 @@ pub fn is_vo_bit_set<VM: VMBinding>(object: ObjectReference) -> bool {
 /// Check if an address can be turned directly into an object reference using the VO bit.
 /// If so, return `Some(object)`. Otherwise return `None`.
 pub fn is_vo_bit_set_for_addr<VM: VMBinding>(address: Address) -> Option<ObjectReference> {
-    let potential_object = ObjectReference::from_raw_address(address);
+    let Some(potential_object) = ObjectReference::from_raw_address(address) else {
+        return None;
+    };
+
     let addr = potential_object.to_address::<VM>();

     // If we haven't mapped VO bit for the address, it cannot be an object
@@ -123,7 +126,10 @@ pub unsafe fn is_vo_bit_set_unsafe<VM: VMBinding>(address: Address) -> Option<ObjectReference> {
-    let potential_object = ObjectReference::from_raw_address(address);
+    let Some(potential_object) = ObjectReference::from_raw_address(address) else {
+        return None;
+    };
+
     let addr = potential_object.to_address::<VM>();

     // If we haven't mapped VO bit for the address, it cannot be an object
diff --git a/src/util/object_forwarding.rs b/src/util/object_forwarding.rs
index 01b3fec447..85c2aaaad5 100644
--- a/src/util/object_forwarding.rs
+++ b/src/util/object_forwarding.rs
@@ -150,7 +150,9 @@ pub fn read_forwarding_pointer<VM: VMBinding>(object: ObjectReference) -> ObjectReference {
     // We write the forwarding poiner. We know it is an object reference.
     unsafe {
-        ObjectReference::from_raw_address(crate::util::Address::from_usize(
+        // We use "unchecked" conversion because we guarantee the forwarding pointer we stored
+        // previously is from a valid `ObjectReference` which is never zero.
+        ObjectReference::from_raw_address_unchecked(crate::util::Address::from_usize(
             VM::VMObjectModel::LOCAL_FORWARDING_POINTER_SPEC.load_atomic::<VM, usize>(
                 object,
                 Some(FORWARDING_POINTER_MASK),
diff --git a/src/util/reference_processor.rs b/src/util/reference_processor.rs
index 718235ff67..3489d8cab1 100644
--- a/src/util/reference_processor.rs
+++ b/src/util/reference_processor.rs
@@ -216,7 +216,6 @@ impl ReferenceProcessor {
         e: &mut E,
         referent: ObjectReference,
     ) -> ObjectReference {
-        debug_assert!(!referent.is_null());
         e.trace_object(referent)
     }

@@ -224,7 +223,6 @@ impl ReferenceProcessor {
         e: &mut E,
         object: ObjectReference,
     ) -> ObjectReference {
-        debug_assert!(!object.is_null());
         e.trace_object(object)
     }

@@ -232,7 +230,6 @@ impl ReferenceProcessor {
         e: &mut E,
         referent: ObjectReference,
     ) -> ObjectReference {
-        debug_assert!(!referent.is_null());
         e.trace_object(referent)
     }

@@ -245,10 +242,9 @@ impl ReferenceProcessor {
     {
         // For references in the table, the reference needs to be valid, and if the referent is not null, it should be valid as well
         sync.references.iter().for_each(|reff| {
-            debug_assert!(!reff.is_null());
             debug_assert!(reff.is_in_any_space());
-            let referent = VM::VMReferenceGlue::get_referent(*reff);
-            if !VM::VMReferenceGlue::is_referent_cleared(referent) {
+            let maybe_referent = VM::VMReferenceGlue::get_referent(*reff);
+            if let Some(referent) = maybe_referent {
                 debug_assert!(
                     referent.is_in_any_space(),
                     "Referent {:?} (of reference {:?}) is not in any space",
                     referent,
                     reff
                 );
             }
         });
         // For references that will be enqueue'd, the referent needs to be valid, and the referent needs to be null.
         sync.enqueued_references.iter().for_each(|reff| {
-            debug_assert!(!reff.is_null());
             debug_assert!(reff.is_in_any_space());
-            let referent = VM::VMReferenceGlue::get_referent(*reff);
-            debug_assert!(VM::VMReferenceGlue::is_referent_cleared(referent));
+            let maybe_referent = VM::VMReferenceGlue::get_referent(*reff);
+            debug_assert!(maybe_referent.is_none());
         });
     }

@@ -287,7 +282,6 @@ impl ReferenceProcessor {
         trace: &mut E,
         reference: ObjectReference,
     ) -> ObjectReference {
-        let old_referent = <E::VM as VMBinding>::VMReferenceGlue::get_referent(reference);
         {
             use crate::vm::ObjectModel;
             trace!(
@@ -297,7 +291,8 @@ impl ReferenceProcessor {
             );
         }

-        if !<E::VM as VMBinding>::VMReferenceGlue::is_referent_cleared(old_referent) {
+        let maybe_old_referent = <E::VM as VMBinding>::VMReferenceGlue::get_referent(reference);
+        if let Some(old_referent) = maybe_old_referent {
             let new_referent = ReferenceProcessor::get_forwarded_referent(trace, old_referent);
             <E::VM as VMBinding>::VMReferenceGlue::set_referent(reference, new_referent);

@@ -311,11 +306,6 @@ impl ReferenceProcessor {
         let new_reference = ReferenceProcessor::get_forwarded_reference(trace, reference);
         trace!(" reference: forwarded to {}", new_reference);

-        debug_assert!(
-            !new_reference.is_null(),
-            "reference {:?}'s forwarding pointer is NULL",
-            reference
-        );
         new_reference
     }

@@ -393,8 +383,6 @@ impl ReferenceProcessor {
         );

         for reference in sync.references.iter() {
-            debug_assert!(!reference.is_null());
-
             trace!("Processing reference: {:?}", reference);

             if !reference.is_live() {
@@ -404,11 +392,11 @@ impl ReferenceProcessor {
             }

             // Reference is definitely reachable. Retain the referent.
-            let referent = <E::VM as VMBinding>::VMReferenceGlue::get_referent(*reference);
-            if !<E::VM as VMBinding>::VMReferenceGlue::is_referent_cleared(referent) {
+            let maybe_referent = <E::VM as VMBinding>::VMReferenceGlue::get_referent(*reference);
+            if let Some(referent) = maybe_referent {
                 Self::keep_referent_alive(trace, referent);
+                trace!(" ~> {:?} (retained)", referent);
             }
-            trace!(" ~> {:?} (retained)", referent);
         }

         debug!("Ending ReferenceProcessor.retain({:?})", self.semantics);
@@ -427,8 +415,6 @@ impl ReferenceProcessor {
         reference: ObjectReference,
         enqueued_references: &mut Vec<ObjectReference>,
     ) -> Option<ObjectReference> {
-        debug_assert!(!reference.is_null());
-
         trace!("Process reference: {}", reference);

         // If the reference is dead, we're done with it. Let it (and
@@ -440,21 +426,22 @@ impl ReferenceProcessor {
             return None;
         }

-        // The reference object is live
+        // The reference object is live.
         let new_reference = Self::get_forwarded_reference(trace, reference);
-        let old_referent = <E::VM as VMBinding>::VMReferenceGlue::get_referent(reference);
-        trace!(" ~> {}", old_referent);
+        trace!(" => {}", new_reference);
+
+        // Get the old referent.
+        let maybe_old_referent = <E::VM as VMBinding>::VMReferenceGlue::get_referent(new_reference);
+        trace!(" ~> {:?}", maybe_old_referent);

         // If the application has cleared the referent the Java spec says
         // this does not cause the Reference object to be enqueued. We
         // simply allow the Reference object to fall out of our
         // waiting list.
-        if <E::VM as VMBinding>::VMReferenceGlue::is_referent_cleared(old_referent) {
+        let Some(old_referent) = maybe_old_referent else {
             trace!(" (cleared referent) ");
             return None;
-        }
-
-        trace!(" => {}", new_reference);
+        };

         if old_referent.is_live() {
             // Referent is still reachable in a way that is as strong as
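Note (editorial): one behavioral fix in `process_reference` above is easy to miss: the referent is now read from `new_reference` (the forwarded copy) rather than from the stale `reference`. A toy model of why that matters under a moving collector (plain `HashMap`s stand in for the heap; none of this is MMTk API):

```rust
use std::collections::HashMap;

fn main() {
    // old object id -> new object id, after a moving collection
    let forwarding: HashMap<u32, u32> = HashMap::from([(1, 2)]);
    // object id -> referent field; only the moved copy (id 2) is current
    let referent: HashMap<u32, Option<u32>> = HashMap::from([(2, Some(7))]);

    let reference = 1;
    let new_reference = *forwarding.get(&reference).unwrap_or(&reference);
    // Reading the field from the forwarded copy sees the live value.
    assert_eq!(referent[&new_reference], Some(7));
}
```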
diff --git a/src/vm/edge_shape.rs b/src/vm/edge_shape.rs
index 45e7141c25..ffd24c0c0a 100644
--- a/src/vm/edge_shape.rs
+++ b/src/vm/edge_shape.rs
@@ -49,11 +49,11 @@ pub trait Edge: Copy + Send + Debug + PartialEq + Eq + Hash {
     ///
     /// If the slot is not holding an object reference (For example, if it is holding NULL or a
     /// tagged non-reference value. See trait-level doc comment.), this method should return
-    /// `ObjectReference::NULL`.
+    /// `None`.
     ///
     /// If the slot holds an object reference with tag bits, the returned value shall be the object
     /// reference with the tag bits removed.
-    fn load(&self) -> ObjectReference;
+    fn load(&self) -> Option<ObjectReference>;

     /// Store the object reference `object` into the slot.
     ///
@@ -83,12 +83,14 @@ pub trait Edge: Copy + Send + Debug + PartialEq + Eq + Hash {
     }
 }

-/// A simple edge implementation that represents a word-sized slot where an ObjectReference value
-/// is stored as is. It is the default edge type, and should be suitable for most VMs.
+/// A simple edge implementation that represents a word-sized slot which holds the raw address of
+/// an `ObjectReference`, or 0 if it is holding a null reference.
+///
+/// It is the default edge type, and should be suitable for most VMs.
 #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
 #[repr(transparent)]
 pub struct SimpleEdge {
-    slot_addr: *mut Atomic<ObjectReference>,
+    slot_addr: *mut Atomic<Address>,
 }

 impl SimpleEdge {
@@ -113,12 +115,13 @@ unsafe impl Send for SimpleEdge {}

 impl Edge for SimpleEdge {
-    fn load(&self) -> ObjectReference {
-        unsafe { (*self.slot_addr).load(atomic::Ordering::Relaxed) }
+    fn load(&self) -> Option<ObjectReference> {
+        let addr = unsafe { (*self.slot_addr).load(atomic::Ordering::Relaxed) };
+        ObjectReference::from_raw_address(addr)
     }

     fn store(&self, object: ObjectReference) {
-        unsafe { (*self.slot_addr).store(object, atomic::Ordering::Relaxed) }
+        unsafe { (*self.slot_addr).store(object.to_raw_address(), atomic::Ordering::Relaxed) }
     }
 }

@@ -133,8 +136,9 @@ impl Edge for SimpleEdge {
 /// simply as an `ObjectReference`. The intention and the semantics are clearer with
 /// `SimpleEdge`.
 impl Edge for Address {
-    fn load(&self) -> ObjectReference {
-        unsafe { Address::load(*self) }
+    fn load(&self) -> Option<ObjectReference> {
+        let addr = unsafe { Address::load(*self) };
+        ObjectReference::from_raw_address(addr)
     }

     fn store(&self, object: ObjectReference) {
diff --git a/src/vm/reference_glue.rs b/src/vm/reference_glue.rs
index 3b1ef98852..1b27b97ade 100644
--- a/src/vm/reference_glue.rs
+++ b/src/vm/reference_glue.rs
@@ -23,15 +23,14 @@ pub trait ReferenceGlue<VM: VMBinding> {
     ///
     /// Arguments:
     /// * `new_reference`: The reference whose referent is to be cleared.
-    fn clear_referent(new_reference: ObjectReference) {
-        Self::set_referent(new_reference, ObjectReference::NULL);
-    }
+    fn clear_referent(new_reference: ObjectReference);

     /// Get the referent from a weak reference object.
     ///
     /// Arguments:
-    /// * `object`: The object reference.
-    fn get_referent(object: ObjectReference) -> ObjectReference;
+    /// * `object`: Reference to the referent. `None` if the object currently does not point to a
+    ///   referent. This may happen if the reference has been cleared.
+    fn get_referent(object: ObjectReference) -> Option<ObjectReference>;

     /// Set the referent in a weak reference object.
     ///
@@ -40,14 +39,6 @@ pub trait ReferenceGlue<VM: VMBinding> {
     /// * `referent`: The referent object reference.
     fn set_referent(reff: ObjectReference, referent: ObjectReference);

-    /// Check if the referent has been cleared.
-    ///
-    /// Arguments:
-    /// * `referent`: The referent object reference.
-    fn is_referent_cleared(referent: ObjectReference) -> bool {
-        referent.is_null()
-    }
-
     /// For weak reference types, if the referent is cleared during GC, the reference
     /// will be added to a queue, and MMTk will call this method to inform
     /// the VM about the changes for those references. This method is used
diff --git a/src/vm/scanning.rs b/src/vm/scanning.rs
index 21768f4337..5e1a866c06 100644
--- a/src/vm/scanning.rs
+++ b/src/vm/scanning.rs
@@ -16,7 +16,7 @@ impl<ES: Edge, F: FnMut(ES)> EdgeVisitor<ES> for F {
     fn visit_edge(&mut self, edge: ES) {
         #[cfg(debug_assertions)]
         trace!(
-            "(FunctionClosure) Visit edge {:?} (pointing to {})",
+            "(FunctionClosure) Visit edge {:?} (pointing to {:?})",
             edge,
             edge.load()
         );
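Note (editorial): for binding authors, the net effect of the `edge_shape.rs` and `reference_glue.rs` changes is that "no reference" is now expressed in the type system instead of by a NULL sentinel. A toy analogue of the updated `SimpleEdge` in plain `std` (an `AtomicUsize` slot standing in for `*mut Atomic<Address>`):

```rust
use std::num::NonZeroUsize;
use std::sync::atomic::{AtomicUsize, Ordering};

/// Toy SimpleEdge: a word-sized slot where 0 means "no object reference".
struct Slot(AtomicUsize);

impl Slot {
    fn load(&self) -> Option<NonZeroUsize> {
        NonZeroUsize::new(self.0.load(Ordering::Relaxed)) // 0 decodes to None
    }
    fn store(&self, obj: NonZeroUsize) {
        self.0.store(obj.get(), Ordering::Relaxed); // write the raw word back
    }
}

fn main() {
    let slot = Slot(AtomicUsize::new(0));
    assert!(slot.load().is_none()); // a null slot / cleared referent
    slot.store(NonZeroUsize::new(0x1000).unwrap());
    assert_eq!(slot.load().map(NonZeroUsize::get), Some(0x1000));
}
```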