Skip to content

Commit

Permalink
Refactor live and garbage obj check
Browse files Browse the repository at this point in the history
This PR moves the code for `is_live_object` into `is_garbage_object`
because they are opposites of one another. It also replaces calls to
`is_live_object` and `rb_objspace_markable_object_p` with
`!is_garbage_object` and `!rb_objspace_garbage_object_p` respectively.

This refactor reduces the surface area of the GC API.

Co-authored-by: Peter Zhu <peter@peterzhu.ca>
  • Loading branch information
eileencodes and peterzhu2118 committed Mar 26, 2024
1 parent 19752cf commit ea84c29
Show file tree
Hide file tree
Showing 5 changed files with 13 additions and 27 deletions.
6 changes: 3 additions & 3 deletions ext/objspace/objspace.c
Expand Up @@ -577,7 +577,7 @@ reachable_object_from_i(VALUE obj, void *data_ptr)
VALUE key = obj;
VALUE val = obj;

if (rb_objspace_markable_object_p(obj)) {
if (!rb_objspace_garbage_object_p(obj)) {
if (NIL_P(rb_hash_lookup(data->refs, key))) {
rb_hash_aset(data->refs, key, Qtrue);

Expand Down Expand Up @@ -643,7 +643,7 @@ collect_values(st_data_t key, st_data_t value, st_data_t data)
static VALUE
reachable_objects_from(VALUE self, VALUE obj)
{
if (rb_objspace_markable_object_p(obj)) {
if (!rb_objspace_garbage_object_p(obj)) {
struct rof_data data;

if (rb_typeddata_is_kind_of(obj, &iow_data_type)) {
Expand Down Expand Up @@ -690,7 +690,7 @@ reachable_object_from_root_i(const char *category, VALUE obj, void *ptr)
rb_hash_aset(data->categories, category_str, category_objects);
}

if (rb_objspace_markable_object_p(obj) &&
if (!rb_objspace_garbage_object_p(obj) &&
obj != data->categories &&
obj != data->last_category_objects) {
if (rb_objspace_internal_object_p(obj)) {
Expand Down
23 changes: 5 additions & 18 deletions gc.c
Expand Up @@ -4386,24 +4386,18 @@ rb_objspace_call_finalizer(rb_objspace_t *objspace)
/* garbage objects will be collected soon. */
static inline bool
is_garbage_object(rb_objspace_t *objspace, VALUE ptr)
{
return is_lazy_sweeping(objspace) && GET_HEAP_PAGE(ptr)->flags.before_sweep &&
!MARKED_IN_BITMAP(GET_HEAP_MARK_BITS(ptr), ptr);
}

static inline bool
is_live_object(rb_objspace_t *objspace, VALUE ptr)
{
switch (BUILTIN_TYPE(ptr)) {
case T_NONE:
case T_MOVED:
case T_ZOMBIE:
return FALSE;
return TRUE;
default:
break;
}

return !is_garbage_object(objspace, ptr);
return is_lazy_sweeping(objspace) && GET_HEAP_PAGE(ptr)->flags.before_sweep &&
!MARKED_IN_BITMAP(GET_HEAP_MARK_BITS(ptr), ptr);
}

static inline int
Expand All @@ -4412,13 +4406,6 @@ is_markable_object(VALUE obj)
return !RB_SPECIAL_CONST_P(obj);
}

int
rb_objspace_markable_object_p(VALUE obj)
{
rb_objspace_t *objspace = &rb_objspace;
return is_markable_object(obj) && is_live_object(objspace, obj);
}

int
rb_objspace_garbage_object_p(VALUE obj)
{
Expand Down Expand Up @@ -4481,7 +4468,7 @@ id2ref(VALUE objid)

VALUE orig;
if (st_lookup(objspace->id_to_obj_tbl, objid, &orig) &&
is_live_object(objspace, orig)) {
!is_garbage_object(objspace, orig)) {
if (!rb_multi_ractor_p() || rb_ractor_shareable_p(orig)) {
return orig;
}
Expand Down Expand Up @@ -7498,7 +7485,7 @@ verify_internal_consistency_i(void *page_start, void *page_end, size_t stride,
for (obj = (VALUE)page_start; obj != (VALUE)page_end; obj += stride) {
void *poisoned = asan_unpoison_object_temporary(obj);

if (is_live_object(objspace, obj)) {
if (!is_garbage_object(objspace, obj)) {
/* count objects */
data->live_object_count++;
data->parent = obj;
Expand Down
4 changes: 2 additions & 2 deletions imemo.c
Expand Up @@ -215,7 +215,7 @@ rb_cc_table_mark(VALUE klass)
static bool
moved_or_living_object_strictly_p(VALUE obj)
{
return obj && (rb_objspace_markable_object_p(obj) || BUILTIN_TYPE(obj) == T_MOVED);
return obj && (!rb_objspace_garbage_object_p(obj) || BUILTIN_TYPE(obj) == T_MOVED);
}

static void
Expand Down Expand Up @@ -455,7 +455,7 @@ vm_ccs_free(struct rb_class_cc_entries *ccs, int alive, VALUE klass)
if (!alive) {
void *ptr = asan_unpoison_object_temporary((VALUE)cc);
// ccs can be free'ed.
if (rb_objspace_markable_object_p((VALUE)cc) &&
if (!rb_objspace_garbage_object_p((VALUE)cc) &&
IMEMO_TYPE_P(cc, imemo_callcache) &&
cc->klass == klass) {
// OK. maybe target cc.
Expand Down
3 changes: 1 addition & 2 deletions internal/gc.h
Expand Up @@ -213,7 +213,6 @@ void rb_gc_ractor_newobj_cache_clear(rb_ractor_newobj_cache_t *newobj_cache);
bool rb_gc_size_allocatable_p(size_t size);
size_t *rb_gc_size_pool_sizes(void);
size_t rb_gc_size_pool_id_for_size(size_t size);
int rb_objspace_garbage_object_p(VALUE obj);
bool rb_gc_is_ptr_to_obj(const void *ptr);

void rb_gc_mark_and_move(VALUE *ptr);
Expand All @@ -235,7 +234,7 @@ RUBY_SYMBOL_EXPORT_BEGIN
/* exports for objspace module */
void rb_objspace_reachable_objects_from(VALUE obj, void (func)(VALUE, void *), void *data);
void rb_objspace_reachable_objects_from_root(void (func)(const char *category, VALUE, void *), void *data);
int rb_objspace_markable_object_p(VALUE obj);
int rb_objspace_garbage_object_p(VALUE obj);
int rb_objspace_internal_object_p(VALUE obj);

void rb_objspace_each_objects(
Expand Down
4 changes: 2 additions & 2 deletions yjit.c
Expand Up @@ -1041,7 +1041,7 @@ rb_yjit_multi_ractor_p(void)
void
rb_assert_iseq_handle(VALUE handle)
{
RUBY_ASSERT_ALWAYS(rb_objspace_markable_object_p(handle));
RUBY_ASSERT_ALWAYS(!rb_objspace_garbage_object_p(handle));
RUBY_ASSERT_ALWAYS(IMEMO_TYPE_P(handle, imemo_iseq));
}

Expand All @@ -1054,7 +1054,7 @@ rb_IMEMO_TYPE_P(VALUE imemo, enum imemo_type imemo_type)
void
rb_assert_cme_handle(VALUE handle)
{
RUBY_ASSERT_ALWAYS(rb_objspace_markable_object_p(handle));
RUBY_ASSERT_ALWAYS(!rb_objspace_garbage_object_p(handle));
RUBY_ASSERT_ALWAYS(IMEMO_TYPE_P(handle, imemo_ment));
}

Expand Down

0 comments on commit ea84c29

Please sign in to comment.