8273599: Remove cross_threshold method usage around GC
Reviewed-by: sjohanss, ayang
Thomas Schatzl committed Sep 15, 2021
1 parent 02af541 commit 92c30c941be09cd43ca794b180b8a1b6f7f952e1
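
The diff below removes the caller-side block offset table (BOT) threshold bookkeeping: initialize_threshold()/initialize_bot_threshold() no longer returns a threshold, and the conditional cross_threshold() call becomes an unconditional alloc_block()/alloc_block_in_bot() call. The following is a minimal sketch of the caller pattern before and after, not the actual JDK sources; Region is a simplified stand-in for HeapRegion/CompactibleSpace and forward_block is an illustrative helper.

```c++
#include <cstddef>

typedef char HeapWord;  // stand-in for HotSpot's HeapWord

struct Region {
  // After this change: returns nothing; the BOT keeps its own threshold.
  void initialize_bot_threshold() { /* reset the BOT's internal threshold */ }

  // After this change: called unconditionally for every forwarded/filled block;
  // the BOT decides internally whether any offset entries need updating.
  void alloc_block_in_bot(HeapWord* start, HeapWord* end) { /* update BOT */ }
};

// Caller before (threshold cached and checked by the caller, as removed below):
//   if (_compaction_top > _threshold) {
//     _threshold = _current_region->cross_threshold(_compaction_top - size, _compaction_top);
//   }
//
// Caller after (no _threshold field needed at all):
void forward_block(Region* r, HeapWord*& compaction_top, size_t size) {
  compaction_top += size;
  r->alloc_block_in_bot(compaction_top - size, compaction_top);
}
```

This mirrors the pattern visible in the G1FullGCCompactionPoint::forward and CompactibleSpace::forward hunks below.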
@@ -405,9 +405,8 @@ void G1BlockOffsetTablePart::zero_bottom_entry_raw() {
_bot->set_offset_array_raw(bottom_index, 0);
}

-HeapWord* G1BlockOffsetTablePart::initialize_threshold() {
+void G1BlockOffsetTablePart::initialize_threshold() {
_next_offset_threshold = _hr->bottom() + BOTConstants::N_words;
-return _next_offset_threshold;
}

void G1BlockOffsetTablePart::set_for_starts_humongous(HeapWord* obj_top, size_t fill_size) {
@@ -193,7 +193,7 @@ class G1BlockOffsetTablePart {

// Initialize the threshold to reflect the first boundary after the
// bottom of the covered region.
-HeapWord* initialize_threshold();
+void initialize_threshold();

void reset_bot() {
zero_bottom_entry_raw();
@@ -154,7 +154,7 @@ class RemoveSelfForwardPtrObjClosure: public ObjectClosure {

HeapWord* obj_end = obj_addr + obj_size;
_last_forwarded_object_end = obj_end;
-_hr->cross_threshold(obj_addr, obj_end);
+_hr->alloc_block_in_bot(obj_addr, obj_end);
}
}

@@ -171,13 +171,13 @@ class RemoveSelfForwardPtrObjClosure: public ObjectClosure {
CollectedHeap::fill_with_objects(start, gap_size);

HeapWord* end_first_obj = start + cast_to_oop(start)->size();
-_hr->cross_threshold(start, end_first_obj);
+_hr->alloc_block_in_bot(start, end_first_obj);
// Fill_with_objects() may have created multiple (i.e. two)
// objects, as the max_fill_size() is half a region.
// After updating the BOT for the first object, also update the
// BOT for the second object to make the BOT complete.
if (end_first_obj != end) {
-_hr->cross_threshold(end_first_obj, end);
+_hr->alloc_block_in_bot(end_first_obj, end);
#ifdef ASSERT
size_t size_second_obj = cast_to_oop(end_first_obj)->size();
HeapWord* end_of_second_obj = end_first_obj + size_second_obj;
@@ -30,7 +30,6 @@

G1FullGCCompactionPoint::G1FullGCCompactionPoint() :
_current_region(NULL),
-_threshold(NULL),
_compaction_top(NULL) {
_compaction_regions = new (ResourceObj::C_HEAP, mtGC) GrowableArray<HeapRegion*>(32, mtGC);
_compaction_region_iterator = _compaction_regions->begin();
@@ -49,7 +48,7 @@ void G1FullGCCompactionPoint::update() {
void G1FullGCCompactionPoint::initialize_values(bool init_threshold) {
_compaction_top = _current_region->compaction_top();
if (init_threshold) {
-_threshold = _current_region->initialize_threshold();
+_current_region->initialize_bot_threshold();
}
}

@@ -123,9 +122,7 @@ void G1FullGCCompactionPoint::forward(oop object, size_t size) {

// Update compaction values.
_compaction_top += size;
-if (_compaction_top > _threshold) {
-_threshold = _current_region->cross_threshold(_compaction_top - size, _compaction_top);
-}
+_current_region->alloc_block_in_bot(_compaction_top - size, _compaction_top);
}

void G1FullGCCompactionPoint::add(HeapRegion* hr) {
@@ -33,7 +33,6 @@ class HeapRegion;

class G1FullGCCompactionPoint : public CHeapObj<mtGC> {
HeapRegion* _current_region;
-HeapWord* _threshold;
HeapWord* _compaction_top;
GrowableArray<HeapRegion*>* _compaction_regions;
GrowableArrayIterator<HeapRegion*> _compaction_region_iterator;
@@ -799,13 +799,12 @@ void HeapRegion::mangle_unused_area() {
}
#endif

-HeapWord* HeapRegion::initialize_threshold() {
-return _bot_part.initialize_threshold();
+void HeapRegion::initialize_bot_threshold() {
+_bot_part.initialize_threshold();
}

-HeapWord* HeapRegion::cross_threshold(HeapWord* start, HeapWord* end) {
+void HeapRegion::alloc_block_in_bot(HeapWord* start, HeapWord* end) {
_bot_part.alloc_block(start, end);
-return _bot_part.threshold();
}

void HeapRegion::object_iterate(ObjectClosure* blk) {
@@ -167,8 +167,8 @@ class HeapRegion : public CHeapObj<mtGC> {

// Full GC support methods.

-HeapWord* initialize_threshold();
-HeapWord* cross_threshold(HeapWord* start, HeapWord* end);
+void initialize_bot_threshold();
+void alloc_block_in_bot(HeapWord* start, HeapWord* end);

// Update heap region that has been compacted to be consistent after Full GC.
void reset_compacted_after_full_gc();
@@ -474,12 +474,11 @@ void BlockOffsetArrayContigSpace::alloc_block_work(HeapWord* blk_start,
#endif
}

-HeapWord* BlockOffsetArrayContigSpace::initialize_threshold() {
+void BlockOffsetArrayContigSpace::initialize_threshold() {
_next_offset_index = _array->index_for(_bottom);
_next_offset_index++;
_next_offset_threshold =
_array->address_for_index(_next_offset_index);
-return _next_offset_threshold;
}

void BlockOffsetArrayContigSpace::zero_bottom_entry() {
@@ -406,9 +406,9 @@ class BlockOffsetArrayContigSpace: public BlockOffsetArray {
void set_contig_space(ContiguousSpace* sp) { set_space((Space*)sp); }

// Initialize the threshold for an empty heap.
-HeapWord* initialize_threshold();
+void initialize_threshold();
// Zero out the entry for _bottom (offset will be zero)
-void zero_bottom_entry();
+void zero_bottom_entry();

// Return the next threshold, the point at which the table should be
// updated.
@@ -364,7 +364,7 @@ HeapWord* CompactibleSpace::forward(oop q, size_t size,
}
compact_top = cp->space->bottom();
cp->space->set_compaction_top(compact_top);
-cp->threshold = cp->space->initialize_threshold();
+cp->space->initialize_threshold();
compaction_max_size = pointer_delta(cp->space->end(), compact_top);
}

@@ -381,12 +381,10 @@ HeapWord* CompactibleSpace::forward(oop q, size_t size,

compact_top += size;

-// we need to update the offset table so that the beginnings of objects can be
+// We need to update the offset table so that the beginnings of objects can be
// found during scavenge. Note that we are updating the offset table based on
// where the object will be once the compaction phase finishes.
-if (compact_top > cp->threshold)
-cp->threshold =
-cp->space->cross_threshold(compact_top - size, compact_top);
+cp->space->alloc_block(compact_top - size, compact_top);
return compact_top;
}

@@ -402,10 +400,9 @@ void ContiguousSpace::prepare_for_compaction(CompactPoint* cp) {

if (cp->space == NULL) {
assert(cp->gen != NULL, "need a generation");
-assert(cp->threshold == NULL, "just checking");
assert(cp->gen->first_compaction_space() == this, "just checking");
cp->space = cp->gen->first_compaction_space();
-cp->threshold = cp->space->initialize_threshold();
+cp->space->initialize_threshold();
cp->space->set_compaction_top(cp->space->bottom());
}

@@ -765,13 +762,12 @@ void ContiguousSpace::allocate_temporary_filler(int factor) {
}
}

-HeapWord* OffsetTableContigSpace::initialize_threshold() {
-return _offsets.initialize_threshold();
+void OffsetTableContigSpace::initialize_threshold() {
+_offsets.initialize_threshold();
}

-HeapWord* OffsetTableContigSpace::cross_threshold(HeapWord* start, HeapWord* end) {
+void OffsetTableContigSpace::alloc_block(HeapWord* start, HeapWord* end) {
_offsets.alloc_block(start, end);
-return _offsets.threshold();
}

OffsetTableContigSpace::OffsetTableContigSpace(BlockOffsetSharedArray* sharedOffsetArray,
@@ -298,10 +298,9 @@ class CompactPoint : public StackObj {
public:
Generation* gen;
CompactibleSpace* space;
-HeapWord* threshold;

CompactPoint(Generation* g = NULL) :
-gen(g), space(NULL), threshold(0) {}
+gen(g), space(NULL) {}
};

// A space that supports compaction operations. This is usually, but not
@@ -377,10 +376,8 @@ class CompactibleSpace: public Space {

// Some contiguous spaces may maintain some data structures that should
// be updated whenever an allocation crosses a boundary. This function
-// returns the first such boundary.
-// (The default implementation returns the end of the space, so the
-// boundary is never crossed.)
-virtual HeapWord* initialize_threshold() { return end(); }
+// initializes these data structures for further updates.
+virtual void initialize_threshold() { }

// "q" is an object of the given "size" that should be forwarded;
// "cp" names the generation ("gen") and containing "this" (which must
@@ -391,9 +388,8 @@ class CompactibleSpace: public Space {
// be one, since compaction must succeed -- we go to the first space of
// the previous generation if necessary, updating "cp"), reset compact_top
// and then forward. In either case, returns the new value of "compact_top".
-// If the forwarding crosses "cp->threshold", invokes the "cross_threshold"
-// function of the then-current compaction space, and updates "cp->threshold
-// accordingly".
+// Invokes the "alloc_block" function of the then-current compaction
+// space.
virtual HeapWord* forward(oop q, size_t size, CompactPoint* cp,
HeapWord* compact_top);

@@ -408,12 +404,9 @@ class CompactibleSpace: public Space {
HeapWord* _first_dead;
HeapWord* _end_of_live;

-// This the function is invoked when an allocation of an object covering
-// "start" to "end occurs crosses the threshold; returns the next
-// threshold. (The default implementation does nothing.)
-virtual HeapWord* cross_threshold(HeapWord* start, HeapWord* the_end) {
-return end();
-}
+// This the function to invoke when an allocation of an object covering
+// "start" to "end" occurs to update other internal data structures.
+virtual void alloc_block(HeapWord* start, HeapWord* the_end) { }
};

class GenSpaceMangler;
@@ -633,8 +626,8 @@ class OffsetTableContigSpace: public ContiguousSpace {
inline HeapWord* par_allocate(size_t word_size);

// MarkSweep support phase3
-virtual HeapWord* initialize_threshold();
-virtual HeapWord* cross_threshold(HeapWord* start, HeapWord* end);
+virtual void initialize_threshold();
+virtual void alloc_block(HeapWord* start, HeapWord* end);

virtual void print_on(outputStream* st) const;

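With the callers simplified as above, the threshold comparison presumably lives entirely inside the BOT's alloc_block path; the hunks here do not show that side, so the following is only a hedged sketch of the assumed shape. The class, field, and helper names (BotPartSketch, _next_offset_threshold, alloc_block_work) are illustrative, not taken from this diff.

```c++
#include <cstddef>

typedef char HeapWord;  // stand-in type, as in the sketch above

// Assumed internal shape of the offset table: it tracks the next card
// threshold itself and ignores blocks that do not cross it.
class BotPartSketch {
  HeapWord* _next_offset_threshold = nullptr;  // owned by the table, not by callers

public:
  void initialize_threshold(HeapWord* bottom, size_t card_words) {
    _next_offset_threshold = bottom + card_words;  // first card boundary above bottom
  }

  void alloc_block(HeapWord* blk_start, HeapWord* blk_end) {
    if (blk_end > _next_offset_threshold) {
      alloc_block_work(blk_start, blk_end);  // only then do offset entries change
    }
  }

private:
  void alloc_block_work(HeapWord* /*blk_start*/, HeapWord* blk_end) {
    // Record offsets for the cards spanned by the block and advance the
    // threshold past blk_end (details elided in this sketch).
    _next_offset_threshold = blk_end;
  }
};
```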