Skip to content

Commit 8f80827

Browse files
author
William Kemper
committed
8332548: GenShen: Factor generational mode out of gc helpers
Reviewed-by: ysr
1 parent c4e2cf8 commit 8f80827

File tree

6 files changed

+184
-216
lines changed

6 files changed

+184
-216
lines changed

src/hotspot/share/gc/shenandoah/shenandoahConcurrentGC.cpp

Lines changed: 71 additions & 171 deletions
Original file line numberDiff line numberDiff line change
@@ -141,19 +141,6 @@ bool ShenandoahConcurrentGC::collect(GCCause::Cause cause) {
141141
// Complete marking under STW, and start evacuation
142142
vmop_entry_final_mark();
143143

144-
// If GC was cancelled before final mark, then the safepoint operation will do nothing
145-
// and the concurrent mark will still be in progress. In this case it is safe to resume
146-
// the degenerated cycle from the marking phase. On the other hand, if the GC is cancelled
147-
// after final mark (but before this check), then the final mark safepoint operation
148-
// will have finished the mark (setting concurrent mark in progress to false). Final mark
149-
// will also have setup state (in concurrent stack processing) that will not be safe to
150-
// resume from the marking phase in the degenerated cycle. That is, if the cancellation
151-
// occurred after final mark, we must resume the degenerated cycle after the marking phase.
152-
if (_generation->is_concurrent_mark_in_progress() && check_cancellation_and_abort(ShenandoahDegenPoint::_degenerated_mark)) {
153-
assert(!heap->is_concurrent_weak_root_in_progress(), "Weak roots should not be in progress when concurrent mark is in progress");
154-
return false;
155-
}
156-
157144
// Concurrent stack processing
158145
if (heap->is_evacuation_in_progress()) {
159146
entry_thread_roots();
@@ -231,31 +218,7 @@ bool ShenandoahConcurrentGC::collect(GCCause::Cause cause) {
231218
// We defer generation resizing actions until after cset regions have been recycled. We do this even following an
232219
// abbreviated cycle.
233220
if (heap->mode()->is_generational()) {
234-
if (!heap->old_generation()->is_parseable()) {
235-
// Class unloading may render the card offsets unusable, so we must rebuild them before
236-
// the next remembered set scan. We _could_ let the control thread do this sometime after
237-
// the global cycle has completed and before the next young collection, but under memory
238-
// pressure the control thread may not have the time (that is, because it's running back
239-
// to back GCs). In that scenario, we would have to make the old regions parsable before
240-
// we could start a young collection. This could delay the start of the young cycle and
241-
// throw off the heuristics.
242-
entry_global_coalesce_and_fill();
243-
}
244-
245-
ShenandoahGenerationalHeap::TransferResult result;
246-
{
247-
ShenandoahGenerationalHeap* gen_heap = ShenandoahGenerationalHeap::heap();
248-
ShenandoahHeapLocker locker(gen_heap->lock());
249-
250-
result = gen_heap->balance_generations();
251-
gen_heap->reset_generation_reserves();
252-
}
253-
254-
LogTarget(Info, gc, ergo) lt;
255-
if (lt.is_enabled()) {
256-
LogStream ls(lt);
257-
result.print_on("Concurrent GC", &ls);
258-
}
221+
ShenandoahGenerationalHeap::heap()->complete_concurrent_cycle();
259222
}
260223
return true;
261224
}
@@ -662,6 +625,7 @@ void ShenandoahConcurrentGC::op_init_mark() {
662625
start_mark();
663626

664627
if (_do_old_gc_bootstrap) {
628+
shenandoah_assert_generational();
665629
// Update region state for both young and old regions
666630
// TODO: We should be able to pull this out of the safepoint for the bootstrap
667631
// cycle. The top of an old region will only move when a GC cycle evacuates
@@ -749,138 +713,100 @@ void ShenandoahConcurrentGC::op_final_mark() {
749713
// Has to be done after cset selection
750714
heap->prepare_concurrent_roots();
751715

752-
if (heap->mode()->is_generational()) {
753-
if (!heap->collection_set()->is_empty() || heap->old_generation()->has_in_place_promotions()) {
754-
// Even if the collection set is empty, we need to do evacuation if there are regions to be promoted in place.
755-
// Concurrent evacuation takes responsibility for registering objects and setting the remembered set cards to dirty.
756-
757-
LogTarget(Debug, gc, cset) lt;
758-
if (lt.is_enabled()) {
759-
ResourceMark rm;
760-
LogStream ls(lt);
761-
heap->collection_set()->print_on(&ls);
762-
}
763-
764-
if (ShenandoahVerify) {
765-
heap->verifier()->verify_before_evacuation();
766-
}
767-
768-
heap->set_evacuation_in_progress(true);
769-
770-
// Verify before arming for concurrent processing.
771-
// Otherwise, verification can trigger stack processing.
772-
if (ShenandoahVerify) {
773-
heap->verifier()->verify_during_evacuation();
774-
}
775-
776-
// Generational mode may promote objects in place during the evacuation phase.
777-
// If that is the only reason we are evacuating, we don't need to update references
778-
// and there will be no forwarded objects on the heap.
779-
heap->set_has_forwarded_objects(!heap->collection_set()->is_empty());
780-
781-
// Arm nmethods/stack for concurrent processing
782-
if (!heap->collection_set()->is_empty()) {
783-
// Iff objects will be evaluated, arm the nmethod barriers. These will be disarmed
784-
// under the same condition (established in prepare_concurrent_roots) after strong
785-
// root evacuation has completed (see op_strong_roots).
786-
ShenandoahCodeRoots::arm_nmethods_for_evac();
787-
ShenandoahStackWatermark::change_epoch_id();
788-
}
789-
790-
if (ShenandoahPacing) {
791-
heap->pacer()->setup_for_evac();
792-
}
793-
} else {
794-
if (ShenandoahVerify) {
795-
heap->verifier()->verify_after_concmark();
796-
}
797-
798-
if (VerifyAfterGC) {
799-
Universe::verify();
800-
}
716+
if (!heap->collection_set()->is_empty() || has_in_place_promotions(heap)) {
717+
// Even if the collection set is empty, we need to do evacuation if there are regions to be promoted in place.
718+
// Concurrent evacuation takes responsibility for registering objects and setting the remembered set cards to dirty.
719+
720+
LogTarget(Debug, gc, cset) lt;
721+
if (lt.is_enabled()) {
722+
ResourceMark rm;
723+
LogStream ls(lt);
724+
heap->collection_set()->print_on(&ls);
801725
}
802-
} else {
803-
// Not is_generational()
804-
if (!heap->collection_set()->is_empty()) {
805-
LogTarget(Debug, gc, ergo) lt;
806-
if (lt.is_enabled()) {
807-
ResourceMark rm;
808-
LogStream ls(lt);
809-
heap->collection_set()->print_on(&ls);
810-
}
811726

812-
if (ShenandoahVerify) {
813-
heap->verifier()->verify_before_evacuation();
814-
}
727+
if (ShenandoahVerify) {
728+
heap->verifier()->verify_before_evacuation();
729+
}
815730

816-
heap->set_evacuation_in_progress(true);
731+
// TODO: Do we need to set this if we are only promoting regions in place? We don't need the barriers on for that.
732+
heap->set_evacuation_in_progress(true);
817733

818-
// Verify before arming for concurrent processing.
819-
// Otherwise, verification can trigger stack processing.
820-
if (ShenandoahVerify) {
821-
heap->verifier()->verify_during_evacuation();
822-
}
734+
// Verify before arming for concurrent processing.
735+
// Otherwise, verification can trigger stack processing.
736+
if (ShenandoahVerify) {
737+
heap->verifier()->verify_during_evacuation();
738+
}
823739

824-
// From here on, we need to update references.
825-
heap->set_has_forwarded_objects(true);
740+
// Generational mode may promote objects in place during the evacuation phase.
741+
// If that is the only reason we are evacuating, we don't need to update references
742+
// and there will be no forwarded objects on the heap.
743+
heap->set_has_forwarded_objects(!heap->collection_set()->is_empty());
826744

827-
// Arm nmethods/stack for concurrent processing
745+
// Arm nmethods/stack for concurrent processing
746+
if (!heap->collection_set()->is_empty()) {
747+
// Iff objects will be evaluated, arm the nmethod barriers. These will be disarmed
748+
// under the same condition (established in prepare_concurrent_roots) after strong
749+
// root evacuation has completed (see op_strong_roots).
828750
ShenandoahCodeRoots::arm_nmethods_for_evac();
829751
ShenandoahStackWatermark::change_epoch_id();
752+
}
830753

831-
if (ShenandoahPacing) {
832-
heap->pacer()->setup_for_evac();
833-
}
834-
} else {
835-
if (ShenandoahVerify) {
836-
heap->verifier()->verify_after_concmark();
837-
}
838-
839-
if (VerifyAfterGC) {
840-
Universe::verify();
841-
}
754+
if (ShenandoahPacing) {
755+
heap->pacer()->setup_for_evac();
756+
}
757+
} else {
758+
if (ShenandoahVerify) {
759+
heap->verifier()->verify_after_concmark();
760+
}
761+
762+
if (VerifyAfterGC) {
763+
Universe::verify();
842764
}
843765
}
844766
}
845767
}
846768

769+
bool ShenandoahConcurrentGC::has_in_place_promotions(ShenandoahHeap* heap) {
770+
return heap->mode()->is_generational() && heap->old_generation()->has_in_place_promotions();
771+
}
772+
773+
template<bool GENERATIONAL>
847774
class ShenandoahConcurrentEvacThreadClosure : public ThreadClosure {
848775
private:
849776
OopClosure* const _oops;
850-
851777
public:
852-
ShenandoahConcurrentEvacThreadClosure(OopClosure* oops);
853-
void do_thread(Thread* thread);
854-
};
855-
856-
ShenandoahConcurrentEvacThreadClosure::ShenandoahConcurrentEvacThreadClosure(OopClosure* oops) :
857-
_oops(oops) {
858-
}
778+
explicit ShenandoahConcurrentEvacThreadClosure(OopClosure* oops) : _oops(oops) {}
859779

860-
void ShenandoahConcurrentEvacThreadClosure::do_thread(Thread* thread) {
861-
JavaThread* const jt = JavaThread::cast(thread);
862-
StackWatermarkSet::finish_processing(jt, _oops, StackWatermarkKind::gc);
863-
ShenandoahThreadLocalData::enable_plab_promotions(thread);
864-
}
780+
void do_thread(Thread* thread) override {
781+
JavaThread* const jt = JavaThread::cast(thread);
782+
StackWatermarkSet::finish_processing(jt, _oops, StackWatermarkKind::gc);
783+
if (GENERATIONAL) {
784+
ShenandoahThreadLocalData::enable_plab_promotions(thread);
785+
}
786+
}
787+
};
865788

789+
template<bool GENERATIONAL>
866790
class ShenandoahConcurrentEvacUpdateThreadTask : public WorkerTask {
867791
private:
868792
ShenandoahJavaThreadsIterator _java_threads;
869793

870794
public:
871-
ShenandoahConcurrentEvacUpdateThreadTask(uint n_workers) :
795+
explicit ShenandoahConcurrentEvacUpdateThreadTask(uint n_workers) :
872796
WorkerTask("Shenandoah Evacuate/Update Concurrent Thread Roots"),
873797
_java_threads(ShenandoahPhaseTimings::conc_thread_roots, n_workers) {
874798
}
875799

876-
void work(uint worker_id) {
877-
Thread* worker_thread = Thread::current();
878-
ShenandoahThreadLocalData::enable_plab_promotions(worker_thread);
800+
void work(uint worker_id) override {
801+
if (GENERATIONAL) {
802+
Thread* worker_thread = Thread::current();
803+
ShenandoahThreadLocalData::enable_plab_promotions(worker_thread);
804+
}
879805

880806
// ShenandoahEvacOOMScope has to be setup by ShenandoahContextEvacuateUpdateRootsClosure.
881807
// Otherwise, may deadlock with watermark lock
882808
ShenandoahContextEvacuateUpdateRootsClosure oops_cl;
883-
ShenandoahConcurrentEvacThreadClosure thr_cl(&oops_cl);
809+
ShenandoahConcurrentEvacThreadClosure<GENERATIONAL> thr_cl(&oops_cl);
884810
_java_threads.threads_do(&thr_cl, worker_id);
885811
}
886812
};
@@ -889,8 +815,13 @@ void ShenandoahConcurrentGC::op_thread_roots() {
889815
ShenandoahHeap* const heap = ShenandoahHeap::heap();
890816
assert(heap->is_evacuation_in_progress(), "Checked by caller");
891817
ShenandoahGCWorkerPhase worker_phase(ShenandoahPhaseTimings::conc_thread_roots);
892-
ShenandoahConcurrentEvacUpdateThreadTask task(heap->workers()->active_workers());
893-
heap->workers()->run_task(&task);
818+
if (heap->mode()->is_generational()) {
819+
ShenandoahConcurrentEvacUpdateThreadTask<true> task(heap->workers()->active_workers());
820+
heap->workers()->run_task(&task);
821+
} else {
822+
ShenandoahConcurrentEvacUpdateThreadTask<false> task(heap->workers()->active_workers());
823+
heap->workers()->run_task(&task);
824+
}
894825
}
895826

896827
void ShenandoahConcurrentGC::op_weak_refs() {
@@ -1268,41 +1199,10 @@ void ShenandoahConcurrentGC::op_final_roots() {
12681199
heap->old_generation()->transfer_pointers_from_satb();
12691200
}
12701201

1271-
ShenandoahMarkingContext *ctx = heap->complete_marking_context();
1272-
for (size_t i = 0; i < heap->num_regions(); i++) {
1273-
ShenandoahHeapRegion *r = heap->get_region(i);
1274-
if (r->is_active() && r->is_young()) {
1275-
HeapWord* tams = ctx->top_at_mark_start(r);
1276-
HeapWord* top = r->top();
1277-
if (top > tams) {
1278-
r->reset_age();
1279-
} else if (ShenandoahGenerationalHeap::heap()->is_aging_cycle()) {
1280-
r->increment_age();
1281-
}
1282-
}
1283-
}
1202+
ShenandoahGenerationalHeap::heap()->update_region_ages();
12841203
}
12851204
}
12861205

1287-
void ShenandoahConcurrentGC::entry_global_coalesce_and_fill() {
1288-
ShenandoahHeap* const heap = ShenandoahHeap::heap();
1289-
1290-
const char* msg = "Coalescing and filling old regions in global collect";
1291-
ShenandoahConcurrentPhase gc_phase(msg, ShenandoahPhaseTimings::conc_coalesce_and_fill);
1292-
1293-
TraceCollectorStats tcs(heap->monitoring_support()->concurrent_collection_counters());
1294-
EventMark em("%s", msg);
1295-
ShenandoahWorkerScope scope(heap->workers(),
1296-
ShenandoahWorkerPolicy::calc_workers_for_conc_marking(),
1297-
"concurrent coalesce and fill");
1298-
1299-
op_global_coalesce_and_fill();
1300-
}
1301-
1302-
void ShenandoahConcurrentGC::op_global_coalesce_and_fill() {
1303-
ShenandoahGenerationalHeap::heap()->coalesce_and_fill_old_regions(true);
1304-
}
1305-
13061206
void ShenandoahConcurrentGC::op_cleanup_complete() {
13071207
ShenandoahHeap::heap()->free_set()->recycle_trash();
13081208
}

src/hotspot/share/gc/shenandoah/shenandoahConcurrentGC.hpp

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -104,7 +104,7 @@ class ShenandoahConcurrentGC : public ShenandoahGC {
104104
void entry_evacuate();
105105
void entry_update_thread_roots();
106106
void entry_updaterefs();
107-
void entry_global_coalesce_and_fill();
107+
108108
void entry_cleanup_complete();
109109

110110
// Actual work for the phases
@@ -124,7 +124,7 @@ class ShenandoahConcurrentGC : public ShenandoahGC {
124124
void op_update_thread_roots();
125125
void op_final_updaterefs();
126126
void op_final_roots();
127-
void op_global_coalesce_and_fill();
127+
128128
void op_cleanup_complete();
129129

130130
protected:
@@ -133,6 +133,8 @@ class ShenandoahConcurrentGC : public ShenandoahGC {
133133
private:
134134
void start_mark();
135135

136+
static bool has_in_place_promotions(ShenandoahHeap* heap) ;
137+
136138
// Messages for GC trace events, they have to be immortal for
137139
// passing around the logging/tracing systems
138140
const char* init_mark_event_message() const;

0 commit comments

Comments (0)