Skip to content
Permalink
Browse files
8269596: Snapshot soft ref policy before marking/copying
Reviewed-by: tschatzl, kbarrett
  • Loading branch information
albertnetymk committed Jul 2, 2021
1 parent 4107dcf commit f8bcbf0172af25ac17b110d22232bd618cfd621a
Showing 11 changed files with 20 additions and 39 deletions.
@@ -2990,7 +2990,7 @@ void G1CollectedHeap::do_collection_pause_at_safepoint_helper(double target_paus
// Please see comment in g1CollectedHeap.hpp and
// G1CollectedHeap::ref_processing_init() to see how
// reference processing currently works in G1.
_ref_processor_stw->enable_discovery();
_ref_processor_stw->start_discovery(false /* always_clear */);

// We want to temporarily turn off discovery by the
// CM ref processor, if necessary, and turn it back on
@@ -3269,8 +3269,6 @@ void G1CollectedHeap::process_discovered_references(G1ParScanThreadStateSet* per
pss->set_ref_discoverer(NULL);
assert(pss->queue_is_empty(), "pre-condition");

// Setup the soft refs policy...
rp->setup_policy(false);

ReferenceProcessorPhaseTimes& pt = *phase_times()->ref_phase_times();

@@ -716,9 +716,7 @@ void G1ConcurrentMark::pre_concurrent_start(GCCause::Cause cause) {
void G1ConcurrentMark::post_concurrent_mark_start() {
// Start Concurrent Marking weak-reference discovery.
ReferenceProcessor* rp = _g1h->ref_processor_cm();
// enable ("weak") refs discovery
rp->enable_discovery();
rp->setup_policy(false); // snapshot the soft ref policy to be used in this cycle
rp->start_discovery(false /* always_clear */);

SATBMarkQueueSet& satb_mq_set = G1BarrierSet::satb_mark_queue_set();
// This is the start of the marking cycle, we expect all
@@ -1125,7 +1123,7 @@ void G1ConcurrentMark::remark() {

bool const mark_finished = !has_overflown();
if (mark_finished) {
weak_refs_work(false /* clear_all_soft_refs */);
weak_refs_work();

SATBMarkQueueSet& satb_mq_set = G1BarrierSet::satb_mark_queue_set();
// We're done with marking.
@@ -1492,7 +1490,7 @@ class G1CMRefProcProxyTask : public RefProcProxyTask {
}
};

void G1ConcurrentMark::weak_refs_work(bool clear_all_soft_refs) {
void G1ConcurrentMark::weak_refs_work() {
ResourceMark rm;

// Is alive closure.
@@ -1506,8 +1504,6 @@ void G1ConcurrentMark::weak_refs_work(bool clear_all_soft_refs) {
// See the comment in G1CollectedHeap::ref_processing_init()
// about how reference processing currently works in G1.

// Set the soft reference policy
rp->setup_policy(clear_all_soft_refs);
assert(_global_mark_stack.is_empty(), "mark stack should be empty");

// We need at least one active thread. If reference processing
@@ -362,8 +362,7 @@ class G1ConcurrentMark : public CHeapObj<mtGC> {

void finalize_marking();

void weak_refs_work_parallel_part(BoolObjectClosure* is_alive, bool purged_classes);
void weak_refs_work(bool clear_all_soft_refs);
void weak_refs_work();

void report_object_count(bool mark_completed);

@@ -182,8 +182,7 @@ void G1FullCollector::prepare_collection() {
PrepareRegionsClosure cl(this);
_heap->heap_region_iterate(&cl);

reference_processor()->enable_discovery();
reference_processor()->setup_policy(scope()->should_clear_soft_refs());
reference_processor()->start_discovery(scope()->should_clear_soft_refs());

// Clear and activate derived pointer collection.
clear_and_activate_derived_pointers();
@@ -1786,10 +1786,7 @@ bool PSParallelCompact::invoke_no_policy(bool maximum_heap_compaction) {
DerivedPointerTable::clear();
#endif

ref_processor()->enable_discovery();
ref_processor()->setup_policy(maximum_heap_compaction);

bool marked_for_unloading = false;
ref_processor()->start_discovery(maximum_heap_compaction);

marking_start.update();
marking_phase(vmthread_cm, maximum_heap_compaction, &_gc_tracer);
@@ -448,8 +448,7 @@ bool PSScavenge::invoke_no_policy() {
DerivedPointerTable::clear();
#endif

reference_processor()->enable_discovery();
reference_processor()->setup_policy(false);
reference_processor()->start_discovery(false /* always_clear */);

const PreGenGCValues pre_gc_values = heap->get_pre_gc_values();

@@ -485,7 +484,6 @@ bool PSScavenge::invoke_no_policy() {
{
GCTraceTime(Debug, gc, phases) tm("Reference Processing", &_gc_timer);

reference_processor()->setup_policy(false); // not always_clear
reference_processor()->set_active_mt_degree(active_workers);
ReferenceProcessorStats stats;
ReferenceProcessorPhaseTimes pt(&_gc_timer, reference_processor()->max_num_queues());
@@ -584,7 +584,6 @@ void DefNewGeneration::collect(bool full,

FastKeepAliveClosure keep_alive(this, &scan_weak_ref);
ReferenceProcessor* rp = ref_processor();
rp->setup_policy(clear_all_soft_refs);
ReferenceProcessorPhaseTimes pt(_gc_timer, rp->max_num_queues());
SerialGCRefProcProxyTask task(is_alive, keep_alive, evacuate_followers);
const ReferenceProcessorStats& stats = rp->process_discovered_references(task, pt);
@@ -77,7 +77,6 @@ void GenMarkSweep::invoke_at_safepoint(ReferenceProcessor* rp, bool clear_all_so
assert(ref_processor() == NULL, "no stomping");
assert(rp != NULL, "should be non-NULL");
set_ref_processor(rp);
rp->setup_policy(clear_all_softrefs);

gch->trace_heap_before_gc(_gc_tracer);

@@ -198,7 +197,6 @@ void GenMarkSweep::mark_sweep_phase1(bool clear_all_softrefs) {
{
GCTraceTime(Debug, gc, phases) tm_m("Reference Processing", gc_timer());

ref_processor()->setup_policy(clear_all_softrefs);
ReferenceProcessorPhaseTimes pt(_gc_timer, ref_processor()->max_num_queues());
SerialGCRefProcProxyTask task(is_alive, keep_alive, follow_stack_closure);
const ReferenceProcessorStats& stats = ref_processor()->process_discovered_references(task, pt);
@@ -28,7 +28,6 @@
#include "gc/serial/markSweep.hpp"

class GenMarkSweep : public MarkSweep {
friend class VM_MarkSweep;
public:
static void invoke_at_safepoint(ReferenceProcessor* rp, bool clear_all_softrefs);

@@ -478,16 +478,10 @@ void GenCollectedHeap::collect_generation(Generation* gen, bool full, size_t siz
// enqueue_discovered_references if the generation returns
// without doing any work.
ReferenceProcessor* rp = gen->ref_processor();
// If the discovery of ("weak") refs in this generation is
// atomic wrt other collectors in this configuration, we
// are guaranteed to have empty discovered ref lists.
if (rp->discovery_is_atomic()) {
rp->enable_discovery();
rp->setup_policy(clear_soft_refs);
} else {
// collect() below will enable discovery as appropriate
}
rp->start_discovery(clear_soft_refs);

gen->collect(full, clear_soft_refs, size, is_tlab);

rp->disable_discovery();
rp->verify_no_references_recorded();
}
@@ -285,18 +285,22 @@ class ReferenceProcessor : public ReferenceDiscoverer {
OopClosure* keep_alive,
VoidClosure* complete_gc);


void setup_policy(bool always_clear) {
_current_soft_ref_policy = always_clear ?
_always_clear_soft_ref_policy : _default_soft_ref_policy;
_current_soft_ref_policy->setup(); // snapshot the policy threshold
}
public:
static int number_of_subclasses_of_ref() { return (REF_PHANTOM - REF_OTHER); }

uint num_queues() const { return _num_queues; }
uint max_num_queues() const { return _max_num_queues; }
void set_active_mt_degree(uint v);

ReferencePolicy* setup_policy(bool always_clear) {
_current_soft_ref_policy = always_clear ?
_always_clear_soft_ref_policy : _default_soft_ref_policy;
_current_soft_ref_policy->setup(); // snapshot the policy threshold
return _current_soft_ref_policy;
void start_discovery(bool always_clear) {
enable_discovery();
setup_policy(always_clear);
}

// "Preclean" all the discovered reference lists by removing references that

1 comment on commit f8bcbf0

@openjdk-notifier
Copy link

@openjdk-notifier openjdk-notifier bot commented on f8bcbf0 Jul 2, 2021

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Please sign in to comment.