 #include "precompiled.hpp"
 #include "classfile/classLoaderData.hpp"
+#include "gc/shared/barrierSetNMethod.hpp"
 #include "gc/shared/taskqueue.inline.hpp"
 #include "gc/z/zAddress.inline.hpp"
 #include "gc/z/zGlobals.hpp"
 #include "gc/z/zGranuleMap.inline.hpp"
 #include "gc/z/zHeapIterator.hpp"
 #include "gc/z/zLock.inline.hpp"
+#include "gc/z/zNMethod.hpp"
 #include "gc/z/zOop.inline.hpp"
 #include "memory/iterator.inline.hpp"
 #include "utilities/bitMap.inline.hpp"
@@ -92,8 +94,8 @@ class ZHeapIteratorContext {
   }
 };

-template <bool Concurrent, bool Weak>
-class ZHeapIteratorRootOopClosure : public ZRootsIteratorClosure {
+template <bool Weak>
+class ZHeapIteratorRootOopClosure : public OopClosure {
 private:
   const ZHeapIteratorContext& _context;

@@ -102,11 +104,7 @@ class ZHeapIteratorRootOopClosure : public ZRootsIteratorClosure {
       return NativeAccess<AS_NO_KEEPALIVE | ON_PHANTOM_OOP_REF>::oop_load(p);
     }

-    if (Concurrent) {
-      return NativeAccess<AS_NO_KEEPALIVE>::oop_load(p);
-    }
-
-    return RawAccess<>::oop_load(p);
+    return NativeAccess<AS_NO_KEEPALIVE>::oop_load(p);
   }

 public:
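Note on the hunk above: the `Weak` template parameter now selects the load semantics at compile time, so each instantiation of `ZHeapIteratorRootOopClosure` carries only the access path it needs. A minimal standalone sketch of that compile-time dispatch pattern; the types and the weak/strong behavior below are hypothetical stand-ins, not the HotSpot API:

```cpp
#include <iostream>

// Hypothetical stand-ins: the real closure loads oops via NativeAccess
// with decorators chosen by the Weak template parameter.
struct Object {
  bool weakly_reachable;
};

template <bool Weak>
struct RootVisitor {
  // The branch on Weak is resolved per instantiation, so each closure
  // type compiles down to only the load path it actually uses.
  Object* load(Object* obj) const {
    if (Weak) {
      // Weak root: may report null instead of keeping the object alive
      // (the real code uses AS_NO_KEEPALIVE | ON_PHANTOM_OOP_REF).
      return obj->weakly_reachable ? nullptr : obj;
    }
    // Strong root: always report the object.
    return obj;
  }
};

int main() {
  Object o{/*weakly_reachable=*/true};
  std::cout << (RootVisitor<false>().load(&o) != nullptr) << "\n"; // 1
  std::cout << (RootVisitor<true>().load(&o) != nullptr) << "\n";  // 0
  return 0;
}
```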
@@ -121,22 +119,6 @@ class ZHeapIteratorRootOopClosure : public ZRootsIteratorClosure {
   virtual void do_oop(narrowOop* p) {
     ShouldNotReachHere();
   }
-
-  virtual void do_thread(Thread* thread) {
-    CodeBlobToOopClosure code_cl(this, false /* fix_oop_relocations */);
-    thread->oops_do(this, &code_cl);
-  }
-
-  virtual ZNMethodEntry nmethod_entry() const {
-    if (ClassUnloading) {
-      // All encountered nmethods should have been "entered" during stack walking
-      return ZNMethodEntry::VerifyDisarmed;
-    } else {
-      // All nmethods are considered roots and will be visited.
-      // Make sure that the unvisited gets fixed and disarmed before proceeding.
-      return ZNMethodEntry::PreBarrier;
-    }
-  }
 };

 template <bool VisitReferents>
@@ -180,7 +162,7 @@ ZHeapIterator::ZHeapIterator(uint nworkers, bool visit_weaks) :
     _bitmaps_lock(),
     _queues(nworkers),
     _array_queues(nworkers),
-    _concurrent_roots(),
+    _concurrent_roots(ClassLoaderData::_claim_other),
     _weak_roots(),
     _concurrent_weak_roots(),
     _terminator(nworkers, &_queues) {
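Note on the hunk above: `ClassLoaderData::_claim_other` is a claim token; during an iteration each ClassLoaderData is claimed once per token, so no CLD is visited twice. A toy model of the claiming idea under hypothetical names; the real protocol in `classLoaderData.hpp` claims atomically, while this single-threaded miniature only shows the shape:

```cpp
#include <iostream>

// Toy model: each ClassLoaderData remembers which claim tokens have
// already visited it, so a pass handing out one token touches every
// CLD at most once. Not the HotSpot implementation.
struct ClassLoaderDataModel {
  int claimed_bits = 0;

  bool try_claim(int token) {
    if (claimed_bits & token) {
      return false;              // already visited under this token
    }
    claimed_bits |= token;       // record the visit
    return true;
  }
};

int main() {
  const int claim_other = 1 << 2;  // stand-in for ClassLoaderData::_claim_other
  ClassLoaderDataModel cld;
  std::cout << cld.try_claim(claim_other) << "\n"; // 1: first closure visits
  std::cout << cld.try_claim(claim_other) << "\n"; // 0: repeat visit is skipped
  return 0;
}
```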
@@ -255,10 +237,83 @@ bool ZHeapIterator::mark_object(oop obj) {
   return bitmap->try_set_bit(index);
 }

-template <bool Concurrent, bool Weak, typename RootsIterator>
-void ZHeapIterator::push_roots(const ZHeapIteratorContext& context, RootsIterator& iter) {
-  ZHeapIteratorRootOopClosure<Concurrent, Weak> cl(context);
-  iter.oops_do(&cl);
+typedef ClaimingCLDToOopClosure<ClassLoaderData::_claim_other> ZHeapIteratorCLDCLosure;
+
+class ZHeapIteratorNMethodClosure : public NMethodClosure {
+private:
+  OopClosure* const        _cl;
+  BarrierSetNMethod* const _bs_nm;
+
+public:
+  ZHeapIteratorNMethodClosure(OopClosure* cl) :
+      _cl(cl),
+      _bs_nm(BarrierSet::barrier_set()->barrier_set_nmethod()) {}
+
+  virtual void do_nmethod(nmethod* nm) {
+    assert(!ClassUnloading, "Only used if class unloading is turned off");
+
+    // ClassUnloading is turned off, so all nmethods are considered strong,
+    // not only those on the call stacks. The heap iteration might happen
+    // before the concurrent processing of the code cache, so make sure that
+    // all nmethods have been processed before visiting the oops.
+    _bs_nm->nmethod_entry_barrier(nm);
+
+    ZNMethod::nmethod_oops_do(nm, _cl);
+  }
+};
+
+class ZHeapIteratorThreadClosure : public ThreadClosure {
+private:
+  OopClosure* const _cl;
+
+  class NMethodVisitor : public CodeBlobToOopClosure {
+  public:
+    NMethodVisitor(OopClosure* cl) :
+        CodeBlobToOopClosure(cl, false /* fix_oop_relocations */) {}
+
+    void do_code_blob(CodeBlob* cb) {
+      assert(!cb->is_nmethod() || !ZNMethod::is_armed(cb->as_nmethod()),
+             "NMethods on stack should have been fixed and disarmed");
+
+      CodeBlobToOopClosure::do_code_blob(cb);
+    }
+  };
+
+public:
+  ZHeapIteratorThreadClosure(OopClosure* cl) : _cl(cl) {}
+
+  void do_thread(Thread* thread) {
+    NMethodVisitor code_cl(_cl);
+    thread->oops_do(_cl, &code_cl);
+  }
+};
+
+void ZHeapIterator::push_strong_roots(const ZHeapIteratorContext& context) {
+  ZHeapIteratorRootOopClosure<false /* Weak */> cl(context);
+  ZHeapIteratorCLDCLosure cld_cl(&cl);
+  ZHeapIteratorNMethodClosure nm_cl(&cl);
+  ZHeapIteratorThreadClosure thread_cl(&cl);
+
+  _concurrent_roots.apply(&cl,
+                          &cld_cl,
+                          &thread_cl,
+                          &nm_cl);
+}
+
+void ZHeapIterator::push_weak_roots(const ZHeapIteratorContext& context) {
+  ZHeapIteratorRootOopClosure<true /* Weak */> cl(context);
+  _concurrent_weak_roots.apply(&cl);
+
+  AlwaysTrueClosure is_alive;
+  _weak_roots.apply(&is_alive, &cl);
+}
+
+template <bool VisitWeaks>
+void ZHeapIterator::push_roots(const ZHeapIteratorContext& context) {
+  push_strong_roots(context);
+  if (VisitWeaks) {
+    push_weak_roots(context);
+  }
 }

 template <bool VisitReferents>
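Note on the hunk above: the new CLD, nmethod, and thread closures are thin adapters that enumerate their category of roots and forward every slot to the same per-oop visitor. A self-contained miniature of that layering; all types here are illustrative, not HotSpot classes:

```cpp
#include <iostream>
#include <vector>

// Illustrative stand-in for OopClosure.
struct OopVisitor {
  virtual void do_oop(int* p) = 0;   // stands in for do_oop(oop*)
  virtual ~OopVisitor() = default;
};

struct NMethodModel { std::vector<int> embedded_oops; };
struct ThreadModel  { std::vector<int> stack_oops; };

// Adapter: enumerates the oops embedded in compiled code and forwards
// each one to the shared per-oop visitor.
struct NMethodVisitor {
  OopVisitor* cl;
  void do_nmethod(NMethodModel& nm) {
    for (int& slot : nm.embedded_oops) cl->do_oop(&slot);
  }
};

// Adapter: enumerates the oops on a thread's stack and forwards them
// to the same visitor.
struct ThreadVisitor {
  OopVisitor* cl;
  void do_thread(ThreadModel& t) {
    for (int& slot : t.stack_oops) cl->do_oop(&slot);
  }
};

struct PrintVisitor : OopVisitor {
  void do_oop(int* p) override { std::cout << "visit " << *p << "\n"; }
};

int main() {
  PrintVisitor cl;                 // single per-oop visitor
  NMethodVisitor nm_cl{&cl};       // code roots funnel into it
  ThreadVisitor thread_cl{&cl};    // stack roots funnel into it

  NMethodModel nm{{1, 2}};
  ThreadModel thread{{3}};
  nm_cl.do_nmethod(nm);
  thread_cl.do_thread(thread);
  return 0;
}
```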
@@ -343,14 +398,9 @@ void ZHeapIterator::drain_and_steal(const ZHeapIteratorContext& context, ObjectC
 }

 template <bool VisitWeaks>
-void ZHeapIterator::object_iterate_inner(const ZHeapIteratorContext& context, ObjectClosure* cl) {
-  push_roots<true /* Concurrent */, false /* Weak */>(context, _concurrent_roots);
-  if (VisitWeaks) {
-    push_roots<false /* Concurrent */, true /* Weak */>(context, _weak_roots);
-    push_roots<true /* Concurrent */, true /* Weak */>(context, _concurrent_weak_roots);
-  }
-
-  drain_and_steal<VisitWeaks>(context, cl);
+void ZHeapIterator::object_iterate_inner(const ZHeapIteratorContext& context, ObjectClosure* object_cl) {
+  push_roots<VisitWeaks>(context);
+  drain_and_steal<VisitWeaks>(context, object_cl);
 }

 void ZHeapIterator::object_iterate(ObjectClosure* cl, uint worker_id) {
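Note on the hunk above: after this change `object_iterate_inner` reads as a two-phase pipeline: seed the per-worker queues from the roots, then drain them (with work stealing) to walk the object graph. A toy single-worker sketch of that shape, with stealing and the mark bitmap omitted and all names hypothetical:

```cpp
#include <deque>
#include <iostream>

// Toy single-worker model: push_roots seeds the queue, drain walks it.
template <bool VisitWeaks>
void iterate(std::deque<int>& queue) {
  // push_roots<VisitWeaks>: strong roots always, weak roots on request.
  queue.push_back(1);              // a strong root
  if (VisitWeaks) {
    queue.push_back(2);            // a weak root
  }

  // drain_and_steal: process entries until the queue is empty.
  while (!queue.empty()) {
    int obj = queue.front();
    queue.pop_front();
    std::cout << "visit " << obj << "\n";
    // A real walk would push obj's unvisited references here.
  }
}

int main() {
  std::deque<int> q;
  iterate<true>(q);
  return 0;
}
```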