Skip to content

Commit 92bde67 (parent: db9834f) — "8271946: Cleanup leftovers in Space and subclasses". Reviewed-by: stefank, tschatzl.

File tree

3 files changed: +164 additions, -263 deletions.

src/hotspot/share/gc/shared/space.cpp

Lines changed: 163 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -393,7 +393,84 @@ HeapWord* CompactibleSpace::forward(oop q, size_t size,
393393
#if INCLUDE_SERIALGC
394394

395395
// Mark-compact phase 2 for this space: walk all objects from bottom() to top(),
// assign each live object its post-compaction address (stored via forward(), i.e.
// in the object's mark), and thread dead regions so later phases can skip them.
// Used by MarkSweep's mark_sweep_phase2().
//
// Side effects on this space (read back by adjust_pointers()/compact()):
//   _end_of_live  - one past the last live object scanned.
//   _first_dead   - address of the first dead region, or end_of_live if none.
// Side effects on *cp: cp->space/cp->threshold are initialized on first use, and
// cp->space's compaction_top is advanced past everything forwarded here.
void ContiguousSpace::prepare_for_compaction(CompactPoint* cp) {
  // Compute the new addresses for the live objects and store it in the mark
  // Used by universe::mark_sweep_phase2()

  // We're sure to be here before any objects are compacted into this
  // space, so this is a good time to initialize this:
  set_compaction_top(bottom());

  if (cp->space == NULL) {
    // First space of the compaction: point the CompactPoint at the
    // generation's first compaction space (which must be this one).
    assert(cp->gen != NULL, "need a generation");
    assert(cp->threshold == NULL, "just checking");
    assert(cp->gen->first_compaction_space() == this, "just checking");
    cp->space = cp->gen->first_compaction_space();
    cp->threshold = cp->space->initialize_threshold();
    cp->space->set_compaction_top(cp->space->bottom());
  }

  HeapWord* compact_top = cp->space->compaction_top(); // This is where we are currently compacting to.

  // Heuristic that may keep some dead space "live" to reduce object movement.
  DeadSpacer dead_spacer(this);

  HeapWord* end_of_live = bottom();  // One byte beyond the last byte of the last live object.
  HeapWord* first_dead = NULL; // The first dead object.

  const intx interval = PrefetchScanIntervalInBytes;

  HeapWord* cur_obj = bottom();
  HeapWord* scan_limit = top();

  while (cur_obj < scan_limit) {
    if (cast_to_oop(cur_obj)->is_gc_marked()) {
      // prefetch beyond cur_obj
      Prefetch::write(cur_obj, interval);
      size_t size = cast_to_oop(cur_obj)->size();
      // Record the destination address in the object's mark word.
      compact_top = cp->space->forward(cast_to_oop(cur_obj), size, cp, compact_top);
      cur_obj += size;
      end_of_live = cur_obj;
    } else {
      // run over all the contiguous dead objects
      HeapWord* end = cur_obj;
      do {
        // prefetch beyond end
        Prefetch::write(end, interval);
        end += cast_to_oop(end)->size();
      } while (end < scan_limit && !cast_to_oop(end)->is_gc_marked());

      // see if we might want to pretend this object is alive so that
      // we don't have to compact quite as often.
      if (cur_obj == compact_top && dead_spacer.insert_deadspace(cur_obj, end)) {
        // Dead space accepted: forward it as if it were live so objects
        // behind it stay in place.
        oop obj = cast_to_oop(cur_obj);
        compact_top = cp->space->forward(obj, obj->size(), cp, compact_top);
        end_of_live = end;
      } else {
        // otherwise, it really is a free region.

        // cur_obj is a pointer to a dead object. Use this dead memory to store a pointer to the next live object.
        *(HeapWord**)cur_obj = end;

        // see if this is the first dead region.
        if (first_dead == NULL) {
          first_dead = cur_obj;
        }
      }

      // move on to the next object
      cur_obj = end;
    }
  }

  assert(cur_obj == scan_limit, "just checking");
  _end_of_live = end_of_live;
  if (first_dead != NULL) {
    _first_dead = first_dead;
  } else {
    // No dead region found: make _first_dead == _end_of_live so the later
    // phases treat the whole live prefix as non-moving.
    _first_dead = end_of_live;
  }

  // save the compaction_top of the compaction space.
  cp->space->set_compaction_top(compact_top);
}
398475

399476
void CompactibleSpace::adjust_pointers() {
@@ -402,11 +479,94 @@ void CompactibleSpace::adjust_pointers() {
402479
return; // Nothing to do.
403480
}
404481

405-
scan_and_adjust_pointers(this);
482+
// adjust all the interior pointers to point at the new locations of objects
483+
// Used by MarkSweep::mark_sweep_phase3()
484+
485+
HeapWord* cur_obj = bottom();
486+
HeapWord* const end_of_live = _end_of_live; // Established by prepare_for_compaction().
487+
HeapWord* const first_dead = _first_dead; // Established by prepare_for_compaction().
488+
489+
assert(first_dead <= end_of_live, "Stands to reason, no?");
490+
491+
const intx interval = PrefetchScanIntervalInBytes;
492+
493+
debug_only(HeapWord* prev_obj = NULL);
494+
while (cur_obj < end_of_live) {
495+
Prefetch::write(cur_obj, interval);
496+
if (cur_obj < first_dead || cast_to_oop(cur_obj)->is_gc_marked()) {
497+
// cur_obj is alive
498+
// point all the oops to the new location
499+
size_t size = MarkSweep::adjust_pointers(cast_to_oop(cur_obj));
500+
debug_only(prev_obj = cur_obj);
501+
cur_obj += size;
502+
} else {
503+
debug_only(prev_obj = cur_obj);
504+
// cur_obj is not a live object, instead it points at the next live object
505+
cur_obj = *(HeapWord**)cur_obj;
506+
assert(cur_obj > prev_obj, "we should be moving forward through memory, cur_obj: " PTR_FORMAT ", prev_obj: " PTR_FORMAT, p2i(cur_obj), p2i(prev_obj));
507+
}
508+
}
509+
510+
assert(cur_obj == end_of_live, "just checking");
406511
}
407512

408513
// Mark-compact phase 4 for this space: slide every live object to the
// destination address recorded in its forwardee, reinitializing its mark word
// after the copy. Dead regions are skipped via the next-live-object pointer
// that prepare_for_compaction() stored in their first word.
// Used by MarkSweep::mark_sweep_phase4().
void CompactibleSpace::compact() {
  // Copy all live objects to their new location
  // Used by MarkSweep::mark_sweep_phase4()

  verify_up_to_first_dead(this);

  HeapWord* const start = bottom();
  HeapWord* const end_of_live = _end_of_live;

  assert(_first_dead <= end_of_live, "Invariant. _first_dead: " PTR_FORMAT " <= end_of_live: " PTR_FORMAT, p2i(_first_dead), p2i(end_of_live));
  if (_first_dead == end_of_live && (start == end_of_live || !cast_to_oop(start)->is_gc_marked())) {
    // Nothing to compact. The space is either empty or all live object should be left in place.
    clear_empty_region(this);
    return;
  }

  const intx scan_interval = PrefetchScanIntervalInBytes;
  const intx copy_interval = PrefetchCopyIntervalInBytes;

  assert(start < end_of_live, "bottom: " PTR_FORMAT " should be < end_of_live: " PTR_FORMAT, p2i(start), p2i(end_of_live));
  HeapWord* cur_obj = start;
  if (_first_dead > cur_obj && !cast_to_oop(cur_obj)->is_gc_marked()) {
    // All object before _first_dead can be skipped. They should not be moved.
    // A pointer to the first live object is stored at the memory location for _first_dead.
    cur_obj = *(HeapWord**)(_first_dead);
  }

  debug_only(HeapWord* prev_obj = NULL);
  while (cur_obj < end_of_live) {
    if (!cast_to_oop(cur_obj)->is_gc_marked()) {
      debug_only(prev_obj = cur_obj);
      // The first word of the dead object contains a pointer to the next live object or end of space.
      cur_obj = *(HeapWord**)cur_obj;
      assert(cur_obj > prev_obj, "we should be moving forward through memory");
    } else {
      // prefetch beyond q
      Prefetch::read(cur_obj, scan_interval);

      // size and destination
      size_t size = cast_to_oop(cur_obj)->size();
      // Destination was recorded in the mark word by prepare_for_compaction().
      HeapWord* compaction_top = cast_from_oop<HeapWord*>(cast_to_oop(cur_obj)->forwardee());

      // prefetch beyond compaction_top
      Prefetch::write(compaction_top, copy_interval);

      // copy object and reinit its mark
      assert(cur_obj != compaction_top, "everything in this pass should be moving");
      // Source and destination may overlap, but both are word-aligned and the
      // copy slides downward, which aligned_conjoint_words handles.
      Copy::aligned_conjoint_words(cur_obj, compaction_top, size);
      // The mark word held the forwarding pointer; restore a neutral mark.
      cast_to_oop(compaction_top)->init_mark();
      assert(cast_to_oop(compaction_top)->klass() != NULL, "should have a class");

      debug_only(prev_obj = cur_obj);
      cur_obj += size;
    }
  }

  clear_empty_region(this);
}
411571

412572
#endif // INCLUDE_SERIALGC

src/hotspot/share/gc/shared/space.hpp

Lines changed: 0 additions & 74 deletions
Original file line numberDiff line numberDiff line change
@@ -308,49 +308,12 @@ class CompactPoint : public StackObj {
308308
// necessarily, a space that is normally contiguous. But, for example, a
309309
// free-list-based space whose normal collection is a mark-sweep without
310310
// compaction could still support compaction in full GC's.
311-
//
312-
// The compaction operations are implemented by the
313-
// scan_and_{adjust_pointers,compact,forward} function templates.
314-
// The following are, non-virtual, auxiliary functions used by these function templates:
315-
// - scan_limit()
316-
// - scanned_block_is_obj()
317-
// - scanned_block_size()
318-
// - adjust_obj_size()
319-
// - obj_size()
320-
// These functions are to be used exclusively by the scan_and_* function templates,
321-
// and must be defined for all (non-abstract) subclasses of CompactibleSpace.
322-
//
323-
// NOTE: Any subclasses to CompactibleSpace wanting to change/define the behavior
324-
// in any of the auxiliary functions must also override the corresponding
325-
// prepare_for_compaction/adjust_pointers/compact functions using them.
326-
// If not, such changes will not be used or have no effect on the compaction operations.
327-
//
328-
// This translates to the following dependencies:
329-
// Overrides/definitions of
330-
// - scan_limit
331-
// - scanned_block_is_obj
332-
// - scanned_block_size
333-
// require override/definition of prepare_for_compaction().
334-
// Similar dependencies exist between
335-
// - adjust_obj_size and adjust_pointers()
336-
// - obj_size and compact().
337-
//
338-
// Additionally, this also means that changes to block_size() or block_is_obj() that
339-
// should be effective during the compaction operations must provide a corresponding
340-
// definition of scanned_block_size/scanned_block_is_obj respectively.
341311
class CompactibleSpace: public Space {
342312
friend class VMStructs;
343313
private:
344314
HeapWord* _compaction_top;
345315
CompactibleSpace* _next_compaction_space;
346316

347-
// Auxiliary functions for scan_and_{forward,adjust_pointers,compact} support.
348-
inline size_t adjust_obj_size(size_t size) const {
349-
return size;
350-
}
351-
352-
inline size_t obj_size(const HeapWord* addr) const;
353-
354317
template <class SpaceType>
355318
static inline void verify_up_to_first_dead(SpaceType* space) NOT_DEBUG_RETURN;
356319

@@ -451,27 +414,6 @@ class CompactibleSpace: public Space {
451414
virtual HeapWord* cross_threshold(HeapWord* start, HeapWord* the_end) {
452415
return end();
453416
}
454-
455-
// Below are template functions for scan_and_* algorithms (avoiding virtual calls).
456-
// The space argument should be a subclass of CompactibleSpace, implementing
457-
// scan_limit(), scanned_block_is_obj(), and scanned_block_size(),
458-
// and possibly also overriding obj_size(), and adjust_obj_size().
459-
// These functions should avoid virtual calls whenever possible.
460-
461-
#if INCLUDE_SERIALGC
462-
// Frequently calls adjust_obj_size().
463-
template <class SpaceType>
464-
static inline void scan_and_adjust_pointers(SpaceType* space);
465-
#endif
466-
467-
// Frequently calls obj_size().
468-
template <class SpaceType>
469-
static inline void scan_and_compact(SpaceType* space);
470-
471-
// Frequently calls scanned_block_is_obj() and scanned_block_size().
472-
// Requires the scan_limit() function.
473-
template <class SpaceType>
474-
static inline void scan_and_forward(SpaceType* space, CompactPoint* cp);
475417
};
476418

477419
class GenSpaceMangler;
@@ -480,22 +422,6 @@ class GenSpaceMangler;
480422
// faster allocation, and compaction.
481423
class ContiguousSpace: public CompactibleSpace {
482424
friend class VMStructs;
483-
// Allow scan_and_forward function to call (private) overrides for auxiliary functions on this class
484-
template <typename SpaceType>
485-
friend void CompactibleSpace::scan_and_forward(SpaceType* space, CompactPoint* cp);
486-
487-
private:
488-
// Auxiliary functions for scan_and_forward support.
489-
// See comments for CompactibleSpace for more information.
490-
inline HeapWord* scan_limit() const {
491-
return top();
492-
}
493-
494-
inline bool scanned_block_is_obj(const HeapWord* addr) const {
495-
return true; // Always true, since scan_limit is top
496-
}
497-
498-
inline size_t scanned_block_size(const HeapWord* addr) const;
499425

500426
protected:
501427
HeapWord* _top;

0 commit comments

Comments
 (0)