Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -134,6 +134,7 @@ public final class GCImpl implements GC {

private final CollectionPolicy policy;
private boolean completeCollection = false;
private boolean outOfMemoryCollection = false;
private UnsignedWord collectionEpoch = Word.zero();
private long lastWholeHeapExaminedNanos = -1;

Expand Down Expand Up @@ -299,13 +300,12 @@ private boolean collectImpl(GCCause cause, long beginNanoTime, boolean forceFull
try {
outOfMemory = doCollectImpl(cause, beginNanoTime, forceFullGC, false);
if (outOfMemory) {
// Avoid running out of memory with a full GC that reclaims softly reachable objects
ReferenceObjectProcessing.setSoftReferencesAreWeak(true);
outOfMemoryCollection = true; // increase eagerness to free memory
try {
verifyHeap(During);
outOfMemory = doCollectImpl(cause, System.nanoTime(), true, true);
} finally {
ReferenceObjectProcessing.setSoftReferencesAreWeak(false);
outOfMemoryCollection = false;
}
}
} finally {
Expand Down Expand Up @@ -517,11 +517,22 @@ public void collectCompletely(GCCause cause) {
collect(cause, true);
}

/** Whether the collection currently in progress is a complete (non-incremental) collection. */
@AlwaysInline("GC performance")
@Uninterruptible(reason = "Called from uninterruptible code.", mayBeInlined = true)
public boolean isCompleteCollection() {
return completeCollection;
}

/**
 * Whether the current collection is intended to be more aggressive as a last resort to avoid an
 * out of memory condition. During such a collection, memory is freed more eagerly, e.g. softly
 * reachable objects are reclaimed (soft references are treated as weak).
 */
@AlwaysInline("GC performance")
@Uninterruptible(reason = "Called from uninterruptible code.", mayBeInlined = true)
public boolean isOutOfMemoryCollection() {
return outOfMemoryCollection;
}

/** Collect, either incrementally or completely, and process discovered references. */
private void doCollectCore(boolean incremental) {
GreyToBlackObjRefVisitor.Counters counters = greyToBlackObjRefVisitor.openCounters();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,6 @@
import com.oracle.svm.core.heap.ReferenceInternals;
import com.oracle.svm.core.hub.DynamicHub;
import com.oracle.svm.core.snippets.KnownIntrinsics;
import com.oracle.svm.core.thread.VMOperation;
import com.oracle.svm.core.util.UnsignedUtils;

import jdk.graal.compiler.word.Word;
Expand All @@ -61,9 +60,6 @@ final class ReferenceObjectProcessing {
*/
private static UnsignedWord maxSoftRefAccessIntervalMs = UnsignedUtils.MAX_VALUE;

/** Treat all soft references as weak, typically to reclaim space when low on memory. */
private static boolean softReferencesAreWeak = false;

/**
* The first timestamp that was set as {@link SoftReference} clock, for examining references
* that were created earlier than that.
Expand All @@ -73,13 +69,14 @@ final class ReferenceObjectProcessing {
private ReferenceObjectProcessing() { // all static
}

/*
* Enables (or disables) reclaiming all objects that are softly reachable only, typically as a
* last resort to avoid running out of memory.
/**
* Whether to treat all soft references as weak, typically as a last resort to reclaim extra
* objects when running out of memory.
*/
public static void setSoftReferencesAreWeak(boolean enabled) {
assert VMOperation.isGCInProgress();
softReferencesAreWeak = enabled;
@AlwaysInline("GC performance")
@Uninterruptible(reason = "Called from uninterruptible code.", mayBeInlined = true)
private static boolean areAllSoftReferencesWeak() {
return GCImpl.getGCImpl().isOutOfMemoryCollection();
}

@AlwaysInline("GC performance")
Expand Down Expand Up @@ -128,7 +125,7 @@ private static void discover(Object obj, ObjectReferenceVisitor refVisitor) {
RememberedSet.get().dirtyCardIfNecessary(dr, refObject, getReferentFieldAddress(dr));
return;
}
if (!softReferencesAreWeak && dr instanceof SoftReference) {
if (!areAllSoftReferencesWeak() && dr instanceof SoftReference) {
long clock = ReferenceInternals.getSoftReferenceClock();
long timestamp = ReferenceInternals.getSoftReferenceTimestamp((SoftReference<?>) dr);
if (timestamp == 0) { // created or last accessed before the clock was initialized
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@

import com.oracle.svm.core.config.ConfigurationValues;
import com.oracle.svm.core.genscavenge.AlignedHeapChunk;
import com.oracle.svm.core.genscavenge.GCImpl;
import com.oracle.svm.core.genscavenge.HeapChunk;
import com.oracle.svm.core.genscavenge.ObjectHeaderImpl;
import com.oracle.svm.core.genscavenge.Space;
Expand Down Expand Up @@ -121,7 +122,7 @@ public boolean visitChunk(AlignedHeapChunk.AlignedHeader chunk) {

} else { // not marked, i.e. not alive and start of a gap of yet unknown size
if (objSeqSize.notEqual(0)) { // end of an object sequence
Pointer newAddress = sweeping ? objSeq : allocate(objSeqSize);
Pointer newAddress = sweeping ? objSeq : allocate(objSeqSize, chunk);
ObjectMoveInfo.setNewAddress(objSeq, newAddress);
ObjectMoveInfo.setObjectSeqSize(objSeq, objSeqSize);

Expand All @@ -144,14 +145,13 @@ public boolean visitChunk(AlignedHeapChunk.AlignedHeader chunk) {
UnsignedWord newTopOffset = chunk.getTopOffset(CHUNK_HEADER_TOP_IDENTITY).subtract(gapSize);
chunk.setTopOffset(newTopOffset, CHUNK_HEADER_TOP_IDENTITY);
} else if (objSeqSize.notEqual(0)) {
Pointer newAddress = sweeping ? objSeq : allocate(objSeqSize);
Pointer newAddress = sweeping ? objSeq : allocate(objSeqSize, chunk);
ObjectMoveInfo.setNewAddress(objSeq, newAddress);
ObjectMoveInfo.setObjectSeqSize(objSeq, objSeqSize);
}

if (sweeping && chunk.equal(allocChunk)) {
/* Continue allocating for compaction after the swept memory. */
allocPointer = HeapChunk.getTopPointer(chunk);
allocPointer = getSweptChunkAllocationPointer(chunk);
}

/* Set remaining brick table entries at chunk end. */
Expand All @@ -164,20 +164,34 @@ public boolean visitChunk(AlignedHeapChunk.AlignedHeader chunk) {
return true;
}

private Pointer allocate(UnsignedWord size) {
private Pointer allocate(UnsignedWord size, AlignedHeapChunk.AlignedHeader currentChunk) {
assert size.belowOrEqual(AlignedHeapChunk.getUsableSizeForObjects());
Pointer p = allocPointer;
allocPointer = p.add(size);
while (allocPointer.aboveThan(AlignedHeapChunk.getObjectsEnd(allocChunk))) {
assert !allocChunk.equal(currentChunk) : "must not advance past currently processed chunk";
allocChunk = HeapChunk.getNext(allocChunk);
assert allocChunk.isNonNull();
if (allocChunk.getShouldSweepInsteadOfCompact()) {
p = HeapChunk.getTopPointer(allocChunk); // use any free memory at the end
p = getSweptChunkAllocationPointer(allocChunk);
} else {
p = AlignedHeapChunk.getObjectsStart(allocChunk);
}
allocPointer = p.add(size);
}
return p;
}

/**
 * Returns the address at which compaction allocation continues in a chunk that is swept
 * rather than compacted.
 */
private static Pointer getSweptChunkAllocationPointer(AlignedHeapChunk.AlignedHeader chunk) {
    assert chunk.getShouldSweepInsteadOfCompact();
    if (!GCImpl.getGCImpl().isOutOfMemoryCollection()) {
        /*
         * Continue allocation for compaction in the next chunk: moving other objects into this
         * chunk is likely to increase future fragmentation and sweeping effort until the chunk
         * can participate in compaction again.
         */
        return AlignedHeapChunk.getObjectsEnd(chunk);
    }
    // Last resort to avoid running out of memory: use any free memory at the chunk's end.
    return HeapChunk.getTopPointer(chunk);
}
}