
Bug 782337 - Stack clobbering (r=bhackett,a=lsblakk)

1 parent cfd424c · commit 24907de111f2220ba12684aa59a93a06ecede848 · @bill-mccloskey committed Aug 16, 2012
Showing with 29 additions and 19 deletions.
  1. +8 −1 js/src/jsgc.cpp
  2. +18 −16 js/src/vm/Stack.cpp
  3. +3 −2 js/src/vm/Stack.h
js/src/jsgc.cpp
@@ -2468,7 +2468,7 @@ MarkRuntime(JSTracer *trc, bool useSavedRoots = false)
mjit::ExpandInlineFrames(c);
#endif
- rt->stackSpace.mark(trc);
+ rt->stackSpace.markAndClobber(trc);
rt->debugScopes->mark(trc);
/* The embedding can register additional roots here. */
@@ -3436,6 +3436,13 @@ SweepPhase(JSRuntime *rt, JSGCInvocationKind gckind, bool *startBackgroundSweep)
{
gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP_COMPARTMENTS);
+ /*
+ * Eliminate any garbage values from the VM stack that may have been
+ * left by the JIT in between incremental GC slices. We need to do this
+ * before discarding analysis data during JSCompartment::sweep.
+ */
+ rt->stackSpace.markAndClobber(NULL);
+
bool releaseTypes = ReleaseObservedTypes(rt);
for (CompartmentsIter c(rt); !c.done(); c.next()) {
if (c->isCollecting())
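
The comment added to SweepPhase spells out the ordering constraint: the VM stack has to be scrubbed before JSCompartment::sweep discards the analysis data that says which slots are dead, and passing NULL asks markAndClobber to do only the scrubbing. Below is a minimal, self-contained sketch of that dual-mode behaviour at the level of a single slot; Tracer, Slot, markSlotRoot, and the discardingJitCode flag are illustrative stand-ins (the flag plays the role of script->compartment()->isDiscardingJitCode(trc)), not the real SpiderMonkey types.

    #include <cstdio>

    struct Tracer { };                       // stands in for JSTracer
    struct Slot { void *bits; bool live; };  // one VM-stack slot plus its liveness

    static void markSlotRoot(Tracer *, Slot *s) {
        std::printf("marking %p\n", s->bits); // placeholder for gc::MarkValueRoot
    }

    // With a tracer: live slots are marked, dead ones are rewritten only when
    // JIT code is being discarded. With trc == nullptr (the SweepPhase call):
    // marking is skipped and dead slots are always rewritten.
    static void markAndClobberSlot(Tracer *trc, Slot *s, bool discardingJitCode) {
        if (s->live) {
            if (trc)
                markSlotRoot(trc, s);
            return;
        }
        if (!trc || discardingJitCode) {
            // Dead slot: the JIT may have left garbage here between incremental
            // slices, so overwrite it with something a later slice can mark safely.
            s->bits = nullptr;
        }
    }

    int main() {
        Slot dead = { reinterpret_cast<void *>(0x1), false };
        markAndClobberSlot(nullptr, &dead, false); // SweepPhase-style call: clobber only
        return dead.bits == nullptr ? 0 : 1;       // expect 0: the dead slot was scrubbed
    }

Calling this with a real tracer corresponds to the MarkRuntime path above; the nullptr call corresponds to the new SweepPhase call.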
js/src/vm/Stack.cpp
@@ -614,13 +614,14 @@ StackSpace::containingSegment(const StackFrame *target) const
}
void
-StackSpace::markFrameValues(JSTracer *trc, StackFrame *fp, Value *slotsEnd, jsbytecode *pc)
+StackSpace::markAndClobberFrame(JSTracer *trc, StackFrame *fp, Value *slotsEnd, jsbytecode *pc)
{
Value *slotsBegin = fp->slots();
if (!fp->isScriptFrame()) {
JS_ASSERT(fp->isDummyFrame());
- gc::MarkValueRootRange(trc, slotsBegin, slotsEnd, "vm_stack");
+ if (trc)
+ gc::MarkValueRootRange(trc, slotsBegin, slotsEnd, "vm_stack");
return;
}
@@ -629,7 +630,8 @@ StackSpace::markFrameValues(JSTracer *trc, StackFrame *fp, Value *slotsEnd, jsby
JSScript *script = fp->script();
if (!script->hasAnalysis() || !script->analysis()->ranLifetimes()) {
- gc::MarkValueRootRange(trc, slotsBegin, slotsEnd, "vm_stack");
+ if (trc)
+ gc::MarkValueRootRange(trc, slotsBegin, slotsEnd, "vm_stack");
return;
}
@@ -641,6 +643,7 @@ StackSpace::markFrameValues(JSTracer *trc, StackFrame *fp, Value *slotsEnd, jsby
* results are thrown away during the sweeping phase, so we always have at
* least one GC to do this.
*/
+ JSRuntime *rt = script->compartment()->rt;
analyze::AutoEnterAnalysis aea(script->compartment());
analyze::ScriptAnalysis *analysis = script->analysis();
uint32_t offset = pc - script->code;
@@ -650,8 +653,9 @@ StackSpace::markFrameValues(JSTracer *trc, StackFrame *fp, Value *slotsEnd, jsby
/* Will this slot be synced by the JIT? */
if (!analysis->trackSlot(slot) || analysis->liveness(slot).live(offset)) {
- gc::MarkValueRoot(trc, vp, "vm_stack");
- } else if (script->compartment()->isDiscardingJitCode(trc)) {
+ if (trc)
+ gc::MarkValueRoot(trc, vp, "vm_stack");
+ } else if (!trc || script->compartment()->isDiscardingJitCode(trc)) {
/*
* If we're throwing away analysis information, we need to replace
* non-live Values with ones that can safely be marked in later
@@ -676,7 +680,7 @@ StackSpace::markFrameValues(JSTracer *trc, StackFrame *fp, Value *slotsEnd, jsby
else if (type == JSVAL_TYPE_BOOLEAN)
*vp = BooleanValue(false);
else if (type == JSVAL_TYPE_STRING)
- *vp = StringValue(trc->runtime->atomState.nullAtom);
+ *vp = StringValue(rt->atomState.nullAtom);
else if (type == JSVAL_TYPE_NULL)
*vp = NullValue();
else if (type == JSVAL_TYPE_OBJECT)
@@ -685,17 +689,13 @@ StackSpace::markFrameValues(JSTracer *trc, StackFrame *fp, Value *slotsEnd, jsby
}
}
- gc::MarkValueRootRange(trc, fixedEnd, slotsEnd, "vm_stack");
+ if (trc)
+ gc::MarkValueRootRange(trc, fixedEnd, slotsEnd, "vm_stack");
}
void
-StackSpace::mark(JSTracer *trc)
+StackSpace::markAndClobber(JSTracer *trc)
{
- /*
- * JIT code can leave values in an incoherent (i.e., unsafe for precise
- * marking) state, hence MarkStackRangeConservatively.
- */
-
/* NB: this depends on the continuity of segments in memory. */
Value *nextSegEnd = firstUnused();
for (StackSegment *seg = seg_; seg; seg = seg->prevInMemory()) {
@@ -713,16 +713,18 @@ StackSpace::mark(JSTracer *trc)
jsbytecode *pc = seg->maybepc();
for (StackFrame *fp = seg->maybefp(); (Value *)fp > (Value *)seg; fp = fp->prev()) {
/* Mark from fp->slots() to slotsEnd. */
- markFrameValues(trc, fp, slotsEnd, pc);
+ markAndClobberFrame(trc, fp, slotsEnd, pc);
- fp->mark(trc);
+ if (trc)
+ fp->mark(trc);
slotsEnd = (Value *)fp;
InlinedSite *site;
pc = fp->prevpc(&site);
JS_ASSERT_IF(fp->prev(), !site);
}
- gc::MarkValueRootRange(trc, seg->slotsBegin(), slotsEnd, "vm_stack");
+ if (trc)
+ gc::MarkValueRootRange(trc, seg->slotsBegin(), slotsEnd, "vm_stack");
nextSegEnd = (Value *)seg;
}
}
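
Dead slots are not simply skipped: the JIT may have left bits in them that a later incremental slice would misinterpret, so each one is overwritten with a harmless value of the type the analysis expects (false for booleans, the null atom for strings, NullValue() for null, and so on). That is also why the hunk switches from trc->runtime to a locally computed rt: trc can now be NULL, so the runtime has to come from script->compartment()->rt instead. A rough sketch of the per-type placeholder idea follows, using a toy Value and InferredType rather than the real jsval representation.

    #include <cstdint>

    enum class InferredType { Int32, Double, Boolean, String, Null, Object };

    // A toy tagged value; the real code operates on js::Value.
    struct Value {
        InferredType type;
        union { int32_t i; double d; bool b; const void *ptr; };
    };

    static const char *const kNullAtom = "";  // stands in for rt->atomState.nullAtom

    // Build a value of the inferred type that a later GC slice can mark safely,
    // mirroring the else-if chain in the hunk above.
    static Value safePlaceholder(InferredType type) {
        Value v{};
        v.type = type;
        switch (type) {
          case InferredType::Int32:   v.i = 0;           break;
          case InferredType::Double:  v.d = 0.0;         break;
          case InferredType::Boolean: v.b = false;       break;
          case InferredType::String:  v.ptr = kNullAtom; break; // an always-live atom
          case InferredType::Null:
          case InferredType::Object:  v.ptr = nullptr;   break; // the object case is cut
        }                                                       // off by the hunk above;
        return v;                                               // nullptr is a stand-in
    }

    int main() {
        Value dead = safePlaceholder(InferredType::Boolean);
        return dead.b ? 1 : 0;   // expect 0: dead boolean slots become `false`
    }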
js/src/vm/Stack.h
@@ -1360,6 +1360,8 @@ class StackSpace
return (Value *)fp >= base_ && (Value *)fp <= trustedEnd_;
}
+ void markAndClobberFrame(JSTracer *trc, StackFrame *fp, Value *slotsEnd, jsbytecode *pc);
+
public:
StackSpace();
bool init();
@@ -1411,8 +1413,7 @@ class StackSpace
bool tryBumpLimit(JSContext *cx, Value *from, unsigned nvals, Value **limit);
/* Called during GC: mark segments, frames, and slots under firstUnused. */
- void mark(JSTracer *trc);
- void markFrameValues(JSTracer *trc, StackFrame *fp, Value *slotsEnd, jsbytecode *pc);
+ void markAndClobber(JSTracer *trc);
/* Called during GC: sets active flag on compartments with active frames. */
void markActiveCompartments();
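
Taken together, the header change leaves a single GC entry point that jsgc.cpp now calls twice: once with a real tracer from MarkRuntime and once with NULL from SweepPhase. A compact outline of that call order, using stub types in place of JSRuntime and StackSpace:

    struct Tracer { };

    struct StackSpace {
        // Marks live slots when trc is non-null; scrubs dead ones (always when
        // trc is null, otherwise only while discarding JIT code).
        void markAndClobber(Tracer *trc) { (void)trc; }
    };

    struct Runtime { StackSpace stackSpace; };

    void markRuntime(Runtime *rt, Tracer *trc) {
        rt->stackSpace.markAndClobber(trc);      // normal marking slice
    }

    void sweepPhase(Runtime *rt) {
        rt->stackSpace.markAndClobber(nullptr);  // scrub the stack before
                                                 // JSCompartment::sweep discards
                                                 // analysis data
    }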
