Fix stack GC invariant

When a context is activated, if it's mature, it's remembered. This keeps
us from having to run the write barrier when the stack of the context is
accessed.
commit dbe822928513c5ecb3a16bb6150bf3829184055a (1 parent: 9f6c3f2)
Evan Phoenix authored
6 rakelib/vm.rake
@@ -5,7 +5,11 @@ task :vm => 'vm/vm'
############################################################
# Files, Flags, & Constants
-LLVM_STYLE = "Release"
+if ENV['LLVM_DEBUG']
+ LLVM_STYLE = "Debug"
+else
+ LLVM_STYLE = "Release"
+end
ENV.delete 'CDPATH' # confuses llvm_config
LLVM_CONFIG = "vm/external_libs/llvm/#{LLVM_STYLE}/bin/llvm-config"
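With this change, defining LLVM_DEBUG in the environment when building the VM (for example, LLVM_DEBUG=1 rake vm) should point LLVM_CONFIG at LLVM's Debug build tree instead of Release; the exact invocation is an assumption based on the :vm task above, not something stated in this commit.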
4 vm/builtin/contexts.hpp
@@ -82,6 +82,10 @@ namespace rubinius {
*js.stack = val;
}
+ OBJECT stack_at(size_t pos) {
+ return stk[pos];
+ }
+
void position_stack(int pos) {
js.stack = stk + pos;
}
11 vm/builtin/task.cpp
@@ -102,6 +102,12 @@ namespace rubinius {
SET(this, active, ctx);
SET(this, home, ctx->home);
SET(this, self, home->self);
+
+ /* Stack Management procedures. Make sure that we don't
+ * miss objects stored into the stack of a context */
+ if(ctx->zone == MatureObjectZone) {
+ state->om->remember_object(ctx);
+ }
}
void Task::make_active(MethodContext* ctx) {
@@ -499,6 +505,8 @@ namespace rubinius {
return mod;
}
+ /* Used only in debugging and testing. Direct access to the stack
+ * can be dangerous. */
OBJECT* Task::current_stack() {
return active->stk;
}
@@ -515,8 +523,9 @@ namespace rubinius {
return active->top();
}
+ /* Retrieve the object at position +pos+ in the current context */
OBJECT Task::stack_at(size_t pos) {
- return active->stk[pos];
+ return active->stack_at(pos);
}
int Task::calculate_sp() {
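Note that stack_at above only reads from the active context, so no barrier is involved. By the rule spelled out in vm/doc/stack_management.txt below, a Task method that wrote into the stack of a context other than the active one would have to pair the store with the write barrier. A hedged sketch of that shape (put_stack_at is hypothetical; no such method exists in this commit):

    /* Hypothetical sketch only, illustrating the rule from
     * vm/doc/stack_management.txt. Not part of this commit. */
    void Task::put_stack_at(MethodContext* ctx, size_t pos, OBJECT val) {
      ctx->stk[pos] = val;
      /* ctx may be mature and not yet remembered, so run the barrier. */
      state->om->write_barrier(ctx, val);
    }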
1  vm/doc/context_cache.txt
@@ -9,6 +9,7 @@ Squeak context cache
*) On context allocation (from heap or cache, reclaimable count is incremented
*) On return, if reclaimable is greater than 0, recycle the context we were using,
and decrement reclaimable.
+ *) Only try to reclaim contexts in the young area.
*) When switching to a new Process, zero out reclaimable.
*) This protects all existing contexts from being reused.
*) Pushing the active context also zero's out reclaimable, so that context
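A toy rendering of the reclamation rules listed above, as a self-contained sketch (names are illustrative stand-ins; the real bookkeeping lives in the VM's context allocation and return paths):

    #include <cassert>

    /* Tracks how many recently allocated contexts are still private to the
     * running Process and are therefore candidates for recycling on return. */
    struct ReclaimRules {
      int reclaimable = 0;

      void on_allocate()            { reclaimable++; }   /* fresh from heap or cache */
      bool can_recycle(bool young)  { return reclaimable > 0 && young; }
      void on_recycle()             { reclaimable--; }
      void on_process_switch()      { reclaimable = 0; } /* protect existing contexts */
      void on_push_active_context() { reclaimable = 0; } /* context escaped; protect it */
    };

    int main() {
      ReclaimRules r;
      r.on_allocate();
      assert(r.can_recycle(true));     /* young and private: may be reused */
      assert(!r.can_recycle(false));   /* mature contexts are never reclaimed */
      r.on_push_active_context();
      assert(!r.can_recycle(true));    /* escaping zeroes the reclaimable count */
      return 0;
    }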
17 vm/doc/stack_management.txt
@@ -0,0 +1,17 @@
+=== Stack Management ===
+A stack, rather than being a separate Tuple object, is simply a variable
+size array located inside a MethodContext (meaning MethodContext objects
+are variable size).
+
+When running code, the stack of the current MethodContext is stored into
+directly, without running any write barrier. This means, though, that if a
+MethodContext is old and a new object is stored into its stack, the
+MethodContext must be checked at young GC collection time.
+
+A simple solution is to check if the MethodContext is old before it's
+activated. If it is, add it to the remember set if it's not already there.
+
+Any Task method that touches the stack of a context that is not the active
+one also needs to check and run the write barrier.
+
+
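A self-contained toy model of the scheme described above (all names here are illustrative stand-ins, not the real Rubinius classes):

    #include <cassert>
    #include <vector>

    enum Zone { YoungObjectZone, MatureObjectZone };

    struct Context {
      Zone zone = YoungObjectZone;
      bool Remember = false;
    };

    struct Memory {
      std::vector<Context*> remember_set;

      /* Same idea as ObjectMemory::remember_object below: flag the object
       * and queue it for rescanning at the next young collection. */
      void remember_object(Context* target) {
        if(target->Remember) return;
        target->Remember = true;
        remember_set.push_back(target);
      }
    };

    struct Runner {
      Memory* om;
      Context* active;

      /* The invariant: a mature context is remembered when it becomes the
       * active context, so stores into its stack need no per-store barrier. */
      void restore_context(Context* ctx) {
        active = ctx;
        if(ctx->zone == MatureObjectZone) om->remember_object(ctx);
      }
    };

    int main() {
      Memory om;
      Runner run = { &om, nullptr };

      Context young_ctx;                  /* young contexts need no entry */
      run.restore_context(&young_ctx);
      assert(!young_ctx.Remember);

      Context old_ctx;
      old_ctx.zone = MatureObjectZone;    /* pretend it was tenured */
      run.restore_context(&old_ctx);
      assert(old_ctx.Remember);           /* safe to push without a barrier now */
      return 0;
    }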
9 vm/objectmemory.cpp
@@ -91,6 +91,15 @@ namespace rubinius {
}
}
+ /* Store an object into the remember set. Called when we've calculated
+ * externally that the object in question needs to be remembered */
+ void ObjectMemory::remember_object(OBJECT target) {
+ /* If it's already remembered, ignore this request */
+ if(target->Remember) return;
+ target->Remember = 1;
+ remember_set->push_back(target);
+ }
+
void ObjectMemory::store_object(OBJECT target, size_t index, OBJECT val) {
if(target->field_count <= index) {
throw new ObjectBoundsExceeded(target, index);
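For comparison, the write_barrier declared in objectmemory.hpp presumably does the same bookkeeping only after testing the store for an old-to-young reference. A hedged sketch of that shape (the actual implementation is not part of this diff):

    /* Sketch only: remember the target when an old object gains a
     * reference to a young one. */
    void ObjectMemory::write_barrier(OBJECT target, OBJECT val) {
      if(target->zone == MatureObjectZone && val->zone == YoungObjectZone) {
        remember_object(target);
      }
    }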
2  vm/objectmemory.hpp
@@ -46,6 +46,8 @@ namespace rubinius {
~ObjectMemory();
void write_barrier(OBJECT target, OBJECT val);
+ void remember_object(OBJECT target);
+
void store_object(OBJECT target, size_t index, OBJECT val);
void set_class(OBJECT target, OBJECT obj);
OBJECT allocate_object(size_t fields);
4 vm/oop.hpp
@@ -96,7 +96,7 @@ to be a simple test for that bit pattern.
typedef size_t hashval;
- /* the sizeof(struct rubinius_object) must an increment of the platform
+ /* the sizeof(class ObjectHeader) must be a multiple of the platform
pointer size, so that the bytes located directly after a
struct rubinius_object can hold a pointer which can be
dereferenced. (an 32 bit platforms, pointers must be aligned
@@ -129,10 +129,8 @@ to be a simple test for that bit pattern.
unsigned int IsBlockContext : 1;
unsigned int IsMeta : 1;
- unsigned int CTXFast : 1;
unsigned int IsTainted : 1;
unsigned int IsFrozen : 1;
- unsigned int IsLittleEndian : 1;
unsigned int RefsAreWeak : 1;
};
uint32_t all_flags;
18 vm/test/test_task.hpp
@@ -817,4 +817,22 @@ class TestTask : public CxxTest::TestSuite {
TS_ASSERT(!state->om->collect_young_now);
TS_ASSERT(!state->om->collect_mature_now);
}
+
+ void test_old_contexts_are_remembered_on_activate() {
+ Task* task = Task::create(state);
+ TS_ASSERT(!task->active->Remember);
+
+ /* Evil, but lets us test this easily. Don't do this in real
+ * code */
+ task->active->zone = MatureObjectZone;
+
+ task->restore_context(task->active);
+ TS_ASSERT(task->active->Remember);
+
+ /* Check it only happens to old contexts. */
+ task = Task::create(state);
+ TS_ASSERT(!task->active->Remember);
+ task->restore_context(task->active);
+ TS_ASSERT(!task->active->Remember);
+ }
};
