
More work on the new locals code

1 parent 71df599 · commit 14e2b207ab2fbf8596774670695c6cdb95a3d790 · evanphx committed Nov 29, 2007
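Taken together, the changes below stop allocating a context's locals as a separate heap Tuple and instead carve them out of the tail of the context's own allocation on the context stack, recording the total byte size in a new size field so the formerly fixed CTX_SIZE stride becomes per-context. The C program below is a simplified, self-contained model of that layout; every name in it (model_context, MODEL_LOCALS, model_new_context) is an illustrative stand-in, not a real Rubinius definition.

/* Minimal model of the new context layout implied by this commit: the
 * locals live in the same allocation, immediately after the fixed-size
 * context header.  Every name here is an illustrative stand-in, not a
 * real Rubinius definition. */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

typedef void *object;                      /* stand-in for OBJECT */

struct model_context {
    int size;                              /* total bytes: header + inline locals */
    int num_locals;
};

/* locals start right past the header, like object_memory_context_locals() */
#define MODEL_LOCALS(ctx) ((object *)((char *)(ctx) + sizeof(struct model_context)))

static struct model_context *model_new_context(int locals) {
    int size = (int)sizeof(struct model_context) + locals * (int)sizeof(object);
    struct model_context *ctx = malloc((size_t)size);
    memset(ctx, 0, (size_t)size);
    ctx->size = size;                      /* per-context size replaces a fixed CTX_SIZE */
    ctx->num_locals = locals;
    return ctx;
}

int main(void) {
    struct model_context *ctx = model_new_context(4);
    object *locals = MODEL_LOCALS(ctx);
    locals[0] = ctx;                       /* the real code fills every slot with Qnil */
    printf("context uses %d bytes; locals start at offset %zu\n",
           ctx->size, (size_t)((char *)MODEL_LOCALS(ctx) - (char *)ctx));
    free(ctx);
    return 0;
}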
8 Rakefile
@@ -103,8 +103,12 @@ def compile(name, output)
unless File.exists?(dir)
FileUtils.mkdir_p dir
end
-
- sh "shotgun/rubinius compile #{name} #{output}", :verbose => $verbose
+
+ if ENV['GDB']
+ sh "shotgun/rubinius --gdb compile #{name} #{output}", :verbose => $verbose
+ else
+ sh "shotgun/rubinius compile #{name} #{output}", :verbose => $verbose
+ end
end
task :compiler do
16 shotgun/lib/baker.c
@@ -102,6 +102,7 @@ OBJECT baker_gc_forwarded_object(OBJECT obj) {
if(baker_gc_forwarded_p(iobj)) { \
ret = baker_gc_forwarded_object(iobj); \
} else if(baker_gc_contains_p(g, iobj) || heap_contains_p(state->om->contexts, iobj)) { \
+ assert(((OBJECT)iobj)->klass); \
ret = baker_gc_mutate_object(st, g, iobj); \
} else { \
ret = iobj; \
@@ -277,7 +278,20 @@ void baker_gc_mutate_context(STATE, baker_gc g, OBJECT iobj, int shifted, int to
fc_mutate(block);
fc_mutate(literals);
fc_mutate(self);
- fc_mutate(locals);
+ if(!NIL_P(fc->locals) && fc->locals->gc_zone == 0) {
+ int i, fields = NUM_FIELDS(fc->locals);
+ OBJECT mut, tmp;
+ fc->locals = object_memory_context_locals(iobj);
+ for(i = 0; i < fields; i++) {
+ tmp = NTH_FIELD(fc->locals, i);
+ if(!REFERENCE_P(tmp)) continue;
+
+ mut = baker_gc_maybe_mutate(state, g, tmp);
+ fast_unsafe_set(fc->locals, i, mut);
+ }
+ } else {
+ fc_mutate(locals);
+ }
fc_mutate(method_module);
fc_mutate(name);
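The new branch above handles locals that now live inside the context's own stack allocation: when fc->locals has gc_zone == 0, the tuple cannot be forwarded like an ordinary young-generation object, so the collector re-derives its address from the (possibly shifted) context via object_memory_context_locals and scavenges each reference field in place. A minimal, self-contained sketch of that scan-in-place pattern, with trivial stand-ins for REFERENCE_P and baker_gc_maybe_mutate, follows.

/* Illustrative only: scavenge a stack-resident locals area in place,
 * updating each reference slot without moving the area itself. */
#include <stddef.h>

typedef void *object;

/* trivial stand-ins for REFERENCE_P and baker_gc_maybe_mutate */
static int    is_reference(object o)     { return o != NULL; }
static object forward_if_young(object o) { return o; }

static void scavenge_locals_in_place(object *locals, size_t fields) {
    size_t i;
    for (i = 0; i < fields; i++) {
        object slot = locals[i];
        if (!is_reference(slot)) continue;     /* skip immediates (nil, fixnums, ...) */
        locals[i] = forward_if_young(slot);    /* rewrite the slot, never the tuple */
    }
}

int main(void) {
    int dummy = 0;
    object locals[3] = { NULL, &dummy, NULL };
    scavenge_locals_in_place(locals, 3);
    return (locals[1] == &dummy) ? 0 : 1;
}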
4 shotgun/lib/baker.h
@@ -1,8 +1,8 @@
-#include "heap.h"
-#include <ptr_array.h>
#ifndef __RUBINIUS_BAKER__
#define __RUBINIUS_BAKER__ 1
+#include "heap.h"
+
struct baker_gc_struct {
rheap space_a;
rheap space_b;
1 shotgun/lib/cpu.h
@@ -44,6 +44,7 @@
struct fast_context {
CPU_REGISTERS
+ int size;
};
#define FASTCTX(ctx) ((struct fast_context*)BYTES_OF(ctx))
31 shotgun/lib/cpu_instructions.c
@@ -341,7 +341,7 @@ OBJECT cpu_compile_method(STATE, OBJECT cm) {
static inline OBJECT cpu_create_context(STATE, cpu c, OBJECT recv, OBJECT mo,
OBJECT name, OBJECT mod, unsigned long int args, OBJECT block) {
OBJECT sender, ctx, ins;
- int num_lcls;
+ int num_lcls, i;
struct fast_context *fc;
sender = c->active_context;
@@ -356,12 +356,13 @@ static inline OBJECT cpu_create_context(STATE, cpu c, OBJECT recv, OBJECT mo,
cpu_flush_sp(c);
- ctx = object_memory_new_context(state->om);
+ ctx = object_memory_new_context(state->om, num_lcls);
if(ctx >= state->om->context_last) {
state->om->collect_now |= OMCollectYoung;
}
CLEAR_FLAGS(ctx);
+ ctx->gc_zone = 0;
ctx->klass = Qnil;
ctx->field_count = FASTCTX_FIELDS;
@@ -379,11 +380,23 @@ static inline OBJECT cpu_create_context(STATE, cpu c, OBJECT recv, OBJECT mo,
fc->literals = cmethod_get_literals(mo);
fc->self = recv;
if(num_lcls > 0) {
- fc->locals = tuple_new(state, num_lcls + 2);
- fc->locals->ForeverYoung = TRUE;
+ //fc->locals = tuple_new(state, num_lcls);
+
+ fc->locals = object_memory_context_locals(ctx);
+ CLEAR_FLAGS(fc->locals);
+ fc->locals->gc_zone = 0;
+ fc->locals->klass = BASIC_CLASS(tuple);
+ SET_NUM_FIELDS(fc->locals, num_lcls);
+
+ for(i = 0; i < num_lcls; i++) {
+ SET_FIELD_DIRECT(fc->locals, i, Qnil);
+ }
+
} else {
fc->locals = Qnil;
}
+ // printf("Locals for %p at %p (%d, %d)\n", ctx, fc->locals, num_lcls, FASTCTX(ctx)->size);
+
fc->argcount = args;
fc->name = name;
fc->method_module = mod;
@@ -482,6 +495,7 @@ inline void cpu_restore_context_with_home(STATE, cpu c, OBJECT ctx, OBJECT home,
context that holds all the data. So if it's a fast, we restore
it's data, then if ctx != home, we restore a little more */
+
fc = FASTCTX(home);
CHECK_PTR(fc->self);
CHECK_PTR(fc->method);
@@ -494,6 +508,9 @@ inline void cpu_restore_context_with_home(STATE, cpu c, OBJECT ctx, OBJECT home,
fc = FASTCTX(ctx);
}
+ assert(fc->sender);
+
+
c->data = fc->data;
c->type = fc->type;
@@ -532,7 +549,9 @@ inline int cpu_simple_return(STATE, cpu c, OBJECT val) {
OBJECT destination, home;
destination = cpu_current_sender(c);
-
+
+ // printf("Rtrnng frm %p (%d)\n", c->active_context, FASTCTX(c->active_context)->size);
+
if(destination == Qnil) {
object_memory_retire_context(state->om, c->active_context);
@@ -637,7 +656,7 @@ inline int cpu_return_to_sender(STATE, cpu c, OBJECT val, int consider_block, in
state->om->contexts->current = state->om->context_bottom;
/* Otherwise set it to just beyond where we're returning to */
} else {
- state->om->contexts->current = (void*)((uintptr_t) destination + CTX_SIZE);
+ state->om->contexts->current = (void*)AFTER_CTX(destination);
}
/* It's a heap context, so reset the context stack to the virtual
26 shotgun/lib/instructions.rb
@@ -406,7 +406,13 @@ def set_local
next_int;
t1 = stack_pop();
// printf("Set local %d to %s\\n", _int, _inspect(t1));
- tuple_put(state, cpu_current_locals(state, c), _int, t1);
+ t2 = cpu_current_locals(state, c);
+ if(t2->gc_zone == 0) {
+ sassert(_int < NUM_FIELDS(t2) && "locals tuple sized wrong");
+ fast_unsafe_set(t2, _int, t1);
+ } else {
+ tuple_put(state, t2, _int, t1);
+ }
stack_push(t1);
CODE
end
@@ -418,7 +424,14 @@ def set_local_from_fp
next_int;
t1 = c->stack_top[c->fp - _int];
- tuple_put(state, cpu_current_locals(state, c), k, t1);
+
+ t2 = cpu_current_locals(state, c);
+ if(t2->gc_zone == 0) {
+ sassert(k < NUM_FIELDS(t2) && "locals tuple sized wrong");
+ fast_unsafe_set(t2, k, t1);
+ } else {
+ tuple_put(state, t2, k, t1);
+ }
CODE
end
@@ -434,7 +447,14 @@ def set_local_depth
t2 = blokctx_env(state, t1);
t1 = blokenv_get_home_block(t2);
}
- tuple_put(state, blokctx_locals(state, t1), _int, t3);
+
+ t2 = blokctx_locals(state, t1);
+ if(t2->gc_zone == 0) {
+ sassert(_int < NUM_FIELDS(t2) && "locals tuple sized wrong");
+ fast_unsafe_set(t2, _int, t3);
+ } else {
+ tuple_put(state, t2, _int, t3);
+ }
stack_push(t3);
CODE
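All three set_local variants now share the same branch: when the locals tuple is stack-resident (gc_zone == 0) the slot is stored directly with fast_unsafe_set, and only genuinely heap-allocated locals still go through tuple_put, which presumably applies the write barrier (the barrier itself, further down in object_memory-barrier.h, now asserts that neither operand is in zone 0). The sketch below is a hedged, self-contained model of that shape; the model_* names are invented for illustration.

/* Illustrative only: the shape of the branch the instruction generator
 * now emits; every model_* name is a stand-in, not the real macro. */
#include <stddef.h>

typedef void *object;

static int  model_gc_zone(const object *t)                { (void)t; return 0; }
static void model_fast_set(object *t, long i, object v)   { t[i] = v; }                      /* fast_unsafe_set */
static void model_tuple_put(object *t, long i, object v)  { t[i] = v; /* + write barrier */ }

static void model_set_local(object *locals, long index, object value) {
    if (model_gc_zone(locals) == 0) {
        model_fast_set(locals, index, value);    /* stack-resident: plain store */
    } else {
        model_tuple_put(locals, index, value);   /* heap tuple: barriered store */
    }
}

int main(void) {
    object slots[2] = { NULL, NULL };
    int value = 42;
    model_set_local(slots, 1, &value);
    return (slots[1] == &value) ? 0 : 1;
}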
1 shotgun/lib/machine.h
@@ -1,5 +1,4 @@
#include "shotgun.h"
-#include "cpu.h"
#include <signal.h>
struct rubinius_machine {
62 shotgun/lib/methctx.c
@@ -103,10 +103,12 @@ OBJECT blokenv_s_under_context2(STATE, OBJECT cmethod, OBJECT ctx, OBJECT ctx_bl
OBJECT blokenv_create_context(STATE, OBJECT self, OBJECT sender, int sp) {
OBJECT ctx, ins;
- int cnt;
+ int cnt, i;
struct fast_context *fc;
- ctx = object_memory_new_context(state->om);
+ cnt = FIXNUM_TO_INT(blokenv_get_local_count(self));
+
+ ctx = object_memory_new_context(state->om, cnt);
if(ctx >= state->om->context_last) {
state->om->collect_now |= OMCollectYoung;
}
@@ -116,7 +118,9 @@ OBJECT blokenv_create_context(STATE, OBJECT self, OBJECT sender, int sp) {
}
CLEAR_FLAGS(ctx);
+ ctx->gc_zone = 0;
ctx->field_count = FASTCTX_FIELDS;
+ ctx->klass = Qnil;
fc = FASTCTX(ctx);
fc->sender = sender;
@@ -139,9 +143,19 @@ OBJECT blokenv_create_context(STATE, OBJECT self, OBJECT sender, int sp) {
fc->block = Qnil;
fc->method_module = Qnil;
- cnt = FIXNUM_TO_INT(blokenv_get_local_count(self));
if(cnt > 0) {
- fc->locals = tuple_new(state, cnt);
+ // fc->locals = tuple_new(state, cnt);
+
+ fc->locals = object_memory_context_locals(ctx);
+ CLEAR_FLAGS(fc->locals);
+ fc->locals->gc_zone = 0;
+ fc->locals->klass = BASIC_CLASS(tuple);
+ SET_NUM_FIELDS(fc->locals, cnt);
+
+ for(i = 0; i < cnt; i++) {
+ SET_FIELD_DIRECT(fc->locals, i, Qnil);
+ }
+
} else {
fc->locals = Qnil;
}
@@ -156,3 +170,43 @@ OBJECT blokenv_create_context(STATE, OBJECT self, OBJECT sender, int sp) {
fc->type = FASTCTX_BLOCK;
return ctx;
}
+
+void methctx_reference(STATE, OBJECT ctx) {
+ struct fast_context *fc;
+ /* Don't do it again. */
+ if(!stack_context_p(ctx)) return;
+
+ /* Has to be done first because this uses information we're about
+ to overwrite. */
+ object_memory_context_referenced(state->om, ctx);
+
+ CLEAR_FLAGS(ctx);
+ ctx->gc_zone = YoungObjectZone;
+ switch(FASTCTX(ctx)->type) {
+ case FASTCTX_NORMAL:
+ ctx->klass = BASIC_CLASS(fastctx);
+ ctx->obj_type = MContextType;
+ ctx->CTXFast = TRUE;
+ break;
+ case FASTCTX_BLOCK:
+ ctx->klass = BASIC_CLASS(blokctx);
+ ctx->obj_type = BContextType;
+ break;
+ case FASTCTX_NMC:
+ ctx->klass = BASIC_CLASS(nmc);
+ ctx->obj_type = MContextType;
+ break;
+ }
+ SET_NUM_FIELDS(ctx, FASTCTX_FIELDS);
+ ctx->StoresBytes = TRUE;
+ ctx->ForeverYoung = TRUE;
+
+ fc = FASTCTX(ctx);
+
+ /* Fixup the locals tuple. */
+ if(!NIL_P(fc->locals) && fc->locals->gc_zone == 0) {
+ fc->locals->gc_zone = YoungObjectZone;
+ }
+
+}
+
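methctx_reference, moved out of the header and into this file, is what turns a raw stack context into something other objects may safely hold: it first asks object_memory to raise the context_bottom watermark past the context (so returns no longer pop over it), then gives the record a real object header (class, zone, type flags) and flips the inline locals tuple into YoungObjectZone so later stores to it presumably pass the barrier's zone check. The watermark half is the subtle part; below is a simplified, self-contained model of it, with invented names.

/* Illustrative model of the "referenced context" watermark: contexts
 * below the watermark are pinned and no longer popped on return.
 * All names are invented for this sketch. */
#include <stdio.h>
#include <stdlib.h>

struct model_ctx { int size; };

struct model_ctx_stack {
    char *base;
    char *bottom;       /* watermark: everything below it stays referenced */
    char *current;
};

#define MODEL_AFTER(ctx) ((char *)(ctx) + (ctx)->size)

static void model_reference(struct model_ctx_stack *st, struct model_ctx *ctx) {
    char *past = MODEL_AFTER(ctx);
    if (st->bottom < past) st->bottom = past;    /* like object_memory_context_referenced */
}

static int model_can_pop(struct model_ctx_stack *st, struct model_ctx *ctx) {
    return (char *)ctx >= st->bottom;            /* like om_stack_context_p's bottom check */
}

int main(void) {
    char *stack = malloc(256);
    struct model_ctx_stack st = { stack, stack, stack + 64 };
    struct model_ctx *ctx = (struct model_ctx *)stack;
    ctx->size = 64;
    printf("before reference: can pop? %d\n", model_can_pop(&st, ctx));
    model_reference(&st, ctx);
    printf("after reference:  can pop? %d\n", model_can_pop(&st, ctx));
    free(stack);
    return 0;
}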
26 shotgun/lib/methctx.h
@@ -18,9 +18,19 @@ OBJECT methctx_dup_chain(STATE, OBJECT ctx, OBJECT *also);
#define blokctx_env(state, self) (FASTCTX(self)->name)
#define blokctx_locals(state, self) (FASTCTX(self)->locals)
-static inline void methctx_reference(STATE, OBJECT ctx) {
+void methctx_reference(STATE, OBJECT ctx);
+
+
+#if 0
+static inline void methctx_reference(STATE, OBJECT ctx) {
+ struct fast_context *fc;
/* Don't do it again. */
if(!stack_context_p(ctx)) return;
+
+ /* Has to be done first because this uses information we're about
+ to overwrite. */
+ object_memory_context_referenced(state->om, ctx);
+
CLEAR_FLAGS(ctx);
ctx->gc_zone = YoungObjectZone;
switch(FASTCTX(ctx)->type) {
@@ -40,8 +50,18 @@ static inline void methctx_reference(STATE, OBJECT ctx) {
}
SET_NUM_FIELDS(ctx, FASTCTX_FIELDS);
ctx->StoresBytes = TRUE;
- ctx->ForeverYoung = TRUE;
- object_memory_context_referenced(state->om, ctx);
+ ctx->ForeverYoung = TRUE;
+
+ fc = FASTCTX(ctx);
+
+ /* Fixup the locals tuple. */
+ if(!NIL_P(fc->locals)) {
+ CLEAR_FLAGS(fc->locals);
+ fc->locals->gc_zone = YoungObjectZone;
+ fc->locals->klass = BASIC_CLASS(tuple);
+ }
}
#endif
+
+#endif
7 shotgun/lib/object_memory-barrier.h
@@ -15,15 +15,18 @@
/* TODO: This routine MUST be optimized because it's hit constantly. */
+
+#include <assert.h>
+
static inline void object_memory_write_barrier(object_memory om, OBJECT target, OBJECT val) {
gc_zone tz, vz;
if(!REFERENCE_P(val)) return;
tz = target->gc_zone;
vz = val->gc_zone;
- xassert(tz > 0);
- xassert(vz > 0);
+ assert(tz > 0);
+ assert(vz > 0);
xassert(val->klass != Qnil);
/* if the target is in a higher numbered zone than val, then
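The zone checks here are promoted from xassert to plain assert, presumably so a zone-0 (stack-resident) operand aborts rather than slipping through, which is consistent with the set_local changes above that route stack-resident locals around the barrier entirely. For orientation, a minimal self-contained model of a generational store barrier with that zone check follows; the model_* names and the remembered-set call are illustrative assumptions, not the real implementation.

/* Illustrative model: a generational store barrier that refuses zone-0
 * (stack-resident) operands, mirroring the tightened asserts above. */
#include <assert.h>
#include <stddef.h>

typedef struct model_obj { int gc_zone; } model_obj;   /* 0 = stack, 1 = young, 2 = mature */

static void model_remember(model_obj *target) { (void)target; /* presumably: add to a remembered set */ }

static void model_write_barrier(model_obj *target, model_obj *val) {
    if (val == NULL) return;                 /* immediates never need a barrier */
    assert(target->gc_zone > 0);             /* stack objects must not reach the barrier */
    assert(val->gc_zone > 0);
    if (target->gc_zone > val->gc_zone) {    /* older target holding a younger val */
        model_remember(target);
    }
}

int main(void) {
    model_obj mature = { 2 }, young = { 1 };
    model_write_barrier(&mature, &young);    /* remembered */
    model_write_barrier(&young, &mature);    /* nothing to do */
    return 0;
}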
12 shotgun/lib/object_memory.c
@@ -138,6 +138,7 @@ void object_memory_formalize_contexts(STATE, object_memory om) {
void object_memory_shift_contexts(STATE, object_memory om) {
OBJECT ctx, new_ctx;
int inc = 0;
+ int sz;
/* If the context_bottom is the true bottom, we haven't promoted
anything and everything can stay where it is. */
@@ -153,23 +154,24 @@ void object_memory_shift_contexts(STATE, object_memory om) {
new_ctx = (OBJECT)(om->contexts->address);
EACH_STACK_CTX(om, ctx) {
+ sz = FASTCTX(ctx)->size;
/* The top context is a little special. Either it's sender
is nil or in the heap. Let mutate context know this is the case */
if(inc == 0) {
baker_gc_mutate_context(state, om->gc, ctx, TRUE, TRUE);
- memcpy((void*)new_ctx, (void*)ctx, CTX_SIZE);
+ memcpy((void*)new_ctx, (void*)ctx, sz);
ctx->klass = new_ctx;
} else {
- memcpy((void*)new_ctx, (void*)ctx, CTX_SIZE);
+ memcpy((void*)new_ctx, (void*)ctx, sz);
ctx->klass = new_ctx;
baker_gc_mutate_context(state, om->gc, new_ctx, TRUE, FALSE);
}
- new_ctx = (OBJECT)((uintptr_t)new_ctx + CTX_SIZE);
+ new_ctx = (OBJECT)((uintptr_t)new_ctx + sz);
inc++;
} DONE_EACH_STACK_CTX(ctx);
om->contexts->current = (address)new_ctx;
- om->context_top = (void*)((uintptr_t)new_ctx - CTX_SIZE);
+ om->context_top = (void*)((uintptr_t)new_ctx - sz);
}
om->context_bottom = (OBJECT)(om->contexts->address);
@@ -195,7 +197,7 @@ void object_memory_clear_marks(STATE, object_memory om) {
while(addr < (char*)om->contexts->current) {
ctx = (OBJECT)addr;
mark_sweep_clear_mark(state, ctx);
- addr += CTX_SIZE;
+ addr += FASTCTX(ctx)->size;
}
}
65 shotgun/lib/object_memory.h
@@ -55,6 +55,7 @@ void object_memory_shift_contexts(STATE, object_memory om);
void object_memory_mark_contexts(STATE, object_memory om);
void object_memory_formalize_contexts(STATE, object_memory om);
+
#define FAST_NEW 1
#ifdef FAST_NEW
@@ -66,29 +67,51 @@ void object_memory_formalize_contexts(STATE, object_memory om);
#define object_memory_new_dirty_object _om_inline_new_object
#define CTX_SIZE SIZE_IN_BYTES_FIELDS(FASTCTX_FIELDS)
+
+#define BYTES_PAST(ctx, num) ((char*)ctx + num)
+#define AFTER_CTX(ctx) BYTES_PAST(ctx, FASTCTX(ctx)->size)
+
+static inline OBJECT object_memory_new_context(object_memory om, int locals) {
+ int size;
+ OBJECT ctx;
+
+ if(locals > 0) {
+ size = CTX_SIZE + SIZE_IN_BYTES_FIELDS(locals) + 4;
+ } else {
+ size = CTX_SIZE;
+ }
+
+ ctx = ((OBJECT)heap_allocate_dirty(om->contexts, size));
+ memset(ctx, 0, size);
+
+ /* not really the number of fields, rather the number of bytes
+ this context is using. */
+ FASTCTX(ctx)->size = size;
-#define object_memory_new_context(om) ((OBJECT)heap_allocate_dirty(om->contexts, CTX_SIZE))
+ return ctx;
+}
+#define object_memory_context_locals(ctx) ((OBJECT)BYTES_PAST(ctx, CTX_SIZE))
+
#define om_on_stack(om, ctx) heap_contains_p(om->contexts, ctx)
#define om_in_heap(om, ctx) heap_contains_p(om->gc->current, ctx)
#define object_memory_retire_context(om, ctx) \
if(om_on_stack(om, ctx) && (ctx >= om->context_bottom)) { \
- xassert(ctx == om->contexts->current - CTX_SIZE);\
- fast_memfill_s20((void*)ctx, 0); heap_putback(om->contexts, CTX_SIZE); \
+ fast_memfill_s20((void*)ctx, 0); heap_putback(om->contexts, FASTCTX(ctx)->size); \
}
-#define object_memory_context_referenced(om, ctx) (void)({ \
- OBJECT _nb = (OBJECT) ((uintptr_t)ctx + CTX_SIZE); \
- if(om_on_stack(om, ctx) && \
- (om->context_bottom < _nb)) { om->context_bottom = _nb; } })
+#define object_memory_context_referenced(om, ctx) (void)({ \
+ OBJECT _nb = (OBJECT)AFTER_CTX(ctx); \
+ if(om_on_stack(om, ctx) && (om->context_bottom < _nb)) { \
+ om->context_bottom = _nb; } })
#define om_context_referenced_p(om, ctx) ((ctx < om->context_bottom) && (ctx >= (OBJECT)om->contexts->address))
#define om_stack_context_p(om, ctx) (om_on_stack(om, ctx) && (ctx >= om->context_bottom))
-#define om_stack_next_ctx(ctx) ((OBJECT)(ctx + CTX_SIZE))
-#define om_stack_prev_ctx(ctx) ((OBJECT)(ctx - CTX_SIZE))
+#define om_stack_next_ctx(ctx) ((OBJECT)AFTER_CTX(ctx))
+#define om_stack_prev_ctx(ctx) ((OBJECT)BYTES_PAST(ctx, -FASTCTX(ctx)->size))
#define om_stack_sender(ctx) om_stack_prev_ctx(ctx)
#define om_valid_context_p(state, ctx) ( \
@@ -97,23 +120,23 @@ if(om_on_stack(om, ctx) && (ctx >= om->context_bottom)) { \
(om_in_heap(state->om, ctx) && (methctx_is_fast_p(state, ctx) || blokctx_s_block_context_p(state, ctx))) \
)
-#define EACH_CTX(om, addr) \
- addr = (OBJECT) om->contexts->address; \
+#define EACH_CTX(om, addr) \
+ addr = (OBJECT)om->contexts->address; \
while(addr < (OBJECT) om->contexts->current) {
-#define DONE_EACH_CTX(addr) addr = (address)( (uintptr_t)addr + CTX_SIZE); }
+#define DONE_EACH_CTX(addr) addr = (address)AFTER_CTX(addr); }
-#define EACH_REFD_CTX(om, addr) \
- addr = (OBJECT) om->contexts->address; \
- while(addr < om->context_bottom) {
-
-#define DONE_EACH_REFD_CTX(addr) addr = (address)( (uintptr_t)addr + CTX_SIZE); }
+#define EACH_REFD_CTX(om, addr) \
+ addr = (OBJECT)om->contexts->address; \
+ while(addr < (OBJECT) om->context_bottom) {
+
+#define DONE_EACH_REFD_CTX(addr) addr = (address)AFTER_CTX(addr); }
-#define EACH_STACK_CTX(om, addr) \
- addr = (OBJECT) om->context_bottom; \
- while(addr < (OBJECT)om->contexts->current) {
+#define EACH_STACK_CTX(om, addr) \
+ addr = (OBJECT)om->context_bottom; \
+ while(addr < (OBJECT) om->contexts->current) {
-#define DONE_EACH_STACK_CTX(addr) addr = (address)( (uintptr_t)addr + CTX_SIZE); }
+#define DONE_EACH_STACK_CTX(addr) addr = (address)AFTER_CTX(addr); }
#define om_no_referenced_ctxs_p(om) (om->context_bottom == (OBJECT)om->contexts->address)
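With contexts no longer a fixed CTX_SIZE, the stack arithmetic above is rewritten around the per-context size field: object_memory_new_context reserves CTX_SIZE plus room for the locals (plus a small pad), and om_stack_next_ctx and the EACH_*_CTX walkers step by AFTER_CTX(ctx) instead of a constant stride. The self-contained sketch below models that variable-stride walk over a packed buffer of records; all names are invented for illustration.

/* Illustrative only: walking a packed stack of variable-size records,
 * the way the EACH_CTX macros now step by each context's own size. */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

struct model_ctx { int size; int num_locals; };

#define MODEL_AFTER(ctx) ((char *)(ctx) + (ctx)->size)

/* push one record with `locals` trailing pointer slots */
static struct model_ctx *model_push(char **current, int locals) {
    int size = (int)sizeof(struct model_ctx) + locals * (int)sizeof(void *);
    struct model_ctx *ctx = (struct model_ctx *)*current;
    memset(ctx, 0, (size_t)size);
    ctx->size = size;
    ctx->num_locals = locals;
    *current = MODEL_AFTER(ctx);
    return ctx;
}

int main(void) {
    char *base = malloc(1024);
    char *current = base;
    model_push(&current, 2);
    model_push(&current, 0);
    model_push(&current, 5);

    /* variable-stride walk, analogous to EACH_CTX ... DONE_EACH_CTX */
    for (char *addr = base; addr < current; addr = MODEL_AFTER((struct model_ctx *)addr)) {
        struct model_ctx *ctx = (struct model_ctx *)addr;
        printf("ctx at +%ld: %d bytes, %d locals\n",
               (long)(addr - base), ctx->size, ctx->num_locals);
    }
    free(base);
    return 0;
}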
12 shotgun/lib/state.h
@@ -5,6 +5,7 @@
#include <ucontext.h>
#include <hashtable.h>
+#include <ptr_array.h>
#include "subtend/PortableUContext.h"
#include <termios.h>
@@ -80,6 +81,12 @@ rstate rubinius_state_new();
#define STATE rstate state
+#define FASTCTX_FIELDS 18
+#define FASTCTX_NORMAL 1
+#define FASTCTX_BLOCK 3
+#define FASTCTX_NMC 4
+
+#include "cpu.h"
#include "object_memory.h"
#include "subtend/handle.h"
@@ -149,11 +156,6 @@ struct rubinius_state {
#define FIRE_STACK 3
#define FIRE_ASSERT 4
-#define FASTCTX_FIELDS 17
-#define FASTCTX_NORMAL 1
-#define FASTCTX_BLOCK 3
-#define FASTCTX_NMC 4
-
OBJECT rbs_const_set(STATE, OBJECT module, const char *name, OBJECT obj);
OBJECT rbs_const_get(STATE, OBJECT module, const char *name);
OBJECT rbs_class_new(STATE, const char *name, int fields, OBJECT obj);
2 shotgun/lib/subtend/nmc.c
@@ -33,7 +33,7 @@ OBJECT nmc_new(STATE, OBJECT nmethod, OBJECT sender, OBJECT recv, OBJECT name, i
OBJECT ctx, sys;
struct fast_context *fc;
- ctx = object_memory_new_context(state->om);
+ ctx = object_memory_new_context(state->om, 0);
if(ctx >= state->om->context_last) {
state->om->collect_now |= OMCollectYoung;
}
