Skip to content

Commit

Permalink
Browse files Browse the repository at this point in the history
Big cleanup of literal table and relocation-related code
- new way to iterate over instruction_operands cleans up relocation code
- move some methods out of the VM class to clean up code
- if debugging is on, die earlier if attempting to allocate inside GC
- callback heap entries are now code_blocks
- replace block_granularity with data_alignment
  • Loading branch information
Slava Pestov committed Dec 2, 2009
1 parent 8d41693 commit ca7bca6
Show file tree
Hide file tree
Showing 25 changed files with 256 additions and 195 deletions.
4 changes: 4 additions & 0 deletions vm/allot.hpp
Expand Up @@ -7,6 +7,10 @@ namespace factor
*/
inline object *factor_vm::allot_object(cell type, cell size)
{
#ifdef FACTOR_DEBUG
assert(!current_gc);
#endif

/* If the object is smaller than the nursery, allocate it in the nursery,
after a GC if needed */
if(nursery.size > size)
Expand Down
2 changes: 1 addition & 1 deletion vm/booleans.cpp
Expand Up @@ -15,7 +15,7 @@ VM_C_API void box_boolean(bool value, factor_vm *parent)

/* C API entry point: test a Factor value for truth. Everything except
the canonical false object counts as true. The parent VM pointer is
unused here; it is kept for the uniform VM_C_API signature. */
VM_C_API bool to_boolean(cell value, factor_vm *parent)
{
	return to_boolean(value);
}

}
7 changes: 1 addition & 6 deletions vm/booleans.hpp
Expand Up @@ -4,12 +4,7 @@ namespace factor
VM_C_API void box_boolean(bool value, factor_vm *vm);
VM_C_API bool to_boolean(cell value, factor_vm *vm);

/* Convert a C boolean into the canonical Factor true or false object. */
inline cell factor_vm::tag_boolean(cell untagged)
{
	if(untagged)
		return true_object;
	else
		return false_object;
}

inline bool factor_vm::to_boolean(cell value)
/* A value is true unless it is the canonical false object. */
inline static bool to_boolean(cell value)
{
	bool is_false = (value == false_object);
	return !is_false;
}
Expand Down
35 changes: 21 additions & 14 deletions vm/callbacks.cpp
Expand Up @@ -19,35 +19,44 @@ void factor_vm::init_callbacks(cell size)
callbacks = new callback_heap(size,this);
}

void callback_heap::update(callback *stub)
void callback_heap::update(code_block *stub)
{
tagged<array> code_template(parent->special_objects[CALLBACK_STUB]);

cell rel_class = untag_fixnum(array_nth(code_template.untagged(),1));
cell rel_type = untag_fixnum(array_nth(code_template.untagged(),2));
cell offset = untag_fixnum(array_nth(code_template.untagged(),3));

instruction_operand op(rel_class,offset + (cell)(stub + 1));
op.store_value((cell)(stub->compiled + 1));
relocation_entry rel(
(relocation_type)rel_type,
(relocation_class)rel_class,
offset);

flush_icache((cell)stub,stub->size);
instruction_operand op(rel,stub,0);
op.store_value((cell)callback_xt(stub));

stub->flush_icache();
}

callback *callback_heap::add(code_block *compiled)
code_block *callback_heap::add(cell owner)
{
tagged<array> code_template(parent->special_objects[CALLBACK_STUB]);
tagged<byte_array> insns(array_nth(code_template.untagged(),0));
cell size = array_capacity(insns.untagged());

cell bump = align(size,sizeof(cell)) + sizeof(callback);
cell bump = align(size + sizeof(code_block),data_alignment);
if(here + bump > seg->end) fatal_error("Out of callback space",0);

callback *stub = (callback *)here;
stub->compiled = compiled;
memcpy(stub + 1,insns->data<void>(),size);

stub->size = align(size,sizeof(cell));
free_heap_block *free_block = (free_heap_block *)here;
free_block->make_free(bump);
here += bump;

code_block *stub = (code_block *)free_block;
stub->owner = owner;
stub->literals = false_object;
stub->relocation = false_object;

memcpy(stub->xt(),insns->data<void>(),size);
update(stub);

return stub;
Expand All @@ -57,9 +66,7 @@ void factor_vm::primitive_callback()
{
tagged<word> w(dpop());
w.untag_check(this);

callback *stub = callbacks->add(w->code);
box_alien(stub + 1);
box_alien(callbacks->add(w.value())->xt());
}

}
47 changes: 35 additions & 12 deletions vm/callbacks.hpp
@@ -1,11 +1,28 @@
namespace factor
{

struct callback {
cell size;
code_block *compiled;
void *code() { return (void *)(this + 1); }
};
/* The callback heap is used to store the machine code that alien-callbacks
actually jump to when C code invokes them.
The callback heap has entries that look like code_blocks from the code heap,
but callback heap entries are allocated contiguously, never deallocated, and all
fields but the owner are set to false_object. The owner points to the callback
bottom word, whose XT is the callback body itself, generated by the optimizing
compiler. The machine code that follows a callback stub consists of a single
CALLBACK_STUB machine code template, which performs a jump to a "far" address
(on PowerPC and x86-64, it's loaded into a register first).
GC updates the CALLBACK_STUB code if the code block of the callback bottom word
is ever moved. The callback stub itself won't move, though, and is never
deallocated. This means that the callback stub itself is a stable function
pointer that C code can hold on to until the associated Factor VM exits.
Since callback stubs are GC roots, and are never deallocated, the associated
callback code in the code heap is also never deallocated.
The callback heap is not saved in the image. Running GC in a new session after
saving the image will deallocate any code heap entries that were only reachable
from the callback heap in the previous session when the image was saved. */

struct callback_heap {
segment *seg;
Expand All @@ -15,18 +32,24 @@ struct callback_heap {
explicit callback_heap(cell size, factor_vm *parent);
~callback_heap();

callback *add(code_block *compiled);
void update(callback *stub);
/* The XT of the callback body: fetch the stub's owner (a word) and
return that word's current execution token. */
void *callback_xt(code_block *stub)
{
	word *owner_word = (word *)UNTAG(stub->owner);
	return owner_word->xt;
}

void update(code_block *stub);
code_block *add(cell owner);

callback *next(callback *stub)
code_block *next(code_block *stub)
{
return (callback *)((cell)stub + stub->size + sizeof(callback));
return (code_block *)((cell)stub + stub->size());
}

template<typename Iterator> void iterate(Iterator &iter)
template<typename Iterator> void each_callback(Iterator &iter)
{
callback *scan = (callback *)seg->start;
callback *end = (callback *)here;
code_block *scan = (code_block *)seg->start;
code_block *end = (code_block *)here;
while(scan < end)
{
iter(scan);
Expand Down
43 changes: 14 additions & 29 deletions vm/code_block_visitor.hpp
@@ -1,6 +1,17 @@
namespace factor
{

/* Code block visitors iterate over sets of code blocks, applying a functor to
each one. The functor returns a new code_block pointer, which may or may not
equal the old one. This is stored back to the original location.
This is used by GC's sweep and compact phases, and the implementation of the
modify-code-heap primitive.
Iteration is driven by visit_*() methods. Some of them define GC roots:
- visit_context_code_blocks()
- visit_callback_code_blocks() */

template<typename Visitor> struct code_block_visitor {
factor_vm *parent;
Visitor visitor;
Expand All @@ -12,7 +23,6 @@ template<typename Visitor> struct code_block_visitor {
void visit_object_code_block(object *obj);
void visit_embedded_code_pointers(code_block *compiled);
void visit_context_code_blocks();
void visit_callback_code_blocks();
};

template<typename Visitor>
Expand Down Expand Up @@ -81,14 +91,11 @@ struct embedded_code_pointers_visitor {

explicit embedded_code_pointers_visitor(Visitor visitor_) : visitor(visitor_) {}

void operator()(relocation_entry rel, cell index, code_block *compiled)
void operator()(instruction_operand op)
{
relocation_type type = rel.rel_type();
relocation_type type = op.rel_type();
if(type == RT_XT || type == RT_XT_PIC || type == RT_XT_PIC_TAIL)
{
instruction_operand op(rel.rel_class(),rel.rel_offset() + (cell)compiled->xt());
op.store_code_block(visitor(op.load_code_block()));
}
}
};

Expand All @@ -98,7 +105,7 @@ void code_block_visitor<Visitor>::visit_embedded_code_pointers(code_block *compi
if(!parent->code->needs_fixup_p(compiled))
{
embedded_code_pointers_visitor<Visitor> visitor(this->visitor);
parent->iterate_relocations(compiled,visitor);
compiled->each_instruction_operand(visitor);
}
}

Expand All @@ -109,26 +116,4 @@ void code_block_visitor<Visitor>::visit_context_code_blocks()
parent->iterate_active_frames(call_frame_visitor);
}

template<typename Visitor>
struct callback_code_block_visitor {
callback_heap *callbacks;
Visitor visitor;

explicit callback_code_block_visitor(callback_heap *callbacks_, Visitor visitor_) :
callbacks(callbacks_), visitor(visitor_) {}

void operator()(callback *stub)
{
stub->compiled = visitor(stub->compiled);
callbacks->update(stub);
}
};

template<typename Visitor>
void code_block_visitor<Visitor>::visit_callback_code_blocks()
{
callback_code_block_visitor<Visitor> callback_visitor(parent->callbacks,visitor);
parent->callbacks->iterate(callback_visitor);
}

}
34 changes: 16 additions & 18 deletions vm/code_blocks.cpp
Expand Up @@ -150,12 +150,9 @@ struct update_word_references_relocation_visitor {

explicit update_word_references_relocation_visitor(factor_vm *parent_) : parent(parent_) {}

void operator()(relocation_entry rel, cell index, code_block *compiled)
void operator()(instruction_operand op)
{
relocation_type type = rel.rel_type();
instruction_operand op(rel.rel_class(),rel.rel_offset() + (cell)compiled->xt());

switch(type)
switch(op.rel_type())
{
case RT_XT:
{
Expand Down Expand Up @@ -201,7 +198,7 @@ void factor_vm::update_word_references(code_block *compiled)
else
{
update_word_references_relocation_visitor visitor(this);
iterate_relocations(compiled,visitor);
compiled->each_instruction_operand(visitor);
compiled->flush_icache();
}
}
Expand All @@ -218,13 +215,13 @@ struct relocate_code_block_relocation_visitor {

explicit relocate_code_block_relocation_visitor(factor_vm *parent_) : parent(parent_) {}

void operator()(relocation_entry rel, cell index, code_block *compiled)
void operator()(instruction_operand op)
{
instruction_operand op(rel.rel_class(),rel.rel_offset() + (cell)compiled->xt());
array *literals = (parent->to_boolean(compiled->literals)
? untag<array>(compiled->literals) : NULL);
code_block *compiled = op.parent_code_block();
array *literals = (to_boolean(compiled->literals) ? untag<array>(compiled->literals) : NULL);
cell index = op.parameter_index();

switch(rel.rel_type())
switch(op.rel_type())
{
case RT_PRIMITIVE:
op.store_value(parent->compute_primitive_relocation(array_nth(literals,index)));
Expand All @@ -245,7 +242,7 @@ struct relocate_code_block_relocation_visitor {
op.store_value(parent->compute_xt_pic_tail_relocation(array_nth(literals,index)));
break;
case RT_HERE:
op.store_value(parent->compute_here_relocation(array_nth(literals,index),rel.rel_offset(),compiled));
op.store_value(parent->compute_here_relocation(array_nth(literals,index),op.rel_offset(),compiled));
break;
case RT_THIS:
op.store_value((cell)compiled->xt());
Expand All @@ -269,7 +266,7 @@ struct relocate_code_block_relocation_visitor {
op.store_value(parent->decks_offset);
break;
default:
critical_error("Bad rel type",rel.rel_type());
critical_error("Bad rel type",op.rel_type());
break;
}
}
Expand All @@ -280,23 +277,24 @@ void factor_vm::relocate_code_block(code_block *compiled)
{
code->needs_fixup.erase(compiled);
relocate_code_block_relocation_visitor visitor(this);
iterate_relocations(compiled,visitor);
compiled->each_instruction_operand(visitor);
compiled->flush_icache();
}

/* Fixup labels. This is done at compile time, not image load time */
/* Resolve label references inside a freshly compiled code block.
labels is a flat array of (rel_class, offset, target) triples, all
tagged fixnums; targets are offsets relative to the block's XT. */
void factor_vm::fixup_labels(array *labels, code_block *compiled)
{
	cell size = array_capacity(labels);

	for(cell i = 0; i < size; i += 3)
	{
		relocation_class rel_class = (relocation_class)untag_fixnum(array_nth(labels,i));
		cell offset = untag_fixnum(array_nth(labels,i + 1));
		cell target = untag_fixnum(array_nth(labels,i + 2));

		/* Synthesize an RT_HERE relocation so the operand is patched
		with an absolute address inside this very code block. */
		relocation_entry new_entry(RT_HERE,rel_class,offset);

		instruction_operand op(new_entry,compiled,0);
		op.store_value(target + (cell)compiled->xt());
	}
}
Expand Down
18 changes: 18 additions & 0 deletions vm/code_blocks.hpp
Expand Up @@ -48,6 +48,24 @@ struct code_block
{
factor::flush_icache((cell)this,size());
}

template<typename Iterator> void each_instruction_operand(Iterator &iter)
{
if(to_boolean(relocation))
{
byte_array *rels = (byte_array *)UNTAG(relocation);

cell index = 0;
cell length = (rels->capacity >> TAG_BITS) / sizeof(relocation_entry);

for(cell i = 0; i < length; i++)
{
relocation_entry rel = rels->data<relocation_entry>()[i];
iter(instruction_operand(rel,this,index));
index += rel.number_of_parameters();
}
}
}
};

}
9 changes: 7 additions & 2 deletions vm/code_heap.cpp
Expand Up @@ -59,6 +59,11 @@ void code_heap::code_heap_free(code_block *compiled)
allocator->free(compiled);
}

/* Flush the processor instruction cache over the entire code heap
   segment (seg->start, seg->size are passed straight through), so
   freshly written machine code becomes safely executable. */
void code_heap::flush_icache()
{
factor::flush_icache(seg->start,seg->size);
}

/* Allocate a code heap during startup */
void factor_vm::init_code_heap(cell size)
{
Expand Down Expand Up @@ -86,7 +91,7 @@ defining a new word. */
void factor_vm::update_code_heap_words()
{
word_updater updater(this);
iterate_code_heap(updater);
each_code_block(updater);
}

void factor_vm::primitive_modify_code_heap()
Expand Down Expand Up @@ -171,7 +176,7 @@ struct stack_trace_stripper {
void factor_vm::primitive_strip_stack_traces()
{
stack_trace_stripper stripper;
iterate_code_heap(stripper);
each_code_block(stripper);
}

}
1 change: 1 addition & 0 deletions vm/code_heap.hpp
Expand Up @@ -26,6 +26,7 @@ struct code_heap {
void set_marked_p(code_block *compiled);
void clear_mark_bits();
void code_heap_free(code_block *compiled);
void flush_icache();
};

struct code_heap_room {
Expand Down

0 comments on commit ca7bca6

Please sign in to comment.