Skip to content

Commit

Permalink
Port invokebuiltin* insns to the new backend IR (#375)
Browse files — browse the repository at this point in the history
* Port invokebuiltin* insns to the new backend IR

* Fix the C_ARG_OPNDS check boundary
(branch information)
k0kubun committed Aug 25, 2022
1 parent 371c572 commit cee548e
Show file tree
Hide file tree
Showing 4 changed files with 49 additions and 42 deletions.
22 changes: 22 additions & 0 deletions bootstraptest/test_yjit_new_backend.rb
Expand Up @@ -303,6 +303,28 @@ def foo
foo
}

# invokebuiltin
assert_equal '123', %q{
def foo(obj)
obj.foo = 123
end
struct = Struct.new(:foo)
obj = struct.new
foo(obj)
}

# invokebuiltin_delegate
assert_equal '.', %q{
def foo(path)
Dir.open(path).path
end
foo(".")
}

# opt_invokebuiltin_delegate_leave
assert_equal '[0]', %q{"\x00".unpack("c")}

# opt_case_dispatch
assert_equal 'true', %q{
case 2
Expand Down
2 changes: 1 addition & 1 deletion yjit/src/backend/arm64/mod.rs
Expand Up @@ -196,7 +196,7 @@ impl Assembler
}
},
Op::CCall => {
assert!(opnds.len() < C_ARG_OPNDS.len());
assert!(opnds.len() <= C_ARG_OPNDS.len());

// For each of the operands we're going to first load them
// into a register and then move them into the correct
Expand Down
2 changes: 1 addition & 1 deletion yjit/src/backend/x86_64/mod.rs
Expand Up @@ -400,7 +400,7 @@ impl Assembler
// C function call
Op::CCall => {
// Temporary
assert!(insn.opnds.len() < _C_ARG_OPNDS.len());
assert!(insn.opnds.len() <= _C_ARG_OPNDS.len());

// For each operand
for (idx, opnd) in insn.opnds.iter().enumerate() {
Expand Down
65 changes: 25 additions & 40 deletions yjit/src/codegen.rs
Expand Up @@ -5818,45 +5818,40 @@ fn gen_getblockparam(
KeepCompiling
}
*/

fn gen_invokebuiltin(
jit: &mut JITState,
ctx: &mut Context,
cb: &mut CodeBlock,
asm: &mut Assembler,
_ocb: &mut OutlinedCb,
) -> CodegenStatus {
let bf: *const rb_builtin_function = jit_get_arg(jit, 0).as_ptr();
let bf_argc: usize = unsafe { (*bf).argc }.try_into().expect("non negative argc");

// ec, self, and arguments
if bf_argc + 2 > C_ARG_REGS.len() {
if bf_argc + 2 > C_ARG_OPNDS.len() {
return CantCompile;
}

// If the calls don't allocate, do they need up to date PC, SP?
jit_prepare_routine_call(jit, ctx, cb, REG0);
jit_prepare_routine_call(jit, ctx, asm);

// Call the builtin func (ec, recv, arg1, arg2, ...)
mov(cb, C_ARG_REGS[0], REG_EC);
mov(
cb,
C_ARG_REGS[1],
mem_opnd(64, REG_CFP, RUBY_OFFSET_CFP_SELF),
);
let mut args = vec![EC, Opnd::mem(64, CFP, RUBY_OFFSET_CFP_SELF)];

// Copy arguments from locals
for i in 0..bf_argc {
let stack_opnd = ctx.stack_opnd((bf_argc - i - 1) as i32);
let c_arg_reg = C_ARG_REGS[2 + i];
mov(cb, c_arg_reg, stack_opnd);
args.push(stack_opnd);
}

call_ptr(cb, REG0, unsafe { (*bf).func_ptr } as *const u8);
let val = asm.ccall(unsafe { (*bf).func_ptr } as *const u8, args);

// Push the return value
ctx.stack_pop(bf_argc);
let stack_ret = ctx.stack_push(Type::Unknown);
mov(cb, stack_ret, RAX);
asm.mov(stack_ret, val);

KeepCompiling
}
Expand All @@ -5867,52 +5862,44 @@ fn gen_invokebuiltin(
fn gen_opt_invokebuiltin_delegate(
jit: &mut JITState,
ctx: &mut Context,
cb: &mut CodeBlock,
asm: &mut Assembler,
_ocb: &mut OutlinedCb,
) -> CodegenStatus {
let bf: *const rb_builtin_function = jit_get_arg(jit, 0).as_ptr();
let bf_argc = unsafe { (*bf).argc };
let start_index = jit_get_arg(jit, 1).as_i32();

// ec, self, and arguments
if bf_argc + 2 > (C_ARG_REGS.len() as i32) {
if bf_argc + 2 > (C_ARG_OPNDS.len() as i32) {
return CantCompile;
}

// If the calls don't allocate, do they need up to date PC, SP?
jit_prepare_routine_call(jit, ctx, cb, REG0);
if bf_argc > 0 {
// Load environment pointer EP from CFP
mov(cb, REG0, mem_opnd(64, REG_CFP, RUBY_OFFSET_CFP_EP));
}
jit_prepare_routine_call(jit, ctx, asm);

// Call the builtin func (ec, recv, arg1, arg2, ...)
mov(cb, C_ARG_REGS[0], REG_EC);
mov(
cb,
C_ARG_REGS[1],
mem_opnd(64, REG_CFP, RUBY_OFFSET_CFP_SELF),
);
let mut args = vec![EC, Opnd::mem(64, CFP, RUBY_OFFSET_CFP_SELF)];

// Copy arguments from locals
for i in 0..bf_argc {
let table_size = unsafe { get_iseq_body_local_table_size(jit.iseq) };
let offs: i32 = -(table_size as i32) - (VM_ENV_DATA_SIZE as i32) + 1 + start_index + i;
let local_opnd = mem_opnd(64, REG0, offs * (SIZEOF_VALUE as i32));
let offs: usize = (i + 2) as usize;
let c_arg_reg = C_ARG_REGS[offs];
mov(cb, c_arg_reg, local_opnd);
if bf_argc > 0 {
// Load environment pointer EP from CFP
let ep = asm.load(Opnd::mem(64, CFP, RUBY_OFFSET_CFP_EP));

for i in 0..bf_argc {
let table_size = unsafe { get_iseq_body_local_table_size(jit.iseq) };
let offs: i32 = -(table_size as i32) - (VM_ENV_DATA_SIZE as i32) + 1 + start_index + i;
let local_opnd = Opnd::mem(64, ep, offs * (SIZEOF_VALUE as i32));
args.push(local_opnd);
}
}
call_ptr(cb, REG0, unsafe { (*bf).func_ptr } as *const u8);
let val = asm.ccall(unsafe { (*bf).func_ptr } as *const u8, args);

// Push the return value
let stack_ret = ctx.stack_push(Type::Unknown);
mov(cb, stack_ret, RAX);
asm.mov(stack_ret, val);

KeepCompiling
}
*/

/// Maps a YARV opcode to a code generation function (if supported)
fn get_gen_fn(opcode: VALUE) -> Option<InsnGenFn> {
Expand Down Expand Up @@ -5982,12 +5969,10 @@ fn get_gen_fn(opcode: VALUE) -> Option<InsnGenFn> {
YARVINSN_opt_size => Some(gen_opt_size),
YARVINSN_opt_length => Some(gen_opt_length),
YARVINSN_opt_regexpmatch2 => Some(gen_opt_regexpmatch2),
/*
YARVINSN_opt_getinlinecache => Some(gen_opt_getinlinecache),
//YARVINSN_opt_getinlinecache => Some(gen_opt_getinlinecache),
YARVINSN_invokebuiltin => Some(gen_invokebuiltin),
YARVINSN_opt_invokebuiltin_delegate => Some(gen_opt_invokebuiltin_delegate),
YARVINSN_opt_invokebuiltin_delegate_leave => Some(gen_opt_invokebuiltin_delegate),
*/
YARVINSN_opt_case_dispatch => Some(gen_opt_case_dispatch),
YARVINSN_branchif => Some(gen_branchif),
YARVINSN_branchunless => Some(gen_branchunless),
Expand Down

0 comments on commit cee548e

Please sign in to comment.