YJIT: Avoid cloning a Context for each insn
k0kubun committed Apr 14, 2023
Commit ea2e452 (parent: cbd868b)
Showing 4 changed files with 28 additions and 50 deletions.
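A condensed, hand-written sketch of the end state the hunks below arrive at (stubbed types, simplified fields, Counter omitted; only identifiers that also appear in the diff are real):

// Sketch only: the real Context, Target, and SideExitContext carry more
// fields and variants than shown here.
type VALUE = u64;

#[derive(Clone, Copy, Default, Eq, Hash, PartialEq, Debug)]
pub struct Context {
    stack_size: u8,
}

impl Context {
    pub fn with_stack_size(mut self, stack_size: u8) -> Context {
        // Takes `self` by value: with Copy this is a cheap bit copy, not a clone.
        self.stack_size = stack_size;
        self
    }
}

#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub struct SideExitContext {
    pub pc: *mut VALUE,
    pub ctx: Context,
}

pub enum Target {
    // Before: SideExit { counter: Option<Counter>, pc: Option<*mut VALUE>, stack_size: Option<u8> },
    // plus a parallel `insn_ctx: Vec<Context>` on the Assembler, cloned once per insn.
    // After: the whole side-exit context rides along with the target itself.
    SideExit { context: Option<SideExitContext> },
}

fn main() {
    // push_insn time: capture the context once, while it is current.
    let ctx = Context::default();
    let pc: *mut VALUE = std::ptr::null_mut();
    let target = Target::SideExit {
        context: Some(SideExitContext { pc, ctx: ctx.with_stack_size(3) }),
    };

    // emit time: compile_side_exit just unwraps it; no insn_idx lookup, no clone.
    match target {
        Target::SideExit { context } => {
            let exit_ctx = context.expect("set at push time");
            println!("exit at pc {:p} with stack_size {}", exit_ctx.pc, exit_ctx.ctx.stack_size);
        }
    }
}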
yjit/src/backend/arm64/mod.rs (23 changes: 8 additions & 15 deletions)
@@ -377,12 +377,10 @@ impl Assembler
         }

         let mut asm_local = Assembler::new_with_label_names(take(&mut self.label_names), take(&mut self.side_exits));
-        let insn_ctx = take(&mut self.insn_ctx);
         let asm = &mut asm_local;
         let mut iterator = self.into_draining_iter();

         while let Some((index, mut insn)) = iterator.next_mapped() {
-            asm.ctx = insn_ctx[index].clone(); // propagate insn_ctx
             // Here we're going to map the operands of the instruction to load
             // any Opnd::Value operands into registers if they are heap objects
             // such that only the Op::Load instruction needs to handle that
@@ -792,14 +790,9 @@ impl Assembler
             target: Target,
             asm: &mut Assembler,
             ocb: &mut Option<&mut OutlinedCb>,
-            insn_idx: usize,
         ) -> Target {
-            if let Target::SideExit { counter, pc, stack_size } = target {
-                let side_exit_context = SideExitContext {
-                    pc: pc.unwrap(),
-                    ctx: asm.insn_ctx[insn_idx].with_stack_size(stack_size.unwrap()),
-                };
-                let side_exit = asm.get_side_exit(&side_exit_context, counter, ocb.as_mut().unwrap());
+            if let Target::SideExit { counter, context } = target {
+                let side_exit = asm.get_side_exit(&context.unwrap(), counter, ocb.as_mut().unwrap());
                 Target::SideExitPtr(side_exit)
             } else {
                 target
@@ -1042,7 +1035,7 @@ impl Assembler
                     br(cb, opnd.into());
                 },
                 Insn::Jmp(target) => {
-                    match compile_side_exit(*target, self, ocb, insn_idx) {
+                    match compile_side_exit(*target, self, ocb) {
                         Target::CodePtr(dst_ptr) => {
                             emit_jmp_ptr(cb, dst_ptr, true);
                         },
@@ -1066,19 +1059,19 @@
                     };
                 },
                 Insn::Je(target) | Insn::Jz(target) => {
-                    emit_conditional_jump::<{Condition::EQ}>(cb, compile_side_exit(*target, self, ocb, insn_idx));
+                    emit_conditional_jump::<{Condition::EQ}>(cb, compile_side_exit(*target, self, ocb));
                 },
                 Insn::Jne(target) | Insn::Jnz(target) => {
-                    emit_conditional_jump::<{Condition::NE}>(cb, compile_side_exit(*target, self, ocb, insn_idx));
+                    emit_conditional_jump::<{Condition::NE}>(cb, compile_side_exit(*target, self, ocb));
                 },
                 Insn::Jl(target) => {
-                    emit_conditional_jump::<{Condition::LT}>(cb, compile_side_exit(*target, self, ocb, insn_idx));
+                    emit_conditional_jump::<{Condition::LT}>(cb, compile_side_exit(*target, self, ocb));
                 },
                 Insn::Jbe(target) => {
-                    emit_conditional_jump::<{Condition::LS}>(cb, compile_side_exit(*target, self, ocb, insn_idx));
+                    emit_conditional_jump::<{Condition::LS}>(cb, compile_side_exit(*target, self, ocb));
                 },
                 Insn::Jo(target) => {
-                    emit_conditional_jump::<{Condition::VS}>(cb, compile_side_exit(*target, self, ocb, insn_idx));
+                    emit_conditional_jump::<{Condition::VS}>(cb, compile_side_exit(*target, self, ocb));
                 },
                 Insn::IncrCounter { mem, value } => {
                     let label = cb.new_label("incr_counter_loop".to_string());
yjit/src/backend/ir.rs (26 changes: 9 additions & 17 deletions)
@@ -285,7 +285,7 @@ pub enum Target
     /// Pointer to a piece of YJIT-generated code
     CodePtr(CodePtr),
     /// Side exit with a counter
-    SideExit { counter: Option<Counter>, pc: Option<*mut VALUE>, stack_size: Option<u8> },
+    SideExit { counter: Option<Counter>, context: Option<SideExitContext> },
     /// Pointer to a side exit code
     SideExitPtr(CodePtr),
     /// A label within the generated code
@@ -295,7 +295,7 @@ pub enum Target
 impl Target
 {
     pub fn side_exit(counter: Option<Counter>) -> Target {
-        Target::SideExit { counter, pc: None, stack_size: None }
+        Target::SideExit { counter, context: None }
     }

     pub fn unwrap_label_idx(&self) -> usize {
@@ -911,7 +911,7 @@ impl fmt::Debug for Insn {
 }

 /// Set of variables used for generating side exits
-#[derive(Clone, Eq, Hash, PartialEq)]
+#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
 pub struct SideExitContext {
     /// PC of the instruction being compiled
     pub pc: *mut VALUE,
@@ -939,10 +939,6 @@ pub struct Assembler {
     /// Context for generating the current insn
     pub ctx: Context,

-    /// Parallel vec with insns
-    /// Past Context for this insn
-    pub(super) insn_ctx: Vec<Context>,
-
     /// Side exit caches for each SideExitContext
     pub(super) side_exits: HashMap<SideExitContext, CodePtr>,

@@ -966,7 +962,6 @@ impl Assembler
             reg_temps: Vec::default(),
             label_names,
             ctx: Context::default(),
-            insn_ctx: Vec::default(),
             side_exits,
             side_exit_pc: None,
             side_exit_stack_size: None,
@@ -1037,18 +1032,19 @@

         // Set a side exit context to Target::SideExit
         let mut insn = insn;
-        if let Some(Target::SideExit { pc, stack_size, .. }) = insn.target_mut() {
+        if let Some(Target::SideExit { context, .. }) = insn.target_mut() {
             // We should skip this when this instruction is being copied from another Assembler.
-            if let (None, None) = (&pc, &stack_size) {
-                *pc = self.side_exit_pc;
-                *stack_size = self.side_exit_stack_size;
+            if context.is_none() {
+                *context = Some(SideExitContext {
+                    pc: self.side_exit_pc.unwrap(),
+                    ctx: self.ctx.with_stack_size(self.side_exit_stack_size.unwrap()),
+                });
             }
         }

         self.insns.push(insn);
         self.live_ranges.push(insn_idx);
         self.reg_temps.push(reg_temps);
-        self.insn_ctx.push(self.ctx.clone());
     }

     /// Get stack temps that are currently in a register
@@ -1110,13 +1106,11 @@
         }

         let mut asm = Assembler::new_with_label_names(take(&mut self.label_names), take(&mut self.side_exits));
-        let insn_ctx = take(&mut self.insn_ctx);
         let regs = Assembler::get_temp_regs();
         let reg_temps = take(&mut self.reg_temps);
         let mut iterator = self.into_draining_iter();

         while let Some((index, mut insn)) = iterator.next_mapped() {
-            asm.ctx = insn_ctx[index].clone(); // propagate insn_ctx
             match &insn {
                 // The original insn is pushed to the new asm to satisfy ccall's reg_temps assertion.
                 Insn::RegTemps(_) => {} // noop
@@ -1268,11 +1262,9 @@

         let live_ranges: Vec<usize> = take(&mut self.live_ranges);
         let mut asm = Assembler::new_with_label_names(take(&mut self.label_names), take(&mut self.side_exits));
-        let insn_ctx = take(&mut self.insn_ctx);
         let mut iterator = self.into_draining_iter();

         while let Some((index, mut insn)) = iterator.next_unmapped() {
-            asm.ctx = insn_ctx[index].clone(); // propagate insn_ctx
             // Check if this is the last instruction that uses an operand that
             // spans more than one instruction. In that case, return the
             // allocated register to the pool.
yjit/src/backend/x86_64/mod.rs (27 changes: 10 additions & 17 deletions)
@@ -116,12 +116,10 @@ impl Assembler
     fn x86_split(mut self) -> Assembler
     {
         let live_ranges: Vec<usize> = take(&mut self.live_ranges);
-        let insn_ctx = take(&mut self.insn_ctx);
         let mut asm = Assembler::new_with_label_names(take(&mut self.label_names), take(&mut self.side_exits));
         let mut iterator = self.into_draining_iter();

         while let Some((index, mut insn)) = iterator.next_unmapped() {
-            asm.ctx = insn_ctx[index].clone(); // propagate insn_ctx
             // When we're iterating through the instructions with x86_split, we
             // need to know the previous live ranges in order to tell if a
             // register lasts beyond the current instruction. So instead of
@@ -419,14 +417,9 @@ impl Assembler
             target: Target,
             asm: &mut Assembler,
             ocb: &mut Option<&mut OutlinedCb>,
-            insn_idx: usize,
         ) -> Target {
-            if let Target::SideExit { counter, pc, stack_size } = target {
-                let side_exit_context = SideExitContext {
-                    pc: pc.unwrap(),
-                    ctx: asm.insn_ctx[insn_idx].with_stack_size(stack_size.unwrap()),
-                };
-                let side_exit = asm.get_side_exit(&side_exit_context, counter, ocb.as_mut().unwrap());
+            if let Target::SideExit { counter, context } = target {
+                let side_exit = asm.get_side_exit(&context.unwrap(), counter, ocb.as_mut().unwrap());
                 Target::SideExitPtr(side_exit)
             } else {
                 target
@@ -647,63 +640,63 @@ impl Assembler

                 // Conditional jump to a label
                 Insn::Jmp(target) => {
-                    match compile_side_exit(*target, self, ocb, insn_idx) {
+                    match compile_side_exit(*target, self, ocb) {
                         Target::CodePtr(code_ptr) | Target::SideExitPtr(code_ptr) => jmp_ptr(cb, code_ptr),
                         Target::Label(label_idx) => jmp_label(cb, label_idx),
                         Target::SideExit { .. } => unreachable!("Target::SideExit should have been compiled by compile_side_exit"),
                     }
                 }

                 Insn::Je(target) => {
-                    match compile_side_exit(*target, self, ocb, insn_idx) {
+                    match compile_side_exit(*target, self, ocb) {
                         Target::CodePtr(code_ptr) | Target::SideExitPtr(code_ptr) => je_ptr(cb, code_ptr),
                         Target::Label(label_idx) => je_label(cb, label_idx),
                         Target::SideExit { .. } => unreachable!("Target::SideExit should have been compiled by compile_side_exit"),
                     }
                 }

                 Insn::Jne(target) => {
-                    match compile_side_exit(*target, self, ocb, insn_idx) {
+                    match compile_side_exit(*target, self, ocb) {
                         Target::CodePtr(code_ptr) | Target::SideExitPtr(code_ptr) => jne_ptr(cb, code_ptr),
                         Target::Label(label_idx) => jne_label(cb, label_idx),
                         Target::SideExit { .. } => unreachable!("Target::SideExit should have been compiled by compile_side_exit"),
                     }
                 }

                 Insn::Jl(target) => {
-                    match compile_side_exit(*target, self, ocb, insn_idx) {
+                    match compile_side_exit(*target, self, ocb) {
                         Target::CodePtr(code_ptr) | Target::SideExitPtr(code_ptr) => jl_ptr(cb, code_ptr),
                         Target::Label(label_idx) => jl_label(cb, label_idx),
                         Target::SideExit { .. } => unreachable!("Target::SideExit should have been compiled by compile_side_exit"),
                     }
                 },

                 Insn::Jbe(target) => {
-                    match compile_side_exit(*target, self, ocb, insn_idx) {
+                    match compile_side_exit(*target, self, ocb) {
                         Target::CodePtr(code_ptr) | Target::SideExitPtr(code_ptr) => jbe_ptr(cb, code_ptr),
                         Target::Label(label_idx) => jbe_label(cb, label_idx),
                         Target::SideExit { .. } => unreachable!("Target::SideExit should have been compiled by compile_side_exit"),
                     }
                 },

                 Insn::Jz(target) => {
-                    match compile_side_exit(*target, self, ocb, insn_idx) {
+                    match compile_side_exit(*target, self, ocb) {
                         Target::CodePtr(code_ptr) | Target::SideExitPtr(code_ptr) => jz_ptr(cb, code_ptr),
                         Target::Label(label_idx) => jz_label(cb, label_idx),
                         Target::SideExit { .. } => unreachable!("Target::SideExit should have been compiled by compile_side_exit"),
                     }
                 }

                 Insn::Jnz(target) => {
-                    match compile_side_exit(*target, self, ocb, insn_idx) {
+                    match compile_side_exit(*target, self, ocb) {
                         Target::CodePtr(code_ptr) | Target::SideExitPtr(code_ptr) => jnz_ptr(cb, code_ptr),
                         Target::Label(label_idx) => jnz_label(cb, label_idx),
                         Target::SideExit { .. } => unreachable!("Target::SideExit should have been compiled by compile_side_exit"),
                     }
                 }

                 Insn::Jo(target) => {
-                    match compile_side_exit(*target, self, ocb, insn_idx) {
+                    match compile_side_exit(*target, self, ocb) {
                         Target::CodePtr(code_ptr) | Target::SideExitPtr(code_ptr) => jo_ptr(cb, code_ptr),
                         Target::Label(label_idx) => jo_label(cb, label_idx),
                         Target::SideExit { .. } => unreachable!("Target::SideExit should have been compiled by compile_side_exit"),
yjit/src/core.rs (2 changes: 1 addition & 1 deletion)
@@ -417,7 +417,7 @@ impl RegTemps {
 /// Code generation context
 /// Contains information we can use to specialize/optimize code
 /// There are a lot of context objects so we try to keep the size small.
-#[derive(Clone, Default, Eq, Hash, PartialEq, Debug)]
+#[derive(Clone, Copy, Default, Eq, Hash, PartialEq, Debug)]
 pub struct Context {
     // Number of values currently on the temporary stack
     stack_size: u8,
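An aside on the Copy derives above (illustrative only; the field names here are placeholders, not the full Context): deriving Copy on a small plain-data struct lets callers take snapshots by plain assignment, which is what makes the per-insn `.clone()` calls and the parallel `insn_ctx` vec unnecessary elsewhere in this commit.

#[derive(Clone, Copy, Debug)]
struct Context {
    stack_size: u8,
    sp_offset: i8,
}

fn main() {
    let current = Context { stack_size: 2, sp_offset: 2 };
    let snapshot = current; // implicit bit copy; no .clone(), `current` stays usable
    let exit_ctx = Context { stack_size: 3, ..snapshot };
    println!("{:?} {:?} {:?}", current, snapshot, exit_ctx);
}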