From ea2e452c7af179927d00247f6829289cc772a667 Mon Sep 17 00:00:00 2001
From: Takashi Kokubun
Date: Fri, 14 Apr 2023 12:14:36 -0700
Subject: [PATCH] YJIT: Avoid cloning a Context for each insn

---
 yjit/src/backend/arm64/mod.rs  | 23 ++++++++---------------
 yjit/src/backend/ir.rs         | 26 +++++++++-----------------
 yjit/src/backend/x86_64/mod.rs | 27 ++++++++++-----------------
 yjit/src/core.rs               |  2 +-
 4 files changed, 28 insertions(+), 50 deletions(-)

diff --git a/yjit/src/backend/arm64/mod.rs b/yjit/src/backend/arm64/mod.rs
index e5a8963d63d7e6..8aa812408911e4 100644
--- a/yjit/src/backend/arm64/mod.rs
+++ b/yjit/src/backend/arm64/mod.rs
@@ -377,12 +377,10 @@ impl Assembler
         }
 
         let mut asm_local = Assembler::new_with_label_names(take(&mut self.label_names), take(&mut self.side_exits));
-        let insn_ctx = take(&mut self.insn_ctx);
         let asm = &mut asm_local;
         let mut iterator = self.into_draining_iter();
 
         while let Some((index, mut insn)) = iterator.next_mapped() {
-            asm.ctx = insn_ctx[index].clone(); // propagate insn_ctx
             // Here we're going to map the operands of the instruction to load
             // any Opnd::Value operands into registers if they are heap objects
             // such that only the Op::Load instruction needs to handle that
@@ -792,14 +790,9 @@ impl Assembler
             target: Target,
             asm: &mut Assembler,
             ocb: &mut Option<&mut OutlinedCb>,
-            insn_idx: usize,
         ) -> Target {
-            if let Target::SideExit { counter, pc, stack_size } = target {
-                let side_exit_context = SideExitContext {
-                    pc: pc.unwrap(),
-                    ctx: asm.insn_ctx[insn_idx].with_stack_size(stack_size.unwrap()),
-                };
-                let side_exit = asm.get_side_exit(&side_exit_context, counter, ocb.as_mut().unwrap());
+            if let Target::SideExit { counter, context } = target {
+                let side_exit = asm.get_side_exit(&context.unwrap(), counter, ocb.as_mut().unwrap());
                 Target::SideExitPtr(side_exit)
             } else {
                 target
@@ -1042,7 +1035,7 @@ impl Assembler
                     br(cb, opnd.into());
                 },
                 Insn::Jmp(target) => {
-                    match compile_side_exit(*target, self, ocb, insn_idx) {
+                    match compile_side_exit(*target, self, ocb) {
                         Target::CodePtr(dst_ptr) => {
                             emit_jmp_ptr(cb, dst_ptr, true);
                         },
@@ -1066,19 +1059,19 @@ impl Assembler
                     };
                 },
                 Insn::Je(target) | Insn::Jz(target) => {
-                    emit_conditional_jump::<{Condition::EQ}>(cb, compile_side_exit(*target, self, ocb, insn_idx));
+                    emit_conditional_jump::<{Condition::EQ}>(cb, compile_side_exit(*target, self, ocb));
                 },
                 Insn::Jne(target) | Insn::Jnz(target) => {
-                    emit_conditional_jump::<{Condition::NE}>(cb, compile_side_exit(*target, self, ocb, insn_idx));
+                    emit_conditional_jump::<{Condition::NE}>(cb, compile_side_exit(*target, self, ocb));
                 },
                 Insn::Jl(target) => {
-                    emit_conditional_jump::<{Condition::LT}>(cb, compile_side_exit(*target, self, ocb, insn_idx));
+                    emit_conditional_jump::<{Condition::LT}>(cb, compile_side_exit(*target, self, ocb));
                 },
                 Insn::Jbe(target) => {
-                    emit_conditional_jump::<{Condition::LS}>(cb, compile_side_exit(*target, self, ocb, insn_idx));
+                    emit_conditional_jump::<{Condition::LS}>(cb, compile_side_exit(*target, self, ocb));
                 },
                 Insn::Jo(target) => {
-                    emit_conditional_jump::<{Condition::VS}>(cb, compile_side_exit(*target, self, ocb, insn_idx));
+                    emit_conditional_jump::<{Condition::VS}>(cb, compile_side_exit(*target, self, ocb));
                 },
                 Insn::IncrCounter { mem, value } => {
                     let label = cb.new_label("incr_counter_loop".to_string());
diff --git a/yjit/src/backend/ir.rs b/yjit/src/backend/ir.rs
index 7326c8f0794604..1bf2dca04ee643 100644
--- a/yjit/src/backend/ir.rs
+++ b/yjit/src/backend/ir.rs
@@ -285,7 +285,7 @@ pub enum Target
     /// Pointer to a piece of YJIT-generated code
     CodePtr(CodePtr),
     /// Side exit with a counter
-    SideExit { counter: Option<Counter>, pc: Option<*mut VALUE>, stack_size: Option<u8> },
+    SideExit { counter: Option<Counter>, context: Option<SideExitContext> },
     /// Pointer to a side exit code
     SideExitPtr(CodePtr),
     /// A label within the generated code
@@ -295,7 +295,7 @@ pub enum Target
 impl Target
 {
     pub fn side_exit(counter: Option<Counter>) -> Target {
-        Target::SideExit { counter, pc: None, stack_size: None }
+        Target::SideExit { counter, context: None }
     }
 
     pub fn unwrap_label_idx(&self) -> usize {
@@ -911,7 +911,7 @@ impl fmt::Debug for Insn {
 }
 
 /// Set of variables used for generating side exits
-#[derive(Clone, Eq, Hash, PartialEq)]
+#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
 pub struct SideExitContext {
     /// PC of the instruction being compiled
     pub pc: *mut VALUE,
@@ -939,10 +939,6 @@ pub struct Assembler {
     /// Context for generating the current insn
     pub ctx: Context,
 
-    /// Parallel vec with insns
-    /// Past Context for this insn
-    pub(super) insn_ctx: Vec<Context>,
-
     /// Side exit caches for each SideExitContext
     pub(super) side_exits: HashMap<SideExitContext, CodePtr>,
 
@@ -966,7 +962,6 @@ impl Assembler
             reg_temps: Vec::default(),
             label_names,
             ctx: Context::default(),
-            insn_ctx: Vec::default(),
             side_exits,
             side_exit_pc: None,
             side_exit_stack_size: None,
@@ -1037,18 +1032,19 @@ impl Assembler
 
         // Set a side exit context to Target::SideExit
         let mut insn = insn;
-        if let Some(Target::SideExit { pc, stack_size, .. }) = insn.target_mut() {
+        if let Some(Target::SideExit { context, .. }) = insn.target_mut() {
             // We should skip this when this instruction is being copied from another Assembler.
-            if let (None, None) = (&pc, &stack_size) {
-                *pc = self.side_exit_pc;
-                *stack_size = self.side_exit_stack_size;
+            if context.is_none() {
+                *context = Some(SideExitContext {
+                    pc: self.side_exit_pc.unwrap(),
+                    ctx: self.ctx.with_stack_size(self.side_exit_stack_size.unwrap()),
+                });
             }
         }
 
         self.insns.push(insn);
         self.live_ranges.push(insn_idx);
         self.reg_temps.push(reg_temps);
-        self.insn_ctx.push(self.ctx.clone());
     }
 
     /// Get stack temps that are currently in a register
@@ -1110,13 +1106,11 @@ impl Assembler
         }
 
         let mut asm = Assembler::new_with_label_names(take(&mut self.label_names), take(&mut self.side_exits));
-        let insn_ctx = take(&mut self.insn_ctx);
         let regs = Assembler::get_temp_regs();
         let reg_temps = take(&mut self.reg_temps);
         let mut iterator = self.into_draining_iter();
 
         while let Some((index, mut insn)) = iterator.next_mapped() {
-            asm.ctx = insn_ctx[index].clone(); // propagate insn_ctx
             match &insn {
                 // The original insn is pushed to the new asm to satisfy ccall's reg_temps assertion.
                 Insn::RegTemps(_) => {} // noop
@@ -1268,11 +1262,9 @@ impl Assembler
 
         let live_ranges: Vec<usize> = take(&mut self.live_ranges);
         let mut asm = Assembler::new_with_label_names(take(&mut self.label_names), take(&mut self.side_exits));
-        let insn_ctx = take(&mut self.insn_ctx);
         let mut iterator = self.into_draining_iter();
 
         while let Some((index, mut insn)) = iterator.next_unmapped() {
-            asm.ctx = insn_ctx[index].clone(); // propagate insn_ctx
             // Check if this is the last instruction that uses an operand that
             // spans more than one instruction. In that case, return the
             // allocated register to the pool.
diff --git a/yjit/src/backend/x86_64/mod.rs b/yjit/src/backend/x86_64/mod.rs
index 7977fdd86971fa..de38cd98d5af2a 100644
--- a/yjit/src/backend/x86_64/mod.rs
+++ b/yjit/src/backend/x86_64/mod.rs
@@ -116,12 +116,10 @@ impl Assembler
     fn x86_split(mut self) -> Assembler
     {
         let live_ranges: Vec<usize> = take(&mut self.live_ranges);
-        let insn_ctx = take(&mut self.insn_ctx);
         let mut asm = Assembler::new_with_label_names(take(&mut self.label_names), take(&mut self.side_exits));
         let mut iterator = self.into_draining_iter();
 
         while let Some((index, mut insn)) = iterator.next_unmapped() {
-            asm.ctx = insn_ctx[index].clone(); // propagate insn_ctx
             // When we're iterating through the instructions with x86_split, we
             // need to know the previous live ranges in order to tell if a
             // register lasts beyond the current instruction. So instead of
@@ -419,14 +417,9 @@ impl Assembler
             target: Target,
             asm: &mut Assembler,
             ocb: &mut Option<&mut OutlinedCb>,
-            insn_idx: usize,
         ) -> Target {
-            if let Target::SideExit { counter, pc, stack_size } = target {
-                let side_exit_context = SideExitContext {
-                    pc: pc.unwrap(),
-                    ctx: asm.insn_ctx[insn_idx].with_stack_size(stack_size.unwrap()),
-                };
-                let side_exit = asm.get_side_exit(&side_exit_context, counter, ocb.as_mut().unwrap());
+            if let Target::SideExit { counter, context } = target {
+                let side_exit = asm.get_side_exit(&context.unwrap(), counter, ocb.as_mut().unwrap());
                 Target::SideExitPtr(side_exit)
             } else {
                 target
@@ -647,7 +640,7 @@ impl Assembler
 
                 // Conditional jump to a label
                 Insn::Jmp(target) => {
-                    match compile_side_exit(*target, self, ocb, insn_idx) {
+                    match compile_side_exit(*target, self, ocb) {
                         Target::CodePtr(code_ptr) | Target::SideExitPtr(code_ptr) => jmp_ptr(cb, code_ptr),
                         Target::Label(label_idx) => jmp_label(cb, label_idx),
                         Target::SideExit { .. } => unreachable!("Target::SideExit should have been compiled by compile_side_exit"),
@@ -655,7 +648,7 @@ impl Assembler
                 }
 
                 Insn::Je(target) => {
-                    match compile_side_exit(*target, self, ocb, insn_idx) {
+                    match compile_side_exit(*target, self, ocb) {
                         Target::CodePtr(code_ptr) | Target::SideExitPtr(code_ptr) => je_ptr(cb, code_ptr),
                         Target::Label(label_idx) => je_label(cb, label_idx),
                         Target::SideExit { .. } => unreachable!("Target::SideExit should have been compiled by compile_side_exit"),
@@ -663,7 +656,7 @@ impl Assembler
                 }
 
                 Insn::Jne(target) => {
-                    match compile_side_exit(*target, self, ocb, insn_idx) {
+                    match compile_side_exit(*target, self, ocb) {
                         Target::CodePtr(code_ptr) | Target::SideExitPtr(code_ptr) => jne_ptr(cb, code_ptr),
                         Target::Label(label_idx) => jne_label(cb, label_idx),
                         Target::SideExit { .. } => unreachable!("Target::SideExit should have been compiled by compile_side_exit"),
@@ -671,7 +664,7 @@ impl Assembler
                 }
 
                 Insn::Jl(target) => {
-                    match compile_side_exit(*target, self, ocb, insn_idx) {
+                    match compile_side_exit(*target, self, ocb) {
                         Target::CodePtr(code_ptr) | Target::SideExitPtr(code_ptr) => jl_ptr(cb, code_ptr),
                         Target::Label(label_idx) => jl_label(cb, label_idx),
                         Target::SideExit { .. } => unreachable!("Target::SideExit should have been compiled by compile_side_exit"),
@@ -679,7 +672,7 @@ impl Assembler
                 },
 
                 Insn::Jbe(target) => {
-                    match compile_side_exit(*target, self, ocb, insn_idx) {
+                    match compile_side_exit(*target, self, ocb) {
                         Target::CodePtr(code_ptr) | Target::SideExitPtr(code_ptr) => jbe_ptr(cb, code_ptr),
                         Target::Label(label_idx) => jbe_label(cb, label_idx),
                         Target::SideExit { .. } => unreachable!("Target::SideExit should have been compiled by compile_side_exit"),
@@ -687,7 +680,7 @@ impl Assembler
                 },
 
                 Insn::Jz(target) => {
-                    match compile_side_exit(*target, self, ocb, insn_idx) {
+                    match compile_side_exit(*target, self, ocb) {
                         Target::CodePtr(code_ptr) | Target::SideExitPtr(code_ptr) => jz_ptr(cb, code_ptr),
                         Target::Label(label_idx) => jz_label(cb, label_idx),
                         Target::SideExit { .. } => unreachable!("Target::SideExit should have been compiled by compile_side_exit"),
@@ -695,7 +688,7 @@ impl Assembler
                 }
 
                 Insn::Jnz(target) => {
-                    match compile_side_exit(*target, self, ocb, insn_idx) {
+                    match compile_side_exit(*target, self, ocb) {
                         Target::CodePtr(code_ptr) | Target::SideExitPtr(code_ptr) => jnz_ptr(cb, code_ptr),
                         Target::Label(label_idx) => jnz_label(cb, label_idx),
                         Target::SideExit { .. } => unreachable!("Target::SideExit should have been compiled by compile_side_exit"),
@@ -703,7 +696,7 @@ impl Assembler
                 }
 
                 Insn::Jo(target) => {
-                    match compile_side_exit(*target, self, ocb, insn_idx) {
+                    match compile_side_exit(*target, self, ocb) {
                         Target::CodePtr(code_ptr) | Target::SideExitPtr(code_ptr) => jo_ptr(cb, code_ptr),
                         Target::Label(label_idx) => jo_label(cb, label_idx),
                         Target::SideExit { .. } => unreachable!("Target::SideExit should have been compiled by compile_side_exit"),
diff --git a/yjit/src/core.rs b/yjit/src/core.rs
index be569af6887a5c..5362618a285d27 100644
--- a/yjit/src/core.rs
+++ b/yjit/src/core.rs
@@ -417,7 +417,7 @@ impl RegTemps {
 /// Code generation context
 /// Contains information we can use to specialize/optimize code
 /// There are a lot of context objects so we try to keep the size small.
-#[derive(Clone, Default, Eq, Hash, PartialEq, Debug)]
+#[derive(Clone, Copy, Default, Eq, Hash, PartialEq, Debug)]
 pub struct Context {
     // Number of values currently on the temporary stack
     stack_size: u8,
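
Illustrative note (not part of the patch): the hunks above drop the parallel insn_ctx vector, which cloned the current Context for every pushed instruction, and instead capture a SideExitContext eagerly in push_insn, but only for instructions whose target is a side exit. The sketch below is a minimal, self-contained Rust model of that pattern; Context, SideExitContext, Target, Insn, and Assembler here are simplified stand-ins for illustration, not YJIT's actual definitions.

// Minimal sketch (simplified stand-in types, not YJIT's real ones) of the pattern
// this patch adopts: capture a side-exit context once, at push time, only for
// instructions that can side-exit, instead of cloning the current Context into
// a parallel Vec for every instruction.

#[derive(Clone, Copy, Debug, Default)]
struct Context {
    stack_size: u8,
}

impl Context {
    // Return a copy of the context with a different stack size (Context is Copy).
    fn with_stack_size(mut self, stack_size: u8) -> Context {
        self.stack_size = stack_size;
        self
    }
}

#[derive(Clone, Copy, Debug)]
struct SideExitContext {
    pc: usize, // stand-in for *mut VALUE
    ctx: Context,
}

#[allow(dead_code)]
#[derive(Clone, Copy, Debug)]
enum Target {
    SideExit { context: Option<SideExitContext> },
    Label(usize),
}

#[derive(Debug)]
enum Insn {
    Add,
    Jmp(Target),
}

#[derive(Default)]
struct Assembler {
    ctx: Context,
    side_exit_pc: Option<usize>,
    side_exit_stack_size: Option<u8>,
    insns: Vec<Insn>,
}

impl Assembler {
    // Fill in the side-exit context lazily: only side-exit targets pay for it, and
    // insns copied from another Assembler keep the context they already carry.
    fn push_insn(&mut self, mut insn: Insn) {
        if let Insn::Jmp(Target::SideExit { context }) = &mut insn {
            if context.is_none() {
                *context = Some(SideExitContext {
                    pc: self.side_exit_pc.expect("side_exit_pc must be set first"),
                    ctx: self.ctx.with_stack_size(
                        self.side_exit_stack_size.expect("stack size must be set first"),
                    ),
                });
            }
        }
        self.insns.push(insn);
    }
}

fn main() {
    let mut asm = Assembler::default();
    asm.side_exit_pc = Some(0x1000);
    asm.side_exit_stack_size = Some(2);
    asm.push_insn(Insn::Add); // no Context clone for ordinary insns
    asm.push_insn(Insn::Jmp(Target::SideExit { context: None }));
    println!("{:#?}", asm.insns);
}

The Copy derives the patch adds to Context (core.rs) and SideExitContext (ir.rs) are what make this captured value cheap to copy, which is what lets the per-instruction clone go away.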