@@ -216,47 +216,12 @@ static int skip_nops(u8 *buf, int offset, int len)
216216	return offset;
217217}
218218
219-/*
220- * Optimize a sequence of NOPs, possibly preceded by an unconditional jump
221- * to the end of the NOP sequence into a single NOP.
222- */
223-static bool
224-__optimize_nops(const u8 * const instr, u8 *buf, size_t len, struct insn *insn, int *next, int *prev, int *target)
225-{
226-	int i = *next - insn->length;
227-
228-	switch (insn->opcode.bytes[0]) {
229-	case JMP8_INSN_OPCODE:
230-	case JMP32_INSN_OPCODE:
231-		*prev = i;
232-		*target = *next + insn->immediate.value;
233-		return false;
234-	}
235-
236-	if (insn_is_nop(insn)) {
237-		int nop = i;
238-
239-		*next = skip_nops(buf, *next, len);
240-		if (*target && *next == *target)
241-			nop = *prev;
242-
243-		add_nop(buf + nop, *next - nop);
244-		DUMP_BYTES(ALT, buf, len, "%px: [%d:%d) optimized NOPs: ", instr, nop, *next);
245-		return true;
246-	}
247-
248-	*target = 0;
249-	return false;
250-}
251-
252219/*
253220 * "noinline" to cause control flow change and thus invalidate I$ and
254221 * cause refetch after modification.
255222 */
256-static void __init_or_module noinline optimize_nops(const u8 * const instr, u8 *buf, size_t len)
223+static void noinline optimize_nops(const u8 * const instr, u8 *buf, size_t len)
257224{
258-	int prev, target = 0;
259-
260225	for (int next, i = 0; i < len; i = next) {
261226		struct insn insn;
262227
@@ -265,7 +230,14 @@ static void __init_or_module noinline optimize_nops(const u8 * const instr, u8 *
265230
266231		next = i + insn.length;
267232
268-		__optimize_nops(instr, buf, len, &insn, &next, &prev, &target);
233+		if (insn_is_nop(&insn)) {
234+			int nop = i;
235+
236+			next = skip_nops(buf, next, len);
237+
238+			add_nop(buf + nop, next - nop);
239+			DUMP_BYTES(ALT, buf, len, "%px: [%d:%d) optimized NOPs: ", instr, nop, next);
240+		}
269241	}
270242}
271243
@@ -339,10 +311,8 @@ bool need_reloc(unsigned long offset, u8 *src, size_t src_len)
339311	return (target < src || target > src + src_len);
340312}
341313
342-void apply_relocation(u8 *buf, const u8 * const instr, size_t instrlen, u8 *repl, size_t repl_len)
314+static void __apply_relocation(u8 *buf, const u8 * const instr, size_t instrlen, u8 *repl, size_t repl_len)
343315{
344-	int prev, target = 0;
345-
346316	for (int next, i = 0; i < instrlen; i = next) {
347317		struct insn insn;
348318
@@ -351,9 +321,6 @@ void apply_relocation(u8 *buf, const u8 * const instr, size_t instrlen, u8 *repl
351321
352322		next = i + insn.length;
353323
354-		if (__optimize_nops(instr, buf, instrlen, &insn, &next, &prev, &target))
355-			continue;
356-
357324		switch (insn.opcode.bytes[0]) {
358325		case 0x0f:
359326			if (insn.opcode.bytes[1] < 0x80 ||
@@ -398,6 +365,12 @@ void apply_relocation(u8 *buf, const u8 * const instr, size_t instrlen, u8 *repl
398365 }
399366}
400367
368+void apply_relocation(u8 *buf, const u8 * const instr, size_t instrlen, u8 *repl, size_t repl_len)
369+{
370+	__apply_relocation(buf, instr, instrlen, repl, repl_len);
371+	optimize_nops(instr, buf, repl_len);
372+}
373+
401374/* Low-level backend functions usable from alternative code replacements. */
402375DEFINE_ASM_FUNC(nop_func, "", .entry.text);
403376EXPORT_SYMBOL_GPL(nop_func);
0 commit comments