-
Notifications
You must be signed in to change notification settings - Fork 1
List of code transformations
Harishankar Vishwanathan edited this page Sep 10, 2023
·
9 revisions
-
fls() and fls64() are converted to certain LLVM intrinsics by clang when the
default compilation flags are used. Use an alternate implementation of fls()
and fls64() from here (external link). -
mark_reg_known() (resp. mark_reg_unknown()) uses memset to set (resp. clear)
bpf_reg_state fields to some defaults (resp. 0). We don't need this in the
LLVM IR. -
insert alu wrapper: a function like the following is injected for each ALU bpf instruction:
void adjust_scalar_min_max_vals_BPF_AND(struct bpf_reg_state *dst_reg,
struct bpf_reg_state *src_reg)
{{
struct bpf_verifier_env env;
struct bpf_insn insn = BPF_ALU64_REG(BPF_AND, BPF_REG_1, BPF_REG_2);
dst_reg->type = SCALAR_VALUE;
src_reg->type = SCALAR_VALUE;
adjust_scalar_min_max_vals(&env, &insn, dst_reg, *src_reg);
}}
- insert jump wrapper: a function like the following is injected for each jump bpf instruction (the opcode is filled into the {} placeholder):
# v5.13, v5.10, v5.7,
wrapper_jmp_4 = '''
void check_cond_jmp_op_BPF_JSGE(struct bpf_reg_state *dst_reg,
struct bpf_reg_state *src_reg,
struct bpf_reg_state *other_branch_dst_reg,
struct bpf_reg_state *other_branch_src_reg)
{{
/* Setup */
struct bpf_insn insn;
u8 opcode;
bool is_jmp32;
int pred = -1;
insn = BPF_JMP_REG({}, BPF_REG_1, BPF_REG_2, 0);
opcode = BPF_OP(insn.code);
dst_reg->type = SCALAR_VALUE;
src_reg->type = SCALAR_VALUE;
/* Perform custom push_stack to make sure we have don't have garbage values
for other_branch_regs in case pred != -1 */
push_stack___(other_branch_dst_reg, dst_reg);
push_stack___(other_branch_src_reg, src_reg);
/* Kernel copy-pasted code begins */
is_jmp32 = BPF_CLASS(insn.code) == BPF_JMP32;
if (BPF_SRC(insn.code) == BPF_K) {{
pred = is_branch_taken(dst_reg, insn.imm, opcode, is_jmp32);
}} else if (src_reg->type == SCALAR_VALUE &&
is_jmp32 && tnum_is_const(tnum_subreg(src_reg->var_off))) {{
pred = is_branch_taken(dst_reg,
tnum_subreg(src_reg->var_off).value,
opcode,
is_jmp32);
}} else if (src_reg->type == SCALAR_VALUE &&
!is_jmp32 && tnum_is_const(src_reg->var_off)) {{
pred = is_branch_taken(dst_reg,
src_reg->var_off.value,
opcode,
is_jmp32);
}}
if (pred == 1) {{
return;
}} else if (pred == 0) {{
return;
}}
if (BPF_SRC(insn.code) == BPF_X) {{
if (dst_reg->type == SCALAR_VALUE &&
src_reg->type == SCALAR_VALUE) {{
if (tnum_is_const(src_reg->var_off) ||
(is_jmp32 &&
tnum_is_const(tnum_subreg(src_reg->var_off))))
reg_set_min_max(
other_branch_dst_reg, dst_reg,
src_reg->var_off.value,
tnum_subreg(src_reg->var_off).value,
opcode, is_jmp32);
else if (tnum_is_const(dst_reg->var_off) ||
(is_jmp32 &&
tnum_is_const(tnum_subreg(dst_reg->var_off))))
reg_set_min_max_inv(
other_branch_src_reg, src_reg,
dst_reg->var_off.value,
tnum_subreg(dst_reg->var_off).value,
opcode, is_jmp32);
else if (!is_jmp32 &&
(opcode == BPF_JEQ || opcode == BPF_JNE))
/* Comparing for equality, we can combine knowledge */
reg_combine_min_max(other_branch_src_reg,
other_branch_dst_reg,
src_reg, dst_reg, opcode);
}}
}} else if (dst_reg->type == SCALAR_VALUE) {{
reg_set_min_max(other_branch_dst_reg, dst_reg, insn.imm,
(u32)insn.imm, opcode, is_jmp32);
}}
}}
'''
sanitize_needed, sanitize_val_alu, can_skip_alu_sanitation, update_alu_sanitation_state
"verbose",
"verbose_invalid_scalar",
"verbose_linfo",
"print_verifier_state",
"print_liveness",
"print_verification_stats",
"bpf_verifier_log_write",
"bpf_vlog_reset",
"bpf_log",
"bpf_verifier_vlog",
"print_bpf_insn"