Lines matching refs: prog

(Identifier cross-reference for the x86-64 BPF JIT, arch/x86/net/bpf_jit_comp.c: the leading number on each line is the line number in that file, and the trailing "in <func>()" note names the enclosing function.)

33 	do { prog = emit_code(prog, bytes, len); cnt += len; } while (0)
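
Line 33 is the body of the JIT's EMIT() macro: append up to four bytes of machine code at the cursor and keep a running byte count. A minimal standalone sketch of the pattern (the kernel wraps this in a family of EMIT1/EMIT2/EMIT3/EMIT4 helpers; a memcpy of a little-endian value stands in for the kernel's sized stores):

    #include <stdint.h>
    #include <string.h>

    typedef uint8_t u8;
    typedef uint32_t u32;

    /* Sketch of the helper behind EMIT(): copy 'len' low-order bytes of
     * 'bytes' at the cursor (little-endian x86), return the new cursor. */
    static u8 *emit_code(u8 *prog, u32 bytes, unsigned int len)
    {
        memcpy(prog, &bytes, len);   /* the kernel uses sized stores instead */
        return prog + len;
    }

    /* Relies on 'prog' and 'cnt' locals in the calling function, which is
     * why nearly every emitter below opens with "u8 *prog = *pprog;". */
    #define EMIT(bytes, len) \
        do { prog = emit_code(prog, bytes, len); cnt += len; } while (0)
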
236 u8 *prog = *pprog; in push_callee_regs() local
247 *pprog = prog; in push_callee_regs()
252 u8 *prog = *pprog; in pop_callee_regs() local
263 *pprog = prog; in pop_callee_regs()
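
Lines 236-263 show the convention every emitter in this file follows: take a `u8 **pprog`, advance a local copy of the cursor, and publish the result once on exit, so callers can chain emitters freely. A minimal sketch (the push encoding for rbx is the real one; the remaining callee-saved registers need a REX prefix and are elided):

    #include <stdint.h>
    #include <stdbool.h>

    typedef uint8_t u8;

    /* Sketch of the cursor-in/cursor-out idiom from push_callee_regs():
     * work on a local copy of the cursor, write it back at the end. */
    static void push_callee_regs(u8 **pprog, const bool *callee_regs_used)
    {
        u8 *prog = *pprog;               /* local working cursor */

        if (callee_regs_used[0])
            *prog++ = 0x53;              /* push rbx */
        /* ... r12-r15 are handled the same way in the real function ... */

        *pprog = prog;                   /* publish the advanced cursor */
    }
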
274 u8 *prog = *pprog; in emit_prologue() local
280 memcpy(prog, ideal_nops[NOP_ATOMIC5], cnt); in emit_prologue()
281 prog += cnt; in emit_prologue()
295 *pprog = prog; in emit_prologue()
300 u8 *prog = *pprog; in emit_patch() local
310 *pprog = prog; in emit_patch()
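
emit_patch() (lines 300-310) is the shared backend of emit_call() and emit_jump(): both produce a 5-byte instruction whose rel32 displacement is measured from the end of the instruction. A sketch of the displacement math:

    #include <stdint.h>
    #include <string.h>

    typedef uint8_t u8;
    typedef int64_t s64;

    #define X86_PATCH_SIZE 5             /* opcode + rel32 */

    /* Sketch: rel32 is relative to the *next* instruction, so the
     * displacement is target - (ip + 5). 0xE8 = call, 0xE9 = jmp. */
    static int emit_patch(u8 **pprog, void *func, void *ip, u8 opcode)
    {
        u8 *prog = *pprog;
        s64 offset = (u8 *)func - ((u8 *)ip + X86_PATCH_SIZE);

        if (offset != (s64)(int32_t)offset)
            return -1;                   /* target out of rel32 range */
        *prog++ = opcode;
        memcpy(prog, &offset, 4);        /* low 4 bytes: little-endian rel32 */
        prog += 4;
        *pprog = prog;
        return 0;
    }

    static int emit_jump(u8 **pprog, void *func, void *ip)
    {
        return emit_patch(pprog, func, ip, 0xE9);   /* near jmp */
    }
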
331 u8 *prog; in __bpf_arch_text_poke() local
336 prog = old_insn; in __bpf_arch_text_poke()
338 emit_call(&prog, old_addr, ip) : in __bpf_arch_text_poke()
339 emit_jump(&prog, old_addr, ip); in __bpf_arch_text_poke()
346 prog = new_insn; in __bpf_arch_text_poke()
348 emit_call(&prog, new_addr, ip) : in __bpf_arch_text_poke()
349 emit_jump(&prog, new_addr, ip); in __bpf_arch_text_poke()
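
__bpf_arch_text_poke() (lines 331-349) regenerates both the instruction that *should* currently be at `ip` and its replacement into scratch buffers, so it can verify the site before patching. A sketch of the verify-then-patch flow, reusing emit_jump() from the sketch above; a plain memcpy stands in for the kernel's text_poke machinery:

    #include <string.h>
    /* reuses u8, X86_PATCH_SIZE and emit_jump() from the emit_patch() sketch */

    /* Sketch: build expected-old and new bytes, compare, then install.
     * (In the kernel, a NULL old or new address means the site holds,
     * or will hold, a 5-byte NOP instead of a call/jump.) */
    static int text_poke_sketch(void *ip, void *old_addr, void *new_addr)
    {
        u8 old_insn[X86_PATCH_SIZE], new_insn[X86_PATCH_SIZE];
        u8 *prog;

        prog = old_insn;
        emit_jump(&prog, old_addr, ip);          /* expected current bytes */
        if (memcmp(ip, old_insn, X86_PATCH_SIZE))
            return -1;                           /* site changed under us */

        prog = new_insn;
        emit_jump(&prog, new_addr, ip);          /* replacement bytes */
        memcpy(ip, new_insn, X86_PATCH_SIZE);    /* kernel: text_poke_bp() */
        return 0;
    }
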
386 u8 *prog = *pprog; in emit_indirect_jump() local
394 emit_jump(&prog, &__x86_indirect_thunk_array[reg], ip); in emit_indirect_jump()
399 *pprog = prog; in emit_indirect_jump()
404 u8 *prog = *pprog; in emit_return() local
408 emit_jump(&prog, &__x86_return_thunk, ip); in emit_return()
415 *pprog = prog; in emit_return()
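
emit_indirect_jump() and emit_return() (lines 386-415) route through the Spectre mitigation thunks when those are enabled: `jmp *reg` becomes a direct jump into __x86_indirect_thunk_array indexed by register, and a bare `ret` becomes a jump to __x86_return_thunk. A sketch of the selection, with the kernel's config/feature checks reduced to a plain flag and the thunk array to a stand-in:

    /* reuses u8 and emit_jump() from the sketches above */

    static int retpolines_enabled;      /* stand-in for the CONFIG/feature checks */
    extern void *indirect_thunk[16];    /* stand-in for __x86_indirect_thunk_array */

    /* Sketch: thunked indirect jump vs. raw "jmp *reg" (ff /4), reg < 8. */
    static void emit_indirect_jump(u8 **pprog, int reg, u8 *ip)
    {
        u8 *prog = *pprog;

        if (retpolines_enabled) {
            emit_jump(&prog, indirect_thunk[reg], ip);
        } else {
            *prog++ = 0xFF;              /* jmp r/m64 */
            *prog++ = 0xE0 | reg;        /* ModRM: mod=11, /4, rm=reg */
        }
        *pprog = prog;
    }
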
437 u8 *prog = *pprog, *start = *pprog; in emit_bpf_tail_call_indirect() local
454 offset = ctx->tail_call_indirect_label - (prog + 2 - start); in emit_bpf_tail_call_indirect()
464 offset = ctx->tail_call_indirect_label - (prog + 2 - start); in emit_bpf_tail_call_indirect()
479 offset = ctx->tail_call_indirect_label - (prog + 2 - start); in emit_bpf_tail_call_indirect()
482 pop_callee_regs(&prog, callee_regs_used); in emit_bpf_tail_call_indirect()
499 emit_indirect_jump(&prog, 1 /* rcx */, ip + (prog - start)); in emit_bpf_tail_call_indirect()
502 ctx->tail_call_indirect_label = prog - start; in emit_bpf_tail_call_indirect()
503 *pprog = prog; in emit_bpf_tail_call_indirect()
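
The repeated `offset = ctx->tail_call_indirect_label - (prog + 2 - start)` (lines 454, 464, 479) sizes a short conditional jump to the out-of-line label recorded on a previous JIT pass: `prog - start` is the distance into this emitter's output, and the `+ 2` accounts for the 2-byte Jcc rel8 instruction itself, since rel8 is measured from the instruction's end. A sketch, with 0x76 being the real `jbe` opcode:

    /* reuses u8 from the sketches above */

    struct jit_ctx_sketch {
        int tail_call_indirect_label;   /* label offset from 'start',
                                           recorded on a prior pass */
    };

    /* Sketch of the rel8 math behind "jbe out" in the tail-call sequence. */
    static void emit_jbe_to_label(u8 **pprog, u8 *start,
                                  struct jit_ctx_sketch *ctx)
    {
        u8 *prog = *pprog;
        int offset = ctx->tail_call_indirect_label - (int)(prog + 2 - start);

        *prog++ = 0x76;                 /* jbe rel8 */
        *prog++ = (u8)offset;           /* fits: the sequence is tens of bytes */
        *pprog = prog;
    }
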
512 u8 *prog = *pprog, *start = *pprog; in emit_bpf_tail_call_direct() local
522 offset = ctx->tail_call_direct_label - (prog + 2 - start); in emit_bpf_tail_call_direct()
527 poke->tailcall_bypass = ip + (prog - start); in emit_bpf_tail_call_direct()
532 emit_jump(&prog, (u8 *)poke->tailcall_target + X86_PATCH_SIZE, in emit_bpf_tail_call_direct()
535 pop_callee_regs(&prog, callee_regs_used); in emit_bpf_tail_call_direct()
540 memcpy(prog, ideal_nops[NOP_ATOMIC5], X86_PATCH_SIZE); in emit_bpf_tail_call_direct()
541 prog += X86_PATCH_SIZE; in emit_bpf_tail_call_direct()
544 ctx->tail_call_direct_label = prog - start; in emit_bpf_tail_call_direct()
546 *pprog = prog; in emit_bpf_tail_call_direct()
549 static void bpf_tail_call_direct_fixup(struct bpf_prog *prog) in bpf_tail_call_direct_fixup() argument
556 for (i = 0; i < prog->aux->size_poke_tab; i++) { in bpf_tail_call_direct_fixup()
557 poke = &prog->aux->poke_tab[i]; in bpf_tail_call_direct_fixup()
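
emit_bpf_tail_call_direct() plants a 5-byte atomic NOP as its patch site (lines 540-541), and the poke descriptor records where the site and its bypass jump landed (line 527). Once the image is final, bpf_tail_call_direct_fixup() (lines 549-557) walks prog->aux->poke_tab and turns each site into a real jump. A sketch of the fixup loop, reusing text_poke_sketch() from above; resolve_target() is a hypothetical stand-in for the kernel's lookup of the tail-call target program, and the stability/bypass handling is elided:

    /* Sketch: rewrite each recorded NOP site into "jmp target". NULL as
     * the old address stands for "the site currently holds the NOP". */
    for (i = 0; i < prog->aux->size_poke_tab; i++) {
        struct bpf_jit_poke_descriptor *poke = &prog->aux->poke_tab[i];
        void *target = resolve_target(poke);   /* hypothetical lookup */

        text_poke_sketch(poke->tailcall_target, NULL /* was NOP */, target);
    }
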
595 u8 *prog = *pprog; in emit_mov_imm32() local
630 *pprog = prog; in emit_mov_imm32()
636 u8 *prog = *pprog; in emit_mov_imm64() local
646 emit_mov_imm32(&prog, false, dst_reg, imm32_lo); in emit_mov_imm64()
654 *pprog = prog; in emit_mov_imm64()
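
emit_mov_imm64() (lines 636-654) avoids the 10-byte movabs when it can: if the value fits in 32 bits it defers to emit_mov_imm32() (line 646), because a 32-bit mov zero-extends into the full 64-bit register on x86-64. A sketch of the split; emit_movabs() is a hypothetical helper for the long form:

    #include <stdbool.h>
    #include <stdint.h>

    typedef uint8_t u8;
    typedef uint32_t u32;
    typedef uint64_t u64;

    void emit_mov_imm32(u8 **pprog, bool sign_propagate, u32 dst_reg, u32 imm32);
    void emit_movabs(u8 **pprog, u32 dst_reg, u64 imm64);  /* hypothetical */

    /* Sketch: choose the shortest encoding for a 64-bit immediate load.
     * (The kernel also has a sign-extending middle case for negative
     * values that fit in 32 bits; elided here.) */
    static void emit_mov_imm64_sketch(u8 **pprog, u32 dst_reg,
                                      u32 imm32_hi, u32 imm32_lo)
    {
        u64 imm64 = ((u64)imm32_hi << 32) | imm32_lo;

        if (imm64 == (u32)imm64)
            /* upper half zero: 5-byte "mov r32, imm32" zero-extends */
            emit_mov_imm32(pprog, false, dst_reg, imm32_lo);
        else
            /* full 10-byte "movabs r64, imm64" */
            emit_movabs(pprog, dst_reg, imm64);
    }
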
659 u8 *prog = *pprog; in emit_mov_reg() local
672 *pprog = prog; in emit_mov_reg()
678 u8 *prog = *pprog; in emit_ldx() local
711 *pprog = prog; in emit_ldx()
717 u8 *prog = *pprog; in emit_stx() local
749 *pprog = prog; in emit_stx()
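
emit_ldx() and emit_stx() (lines 678-749) pick the x86 opcode from the BPF access size; the operand encoding (REX, ModRM, displacement) then follows from the registers and offset. A sketch of the load-side opcode table, with the real opcodes and the operand bytes elided:

    #include <stdint.h>

    typedef uint8_t u8;

    enum { SZ_B, SZ_H, SZ_W, SZ_DW };   /* stand-ins for BPF_B/H/W/DW */

    /* Sketch: opcode selection in emit_ldx(). Byte and word loads use
     * movzx so the destination's upper bits are cleared, matching BPF
     * load semantics. */
    static void emit_ldx_opcode(u8 **pprog, int size)
    {
        u8 *prog = *pprog;

        switch (size) {
        case SZ_B:  *prog++ = 0x0F; *prog++ = 0xB6; break; /* movzx r, m8  */
        case SZ_H:  *prog++ = 0x0F; *prog++ = 0xB7; break; /* movzx r, m16 */
        case SZ_W:  *prog++ = 0x8B; break;                 /* mov r32, m32 */
        case SZ_DW: *prog++ = 0x48; *prog++ = 0x8B; break; /* REX.W mov r64 */
        }
        *pprog = prog;          /* ModRM + displacement follow in the real code */
    }
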
795 u8 *prog = temp; in do_jit() local
803 emit_prologue(&prog, bpf_prog->aux->stack_depth, in do_jit()
806 push_callee_regs(&prog, callee_regs_used); in do_jit()
807 addrs[0] = prog - temp; in do_jit()
847 emit_mov_reg(&prog, in do_jit()
914 emit_mov_imm32(&prog, BPF_CLASS(insn->code) == BPF_ALU64, in do_jit()
919 emit_mov_imm64(&prog, dst_reg, insn[1].imm, insn[0].imm); in do_jit()
989 emit_mov_reg(&prog, is64, BPF_REG_0, src_reg); in do_jit()
991 emit_mov_imm32(&prog, is64, BPF_REG_0, imm32); in do_jit()
1173 emit_stx(&prog, BPF_SIZE(insn->code), dst_reg, src_reg, insn->off); in do_jit()
1185 emit_ldx(&prog, BPF_SIZE(insn->code), dst_reg, src_reg, insn->off); in do_jit()
1226 ex->fixup = (prog - temp) | (reg2pt_regs[dst_reg] << 8); in do_jit()
1254 if (!imm32 || emit_call(&prog, func, image + addrs[i - 1] + 7)) in do_jit()
1257 if (!imm32 || emit_call(&prog, func, image + addrs[i - 1])) in do_jit()
1265 &prog, image + addrs[i - 1], in do_jit()
1270 emit_bpf_tail_call_indirect(&prog, in do_jit()
1457 pop_callee_regs(&prog, callee_regs_used); in do_jit()
1459 emit_return(&prog, image + addrs[i - 1] + (prog - temp)); in do_jit()
1473 ilen = prog - temp; in do_jit()
1497 prog = temp; in do_jit()
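
do_jit() (lines 795-1497) emits each BPF instruction into a small on-stack scratch buffer: `ilen = prog - temp` (line 1473) measures the encoding, which is copied into the output image when one exists, and `prog = temp` (line 1497) rewinds the scratch cursor for the next instruction; addrs[] records the cumulative offsets that jump instructions resolve against. A sketch of that inner loop, with the giant per-opcode switch and the error paths elided:

    #include <string.h>
    /* reuses the u8 typedef from the sketches above */

    /* Sketch of do_jit()'s per-instruction loop: emit into a scratch
     * buffer sized BPF_MAX_INSN_SIZE + BPF_INSN_SAFETY (128 + 64 in the
     * kernel), measure, copy out, record cumulative offsets. */
    static int do_jit_loop_sketch(int insn_cnt, u8 *image, int *addrs)
    {
        u8 temp[128 + 64], *prog = temp;
        int i, ilen, proglen = 0;

        for (i = 1; i <= insn_cnt; i++) {
            /* ...the big per-opcode switch emits this insn's bytes at prog... */

            ilen = prog - temp;          /* encoded length of this insn */
            if (image)
                memcpy(image + proglen, temp, ilen);
            proglen += ilen;
            addrs[i] = proglen;          /* offset of the *next* insn */
            prog = temp;                 /* rewind scratch */
        }
        return proglen;
    }
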
1507 static void save_regs(const struct btf_func_model *m, u8 **prog, int nr_args, in save_regs() argument
1517 emit_stx(prog, bytes_to_bpf_size(m->arg_size[i]), in save_regs()
1523 static void restore_regs(const struct btf_func_model *m, u8 **prog, int nr_args, in restore_regs() argument
1534 emit_ldx(prog, bytes_to_bpf_size(m->arg_size[i]), in restore_regs()
1543 u8 *prog = *pprog; in invoke_bpf_prog() local
1547 if (emit_call(&prog, __bpf_prog_enter_sleepable, prog)) in invoke_bpf_prog()
1550 if (emit_call(&prog, __bpf_prog_enter, prog)) in invoke_bpf_prog()
1553 emit_mov_reg(&prog, true, BPF_REG_6, BPF_REG_0); in invoke_bpf_prog()
1560 emit_mov_imm64(&prog, BPF_REG_2, in invoke_bpf_prog()
1564 if (emit_call(&prog, p->bpf_func, prog)) in invoke_bpf_prog()
1576 emit_stx(&prog, BPF_DW, BPF_REG_FP, BPF_REG_0, -8); in invoke_bpf_prog()
1579 if (emit_call(&prog, __bpf_prog_exit_sleepable, prog)) in invoke_bpf_prog()
1583 emit_mov_imm64(&prog, BPF_REG_1, (long) p >> 32, in invoke_bpf_prog()
1586 emit_mov_reg(&prog, true, BPF_REG_2, BPF_REG_6); in invoke_bpf_prog()
1587 if (emit_call(&prog, __bpf_prog_exit, prog)) in invoke_bpf_prog()
1591 *pprog = prog; in invoke_bpf_prog()
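
invoke_bpf_prog() (lines 1543-1591) brackets each program call with the enter/exit helpers: the timestamp from __bpf_prog_enter() is parked in rbx (BPF_REG_6, line 1553), the program is called with a pointer to the args spilled on the trampoline stack (lines 1560-1564), the return value may be stored at [rbp - 8] for later branching (line 1576), and __bpf_prog_exit() gets the prog pointer plus the saved timestamp (lines 1583-1587). A C-level sketch of what the emitted code does; the struct and prototypes are simplified stand-ins, and the helpers' argument plumbing varies by kernel version:

    #include <stdint.h>
    typedef uint64_t u64;

    struct prog_sketch { u64 (*bpf_func)(void *ctx, void *insn); };

    u64 __bpf_prog_enter(struct prog_sketch *p);    /* returns start time */
    void __bpf_prog_exit(struct prog_sketch *p, u64 start);

    /* Sketch: what the bytes emitted by invoke_bpf_prog() amount to.
     * 'stack_args' points at the area save_regs() filled; 'retval_slot'
     * models [rbp - 8], which the mod_ret logic later tests. */
    static void invoke_one_prog(struct prog_sketch *p, void *stack_args,
                                u64 *retval_slot)
    {
        u64 start = __bpf_prog_enter(p);   /* emitted code keeps this in rbx */

        *retval_slot = p->bpf_func(stack_args, NULL);
        __bpf_prog_exit(p, start);         /* rdi = p, rsi = saved rbx */
    }
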
1598 u8 *prog = *pprog; in emit_nops() local
1612 *pprog = prog; in emit_nops()
1617 u8 *target, *prog = *pprog; in emit_align() local
1619 target = PTR_ALIGN(prog, align); in emit_align()
1620 if (target != prog) in emit_align()
1621 emit_nops(&prog, target - prog); in emit_align()
1623 *pprog = prog; in emit_align()
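
emit_nops() and emit_align() (lines 1598-1623) pad the instruction stream; emit_align() rounds the cursor up to the next boundary and is used to 16-byte-align branch targets in the trampoline and dispatcher. A sketch, assuming single-byte 0x90 NOPs in place of the kernel's optimal multi-byte ideal_nops:

    #include <stdint.h>

    typedef uint8_t u8;

    #define PTR_ALIGN(p, a) \
        ((u8 *)(((uintptr_t)(p) + ((a) - 1)) & ~(uintptr_t)((a) - 1)))

    /* Sketch: pad up to the next 'align' boundary. The kernel's
     * emit_nops() picks optimal multi-byte NOPs; plain 0x90s are
     * equivalent in effect but slower to execute. */
    static void emit_align(u8 **pprog, unsigned int align)
    {
        u8 *prog = *pprog;
        u8 *target = PTR_ALIGN(prog, align);

        while (prog < target)
            *prog++ = 0x90;          /* 1-byte NOP */
        *pprog = prog;
    }
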
1628 u8 *prog = *pprog; in emit_cond_near_jump() local
1638 *pprog = prog; in emit_cond_near_jump()
1647 u8 *prog = *pprog; in invoke_bpf() local
1650 if (invoke_bpf_prog(m, &prog, tp->progs[i], stack_size, in invoke_bpf()
1654 *pprog = prog; in invoke_bpf()
1662 u8 *prog = *pprog; in invoke_bpf_mod_ret() local
1668 emit_mov_imm32(&prog, false, BPF_REG_0, 0); in invoke_bpf_mod_ret()
1669 emit_stx(&prog, BPF_DW, BPF_REG_FP, BPF_REG_0, -8); in invoke_bpf_mod_ret()
1671 if (invoke_bpf_prog(m, &prog, tp->progs[i], stack_size, true)) in invoke_bpf_mod_ret()
1686 branches[i] = prog; in invoke_bpf_mod_ret()
1687 emit_nops(&prog, 4 + 2); in invoke_bpf_mod_ret()
1690 *pprog = prog; in invoke_bpf_mod_ret()
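
invoke_bpf_mod_ret() must branch to the fexit entry point when a fmod_ret program returns nonzero, but that address is not known yet. So line 1686 records the spot and line 1687 reserves 4 + 2 bytes of NOPs, exactly the size of a near conditional jump (2-byte 0F 8x opcode plus 4-byte rel32); arch_prepare_bpf_trampoline() later overwrites each placeholder via emit_cond_near_jump() (line 1865). A sketch (fragment) of the reserve-then-backpatch pattern; do_fexit and nr_mod_ret are stand-ins for the finalized fexit entry point and program count:

    /* Reserve the branch site while emitting the mod_ret sequence... */
    branches[i] = prog;              /* where "jne do_fexit" will live */
    emit_nops(&prog, 4 + 2);         /* 2-byte Jcc opcode + 4-byte rel32 */

    /* ...and backpatch once the fexit entry point is known: */
    for (i = 0; i < nr_mod_ret; i++)
        emit_cond_near_jump(&branches[i], do_fexit, branches[i],
                            0x75 /* X86_JNE */);
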
1782 u8 *prog; in arch_prepare_bpf_trampoline() local
1803 prog = image; in arch_prepare_bpf_trampoline()
1810 save_regs(m, &prog, nr_args, stack_size); in arch_prepare_bpf_trampoline()
1814 emit_mov_imm64(&prog, BPF_REG_1, (long) im >> 32, (u32) (long) im); in arch_prepare_bpf_trampoline()
1815 if (emit_call(&prog, __bpf_tramp_enter, prog)) { in arch_prepare_bpf_trampoline()
1822 if (invoke_bpf(m, &prog, fentry, stack_size, in arch_prepare_bpf_trampoline()
1832 if (invoke_bpf_mod_ret(m, &prog, fmod_ret, stack_size, in arch_prepare_bpf_trampoline()
1840 restore_regs(m, &prog, nr_args, stack_size); in arch_prepare_bpf_trampoline()
1843 if (emit_call(&prog, orig_call, prog)) { in arch_prepare_bpf_trampoline()
1848 emit_stx(&prog, BPF_DW, BPF_REG_FP, BPF_REG_0, -8); in arch_prepare_bpf_trampoline()
1849 im->ip_after_call = prog; in arch_prepare_bpf_trampoline()
1850 memcpy(prog, ideal_nops[NOP_ATOMIC5], X86_PATCH_SIZE); in arch_prepare_bpf_trampoline()
1851 prog += X86_PATCH_SIZE; in arch_prepare_bpf_trampoline()
1860 emit_align(&prog, 16); in arch_prepare_bpf_trampoline()
1865 emit_cond_near_jump(&branches[i], prog, branches[i], in arch_prepare_bpf_trampoline()
1870 if (invoke_bpf(m, &prog, fexit, stack_size, false)) { in arch_prepare_bpf_trampoline()
1876 restore_regs(m, &prog, nr_args, stack_size); in arch_prepare_bpf_trampoline()
1883 im->ip_epilogue = prog; in arch_prepare_bpf_trampoline()
1885 emit_mov_imm64(&prog, BPF_REG_1, (long) im >> 32, (u32) (long) im); in arch_prepare_bpf_trampoline()
1886 if (emit_call(&prog, __bpf_tramp_exit, prog)) { in arch_prepare_bpf_trampoline()
1893 emit_ldx(&prog, BPF_DW, BPF_REG_0, BPF_REG_FP, -8); in arch_prepare_bpf_trampoline()
1900 emit_return(&prog, prog); in arch_prepare_bpf_trampoline()
1902 if (WARN_ON_ONCE(prog > (u8 *)image_end - BPF_INSN_SAFETY)) { in arch_prepare_bpf_trampoline()
1906 ret = prog - (u8 *)image; in arch_prepare_bpf_trampoline()
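
arch_prepare_bpf_trampoline() (lines 1782-1906) stitches the pieces above into one image. A sketch of the emitted layout for the full fentry + fmod_ret + fexit case with the original function called in the middle (the numbers refer to the listing lines above):

    /*
     * Sketch of the trampoline image layout:
     *
     *   spill function args to the stack        save_regs()          (1810)
     *   rdi = im; call __bpf_tramp_enter        (1814-1815)
     *   run fentry progs                        invoke_bpf()         (1822)
     *   run fmod_ret progs, leaving NOP
     *     placeholders for their branches       invoke_bpf_mod_ret() (1832)
     *   reload args                             restore_regs()       (1840)
     *   call the traced function                emit_call()          (1843)
     *   spill rax to [rbp - 8]                  emit_stx()           (1848)
     *   im->ip_after_call: 5-byte NOP site      (1849-1851)
     *   16-byte align; backpatch the fmod_ret
     *     branches to land here                 (1860-1865)
     *   run fexit progs                         invoke_bpf()         (1870)
     *   reload args                             restore_regs()       (1876)
     *   im->ip_epilogue: rdi = im;
     *     call __bpf_tramp_exit                 (1883-1886)
     *   reload rax from [rbp - 8]               emit_ldx()           (1893)
     *   return (possibly via return thunk)      emit_return()        (1900)
     */
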
1915 u8 *jg_reloc, *prog = *pprog; in emit_bpf_dispatcher() local
1928 err = emit_cond_near_jump(&prog, /* je func */ in emit_bpf_dispatcher()
1929 (void *)progs[a], prog, in emit_bpf_dispatcher()
1934 emit_indirect_jump(&prog, 2 /* rdx */, prog); in emit_bpf_dispatcher()
1936 *pprog = prog; in emit_bpf_dispatcher()
1956 jg_reloc = prog; in emit_bpf_dispatcher()
1958 err = emit_bpf_dispatcher(&prog, a, a + pivot, /* emit lower_part */ in emit_bpf_dispatcher()
1968 emit_align(&prog, 16); in emit_bpf_dispatcher()
1969 jg_offset = prog - jg_reloc; in emit_bpf_dispatcher()
1972 err = emit_bpf_dispatcher(&prog, a + pivot + 1, /* emit upper_part */ in emit_bpf_dispatcher()
1977 *pprog = prog; in emit_bpf_dispatcher()
1995 u8 *prog = image; in arch_prepare_bpf_dispatcher() local
1998 return emit_bpf_dispatcher(&prog, 0, num_funcs - 1, funcs); in arch_prepare_bpf_dispatcher()
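
emit_bpf_dispatcher() (lines 1915-1977) emits a branchy binary search over the sorted program addresses, with the run-time target in rdx: a leaf compares and emits a direct `je func` (lines 1928-1929) with an indirect-jump fallback (line 1934); an inner node emits a `jg` whose rel32 is backpatched (lines 1956, 1969) once the lower half has been emitted, and arch_prepare_bpf_dispatcher() (lines 1995-1998) starts the recursion over the whole range. A sketch of the recursive shape; the emit_* and backpatch helpers here are hypothetical stand-ins for the EMIT sequences in the real code:

    #include <stdint.h>

    typedef uint8_t u8;
    typedef int64_t s64;

    void emit_cmp_rdx_imm64(u8 **pprog, s64 imm);  /* cmp rdx, imm */
    void emit_je(u8 **pprog, void *target);        /* je target */
    u8 *emit_jg_placeholder(u8 **pprog);           /* jg <rel32 patched later> */
    void backpatch_rel32(u8 *jg_reloc, u8 *target);
    void emit_align(u8 **pprog, unsigned int align);

    /* Sketch of the binary search over prog addresses; the caller
     * appends an indirect "jmp *rdx" fallback after the whole tree. */
    static void dispatch_sketch(u8 **pprog, int a, int b, s64 *progs)
    {
        if (a == b) {
            emit_cmp_rdx_imm64(pprog, progs[a]);
            emit_je(pprog, (void *)(uintptr_t)progs[a]);  /* leaf: direct hit */
            return;
        }

        int pivot = (b - a) / 2;
        emit_cmp_rdx_imm64(pprog, progs[a + pivot]);
        u8 *jg_reloc = emit_jg_placeholder(pprog);     /* skip the lower half */

        dispatch_sketch(pprog, a, a + pivot, progs);   /* lower half */
        emit_align(pprog, 16);                         /* jg lands 16B-aligned */
        backpatch_rel32(jg_reloc, *pprog);             /* fix jg to land here */

        dispatch_sketch(pprog, a + pivot + 1, b, progs);  /* upper half */
    }
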
2009 struct bpf_prog *bpf_int_jit_compile(struct bpf_prog *prog) in bpf_int_jit_compile() argument
2012 struct bpf_prog *tmp, *orig_prog = prog; in bpf_int_jit_compile()
2023 if (!prog->jit_requested) in bpf_int_jit_compile()
2026 tmp = bpf_jit_blind_constants(prog); in bpf_int_jit_compile()
2033 if (tmp != prog) { in bpf_int_jit_compile()
2035 prog = tmp; in bpf_int_jit_compile()
2038 jit_data = prog->aux->jit_data; in bpf_int_jit_compile()
2042 prog = orig_prog; in bpf_int_jit_compile()
2045 prog->aux->jit_data = jit_data; in bpf_int_jit_compile()
2056 addrs = kvmalloc_array(prog->len + 1, sizeof(*addrs), GFP_KERNEL); in bpf_int_jit_compile()
2058 prog = orig_prog; in bpf_int_jit_compile()
2066 for (proglen = 0, i = 0; i <= prog->len; i++) { in bpf_int_jit_compile()
2080 proglen = do_jit(prog, addrs, image, oldproglen, &ctx); in bpf_int_jit_compile()
2086 prog = orig_prog; in bpf_int_jit_compile()
2105 u32 extable_size = prog->aux->num_exentries * in bpf_int_jit_compile()
2112 prog = orig_prog; in bpf_int_jit_compile()
2115 prog->aux->extable = (void *) image + roundup(proglen, align); in bpf_int_jit_compile()
2122 bpf_jit_dump(prog->len, proglen, pass + 1, image); in bpf_int_jit_compile()
2125 if (!prog->is_func || extra_pass) { in bpf_int_jit_compile()
2126 bpf_tail_call_direct_fixup(prog); in bpf_int_jit_compile()
2135 prog->bpf_func = (void *)image; in bpf_int_jit_compile()
2136 prog->jited = 1; in bpf_int_jit_compile()
2137 prog->jited_len = proglen; in bpf_int_jit_compile()
2139 prog = orig_prog; in bpf_int_jit_compile()
2142 if (!image || !prog->is_func || extra_pass) { in bpf_int_jit_compile()
2144 bpf_prog_fill_jited_linfo(prog, addrs + 1); in bpf_int_jit_compile()
2148 prog->aux->jit_data = NULL; in bpf_int_jit_compile()
2152 bpf_jit_prog_release_other(prog, prog == orig_prog ? in bpf_int_jit_compile()
2154 return prog; in bpf_int_jit_compile()
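
bpf_int_jit_compile() (lines 2009-2154) wraps everything in a fixed-point iteration: jump offsets depend on instruction sizes and vice versa, so do_jit() (line 2080) is rerun until proglen stops changing, the image is allocated at the converged size, and one final pass emits into it; only then are the direct tail-call sites patched (lines 2125-2126). A sketch (fragment) of the convergence loop; max_passes and alloc_image() are stand-ins for the kernel's pass limit and bpf_jit_binary_alloc(), and error paths are condensed:

    /* Sketch: image stays NULL while lengths are still settling; the
     * first pass after convergence emits the real bytes and must not
     * change the length again. */
    for (pass = 0; pass < max_passes || image; pass++) {
        proglen = do_jit(prog, addrs, image, oldproglen, &ctx);
        if (image) {
            if (proglen != oldproglen)
                goto out_image;            /* layout diverged: bug */
            break;                         /* final pass done */
        }
        if (proglen == oldproglen)
            image = alloc_image(proglen);  /* converged: get memory */
        oldproglen = proglen;
    }
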